diff --git a/.github/ISSUE_TEMPLATE/bug.md b/.github/ISSUE_TEMPLATE/bug.md index d6a891e1c518..6e53805900c4 100644 --- a/.github/ISSUE_TEMPLATE/bug.md +++ b/.github/ISSUE_TEMPLATE/bug.md @@ -2,7 +2,7 @@ name: "\U0001F41B Bug report" about: Report a bug about the Dotty Compiler title: '' -labels: itype:bug +labels: itype:bug, stat:needs triage assignees: '' --- @@ -14,6 +14,12 @@ If you're not sure what version you're using, run `print scalaVersion` from sbt ## Minimized code + + ```Scala println("hello, world") ``` diff --git a/.github/ISSUE_TEMPLATE/crash.md b/.github/ISSUE_TEMPLATE/crash.md index dc935e310548..5a4cfe3f5e15 100644 --- a/.github/ISSUE_TEMPLATE/crash.md +++ b/.github/ISSUE_TEMPLATE/crash.md @@ -2,7 +2,7 @@ name: "\U0001F4A5 Crash report" about: Report a Dotty compiler crash title: '' -labels: itype:bug, itype:crash +labels: itype:bug, itype:crash, stat:needs triage assignees: '' --- @@ -14,6 +14,12 @@ If you're not sure what version you're using, run `print scalaVersion` from sbt ## Minimized code + + ```Scala println("hello, world") ``` diff --git a/.github/ISSUE_TEMPLATE/other-issue.md b/.github/ISSUE_TEMPLATE/other-issue.md index cfa8618c542a..07a9fe77846a 100644 --- a/.github/ISSUE_TEMPLATE/other-issue.md +++ b/.github/ISSUE_TEMPLATE/other-issue.md @@ -1,9 +1,10 @@ --- -name: "Other issue" +name: Other issue about: Report an issue about the Dotty Compiler (not bug or crash) title: '' -labels: +labels: stat:needs triage assignees: '' + --- ## Compiler version diff --git a/.github/ISSUE_TEMPLATE/syntax-highlight.md b/.github/ISSUE_TEMPLATE/syntax-highlight.md index 17b720d4d2aa..ffb8fbd32c46 100644 --- a/.github/ISSUE_TEMPLATE/syntax-highlight.md +++ b/.github/ISSUE_TEMPLATE/syntax-highlight.md @@ -1,10 +1,13 @@ --- name: Syntax highlighting -about: Please create a syntax highlighting issue here https://github.com/scala/vscode-scala-syntax/issues +about: Please create a syntax highlighting issue in a respective repo title: '' labels: '' 
assignees: '' --- -Please create a syntax highlighting issue here: [scala/vscode-scala-syntax](https://github.com/scala/vscode-scala-syntax/issues). +Please create a syntax highlighting issue here +* VS Code / GitHub: https://github.com/scala/vscode-scala-syntax/issues +* IntelliJ: https://youtrack.jetbrains.com/issues/SCL?q=tag:%20%7BScala%203%7D%20tag:%20%7BSyntax%20Highlighting%7D +* highlight.js: https://github.com/highlightjs/highlight.js/issues diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 20b59eb56aa6..d9da3baee124 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -35,10 +35,7 @@ jobs: - ${{ github.workspace }}/../../cache/ivy:/root/.ivy2/cache - ${{ github.workspace }}/../../cache/general:/root/.cache if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' - || ( - github.event_name == 'push' - && !startsWith(github.event.ref, 'refs/tags/sbt-dotty-') - ) + || github.event_name == 'push' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -69,7 +66,7 @@ jobs: - name: Test run: | - ./project/scripts/sbt ";compile ;test" + ./project/scripts/sbt ";dist/pack; compile ;test" ./project/scripts/cmdTests test: @@ -82,10 +79,7 @@ jobs: - ${{ github.workspace }}/../../cache/ivy:/root/.ivy2/cache - ${{ github.workspace }}/../../cache/general:/root/.cache if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' - || ( - github.event_name == 'push' - && !startsWith(github.event.ref, 'refs/tags/sbt-dotty-') - ) + || github.event_name == 'push' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -117,19 +111,19 @@ jobs: - name: Cmd Tests run: | - ./project/scripts/sbt ";scala3-bootstrapped/compile ;scala3-bootstrapped/test;sjsSandbox/run;sjsSandbox/test;sjsJUnitTests/test;sjsCompilerTests/test ;sbt-dotty/scripted scala2-compat/* ;configureIDE 
;stdlib-bootstrapped/test:run ;stdlib-bootstrapped-tasty-tests/test" - ./project/scripts/bootstrapCmdTests + ./project/scripts/sbt ";dist/pack; scala3-bootstrapped/compile; scala3-bootstrapped/test;sjsSandbox/run;sjsSandbox/test;sjsJUnitTests/test;sjsCompilerTests/test ;sbt-test/scripted scala2-compat/* ;configureIDE ;stdlib-bootstrapped/test:run ;stdlib-bootstrapped-tasty-tests/test; scala3-compiler-bootstrapped/scala3CompilerCoursierTest:test" + ./project/scripts/cmdTests + ./project/scripts/bootstrappedOnlyCmdTests + + - name: MiMa + run: | + ./project/scripts/sbt ";scala3-interfaces/mimaReportBinaryIssues ;scala3-library-bootstrapped/mimaReportBinaryIssues ;scala3-library-bootstrappedJS/mimaReportBinaryIssues; tasty-core-bootstrapped/mimaReportBinaryIssues" test_windows_fast: runs-on: [self-hosted, Windows] if: "( github.event_name == 'push' - && github.ref != 'refs/heads/master' - && !startsWith(github.event.ref, 'refs/tags/sbt-dotty-') - ) - || ( - github.event_name == 'pull_request' - && !contains(github.event.pull_request.body, '[skip ci]') + && github.ref != 'refs/heads/main' ) || ( github.event_name == 'pull_request' @@ -161,6 +155,10 @@ jobs: run: '"C:\Program Files\Git\bin\bash" ./project/scripts/winCmdTests' shell: cmd + - name: win tests + run: './project/scripts/winCmdTests.bat' + shell: cmd + - name: Scala.js Test run: sbt ";sjsJUnitTests/test ;sjsCompilerTests/test" shell: cmd @@ -168,10 +166,7 @@ jobs: test_windows_full: runs-on: [self-hosted, Windows] if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' - || ( - github.event_name == 'push' - && !startsWith(github.event.ref, 'refs/tags/sbt-dotty-') - ) + || github.event_name == 'push' || ( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') @@ -187,7 +182,7 @@ jobs: uses: actions/checkout@v2 - name: Test - run: sbt ";scala3-bootstrapped/compile ;scala3-bootstrapped/test" + run: sbt ";dist/pack ;scala3-bootstrapped/compile 
;scala3-bootstrapped/test" shell: cmd - name: Scala.js Test @@ -323,6 +318,49 @@ jobs: git submodule update --init --recursive --jobs 7 ./project/scripts/sbt "community-build/testOnly dotty.communitybuild.CommunityBuildTestC" + community_build_forward_compat: + runs-on: [self-hosted, Linux] + container: + image: lampepfl/dotty:2021-03-22 + options: --cpu-shares 4096 + volumes: + - ${{ github.workspace }}/../../cache/sbt:/root/.sbt + - ${{ github.workspace }}/../../cache/ivy:/root/.ivy2/cache + - ${{ github.workspace }}/../../cache/general:/root/.cache + if: "github.event_name == 'schedule' && github.repository == 'lampepfl/dotty' + || ( + github.event_name == 'pull_request' + && !contains(github.event.pull_request.body, '[skip ci]') + && !contains(github.event.pull_request.body, '[skip community_build]') + && contains(github.event.pull_request.body, '[test_forward_compat]') + ) + || ( + github.event_name == 'workflow_dispatch' + && github.repository == 'lampepfl/dotty' + )" + + steps: + - name: Reset existing repo + run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true + + - name: Checkout cleanup script + uses: actions/checkout@v2 + + - name: Cleanup + run: .github/workflows/cleanup.sh + + - name: Git Checkout + uses: actions/checkout@v2 + + - name: Add SBT proxy repositories + run: cp -vf .github/workflows/repositories /root/.sbt/ ; true + + - name: Test + run: | + git submodule sync + git submodule update --init --recursive --jobs 7 + ./project/scripts/sbt "community-build/testOnly dotty.communitybuild.CommunityBuildTestForwardCompat" + test_sbt: runs-on: [self-hosted, Linux] container: @@ -361,7 +399,7 @@ jobs: run: cp -vf .github/workflows/repositories /root/.sbt/ ; true - name: Test sbt - run: ./project/scripts/sbt "sbt-dotty/scripted; sbt-community-build/scripted" + run: ./project/scripts/sbt "sbt-test/scripted; sbt-community-build/scripted" 
test_java8: runs-on: [self-hosted, Linux] @@ -377,7 +415,6 @@ jobs: || ( github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/') - && !startsWith(github.event.ref, 'refs/tags/sbt-dotty-') ) || ( github.event_name == 'pull_request' @@ -410,8 +447,9 @@ jobs: - name: Test run: | - ./project/scripts/sbt ";scala3-bootstrapped/compile ;scala3-bootstrapped/test;sjsSandbox/run;sjsSandbox/test;sjsJUnitTests/test;sjsCompilerTests/test ;sbt-dotty/scripted scala2-compat/* ;configureIDE ;stdlib-bootstrapped/test:run ;stdlib-bootstrapped-tasty-tests/test" - ./project/scripts/bootstrapCmdTests + ./project/scripts/sbt ";dist/pack ;scala3-bootstrapped/compile ;scala3-bootstrapped/test;sjsSandbox/run;sjsSandbox/test;sjsJUnitTests/test;sjsCompilerTests/test ;sbt-test/scripted scala2-compat/* ;configureIDE ;stdlib-bootstrapped/test:run ;stdlib-bootstrapped-tasty-tests/test" + ./project/scripts/cmdTests + ./project/scripts/bootstrappedOnlyCmdTests publish_nightly: runs-on: [self-hosted, Linux] @@ -422,7 +460,7 @@ jobs: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt - ${{ github.workspace }}/../../cache/ivy:/root/.ivy2/cache - ${{ github.workspace }}/../../cache/general:/root/.cache - needs: [test_non_bootstrapped, test, community_build_a, community_build_b, community_build_c, test_sbt, test_java8] + needs: [test_non_bootstrapped, test, community_build_a, community_build_b, community_build_c, community_build_forward_compat, test_sbt, test_java8] if: "(github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') && github.repository == 'lampepfl/dotty'" env: NIGHTLYBUILD: yes @@ -447,7 +485,20 @@ jobs: - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true + - name: Get version string for this build + run: | + ver=$(./project/scripts/sbt "print scala3-compiler-bootstrapped/version" | tail -n1) + echo "This build version: $ver" + echo "THISBUILD_VERSION=$ver" >> $GITHUB_ENV + + - name: Check whether not yet 
published + id: not_yet_published + continue-on-error: true + run: | + ! ./project/scripts/is-version-published.sh "$THISBUILD_VERSION" + - name: Publish Nightly + if: "steps.not_yet_published.outcome == 'success'" run: | ./project/scripts/sbtPublish ";project scala3-bootstrapped ;publishSigned ;sonatypeBundleRelease" @@ -464,9 +515,13 @@ jobs: if: "(github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') && github.repository == 'lampepfl/dotty'" env: NIGHTLYBUILD: yes - BOT_TOKEN: ${{ secrets.BOT_TOKEN }} # If you need to change this: - # Generate one at https://github.com/settings/tokens - # Make sure you have the write permissions to the repo: https://github.com/lampepfl/dotty-website + DOTTY_WEBSITE_BOT_TOKEN: ${{ secrets.BOT_TOKEN }} # If you need to change this: + # Generate one at https://github.com/settings/tokens + # Make sure you have the write permissions to the repo: https://github.com/lampepfl/dotty-website + # Currently unused token, no need to deploy anything to docs.scala-lang + # DOCS_SCALALANG_BOT_TOKEN: ${{ secrets.DOCS_SCALALANG_BOT_TOKEN }} # If you need to change this: + # Generate one at https://github.com/settings/tokens + # Make sure you have the write permissions to the repo: https://github.com/scala/docs.scala-lang steps: - name: Reset existing repo @@ -488,14 +543,15 @@ jobs: run: | ./project/scripts/genDocs -doc-snapshot - - name: Deploy Website + - name: Deploy Website to dotty-website uses: peaceiris/actions-gh-pages@v3 with: - personal_token: ${{ secrets.BOT_TOKEN }} + personal_token: ${{ env.DOTTY_WEBSITE_BOT_TOKEN }} publish_dir: docs/_site external_repository: lampepfl/dotty-website publish_branch: gh-pages + publish_release: runs-on: [self-hosted, Linux] container: @@ -507,8 +563,7 @@ jobs: - ${{ github.workspace }}/../../cache/general:/root/.cache needs: [test_non_bootstrapped, test, community_build_a, community_build_b, community_build_c, test_sbt, test_java8] if: 
"github.event_name == 'push' - && startsWith(github.event.ref, 'refs/tags/') - && !startsWith(github.event.ref, 'refs/tags/sbt-dotty-')" + && startsWith(github.event.ref, 'refs/tags/')" env: RELEASEBUILD: yes @@ -582,94 +637,6 @@ jobs: asset_name: sha256sum.txt asset_content_type: text/plain - release_documentation: - runs-on: [self-hosted, Linux] - container: - image: lampepfl/dotty:2021-03-22 - options: --cpu-shares 4096 - volumes: - - ${{ github.workspace }}/../../cache/sbt:/root/.sbt - - ${{ github.workspace }}/../../cache/ivy:/root/.ivy2/cache - - ${{ github.workspace }}/../../cache/general:/root/.cache - needs: [publish_release] - if: "github.event_name == 'push' - && startsWith(github.event.ref, 'refs/tags/') - && !startsWith(github.event.ref, 'refs/tags/sbt-dotty-')" - - env: - RELEASEBUILD: yes - BOT_TOKEN: ${{ secrets.BOT_TOKEN }} # If you need to change this: - # Generate one at https://github.com/settings/tokens - # Make sure you have the write permissions to the repo: https://github.com/lampepfl/dotty-website - - steps: - - name: Reset existing repo - run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - - - name: Checkout cleanup script - uses: actions/checkout@v2 - - - name: Cleanup - run: .github/workflows/cleanup.sh - - - name: Git Checkout - uses: actions/checkout@v2 - - - name: Add SBT proxy repositories - run: cp -vf .github/workflows/repositories /root/.sbt/ ; true - - - name: Generate Website - run: | - ./project/scripts/genDocs -doc-snapshot - - - name: Deploy Website - uses: peaceiris/actions-gh-pages@v3 - with: - personal_token: ${{ secrets.BOT_TOKEN }} - publish_dir: docs/_site - external_repository: lampepfl/dotty-website - publish_branch: gh-pages - - publish_sbt_release: - runs-on: [self-hosted, Linux] - container: - image: lampepfl/dotty:2021-03-22 - options: --cpu-shares 4096 - volumes: - - ${{ github.workspace 
}}/../../cache/sbt:/root/.sbt - - ${{ github.workspace }}/../../cache/ivy:/root/.ivy2/cache - - ${{ github.workspace }}/../../cache/general:/root/.cache - needs: [community_build_a, community_build_b, community_build_c, test_sbt] - if: "github.event_name == 'push' - && startsWith(github.event.ref, 'refs/tags/sbt-dotty-')" - - env: - RELEASEBUILD: yes - PGP_PW: ${{ secrets.PGP_PW }} # PGP passphrase - PGP_SECRET: ${{ secrets.PGP_SECRET }} # Export your private and public PGP key to an *.asc file, take the file's contents as a string - SONATYPE_PW: ${{ secrets.SONATYPE_PW }} - SONATYPE_USER: ${{ secrets.SONATYPE_USER }} - - steps: - - name: Reset existing repo - run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - - - name: Checkout cleanup script - uses: actions/checkout@v2 - - - name: Cleanup - run: .github/workflows/cleanup.sh - - - name: Git Checkout - uses: actions/checkout@v2 - - - name: Add SBT proxy repositories - run: cp -vf .github/workflows/repositories /root/.sbt/ ; true - - - name: Publish Dotty SBT Plugin Release - run: | - ./project/scripts/sbtPublish ";project sbt-dotty ;publishSigned ;sonatypeBundleRelease" - open_issue_on_failure: runs-on: [self-hosted, Linux] container: diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml index 1547a0569f70..53800df7bb00 100644 --- a/.github/workflows/cla.yml +++ b/.github/workflows/cla.yml @@ -6,3 +6,5 @@ jobs: steps: - uses: actions/checkout@v2 - run: ./project/scripts/check-cla.sh + env: + AUTHOR: ${{ github.event.pull_request.user.login }} diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml new file mode 100644 index 000000000000..820843778428 --- /dev/null +++ b/.github/workflows/releases.yml @@ -0,0 +1,27 @@ +name: Releases +on: + workflow_dispatch: + +jobs: + publish_release: + runs-on: [self-hosted, Linux] + container: + image: lampepfl/dotty:2021-03-22 + 
options: --cpu-shares 4096 + + env: + SDKMAN_KEY: ${{ secrets.SDKMAN_KEY }} + SDKMAN_TOKEN: ${{ secrets.SDKMAN_TOKEN }} + + steps: + - name: Reset existing repo + run: git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true + + - name: Cleanup + run: .github/workflows/cleanup.sh + + - name: Git Checkout + uses: actions/checkout@v2 + + - name: Publish to SDKMAN + run: .github/workflows/scripts/publish-sdkman.sh diff --git a/.github/workflows/scaladoc.yaml b/.github/workflows/scaladoc.yaml index 269950f68962..50c8009f3e8e 100644 --- a/.github/workflows/scaladoc.yaml +++ b/.github/workflows/scaladoc.yaml @@ -13,7 +13,7 @@ jobs: && !contains(github.event.pull_request.body, '[skip docs]') ) || contains(github.event.ref, 'scaladoc') - || contains(github.event.ref, 'master')" + || contains(github.event.ref, 'main')" steps: - name: Git Checkout @@ -36,10 +36,12 @@ jobs: java-version: 11 - name: Compile and test scala3doc-js - run: ./project/scripts/sbt scaladoc-js/test + run: ./project/scripts/sbt scaladoc-js-main/test - name: Compile and test - run: ./project/scripts/sbt scaladoc/test + run: | + ./project/scripts/sbt scaladoc/test + ./project/scripts/cmdScaladocTests - name: Locally publish self run: ./project/scripts/sbt scaladoc/publishLocal @@ -54,7 +56,7 @@ jobs: run: ./project/scripts/sbt scaladoc/generateScalaDocumentation - name: Generate documentation for example project using dotty-sbt - run: ./project/scripts/sbt "sbt-dotty/scripted sbt-dotty/scaladoc" + run: ./project/scripts/sbt "sbt-test/scripted sbt-dotty/scaladoc" - name: Generate index file run: scaladoc/scripts/mk-index.sh scaladoc/output > scaladoc/output/index.html @@ -71,17 +73,16 @@ jobs: az storage container create --name $DOC_DEST --account-name scala3docstorage --public-access container az storage blob upload-batch -s scaladoc/output -d $DOC_DEST --account-name scala3docstorage - 
community-docs: - env: - AZURE_STORAGE_SAS_TOKEN: ${{ secrets.AZURE_STORAGE_SAS_TOKEN }} + stdlib-sourcelinks-test: runs-on: ubuntu-latest - if: "( github.event_name == 'pull_request' + # if false - disable flaky test + if: "false && (( github.event_name == 'pull_request' && !contains(github.event.pull_request.body, '[skip ci]') && !contains(github.event.pull_request.body, '[skip docs]') ) || contains(github.event.ref, 'scaladoc') || contains(github.event.ref, 'scala3doc') - || contains(github.event.ref, 'master')" + || contains(github.event.ref, 'main'))" steps: - name: Git Checkout @@ -92,20 +93,5 @@ jobs: with: java-version: 8 - - name: Init submodules - run: git submodule update --init --recursive --jobs 7 - - - name: Generate docs - run: ./project/scripts/sbt "community-build/run doc all docsOutput" - - - name: Upload documentation to server - uses: azure/CLI@v1 - if: env.AZURE_STORAGE_SAS_TOKEN - env: - PR_NUMBER: ${{ github.event.pull_request.number }} - with: - inlineScript: | - DOC_DEST=pr-${PR_NUMBER:-${GITHUB_REF##*/}}-docs - echo uplading docs to https://scala3doc.virtuslab.com/$DOC_DEST - az storage container create --name $DOC_DEST --account-name scala3docstorage --public-access container - az storage blob upload-batch -s community-build/docsOutput -d $DOC_DEST --account-name scala3docstorage + - name: Test sourcelinks to stdlib + run: true # ./project/scripts/sbt scaladoc/sourceLinksIntegrationTest:test diff --git a/.github/workflows/scripts/publish-sdkman.sh b/.github/workflows/scripts/publish-sdkman.sh new file mode 100755 index 000000000000..07d35a72a65e --- /dev/null +++ b/.github/workflows/scripts/publish-sdkman.sh @@ -0,0 +1,50 @@ +#!/usr/bin/env bash + +# This is script for publishing scala on SDKMAN. +# Script resolves the latest stable version of scala and then send REST request to SDKMAN Vendor API. +# It's releasing and announcing the release of scala on SDKMAN. 
+# +# Requirement: +# - the latest stable version of scala should be available in github artifacts + +set -u + +# latest stable dotty version +DOTTY_VERSION=$(curl -s https://api.github.com/repos/lampepfl/dotty/releases/latest | grep '"tag_name":' | sed -E 's/.*"([^"]+)".*/\1/') +DOTTY_URL="https://github.com/lampepfl/dotty/releases/download/$DOTTY_VERSION/scala3-$DOTTY_VERSION.zip" + +# checking if dotty version is available +if ! curl --output /dev/null --silent --head --fail "$DOTTY_URL"; then + echo "URL doesn't exist: $DOTTY_URL" + exit 1 +fi + +# Release a new Candidate Version +curl --silent --show-error --fail \ + -X POST \ + -H "Consumer-Key: $SDKMAN_KEY" \ + -H "Consumer-Token: $SDKMAN_TOKEN" \ + -H "Content-Type: application/json" \ + -H "Accept: application/json" \ + -d '{"candidate": "scala", "version": "'"$DOTTY_VERSION"'", "url": "'"$DOTTY_URL"'"}' \ + https://vendors.sdkman.io/release + +if [[ $? -ne 0 ]]; then + echo "Fail sending POST request to releasing scala on SDKMAN." + exit 1 +fi + +# Set DOTTY_VERSION as Default for Candidate +curl --silent --show-error --fail \ + -X PUT \ + -H "Consumer-Key: $SDKMAN_KEY" \ + -H "Consumer-Token: $SDKMAN_TOKEN" \ + -H "Content-Type: application/json" \ + -H "Accept: application/json" \ + -d '{"candidate": "scala", "version": "'"$DOTTY_VERSION"'"}' \ + https://vendors.sdkman.io/default + +if [[ $? -ne 0 ]]; then + echo "Fail sending PUT request to announcing the release of scala on SDKMAN." 
+ exit 1 +fi diff --git a/.gitignore b/.gitignore index 0ba8e15fd3f3..e182b7114a07 100644 --- a/.gitignore +++ b/.gitignore @@ -58,6 +58,9 @@ tests/partest-generated/ tests/locks/ /test-classes/ +# Benchmarks +bench/tests-generated + # Ignore output files but keep the directory out/ build/ @@ -96,3 +99,13 @@ community-build/dotty-community-build-deps # Bloop .bsp + +# Coursier +cs + +# Coursier test product +compiler/test-coursier/run/*.jar + +# docs related +contributors.js +content-contributors.css diff --git a/.gitmodules b/.gitmodules index d922c875223a..f93c2c9a9857 100644 --- a/.gitmodules +++ b/.gitmodules @@ -100,10 +100,6 @@ [submodule "community-build/community-projects/requests-scala"] path = community-build/community-projects/requests-scala url = https://github.com/dotty-staging/requests-scala.git -[submodule "community-build/community-projects/cats-effect-2"] - path = community-build/community-projects/cats-effect-2 - url = https://github.com/dotty-staging/cats-effect.git - branch = series/2.x [submodule "community-build/community-projects/cats-effect-3"] path = community-build/community-projects/cats-effect-3 url = https://github.com/dotty-staging/cats-effect.git @@ -184,6 +180,9 @@ [submodule "community-build/community-projects/protoquill"] path = community-build/community-projects/protoquill url = https://github.com/dotty-staging/protoquill.git +[submodule "community-build/community-projects/Monocle"] + path = community-build/community-projects/Monocle + url = https://github.com/dotty-staging/Monocle.git [submodule "community-build/community-projects/onnx-scala"] path = community-build/community-projects/onnx-scala url = https://github.com/dotty-staging/onnx-scala.git @@ -193,3 +192,61 @@ [submodule "community-build/community-projects/play-json"] path = community-build/community-projects/play-json url = https://github.com/dotty-staging/play-json.git +[submodule 
"community-build/community-projects/scalatestplus-testng"] + path = community-build/community-projects/scalatestplus-testng + url = https://github.com/dotty-staging/scalatestplus-testng.git +[submodule "community-build/community-projects/munit-cats-effect"] + path = community-build/community-projects/munit-cats-effect + url = https://github.com/dotty-staging/munit-cats-effect.git +[submodule "community-build/community-projects/scalacheck-effect"] + path = community-build/community-projects/scalacheck-effect + url = https://github.com/dotty-staging/scalacheck-effect.git +[submodule "community-build/community-projects/fs2"] + path = community-build/community-projects/fs2 + url = https://github.com/dotty-staging/fs2.git +[submodule "community-build/community-projects/libretto"] + path = community-build/community-projects/libretto + url = https://github.com/dotty-staging/libretto.git +[submodule "community-build/community-projects/jackson-module-scala"] + path = community-build/community-projects/jackson-module-scala + url = https://github.com/dotty-staging/jackson-module-scala.git +[submodule "community-build/community-projects/scala-java8-compat"] + path = community-build/community-projects/scala-java8-compat + url = https://github.com/dotty-staging/scala-java8-compat.git +[submodule "community-build/community-projects/specs2"] + path = community-build/community-projects/specs2 + url = https://github.com/dotty-staging/specs2.git +[submodule "community-build/community-projects/spire"] + path = community-build/community-projects/spire + url = https://github.com/dotty-staging/spire.git +[submodule "community-build/community-projects/munit-forward-compat"] + path = community-build/community-projects/munit-forward-compat + url = https://github.com/dotty-staging/munit.git +[submodule "community-build/community-projects/discipline-forward-compat"] + path = community-build/community-projects/discipline-forward-compat + url = 
https://github.com/dotty-staging/discipline.git +[submodule "community-build/community-projects/discipline-munit-forward-compat"] + path = community-build/community-projects/discipline-munit-forward-compat + url = https://github.com/dotty-staging/discipline-munit.git +[submodule "community-build/community-projects/discipline-specs2-forward-compat"] + path = community-build/community-projects/discipline-specs2-forward-compat + url = https://github.com/dotty-staging/discipline-specs2.git +[submodule "community-build/community-projects/simulacrum-scalafix-forward-compat"] + path = community-build/community-projects/simulacrum-scalafix-forward-compat + url = https://github.com/dotty-staging/simulacrum-scalafix.git +[submodule "community-build/community-projects/cats-forward-compat"] + path = community-build/community-projects/cats-forward-compat + url = https://github.com/dotty-staging/cats.git +[submodule "community-build/community-projects/cats-mtl-forward-compat"] + path = community-build/community-projects/cats-mtl-forward-compat + url = https://github.com/dotty-staging/cats-mtl.git +[submodule "community-build/community-projects/coop-forward-compat"] + path = community-build/community-projects/coop-forward-compat + url = https://github.com/dotty-staging/coop.git +[submodule "community-build/community-projects/cats-effect-3-forward-compat"] + path = community-build/community-projects/cats-effect-3-forward-compat + url = https://github.com/dotty-staging/cats-effect.git + branch = series/3.x +[submodule "community-build/community-projects/scalacheck-forward-compat"] + path = community-build/community-projects/scalacheck-forward-compat + url = https://github.com/dotty-staging/scalacheck diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3b622e9f57c8..19af50ebd49f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -6,7 +6,7 @@ These guidelines are meant to be a living document that should be changed and ad This is the 
process for committing code to the Scala project. There are of course exceptions to these rules, for example minor changes to comments and documentation, fixing a broken build etc. -1. Make sure you have signed the [Scala CLA](http://typesafe.com/contribute/cla/scala), if not, sign it. +1. Make sure you have signed the [Scala CLA](https://www.lightbend.com/contribute/cla/scala), if not, sign it. 2. Before starting to work on a feature or a fix, it's good practice to ensure that: 1. There is a ticket for your work in the project's [issue tracker](https://github.com/lampepfl/dotty/issues); 2. The ticket has been discussed and prioritized by the team. diff --git a/MAINTENANCE.md b/MAINTENANCE.md new file mode 100644 index 000000000000..cb98344e1ea0 --- /dev/null +++ b/MAINTENANCE.md @@ -0,0 +1,76 @@ +# Issue Tsar Role +This document formally defines the Issue Tsar role. This is a repository maintenance role that is assigned to core contributors on rotating basis. + +## Responsibilities +Issue Tsar is responsible for: +- Health of the CI, nightly releases and benchmark infrastructure. +- PRs of external contributors: assigning someone to review, or handling themselves. +- Triaging issues (especially new): + - Each issue needs to be assigned an `itype` and 1 or more `area` labels. + - Where applicable, new issues need to be designated for the Spree or Semester projects with the corresponding labels. + - Regressions from an earlier Scala 3 release must be classified with the “regression” label and assigned to the next release’s milestone. + - Modifying issue labels to best capture information about the issues + - Attempting to reproduce the issue (or label “stat:cannot reproduce”) + - Further minimizing the issue or asking the reporter of the issue to minimize it correctly (or label “stat:needs minimization”) + +Other core teammates are responsible for providing information to the Issue Tsar in a timely manner when it is requested if they have that information. 
+ +## Assignment +Issue Tsar is appointed for 7 days and is responsible for what is specified in the “Responsibilities” section during those 7 days. Their assumption of the role starts from the Dotty Meeting on Monday and ends on the next Dotty Meeting on Monday. + +During the Dotty Meeting, an Issue Tsar is assigned for the current week and for the week after that. + +Issue Tsardom schedule is maintained in the [Issue Tsardom Statistics spreadsheet](https://docs.google.com/spreadsheets/d/19IAqNzHfJ9rsii3EsjIGwPz5BLTFJs_byGM3FprmX3E/edit?usp=sharing). So, someone who knows their availability several weeks ahead into the future can assign themselves to be an issue tsar well ahead of time. + +## Prerequisites +An issue tsar needs to have all the accesses and privileges required to get their job done. This might include: +- Admin rights in lampepfl/dotty repository +- Admin rights in lampepfl/dotty-feature-requests repository +- Permissions to create new repositories in lampepfl organization (needed to fork repositories for the community build) +- Access to the LAMP slack to be able to ask for help with the infrastructure, triaging and such + +## Procedures +To ensure proper health of the infrastructure, the Tsar regularly monitors its proper operation. If a malfunction is detected, the Tsar's job is to ensure that someone is working on it (or solve it on their own). + +If it is unclear what area an issue belongs to, the Tsar asks for advice from other team members on Slack or GitHub. If, after asking for advice, it turns out that nobody in the team knows how to classify it, the issue must be classified with a “stat:needs triage” label. + +If it is unclear who should review an external PR, the Tsar asks for advice from the rest of the core team. If after asking for advice, it is still unclear who should do it, the reviewer for such a PR will be decided at the next Dotty meeting. 
+ +In general, if anything else is unclear for proper fulfillment of responsibilities, the Tsar must proactively seek advice from other team members on Slack or other channels. + +## Reporting +At the end of their Tsardom, the Tsar reports to the team during the Dotty meeting on the following points: + +- Whether there were any incidents with the CI, nightlies and benchmarks, how they were resolved and what steps were taken to prevent them from happening in the future. +- How many new external contributors’ PRs were there and what they were about (in brief). +- How many new issues were opened during their Tsardom period? Were there any areas that got a lot of issues? How many regressions from a prior Scala 3 release were there? Which were designated for an MSc project or an Issue Spree? +- If new labels were created or old ones were removed, or there is any other feedback on how to improve the Tsardom, mention that. +- Unassigned PRs and issues that the team failed to classify: bring them one by one so that the team can make a decision on them. 
+ +# Maintenance List +The following is the list of all the principal areas of the compiler and the core teams members who are responsible for their maintenance: + +- Typer: @odersky, @smarter, (@dwijnand) +- Erasure: @smarter, @odersky +- Enums: @bishabosha +- Export: @bishabosha, @odersky +- Pattern Matching: @dwijnand, (@liufengyun), @sjrd +- Inline: @nicolasstucki, @odersky +- Metaprogramming (Quotes, Reflect, Staging): @nicolasstucki, @aherlihy +- Match types: @OlivierBlanvillain, @dwijnand +- GADT: @abgruszecki, @dwijnand +- Scaladoc: @KacperFKorban, @BarkingBad, @pikinier20 +- Initialization checker: @olhotak, @liufengyun, @anatoliykmetyuk +- Safe nulls: @noti0na1, @olhotak +- tailrec: @sjrd, @mbovel +- JS backend: @sjrd +- forward compat (-scala-release): @prolativ, @Kordyjan, (@nicolasstucki) +- Benchmarks: @anatoliykmetyuk, @mbovel +- REPL: @dwijnand, @anatoliykmetyuk, @prolativ +- CI: @anatoliykmetyuk +- Community Build: @anatoliykmetyuk +- Vulpix: @dwijnand, @prolativ +- JVM backend: @Kordyjan, (@sjrd) +- Derivation & Mirrors: @bishabosha, (@dwijnand) +- Linting (especially unused warnings) / Reporting UX : VirtusLab TBD? +- Java-compat: @Kordyjan diff --git a/NOTICE.md b/NOTICE.md index b116155369a8..64b5f9122db7 100644 --- a/NOTICE.md +++ b/NOTICE.md @@ -82,9 +82,9 @@ major authors were omitted by oversight. modifications. They were originally authored by Lex Spoon, Som Snytt, Adriaan Moors, Paul Phillips and others. - * dotty.tools.dottydoc: The Dottydoc documentation utility ships some + * dotty.tools.scaladoc: The Scaladoc documentation utility ships some third-party JavaScript and CSS libraries which are located under - dotty-doc/resources/css/, dotty-doc/resources/js/, docs/css/ and + scaladoc/resources/dotty_res/styles/, scaladoc/resources/dotty_res/scripts/, docs/css/ and docs/js/. Please refer to the license header of the concerned files for details. 
diff --git a/README.md b/README.md index 2c566d863c7b..fa564654fb38 100644 --- a/README.md +++ b/README.md @@ -1,19 +1,24 @@ Dotty ===== -[![Dotty CI](https://github.com/lampepfl/dotty/workflows/Dotty/badge.svg?branch=master)](https://github.com/lampepfl/dotty/actions?query=branch%3Amaster) -[![Join the chat at https://gitter.im/lampepfl/dotty](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/lampepfl/dotty?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) +[![Dotty CI](https://github.com/lampepfl/dotty/workflows/Dotty/badge.svg?branch=master)](https://github.com/lampepfl/dotty/actions?query=branch%3Amain) +[![Join the chat at https://discord.com/invite/scala](https://img.shields.io/discord/632150470000902164)](https://discord.com/invite/scala) -* [Homepage](http://dotty.epfl.ch) +* [Homepage](https://dotty.epfl.ch) * [Documentation](https://dotty.epfl.ch/docs) Try it out ========== To try it in your project see also the [Getting Started User Guide](https://dotty.epfl.ch/#getting-started). +Building a Local Distribution +============================= +1. `sbt dist/packArchive` +2. Find the newly-built distributions in `dist/target/` + Code of Conduct =============== Dotty uses the [Scala Code of Conduct](https://www.scala-lang.org/conduct.html) -for all communication and discussion. This includes both GitHub, Gitter chat and +for all communication and discussion. This includes both GitHub, Discord and other more direct lines of communication such as email. 
How to Contribute diff --git a/bench/profiles/ci.yml b/bench/profiles/ci.yml new file mode 100644 index 000000000000..0571d509a286 --- /dev/null +++ b/bench/profiles/ci.yml @@ -0,0 +1,61 @@ +scripts: + + re2s: + - measure 1 1 1 $(find $PROG_HOME/tests/re2s/src -name *.scala) + + implicit-cache: + - measure 1 1 1 $PROG_HOME/dotty/tests/bench/implicit_cache.scala + + implicit-cache-from-tasty: + - source $PROG_HOME/dotty/bench/scripts/implicit-cache-from-tasty + + implicit-scope-loop: + - measure 1 1 1 $PROG_HOME/dotty/tests/bench/implicit-scope-loop.scala + + i1535: + - measure 1 1 1 $PROG_HOME/dotty/tests/bench/i1535.scala + + i1687: + - measure 1 1 1 $PROG_HOME/dotty/tests/bench/i1687.scala + + empty-class: + - measure 1 1 1 $PROG_HOME/dotty/tests/bench/empty-class.scala + + empty-object: + - measure 1 1 1 $PROG_HOME/dotty/tests/bench/empty-object.scala + + empty-file: + - measure 1 1 1 $PROG_HOME/dotty/tests/bench/empty-file.scala + + patmatexhaust: + - measure 1 1 1 $PROG_HOME/dotty/tests/bench/patmatexhaust.scala + + exhaustivity-I: + - measure 1 1 1 $PROG_HOME/dotty/tests/bench/exhaustivity-I.scala + + exhaustivity-S: + - measure 1 1 1 $PROG_HOME/dotty/tests/bench/exhaustivity-S.scala + + exhaustivity-T: + - measure 1 1 1 $PROG_HOME/dotty/tests/bench/exhaustivity-T.scala + + exhaustivity-V: + - measure 1 1 1 $PROG_HOME/dotty/tests/bench/exhaustivity-V.scala + + implicitNums: + - measure 1 1 1 $PROG_HOME/dotty/tests/bench/implicitNums.scala + + implicitNums-from-tasty: + - source $PROG_HOME/dotty/bench/scripts/implicitNums-from-tasty + + inductive-implicits: + - measure 1 1 1 $PROG_HOME/dotty/tests/bench/inductive-implicits.scala + + scalap: + - source $PROG_HOME/dotty/bench/scripts/scalap + + power-macro-power-inlined-1: + - source $PROG_HOME/dotty/bench/scripts/power-macro-1 + + power-macro-power-inlined-1k: + - source $PROG_HOME/dotty/bench/scripts/power-macro-1k diff --git a/bench/profiles/compiletime.yml b/bench/profiles/compiletime.yml new file mode 
100644 index 000000000000..fd77df7dfe9a --- /dev/null +++ b/bench/profiles/compiletime.yml @@ -0,0 +1,51 @@ +charts: + + - name: "Compile-time sums of constant integer types (generated)" + url: https://github.com/lampepfl/dotty/blob/main/bench/src/main/scala/generateBenchmarks.scala + lines: + - key: compiletime-sum-constants + label: bootstrapped + + - name: "Compile-time sums of term reference types (generated)" + url: https://github.com/lampepfl/dotty/blob/main/bench/src/main/scala/generateBenchmarks.scala + lines: + - key: compiletime-sum-termrefs + label: bootstrapped + + - name: "Sums of term references, result type inferred (generated)" + url: https://github.com/lampepfl/dotty/blob/main/bench/src/main/scala/generateBenchmarks.scala + lines: + - key: compiletime-sum-termrefs-terms + label: bootstrapped + + - name: "Compile-time sums of type applications (generated)" + url: https://github.com/lampepfl/dotty/blob/main/bench/src/main/scala/generateBenchmarks.scala + lines: + - key: compiletime-sum-applications + label: bootstrapped + + - name: "Compile-time additions inside multiplications (generated)" + url: https://github.com/lampepfl/dotty/blob/main/bench/src/main/scala/generateBenchmarks.scala + lines: + - key: compiletime-distribute + label: bootstrapped + +scripts: + + compiletime-sum-constants: + - measure 6 6 7 1 $PROG_HOME/dotty/bench/tests-generated/compiletime-ops/sum-constants.scala + + compiletime-sum-termrefs: + - measure 6 6 7 1 $PROG_HOME/dotty/bench/tests-generated/compiletime-ops/sum-termrefs.scala + + compiletime-sum-termrefs-terms: + - measure 6 6 7 1 $PROG_HOME/dotty/bench/tests-generated/compiletime-ops/sum-termrefs-terms.scala + + compiletime-sum-applications: + - measure 6 6 7 1 $PROG_HOME/dotty/bench/tests-generated/compiletime-ops/sum-applications.scala + + compiletime-distribute: + - measure 6 6 7 1 $PROG_HOME/dotty/bench/tests-generated/compiletime-ops/distribute.scala + +config: + pr_base_url: 
"https://github.com/lampepfl/dotty/pull/" diff --git a/bench/profiles/default.yml b/bench/profiles/default.yml new file mode 100644 index 000000000000..22ed6d5f31df --- /dev/null +++ b/bench/profiles/default.yml @@ -0,0 +1,14 @@ +includes: + - projects.yml + - implicits.yml + - typing.yml + - exhaustivity.yml + - misc.yml + - empty.yml + - quotes.yml + - tuples.yml + - compiletime.yml + + +config: + pr_base_url: "https://github.com/lampepfl/dotty/pull/" diff --git a/bench/profiles/empty.yml b/bench/profiles/empty.yml new file mode 100644 index 000000000000..ac571e64e831 --- /dev/null +++ b/bench/profiles/empty.yml @@ -0,0 +1,33 @@ +charts: + + - name: "empty class" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/empty-class.scala + lines: + - key: empty-class + label: bootstrapped + + - name: "empty object" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/empty-object.scala + lines: + - key: empty-object + label: bootstrapped + + - name: "empty file" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/empty-file.scala + lines: + - key: empty-file + label: bootstrapped + +scripts: + + empty-class: + - measure $PROG_HOME/dotty/tests/bench/empty-class.scala + + empty-object: + - measure $PROG_HOME/dotty/tests/bench/empty-object.scala + + empty-file: + - measure $PROG_HOME/dotty/tests/bench/empty-file.scala + +config: + pr_base_url: "https://github.com/lampepfl/dotty/pull/" diff --git a/bench/profiles/exhaustivity.yml b/bench/profiles/exhaustivity.yml new file mode 100644 index 000000000000..af6eb4041f6c --- /dev/null +++ b/bench/profiles/exhaustivity.yml @@ -0,0 +1,86 @@ +charts: + - name: "exhaustivity check" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/patmatexhaust.scala + lines: + - key: patmatexhaust + label: bootstrapped + + - name: "exhaustivity I" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/exhaustivity-I.scala + lines: + - key: exhaustivity-I + 
label: bootstrapped + + - name: "exhaustivity S" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/exhaustivity-S.scala + lines: + - key: exhaustivity-S + label: bootstrapped + + - name: "exhaustivity T" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/exhaustivity-T.scala + lines: + - key: exhaustivity-T + label: bootstrapped + + - name: "exhaustivity V" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/exhaustivity-V.scala + lines: + - key: exhaustivity-V + label: bootstrapped + + - name: "exhaustivity MIPS" + url: https://github.com/lampepfl/dotty/blob/main/tests/patmat/i7186.scala + lines: + - key: exhaustivity-mips + label: bootstrapped + + - name: "exhaustivity i12241" + url: https://github.com/lampepfl/dotty/blob/main/tests/patmat/i12241.scala + lines: + - key: exhaustivity-i12241 + label: bootstrapped + + - name: "exhaustivity i12358" + url: https://github.com/lampepfl/dotty/blob/main/tests/patmat/i12358.scala + lines: + - key: exhaustivity-i12358 + label: bootstrapped + + - name: "exhaustivity i13565" + url: https://github.com/lampepfl/dotty/blob/main/tests/pos/i13565.scala + lines: + - key: exhaustivity-i13565 + label: bootstrapped + +scripts: + + patmatexhaust: + - measure 20 40 3 $PROG_HOME/dotty/tests/bench/patmatexhaust.scala + + exhaustivity-I: + - measure 20 40 3 $PROG_HOME/dotty/tests/bench/exhaustivity-I.scala + + exhaustivity-S: + - measure 20 40 3 $PROG_HOME/dotty/tests/bench/exhaustivity-S.scala + + exhaustivity-T: + - measure 20 40 3 $PROG_HOME/dotty/tests/bench/exhaustivity-T.scala + + exhaustivity-V: + - measure 20 40 3 $PROG_HOME/dotty/tests/bench/exhaustivity-V.scala + + exhaustivity-mips: + - measure 20 40 3 $PROG_HOME/dotty/tests/patmat/i7186.scala + + exhaustivity-i12241: + - measure 20 40 3 $PROG_HOME/dotty/tests/patmat/i12241.scala + + exhaustivity-i12358: + - measure 20 40 3 $PROG_HOME/dotty/tests/patmat/i12358.scala + + exhaustivity-i13565: + - measure 20 40 
3 $PROG_HOME/dotty/tests/pos/i13565.scala + +config: + pr_base_url: "https://github.com/lampepfl/dotty/pull/" diff --git a/bench/profiles/implicits.yml b/bench/profiles/implicits.yml new file mode 100644 index 000000000000..3e944b5be28b --- /dev/null +++ b/bench/profiles/implicits.yml @@ -0,0 +1,51 @@ +charts: + - name: "implicit cache I" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/implicit_cache.scala + lines: + - key: implicit-cache + label: bootstrapped + - key: implicit-cache-from-tasty + label: from tasty + + - name: "implicit cache II" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/implicitNums.scala + lines: + - key: implicitNums + label: bootstrapped + - key: implicitNums-from-tasty + label: from tasty + + - name: "implicit scope loop" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/implicit-scope-loop.scala + lines: + - key: implicit-scope-loop + label: bootstrapped + + - name: "inductive implicits" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/inductive-implicits.scala + lines: + - key: inductive-implicits + label: bootstrapped + +scripts: + + implicit-cache: + - measure $PROG_HOME/dotty/tests/bench/implicit_cache.scala + + implicit-cache-from-tasty: + - source $PROG_HOME/dotty/bench/scripts/implicit-cache-from-tasty + + implicit-scope-loop: + - measure $PROG_HOME/dotty/tests/bench/implicit-scope-loop.scala + + inductive-implicits: + - measure $PROG_HOME/dotty/tests/bench/inductive-implicits.scala + + implicitNums: + - measure $PROG_HOME/dotty/tests/bench/implicitNums.scala + + implicitNums-from-tasty: + - source $PROG_HOME/dotty/bench/scripts/implicitNums-from-tasty + +config: + pr_base_url: "https://github.com/lampepfl/dotty/pull/" diff --git a/bench/profiles/misc.yml b/bench/profiles/misc.yml new file mode 100644 index 000000000000..668f8e60c176 --- /dev/null +++ b/bench/profiles/misc.yml @@ -0,0 +1,21 @@ +charts: + + - name: "issue #1535" + url: 
https://github.com/lampepfl/dotty/blob/main/tests/bench/i1535.scala + lines: + - key: i1535 + label: bootstrapped + + - name: "issue #1687" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/i1687.scala + lines: + - key: i1687 + label: bootstrapped + +scripts: + + i1535: + - measure $PROG_HOME/dotty/tests/bench/i1535.scala + + i1687: + - measure $PROG_HOME/dotty/tests/bench/i1687.scala diff --git a/bench/profiles/projects.yml b/bench/profiles/projects.yml new file mode 100644 index 000000000000..e012d36b0b01 --- /dev/null +++ b/bench/profiles/projects.yml @@ -0,0 +1,44 @@ +charts: + - name: "dotty (source changes over time)" + url: https://github.com/lampepfl/dotty + lines: + - key: dotty + label: bootstrapped + + - name: "scala stdlib-2.13" + url: https://github.com/dotty-staging/scala/commits/stdLib213-dotty-community-build + lines: + - key: stdlib213 + label: bootstrapped + + - name: scalap + url: https://github.com/dotty-staging/scala/commits/scalap-dotty-community-build-2.13 + lines: + - key: scalap + label: bootstrapped + + - name: re2 + url: https://github.com/lampepfl/bench/blob/main/tests/re2s + lines: + - key: re2s + label: bootstrapped + + +scripts: + dotty: + - measure -with-compiler $(find $PROG_HOME/dotty/compiler/src/dotty -name *.scala -o -name *.java) + + re2s: + - measure $(find $PROG_HOME/tests/re2s/src -name *.scala) + + # scalapb: + # - source $PROG_HOME/dotty/bench/scripts/scalapb + + scalap: + - source $PROG_HOME/dotty/bench/scripts/scalap + + stdlib213: + - source $PROG_HOME/dotty/bench/scripts/stdlib213 + +config: + pr_base_url: "https://github.com/lampepfl/dotty/pull/" diff --git a/bench/profiles/pull.yml b/bench/profiles/pull.yml new file mode 100644 index 000000000000..163d75a8769d --- /dev/null +++ b/bench/profiles/pull.yml @@ -0,0 +1,8 @@ +includes: + - projects.yml + - quotes.yml + - misc.yml + - empty.yml + +config: + pr_base_url: "https://github.com/lampepfl/dotty/pull/" diff --git 
a/bench/profiles/quotes.yml b/bench/profiles/quotes.yml new file mode 100644 index 000000000000..afd970543aa1 --- /dev/null +++ b/bench/profiles/quotes.yml @@ -0,0 +1,32 @@ +charts: + - name: "Inline a quote" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/power-macro/PowerInlined-1.scala + lines: + - key: power-macro-power-inlined-1 + label: bootstrapped + + - name: "Inline 1k quotes" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/power-macro/PowerInlined-1k.scala + lines: + - key: power-macro-power-inlined-1k + label: bootstrapped + + - name: "Quote String interpolation matching" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/string-interpolation-macro/Test.scala + lines: + - key: quote-string-interpolation-matching + label: bootstrapped + +scripts: + + power-macro-power-inlined-1: + - source $PROG_HOME/dotty/bench/scripts/power-macro-1 + + power-macro-power-inlined-1k: + - source $PROG_HOME/dotty/bench/scripts/power-macro-1k + + quote-string-interpolation-matching: + - source $PROG_HOME/dotty/bench/scripts/quote-string-interpolation-matching + +config: + pr_base_url: "https://github.com/lampepfl/dotty/pull/" diff --git a/bench/profiles/sbt.yml b/bench/profiles/sbt.yml new file mode 100644 index 000000000000..3ab0e43f3db2 --- /dev/null +++ b/bench/profiles/sbt.yml @@ -0,0 +1,15 @@ +charts: + - name: "dotty (source changes over time)" + url: https://github.com/lampepfl/dotty + lines: + - key: dotty + label: bootstrapped + - key: dotty-sbt + label: with sbt phases + +scripts: + dotty-sbt: + - measure -with-compiler -Yforce-sbt-phases -with-dotty $(find $PROG_HOME/dotty/compiler/src/dotty -name *.scala -o -name *.java) + +config: + pr_base_url: "https://github.com/lampepfl/dotty/pull/" diff --git a/bench/profiles/tuples.yml b/bench/profiles/tuples.yml new file mode 100644 index 000000000000..5e41ecf7c80d --- /dev/null +++ b/bench/profiles/tuples.yml @@ -0,0 +1,68 @@ +charts: + - name: 
"Tuple22 creation with Tuple22.apply" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/tuple22-creation-apply.scala + lines: + - key: tuple22-creation-apply + label: bootstrapped + + - name: "Tuple22 creation with *:" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/tuple22-creation-cons.scala + lines: + - key: tuple22-creation-cons + label: bootstrapped + + - name: "Tuple22.tail" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/tuple22-tails.scala + lines: + - key: tuple22-tails + label: bootstrapped + + - name: "Tuple22.apply" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/tuple22-apply.scala + lines: + - key: tuple22-apply + label: bootstrapped + + - name: "Tuple22.size" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/tuple22-size.scala + lines: + - key: tuple22-size + label: bootstrapped + + - name: "Tuple reverse (Runtime)" + url: https://github.com/lampepfl/dotty/blob/main/bench-run/src/main/scala/dotty/tools/benchmarks/tuples/TupleOps.scala#L59 + lines: + - key: tuple-reverse + label: bootstrapped + + - name: "Tuple flatMap (Runtime)" + url: https://github.com/lampepfl/dotty/blob/main/bench-run/src/main/scala/dotty/tools/benchmarks/tuples/TupleOps.scala#L64 + lines: + - key: tuple-flatMap + label: bootstrapped + +scripts: + + tuple22-creation-apply: + - measure $PROG_HOME/dotty/tests/bench/tuple22-creation-apply.scala + + tuple22-creation-cons: + - measure $PROG_HOME/dotty/tests/bench/tuple22-creation-cons.scala + + tuple22-tails: + - measure $PROG_HOME/dotty/tests/bench/tuple22-tails.scala + + tuple22-apply: + - measure $PROG_HOME/dotty/tests/bench/tuple22-apply.scala + + tuple22-size: + - measure $PROG_HOME/dotty/tests/bench/tuple22-size.scala + + tuple-reverse: + - measure-run TupleOps.reverse + + tuple-flatMap: + - measure-run TupleOps.flatMap + +config: + pr_base_url: "https://github.com/lampepfl/dotty/pull/" diff --git 
a/bench/profiles/typing.yml b/bench/profiles/typing.yml new file mode 100644 index 000000000000..f6476bca7006 --- /dev/null +++ b/bench/profiles/typing.yml @@ -0,0 +1,14 @@ +charts: + - name: "Find Ref" + url: https://github.com/lampepfl/dotty/blob/main/tests/bench/FindRef.scala + lines: + - key: find-ref + label: bootstrapped + +scripts: + + find-ref: + - measure $PROG_HOME/dotty/tests/bench/FindRef.scala + +config: + pr_base_url: "https://github.com/lampepfl/dotty/pull/" diff --git a/bench/scripts/implicit-cache-from-tasty b/bench/scripts/implicit-cache-from-tasty new file mode 100755 index 000000000000..2cbd1694c9ac --- /dev/null +++ b/bench/scripts/implicit-cache-from-tasty @@ -0,0 +1,8 @@ +set -e + +out="$PROG_HOME/tests/implicit_cache-out" +mkdir -p "$out" + +compile -d "$out" "$PROG_HOME/dotty/tests/bench/implicit_cache.scala" + +measure -from-tasty "$out/Test.tasty" "$out/A.tasty" "$out/Foo.tasty" diff --git a/bench/scripts/implicitNums-from-tasty b/bench/scripts/implicitNums-from-tasty new file mode 100755 index 000000000000..53c4ea3754da --- /dev/null +++ b/bench/scripts/implicitNums-from-tasty @@ -0,0 +1,8 @@ +set -e + +out="$PROG_HOME/tests/implicitNums-out" +mkdir -p "$out" + +compile -d "$out" "$PROG_HOME/dotty/tests/bench/implicitNums.scala" + +measure -from-tasty "$out/Test.tasty" diff --git a/bench/scripts/power-macro-1 b/bench/scripts/power-macro-1 new file mode 100755 index 000000000000..e6e43f31aa38 --- /dev/null +++ b/bench/scripts/power-macro-1 @@ -0,0 +1,7 @@ +set -e + +mkdir -p "$PROG_HOME/tests/power-macro/out" + +compile -d "$PROG_HOME/tests/power-macro/out" "$PROG_HOME/dotty/tests/bench/power-macro/PowerMacro.scala" + +measure "$PROG_HOME/dotty/tests/bench/power-macro/PowerInlined-1.scala" -classpath "$PROG_HOME/tests/power-macro/out" diff --git a/bench/scripts/power-macro-1k b/bench/scripts/power-macro-1k new file mode 100755 index 000000000000..ec9c4b1dffe8 --- /dev/null +++ b/bench/scripts/power-macro-1k @@ -0,0 +1,7 @@ +set 
-e + +mkdir -p "$PROG_HOME/tests/power-macro/out" + +compile -d "$PROG_HOME/tests/power-macro/out" "$PROG_HOME/dotty/tests/bench/power-macro/PowerMacro.scala" + +measure "$PROG_HOME/dotty/tests/bench/power-macro/PowerInlined-1k.scala" -classpath "$PROG_HOME/tests/power-macro/out" diff --git a/bench/scripts/quote-string-interpolation-matching b/bench/scripts/quote-string-interpolation-matching new file mode 100755 index 000000000000..003f299db32d --- /dev/null +++ b/bench/scripts/quote-string-interpolation-matching @@ -0,0 +1,7 @@ +set -e + +mkdir -p "$PROG_HOME/tests/string-interpolation-macro/out" + +compile -d "$PROG_HOME/tests/string-interpolation-macro/out" "$PROG_HOME/dotty/tests/bench/string-interpolation-macro/Macro.scala" + +measure "$PROG_HOME/dotty/tests/bench/string-interpolation-macro/Test.scala" -classpath "$PROG_HOME/tests/string-interpolation-macro/out" diff --git a/bench/scripts/scalap b/bench/scripts/scalap new file mode 100644 index 000000000000..f9b25632e8ae --- /dev/null +++ b/bench/scripts/scalap @@ -0,0 +1,5 @@ +set -e + +path=$(cs fetch -p org.scala-lang:scala-compiler:2.13.0) + +measure -source 3.0-migration -classpath "$path" $(find $PROG_HOME/dotty/community-build/community-projects/scalap/src/scalap -name "*.scala") diff --git a/bench/scripts/stdlib213 b/bench/scripts/stdlib213 new file mode 100644 index 000000000000..b70010b15644 --- /dev/null +++ b/bench/scripts/stdlib213 @@ -0,0 +1,7 @@ +set -e + +pattern="! -name AnyVal.scala ! 
-name language.scala -name *.scala -o -name *.java" +stdlib213=$(find $PROG_HOME/dotty/community-build/community-projects/stdLib213/src/library/scala $pattern) +cd $PROG_HOME/dotty +measure -language:implicitConversions $stdlib213 + diff --git a/bench/src/main/scala/Benchmarks.scala b/bench/src/main/scala/Benchmarks.scala index 6e0bae6e72de..51cd411cc13b 100644 --- a/bench/src/main/scala/Benchmarks.scala +++ b/bench/src/main/scala/Benchmarks.scala @@ -8,6 +8,8 @@ import reporting._ import org.openjdk.jmh.results.RunResult import org.openjdk.jmh.runner.Runner import org.openjdk.jmh.runner.options.OptionsBuilder +import org.openjdk.jmh.runner.options.TimeValue +//import org.openjdk.jmh.results.format.ResultFormatType import org.openjdk.jmh.annotations._ import org.openjdk.jmh.results.format._ import java.util.concurrent.TimeUnit @@ -21,8 +23,11 @@ import dotty.tools.io.AbstractFile object Bench { val COMPILE_OPTS_FILE = "compile.txt" + val GENERATED_BENCHMARKS_DIR = "tests-generated" def main(args: Array[String]): Unit = { + generateBenchmarks(GENERATED_BENCHMARKS_DIR) + if (args.isEmpty) { println("Missing ") return @@ -32,7 +37,7 @@ object Bench { val warmup = if (intArgs.length > 0) intArgs(0).toInt else 30 val iterations = if (intArgs.length > 1) intArgs(1).toInt else 20 val forks = if (intArgs.length > 2) intArgs(2).toInt else 1 - + val measurementTime = if (intArgs.length > 3) intArgs(3).toInt else 1 import File.{ separator => sep } @@ -48,7 +53,13 @@ object Bench { .mode(Mode.AverageTime) .timeUnit(TimeUnit.MILLISECONDS) .warmupIterations(warmup) + .warmupTime(TimeValue.seconds(measurementTime)) .measurementIterations(iterations) + .measurementTime(TimeValue.seconds(measurementTime)) + // To output results to bench/results.json, uncomment the 2 + // following lines and the ResultFormatType import. 
+ //.result("results.json") + //.resultFormat(ResultFormatType.JSON) .forks(forks) .build diff --git a/bench/src/main/scala/generateBenchmarks.scala b/bench/src/main/scala/generateBenchmarks.scala new file mode 100644 index 000000000000..012d81e3f5a2 --- /dev/null +++ b/bench/src/main/scala/generateBenchmarks.scala @@ -0,0 +1,155 @@ +package dotty.tools.benchmarks + +import java.nio.file.{Files, Paths, Path} +import java.util.Random + +/** Generates benchmarks in `genDirName`. + * + * Called automatically by the benchmarks runner ([[Bench.main]]). + */ +def generateBenchmarks(genDirName: String) = + val thisFile = Paths.get("src/main/scala/generateBenchmarks.scala") + val genDir = Paths.get(genDirName) + + def generateBenchmark(subDirName: String, fileName: String, make: () => String) = + val outputDir = genDir.resolve(Paths.get(subDirName)) + Files.createDirectories(outputDir) + val file = outputDir.resolve(Paths.get(fileName)) + if !Files.exists(file) || + Files.getLastModifiedTime(file).toMillis() < + Files.getLastModifiedTime(thisFile).toMillis() then + println(f"Generate benchmark $file") + Files.write(file, make().getBytes()) + + // Big compile-time sums of constant integer types: (1.type + 2.type + …). + // This should ideally have a linear complexity. + generateBenchmark("compiletime-ops", "sum-constants.scala", () => + val innerSum = (1 to 50) // Limited to 50 to avoid stackoverflows in the compiler. + .map(i => f"$i") + .mkString(" + ") + val outerSum = (1 to 50) + .map(_ => f"($innerSum)") + .mkString(" + ") + val vals = (1 to 50) + .map(i => f"val v$i: $outerSum = ???") + .mkString("\n\n ") + + f""" +import scala.compiletime.ops.int.* + +object Test: + val one: 1 = ??? + val n: Int = ??? + val m: Int = ??? + + $vals + """ + ) + + // Big compile-time sums of term reference types: (one.type + m.type + n.type + // + one.type + m.type + n.type + …). This big type is normalized to (8000 + + // 8000 * m.type + 8000 * n.type). 
+ generateBenchmark("compiletime-ops", "sum-termrefs.scala", () => + val innerSum = (1 to 40) + .map(_ => "one.type + m.type + n.type") + .mkString(" + ") + val outerSum = (1 to 20) + .map(_ => f"($innerSum)") + .mkString(" + ") + val vals = (1 to 4) + .map(i => f"val v$i: $outerSum = ???") + .mkString("\n\n ") + + f""" +import scala.compiletime.ops.int.* + +object Test: + val one: 1 = ??? + val n: Int = ??? + val m: Int = ??? + + $vals + """ + ) + + // Big compile-time sums of term references: (n + m + …). The result type is + // inferred. The goal of this benchmark is to measure the performance cost of + // inferring precise types for arithmetic operations. + generateBenchmark("compiletime-ops", "sum-termrefs-terms.scala", () => + val innerSum = (1 to 40) + .map(_ => "one + m + n") + .mkString(" + ") + val outerSum = (1 to 20) + .map(_ => f"($innerSum)") + .mkString(" + ") + val vals = (1 to 4) + .map(i => f"val v$i = $outerSum") + .mkString("\n\n ") + + f""" +import scala.compiletime.ops.int.* + +object Test: + val one: 1 = ??? + val n: Int = ??? + val m: Int = ??? + + $vals + """ + ) + + // Big compile-time product of sums of term references: (one + n + m) * (one + + // n + m) * …. The goal of this benchmark is to measure the performance impact + // of distributing addition over multiplication during compile-time operations + // normalization. + generateBenchmark("compiletime-ops", "distribute.scala", () => + val product = (1 to 18) + .map(_ => "(one.type + m.type + n.type)") + .mkString(" * ") + val vals = (1 to 50) + .map(i => f"val v$i: $product = ???") + .mkString("\n\n ") + + f""" +import scala.compiletime.ops.int.* + +object Test: + val one: 1 = ??? + val n: Int = ??? + val m: Int = ??? + + $vals + """ + ) + + def applicationCount = 14 + def applicationDepth = 10 + def applicationVals = 2 + + // Compile-time sums of big applications: Op[Op[…], Op[…]] + Op[Op[…], Op[…]] + // + …. 
Applications are deep balanced binary trees only differing in their + // very last (top-right) leafs. These applications are compared pairwise in + // order to sort the terms of the sum. + generateBenchmark("compiletime-ops", "sum-applications.scala", () => + def makeOp(depth: Int, last: Boolean, k: Int): String = + if depth == 0 then f"Op[one.type, ${if last then k.toString else "n.type"}]" + else f"Op[${makeOp(depth - 1, false, k)}, ${makeOp(depth - 1, last, k)}]" + val sum = (applicationCount to 1 by -1) + .map(k => makeOp(applicationDepth, true, k)) + .mkString(" + ") + val vals = (1 to applicationVals) + .map(i => f"val v$i: $sum = ???") + .mkString("\n\n ") + + f""" +import scala.compiletime.ops.int.* + +object Test: + val one: 1 = ??? + val n: Int = ??? + type SInt = Int & Singleton + type Op[A <: SInt, B <: SInt] <:SInt + + $vals + """ + ) diff --git a/bench/tests/compiletime-ops/empty.scala b/bench/tests/compiletime-ops/empty.scala new file mode 100644 index 000000000000..3da2acd61b75 --- /dev/null +++ b/bench/tests/compiletime-ops/empty.scala @@ -0,0 +1,6 @@ +import scala.compiletime.ops.int.* + +object Test: + val one: 1 = ??? + val n: Int = ??? + val m: Int = ??? 
diff --git a/build.sbt b/build.sbt index f6a2fe9d82bf..3ec2958324be 100644 --- a/build.sbt +++ b/build.sbt @@ -4,14 +4,12 @@ val `scala3-interfaces` = Build.`scala3-interfaces` val `scala3-compiler` = Build.`scala3-compiler` val `scala3-compiler-bootstrapped` = Build.`scala3-compiler-bootstrapped` val `scala3-library` = Build.`scala3-library` -val `scala3-library-js` = Build.`scala3-library-js` val `scala3-library-bootstrapped` = Build.`scala3-library-bootstrapped` val `scala3-library-bootstrappedJS` = Build.`scala3-library-bootstrappedJS` val `scala3-sbt-bridge` = Build.`scala3-sbt-bridge` val `scala3-sbt-bridge-tests` = Build.`scala3-sbt-bridge-tests` val `scala3-staging` = Build.`scala3-staging` val `scala3-tasty-inspector` = Build.`scala3-tasty-inspector` -val `scala3-tasty-inspector-nonbootstrapped` = Build.`scala3-tasty-inspector-nonbootstrapped` val `scala3-language-server` = Build.`scala3-language-server` val `scala3-bench` = Build.`scala3-bench` val `scala3-bench-bootstrapped` = Build.`scala3-bench-bootstrapped` @@ -21,11 +19,10 @@ val `tasty-core` = Build.`tasty-core` val `tasty-core-bootstrapped` = Build.`tasty-core-bootstrapped` val `tasty-core-scala2` = Build.`tasty-core-scala2` val scaladoc = Build.scaladoc -val `scaladoc-nonBootstrapped` = Build.`scaladoc-nonBootstrapped` val `scaladoc-testcases` = Build.`scaladoc-testcases` -val `scaladoc-testcases-nonBootstrapped` = Build.`scaladoc-testcases-nonBootstrapped` -val `scaladoc-js` = Build.`scaladoc-js` -val `scaladoc-js-nonBootstrapped` = Build.`scaladoc-js-nonBootstrapped` +val `scaladoc-js-common` = Build.`scaladoc-js-common` +val `scaladoc-js-main` = Build.`scaladoc-js-main` +val `scaladoc-js-contributors` = Build.`scaladoc-js-contributors` val `scala3-bench-run` = Build.`scala3-bench-run` val dist = Build.dist val `community-build` = Build.`community-build` @@ -35,7 +32,7 @@ val sjsSandbox = Build.sjsSandbox val sjsJUnitTests = Build.sjsJUnitTests val sjsCompilerTests = Build.sjsCompilerTests -val 
`sbt-dotty` = Build.`sbt-dotty` +val `sbt-test` = Build.`sbt-test` val `vscode-dotty` = Build.`vscode-dotty` inThisBuild(Build.thisBuildSettings) diff --git a/community-build/README.md b/community-build/README.md index a042f13b94ac..26396d372936 100644 --- a/community-build/README.md +++ b/community-build/README.md @@ -1,18 +1,22 @@ -# Dotty Community Build +# Scala 3 Community Build -This project contains tests to build and test a corpus of open sources Scala projects against the latest version of Dotty. +This project contains tests to build and test a corpus of open source Scala projects against the latest version of Scala 3. + +## Running it locally To run the community build on a local machine, first fetch all the git submodules with `git submodule update --init` and run `sbt community-build/test` from the root of the dotty repo. +To run a single project, you can use the usual syntax for running a single JUnit test, for example `community-build/testOnly -- *shapeless` + ## Adding your project To add your project to the community build you can follow these steps: -1. Get your project to compile with Dotty. Instructions can be found on the [dotty-example-project](https://github.com/lampepfl/dotty-example-project). - See the submodules in [community-projects](https://github.com/lampepfl/dotty/tree/master/community-build/community-projects/) for examples of projects that compile with Dotty. +1. Get your project to compile with Scala 3. Instructions can be found on the [scala3-example-project](https://github.com/lampepfl/scala3-example-project). + See the submodules in [community-projects](https://github.com/lampepfl/dotty/tree/master/community-build/community-projects/) for examples of projects that compile with Scala 3. 2. 
Open a PR against this repo that: - Adds your project as a new git submodule - - `git submodule add https://github.com/lampepfl/XYZ.git community-build/community-projects/XYZ` + - `git submodule add https://github.com/dotty-staging/XYZ.git community-build/community-projects/XYZ` - Add the project to [projects.scala](https://github.com/lampepfl/dotty/blob/master/community-build/src/scala/dotty/communitybuild/projects.scala) - Adds a test in [CommunityBuildTest.scala](https://github.com/lampepfl/dotty/blob/master/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala) diff --git a/community-build/community-projects/Lucre b/community-build/community-projects/Lucre index 87d680baaa23..412b1ac4b263 160000 --- a/community-build/community-projects/Lucre +++ b/community-build/community-projects/Lucre @@ -1 +1 @@ -Subproject commit 87d680baaa2355de5b062adcbdfc5005e787521b +Subproject commit 412b1ac4b2630b7e883822cca4ce0e6452a1bbfd diff --git a/community-build/community-projects/Monocle b/community-build/community-projects/Monocle new file mode 160000 index 000000000000..bc5781caf523 --- /dev/null +++ b/community-build/community-projects/Monocle @@ -0,0 +1 @@ +Subproject commit bc5781caf523eab7e2c0e92478cbbef568a1be43 diff --git a/community-build/community-projects/akka b/community-build/community-projects/akka index 5b9496313ca7..be87625f490b 160000 --- a/community-build/community-projects/akka +++ b/community-build/community-projects/akka @@ -1 +1 @@ -Subproject commit 5b9496313ca7be34e2a8149d38f8b1ce7c6f6b01 +Subproject commit be87625f490b529e646c897aebb1d271646d899a diff --git a/community-build/community-projects/cask b/community-build/community-projects/cask index d9e3ec387ade..03b6a24ab597 160000 --- a/community-build/community-projects/cask +++ b/community-build/community-projects/cask @@ -1 +1 @@ -Subproject commit d9e3ec387ade7f462fe696d25c647e3192daf566 +Subproject commit 03b6a24ab59796ff8f7a3dc5f28041fce816aaf9 diff --git 
a/community-build/community-projects/cats b/community-build/community-projects/cats index 2c27eb8b6fe5..878472d7bff4 160000 --- a/community-build/community-projects/cats +++ b/community-build/community-projects/cats @@ -1 +1 @@ -Subproject commit 2c27eb8b6fe567d5a7adcb8433646d6f718d3e94 +Subproject commit 878472d7bff4c3bfec8a265782c1e0d6a3147541 diff --git a/community-build/community-projects/cats-effect-2 b/community-build/community-projects/cats-effect-2 deleted file mode 160000 index f3d3bb3428fa..000000000000 --- a/community-build/community-projects/cats-effect-2 +++ /dev/null @@ -1 +0,0 @@ -Subproject commit f3d3bb3428faa9b2495310dfd520844c37018042 diff --git a/community-build/community-projects/cats-effect-3 b/community-build/community-projects/cats-effect-3 index 04155e429e23..af11317d40ce 160000 --- a/community-build/community-projects/cats-effect-3 +++ b/community-build/community-projects/cats-effect-3 @@ -1 +1 @@ -Subproject commit 04155e429e230224e61235c4e4dd125419f82f7a +Subproject commit af11317d40cee8979c4e40b60431bc0f3b9e03f0 diff --git a/community-build/community-projects/cats-effect-3-forward-compat b/community-build/community-projects/cats-effect-3-forward-compat new file mode 160000 index 000000000000..af11317d40ce --- /dev/null +++ b/community-build/community-projects/cats-effect-3-forward-compat @@ -0,0 +1 @@ +Subproject commit af11317d40cee8979c4e40b60431bc0f3b9e03f0 diff --git a/community-build/community-projects/cats-forward-compat b/community-build/community-projects/cats-forward-compat new file mode 160000 index 000000000000..878472d7bff4 --- /dev/null +++ b/community-build/community-projects/cats-forward-compat @@ -0,0 +1 @@ +Subproject commit 878472d7bff4c3bfec8a265782c1e0d6a3147541 diff --git a/community-build/community-projects/cats-mtl b/community-build/community-projects/cats-mtl index 1b0d1404f0eb..7679d606336a 160000 --- a/community-build/community-projects/cats-mtl +++ b/community-build/community-projects/cats-mtl @@ -1 +1 @@ 
-Subproject commit 1b0d1404f0eb104e72d4fd80c4ba285c9390fbc0 +Subproject commit 7679d606336a4da0e6dfca43c0a481db273cc10c diff --git a/community-build/community-projects/cats-mtl-forward-compat b/community-build/community-projects/cats-mtl-forward-compat new file mode 160000 index 000000000000..7679d606336a --- /dev/null +++ b/community-build/community-projects/cats-mtl-forward-compat @@ -0,0 +1 @@ +Subproject commit 7679d606336a4da0e6dfca43c0a481db273cc10c diff --git a/community-build/community-projects/coop b/community-build/community-projects/coop index d3bcd3622dc3..4babee9613a4 160000 --- a/community-build/community-projects/coop +++ b/community-build/community-projects/coop @@ -1 +1 @@ -Subproject commit d3bcd3622dc3a1f3257e2d861d724c04ab634b81 +Subproject commit 4babee9613a4bc0713d195676dd169c4f636a31a diff --git a/community-build/community-projects/coop-forward-compat b/community-build/community-projects/coop-forward-compat new file mode 160000 index 000000000000..4babee9613a4 --- /dev/null +++ b/community-build/community-projects/coop-forward-compat @@ -0,0 +1 @@ +Subproject commit 4babee9613a4bc0713d195676dd169c4f636a31a diff --git a/community-build/community-projects/discipline b/community-build/community-projects/discipline index 4e3f9678c847..b0865da0c5e0 160000 --- a/community-build/community-projects/discipline +++ b/community-build/community-projects/discipline @@ -1 +1 @@ -Subproject commit 4e3f9678c84702b079209ae9928d9b6c884a7514 +Subproject commit b0865da0c5e016ad1d45060f52885219256d3205 diff --git a/community-build/community-projects/discipline-forward-compat b/community-build/community-projects/discipline-forward-compat new file mode 160000 index 000000000000..b0865da0c5e0 --- /dev/null +++ b/community-build/community-projects/discipline-forward-compat @@ -0,0 +1 @@ +Subproject commit b0865da0c5e016ad1d45060f52885219256d3205 diff --git a/community-build/community-projects/discipline-munit b/community-build/community-projects/discipline-munit 
index e0a3aee65b85..014f8ca26fef 160000 --- a/community-build/community-projects/discipline-munit +++ b/community-build/community-projects/discipline-munit @@ -1 +1 @@ -Subproject commit e0a3aee65b85bf8d6bb53037e4bc33ebfdeaedff +Subproject commit 014f8ca26fefab7c32a2779b9d3382df14ccf860 diff --git a/community-build/community-projects/discipline-munit-forward-compat b/community-build/community-projects/discipline-munit-forward-compat new file mode 160000 index 000000000000..014f8ca26fef --- /dev/null +++ b/community-build/community-projects/discipline-munit-forward-compat @@ -0,0 +1 @@ +Subproject commit 014f8ca26fefab7c32a2779b9d3382df14ccf860 diff --git a/community-build/community-projects/discipline-specs2 b/community-build/community-projects/discipline-specs2 index b2fa931a44ee..3603b0874940 160000 --- a/community-build/community-projects/discipline-specs2 +++ b/community-build/community-projects/discipline-specs2 @@ -1 +1 @@ -Subproject commit b2fa931a44eeb6a6afa1343e57f963e73aafff59 +Subproject commit 3603b08749404f83946aab48203f8bd9f9410b49 diff --git a/community-build/community-projects/discipline-specs2-forward-compat b/community-build/community-projects/discipline-specs2-forward-compat new file mode 160000 index 000000000000..3603b0874940 --- /dev/null +++ b/community-build/community-projects/discipline-specs2-forward-compat @@ -0,0 +1 @@ +Subproject commit 3603b08749404f83946aab48203f8bd9f9410b49 diff --git a/community-build/community-projects/dotty-cps-async b/community-build/community-projects/dotty-cps-async index 614dc4a30557..b893454c4bbb 160000 --- a/community-build/community-projects/dotty-cps-async +++ b/community-build/community-projects/dotty-cps-async @@ -1 +1 @@ -Subproject commit 614dc4a30557bf7d282387d0c7efcdb59d97d76b +Subproject commit b893454c4bbbcc25cb99c2db0ba5989f3e1d6478 diff --git a/community-build/community-projects/endpoints4s b/community-build/community-projects/endpoints4s index 9433bf818e4a..3a667a3608ff 160000 --- 
a/community-build/community-projects/endpoints4s +++ b/community-build/community-projects/endpoints4s @@ -1 +1 @@ -Subproject commit 9433bf818e4ac06815017ec9344ce648bd52d82a +Subproject commit 3a667a3608ff9950c24e9b2b5038c71c1690a21d diff --git a/community-build/community-projects/fs2 b/community-build/community-projects/fs2 new file mode 160000 index 000000000000..bb69f51505d7 --- /dev/null +++ b/community-build/community-projects/fs2 @@ -0,0 +1 @@ +Subproject commit bb69f51505d7871af318da56be5281f25fa6b5af diff --git a/community-build/community-projects/izumi-reflect b/community-build/community-projects/izumi-reflect index 54051d0bca92..540f08283069 160000 --- a/community-build/community-projects/izumi-reflect +++ b/community-build/community-projects/izumi-reflect @@ -1 +1 @@ -Subproject commit 54051d0bca921706ef0a3f9f63264f6f57d50ef0 +Subproject commit 540f08283069aefd8a81fec1f3493c70217b6099 diff --git a/community-build/community-projects/jackson-module-scala b/community-build/community-projects/jackson-module-scala new file mode 160000 index 000000000000..13e3a34a46d6 --- /dev/null +++ b/community-build/community-projects/jackson-module-scala @@ -0,0 +1 @@ +Subproject commit 13e3a34a46d60dfcc0a787b46c73f2f3cd6cc379 diff --git a/community-build/community-projects/libretto b/community-build/community-projects/libretto new file mode 160000 index 000000000000..d229f3ccb9c4 --- /dev/null +++ b/community-build/community-projects/libretto @@ -0,0 +1 @@ +Subproject commit d229f3ccb9c49aa3b0fef1b3f7425e986155cc97 diff --git a/community-build/community-projects/munit b/community-build/community-projects/munit index 7006d9d3963c..662953cdb57f 160000 --- a/community-build/community-projects/munit +++ b/community-build/community-projects/munit @@ -1 +1 @@ -Subproject commit 7006d9d3963c0f1db5594e4a054a3531ee781c9a +Subproject commit 662953cdb57fec0d8e1baa7fcd1ab178a0bba8c6 diff --git a/community-build/community-projects/munit-cats-effect 
b/community-build/community-projects/munit-cats-effect new file mode 160000 index 000000000000..5a4ee90a625e --- /dev/null +++ b/community-build/community-projects/munit-cats-effect @@ -0,0 +1 @@ +Subproject commit 5a4ee90a625e01c47421a17d1b093b2f27b9f2c5 diff --git a/community-build/community-projects/munit-forward-compat b/community-build/community-projects/munit-forward-compat new file mode 160000 index 000000000000..662953cdb57f --- /dev/null +++ b/community-build/community-projects/munit-forward-compat @@ -0,0 +1 @@ +Subproject commit 662953cdb57fec0d8e1baa7fcd1ab178a0bba8c6 diff --git a/community-build/community-projects/onnx-scala b/community-build/community-projects/onnx-scala index ff0c47297fd2..3a5a45016d1a 160000 --- a/community-build/community-projects/onnx-scala +++ b/community-build/community-projects/onnx-scala @@ -1 +1 @@ -Subproject commit ff0c47297fd2885f8b56def3ea4fd22efdfe1e8b +Subproject commit 3a5a45016d1a48d2a84dc3159d3e08c1ad5ac587 diff --git a/community-build/community-projects/protoquill b/community-build/community-projects/protoquill index 524649385718..5f45b0048624 160000 --- a/community-build/community-projects/protoquill +++ b/community-build/community-projects/protoquill @@ -1 +1 @@ -Subproject commit 524649385718b3feed96cb4ac2725d5ee9ecf7ef +Subproject commit 5f45b00486240c5dcffff563299b989e7169be30 diff --git a/community-build/community-projects/scala-java8-compat b/community-build/community-projects/scala-java8-compat new file mode 160000 index 000000000000..57e1f7ccaa67 --- /dev/null +++ b/community-build/community-projects/scala-java8-compat @@ -0,0 +1 @@ +Subproject commit 57e1f7ccaa67dcac2b4227ab1a6ee0fcb5f42023 diff --git a/community-build/community-projects/scalacheck b/community-build/community-projects/scalacheck index ad6b6f997a91..976db31cd549 160000 --- a/community-build/community-projects/scalacheck +++ b/community-build/community-projects/scalacheck @@ -1 +1 @@ -Subproject commit 
ad6b6f997a9189dc9abdb187440cad4de5c33720 +Subproject commit 976db31cd549328167a90ecc6f5f31efa83cd845 diff --git a/community-build/community-projects/scalacheck-effect b/community-build/community-projects/scalacheck-effect new file mode 160000 index 000000000000..a275735caed7 --- /dev/null +++ b/community-build/community-projects/scalacheck-effect @@ -0,0 +1 @@ +Subproject commit a275735caed75eeb49b8ba1c4ef85d72ca6b61d8 diff --git a/community-build/community-projects/scalacheck-forward-compat b/community-build/community-projects/scalacheck-forward-compat new file mode 160000 index 000000000000..976db31cd549 --- /dev/null +++ b/community-build/community-projects/scalacheck-forward-compat @@ -0,0 +1 @@ +Subproject commit 976db31cd549328167a90ecc6f5f31efa83cd845 diff --git a/community-build/community-projects/scalatest b/community-build/community-projects/scalatest index d3b7db9af036..75f5a3be6632 160000 --- a/community-build/community-projects/scalatest +++ b/community-build/community-projects/scalatest @@ -1 +1 @@ -Subproject commit d3b7db9af036829e417e6a08940d9e4e406cfbf2 +Subproject commit 75f5a3be66322f69ea624bbf5f166fbd28c5ff9e diff --git a/community-build/community-projects/scalatestplus-junit b/community-build/community-projects/scalatestplus-junit index 3857010185d9..dd047825a880 160000 --- a/community-build/community-projects/scalatestplus-junit +++ b/community-build/community-projects/scalatestplus-junit @@ -1 +1 @@ -Subproject commit 3857010185d95c3b5fece4800759436ceebdc0e7 +Subproject commit dd047825a880bb467d69833dca198a27c8e30f87 diff --git a/community-build/community-projects/scalatestplus-scalacheck b/community-build/community-projects/scalatestplus-scalacheck index 010387f5854e..c9d4faeb8e1c 160000 --- a/community-build/community-projects/scalatestplus-scalacheck +++ b/community-build/community-projects/scalatestplus-scalacheck @@ -1 +1 @@ -Subproject commit 010387f5854eb473ddc71ecd2d6a0183dfebfbcb +Subproject commit 
c9d4faeb8e1c815bd932fd67a5d2fe138a2bbda8 diff --git a/community-build/community-projects/scalatestplus-testng b/community-build/community-projects/scalatestplus-testng new file mode 160000 index 000000000000..f7a439f1b207 --- /dev/null +++ b/community-build/community-projects/scalatestplus-testng @@ -0,0 +1 @@ +Subproject commit f7a439f1b2078b748bc904c5c774ad1205561e31 diff --git a/community-build/community-projects/scas b/community-build/community-projects/scas index 650b72f7baeb..372a9255ba77 160000 --- a/community-build/community-projects/scas +++ b/community-build/community-projects/scas @@ -1 +1 @@ -Subproject commit 650b72f7baeb4443f16fe4e923901837f5d9ad98 +Subproject commit 372a9255ba77ffae0f9fec3880ffdece4940d4ca diff --git a/community-build/community-projects/scodec b/community-build/community-projects/scodec index 84db5f776573..d59c0440b078 160000 --- a/community-build/community-projects/scodec +++ b/community-build/community-projects/scodec @@ -1 +1 @@ -Subproject commit 84db5f7765738024aebd8df2bf53151cc6d4d793 +Subproject commit d59c0440b078292dedff10b6743d6a086f468527 diff --git a/community-build/community-projects/scodec-bits b/community-build/community-projects/scodec-bits index 85de35c014b9..d513312fbfc9 160000 --- a/community-build/community-projects/scodec-bits +++ b/community-build/community-projects/scodec-bits @@ -1 +1 @@ -Subproject commit 85de35c014b948f0dd3f94e975c42d17deab4055 +Subproject commit d513312fbfc9fd54d0d3cde914b37e7755f7f6f4 diff --git a/community-build/community-projects/shapeless b/community-build/community-projects/shapeless index 94c1d4413373..e58dd710b5c4 160000 --- a/community-build/community-projects/shapeless +++ b/community-build/community-projects/shapeless @@ -1 +1 @@ -Subproject commit 94c1d44133739c7c57b64228caccc4fc938c2862 +Subproject commit e58dd710b5c46d54336242a7ba4bab90d440d4f8 diff --git a/community-build/community-projects/simulacrum-scalafix b/community-build/community-projects/simulacrum-scalafix index 
75f7e10d30fb..2515271c46ad 160000 --- a/community-build/community-projects/simulacrum-scalafix +++ b/community-build/community-projects/simulacrum-scalafix @@ -1 +1 @@ -Subproject commit 75f7e10d30fb1b848b7adaf2a026320ee548b56a +Subproject commit 2515271c46ad46512a43d20e1e8ae0793433cf0b diff --git a/community-build/community-projects/simulacrum-scalafix-forward-compat b/community-build/community-projects/simulacrum-scalafix-forward-compat new file mode 160000 index 000000000000..2515271c46ad --- /dev/null +++ b/community-build/community-projects/simulacrum-scalafix-forward-compat @@ -0,0 +1 @@ +Subproject commit 2515271c46ad46512a43d20e1e8ae0793433cf0b diff --git a/community-build/community-projects/specs2 b/community-build/community-projects/specs2 new file mode 160000 index 000000000000..0652daeefb57 --- /dev/null +++ b/community-build/community-projects/specs2 @@ -0,0 +1 @@ +Subproject commit 0652daeefb57c2d51e3f16ea5c44929bdba722bf diff --git a/community-build/community-projects/spire b/community-build/community-projects/spire new file mode 160000 index 000000000000..c98b32c6844f --- /dev/null +++ b/community-build/community-projects/spire @@ -0,0 +1 @@ +Subproject commit c98b32c6844f501491ab3a22d6f5be50b9b80c8d diff --git a/community-build/community-projects/stdLib213 b/community-build/community-projects/stdLib213 index 891f92f01cfb..2c742834cf16 160000 --- a/community-build/community-projects/stdLib213 +++ b/community-build/community-projects/stdLib213 @@ -1 +1 @@ -Subproject commit 891f92f01cfbc900a1a1efada73530246babd075 +Subproject commit 2c742834cf162ab89e914bbc0a8b975874a2c3c2 diff --git a/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala b/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala new file mode 100644 index 000000000000..3d52a3ff0631 --- /dev/null +++ b/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala @@ -0,0 +1,92 @@ +package dotty.communitybuild + +import java.nio.file._ 
+import java.io.{PrintWriter, File} +import java.nio.charset.StandardCharsets.UTF_8 + +object CommunityBuildRunner: + + /** Depending on the mode of operation, either + * runs the test or updates the project. Updating + * means that all the dependencies are fetched but + * minimal other extra other work is done. Updating + * is necessary since we run tests each time on a fresh + * Docker container. We run the update on Docker container + * creation time to create the cache of the dependencies + * and avoid network overhead. See https://github.com/lampepfl/dotty-drone + * for more infrastructural details. + */ + extension (self: CommunityProject) def run()(using suite: CommunityBuildRunner): Unit = + if self.requiresExperimental && !self.compilerSupportExperimental then + log(s"Skipping ${self.project} - it needs experimental features unsupported in this build.") + return + self.dependencies().foreach(_.publish()) + self.testOnlyDependencies().foreach(_.publish()) + suite.runProject(self) + +trait CommunityBuildRunner: + + /** fails the current operation, can be specialised in a concrete Runner + * - overridden in `CommunityBuildTest` + */ + def failWith(msg: String): Nothing = throw IllegalStateException(msg) + + /** Build the given project with the published local compiler and sbt plugin. + * + * This test reads the compiler version from community-build/dotty-bootstrapped.version + * and expects community-build/sbt-dotty-sbt to set the compiler plugin. 
+ * + * @param project The project name, should be a git submodule in community-build/ + * @param command The binary file of the program used to test the project – usually + * a build tool like SBT or Mill + * @param arguments Arguments to pass to the testing program + */ + def runProject(projectDef: CommunityProject): Unit = + val project = projectDef.project + val command = projectDef.binaryName + val arguments = projectDef.buildCommands + val compilerVersion = projectDef.compilerVersion + + @annotation.tailrec + def execTimes(task: () => Int, timesToRerun: Int): Boolean = + val exitCode = task() + if exitCode == 0 + then true + else if timesToRerun == 0 + then false + else + log(s"Rerunning tests in $project because of a previous run failure.") + execTimes(task, timesToRerun - 1) + + log(s"Building $project with dotty-bootstrapped $compilerVersion...") + + val projectDir = communitybuildDir.resolve("community-projects").resolve(project) + + if !Files.exists(projectDir.resolve(".git")) then + failWith(s""" + | + |Missing $project submodule. You can initialize this module using + | + | git submodule update --init community-build/community-projects/$project + | + |""".stripMargin) + + val testsCompletedSuccessfully = execTimes(projectDef.build, 3) + + if !testsCompletedSuccessfully then + failWith(s""" + | + |$command exited with an error code. To reproduce without JUnit, use: + | + | sbt community-build/prepareCommunityBuild + | cd community-build/community-projects/$project + | $command ${arguments.init.mkString(" ")} "${arguments.last}" + | + |For a faster feedback loop on SBT projects, one can try to extract a direct call to dotc + |using the sbt export command. 
For instance, for scalacheck, use + | sbt export jvm/test:compileIncremental + | + |""".stripMargin) + end runProject + +end CommunityBuildRunner diff --git a/community-build/src/scala/dotty/communitybuild/Main.scala b/community-build/src/scala/dotty/communitybuild/Main.scala index d135de12bdb0..7c3a39261eb0 100644 --- a/community-build/src/scala/dotty/communitybuild/Main.scala +++ b/community-build/src/scala/dotty/communitybuild/Main.scala @@ -5,6 +5,7 @@ import java.nio.file.Path import java.nio.file.Files import scala.sys.process._ +import CommunityBuildRunner.run object Main: @@ -56,7 +57,7 @@ object Main: val (toRun, ignored) = allProjects.partition( p => p.docCommand != null - && (!p.requiresExperimental || compilerSupportExperimental) + && (!p.requiresExperimental || p.compilerSupportExperimental) ) val paths = toRun.map { project => @@ -106,12 +107,15 @@ object Main: println(s"Documentation not found for ${failed.mkString(", ")}") sys.exit(1) + case "run" :: names if names.nonEmpty => + given CommunityBuildRunner() + withProjects(names, "Running")(_.run()) + case args => println("USAGE: ") - println("COMMAND is one of: publish doc run") + println("COMMAND is one of: publish, build, doc, doc all, run") println("Available projects are:") allProjects.foreach { k => println(s"\t${k.project}") } sys.exit(1) - diff --git a/community-build/src/scala/dotty/communitybuild/projects.scala b/community-build/src/scala/dotty/communitybuild/projects.scala index cc792159b104..1a23ccba2170 100644 --- a/community-build/src/scala/dotty/communitybuild/projects.scala +++ b/community-build/src/scala/dotty/communitybuild/projects.scala @@ -6,13 +6,10 @@ import java.nio.charset.StandardCharsets.UTF_8 lazy val communitybuildDir: Path = Paths.get(sys.props("user.dir")) -lazy val compilerVersion: String = +lazy val testedCompilerVersion: String = val file = communitybuildDir.resolve("scala3-bootstrapped.version") new String(Files.readAllBytes(file), UTF_8) -lazy val 
compilerSupportExperimental: Boolean = - compilerVersion.contains("SNAPSHOT") || compilerVersion.contains("NIGHTLY") - lazy val sbtPluginFilePath: String = // Workaround for https://github.com/sbt/sbt/issues/4395 new File(sys.props("user.home") + "/.sbt/1.0/plugins").mkdirs() @@ -39,16 +36,21 @@ sealed trait CommunityProject: val testCommand: String val publishCommand: String val docCommand: String - val dependencies: List[CommunityProject] + val dependencies: () => List[CommunityProject] + val testOnlyDependencies: () => List[CommunityProject] val binaryName: String val runCommandsArgs: List[String] = Nil val requiresExperimental: Boolean val environment: Map[String, String] = Map.empty + val compilerVersion: String final val projectDir = communitybuildDir.resolve("community-projects").resolve(project) + final val compilerSupportExperimental: Boolean = + compilerVersion.contains("SNAPSHOT") || compilerVersion.contains("NIGHTLY") + final def publishDependencies(): Unit = - dependencies.foreach(_.publish()) + dependencies().foreach(_.publish()) /** Publish this project to the local Maven repository */ final def publish(): Unit = @@ -86,9 +88,11 @@ end CommunityProject final case class MillCommunityProject( project: String, baseCommand: String, - dependencies: List[CommunityProject] = Nil, + dependencies: () => List[CommunityProject] = () => Nil, + testOnlyDependencies: () => List[CommunityProject] = () => Nil, ignoreDocs: Boolean = false, requiresExperimental: Boolean = false, + compilerVersion: String = testedCompilerVersion ) extends CommunityProject: override val binaryName: String = "./mill" override val testCommand = s"$baseCommand.test" @@ -103,11 +107,14 @@ final case class SbtCommunityProject( project: String, sbtTestCommand: String, extraSbtArgs: List[String] = Nil, - dependencies: List[CommunityProject] = Nil, + dependencies: () => List[CommunityProject] = () => Nil, + testOnlyDependencies: () => List[CommunityProject] = () => Nil, sbtPublishCommand: 
String = null, sbtDocCommand: String = null, scalacOptions: List[String] = SbtCommunityProject.scalacOptions, requiresExperimental: Boolean = false, + compilerVersion: String = testedCompilerVersion, + isForwardCompatProject: Boolean = false ) extends CommunityProject: override val binaryName: String = "sbt" @@ -116,6 +123,7 @@ final case class SbtCommunityProject( private val baseCommand = "clean; set Global/logLevel := Level.Error; set Global/updateOptions ~= (_.withLatestSnapshots(false)); " + ++ (if isForwardCompatProject then "set Global / isForwardCompatProject := true; " else "") ++ (if scalacOptions.isEmpty then "" else s"""set Global/scalacOptions ++= $scalacOptionsString;""") ++ s"++$compilerVersion!; " @@ -137,12 +145,25 @@ final case class SbtCommunityProject( case Some(ivyHome) => List(s"-Dsbt.ivy.home=$ivyHome") case _ => Nil extraSbtArgs ++ sbtProps ++ List( - "-sbt-version", "1.5.0", + "-sbt-version", "1.6.1", "-Dsbt.supershell=false", s"-Ddotty.communitybuild.dir=$communitybuildDir", s"--addPluginSbtFile=$sbtPluginFilePath" ) + def forwardCompat: SbtCommunityProject = + this.copy( + project = project + "-forward-compat", + dependencies = () => dependencies().map(forwardCompatMapping), + testOnlyDependencies = () => testOnlyDependencies().map(forwardCompatMapping), + isForwardCompatProject = true + ) + + def withScalaRelease(release: String): SbtCommunityProject = + this.copy( + scalacOptions = scalacOptions ++ Seq("-Yscala-release", release) + ) + object SbtCommunityProject: def scalacOptions = List( "-Xcheck-macros", @@ -163,89 +184,89 @@ object projects: lazy val utest = MillCommunityProject( project = "utest", - baseCommand = s"utest.jvm[$compilerVersion]", + baseCommand = s"utest.jvm[$testedCompilerVersion]", ignoreDocs = true ) lazy val sourcecode = MillCommunityProject( project = "sourcecode", - baseCommand = s"sourcecode.jvm[$compilerVersion]", + baseCommand = s"sourcecode.jvm[$testedCompilerVersion]", ignoreDocs = true ) lazy val oslib = 
MillCommunityProject( project = "os-lib", - baseCommand = s"os.jvm[$compilerVersion]", - dependencies = List(utest, sourcecode) + baseCommand = s"os.jvm[$testedCompilerVersion]", + dependencies = () => List(utest, sourcecode) ) lazy val oslibWatch = MillCommunityProject( project = "os-lib", - baseCommand = s"os.watch[$compilerVersion]", - dependencies = List(utest, sourcecode), + baseCommand = s"os.watch[$testedCompilerVersion]", + dependencies = () => List(utest, sourcecode), ignoreDocs = true ) lazy val ujson = MillCommunityProject( project = "upickle", - baseCommand = s"ujson.jvm[$compilerVersion]", - dependencies = List(geny) + baseCommand = s"ujson.jvm[$testedCompilerVersion]", + dependencies = () => List(geny) ) lazy val upickle = MillCommunityProject( project = "upickle", - baseCommand = s"upickle.jvm[$compilerVersion]", - dependencies = List(geny, utest) + baseCommand = s"upickle.jvm[$testedCompilerVersion]", + dependencies = () => List(geny, utest) ) lazy val upickleCore = MillCommunityProject( project = "upickle", - baseCommand = s"core.jvm[$compilerVersion]", - dependencies = List(geny, utest) + baseCommand = s"core.jvm[$testedCompilerVersion]", + dependencies = () => List(geny, utest) ) lazy val upickleImplicits = MillCommunityProject( project = "upickle", - baseCommand = s"implicits.jvm[$compilerVersion]", - dependencies = List(upickleCore, ujson) + baseCommand = s"implicits.jvm[$testedCompilerVersion]", + dependencies = () => List(upickleCore, ujson) ) lazy val upack = MillCommunityProject( project = "upickle", - baseCommand = s"upack.jvm[$compilerVersion]", - dependencies = List(ujson, upickleCore) + baseCommand = s"upack.jvm[$testedCompilerVersion]", + dependencies = () => List(ujson, upickleCore) ) lazy val geny = MillCommunityProject( project = "geny", - baseCommand = s"geny.jvm[$compilerVersion]", - dependencies = List(utest) + baseCommand = s"geny.jvm[$testedCompilerVersion]", + dependencies = () => List(utest) ) lazy val fansi = 
MillCommunityProject( project = "fansi", - baseCommand = s"fansi.jvm[$compilerVersion]", - dependencies = List(utest, sourcecode), + baseCommand = s"fansi.jvm[$testedCompilerVersion]", + dependencies = () => List(utest, sourcecode), ignoreDocs = true ) lazy val pprint = MillCommunityProject( project = "PPrint", - baseCommand = s"pprint.jvm[$compilerVersion]", - dependencies = List(fansi), + baseCommand = s"pprint.jvm[$testedCompilerVersion]", + dependencies = () => List(fansi), ignoreDocs = true ) lazy val requests = MillCommunityProject( project = "requests-scala", - baseCommand = s"requests[$compilerVersion]", - dependencies = List(geny, utest, ujson, upickleCore) + baseCommand = s"requests[$testedCompilerVersion]", + dependencies = () => List(geny, utest, ujson, upickleCore) ) lazy val cask = MillCommunityProject( project = "cask", - baseCommand = s"cask[$compilerVersion]", - dependencies = List(utest, geny, sourcecode, pprint, upickle, upickleImplicits, upack, requests) + baseCommand = s"cask[$testedCompilerVersion]", + dependencies = () => List(utest, geny, sourcecode, pprint, upickle, upickleImplicits, upack, requests) ) lazy val scas = MillCommunityProject( @@ -273,15 +294,31 @@ object projects: sbtDocCommand = forceDoc("jvm") ) - lazy val scalatest = SbtCommunityProject( + lazy val scalacheckForwardCompat = scalacheck.forwardCompat.withScalaRelease("3.0") + + lazy val scalatest: SbtCommunityProject = SbtCommunityProject( project = "scalatest", - sbtTestCommand = "scalacticDotty/clean;scalacticTestDotty/test; scalatestTestDotty/test", - sbtPublishCommand = "scalacticDotty/publishLocal; scalatestDotty/publishLocal", - sbtDocCommand = ";scalacticDotty/doc" // fails with missing type ;scalatestDotty/doc" + sbtTestCommand = + List( + "scalacticDotty/clean; scalacticDottyJS/clean", + + // Some scalatest's tests are flaky (https://github.com/scalatest/scalatest/issues/2049) + // so we disable them, this list is based on the one used in the Scala 2 community 
build + // (https://github.com/scala/community-build/blob/2.13.x/proj/scalatest.conf). + """set scalatestTest / Test / unmanagedSources / excludeFilter := HiddenFileFilter || "GeneratorSpec.scala" || "FrameworkSuite.scala" || "WaitersSpec.scala" || "TestSortingReporterSpec.scala" || "JavaFuturesSpec.scala" || "ParallelTestExecutionSpec.scala" || "TimeLimitsSpec.scala" || "TestThreadsStartingCounterSpec.scala" || "SuiteSortingReporterSpec.scala" || "CommonGeneratorsSpec.scala" || "PropCheckerAssertingSpec.scala" || "ConductorMethodsSuite.scala"""", + """set scalacticTest / Test / unmanagedSources / excludeFilter := HiddenFileFilter || "NonEmptyArraySpec.scala"""", + """set genRegularTests4 / Test / managedSources ~= (_.filterNot(_.getName == "FrameworkSuite.scala").filterNot(_.getName == "GeneratorSpec.scala").filterNot(_.getName == "CommonGeneratorsSpec.scala").filterNot(_.getName == "ParallelTestExecutionSpec.scala").filterNot(_.getName == "DispatchReporterSpec.scala").filterNot(_.getName == "TestThreadsStartingCounterSpec.scala").filterNot(_.getName == "EventuallySpec.scala"))""", + + "scalacticTestDotty/test; scalatestTestDotty/test; scalacticDottyJS/compile; scalatestDottyJS/compile" + ).mkString("; "), + sbtPublishCommand = "scalacticDotty/publishLocal; scalatestDotty/publishLocal; scalacticDottyJS/publishLocal; scalatestDottyJS/publishLocal", + sbtDocCommand = ";scalacticDotty/doc", // fails with missing type ;scalatestDotty/doc" // cannot take signature of (test: org.scalatest.concurrent.ConductorFixture#OneArgTest): // org.scalatest.Outcome // Problem parsing scalatest.dotty/target/scala-3.0.0-M2/src_managed/main/org/scalatest/concurrent/ConductorFixture.scala:[602..624..3843], documentation may not be generated. 
// dotty.tools.dotc.core.MissingType: + dependencies = () => List(scalaXml), + testOnlyDependencies = () => List(scalatestplusJunit, scalatestplusTestNG) ) lazy val scalatestplusScalacheck = SbtCommunityProject( @@ -289,19 +326,27 @@ object projects: sbtTestCommand = "scalatestPlusScalaCheckJVM/test", sbtPublishCommand = "scalatestPlusScalaCheckJVM/publishLocal", sbtDocCommand = "scalatestPlusScalaCheckJVM/doc", - dependencies = List(scalatest, scalacheck) + dependencies = () => List(scalatest, scalacheck) ) lazy val scalatestplusJunit = SbtCommunityProject( project = "scalatestplus-junit", sbtTestCommand = "scalatestplus-junit/test", sbtPublishCommand = "scalatestplus-junit/publishLocal", - dependencies = List(scalatest) + dependencies = () => List(scalatest) + ) + + lazy val scalatestplusTestNG = SbtCommunityProject( + project = "scalatestplus-testng", + sbtTestCommand = "test", + sbtPublishCommand = "publishLocal", + dependencies = () => List(scalatest) ) lazy val scalaXml = SbtCommunityProject( project = "scala-xml", - sbtTestCommand = "xml/test", + sbtTestCommand = "xml/test", + sbtPublishCommand = "xml/publishLocal", sbtDocCommand = "xml/doc" ) @@ -331,7 +376,7 @@ object projects: project = "minitest", sbtTestCommand = "test", sbtDocCommand = aggregateDoc("lawsJVM")("minitestJVM"), - dependencies = List(scalacheck) + dependencies = () => List(scalacheck) ) lazy val fastparse = SbtCommunityProject( @@ -344,8 +389,8 @@ object projects: lazy val stdLib213 = SbtCommunityProject( project = "stdLib213", extraSbtArgs = List("-Dscala.build.compileWithDotty=true"), - sbtTestCommand = """library/compile""", - sbtPublishCommand = """set library/Compile/packageDoc/publishArtifact := false; library/publishLocal""", + sbtTestCommand = """set Global / fatalWarnings := false; library/compile""", + sbtPublishCommand = """set Global / fatalWarnings := false; set library/Compile/packageDoc/publishArtifact := false; library/publishLocal""", // sbtDocCommand = "library/doc" // 
Does no compile? No idea :/ ) @@ -389,14 +434,14 @@ object projects: project = "sconfig", sbtTestCommand = "sconfigJVM/test", sbtDocCommand = "sconfigJVM/doc", - dependencies = List(scalaCollectionCompat) + dependencies = () => List(scalaCollectionCompat) ) lazy val zio = SbtCommunityProject( project = "zio", sbtTestCommand = "testJVMDotty", sbtDocCommand = forceDoc("coreJVM"), - dependencies = List(izumiReflect) + dependencies = () => List(izumiReflect) ) lazy val munit = SbtCommunityProject( @@ -404,16 +449,17 @@ object projects: sbtTestCommand = "testsJVM/test;testsJS/test;", sbtPublishCommand = "munitJVM/publishLocal; munitJS/publishLocal; munitScalacheckJVM/publishLocal; munitScalacheckJS/publishLocal; junit/publishLocal", sbtDocCommand = "junit/doc; munitJVM/doc", - dependencies = List(scalacheck) + dependencies = () => List(scalacheck) ) + lazy val munitForwardCompat = munit.forwardCompat.withScalaRelease("3.0") + lazy val scodecBits = SbtCommunityProject( project = "scodec-bits", sbtTestCommand = "coreJVM/test;coreJS/test", sbtPublishCommand = "coreJVM/publishLocal;coreJS/publishLocal", sbtDocCommand = "coreJVM/doc", - dependencies = List(munit), - requiresExperimental = true, + dependencies = () => List(munit), ) lazy val scodec = SbtCommunityProject( @@ -421,8 +467,7 @@ object projects: sbtTestCommand = "unitTests/test", // Adds package sbtDocCommand = "coreJVM/doc", - dependencies = List(munit, scodecBits), - requiresExperimental = true, + dependencies = () => List(munit, scodecBits), ) lazy val scalaParserCombinators = SbtCommunityProject( @@ -455,7 +500,7 @@ object projects: // [error] class scalaz.iteratee.Iteratee cannot be unpickled because no class file was found sbtDocCommand = forceDoc("effectJVM"), - dependencies = List(scalacheck) + dependencies = () => List(scalacheck) ) lazy val endpoints4s = SbtCommunityProject( @@ -464,25 +509,21 @@ object projects: sbtDocCommand = ";json-schemaJVM/doc ;algebraJVM/doc; openapiJVM/doc; http4s-server/doc 
;http4s-client/doc ;play-server/doc ;play-client/doc ;akka-http-server/doc ;akka-http-client/doc", ) - lazy val catsEffect2 = SbtCommunityProject( - project = "cats-effect-2", - sbtTestCommand = "test", - sbtDocCommand = ";coreJVM/doc ;lawsJVM/doc", - dependencies = List(cats, disciplineMunit) - ) - lazy val catsEffect3 = SbtCommunityProject( project = "cats-effect-3", sbtTestCommand = "test", + sbtPublishCommand = "publishLocal", sbtDocCommand = ";coreJVM/doc ;lawsJVM/doc ;kernelJVM/doc", - dependencies = List(cats, coop, disciplineSpecs2, scalacheck) + dependencies = () => List(cats, coop, disciplineSpecs2, scalacheck) ) + lazy val catsEffect3ForwardCompat = catsEffect3.forwardCompat.copy(compilerVersion = "3.0.2") + lazy val scalaParallelCollections = SbtCommunityProject( project = "scala-parallel-collections", sbtTestCommand = "test", sbtDocCommand = forceDoc("core"), - dependencies = List(scalacheck) + dependencies = () => List(scalacheck) ) lazy val scalaCollectionCompat = SbtCommunityProject( @@ -491,6 +532,14 @@ object projects: sbtPublishCommand = "compat30/publishLocal", ) + lazy val scalaJava8Compat = SbtCommunityProject( + project = "scala-java8-compat", + // the fnGen subproject must be built with 2.12.x + sbtTestCommand = s"++2.12.14; ++$testedCompilerVersion; set fnGen/dependencyOverrides := Nil; test", + sbtPublishCommand = s"++2.12.14; ++$testedCompilerVersion; set fnGen/dependencyOverrides := Nil; publishLocal", + scalacOptions = Nil // avoid passing Scala 3 options to Scala 2.12 in fnGen subproject + ) + lazy val verify = SbtCommunityProject( project = "verify", sbtTestCommand = "verifyJVM/test", @@ -502,67 +551,81 @@ object projects: project = "discipline", sbtTestCommand = "coreJVM/test;coreJS/test", sbtPublishCommand = "set every credentials := Nil;coreJVM/publishLocal;coreJS/publishLocal", - dependencies = List(scalacheck) + dependencies = () => List(scalacheck) ) + lazy val disciplineForwardCompat = 
discipline.forwardCompat.withScalaRelease("3.0") + lazy val disciplineMunit = SbtCommunityProject( project = "discipline-munit", - sbtTestCommand = "test", + sbtTestCommand = "coreJVM/test;coreJS/test", sbtPublishCommand = "coreJVM/publishLocal;coreJS/publishLocal", - dependencies = List(discipline, munit) + dependencies = () => List(discipline, munit) ) + lazy val disciplineMunitForwardCompat = disciplineMunit.forwardCompat.withScalaRelease("3.0") + lazy val disciplineSpecs2 = SbtCommunityProject( project = "discipline-specs2", sbtTestCommand = "test", sbtPublishCommand = "coreJVM/publishLocal;coreJS/publishLocal", - dependencies = List(discipline), + dependencies = () => List(discipline), scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init") ) + lazy val disciplineSpecs2ForwardCompat = disciplineSpecs2.forwardCompat.withScalaRelease("3.0") + lazy val simulacrumScalafixAnnotations = SbtCommunityProject( project = "simulacrum-scalafix", sbtTestCommand = "annotation/test:compile;annotationJS/test:compile", sbtPublishCommand = "annotation/publishLocal;annotationJS/publishLocal", ) + lazy val simulacrumScalafixAnnotationsForwardCompat = simulacrumScalafixAnnotations.forwardCompat.withScalaRelease("3.0") + lazy val cats = SbtCommunityProject( project = "cats", sbtTestCommand = "set Global/scalaJSStage := FastOptStage;buildJVM;validateAllJS", sbtPublishCommand = "catsJVM/publishLocal;catsJS/publishLocal", - dependencies = List(discipline, disciplineMunit, scalacheck, simulacrumScalafixAnnotations), + dependencies = () => List(discipline, disciplineMunit, scalacheck, simulacrumScalafixAnnotations), scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init") // disable -Ysafe-init, due to -Xfatal-warning ) + lazy val catsForwardCompat = cats.forwardCompat.withScalaRelease("3.0") + lazy val catsMtl = SbtCommunityProject( project = "cats-mtl", sbtTestCommand = "testsJVM/test;testsJS/test", sbtPublishCommand = 
"coreJVM/publishLocal;coreJS/publishLocal;lawsJVM/publishLocal;lawsJS/publishLocal", - dependencies = List(cats, disciplineMunit) + dependencies = () => List(cats, disciplineMunit) ) + lazy val catsMtlForwardCompat = catsMtl.forwardCompat.copy(compilerVersion = "3.0.2") + lazy val coop = SbtCommunityProject( project = "coop", sbtTestCommand = "test", sbtPublishCommand = "coreJVM/publishLocal;coreJS/publishLocal", - dependencies = List(cats, catsMtl) + dependencies = () => List(cats, catsMtl) ) + lazy val coopForwardCompat = coop.forwardCompat.withScalaRelease("3.0") + // 'Sciss/Lucre' with its dependencies: lazy val scissEqual = SbtCommunityProject( project = "Equal", sbtTestCommand = "rootJVM/test", sbtPublishCommand = "rootJVM/publishLocal", - dependencies = List(scalatest), + dependencies = () => List(scalatest), ) lazy val scissFingerTree = SbtCommunityProject( project = "FingerTree", sbtTestCommand = "rootJVM/test", sbtPublishCommand = "rootJVM/publishLocal", - dependencies = List(scalatest), + dependencies = () => List(scalatest), ) lazy val scissLog = SbtCommunityProject( @@ -575,42 +638,42 @@ object projects: project = "Model", sbtTestCommand = "rootJVM/test", sbtPublishCommand = "rootJVM/publishLocal", - dependencies = List(scalatest), + dependencies = () => List(scalatest), ) lazy val scissNumbers = SbtCommunityProject( project = "Numbers", sbtTestCommand = "rootJVM/test", sbtPublishCommand = "rootJVM/publishLocal", - dependencies = List(scalatest), + dependencies = () => List(scalatest), ) lazy val scissSerial = SbtCommunityProject( project = "Serial", sbtTestCommand = "rootJVM/test", sbtPublishCommand = "rootJVM/publishLocal", - dependencies = List(scalatest), + dependencies = () => List(scalatest), ) lazy val scissAsyncFile = SbtCommunityProject( project = "AsyncFile", sbtTestCommand = "rootJVM/test", sbtPublishCommand = "rootJVM/publishLocal", - dependencies = List(scissLog, scalatest), + dependencies = () => List(scissLog, scalatest), ) lazy val 
scissSpan = SbtCommunityProject( project = "Span", sbtTestCommand = "rootJVM/test", sbtPublishCommand = "rootJVM/publishLocal", - dependencies = List(scissSerial, scalatest), + dependencies = () => List(scissSerial, scalatest), ) lazy val scalaSTM = SbtCommunityProject( project = "scala-stm", sbtTestCommand = "rootJVM/test", sbtPublishCommand = "rootJVM/publishLocal", - dependencies = List(scalatestplusJunit), + dependencies = () => List(scalatestplusJunit), ) lazy val scissLucre = SbtCommunityProject( @@ -618,14 +681,14 @@ object projects: sbtTestCommand = "adjunctJVM/test;baseJVM/test;confluentJVM/test;coreJVM/test;dataJVM/test;expr0JVM/test;expr1JVM/test;exprJVM/test;geomJVM/test;lucre-bdb/test;testsJVM/test", extraSbtArgs = List("-Dde.sciss.lucre.ShortTests=true"), sbtPublishCommand = "adjunctJVM/publishLocal;baseJVM/publishLocal;confluentJVM/publishLocal;coreJVM/publishLocal;dataJVM/publishLocal;expr0JVM/publishLocal;expr1JVM/publishLocal;exprJVM/publishLocal;geomJVM/publishLocal;lucre-bdb/publishLocal", - dependencies = List(scalaSTM, scissAsyncFile, scissEqual, scissFingerTree, scissLog, scissModel, scissNumbers, scissSerial, scissSpan, scalatest), + dependencies = () => List(scalaSTM, scissAsyncFile, scissEqual, scissFingerTree, scissLog, scissModel, scissNumbers, scissSerial, scissSpan, scalatest), ) lazy val izumiReflect = SbtCommunityProject( project = "izumi-reflect", sbtTestCommand = "test", sbtPublishCommand = "publishLocal", - dependencies = List(scalatest) + dependencies = () => List(scalatest) ) lazy val perspective = SbtCommunityProject( @@ -633,21 +696,28 @@ object projects: // No library with easy typeclasses to verify data against exist for Dotty, so no tests yet // Until then I guess this mainly serves to check that it still compiles at all sbtTestCommand = "dottyPerspectiveExamples/compile", - dependencies = List(cats) + dependencies = () => List(cats) ) lazy val akka = SbtCommunityProject( project = "akka", - extraSbtArgs = 
List(s"-Dakka.build.scalaVersion=$compilerVersion"), + extraSbtArgs = List(s"-Dakka.build.scalaVersion=$testedCompilerVersion"), sbtTestCommand = "set every targetSystemJdk := true; akka-actor-tests/Test/compile", - dependencies = List(scalatest, scalatestplusJunit, scalatestplusScalacheck) + dependencies = () => List(scalatest, scalatestplusJunit, scalatestplusScalacheck) + ) + + lazy val monocle = SbtCommunityProject( + project = "Monocle", + sbtTestCommand = "coreJVM/test; macrosJVM/test; testJVM/test", + dependencies = () => List(cats, munit, discipline, disciplineMunit) ) lazy val protoquill = SbtCommunityProject( project = "protoquill", - sbtTestCommand = "test", + extraSbtArgs = List("-Dcommunity=true", "-DcommunityRemote=true", "-Dquill.macro.stdout=true"), + sbtTestCommand = "runCommunityBuild", sbtPublishCommand = "publishLocal", - dependencies = List(), // TODO add scalatest and pprint (see protoquill/build.sbt) + dependencies = () => List(scalatest), scalacOptions = List("-language:implicitConversions"), // disabled -Ysafe-init, due to bug in macro ) @@ -655,18 +725,80 @@ object projects: project = "onnx-scala", sbtTestCommand = "test", sbtPublishCommand = "publishLocal", - dependencies = List(scalatest) + dependencies = () => List(scalatest) ) lazy val playJson = SbtCommunityProject( project = "play-json", sbtTestCommand = "test", sbtPublishCommand = "publishLocal", - dependencies = List(scalatest, scalatestplusScalacheck), + dependencies = () => List(scalatest, scalatestplusScalacheck), + ) + + lazy val munitCatsEffect = SbtCommunityProject( + project = "munit-cats-effect", + sbtTestCommand = "ce3JVM/test; ce3JS/test", + sbtPublishCommand = "ce3JVM/publishLocal; ce3JS/publishLocal", + dependencies = () => List(munit, catsEffect3) + ) + + lazy val scalacheckEffect = SbtCommunityProject( + project = "scalacheck-effect", + sbtTestCommand = "test", + sbtPublishCommand = "publishLocal", + dependencies = () => List(cats, catsEffect3, munit, scalacheck) + ) 
+ + lazy val fs2 = SbtCommunityProject( + project = "fs2", + sbtTestCommand = "coreJVM/test; coreJS/test", // io/test requires JDK9+ + sbtPublishCommand = "coreJVM/publishLocal; coreJS/publishLocal", + dependencies = () => List(cats, catsEffect3, munitCatsEffect, scalacheckEffect, scodecBits) + ) + + lazy val libretto = SbtCommunityProject( + project = "libretto", + sbtTestCommand = "core/test; examples/compile", + sbtPublishCommand = "core/publishLocal; examples/publishLocal", + dependencies = () => List(scalatest) + ) + + lazy val jacksonModuleScala = SbtCommunityProject( + project = "jackson-module-scala", + sbtTestCommand = "test", + sbtPublishCommand = "publishLocal", + dependencies = () => List(scalaJava8Compat, scalatest) + ) + + lazy val specs2 = SbtCommunityProject( + project = "specs2", + sbtTestCommand = "core/testOnly -- exclude ci", + sbtPublishCommand = "core/publishLocal", + dependencies = () => List() + ) + + lazy val spire = SbtCommunityProject( + project = "spire", + sbtTestCommand = "test", + sbtPublishCommand = "publishLocal", + dependencies = () => List(cats, disciplineMunit) ) end projects +lazy val forwardCompatMapping = Map[CommunityProject, CommunityProject]( + projects.scalacheck -> projects.scalacheckForwardCompat, + projects.munit -> projects.munitForwardCompat, + projects.discipline -> projects.disciplineForwardCompat, + projects.disciplineMunit -> projects.disciplineMunitForwardCompat, + projects.disciplineSpecs2 -> projects.disciplineSpecs2ForwardCompat, + projects.simulacrumScalafixAnnotations -> projects.simulacrumScalafixAnnotationsForwardCompat, + projects.cats -> projects.catsForwardCompat, + projects.catsMtl -> projects.catsMtlForwardCompat, + projects.coop -> projects.coopForwardCompat, + projects.catsEffect3 -> projects.catsEffect3ForwardCompat, +) + def allProjects = List( projects.utest, projects.sourcecode, @@ -686,6 +818,7 @@ def allProjects = List( projects.intent, projects.algebra, projects.scalacheck, + 
projects.scalacheckForwardCompat, projects.scalatest, projects.scalatestplusScalacheck, projects.scalatestplusJunit, @@ -702,23 +835,31 @@ def allProjects = List( projects.sconfig, projects.zio, projects.munit, + projects.munitForwardCompat, projects.scodecBits, projects.scodec, projects.scalaParserCombinators, projects.dottyCpsAsync, projects.scalaz, projects.endpoints4s, - projects.catsEffect2, projects.catsEffect3, + projects.catsEffect3ForwardCompat, projects.scalaParallelCollections, projects.scalaCollectionCompat, + projects.scalaJava8Compat, projects.verify, projects.discipline, + projects.disciplineForwardCompat, projects.disciplineMunit, + projects.disciplineMunitForwardCompat, projects.disciplineSpecs2, + projects.disciplineSpecs2ForwardCompat, projects.simulacrumScalafixAnnotations, + projects.simulacrumScalafixAnnotationsForwardCompat, projects.cats, + projects.catsForwardCompat, projects.catsMtl, + projects.catsMtlForwardCompat, projects.coop, projects.scissEqual, projects.scissFingerTree, @@ -733,9 +874,19 @@ def allProjects = List( projects.izumiReflect, projects.perspective, projects.akka, + projects.monocle, projects.protoquill, projects.onnxScala, projects.playJson, + projects.scalatestplusTestNG, + projects.munitCatsEffect, + projects.scalacheckEffect, + projects.fs2, + projects.libretto, + projects.jacksonModuleScala, + projects.specs2, + projects.coop, + projects.coopForwardCompat ) lazy val projectMap = allProjects.groupBy(_.project) diff --git a/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala b/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala index eb16e2466bc8..f3f0bf90188e 100644 --- a/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala +++ b/community-build/test/scala/dotty/communitybuild/CommunityBuildTest.scala @@ -7,95 +7,19 @@ import org.junit.{Ignore, Test} import org.junit.Assert.{assertEquals, fail} import org.junit.experimental.categories.Category -abstract class 
CommunityBuildTest: - given CommunityBuildTest = this - - /** Depending on the mode of operation, either - * runs the test or updates the project. Updating - * means that all the dependencies are fetched but - * minimal other extra other work is done. Updating - * is necessary since we run tests each time on a fresh - * Docker container. We run the update on Docker container - * creation time to create the cache of the dependencies - * and avoid network overhead. See https://github.com/lampepfl/dotty-drone - * for more infrastructural details. - */ - extension (self: CommunityProject) def run()(using suite: CommunityBuildTest): Unit = - if self.requiresExperimental && !compilerSupportExperimental then - println( - s"Skipping ${self.project} - it needs experimental features unsupported in this build." - ) - return - self.dependencies.foreach(_.publish()) - suite.test(self) - - /** Build the given project with the published local compiler and sbt plugin. - * - * This test reads the compiler version from community-build/dotty-bootstrapped.version - * and expects community-build/sbt-dotty-sbt to set the compiler plugin. 
- * - * @param project The project name, should be a git submodule in community-build/ - * @param command The binary file of the program used to test the project – usually - * a build tool like SBT or Mill - * @param arguments Arguments to pass to the testing program - */ - def test(projectDef: CommunityProject): Unit = { - val project = projectDef.project - val command = projectDef.binaryName - val arguments = projectDef.buildCommands - - @annotation.tailrec - def execTimes(task: () => Int, timesToRerun: Int): Boolean = - val exitCode = task() - if exitCode == 0 - then true - else if timesToRerun == 0 - then false - else - log(s"Rerunning tests in $project because of a previous run failure.") - execTimes(task, timesToRerun - 1) - - log(s"Building $project with dotty-bootstrapped $compilerVersion...") - - val projectDir = communitybuildDir.resolve("community-projects").resolve(project) - - if (!Files.exists(projectDir.resolve(".git"))) { - fail(s""" - | - |Missing $project submodule. You can initialize this module using - | - | git submodule update --init community-build/community-projects/$project - | - |""".stripMargin) - } - - val testsCompletedSuccessfully = execTimes(projectDef.build, 3) - - if (!testsCompletedSuccessfully) { - fail(s""" - | - |$command exited with an error code. To reproduce without JUnit, use: - | - | sbt community-build/prepareCommunityBuild - | cd community-build/community-projects/$project - | $command ${arguments.init.mkString(" ")} "${arguments.last}" - | - |For a faster feedback loop on SBT projects, one can try to extract a direct call to dotc - |using the sbt export command. For instance, for scalacheck, use - | sbt export jvm/test:compileIncremental - | - |""".stripMargin) - } - } -end CommunityBuildTest +import CommunityBuildRunner.run class TestCategory +given testRunner: CommunityBuildRunner with + override def failWith(msg: String) = { fail(msg); ??? 
} + @Category(Array(classOf[TestCategory])) -class CommunityBuildTestA extends CommunityBuildTest: +class CommunityBuildTestA: @Test def izumiReflect = projects.izumiReflect.run() @Test def scalaSTM = projects.scalaSTM.run() @Test def scalatest = projects.scalatest.run() + @Test def scalatestplusTestNG = projects.scalatestplusTestNG.run() // 'Sciss/Lucre' dependencies: // @Test def scissEqual = projects.scissEqual .run() // @Test def scissFingerTree = projects.scissFingerTree.run() @@ -110,24 +34,27 @@ class CommunityBuildTestA extends CommunityBuildTest: end CommunityBuildTestA @Category(Array(classOf[TestCategory])) -class CommunityBuildTestB extends CommunityBuildTest: +class CommunityBuildTestB: @Test def cats = projects.cats.run() - @Test def catsEffect2 = projects.catsEffect2.run() @Test def catsEffect3 = projects.catsEffect3.run() @Test def catsMtl = projects.catsMtl.run() @Test def coop = projects.coop.run() @Test def discipline = projects.discipline.run() @Test def disciplineMunit = projects.disciplineMunit.run() @Test def disciplineSpecs2 = projects.disciplineSpecs2.run() + @Test def fs2 = projects.fs2.run() @Test def munit = projects.munit.run() + @Test def munitCatsEffect = projects.munitCatsEffect.run() @Test def perspective = projects.perspective.run() + @Test def scalacheckEffect = projects.scalacheckEffect.run() @Test def scodec = projects.scodec.run() @Test def scodecBits = projects.scodecBits.run() + @Test def monocle = projects.monocle.run() @Test def simulacrumScalafixAnnotations = projects.simulacrumScalafixAnnotations.run() end CommunityBuildTestB @Category(Array(classOf[TestCategory])) -class CommunityBuildTestC extends CommunityBuildTest: +class CommunityBuildTestC: @Test def akka = projects.akka.run() @Test def algebra = projects.algebra.run() @Test def betterfiles = projects.betterfiles.run() @@ -140,6 +67,8 @@ class CommunityBuildTestC extends CommunityBuildTest: @Test def fastparse = projects.fastparse.run() @Test def geny = 
projects.geny.run() @Test def intent = projects.intent.run() + @Test def jacksonModuleScala = projects.jacksonModuleScala.run() + @Test def libretto = projects.libretto.run() @Test def minitest = projects.minitest.run() @Test def onnxScala = projects.onnxScala.run() @Test def oslib = projects.oslib.run() @@ -150,6 +79,7 @@ class CommunityBuildTestC extends CommunityBuildTest: @Test def requests = projects.requests.run() @Test def scalacheck = projects.scalacheck.run() @Test def scalaCollectionCompat = projects.scalaCollectionCompat.run() + @Test def scalaJava8Compat = projects.scalaJava8Compat.run() @Test def scalap = projects.scalap.run() @Test def scalaParallelCollections = projects.scalaParallelCollections.run() @Test def scalaParserCombinators = projects.scalaParserCombinators.run() @@ -157,10 +87,12 @@ class CommunityBuildTestC extends CommunityBuildTest: @Test def scalatestplusScalacheck = projects.scalatestplusScalacheck.run() @Test def scalaXml = projects.scalaXml.run() @Test def scalaz = projects.scalaz.run() - @Test def scas = if compilerSupportExperimental then projects.scas.run() + @Test def scas = projects.scas.run() @Test def sconfig = projects.sconfig.run() @Test def shapeless = projects.shapeless.run() + @Test def spire = projects.spire.run() @Test def sourcecode = projects.sourcecode.run() + @Test def specs2 = projects.specs2.run() @Test def stdLib213 = projects.stdLib213.run() @Test def ujson = projects.ujson.run() @Test def upickle = projects.upickle.run() @@ -168,3 +100,17 @@ class CommunityBuildTestC extends CommunityBuildTest: @Test def verify = projects.verify.run() @Test def xmlInterpolator = projects.xmlInterpolator.run() end CommunityBuildTestC + +@Category(Array(classOf[TestCategory])) +class CommunityBuildTestForwardCompat: + @Test def catsEffect3ForwardCompat = projects.catsEffect3ForwardCompat.run() + @Test def catsForwardCompat = projects.catsForwardCompat.run() + @Test def catsMtlForwardCompat = projects.catsMtlForwardCompat.run() + 
@Test def coopForwardCompat = projects.coopForwardCompat.run() + @Test def disciplineForwardCompat = projects.disciplineForwardCompat.run() + @Test def disciplineMunitForwardCompat = projects.disciplineMunitForwardCompat.run() + @Test def disciplineSpecs2ForwardCompat = projects.disciplineSpecs2ForwardCompat.run() + @Test def munitForwardCompat = projects.munitForwardCompat.run() + @Test def scalacheckForwardCompat = projects.scalacheckForwardCompat.run() + @Test def simulacrumScalafixAnnotationsForwardCompat = projects.simulacrumScalafixAnnotationsForwardCompat.run() +end CommunityBuildTestForwardCompat diff --git a/compiler/src/dotty/tools/MainGenericRunner.scala b/compiler/src/dotty/tools/MainGenericRunner.scala new file mode 100644 index 000000000000..b7ef90dcdc60 --- /dev/null +++ b/compiler/src/dotty/tools/MainGenericRunner.scala @@ -0,0 +1,274 @@ +package dotty.tools + + +import scala.annotation.tailrec +import scala.io.Source +import scala.util.{ Try, Success, Failure } +import java.io.File +import java.lang.Thread +import scala.annotation.internal.sharable +import dotty.tools.dotc.util.ClasspathFromClassloader +import dotty.tools.runner.ObjectRunner +import dotty.tools.dotc.config.Properties.envOrNone +import java.util.jar._ +import dotty.tools.io.Jar +import dotty.tools.runner.ScalaClassLoader +import java.nio.file.{Files, Paths, Path} +import dotty.tools.dotc.config.CommandLineParser +import dotty.tools.scripting.StringDriver + +enum ExecuteMode: + case Guess + case Script + case Repl + case Run + case PossibleRun + case Expression + +case class Settings( + verbose: Boolean = false, + classPath: List[String] = List.empty, + executeMode: ExecuteMode = ExecuteMode.Guess, + exitCode: Int = 0, + javaArgs: List[String] = List.empty, + scalaArgs: List[String] = List.empty, + residualArgs: List[String] = List.empty, + possibleEntryPaths: List[String] = List.empty, + scriptArgs: List[String] = List.empty, + targetScript: String = "", + targetExpression: String = 
"", + targetToRun: String = "", + save: Boolean = false, + modeShouldBePossibleRun: Boolean = false, + modeShouldBeRun: Boolean = false, + compiler: Boolean = false, +) { + def withExecuteMode(em: ExecuteMode): Settings = this.executeMode match + case ExecuteMode.Guess | ExecuteMode.PossibleRun => + this.copy(executeMode = em) + case _ => + println(s"execute_mode==[$executeMode], attempted overwrite by [$em]") + this.copy(exitCode = 1) + end withExecuteMode + + def withScalaArgs(args: String*): Settings = + this.copy(scalaArgs = scalaArgs.appendedAll(args.toList)) + + def withJavaArgs(args: String*): Settings = + this.copy(javaArgs = javaArgs.appendedAll(args.toList)) + + def withResidualArgs(args: String*): Settings = + this.copy(residualArgs = residualArgs.appendedAll(args.toList)) + + def withPossibleEntryPaths(args: String*): Settings = + this.copy(possibleEntryPaths = possibleEntryPaths.appendedAll(args.toList)) + + def withScriptArgs(args: String*): Settings = + this.copy(scriptArgs = scriptArgs.appendedAll(args.toList)) + + def withTargetScript(file: String): Settings = + Try(Source.fromFile(file)).toOption match + case Some(_) => this.copy(targetScript = file) + case None => + println(s"not found $file") + this.copy(exitCode = 2) + end withTargetScript + + def withTargetToRun(targetToRun: String): Settings = + this.copy(targetToRun = targetToRun) + + def withExpression(scalaSource: String): Settings = + this.copy(targetExpression = scalaSource) + + def withSave: Settings = + this.copy(save = true) + + def noSave: Settings = + this.copy(save = false) + + def withModeShouldBePossibleRun: Settings = + this.copy(modeShouldBePossibleRun = true) + + def withModeShouldBeRun: Settings = + this.copy(modeShouldBeRun = true) + + def withCompiler: Settings = + this.copy(compiler = true) +} + +object MainGenericRunner { + + val classpathSeparator = File.pathSeparator + + @sharable val javaOption = raw"""-J(.*)""".r + @sharable val scalaOption = raw"""@.*""".r + 
@sharable val colorOption = raw"""-color:.*""".r + @tailrec + def process(args: List[String], settings: Settings): Settings = args match + case Nil => + settings + case "-run" :: fqName :: tail => + process(tail, settings.withExecuteMode(ExecuteMode.Run).withTargetToRun(fqName)) + case ("-cp" | "-classpath" | "--class-path") :: cp :: tail => + val cpEntries = cp.split(classpathSeparator).toList + val singleEntryClasspath: Boolean = cpEntries.take(2).size == 1 + val globdir: String = if singleEntryClasspath then cp.replaceAll("[\\\\/][^\\\\/]*$", "") else "" // slash/backslash agnostic + def validGlobbedJar(s: String): Boolean = s.startsWith(globdir) && ((s.toLowerCase.endsWith(".jar") || s.toLowerCase.endsWith(".zip"))) + val (tailargs, newEntries) = if singleEntryClasspath && validGlobbedJar(cpEntries.head) then + // reassemble globbed wildcard classpath + // globdir is wildcard directory for globbed jar files, reconstruct the intended classpath + val cpJars = tail.takeWhile( f => validGlobbedJar(f) ) + val remainingArgs = tail.drop(cpJars.size) + (remainingArgs, cpEntries ++ cpJars) + else + (tail, cpEntries) + + process(tailargs, settings.copy(classPath = settings.classPath ++ newEntries.filter(_.nonEmpty))) + + case ("-version" | "--version") :: _ => + settings.copy( + executeMode = ExecuteMode.Repl, + residualArgs = List("-version") + ) + case ("-v" | "-verbose" | "--verbose") :: tail => + process( + tail, + settings.copy( + verbose = true, + residualArgs = settings.residualArgs :+ "-verbose" + ) + ) + case "-save" :: tail => + process(tail, settings.withSave) + case "-nosave" :: tail => + process(tail, settings.noSave) + case "-with-compiler" :: tail => + process(tail, settings.withCompiler) + case (o @ javaOption(striped)) :: tail => + process(tail, settings.withJavaArgs(striped).withScalaArgs(o)) + case (o @ scalaOption(_*)) :: tail => + val remainingArgs = (CommandLineParser.expandArg(o) ++ tail).toList + process(remainingArgs, settings) + case (o @ 
colorOption(_*)) :: tail => + process(tail, settings.withScalaArgs(o)) + case "-e" :: expression :: tail => + val mainSource = s"@main def main(args: String *): Unit =\n ${expression}" + settings + .withExecuteMode(ExecuteMode.Expression) + .withExpression(mainSource) + .withScriptArgs(tail*) + .noSave // -save not useful here + case arg :: tail => + val line = Try(Source.fromFile(arg).getLines.toList).toOption.flatMap(_.headOption) + lazy val hasScalaHashbang = { val s = line.getOrElse("") ; s.startsWith("#!") && s.contains("scala") } + if arg.endsWith(".scala") || arg.endsWith(".sc") || hasScalaHashbang then + settings + .withExecuteMode(ExecuteMode.Script) + .withTargetScript(arg) + .withScriptArgs(tail*) + else + val newSettings = if arg.startsWith("-") then settings else settings.withPossibleEntryPaths(arg).withModeShouldBePossibleRun + process(tail, newSettings.withResidualArgs(arg)) + + + def main(args: Array[String]): Unit = + val scalaOpts = envOrNone("SCALA_OPTS").toArray.flatMap(_.split(" ")).filter(_.nonEmpty) + val allArgs = scalaOpts ++ args + val settings = process(allArgs.toList, Settings()) + if settings.exitCode != 0 then System.exit(settings.exitCode) + + def removeCompiler(cp: Array[String]) = + if (!settings.compiler) then // Let's remove compiler from the classpath + val compilerLibs = Seq("scala3-compiler", "scala3-interfaces", "tasty-core", "scala-asm", "scala3-staging", "scala3-tasty-inspector") + cp.filterNot(c => compilerLibs.exists(c.contains)) + else + cp + + def run(settings: Settings): Unit = settings.executeMode match + case ExecuteMode.Repl => + val properArgs = + List("-classpath", settings.classPath.mkString(classpathSeparator)).filter(Function.const(settings.classPath.nonEmpty)) + ++ settings.residualArgs + repl.Main.main(properArgs.toArray) + + case ExecuteMode.PossibleRun => + val newClasspath = (settings.classPath :+ ".").flatMap(_.split(classpathSeparator).filter(_.nonEmpty)).map(File(_).toURI.toURL) + import 
dotty.tools.runner.RichClassLoader._ + val newClassLoader = ScalaClassLoader.fromURLsParallelCapable(newClasspath) + val targetToRun = settings.possibleEntryPaths.to(LazyList).find { entryPath => + newClassLoader.tryToLoadClass(entryPath).orElse { + Option.when(Jar.isJarOrZip(dotty.tools.io.Path(entryPath)))(Jar(entryPath).mainClass).flatten + }.isDefined + } + targetToRun match + case Some(fqName) => + run(settings.withTargetToRun(fqName).withResidualArgs(settings.residualArgs.filter { _ != fqName }*).withExecuteMode(ExecuteMode.Run)) + case None => + run(settings.withExecuteMode(ExecuteMode.Repl)) + case ExecuteMode.Run => + val scalaClasspath = ClasspathFromClassloader(Thread.currentThread().getContextClassLoader).split(classpathSeparator) + val newClasspath = (settings.classPath.flatMap(_.split(classpathSeparator).filter(_.nonEmpty)) ++ removeCompiler(scalaClasspath) :+ ".").map(File(_).toURI.toURL) + val res = ObjectRunner.runAndCatch(newClasspath, settings.targetToRun, settings.residualArgs).flatMap { + case ex: ClassNotFoundException if ex.getMessage == settings.targetToRun => + val file = settings.targetToRun + Jar(file).mainClass match + case Some(mc) => + ObjectRunner.runAndCatch(newClasspath :+ File(file).toURI.toURL, mc, settings.residualArgs) + case None => + Some(IllegalArgumentException(s"No main class defined in manifest in jar: $file")) + case ex => Some(ex) + } + errorFn("", res) + case ExecuteMode.Script => + val targetScript = Paths.get(settings.targetScript).toFile + val targetJar = settings.targetScript.replaceAll("[.][^\\/]*$", "")+".jar" + val precompiledJar = File(targetJar) + val mainClass = if !precompiledJar.isFile then "" else Jar(targetJar).mainClass.getOrElse("") + val jarIsValid = mainClass.nonEmpty && precompiledJar.lastModified >= targetScript.lastModified && settings.save + if jarIsValid then + // precompiledJar exists, is newer than targetScript, and manifest defines a mainClass + sys.props("script.path") = 
targetScript.toPath.toAbsolutePath.normalize.toString + val scalaClasspath = ClasspathFromClassloader(Thread.currentThread().getContextClassLoader).split(classpathSeparator) + val newClasspath = (settings.classPath.flatMap(_.split(classpathSeparator).filter(_.nonEmpty)) ++ removeCompiler(scalaClasspath) :+ ".").map(File(_).toURI.toURL) + val res = if mainClass.nonEmpty then + ObjectRunner.runAndCatch(newClasspath :+ File(targetJar).toURI.toURL, mainClass, settings.scriptArgs) + else + Some(IllegalArgumentException(s"No main class defined in manifest in jar: $precompiledJar")) + errorFn("", res) + + else + val properArgs = + List("-classpath", settings.classPath.mkString(classpathSeparator)).filter(Function.const(settings.classPath.nonEmpty)) + ++ settings.residualArgs + ++ (if settings.save then List("-save") else Nil) + ++ settings.scalaArgs + ++ List("-script", settings.targetScript) + ++ settings.scriptArgs + scripting.Main.main(properArgs.toArray) + case ExecuteMode.Expression => + val cp = settings.classPath match { + case Nil => "" + case list => list.mkString(classpathSeparator) + } + val cpArgs = if cp.isEmpty then Nil else List("-classpath", cp) + val properArgs = cpArgs ++ settings.residualArgs ++ settings.scalaArgs + val driver = StringDriver(properArgs.toArray, settings.targetExpression) + driver.compileAndRun(settings.classPath) + + case ExecuteMode.Guess => + if settings.modeShouldBePossibleRun then + run(settings.withExecuteMode(ExecuteMode.PossibleRun)) + else if settings.modeShouldBeRun then + run(settings.withExecuteMode(ExecuteMode.Run)) + else + run(settings.withExecuteMode(ExecuteMode.Repl)) + + run(settings) + + + def errorFn(str: String, e: Option[Throwable] = None, isFailure: Boolean = true): Boolean = { + if (str.nonEmpty) Console.err.println(str) + e.foreach(_.printStackTrace()) + !isFailure + } +} diff --git a/compiler/src/dotty/tools/backend/ScalaPrimitivesOps.scala b/compiler/src/dotty/tools/backend/ScalaPrimitivesOps.scala index 
40b567b640b8..6b5bfbc3e00e 100644 --- a/compiler/src/dotty/tools/backend/ScalaPrimitivesOps.scala +++ b/compiler/src/dotty/tools/backend/ScalaPrimitivesOps.scala @@ -5,161 +5,161 @@ object ScalaPrimitivesOps extends ScalaPrimitivesOps class ScalaPrimitivesOps { // Arithmetic unary operations - final val POS = 1 // +x - final val NEG = 2 // -x - final val NOT = 3 // ~x + inline val POS = 1 // +x + inline val NEG = 2 // -x + inline val NOT = 3 // ~x // Arithmetic binary operations - final val ADD = 10 // x + y - final val SUB = 11 // x - y - final val MUL = 12 // x * y - final val DIV = 13 // x / y - final val MOD = 14 // x % y + inline val ADD = 10 // x + y + inline val SUB = 11 // x - y + inline val MUL = 12 // x * y + inline val DIV = 13 // x / y + inline val MOD = 14 // x % y // Bitwise operations - final val OR = 20 // x | y - final val XOR = 21 // x ^ y - final val AND = 22 // x & y + inline val OR = 20 // x | y + inline val XOR = 21 // x ^ y + inline val AND = 22 // x & y // Shift operations - final val LSL = 30 // x << y - final val LSR = 31 // x >>> y - final val ASR = 32 // x >> y + inline val LSL = 30 // x << y + inline val LSR = 31 // x >>> y + inline val ASR = 32 // x >> y // Comparison operations - final val ID = 40 // x eq y - final val NI = 41 // x ne y - final val EQ = 42 // x == y - final val NE = 43 // x != y - final val LT = 44 // x < y - final val LE = 45 // x <= y - final val GT = 46 // x > y - final val GE = 47 // x >= y + inline val ID = 40 // x eq y + inline val NI = 41 // x ne y + inline val EQ = 42 // x == y + inline val NE = 43 // x != y + inline val LT = 44 // x < y + inline val LE = 45 // x <= y + inline val GT = 46 // x > y + inline val GE = 47 // x >= y // Boolean unary operations - final val ZNOT = 50 // !x + inline val ZNOT = 50 // !x // Boolean binary operations - final val ZOR = 60 // x || y - final val ZAND = 61 // x && y + inline val ZOR = 60 // x || y + inline val ZAND = 61 // x && y // Array operations - final val LENGTH = 70 
// x.length - final val APPLY = 71 // x(y) - final val UPDATE = 72 // x(y) = z + inline val LENGTH = 70 // x.length + inline val APPLY = 71 // x(y) + inline val UPDATE = 72 // x(y) = z // Any operations - final val IS = 80 // x.is[y] - final val AS = 81 // x.as[y] - final val HASH = 87 // x.## + inline val IS = 80 // x.is[y] + inline val AS = 81 // x.as[y] + inline val HASH = 87 // x.## // AnyRef operations - final val SYNCHRONIZED = 90 // x.synchronized(y) + inline val SYNCHRONIZED = 90 // x.synchronized(y) // String operations - final val CONCAT = 100 // String.valueOf(x)+String.valueOf(y) + inline val CONCAT = 100 // String.valueOf(x)+String.valueOf(y) // coercions - final val COERCE = 101 + inline val COERCE = 101 // RunTime operations - final val BOX = 110 // RunTime.box_(x) - final val UNBOX = 111 // RunTime.unbox_(x) - final val NEW_ZARRAY = 112 // RunTime.zarray(x) - final val NEW_BARRAY = 113 // RunTime.barray(x) - final val NEW_SARRAY = 114 // RunTime.sarray(x) - final val NEW_CARRAY = 115 // RunTime.carray(x) - final val NEW_IARRAY = 116 // RunTime.iarray(x) - final val NEW_LARRAY = 117 // RunTime.larray(x) - final val NEW_FARRAY = 118 // RunTime.farray(x) - final val NEW_DARRAY = 119 // RunTime.darray(x) - final val NEW_OARRAY = 120 // RunTime.oarray(x) - - final val ZARRAY_LENGTH = 131 // RunTime.zarray_length(x) - final val BARRAY_LENGTH = 132 // RunTime.barray_length(x) - final val SARRAY_LENGTH = 133 // RunTime.sarray_length(x) - final val CARRAY_LENGTH = 134 // RunTime.carray_length(x) - final val IARRAY_LENGTH = 135 // RunTime.iarray_length(x) - final val LARRAY_LENGTH = 136 // RunTime.larray_length(x) - final val FARRAY_LENGTH = 137 // RunTime.farray_length(x) - final val DARRAY_LENGTH = 138 // RunTime.darray_length(x) - final val OARRAY_LENGTH = 139 // RunTime.oarray_length(x) - - final val ZARRAY_GET = 140 // RunTime.zarray_get(x,y) - final val BARRAY_GET = 141 // RunTime.barray_get(x,y) - final val SARRAY_GET = 142 // RunTime.sarray_get(x,y) - 
final val CARRAY_GET = 143 // RunTime.carray_get(x,y) - final val IARRAY_GET = 144 // RunTime.iarray_get(x,y) - final val LARRAY_GET = 145 // RunTime.larray_get(x,y) - final val FARRAY_GET = 146 // RunTime.farray_get(x,y) - final val DARRAY_GET = 147 // RunTime.darray_get(x,y) - final val OARRAY_GET = 148 // RunTime.oarray_get(x,y) - - final val ZARRAY_SET = 150 // RunTime.zarray(x,y,z) - final val BARRAY_SET = 151 // RunTime.barray(x,y,z) - final val SARRAY_SET = 152 // RunTime.sarray(x,y,z) - final val CARRAY_SET = 153 // RunTime.carray(x,y,z) - final val IARRAY_SET = 154 // RunTime.iarray(x,y,z) - final val LARRAY_SET = 155 // RunTime.larray(x,y,z) - final val FARRAY_SET = 156 // RunTime.farray(x,y,z) - final val DARRAY_SET = 157 // RunTime.darray(x,y,z) - final val OARRAY_SET = 158 // RunTime.oarray(x,y,z) - - final val B2B = 200 // RunTime.b2b(x) - final val B2S = 201 // RunTime.b2s(x) - final val B2C = 202 // RunTime.b2c(x) - final val B2I = 203 // RunTime.b2i(x) - final val B2L = 204 // RunTime.b2l(x) - final val B2F = 205 // RunTime.b2f(x) - final val B2D = 206 // RunTime.b2d(x) - - final val S2B = 210 // RunTime.s2b(x) - final val S2S = 211 // RunTime.s2s(x) - final val S2C = 212 // RunTime.s2c(x) - final val S2I = 213 // RunTime.s2i(x) - final val S2L = 214 // RunTime.s2l(x) - final val S2F = 215 // RunTime.s2f(x) - final val S2D = 216 // RunTime.s2d(x) - - final val C2B = 220 // RunTime.c2b(x) - final val C2S = 221 // RunTime.c2s(x) - final val C2C = 222 // RunTime.c2c(x) - final val C2I = 223 // RunTime.c2i(x) - final val C2L = 224 // RunTime.c2l(x) - final val C2F = 225 // RunTime.c2f(x) - final val C2D = 226 // RunTime.c2d(x) - - final val I2B = 230 // RunTime.i2b(x) - final val I2S = 231 // RunTime.i2s(x) - final val I2C = 232 // RunTime.i2c(x) - final val I2I = 233 // RunTime.i2i(x) - final val I2L = 234 // RunTime.i2l(x) - final val I2F = 235 // RunTime.i2f(x) - final val I2D = 236 // RunTime.i2d(x) - - final val L2B = 240 // RunTime.l2b(x) - final 
val L2S = 241 // RunTime.l2s(x) - final val L2C = 242 // RunTime.l2c(x) - final val L2I = 243 // RunTime.l2i(x) - final val L2L = 244 // RunTime.l2l(x) - final val L2F = 245 // RunTime.l2f(x) - final val L2D = 246 // RunTime.l2d(x) - - final val F2B = 250 // RunTime.f2b(x) - final val F2S = 251 // RunTime.f2s(x) - final val F2C = 252 // RunTime.f2c(x) - final val F2I = 253 // RunTime.f2i(x) - final val F2L = 254 // RunTime.f2l(x) - final val F2F = 255 // RunTime.f2f(x) - final val F2D = 256 // RunTime.f2d(x) - - final val D2B = 260 // RunTime.d2b(x) - final val D2S = 261 // RunTime.d2s(x) - final val D2C = 262 // RunTime.d2c(x) - final val D2I = 263 // RunTime.d2i(x) - final val D2L = 264 // RunTime.d2l(x) - final val D2F = 265 // RunTime.d2f(x) - final val D2D = 266 // RunTime.d2d(x) + inline val BOX = 110 // RunTime.box_(x) + inline val UNBOX = 111 // RunTime.unbox_(x) + inline val NEW_ZARRAY = 112 // RunTime.zarray(x) + inline val NEW_BARRAY = 113 // RunTime.barray(x) + inline val NEW_SARRAY = 114 // RunTime.sarray(x) + inline val NEW_CARRAY = 115 // RunTime.carray(x) + inline val NEW_IARRAY = 116 // RunTime.iarray(x) + inline val NEW_LARRAY = 117 // RunTime.larray(x) + inline val NEW_FARRAY = 118 // RunTime.farray(x) + inline val NEW_DARRAY = 119 // RunTime.darray(x) + inline val NEW_OARRAY = 120 // RunTime.oarray(x) + + inline val ZARRAY_LENGTH = 131 // RunTime.zarray_length(x) + inline val BARRAY_LENGTH = 132 // RunTime.barray_length(x) + inline val SARRAY_LENGTH = 133 // RunTime.sarray_length(x) + inline val CARRAY_LENGTH = 134 // RunTime.carray_length(x) + inline val IARRAY_LENGTH = 135 // RunTime.iarray_length(x) + inline val LARRAY_LENGTH = 136 // RunTime.larray_length(x) + inline val FARRAY_LENGTH = 137 // RunTime.farray_length(x) + inline val DARRAY_LENGTH = 138 // RunTime.darray_length(x) + inline val OARRAY_LENGTH = 139 // RunTime.oarray_length(x) + + inline val ZARRAY_GET = 140 // RunTime.zarray_get(x,y) + inline val BARRAY_GET = 141 // 
RunTime.barray_get(x,y) + inline val SARRAY_GET = 142 // RunTime.sarray_get(x,y) + inline val CARRAY_GET = 143 // RunTime.carray_get(x,y) + inline val IARRAY_GET = 144 // RunTime.iarray_get(x,y) + inline val LARRAY_GET = 145 // RunTime.larray_get(x,y) + inline val FARRAY_GET = 146 // RunTime.farray_get(x,y) + inline val DARRAY_GET = 147 // RunTime.darray_get(x,y) + inline val OARRAY_GET = 148 // RunTime.oarray_get(x,y) + + inline val ZARRAY_SET = 150 // RunTime.zarray(x,y,z) + inline val BARRAY_SET = 151 // RunTime.barray(x,y,z) + inline val SARRAY_SET = 152 // RunTime.sarray(x,y,z) + inline val CARRAY_SET = 153 // RunTime.carray(x,y,z) + inline val IARRAY_SET = 154 // RunTime.iarray(x,y,z) + inline val LARRAY_SET = 155 // RunTime.larray(x,y,z) + inline val FARRAY_SET = 156 // RunTime.farray(x,y,z) + inline val DARRAY_SET = 157 // RunTime.darray(x,y,z) + inline val OARRAY_SET = 158 // RunTime.oarray(x,y,z) + + inline val B2B = 200 // RunTime.b2b(x) + inline val B2S = 201 // RunTime.b2s(x) + inline val B2C = 202 // RunTime.b2c(x) + inline val B2I = 203 // RunTime.b2i(x) + inline val B2L = 204 // RunTime.b2l(x) + inline val B2F = 205 // RunTime.b2f(x) + inline val B2D = 206 // RunTime.b2d(x) + + inline val S2B = 210 // RunTime.s2b(x) + inline val S2S = 211 // RunTime.s2s(x) + inline val S2C = 212 // RunTime.s2c(x) + inline val S2I = 213 // RunTime.s2i(x) + inline val S2L = 214 // RunTime.s2l(x) + inline val S2F = 215 // RunTime.s2f(x) + inline val S2D = 216 // RunTime.s2d(x) + + inline val C2B = 220 // RunTime.c2b(x) + inline val C2S = 221 // RunTime.c2s(x) + inline val C2C = 222 // RunTime.c2c(x) + inline val C2I = 223 // RunTime.c2i(x) + inline val C2L = 224 // RunTime.c2l(x) + inline val C2F = 225 // RunTime.c2f(x) + inline val C2D = 226 // RunTime.c2d(x) + + inline val I2B = 230 // RunTime.i2b(x) + inline val I2S = 231 // RunTime.i2s(x) + inline val I2C = 232 // RunTime.i2c(x) + inline val I2I = 233 // RunTime.i2i(x) + inline val I2L = 234 // RunTime.i2l(x) + 
inline val I2F = 235 // RunTime.i2f(x) + inline val I2D = 236 // RunTime.i2d(x) + + inline val L2B = 240 // RunTime.l2b(x) + inline val L2S = 241 // RunTime.l2s(x) + inline val L2C = 242 // RunTime.l2c(x) + inline val L2I = 243 // RunTime.l2i(x) + inline val L2L = 244 // RunTime.l2l(x) + inline val L2F = 245 // RunTime.l2f(x) + inline val L2D = 246 // RunTime.l2d(x) + + inline val F2B = 250 // RunTime.f2b(x) + inline val F2S = 251 // RunTime.f2s(x) + inline val F2C = 252 // RunTime.f2c(x) + inline val F2I = 253 // RunTime.f2i(x) + inline val F2L = 254 // RunTime.f2l(x) + inline val F2F = 255 // RunTime.f2f(x) + inline val F2D = 256 // RunTime.f2d(x) + + inline val D2B = 260 // RunTime.d2b(x) + inline val D2S = 261 // RunTime.d2s(x) + inline val D2C = 262 // RunTime.d2c(x) + inline val D2I = 263 // RunTime.d2i(x) + inline val D2L = 264 // RunTime.d2l(x) + inline val D2F = 265 // RunTime.d2f(x) + inline val D2D = 266 // RunTime.d2d(x) /** Check whether the given operation code is an array operation. */ def isArrayOp(code: Int): Boolean = diff --git a/compiler/src/dotty/tools/backend/jvm/AsmUtils.scala b/compiler/src/dotty/tools/backend/jvm/AsmUtils.scala index 7f5887d99a7f..8a71a09aa7ab 100644 --- a/compiler/src/dotty/tools/backend/jvm/AsmUtils.scala +++ b/compiler/src/dotty/tools/backend/jvm/AsmUtils.scala @@ -19,8 +19,8 @@ object AsmUtils { /** * Print the bytecode of classes generated by GenBCode to the standard output. */ - final val traceClassEnabled = false - final val traceClassPattern = "" + inline val traceClassEnabled = false + inline val traceClassPattern = "" /** * Print the bytedcode of classes as they are serialized by the ASM library. The serialization @@ -28,8 +28,8 @@ object AsmUtils { * introduces stack map frames, it computes the maximal stack sizes, and it replaces dead * code by NOPs (see also https://github.com/scala/scala/pull/3726#issuecomment-42861780). 
*/ - final val traceSerializedClassEnabled = false - final val traceSerializedClassPattern = "" + inline val traceSerializedClassEnabled = false + inline val traceSerializedClassPattern = "" def traceMethod(mnode: MethodNode1): Unit = { println(s"Bytecode for method ${mnode.name}") diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala index 156551519cb9..96ae4e8a00af 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala @@ -3,6 +3,7 @@ package backend package jvm import scala.annotation.switch +import scala.collection.mutable.SortedMap import scala.tools.asm import scala.tools.asm.{Handle, Label, Opcodes} @@ -88,9 +89,23 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { case Assign(lhs, rhs) => val s = lhs.symbol val Local(tk, _, idx, _) = locals.getOrMakeLocal(s) - genLoad(rhs, tk) - lineNumber(tree) - bc.store(idx, tk) + + rhs match { + case Apply(Select(larg: Ident, nme.ADD), Literal(x) :: Nil) + if larg.symbol == s && tk.isIntSizedType && x.isShortRange => + lineNumber(tree) + bc.iinc(idx, x.intValue) + + case Apply(Select(larg: Ident, nme.SUB), Literal(x) :: Nil) + if larg.symbol == s && tk.isIntSizedType && Constant(-x.intValue).isShortRange => + lineNumber(tree) + bc.iinc(idx, -x.intValue) + + case _ => + genLoad(rhs, tk) + lineNumber(tree) + bc.store(idx, tk) + } case _ => genLoad(tree, UNIT) @@ -228,7 +243,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { resKind } - def genPrimitiveOp(tree: Apply, expectedType: BType): BType = tree match { + def genPrimitiveOp(tree: Apply, expectedType: BType): BType = (tree: @unchecked) match { case Apply(fun @ DesugaredSelect(receiver, _), _) => val sym = tree.symbol @@ -610,7 +625,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } } - def genTypeApply(t: TypeApply): BType = t match { + def genTypeApply(t: TypeApply): BType = (t: @unchecked) 
match { case TypeApply(fun@DesugaredSelect(obj, _), targs) => val sym = fun.symbol @@ -826,61 +841,170 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { generatedType } - /* - * A Match node contains one or more case clauses, - * each case clause lists one or more Int values to use as keys, and a code block. - * Except the "default" case clause which (if it exists) doesn't list any Int key. - * - * On a first pass over the case clauses, we flatten the keys and their targets (the latter represented with asm.Labels). - * That representation allows JCodeMethodV to emit a lookupswitch or a tableswitch. - * - * On a second pass, we emit the switch blocks, one for each different target. + /* A Match node contains one or more case clauses, each case clause lists one or more + * Int/String values to use as keys, and a code block. The exception is the "default" case + * clause which doesn't list any key (there is exactly one of these per match). */ private def genMatch(tree: Match): BType = tree match { case Match(selector, cases) => lineNumber(tree) - genLoad(selector, INT) val generatedType = tpeTK(tree) + val postMatch = new asm.Label - var flatKeys: List[Int] = Nil - var targets: List[asm.Label] = Nil - var default: asm.Label = null - var switchBlocks: List[(asm.Label, Tree)] = Nil - - // collect switch blocks and their keys, but don't emit yet any switch-block. 
- for (caze @ CaseDef(pat, guard, body) <- cases) { - assert(guard == tpd.EmptyTree, guard) - val switchBlockPoint = new asm.Label - switchBlocks ::= (switchBlockPoint, body) - pat match { - case Literal(value) => - flatKeys ::= value.intValue - targets ::= switchBlockPoint - case Ident(nme.WILDCARD) => - assert(default == null, s"multiple default targets in a Match node, at ${tree.span}") - default = switchBlockPoint - case Alternative(alts) => - alts foreach { - case Literal(value) => - flatKeys ::= value.intValue - targets ::= switchBlockPoint - case _ => - abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.span}") - } - case _ => - abort(s"Invalid pattern in Match node: $tree at: ${tree.span}") + // Only two possible selector types exist in `Match` trees at this point: Int and String + if (tpeTK(selector) == INT) { + + /* On a first pass over the case clauses, we flatten the keys and their + * targets (the latter represented with asm.Labels). That representation + * allows JCodeMethodV to emit a lookupswitch or a tableswitch. + * + * On a second pass, we emit the switch blocks, one for each different target. + */ + + var flatKeys: List[Int] = Nil + var targets: List[asm.Label] = Nil + var default: asm.Label = null + var switchBlocks: List[(asm.Label, Tree)] = Nil + + genLoad(selector, INT) + + // collect switch blocks and their keys, but don't emit yet any switch-block. 
+ for (caze @ CaseDef(pat, guard, body) <- cases) { + assert(guard == tpd.EmptyTree, guard) + val switchBlockPoint = new asm.Label + switchBlocks ::= (switchBlockPoint, body) + pat match { + case Literal(value) => + flatKeys ::= value.intValue + targets ::= switchBlockPoint + case Ident(nme.WILDCARD) => + assert(default == null, s"multiple default targets in a Match node, at ${tree.span}") + default = switchBlockPoint + case Alternative(alts) => + alts foreach { + case Literal(value) => + flatKeys ::= value.intValue + targets ::= switchBlockPoint + case _ => + abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.span}") + } + case _ => + abort(s"Invalid pattern in Match node: $tree at: ${tree.span}") + } } - } - bc.emitSWITCH(mkArrayReverse(flatKeys), mkArrayL(targets.reverse), default, MIN_SWITCH_DENSITY) + bc.emitSWITCH(mkArrayReverse(flatKeys), mkArrayL(targets.reverse), default, MIN_SWITCH_DENSITY) - // emit switch-blocks. - val postMatch = new asm.Label - for (sb <- switchBlocks.reverse) { - val (caseLabel, caseBody) = sb - markProgramPoint(caseLabel) - genLoad(caseBody, generatedType) - bc goTo postMatch + // emit switch-blocks. + for (sb <- switchBlocks.reverse) { + val (caseLabel, caseBody) = sb + markProgramPoint(caseLabel) + genLoad(caseBody, generatedType) + bc goTo postMatch + } + } else { + + /* Since the JVM doesn't have a way to switch on a string, we switch + * on the `hashCode` of the string then do an `equals` check (with a + * possible second set of jumps if blocks can be reach from multiple + * string alternatives). + * + * This mirrors the way that Java compiles `switch` on Strings. 
+ */ + + var default: asm.Label = null + var indirectBlocks: List[(asm.Label, Tree)] = Nil + + import scala.collection.mutable + + // Cases grouped by their hashCode + val casesByHash = SortedMap.empty[Int, List[(String, Either[asm.Label, Tree])]] + var caseFallback: Tree = null + + for (caze @ CaseDef(pat, guard, body) <- cases) { + assert(guard == tpd.EmptyTree, guard) + pat match { + case Literal(value) => + val strValue = value.stringValue + casesByHash.updateWith(strValue.##) { existingCasesOpt => + val newCase = (strValue, Right(body)) + Some(newCase :: existingCasesOpt.getOrElse(Nil)) + } + case Ident(nme.WILDCARD) => + assert(default == null, s"multiple default targets in a Match node, at ${tree.span}") + default = new asm.Label + indirectBlocks ::= (default, body) + case Alternative(alts) => + // We need an extra basic block since multiple strings can lead to this code + val indirectCaseGroupLabel = new asm.Label + indirectBlocks ::= (indirectCaseGroupLabel, body) + alts foreach { + case Literal(value) => + val strValue = value.stringValue + casesByHash.updateWith(strValue.##) { existingCasesOpt => + val newCase = (strValue, Left(indirectCaseGroupLabel)) + Some(newCase :: existingCasesOpt.getOrElse(Nil)) + } + case _ => + abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.span}") + } + + case _ => + abort(s"Invalid pattern in Match node: $tree at: ${tree.span}") + } + } + + // Organize the hashCode options into switch cases + var flatKeys: List[Int] = Nil + var targets: List[asm.Label] = Nil + var hashBlocks: List[(asm.Label, List[(String, Either[asm.Label, Tree])])] = Nil + for ((hashValue, hashCases) <- casesByHash) { + val switchBlockPoint = new asm.Label + hashBlocks ::= (switchBlockPoint, hashCases) + flatKeys ::= hashValue + targets ::= switchBlockPoint + } + + // Push the hashCode of the string (or `0` it is `null`) onto the stack and switch on it + genLoadIf( + If( + 
tree.selector.select(defn.Any_==).appliedTo(nullLiteral), + Literal(Constant(0)), + tree.selector.select(defn.Any_hashCode).appliedToNone + ), + INT + ) + bc.emitSWITCH(mkArrayReverse(flatKeys), mkArrayL(targets.reverse), default, MIN_SWITCH_DENSITY) + + // emit blocks for each hash case + for ((hashLabel, caseAlternatives) <- hashBlocks.reverse) { + markProgramPoint(hashLabel) + for ((caseString, indirectLblOrBody) <- caseAlternatives) { + val comparison = if (caseString == null) defn.Any_== else defn.Any_equals + val condp = Literal(Constant(caseString)).select(defn.Any_==).appliedTo(tree.selector) + val keepGoing = new asm.Label + indirectLblOrBody match { + case Left(jump) => + genCond(condp, jump, keepGoing, targetIfNoJump = keepGoing) + + case Right(caseBody) => + val thisCaseMatches = new asm.Label + genCond(condp, thisCaseMatches, keepGoing, targetIfNoJump = thisCaseMatches) + markProgramPoint(thisCaseMatches) + genLoad(caseBody, generatedType) + bc goTo postMatch + } + markProgramPoint(keepGoing) + } + bc goTo default + } + + // emit blocks for common patterns + for ((caseLabel, caseBody) <- indirectBlocks.reverse) { + markProgramPoint(caseLabel) + genLoad(caseBody, generatedType) + bc goTo postMatch + } } markProgramPoint(postMatch) @@ -1063,30 +1187,109 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { } } + /* Generate string concatenation + * + * On JDK 8: create and append using `StringBuilder` + * On JDK 9+: use `invokedynamic` with `StringConcatFactory` + */ def genStringConcat(tree: Tree): BType = { lineNumber(tree) liftStringConcat(tree) match { - // Optimization for expressions of the form "" + x. We can avoid the StringBuilder. 
+ // Optimization for expressions of the form "" + x case List(Literal(Constant("")), arg) => genLoad(arg, ObjectReference) genCallMethod(defn.String_valueOf_Object, InvokeStyle.Static) case concatenations => - bc.genStartConcat - for (elem <- concatenations) { - val loadedElem = elem match { + val concatArguments = concatenations.view + .filter { + case Literal(Constant("")) => false // empty strings are no-ops in concatenation + case _ => true + } + .map { case Apply(boxOp, value :: Nil) if Erasure.Boxing.isBox(boxOp.symbol) && boxOp.symbol.denot.owner != defn.UnitModuleClass => // Eliminate boxing of primitive values. Boxing is introduced by erasure because // there's only a single synthetic `+` method "added" to the string class. value + case other => other + } + .toList + + // `StringConcatFactory` only got added in JDK 9, so use `StringBuilder` for lower + if (classfileVersion < asm.Opcodes.V9) { + + // Estimate capacity needed for the string builder + val approxBuilderSize = concatArguments.view.map { + case Literal(Constant(s: String)) => s.length + case Literal(c @ Constant(_)) if c.isNonUnitAnyVal => String.valueOf(c).length + case _ => 0 + }.sum + bc.genNewStringBuilder(approxBuilderSize) + + for (elem <- concatArguments) { + val elemType = tpeTK(elem) + genLoad(elem, elemType) + bc.genStringBuilderAppend(elemType) + } + bc.genStringBuilderEnd + } else { - case _ => elem + /* `StringConcatFactory#makeConcatWithConstants` accepts max 200 argument slots. If + * the string concatenation is longer (unlikely), we spill into multiple calls + */ + val MaxIndySlots = 200 + val TagArg = '\u0001' // indicates a hole (in the recipe string) for an argument + val TagConst = '\u0002' // indicates a hole (in the recipe string) for a constant + + val recipe = new StringBuilder() + val argTypes = Seq.newBuilder[asm.Type] + val constVals = Seq.newBuilder[String] + var totalArgSlots = 0 + var countConcats = 1 // ie. 
1 + how many times we spilled + + for (elem <- concatArguments) { + val tpe = tpeTK(elem) + val elemSlots = tpe.size + + // Unlikely spill case + if (totalArgSlots + elemSlots >= MaxIndySlots) { + bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) + countConcats += 1 + totalArgSlots = 0 + recipe.setLength(0) + argTypes.clear() + constVals.clear() + } + + elem match { + case Literal(Constant(s: String)) => + if (s.contains(TagArg) || s.contains(TagConst)) { + totalArgSlots += elemSlots + recipe.append(TagConst) + constVals += s + } else { + recipe.append(s) + } + + case other => + totalArgSlots += elemSlots + recipe.append(TagArg) + val tpe = tpeTK(elem) + argTypes += tpe.toASMType + genLoad(elem, tpe) + } + } + bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) + + // If we spilled, generate one final concat + if (countConcats > 1) { + bc.genIndyStringConcat( + TagArg.toString * countConcats, + Seq.fill(countConcats)(StringRef.toASMType), + Seq.empty + ) } - val elemType = tpeTK(loadedElem) - genLoad(loadedElem, elemType) - bc.genConcat(elemType) } - bc.genEndConcat } StringRef } diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala index d8a38e263970..4085d87ad7ca 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala @@ -21,11 +21,13 @@ import dotty.tools.dotc.core.Names.Name import dotty.tools.dotc.core.NameKinds.ExpandedName import dotty.tools.dotc.core.Signature import dotty.tools.dotc.core.StdNames._ +import dotty.tools.dotc.core.NameKinds import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.core.Types import dotty.tools.dotc.core.Types._ import dotty.tools.dotc.core.TypeErasure import dotty.tools.dotc.transform.GenericSignatures +import dotty.tools.dotc.transform.ElimErasedValueType import dotty.tools.io.AbstractFile import dotty.tools.dotc.report 
@@ -341,6 +343,7 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { def emitParamNames(jmethod: asm.MethodVisitor, params: List[Symbol]) = for param <- params do var access = asm.Opcodes.ACC_FINAL + if param.is(Artifact) then access |= asm.Opcodes.ACC_SYNTHETIC jmethod.visitParameter(param.name.mangledString, access) /* @@ -506,7 +509,7 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { * * must-single-thread */ - private def addForwarder(jclass: asm.ClassVisitor, module: Symbol, m: Symbol): Unit = { + private def addForwarder(jclass: asm.ClassVisitor, module: Symbol, m: Symbol, isSynthetic: Boolean): Unit = { val moduleName = internalName(module) val methodInfo = module.thisType.memberInfo(m) val paramJavaTypes: List[BType] = methodInfo.firstParamTypes map toTypeKind @@ -517,9 +520,10 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { * and we don't know what classes might be subclassing the companion class. See SI-4827. */ // TODO: evaluate the other flags we might be dropping on the floor here. - // TODO: ACC_SYNTHETIC ? val flags = GenBCodeOps.PublicStatic | ( if (m.is(JavaVarargs)) asm.Opcodes.ACC_VARARGS else 0 + ) | ( + if (isSynthetic) asm.Opcodes.ACC_SYNTHETIC else 0 ) // TODO needed? for(ann <- m.annotations) { ann.symbol.initialize } @@ -594,7 +598,16 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { report.log(s"No forwarder for non-public member $m") else { report.log(s"Adding static forwarder for '$m' from $jclassName to '$moduleClass'") - addForwarder(jclass, moduleClass, m) + // It would be simpler to not generate forwarders for these methods, + // but that wouldn't be binary-compatible with Scala 3.0.0, so instead + // we generate ACC_SYNTHETIC forwarders so Java compilers ignore them. 
+ val isSynthetic = + m0.name.is(NameKinds.SyntheticSetterName) || + // Only hide bridges generated at Erasure, mixin forwarders are also + // marked as bridge but shouldn't be hidden since they don't have a + // non-bridge overload. + m0.is(Bridge) && m0.initial.validFor.firstPhaseId == erasurePhase.next.id + addForwarder(jclass, moduleClass, m, isSynthetic) } } } @@ -926,7 +939,7 @@ trait BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { // (one that doesn't erase to the actual signature). See run/t3452b for a test case. val memberTpe = atPhase(erasurePhase) { moduleClass.denot.thisType.memberInfo(sym) } - val erasedMemberType = TypeErasure.fullErasure(memberTpe) + val erasedMemberType = ElimErasedValueType.elimEVT(TypeErasure.transformInfo(sym, memberTpe)) if (erasedMemberType =:= sym.denot.info) getGenericSignatureHelper(sym, moduleClass, memberTpe).orNull else null diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala index 614548ff361e..fffefae8a165 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala @@ -34,7 +34,7 @@ trait BCodeIdiomatic { case (Some(release), None) => release case (None, Some(target)) => target case (Some(release), Some(_)) => - report.warning(s"The value of ${ctx.settings.Xtarget.name} was overriden by ${ctx.settings.release.name}") + report.warning(s"The value of ${ctx.settings.Xtarget.name} was overridden by ${ctx.settings.release.name}") release case (None, None) => "8" // least supported version by default @@ -224,24 +224,27 @@ trait BCodeIdiomatic { } // end of method genPrimitiveShift() - /* + /* Creates a new `StringBuilder` instance with the requested capacity + * * can-multi-thread */ - final def genStartConcat: Unit = { + final def genNewStringBuilder(size: Int): Unit = { jmethod.visitTypeInsn(Opcodes.NEW, JavaStringBuilderClassName) jmethod.visitInsn(Opcodes.DUP) 
+ jmethod.visitLdcInsn(Integer.valueOf(size)) invokespecial( JavaStringBuilderClassName, INSTANCE_CONSTRUCTOR_NAME, - "()V", + "(I)V", itf = false ) } - /* + /* Issue a call to `StringBuilder#append` for the right element type + * * can-multi-thread */ - def genConcat(elemType: BType): Unit = { + final def genStringBuilderAppend(elemType: BType): Unit = { val paramType = elemType match { case ct: ClassBType if ct.isSubtypeOf(StringRef) => StringRef case ct: ClassBType if ct.isSubtypeOf(jlStringBufferRef) => jlStringBufferRef @@ -257,13 +260,38 @@ trait BCodeIdiomatic { invokevirtual(JavaStringBuilderClassName, "append", bt.descriptor) } - /* + /* Extract the built `String` from the `StringBuilder` + * * can-multi-thread */ - final def genEndConcat: Unit = { + final def genStringBuilderEnd: Unit = { invokevirtual(JavaStringBuilderClassName, "toString", "()Ljava/lang/String;") } + /* Concatenate top N arguments on the stack with `StringConcatFactory#makeConcatWithConstants` + * (only works for JDK 9+) + * + * can-multi-thread + */ + final def genIndyStringConcat( + recipe: String, + argTypes: Seq[asm.Type], + constants: Seq[String] + ): Unit = { + jmethod.visitInvokeDynamicInsn( + "makeConcatWithConstants", + asm.Type.getMethodDescriptor(StringRef.toASMType, argTypes:_*), + new asm.Handle( + asm.Opcodes.H_INVOKESTATIC, + "java/lang/invoke/StringConcatFactory", + "makeConcatWithConstants", + "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/String;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite;", + false + ), + (recipe +: constants):_* + ) + } + /* * Emits one or more conversion instructions based on the types given as arguments. 
* @@ -405,6 +433,7 @@ trait BCodeIdiomatic { final def load( idx: Int, tk: BType): Unit = { emitVarInsn(Opcodes.ILOAD, idx, tk) } // can-multi-thread final def store(idx: Int, tk: BType): Unit = { emitVarInsn(Opcodes.ISTORE, idx, tk) } // can-multi-thread + final def iinc( idx: Int, increment: Int): Unit = jmethod.visitIincInsn(idx, increment) // can-multi-thread final def aload( tk: BType): Unit = { emitTypeBased(JCodeMethodN.aloadOpcodes, tk) } // can-multi-thread final def astore(tk: BType): Unit = { emitTypeBased(JCodeMethodN.astoreOpcodes, tk) } // can-multi-thread diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala index 43a926001456..86150f114d2c 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala @@ -70,7 +70,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { with BCJGenSigGen { // Strangely I can't find this in the asm code 255, but reserving 1 for "this" - final val MaximumJvmParameters = 254 + inline val MaximumJvmParameters = 254 // current class var cnode: ClassNode1 = null @@ -139,6 +139,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { // See `tests/run/given-var.scala` // + // !!! 
Part of this logic is duplicated in JSCodeGen.genCompilationUnit claszSymbol.info.decls.foreach { f => if f.isField && !f.name.is(LazyBitMapName) then f.setFlag(JavaStatic) @@ -206,7 +207,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { val optSerial: Option[Long] = claszSymbol.getAnnotation(defn.SerialVersionUIDAnnot).flatMap { annot => if (claszSymbol.is(Trait)) { - report.error("@SerialVersionUID does nothing on a trait", annot.tree.sourcePos) + report.warning("@SerialVersionUID does nothing on a trait", annot.tree.sourcePos) None } else { val vuid = annot.argumentConstant(0).map(_.longValue) @@ -291,7 +292,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { emitAnnotations(cnode, claszSymbol.annotations ++ ssa) if (!isCZStaticModule && !isCZParcelable) { - val skipStaticForwarders = (claszSymbol.isInterface || claszSymbol.is(Module) || ctx.settings.XnoForwarders.value) + val skipStaticForwarders = (claszSymbol.is(Module) || ctx.settings.XnoForwarders.value) if (!skipStaticForwarders) { val lmoc = claszSymbol.companionModule // add static forwarders if there are no name conflicts; see bugs #363 and #1735 diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala b/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala index 04425b524b93..d326bcfa242a 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala @@ -27,7 +27,7 @@ trait BCodeSyncAndTry extends BCodeBodyBuilder { */ abstract class SyncAndTryBuilder(cunit: CompilationUnit) extends PlainBodyBuilder(cunit) { - def genSynchronized(tree: Apply, expectedType: BType): BType = tree match { + def genSynchronized(tree: Apply, expectedType: BType): BType = (tree: @unchecked) match { case Apply(TypeApply(fun, _), args) => val monitor = locals.makeLocal(ObjectReference, "monitor", defn.ObjectType, tree.span) val monCleanup = new asm.Label diff --git a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala 
b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala index d8cb5534e5e3..25a05077626a 100644 --- a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala +++ b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala @@ -5,7 +5,7 @@ package jvm import scala.tools.asm import scala.annotation.threadUnsafe import scala.collection.mutable -import scala.collection.generic.Clearable +import scala.collection.mutable.Clearable import dotty.tools.dotc.core.Flags._ import dotty.tools.dotc.core.Contexts._ @@ -13,7 +13,6 @@ import dotty.tools.dotc.core.Phases._ import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.transform.SymUtils._ -import dotty.tools.dotc.util.WeakHashSet /** * This class mainly contains the method classBTypeFromSymbol, which extracts the necessary @@ -49,7 +48,6 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I) extends BTypes { def newAnyRefMap[K <: AnyRef, V](): mutable.AnyRefMap[K, V] = new mutable.AnyRefMap[K, V]() def newWeakMap[K, V](): mutable.WeakHashMap[K, V] = new mutable.WeakHashMap[K, V]() def recordCache[T <: Clearable](cache: T): T = cache - def newWeakSet[K >: Null <: AnyRef](): WeakHashSet[K] = new WeakHashSet[K]() def newMap[K, V](): mutable.HashMap[K, V] = new mutable.HashMap[K, V]() def newSet[K](): mutable.Set[K] = new mutable.HashSet[K] } @@ -60,7 +58,6 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I) extends BTypes { def newWeakMap[K, V](): collection.mutable.WeakHashMap[K, V] def newMap[K, V](): collection.mutable.HashMap[K, V] def newSet[K](): collection.mutable.Set[K] - def newWeakSet[K >: Null <: AnyRef](): dotty.tools.dotc.util.WeakHashSet[K] def newAnyRefMap[K <: AnyRef, V](): collection.mutable.AnyRefMap[K, V] } @@ -214,7 +211,7 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I) extends BTypes { private def definedClasses(sym: Symbol, phase: Phase) = if (sym.isDefinedInCurrentRun) atPhase(phase) { - 
toDenot(sym).info.decls.filter(_.isClass) + toDenot(sym).info.decls.filter(sym => sym.isClass && !sym.isEffectivelyErased) } else Nil diff --git a/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala b/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala index 35bc25693d10..299c1c75d6cf 100644 --- a/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala +++ b/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala @@ -20,7 +20,9 @@ import dotty.tools.dotc.transform.MegaPhase.MiniPhase class CollectSuperCalls extends MiniPhase { import tpd._ - def phaseName: String = "collectSuperCalls" + override def phaseName: String = CollectSuperCalls.name + + override def description: String = CollectSuperCalls.description override def transformSelect(tree: Select)(using Context): Tree = { tree.qualifier match { @@ -40,3 +42,7 @@ class CollectSuperCalls extends MiniPhase { } } } + +object CollectSuperCalls: + val name: String = "collectSuperCalls" + val description: String = "find classes that are called with super" diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala index a5aa8abd1c7c..64e667d95b68 100644 --- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala +++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala @@ -12,7 +12,6 @@ import scala.annotation.threadUnsafe import scala.collection.generic.Clearable import scala.collection.mutable import scala.reflect.ClassTag -import dotty.tools.dotc.util.WeakHashSet import dotty.tools.io.AbstractFile import scala.tools.asm.AnnotationVisitor import dotty.tools.dotc.core._ diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index 0c3c24916da5..2f2f653ae2e2 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -34,7 +34,10 @@ import StdNames._ import 
dotty.tools.io._ class GenBCode extends Phase { - def phaseName: String = GenBCode.name + + override def phaseName: String = GenBCode.name + + override def description: String = GenBCode.description private val superCallsMap = new MutableSymbolMap[Set[ClassSymbol]] def registerSuperCall(sym: Symbol, calls: ClassSymbol): Unit = { @@ -42,6 +45,9 @@ class GenBCode extends Phase { superCallsMap.update(sym, old + calls) } + private val entryPoints = new mutable.HashSet[String]() + def registerEntryPoint(s: String): Unit = entryPoints += s + private var myOutput: AbstractFile = _ private def outputDir(using Context): AbstractFile = { @@ -61,21 +67,49 @@ class GenBCode extends Phase { override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { + outputDir match + case jar: JarArchive => + updateJarManifestWithMainClass(jar, entryPoints.toList) + case _ => try super.runOn(units) - finally myOutput match { + finally outputDir match { case jar: JarArchive => if (ctx.run.suspendedUnits.nonEmpty) // If we close the jar the next run will not be able to write on the jar. // But if we do not close it we cannot use it as part of the macro classpath of the suspended files. report.error("Can not suspend and output to a jar at the same time. 
See suspension with -Xprint-suspension.") + jar.close() case _ => } } + + private def updateJarManifestWithMainClass(jarArchive: JarArchive, entryPoints: List[String])(using Context): Unit = + val mainClass = Option.when(!ctx.settings.XmainClass.isDefault)(ctx.settings.XmainClass.value).orElse { + entryPoints match + case List(mainClass) => + Some(mainClass) + case Nil => + report.warning("No Main-Class designated or discovered.") + None + case mcs => + report.warning(s"No Main-Class due to multiple entry points:\n ${mcs.mkString("\n ")}") + None + } + mainClass.map { mc => + val manifest = Jar.WManifest() + manifest.mainClass = mc + val file = jarArchive.subdirectoryNamed("META-INF").fileNamed("MANIFEST.MF") + val os = file.output + manifest.underlying.write(os) + os.close() + } + end updateJarManifestWithMainClass } object GenBCode { val name: String = "genBCode" + val description: String = "generate JVM bytecode" } class GenBCodePipeline(val int: DottyBackendInterface, val primitives: DottyPrimitives)(using Context) extends BCodeSyncAndTry { diff --git a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala index a5eb4f0d117a..1579b4577933 100644 --- a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala +++ b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala @@ -6,7 +6,10 @@ import Phases._ /** Generates Scala.js IR files for the compilation unit. 
*/ class GenSJSIR extends Phase { - def phaseName: String = "genSJSIR" + + override def phaseName: String = GenSJSIR.name + + override def description: String = GenSJSIR.description override def isRunnable(using Context): Boolean = super.isRunnable && ctx.settings.scalajs.value @@ -14,3 +17,7 @@ class GenSJSIR extends Phase { def run(using Context): Unit = new JSCodeGen().run() } + +object GenSJSIR: + val name: String = "genSJSIR" + val description: String = "generate .sjsir files for Scala.js" diff --git a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala index 38641cea93e4..56e16647d080 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala @@ -18,6 +18,7 @@ import Decorators._ import Flags._ import dotty.tools.dotc.ast.Trees._ import Names._ +import NameKinds.DefaultGetterName import Types._ import Symbols._ import Denotations._ @@ -41,7 +42,6 @@ import org.scalajs.ir.Trees.OptimizerHints import dotty.tools.dotc.transform.sjs.JSSymUtils._ import JSEncoding._ -import JSInterop._ import ScopedVar.withScopedVars /** Main codegen for Scala.js IR. @@ -103,6 +103,17 @@ class JSCodeGen()(using genCtx: Context) { } } + private def withPerMethodBodyState[A](methodSym: Symbol)(body: => A): A = { + withScopedVars( + currentMethodSym := methodSym, + thisLocalVarIdent := None, + isModuleInitialized := new ScopedVar.VarBox(false), + undefinedDefaultParams := mutable.Set.empty, + ) { + body + } + } + private def acquireContextualJSClassValue[A](f: Option[js.Tree] => A): A = { val jsClassValue = contextualJSClassValue.get withScopedVars( @@ -186,6 +197,23 @@ class JSCodeGen()(using genCtx: Context) { } val allTypeDefs = collectTypeDefs(cunit.tpdTree) + /* #13221 Set JavaStatic on all the Module fields of static module classes. + * This is necessary for `desugarIdent` not to crash in some obscure + * scenarios. + * + * !!! 
Part of this logic is duplicated in BCodeSkelBuilder.genPlainClass + * + * However, here we only do this for Module fields, not all fields. + */ + for (typeDef <- allTypeDefs) { + if (typeDef.symbol.is(ModuleClass)) { + typeDef.symbol.info.decls.foreach { f => + if (f.isField && f.is(Module)) + f.setFlag(JavaStatic) + } + } + } + val (anonJSClassTypeDefs, otherTypeDefs) = allTypeDefs.partition(td => td.symbol.isAnonymousClass && td.symbol.isJSType) @@ -455,6 +483,13 @@ class JSCodeGen()(using genCtx: Context) { i"genNonNativeJSClass() must be called only for non-native JS classes: $sym") assert(sym.superClass != NoSymbol, sym) + if (hasDefaultCtorArgsAndJSModule(sym)) { + report.error( + "Implementation restriction: " + + "constructors of non-native JS classes cannot have default parameters if their companion module is JS native.", + td) + } + val classIdent = encodeClassNameIdent(sym) val originalName = originalNameOfClass(sym) @@ -490,7 +525,7 @@ class JSCodeGen()(using genCtx: Context) { /* We add symbols that we have to expose here. This way we also * get inherited stuff that is implemented in this class. */ - dispatchMethodNames += jsNameOf(sym) + dispatchMethodNames += sym.jsName } } @@ -521,14 +556,25 @@ class JSCodeGen()(using genCtx: Context) { val topLevelExports = jsExportsGen.genTopLevelExports(sym) - val (jsClassCaptures, generatedConstructor) = - genJSClassCapturesAndConstructor(sym, constructorTrees.toList) + val (generatedConstructor, jsClassCaptures) = withNewLocalNameScope { + val isNested = sym.isNestedJSClass - /* If there is one, the JS super class value is always the first JS class - * capture. This is a JSCodeGen-specific invariant (the IR does not rely - * on this) enforced in genJSClassCapturesAndConstructor. 
- */ - val jsSuperClass = jsClassCaptures.map(_.head.ref) + if (isNested) + localNames.reserveLocalName(JSSuperClassParamName) + + val (captures, ctor) = genJSClassCapturesAndConstructor(constructorTrees.toList) + + val jsClassCaptures = if (isNested) { + val superParam = js.ParamDef(js.LocalIdent(JSSuperClassParamName), + NoOriginalName, jstpe.AnyType, mutable = false) + Some(superParam :: captures) + } else { + assert(captures.isEmpty, s"found non nested JS class with captures $captures at $pos") + None + } + + (ctor, jsClassCaptures) + } // Generate fields (and add to methods + ctors) val generatedMembers = { @@ -554,7 +600,7 @@ class JSCodeGen()(using genCtx: Context) { jsClassCaptures, Some(encodeClassNameIdent(sym.superClass)), genClassInterfaces(sym, forJSClass = true), - jsSuperClass, + jsSuperClass = jsClassCaptures.map(_.head.ref), None, hashedMemberDefs, topLevelExports)( @@ -743,7 +789,21 @@ class JSCodeGen()(using genCtx: Context) { def isExcluded(m: Symbol): Boolean = { def hasAccessBoundary = m.accessBoundary(defn.RootClass) ne defn.RootClass - m.is(Deferred) || m.isConstructor || hasAccessBoundary || (m.owner eq defn.ObjectClass) + + def isOfJLObject: Boolean = m.owner eq defn.ObjectClass + + def isDefaultParamOfJSNativeDef: Boolean = { + m.name.is(DefaultGetterName) && { + val info = new DefaultParamInfo(m) + !info.isForConstructor && info.attachedMethod.hasAnnotation(jsdefn.JSNativeAnnot) + } + } + + m.is(Deferred) + || m.isConstructor + || hasAccessBoundary + || isOfJLObject + || m.hasAnnotation(jsdefn.JSNativeAnnot) || isDefaultParamOfJSNativeDef // #4557 } val forwarders = for { @@ -959,47 +1019,361 @@ class JSCodeGen()(using genCtx: Context) { // Constructor of a non-native JS class ------------------------------------ - def genJSClassCapturesAndConstructor(classSym: Symbol, - constructorTrees: List[DefDef]): (Option[List[js.ParamDef]], js.JSMethodDef) = { - implicit val pos = classSym.span + def 
genJSClassCapturesAndConstructor(constructorTrees: List[DefDef])( + implicit pos: SourcePosition): (List[js.ParamDef], js.JSMethodDef) = { + /* We need to merge all Scala constructors into a single one because the + * IR, like JavaScript, only allows a single one. + * + * We do this by applying: + * 1. Applying runtime type based dispatch, just like exports. + * 2. Splitting secondary ctors into parts before and after the `this` call. + * 3. Topo-sorting all constructor statements and including/excluding + * them based on the overload that was chosen. + */ - if (hasDefaultCtorArgsAndJSModule(classSym)) { - report.error( - "Implementation restriction: " + - "constructors of non-native JS classes cannot have default parameters if their companion module is JS native.", - classSym.srcPos) - val ctorDef = js.JSMethodDef(js.MemberFlags.empty, - js.StringLiteral("constructor"), Nil, None, js.Skip())( - OptimizerHints.empty, None) - (None, ctorDef) - } else { - withNewLocalNameScope { - localNames.reserveLocalName(JSSuperClassParamName) + val (primaryTree :: Nil, secondaryTrees) = + constructorTrees.partition(_.symbol.isPrimaryConstructor) + + val primaryCtor = genPrimaryJSClassCtor(primaryTree) + val secondaryCtors = secondaryTrees.map(genSecondaryJSClassCtor(_)) + + // VarDefs for the parameters of all constructors. + val paramVarDefs = for { + vparam <- constructorTrees.flatMap(_.paramss.flatten) + } yield { + val sym = vparam.symbol + val tpe = toIRType(sym.info) + js.VarDef(encodeLocalSym(sym), originalNameOfLocal(sym), tpe, mutable = true, jstpe.zeroOf(tpe))(vparam.span) + } + + /* organize constructors in a called-by tree + * (the implicit root is the primary constructor) + */ + val ctorTree = { + val ctorToChildren = secondaryCtors + .groupBy(_.targetCtor) + .withDefaultValue(Nil) + + /* when constructing the call-by tree, we use pre-order traversal to + * assign overload numbers. + * this puts all descendants of a ctor in a range of overloads numbers. 
+ * + * this property is useful, later, when we need to make statements + * conditional based on the chosen overload. + */ + var nextOverloadNum = 0 + def subTree[T <: JSCtor](ctor: T): ConstructorTree[T] = { + val overloadNum = nextOverloadNum + nextOverloadNum += 1 + val subtrees = ctorToChildren(ctor.sym).map(subTree(_)) + new ConstructorTree(overloadNum, ctor, subtrees) + } + + subTree(primaryCtor) + } + + /* prepare overload dispatch for all constructors. + * as a side-product, we retrieve the capture parameters. + */ + val (exports, jsClassCaptures) = { + val exports = List.newBuilder[jsExportsGen.Exported] + val jsClassCaptures = List.newBuilder[js.ParamDef] + + def add(tree: ConstructorTree[_ <: JSCtor]): Unit = { + val (e, c) = genJSClassCtorDispatch(tree.ctor.sym, + tree.ctor.paramsAndInfo, tree.overloadNum) + exports += e + jsClassCaptures ++= c + tree.subCtors.foreach(add(_)) + } + + add(ctorTree) + + (exports.result(), jsClassCaptures.result()) + } - val ctors: List[js.MethodDef] = constructorTrees.flatMap { tree => - genMethodWithCurrentLocalNameScope(tree) + val (formalArgs, restParam, overloadDispatchBody) = + jsExportsGen.genOverloadDispatch(JSName.Literal("constructor"), exports, jstpe.IntType) + + val overloadVar = js.VarDef(freshLocalIdent("overload"), NoOriginalName, + jstpe.IntType, mutable = false, overloadDispatchBody) + + val ctorStats = genJSClassCtorStats(overloadVar.ref, ctorTree) + + val constructorBody = js.Block( + paramVarDefs ::: List(overloadVar, ctorStats, js.Undefined())) + + val constructorDef = js.JSMethodDef( + js.MemberFlags.empty, + js.StringLiteral("constructor"), + formalArgs, restParam, constructorBody)(OptimizerHints.empty, None) + + (jsClassCaptures, constructorDef) + } + + private def genPrimaryJSClassCtor(dd: DefDef): PrimaryJSCtor = { + val sym = dd.symbol + val Block(stats, _) = dd.rhs + assert(sym.isPrimaryConstructor, s"called with non-primary ctor: $sym") + + var jsSuperCall: Option[js.JSSuperConstructorCall] = 
None + val jsStats = List.newBuilder[js.Tree] + + /* Move all statements after the super constructor call since JS + * cannot access `this` before the super constructor call. + * + * dotc inserts statements before the super constructor call for param + * accessor initializers (including val's and var's declared in the + * params). We move those after the super constructor call, and are + * therefore executed later than for a Scala class. + */ + withPerMethodBodyState(sym) { + stats.foreach { + case tree @ Apply(fun @ Select(Super(This(_), _), _), args) + if fun.symbol.isClassConstructor => + assert(jsSuperCall.isEmpty, s"Found 2 JS Super calls at ${dd.sourcePos}") + implicit val pos: Position = tree.span + jsSuperCall = Some(js.JSSuperConstructorCall(genActualJSArgs(fun.symbol, args))) + + case stat => + val jsStat = genStat(stat) + assert(jsSuperCall.isDefined || !jsStat.isInstanceOf[js.VarDef], + "Trying to move a local VarDef after the super constructor call of a non-native JS class at " + + dd.sourcePos) + jsStats += jsStat + } + } + + assert(jsSuperCall.isDefined, + s"Did not find Super call in primary JS construtor at ${dd.sourcePos}") + + new PrimaryJSCtor(sym, genParamsAndInfo(sym, dd.paramss), jsSuperCall.get :: jsStats.result()) + } + + private def genSecondaryJSClassCtor(dd: DefDef): SplitSecondaryJSCtor = { + val sym = dd.symbol + assert(!sym.isPrimaryConstructor, s"called with primary ctor $sym") + + def flattenBlocks(t: Tree): List[Tree] = t match { + case Block(stats, expr) => (stats :+ expr).flatMap(flattenBlocks) + case _ => t :: Nil + } + val stats = flattenBlocks(dd.rhs) + + val beforeThisCall = List.newBuilder[js.Tree] + var thisCall: Option[(Symbol, List[js.Tree])] = None + val afterThisCall = List.newBuilder[js.Tree] + + withPerMethodBodyState(sym) { + stats.foreach { + case tree @ Apply(fun @ Select(This(_), _), args) + if fun.symbol.isClassConstructor => + assert(thisCall.isEmpty, + s"duplicate this() call in secondary JS constructor at 
${dd.sourcePos}") + + implicit val pos: Position = tree.span + val sym = fun.symbol + thisCall = Some((sym, genActualArgs(sym, args))) + + case stat => + val jsStat = genStat(stat) + if (thisCall.isEmpty) + beforeThisCall += jsStat + else + afterThisCall += jsStat + } + } + + assert(thisCall.isDefined, + i"could not find the this() call in secondary JS constructor at ${dd.sourcePos}:\n${stats.map(_.show).mkString("\n")}") + val Some((targetCtor, ctorArgs)) = thisCall + + new SplitSecondaryJSCtor(sym, genParamsAndInfo(sym, dd.paramss), + beforeThisCall.result(), targetCtor, ctorArgs, afterThisCall.result()) + } + + private def genParamsAndInfo(ctorSym: Symbol, + vparamss: List[ParamClause]): List[(Symbol, JSParamInfo)] = { + implicit val pos: SourcePosition = ctorSym.sourcePos + + val paramSyms = if (vparamss.isEmpty) Nil else vparamss.head.map(_.symbol) + paramSyms.zip(ctorSym.jsParamInfos) + } + + private def genJSClassCtorDispatch(ctorSym: Symbol, + allParamsAndInfos: List[(Symbol, JSParamInfo)], + overloadNum: Int): (jsExportsGen.Exported, List[js.ParamDef]) = { + + implicit val pos: SourcePosition = ctorSym.sourcePos + + /* `allParams` are the parameters as seen from inside the constructor body, + * i.e., the ones generated by the trees in the constructor body. + */ + val (captureParamsAndInfos, normalParamsAndInfos) = + allParamsAndInfos.partition(_._2.capture) + + /* For class captures, we need to generate different names than the ones + * used by the constructor body. This is necessary so that we can forward + * captures properly between constructor delegation calls. 
+ */ + val (jsClassCaptures, captureAssigns) = (for { + (param, info) <- captureParamsAndInfos + } yield { + val ident = freshLocalIdent(param.name.toTermName) + val jsClassCapture = + js.ParamDef(ident, originalNameOfLocal(param), toIRType(info.info), mutable = false) + val captureAssign = + js.Assign(genVarRef(param), jsClassCapture.ref) + (jsClassCapture, captureAssign) + }).unzip + + val normalInfos = normalParamsAndInfos.map(_._2).toIndexedSeq + + val jsExport = new jsExportsGen.Exported(ctorSym, normalInfos) { + def genBody(formalArgsRegistry: jsExportsGen.FormalArgsRegistry): js.Tree = { + val paramAssigns = for { + ((param, info), i) <- normalParamsAndInfos.zipWithIndex + } yield { + val rhs = jsExportsGen.genScalaArg(this, i, formalArgsRegistry, info, static = true, + captures = captureParamsAndInfos.map(pi => genVarRef(pi._1)))( + prevArgsCount => normalParamsAndInfos.take(prevArgsCount).map(pi => genVarRef(pi._1))) + + js.Assign(genVarRef(param), rhs) } - val (captureParams, dispatch) = - jsExportsGen.genJSConstructorDispatch(constructorTrees.map(_.symbol)) + js.Block(captureAssigns ::: paramAssigns, js.IntLiteral(overloadNum)) + } + } + + (jsExport, jsClassCaptures) + } + + /** generates a sequence of JS constructor statements based on a constructor tree. */ + private def genJSClassCtorStats(overloadVar: js.VarRef, + ctorTree: ConstructorTree[PrimaryJSCtor])(implicit pos: Position): js.Tree = { - /* Ensure that the first JS class capture is a reference to the JS super class value. - * genNonNativeJSClass and genNewAnonJSClass rely on this. - */ - val captureParamsWithJSSuperClass = captureParams.map { params => - val jsSuperClassParam = js.ParamDef( - js.LocalIdent(JSSuperClassParamName), NoOriginalName, - jstpe.AnyType, mutable = false) - jsSuperClassParam :: params + /* generates a statement that conditionally executes body iff the chosen + * overload is any of the descendants of `tree` (including itself). 
+ * + * here we use the property from building the trees, that a set of + * descendants always has a range of overload numbers. + */ + def ifOverload(tree: ConstructorTree[_], body: js.Tree): js.Tree = body match { + case js.Skip() => js.Skip() + + case body => + val x = overloadVar + val cond = { + import tree.{lo, hi} + + if (lo == hi) { + js.BinaryOp(js.BinaryOp.Int_==, js.IntLiteral(lo), x) + } else { + val lhs = js.BinaryOp(js.BinaryOp.Int_<=, js.IntLiteral(lo), x) + val rhs = js.BinaryOp(js.BinaryOp.Int_<=, x, js.IntLiteral(hi)) + js.If(lhs, rhs, js.BooleanLiteral(false))(jstpe.BooleanType) + } } - val ctorDef = JSConstructorGen.buildJSConstructorDef(dispatch, ctors, freshLocalIdent("overload")) { - msg => report.error(msg, classSym.srcPos) + js.If(cond, body, js.Skip())(jstpe.NoType) + } + + /* preStats / postStats use pre/post order traversal respectively to + * generate a topo-sorted sequence of statements. + */ + + def preStats(tree: ConstructorTree[SplitSecondaryJSCtor], + nextParamsAndInfo: List[(Symbol, JSParamInfo)]): js.Tree = { + val inner = tree.subCtors.map(preStats(_, tree.ctor.paramsAndInfo)) + + assert(tree.ctor.ctorArgs.size == nextParamsAndInfo.size, "param count mismatch") + val paramsInfosAndArgs = nextParamsAndInfo.zip(tree.ctor.ctorArgs) + + val (captureParamsInfosAndArgs, normalParamsInfosAndArgs) = + paramsInfosAndArgs.partition(_._1._2.capture) + + val captureAssigns = for { + ((param, _), arg) <- captureParamsInfosAndArgs + } yield { + js.Assign(genVarRef(param), arg) + } + + val normalAssigns = for { + (((param, info), arg), i) <- normalParamsInfosAndArgs.zipWithIndex + } yield { + val newArg = arg match { + case js.Transient(UndefinedParam) => + /* Go full circle: We have ignored the default param getter for + * this, we'll create it again. + * + * This seems not optimal: We could simply not ignore the calls to + * default param getters in the first place. 
+ * + * However, this proves to be difficult: Because of translations in + * earlier phases, calls to default param getters may be assigned + * to temporary variables first (see the undefinedDefaultParams + * ScopedVar). If this happens, it becomes increasingly difficult + * to distinguish a default param getter call for a constructor + * call of *this* instance (in which case we would want to keep + * the default param getter call) from one for a *different* + * instance (in which case we would want to discard the default + * param getter call) + * + * Because of this, it ends up being easier to just re-create the + * default param getter call if necessary. + */ + implicit val pos: SourcePosition = tree.ctor.sym.sourcePos + jsExportsGen.genCallDefaultGetter(tree.ctor.sym, i, static = false, + captures = captureParamsInfosAndArgs.map(p => genVarRef(p._1._1)))( + prevArgsCount => normalParamsInfosAndArgs.take(prevArgsCount).map(p => genVarRef(p._1._1))) + + case arg => arg } - (captureParamsWithJSSuperClass, ctorDef) + js.Assign(genVarRef(param), newArg) } + + ifOverload(tree, js.Block( + inner ++ tree.ctor.beforeCall ++ captureAssigns ++ normalAssigns)) + } + + def postStats(tree: ConstructorTree[SplitSecondaryJSCtor]): js.Tree = { + val inner = tree.subCtors.map(postStats(_)) + ifOverload(tree, js.Block(tree.ctor.afterCall ++ inner)) } + + val primaryCtor = ctorTree.ctor + val secondaryCtorTrees = ctorTree.subCtors + + js.Block( + secondaryCtorTrees.map(preStats(_, primaryCtor.paramsAndInfo)) ++ + primaryCtor.body ++ + secondaryCtorTrees.map(postStats(_)) + ) + } + + private sealed trait JSCtor { + val sym: Symbol + val paramsAndInfo: List[(Symbol, JSParamInfo)] + } + + private class PrimaryJSCtor(val sym: Symbol, + val paramsAndInfo: List[(Symbol, JSParamInfo)], + val body: List[js.Tree]) extends JSCtor + + private class SplitSecondaryJSCtor(val sym: Symbol, + val paramsAndInfo: List[(Symbol, JSParamInfo)], + val beforeCall: List[js.Tree], + val targetCtor: 
Symbol, val ctorArgs: List[js.Tree], + val afterCall: List[js.Tree]) extends JSCtor + + private class ConstructorTree[Ctor <: JSCtor]( + val overloadNum: Int, val ctor: Ctor, + val subCtors: List[ConstructorTree[SplitSecondaryJSCtor]]) { + val lo: Int = overloadNum + val hi: Int = subCtors.lastOption.fold(lo)(_.hi) + + assert(lo <= hi, "bad overload range") } // Generate a method ------------------------------------------------------- @@ -1042,12 +1416,54 @@ class JSCodeGen()(using genCtx: Context) { val vparamss = dd.termParamss val rhs = dd.rhs - withScopedVars( - currentMethodSym := sym, - undefinedDefaultParams := mutable.Set.empty, - thisLocalVarIdent := None, - isModuleInitialized := new ScopedVar.VarBox(false) - ) { + /* Is this method a default accessor that should be ignored? + * + * This is the case iff one of the following applies: + * - It is a constructor default accessor and the linked class is a + * native JS class. + * - It is a default accessor for a native JS def, but with the caveat + * that its rhs must be `js.native` because of #4553. + * + * Both of those conditions can only happen if the default accessor is in + * a module class, so we use that as a fast way out. (But omitting that + * condition would not change the result.) + * + * This is different than `isJSDefaultParam` in `genApply`: we do not + * ignore default accessors of *non-native* JS types. Neither for + * constructor default accessor nor regular default accessors. We also + * do not need to worry about non-constructor members of native JS types, + * since for those, the entire member list is ignored in `genJSClassData`. + */ + def isIgnorableDefaultParam: Boolean = { + sym.name.is(DefaultGetterName) && sym.owner.is(ModuleClass) && { + val info = new DefaultParamInfo(sym) + if (info.isForConstructor) { + /* This is a default accessor for a constructor parameter. 
Check + * whether the attached constructor is a native JS constructor, + * which is the case iff the linked class is a native JS type. + */ + info.constructorOwner.hasAnnotation(jsdefn.JSNativeAnnot) + } else { + /* #4553 We need to ignore default accessors for JS native defs. + * However, because Scala.js <= 1.7.0 actually emitted code calling + * those accessors, we must keep default accessors that would + * compile. The only accessors we can actually get rid of are those + * that are `= js.native`. + */ + !sym.owner.isJSType && + info.attachedMethod.hasAnnotation(jsdefn.JSNativeAnnot) && { + dd.rhs match { + case MaybeAsInstanceOf(Apply(fun, _)) => + fun.symbol == jsdefn.JSPackage_native + case _ => + false + } + } + } + } + } + + withPerMethodBodyState(sym) { assert(vparamss.isEmpty || vparamss.tail.isEmpty, "Malformed parameter list: " + vparamss) val params = if (vparamss.isEmpty) Nil else vparamss.head.map(_.symbol) @@ -1066,9 +1482,17 @@ class JSCodeGen()(using genCtx: Context) { Some(js.MethodDef(js.MemberFlags.empty, methodName, originalName, jsParams, toIRType(patchedResultType(sym)), None)( OptimizerHints.empty, None)) - } else if (sym.isJSNativeCtorDefaultParam) { + } else if (isIgnorableDefaultParam) { // #11592 None + } else if (sym.is(Bridge) && sym.name.is(DefaultGetterName) && currentClassSym.isNonNativeJSClass) { + /* #12572 Bridges for default accessors in non-native JS classes must not be emitted, + * because they call another default accessor, making their entire body an + * that cannot be eliminated. + * Such methods are never called anyway, because they are filtered out in + * JSExportsGen.defaultGetterDenot(). 
+ */ + None } else /*if (sym.isClassConstructor && isHijackedBoxedClass(sym.owner)) { None } else*/ { @@ -1095,15 +1519,9 @@ class JSCodeGen()(using genCtx: Context) { val methodDef = { if (sym.isClassConstructor) { - val body0 = genStat(rhs) - val body1 = { - val needsMove = currentClassSym.isNonNativeJSClass && sym.isPrimaryConstructor - if (needsMove) moveAllStatementsAfterSuperConstructorCall(body0) - else body0 - } val namespace = js.MemberNamespace.Constructor js.MethodDef(js.MemberFlags.empty.withNamespace(namespace), - methodName, originalName, jsParams, jstpe.NoType, Some(body1))( + methodName, originalName, jsParams, jstpe.NoType, Some(genStat(rhs)))( optimizerHints, None) } else { val namespace = if (isMethodStaticInIR(sym)) { @@ -1170,53 +1588,6 @@ class JSCodeGen()(using genCtx: Context) { } } - /** Moves all statements after the super constructor call. - * - * This is used for the primary constructor of a non-native JS class, - * because those cannot access `this` before the super constructor call. - * - * Normally, in Scala, param accessors (i.e., fields declared directly in - * constructor parameters) are initialized *before* the super constructor - * call. This is important for cases like - * - * abstract class A { - * def a: Int - * println(a) - * } - * class B(val a: Int) extends A - * - * where `a` is supposed to be correctly initialized by the time `println` - * is executed. - * - * However, in a JavaScript class, this is forbidden: it is not allowed to - * read the `this` value in a constructor before the super constructor call. - * - * Therefore, for JavaScript classes, we specifically move all those early - * assignments after the super constructor call, to comply with JavaScript - * limitations. This clearly introduces a semantic difference in - * initialization order between Scala classes and JavaScript classes, but - * there is nothing we can do about it. 
That difference in behavior is - * basically spec'ed in Scala.js the language, since specifying it any other - * way would prevent JavaScript classes from ever having constructor - * parameters. - * - * We do the same thing in Scala 2, obviously. - */ - private def moveAllStatementsAfterSuperConstructorCall(body: js.Tree): js.Tree = { - val bodyStats = body match { - case js.Block(stats) => stats - case _ => body :: Nil - } - - val (beforeSuper, superCall :: afterSuper) = - bodyStats.span(!_.isInstanceOf[js.JSSuperConstructorCall]) - - assert(!beforeSuper.exists(_.isInstanceOf[js.VarDef]), - s"Trying to move a local VarDef after the super constructor call of a non-native JS class at ${body.pos}") - - js.Block(superCall :: beforeSuper ::: afterSuper)(body.pos) - } - // ParamDefs --------------------------------------------------------------- def genParamDef(sym: Symbol): js.ParamDef = @@ -1361,8 +1732,12 @@ class JSCodeGen()(using genCtx: Context) { } case If(cond, thenp, elsep) => + val tpe = + if (isStat) jstpe.NoType + else toIRType(tree.tpe) + js.If(genExpr(cond), genStatOrExpr(thenp, isStat), - genStatOrExpr(elsep, isStat))(toIRType(tree.tpe)) + genStatOrExpr(elsep, isStat))(tpe) case Labeled(bind, expr) => js.Labeled(encodeLabelSym(bind.symbol), toIRType(tree.tpe), genStatOrExpr(expr, isStat)) @@ -1430,7 +1805,7 @@ class JSCodeGen()(using genCtx: Context) { */ js.Transient(UndefinedParam) } else { - js.VarRef(encodeLocalSym(sym))(toIRType(sym.info)) + genVarRef(sym) } } { select => genStatOrExpr(select, isStat) @@ -1515,9 +1890,7 @@ class JSCodeGen()(using genCtx: Context) { } case _ => - js.Assign( - js.VarRef(encodeLocalSym(sym))(toIRType(sym.info)), - genRhs) + js.Assign(genVarRef(sym), genRhs) } /** Array constructor */ @@ -1599,7 +1972,10 @@ class JSCodeGen()(using genCtx: Context) { val Try(block, catches, finalizer) = tree val blockAST = genStatOrExpr(block, isStat) - val resultType = toIRType(tree.tpe) + + val resultType = + if (isStat) jstpe.NoType + 
else toIRType(tree.tpe) val handled = if (catches.isEmpty) blockAST @@ -1692,13 +2068,52 @@ class JSCodeGen()(using genCtx: Context) { val args = tree.args val sym = tree.fun.symbol - val fun = tree.fun match { - case fun: Ident => desugarIdent(fun) - case fun => fun + /* Is the method a JS default accessor, which should become an + * `UndefinedParam` rather than being compiled normally. + * + * This is true iff one of the following conditions apply: + * - It is a constructor default param for the constructor of a JS class. + * - It is a default param of an instance method of a native JS type. + * - It is a default param of an instance method of a non-native JS type + * and the attached method is exposed. + * - It is a default param for a native JS def. + * + * This is different than `isIgnorableDefaultParam` in + * `genMethodWithCurrentLocalNameScope`: we include here the default + * accessors of *non-native* JS types (unless the corresponding methods are + * not exposed). We also need to handle non-constructor members of native + * JS types. + */ + def isJSDefaultParam: Boolean = { + sym.name.is(DefaultGetterName) && { + val info = new DefaultParamInfo(sym) + if (info.isForConstructor) { + /* This is a default accessor for a constructor parameter. Check + * whether the attached constructor is a JS constructor, which is + * the case iff the linked class is a JS type. + */ + info.constructorOwner.isJSType + } else { + if (sym.owner.isJSType) { + /* The default accessor is in a JS type. It is a JS default + * param iff the enclosing class is native or the attached method + * is exposed. + */ + !sym.owner.isNonNativeJSClass || info.attachedMethod.isJSExposed + } else { + /* The default accessor is in a Scala type. It is a JS default + * param iff the attached method is a native JS def. This can + * only happen if the owner is a module class, which we test + * first as a fast way out. 
+ */ + sym.owner.is(ModuleClass) && info.attachedMethod.hasAnnotation(jsdefn.JSNativeAnnot) + } + } + } } - fun match { - case _ if sym.isJSDefaultParam => + tree.fun match { + case _ if isJSDefaultParam => js.Transient(UndefinedParam) case Select(Super(_, _), _) => @@ -2368,7 +2783,10 @@ class JSCodeGen()(using genCtx: Context) { js.UnaryOp(IntToLong, intValue) } case jstpe.FloatType => - js.UnaryOp(js.UnaryOp.DoubleToFloat, doubleValue) + if (from == jstpe.LongType) + js.UnaryOp(js.UnaryOp.LongToFloat, value) + else + js.UnaryOp(js.UnaryOp.DoubleToFloat, doubleValue) case jstpe.DoubleType => doubleValue } @@ -2476,27 +2894,7 @@ class JSCodeGen()(using genCtx: Context) { args: List[Tree]): js.Tree = { implicit val pos = tree.span - val arg = args.head - - /* Primitive number types such as scala.Int have a - * def +(s: String): String - * method, which is why we have to box the lhs sometimes. - * Otherwise, both lhs and rhs are already reference types (Any or String) - * so boxing is not necessary (in particular, rhs is never a primitive). 
- */ - assert(!isPrimitiveValueType(receiver.tpe) || arg.tpe.isRef(defn.StringClass)) - assert(!isPrimitiveValueType(arg.tpe)) - - val genLhs = { - val genLhs0 = genExpr(receiver) - // Box the receiver if it is a primitive value - if (!isPrimitiveValueType(receiver.tpe)) genLhs0 - else makePrimitiveBox(genLhs0, receiver.tpe) - } - - val genRhs = genExpr(arg) - - js.BinaryOp(js.BinaryOp.String_+, genLhs, genRhs) + js.BinaryOp(js.BinaryOp.String_+, genExpr(receiver), genExpr(args.head)) } /** Gen JS code for a call to Any.## */ @@ -2795,9 +3193,8 @@ class JSCodeGen()(using genCtx: Context) { s"Trying to call the super constructor of Object in a non-native JS class at $pos") genApplyMethod(genReceiver, sym, genScalaArgs) } else if (sym.isClassConstructor) { - assert(genReceiver.isInstanceOf[js.This], - s"Trying to call a JS super constructor with a non-`this` receiver at $pos") - js.JSSuperConstructorCall(genJSArgs) + throw new AssertionError( + s"calling a JS super constructor should have happened in genPrimaryJSClassCtor at $pos") } else if (sym.owner.isNonNativeJSClass && !sym.isJSExposed) { // Reroute to the static method genApplyJSClassMethod(genReceiver, sym, genScalaArgs) @@ -2863,12 +3260,6 @@ class JSCodeGen()(using genCtx: Context) { def abortMatch(msg: String): Nothing = throw new FatalError(s"$msg in switch-like pattern match at ${tree.span}: $tree") - /* Although GenBCode adapts the scrutinee and the cases to `int`, only - * true `int`s can reach the back-end, as asserted by the String-switch - * transformation in `cleanup`. Therefore, we do not adapt, preserving - * the `string`s and `null`s that come out of the pattern matching in - * Scala 2.13.2+. 
- */ val genSelector = genExpr(selector) // Sanity check: we can handle Ints and Strings (including `null`s), but nothing else @@ -2885,7 +3276,7 @@ class JSCodeGen()(using genCtx: Context) { case resType => resType } - var clauses: List[(List[js.Tree], js.Tree)] = Nil + var clauses: List[(List[js.MatchableLiteral], js.Tree)] = Nil var optDefaultClause: Option[js.Tree] = None for (caze @ CaseDef(pat, guard, body) <- cases) { @@ -2894,19 +3285,29 @@ class JSCodeGen()(using genCtx: Context) { val genBody = genStatOrExpr(body, isStat) + def invalidCase(): Nothing = + abortMatch("Invalid case") + + def genMatchableLiteral(tree: Literal): js.MatchableLiteral = { + genExpr(tree) match { + case matchableLiteral: js.MatchableLiteral => matchableLiteral + case otherExpr => invalidCase() + } + } + pat match { case lit: Literal => - clauses = (List(genExpr(lit)), genBody) :: clauses + clauses = (List(genMatchableLiteral(lit)), genBody) :: clauses case Ident(nme.WILDCARD) => optDefaultClause = Some(genBody) case Alternative(alts) => val genAlts = alts.map { - case lit: Literal => genExpr(lit) - case _ => abortMatch("Invalid case in alternative") + case lit: Literal => genMatchableLiteral(lit) + case _ => invalidCase() } clauses = (genAlts, genBody) :: clauses case _ => - abortMatch("Invalid case pattern") + invalidCase() } } @@ -2921,15 +3322,6 @@ class JSCodeGen()(using genCtx: Context) { * case is a typical product of `match`es that are full of * `case n if ... =>`, which are used instead of `if` chains for * convenience and/or readability. - * - * When no optimization applies, and any of the case values is not a - * literal int, we emit a series of `if..else` instead of a `js.Match`. - * This became necessary in 2.13.2 with strings and nulls. - * - * Note that dotc has not adopted String-switch-Matches yet, so these code - * paths are dead code at the moment. 
However, they already existed in the - * scalac, so were ported, to be immediately available and working when - * dotc starts emitting switch-Matches on Strings. */ def isInt(tree: js.Tree): Boolean = tree.tpe == jstpe.IntType @@ -2949,32 +3341,8 @@ class JSCodeGen()(using genCtx: Context) { js.If(js.BinaryOp(op, genSelector, uniqueAlt), caseRhs, defaultClause)(resultType) case _ => - if (isInt(genSelector) && - clauses.forall(_._1.forall(_.isInstanceOf[js.IntLiteral]))) { - // We have int literals only: use a js.Match - val intClauses = clauses.asInstanceOf[List[(List[js.IntLiteral], js.Tree)]] - js.Match(genSelector, intClauses, defaultClause)(resultType) - } else { - // We have other stuff: generate an if..else chain - val (tempSelectorDef, tempSelectorRef) = genSelector match { - case varRef: js.VarRef => - (js.Skip(), varRef) - case _ => - val varDef = js.VarDef(freshLocalIdent(), NoOriginalName, - genSelector.tpe, mutable = false, genSelector) - (varDef, varDef.ref) - } - val ifElseChain = clauses.foldRight(defaultClause) { (caze, elsep) => - val conds = caze._1.map { caseValue => - js.BinaryOp(js.BinaryOp.===, tempSelectorRef, caseValue) - } - val cond = conds.reduceRight[js.Tree] { (left, right) => - js.If(left, js.BooleanLiteral(true), right)(jstpe.BooleanType) - } - js.If(cond, caze._2, elsep)(resultType) - } - js.Block(tempSelectorDef, ifElseChain) - } + // We have more than one case: use a js.Match + js.Match(genSelector, clauses, defaultClause)(resultType) } } @@ -3041,7 +3409,7 @@ class JSCodeGen()(using genCtx: Context) { val formalAndActualCaptures = allCaptureValues.map { value => implicit val pos = value.span val (formalIdent, originalName) = value match { - case Ident(name) => (freshLocalIdent(name.toString), OriginalName(name.toString)) + case Ident(name) => (freshLocalIdent(name.toTermName), OriginalName(name.toString)) case This(_) => (freshLocalIdent("this"), thisOriginalName) case _ => (freshLocalIdent(), NoOriginalName) } @@ -3069,7 +3437,7 
@@ class JSCodeGen()(using genCtx: Context) { val formalAndActualParams = formalParamNames.lazyZip(formalParamTypes).lazyZip(formalParamRepeateds).map { (name, tpe, repeated) => - val formalParam = js.ParamDef(freshLocalIdent(name.toString), + val formalParam = js.ParamDef(freshLocalIdent(name), OriginalName(name.toString), jstpe.AnyType, mutable = false) val actualParam = if (repeated) genJSArrayToVarArgs(formalParam.ref)(tree.sourcePos) @@ -3243,7 +3611,7 @@ class JSCodeGen()(using genCtx: Context) { } /** Gen a statically linked call to an instance method. */ - private def genApplyMethodMaybeStatically(receiver: js.Tree, method: Symbol, + def genApplyMethodMaybeStatically(receiver: js.Tree, method: Symbol, arguments: List[js.Tree])(implicit pos: Position): js.Tree = { if (method.isPrivate || method.isClassConstructor) genApplyMethodStatically(receiver, method, arguments) @@ -3428,6 +3796,15 @@ class JSCodeGen()(using genCtx: Context) { // BoxedUnit.UNIT, which is the boxed version of () js.Undefined() + case JS_IMPORT => + // js.import(arg) + val arg = genArgs1 + js.JSImportCall(arg) + + case JS_IMPORT_META => + // js.import.meta + js.JSImportMeta() + case JS_NATIVE => // js.native report.error( @@ -3718,46 +4095,24 @@ class JSCodeGen()(using genCtx: Context) { private def genActualJSArgs(sym: Symbol, args: List[Tree])( implicit pos: Position): List[js.TreeOrJSSpread] = { - def paramNamesAndTypes(using Context): List[(Names.TermName, Type)] = - sym.info.paramNamess.flatten.zip(sym.info.paramInfoss.flatten) - - val wereRepeated = atPhase(elimRepeatedPhase) { - val list = - for ((name, tpe) <- paramNamesAndTypes) - yield (name -> tpe.isRepeatedParam) - list.toMap - } - - val paramTypes = atPhase(elimErasedValueTypePhase) { - paramNamesAndTypes.toMap - } - var reversedArgs: List[js.TreeOrJSSpread] = Nil - val argsParamNamesAndTypes = args.zip(paramNamesAndTypes) - for ((arg, (paramName, paramType)) <- argsParamNamesAndTypes) { - val wasRepeated = 
wereRepeated.get(paramName) - - wasRepeated match { - case Some(true) => - reversedArgs = genJSRepeatedParam(arg) reverse_::: reversedArgs - - case Some(false) => - val unboxedArg = genExpr(arg) - val boxedArg = unboxedArg match { - case js.Transient(UndefinedParam) => - unboxedArg - case _ => - val tpe = paramTypes.getOrElse(paramName, paramType) - box(unboxedArg, tpe) - } - reversedArgs ::= boxedArg - - case None => - // This is a parameter introduced by erasure or lambdalift, which we ignore. - assert(sym.isClassConstructor, - i"Found an unknown param $paramName in method " + - i"${sym.fullName}, which is not a class constructor, at $pos") + for ((arg, info) <- args.zip(sym.jsParamInfos)) { + if (info.repeated) { + reversedArgs = genJSRepeatedParam(arg) reverse_::: reversedArgs + } else if (info.capture) { + // Ignore captures + assert(sym.isClassConstructor, + i"Found a capture param in method ${sym.fullName}, which is not a class constructor, at $pos") + } else { + val unboxedArg = genExpr(arg) + val boxedArg = unboxedArg match { + case js.Transient(UndefinedParam) => + unboxedArg + case _ => + box(unboxedArg, info.info) + } + reversedArgs ::= boxedArg } } @@ -3890,6 +4245,9 @@ class JSCodeGen()(using genCtx: Context) { } } + private def genVarRef(sym: Symbol)(implicit pos: Position): js.VarRef = + js.VarRef(encodeLocalSym(sym))(toIRType(sym.info)) + private def genAssignableField(sym: Symbol, qualifier: Tree)(implicit pos: SourcePosition): (js.AssignLhs, Boolean) = { def qual = genExpr(qualifier) @@ -4011,7 +4369,7 @@ class JSCodeGen()(using genCtx: Context) { js.LoadJSConstructor(encodeClassName(sym)) } - private final val GenericGlobalObjectInformationMsg = { + private inline val GenericGlobalObjectInformationMsg = { "\n " + "See https://www.scala-js.org/doc/interoperability/global-scope.html " + "for further information." @@ -4325,4 +4683,31 @@ object JSCodeGen { out.print("") } + /** Info about a default param accessor. 
+ * + * The method must have a default getter name for this class to make sense. + */ + private class DefaultParamInfo(sym: Symbol)(using Context) { + private val methodName = sym.name.exclude(DefaultGetterName) + + def isForConstructor: Boolean = methodName == nme.CONSTRUCTOR + + /** When `isForConstructor` is true, returns the owner of the attached + * constructor. + */ + def constructorOwner: Symbol = sym.owner.linkedClass + + /** When `isForConstructor` is false, returns the method attached to the + * specified default accessor. + */ + def attachedMethod: Symbol = { + // If there are overloads, we need to find the one that has default params. + val overloads = sym.owner.info.decl(methodName) + if (!overloads.isOverloaded) + overloads.symbol + else + overloads.suchThat(_.is(HasDefaultParams, butNot = Bridge)).symbol + } + } + } diff --git a/compiler/src/dotty/tools/backend/sjs/JSConstructorGen.scala b/compiler/src/dotty/tools/backend/sjs/JSConstructorGen.scala deleted file mode 100644 index 25ec8ff53c6b..000000000000 --- a/compiler/src/dotty/tools/backend/sjs/JSConstructorGen.scala +++ /dev/null @@ -1,376 +0,0 @@ -package dotty.tools.backend.sjs - -import org.scalajs.ir -import org.scalajs.ir.{Position, Trees => js, Types => jstpe} -import org.scalajs.ir.Names._ -import org.scalajs.ir.OriginalName.NoOriginalName - -import JSCodeGen.UndefinedParam - -object JSConstructorGen { - - /** Builds one JS constructor out of several "init" methods and their - * dispatcher. - * - * This method and the rest of this file are copied verbatim from `GenJSCode` - * for scalac, since there is no dependency on the compiler trees/symbols/etc. - * We are only manipulating IR trees and types. - * - * The only difference is the two parameters `overloadIdent` and `reportError`, - * which are added so that this entire file can be even more isolated. 
- */ - def buildJSConstructorDef(dispatch: js.JSMethodDef, ctors: List[js.MethodDef], - overloadIdent: js.LocalIdent)( - reportError: String => Unit)( - implicit pos: Position): js.JSMethodDef = { - - val js.JSMethodDef(_, dispatchName, dispatchArgs, dispatchRestParam, dispatchResolution) = - dispatch - - val jsConstructorBuilder = mkJSConstructorBuilder(ctors, reportError) - - // Section containing the overload resolution and casts of parameters - val overloadSelection = mkOverloadSelection(jsConstructorBuilder, - overloadIdent, dispatchResolution) - - /* Section containing all the code executed before the call to `this` - * for every secondary constructor. - */ - val prePrimaryCtorBody = - jsConstructorBuilder.mkPrePrimaryCtorBody(overloadIdent) - - val primaryCtorBody = jsConstructorBuilder.primaryCtorBody - - /* Section containing all the code executed after the call to this for - * every secondary constructor. - */ - val postPrimaryCtorBody = - jsConstructorBuilder.mkPostPrimaryCtorBody(overloadIdent) - - val newBody = js.Block(overloadSelection ::: prePrimaryCtorBody :: - primaryCtorBody :: postPrimaryCtorBody :: js.Undefined() :: Nil) - - js.JSMethodDef(js.MemberFlags.empty, dispatchName, dispatchArgs, dispatchRestParam, newBody)( - dispatch.optimizerHints, None) - } - - private class ConstructorTree(val overrideNum: Int, val method: js.MethodDef, - val subConstructors: List[ConstructorTree]) { - - lazy val overrideNumBounds: (Int, Int) = - if (subConstructors.isEmpty) (overrideNum, overrideNum) - else (subConstructors.head.overrideNumBounds._1, overrideNum) - - def get(methodName: MethodName): Option[ConstructorTree] = { - if (methodName == this.method.methodName) { - Some(this) - } else { - subConstructors.iterator.map(_.get(methodName)).collectFirst { - case Some(node) => node - } - } - } - - def getParamRefs(implicit pos: Position): List[js.VarRef] = - method.args.map(_.ref) - - def getAllParamDefsAsVars(implicit pos: Position): List[js.VarDef] = { - val 
localDefs = method.args.map { pDef => - js.VarDef(pDef.name, pDef.originalName, pDef.ptpe, mutable = true, - jstpe.zeroOf(pDef.ptpe)) - } - localDefs ++ subConstructors.flatMap(_.getAllParamDefsAsVars) - } - } - - private class JSConstructorBuilder(root: ConstructorTree, reportError: String => Unit) { - - def primaryCtorBody: js.Tree = root.method.body.getOrElse( - throw new AssertionError("Found abstract constructor")) - - def hasSubConstructors: Boolean = root.subConstructors.nonEmpty - - def getOverrideNum(methodName: MethodName): Int = - root.get(methodName).fold(-1)(_.overrideNum) - - def getParamRefsFor(methodName: MethodName)(implicit pos: Position): List[js.VarRef] = - root.get(methodName).fold(List.empty[js.VarRef])(_.getParamRefs) - - def getAllParamDefsAsVars(implicit pos: Position): List[js.VarDef] = - root.getAllParamDefsAsVars - - def mkPrePrimaryCtorBody(overrideNumIdent: js.LocalIdent)( - implicit pos: Position): js.Tree = { - val overrideNumRef = js.VarRef(overrideNumIdent)(jstpe.IntType) - mkSubPreCalls(root, overrideNumRef) - } - - def mkPostPrimaryCtorBody(overrideNumIdent: js.LocalIdent)( - implicit pos: Position): js.Tree = { - val overrideNumRef = js.VarRef(overrideNumIdent)(jstpe.IntType) - js.Block(mkSubPostCalls(root, overrideNumRef)) - } - - private def mkSubPreCalls(constructorTree: ConstructorTree, - overrideNumRef: js.VarRef)(implicit pos: Position): js.Tree = { - val overrideNumss = constructorTree.subConstructors.map(_.overrideNumBounds) - val paramRefs = constructorTree.getParamRefs - val bodies = constructorTree.subConstructors.map { constructorTree => - mkPrePrimaryCtorBodyOnSndCtr(constructorTree, overrideNumRef, paramRefs) - } - overrideNumss.zip(bodies).foldRight[js.Tree](js.Skip()) { - case ((numBounds, body), acc) => - val cond = mkOverrideNumsCond(overrideNumRef, numBounds) - js.If(cond, body, acc)(jstpe.BooleanType) - } - } - - private def mkPrePrimaryCtorBodyOnSndCtr(constructorTree: ConstructorTree, - overrideNumRef: 
js.VarRef, outputParams: List[js.VarRef])( - implicit pos: Position): js.Tree = { - val subCalls = - mkSubPreCalls(constructorTree, overrideNumRef) - - val preSuperCall = { - def checkForUndefinedParams(args: List[js.Tree]): List[js.Tree] = { - def isUndefinedParam(tree: js.Tree): Boolean = tree match { - case js.Transient(UndefinedParam) => true - case _ => false - } - - if (!args.exists(isUndefinedParam)) { - args - } else { - /* If we find an undefined param here, we're in trouble, because - * the handling of a default param for the target constructor has - * already been done during overload resolution. If we store an - * `undefined` now, it will fall through without being properly - * processed. - * - * Since this seems very tricky to deal with, and a pretty rare - * use case (with a workaround), we emit an "implementation - * restriction" error. - */ - reportError( - "Implementation restriction: in a JS class, a secondary " + - "constructor calling another constructor with default " + - "parameters must provide the values of all parameters.") - - /* Replace undefined params by undefined to prevent subsequent - * compiler crashes. 
- */ - args.map { arg => - if (isUndefinedParam(arg)) - js.Undefined()(arg.pos) - else - arg - } - } - } - - constructorTree.method.body.get match { - case js.Block(stats) => - val beforeSuperCall = stats.takeWhile { - case js.ApplyStatic(_, _, mtd, _) => !mtd.name.isConstructor - case _ => true - } - val superCallParams = stats.collectFirst { - case js.ApplyStatic(_, _, mtd, js.This() :: args) - if mtd.name.isConstructor => - val checkedArgs = checkForUndefinedParams(args) - zipMap(outputParams, checkedArgs)(js.Assign(_, _)) - }.getOrElse(Nil) - - beforeSuperCall ::: superCallParams - - case js.ApplyStatic(_, _, mtd, js.This() :: args) - if mtd.name.isConstructor => - val checkedArgs = checkForUndefinedParams(args) - zipMap(outputParams, checkedArgs)(js.Assign(_, _)) - - case _ => Nil - } - } - - js.Block(subCalls :: preSuperCall) - } - - private def mkSubPostCalls(constructorTree: ConstructorTree, - overrideNumRef: js.VarRef)(implicit pos: Position): js.Tree = { - val overrideNumss = constructorTree.subConstructors.map(_.overrideNumBounds) - val bodies = constructorTree.subConstructors.map { ct => - mkPostPrimaryCtorBodyOnSndCtr(ct, overrideNumRef) - } - overrideNumss.zip(bodies).foldRight[js.Tree](js.Skip()) { - case ((numBounds, js.Skip()), acc) => acc - - case ((numBounds, body), acc) => - val cond = mkOverrideNumsCond(overrideNumRef, numBounds) - js.If(cond, body, acc)(jstpe.BooleanType) - } - } - - private def mkPostPrimaryCtorBodyOnSndCtr(constructorTree: ConstructorTree, - overrideNumRef: js.VarRef)(implicit pos: Position): js.Tree = { - val postSuperCall = { - constructorTree.method.body.get match { - case js.Block(stats) => - stats.dropWhile { - case js.ApplyStatic(_, _, mtd, _) => !mtd.name.isConstructor - case _ => true - }.tail - - case _ => Nil - } - } - js.Block(postSuperCall :+ mkSubPostCalls(constructorTree, overrideNumRef)) - } - - private def mkOverrideNumsCond(numRef: js.VarRef, - numBounds: (Int, Int))(implicit pos: Position) = numBounds match 
{ - case (lo, hi) if lo == hi => - js.BinaryOp(js.BinaryOp.Int_==, js.IntLiteral(lo), numRef) - - case (lo, hi) if lo == hi - 1 => - val lhs = js.BinaryOp(js.BinaryOp.Int_==, numRef, js.IntLiteral(lo)) - val rhs = js.BinaryOp(js.BinaryOp.Int_==, numRef, js.IntLiteral(hi)) - js.If(lhs, js.BooleanLiteral(true), rhs)(jstpe.BooleanType) - - case (lo, hi) => - val lhs = js.BinaryOp(js.BinaryOp.Int_<=, js.IntLiteral(lo), numRef) - val rhs = js.BinaryOp(js.BinaryOp.Int_<=, numRef, js.IntLiteral(hi)) - js.BinaryOp(js.BinaryOp.Boolean_&, lhs, rhs) - js.If(lhs, rhs, js.BooleanLiteral(false))(jstpe.BooleanType) - } - } - - private def zipMap[T, U, V](xs: List[T], ys: List[U])( - f: (T, U) => V): List[V] = { - for ((x, y) <- xs zip ys) yield f(x, y) - } - - /** mkOverloadSelection return a list of `stats` with that starts with: - * 1) The definition for the local variable that will hold the overload - * resolution number. - * 2) The definitions of all local variables that are used as parameters - * in all the constructors. - * 3) The overload resolution match/if statements. For each overload the - * overload number is assigned and the parameters are cast and assigned - * to their corresponding variables. 
- */ - private def mkOverloadSelection(jsConstructorBuilder: JSConstructorBuilder, - overloadIdent: js.LocalIdent, dispatchResolution: js.Tree)( - implicit pos: Position): List[js.Tree] = { - - def deconstructApplyCtor(body: js.Tree): (List[js.Tree], MethodName, List[js.Tree]) = { - val (prepStats, applyCtor) = (body: @unchecked) match { - case applyCtor: js.ApplyStatic => - (Nil, applyCtor) - case js.Block(prepStats :+ (applyCtor: js.ApplyStatic)) => - (prepStats, applyCtor) - } - val js.ApplyStatic(_, _, js.MethodIdent(ctorName), js.This() :: ctorArgs) = - applyCtor - assert(ctorName.isConstructor, - s"unexpected super constructor call to non-constructor $ctorName at ${applyCtor.pos}") - (prepStats, ctorName, ctorArgs) - } - - if (!jsConstructorBuilder.hasSubConstructors) { - val (prepStats, ctorName, ctorArgs) = - deconstructApplyCtor(dispatchResolution) - - val refs = jsConstructorBuilder.getParamRefsFor(ctorName) - assert(refs.size == ctorArgs.size, s"at $pos") - val assignCtorParams = zipMap(refs, ctorArgs) { (ref, ctorArg) => - js.VarDef(ref.ident, NoOriginalName, ref.tpe, mutable = false, ctorArg) - } - - prepStats ::: assignCtorParams - } else { - val overloadRef = js.VarRef(overloadIdent)(jstpe.IntType) - - /* transformDispatch takes the body of the method generated by - * `genJSConstructorDispatch` and transform it recursively. 
- */ - def transformDispatch(tree: js.Tree): js.Tree = tree match { - // Parameter count resolution - case js.Match(selector, cases, default) => - val newCases = cases.map { - case (literals, body) => (literals, transformDispatch(body)) - } - val newDefault = transformDispatch(default) - js.Match(selector, newCases, newDefault)(tree.tpe) - - // Parameter type resolution - case js.If(cond, thenp, elsep) => - js.If(cond, transformDispatch(thenp), - transformDispatch(elsep))(tree.tpe) - - // Throw(StringLiteral(No matching overload)) - case tree: js.Throw => - tree - - // Overload resolution done, apply the constructor - case _ => - val (prepStats, ctorName, ctorArgs) = deconstructApplyCtor(tree) - - val num = jsConstructorBuilder.getOverrideNum(ctorName) - val overloadAssign = js.Assign(overloadRef, js.IntLiteral(num)) - - val refs = jsConstructorBuilder.getParamRefsFor(ctorName) - assert(refs.size == ctorArgs.size, s"at $pos") - val assignCtorParams = zipMap(refs, ctorArgs)(js.Assign(_, _)) - - js.Block(overloadAssign :: prepStats ::: assignCtorParams) - } - - val newDispatchResolution = transformDispatch(dispatchResolution) - val allParamDefsAsVars = jsConstructorBuilder.getAllParamDefsAsVars - val overrideNumDef = js.VarDef(overloadIdent, NoOriginalName, - jstpe.IntType, mutable = true, js.IntLiteral(0)) - - overrideNumDef :: allParamDefsAsVars ::: newDispatchResolution :: Nil - } - } - - private def mkJSConstructorBuilder(ctors: List[js.MethodDef], reportError: String => Unit)( - implicit pos: Position): JSConstructorBuilder = { - def findCtorForwarderCall(tree: js.Tree): MethodName = (tree: @unchecked) match { - case js.ApplyStatic(_, _, method, js.This() :: _) - if method.name.isConstructor => - method.name - - case js.Block(stats) => - stats.collectFirst { - case js.ApplyStatic(_, _, method, js.This() :: _) - if method.name.isConstructor => - method.name - }.get - } - - val (primaryCtor :: Nil, secondaryCtors) = ctors.partition { - _.body.get match { - case 
js.Block(stats) => - stats.exists(_.isInstanceOf[js.JSSuperConstructorCall]) - - case _: js.JSSuperConstructorCall => true - case _ => false - } - } - - val ctorToChildren = secondaryCtors.map { ctor => - findCtorForwarderCall(ctor.body.get) -> ctor - }.groupBy(_._1).map(kv => kv._1 -> kv._2.map(_._2)).withDefaultValue(Nil) - - var overrideNum = -1 - def mkConstructorTree(method: js.MethodDef): ConstructorTree = { - val subCtrTrees = ctorToChildren(method.methodName).map(mkConstructorTree) - overrideNum += 1 - new ConstructorTree(overrideNum, method, subCtrTrees) - } - - new JSConstructorBuilder(mkConstructorTree(primaryCtor), reportError: String => Unit) - } - -} diff --git a/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala b/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala index c73821430098..c02e0c030657 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala @@ -95,6 +95,9 @@ final class JSDefinitions()(using Context) { def JSExportStaticAnnot(using Context) = JSExportStaticAnnotType.symbol.asClass @threadUnsafe lazy val JSExportAllAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.JSExportAll") def JSExportAllAnnot(using Context) = JSExportAllAnnotType.symbol.asClass + + def JSAnnotPackage(using Context) = JSGlobalAnnot.owner.asClass + @threadUnsafe lazy val JSTypeAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.internal.JSType") def JSTypeAnnot(using Context) = JSTypeAnnotType.symbol.asClass @threadUnsafe lazy val JSOptionalAnnotType: TypeRef = requiredClassRef("scala.scalajs.js.annotation.internal.JSOptional") @@ -142,6 +145,13 @@ final class JSDefinitions()(using Context) { @threadUnsafe lazy val JSConstructorTag_materializeR = JSConstructorTagModule.requiredMethodRef("materialize") def JSConstructorTag_materialize(using Context) = JSConstructorTag_materializeR.symbol + @threadUnsafe lazy val JSImportModuleRef = 
requiredModuleRef("scala.scalajs.js.import") + def JSImportModule(using Context) = JSImportModuleRef.symbol + @threadUnsafe lazy val JSImport_applyR = JSImportModule.requiredMethodRef(nme.apply) + def JSImport_apply(using Context) = JSImport_applyR.symbol + @threadUnsafe lazy val JSImport_metaR = JSImportModule.requiredMethodRef("meta") + def JSImport_meta(using Context) = JSImport_metaR.symbol + @threadUnsafe lazy val RuntimePackageVal = requiredPackage("scala.scalajs.runtime") @threadUnsafe lazy val RuntimePackageClass = RuntimePackageVal.moduleClass.asClass @threadUnsafe lazy val RuntimePackage_wrapJavaScriptExceptionR = RuntimePackageClass.requiredMethodRef("wrapJavaScriptException") diff --git a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala index c571a08980a1..ab2211b7073b 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala @@ -13,6 +13,7 @@ import Denotations._ import Flags._ import Names._ import NameKinds.DefaultGetterName +import NameOps._ import Periods._ import Phases._ import StdNames._ @@ -174,7 +175,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { js.TopLevelJSClassExportDef(info.moduleID, info.jsName) case Constructor | Method => - val exported = tups.map(t => Exported(t._2)) + val exported = tups.map(_._2) val methodDef = withNewLocalNameScope { genExportMethod(exported, JSName.Literal(info.jsName), static = true) @@ -330,32 +331,10 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { if (isProp) genExportProperty(alts, jsName, static) else - genExportMethod(alts.map(Exported.apply), jsName, static) + genExportMethod(alts, jsName, static) } } - def genJSConstructorDispatch(alts: List[Symbol]): (Option[List[js.ParamDef]], js.JSMethodDef) = { - val exporteds = alts.map(Exported.apply) - - val isConstructorOfNestedJSClass = exporteds.head.isConstructorOfNestedJSClass - 
assert(exporteds.tail.forall(_.isConstructorOfNestedJSClass == isConstructorOfNestedJSClass), - s"Alternative constructors $alts do not agree on whether they are in a nested JS class or not") - val captureParams = if (!isConstructorOfNestedJSClass) { - None - } else { - Some(for { - exported <- exporteds - param <- exported.captureParamsFront ::: exported.captureParamsBack - } yield { - param - }) - } - - val ctorDef = genExportMethod(exporteds, JSName.Literal("constructor"), static = false) - - (captureParams, ctorDef) - } - private def genExportProperty(alts: List[Symbol], jsName: JSName, static: Boolean): js.JSPropertyDef = { assert(!alts.isEmpty, s"genExportProperty with empty alternatives for $jsName") @@ -372,17 +351,11 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { val (getter, setters) = alts.partition(_.info.paramInfoss.head.isEmpty) // We can have at most one getter - if (getter.sizeIs > 1) { - /* Member export of properties should be caught earlier, so if we get - * here with a non-static export, something went horribly wrong. 
- */ - assert(static, s"Found more than one instance getter to export for name $jsName.") - for (duplicate <- getter.tail) - report.error(s"Duplicate static getter export with name '${jsName.displayName}'", duplicate) - } + if (getter.sizeIs > 1) + reportCannotDisambiguateError(jsName, alts) val getterBody = getter.headOption.map { getterSym => - genApplyForSingleExported(new FormalArgsRegistry(0, false), Exported(getterSym), static) + genApplyForSingleExported(new FormalArgsRegistry(0, false), new ExportedSymbol(getterSym, static), static) } val setterArgAndBody = { @@ -391,7 +364,8 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { } else { val formalArgsRegistry = new FormalArgsRegistry(1, false) val (List(arg), None) = formalArgsRegistry.genFormalArgs() - val body = genExportSameArgc(jsName, formalArgsRegistry, setters.map(Exported.apply), static, None) + val body = genOverloadDispatchSameArgc(jsName, formalArgsRegistry, + setters.map(new ExportedSymbol(_, static)), jstpe.AnyType, None) Some((arg, body)) } } @@ -399,10 +373,10 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { js.JSPropertyDef(flags, genExpr(jsName)(alts.head.sourcePos), getterBody, setterArgAndBody) } - private def genExportMethod(alts0: List[Exported], jsName: JSName, static: Boolean): js.JSMethodDef = { + private def genExportMethod(alts0: List[Symbol], jsName: JSName, static: Boolean)(using Context): js.JSMethodDef = { assert(alts0.nonEmpty, "need at least one alternative to generate exporter method") - implicit val pos = alts0.head.pos + implicit val pos: SourcePosition = alts0.head.sourcePos val namespace = if (static) js.MemberNamespace.PublicStatic @@ -411,12 +385,24 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { // toString() is always exported. We might need to add it here to get correct overloading. 
val alts = jsName match { - case JSName.Literal("toString") if alts0.forall(_.params.nonEmpty) => - Exported(defn.Any_toString) :: alts0 + case JSName.Literal("toString") if alts0.forall(_.info.paramInfoss.exists(_.nonEmpty)) => + defn.Any_toString :: alts0 case _ => alts0 } + val overloads = alts.map(new ExportedSymbol(_, static)) + + val (formalArgs, restParam, body) = + genOverloadDispatch(jsName, overloads, jstpe.AnyType) + + js.JSMethodDef(flags, genExpr(jsName), formalArgs, restParam, body)( + OptimizerHints.empty, None) + } + + def genOverloadDispatch(jsName: JSName, alts: List[Exported], tpe: jstpe.Type)( + using pos: SourcePosition): (List[js.ParamDef], Option[js.ParamDef], js.Tree) = { + // Create the formal args registry val hasVarArg = alts.exists(_.hasRepeatedParam) val minArgc = alts.map(_.minArgc).min @@ -437,14 +423,14 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { * ported to dotc. */ val body = - if (alts.tail.isEmpty) genApplyForSingleExported(formalArgsRegistry, alts.head, static) - else genExportMethodMultiAlts(formalArgsRegistry, maxNonRepeatedArgc, alts, jsName, static) + if (alts.tail.isEmpty) alts.head.genBody(formalArgsRegistry) + else genExportMethodMultiAlts(formalArgsRegistry, maxNonRepeatedArgc, alts, tpe, jsName) - js.JSMethodDef(flags, genExpr(jsName), formalArgs, restParam, body)(OptimizerHints.empty, None) + (formalArgs, restParam, body) } private def genExportMethodMultiAlts(formalArgsRegistry: FormalArgsRegistry, - maxNonRepeatedArgc: Int, alts: List[Exported], jsName: JSName, static: Boolean)( + maxNonRepeatedArgc: Int, alts: List[Exported], tpe: jstpe.Type, jsName: JSName)( implicit pos: SourcePosition): js.Tree = { // Generate tuples (argc, method) @@ -469,7 +455,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { if methods != altsWithVarArgs // exclude default case we're generating anyways for varargs } yield { // body of case to disambiguates methods with current count - val caseBody = 
genExportSameArgc(jsName, formalArgsRegistry, methods, static, Some(argc)) + val caseBody = genOverloadDispatchSameArgc(jsName, formalArgsRegistry, methods, tpe, Some(argc)) List(js.IntLiteral(argc - formalArgsRegistry.minArgc)) -> caseBody } @@ -477,7 +463,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { if (altsWithVarArgs.isEmpty) genThrowTypeError() else - genExportSameArgc(jsName, formalArgsRegistry, altsWithVarArgs, static, None) + genOverloadDispatchSameArgc(jsName, formalArgsRegistry, altsWithVarArgs, tpe, None) } val body = { @@ -491,7 +477,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { js.AsInstanceOf(js.JSSelect(restArgRef, js.StringLiteral("length")), jstpe.IntType), cases, defaultCase)( - jstpe.AnyType) + tpe) } } @@ -506,14 +492,14 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { * The registry of all the formal arguments * @param alts * Alternative methods - * @param static - * Whether we are generating a static method + * @param tpe + * Result type * @param maxArgc * Maximum number of arguments to use for disambiguation */ - private def genExportSameArgc(jsName: JSName, formalArgsRegistry: FormalArgsRegistry, - alts: List[Exported], static: Boolean, maxArgc: Option[Int]): js.Tree = { - genExportSameArgcRec(jsName, formalArgsRegistry, alts, paramIndex = 0, static, maxArgc) + private def genOverloadDispatchSameArgc(jsName: JSName, formalArgsRegistry: FormalArgsRegistry, + alts: List[Exported], tpe: jstpe.Type, maxArgc: Option[Int]): js.Tree = { + genOverloadDispatchSameArgcRec(jsName, formalArgsRegistry, alts, tpe, paramIndex = 0, maxArgc) } /** Resolves method calls to [[alts]] while assuming they have the same parameter count. 
@@ -524,27 +510,27 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { * The registry of all the formal arguments * @param alts * Alternative methods + * @param tpe + * Result type * @param paramIndex * Index where to start disambiguation (starts at 0, increases through recursion) - * @param static - * Whether we are generating a static method * @param maxArgc * Maximum number of arguments to use for disambiguation */ - private def genExportSameArgcRec(jsName: JSName, formalArgsRegistry: FormalArgsRegistry, alts: List[Exported], - paramIndex: Int, static: Boolean, maxArgc: Option[Int]): js.Tree = { + private def genOverloadDispatchSameArgcRec(jsName: JSName, formalArgsRegistry: FormalArgsRegistry, + alts: List[Exported], tpe: jstpe.Type, paramIndex: Int, maxArgc: Option[Int]): js.Tree = { implicit val pos = alts.head.pos if (alts.sizeIs == 1) { - genApplyForSingleExported(formalArgsRegistry, alts.head, static) + alts.head.genBody(formalArgsRegistry) } else if (maxArgc.exists(_ <= paramIndex) || !alts.exists(_.params.size > paramIndex)) { // We reach here in three cases: // 1. The parameter list has been exhausted // 2. The optional argument count restriction has triggered // 3. 
We only have (more than once) repeated parameters left // Therefore, we should fail - reportCannotDisambiguateError(jsName, alts) + reportCannotDisambiguateError(jsName, alts.map(_.sym)) js.Undefined() } else { val altsByTypeTest = groupByWithoutHashCode(alts) { exported => @@ -553,10 +539,25 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { if (altsByTypeTest.size == 1) { // Testing this parameter is not doing any us good - genExportSameArgcRec(jsName, formalArgsRegistry, alts, paramIndex + 1, static, maxArgc) + genOverloadDispatchSameArgcRec(jsName, formalArgsRegistry, alts, tpe, paramIndex + 1, maxArgc) } else { // Sort them so that, e.g., isInstanceOf[String] comes before isInstanceOf[Object] - val sortedAltsByTypeTest = topoSortDistinctsBy(altsByTypeTest)(_._1) + val sortedAltsByTypeTest = topoSortDistinctsWith(altsByTypeTest) { (lhs, rhs) => + (lhs._1, rhs._1) match { + // NoTypeTest is always last + case (_, NoTypeTest) => true + case (NoTypeTest, _) => false + + case (PrimitiveTypeTest(_, rank1), PrimitiveTypeTest(_, rank2)) => + rank1 <= rank2 + + case (InstanceOfTypeTest(t1), InstanceOfTypeTest(t2)) => + t1 <:< t2 + + case (_: PrimitiveTypeTest, _: InstanceOfTypeTest) => true + case (_: InstanceOfTypeTest, _: PrimitiveTypeTest) => false + } + } val defaultCase = genThrowTypeError() @@ -565,14 +566,10 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { implicit val pos = subAlts.head.pos val paramRef = formalArgsRegistry.genArgRef(paramIndex) - val genSubAlts = genExportSameArgcRec(jsName, formalArgsRegistry, - subAlts, paramIndex + 1, static, maxArgc) + val genSubAlts = genOverloadDispatchSameArgcRec(jsName, formalArgsRegistry, + subAlts, tpe, paramIndex + 1, maxArgc) - def hasDefaultParam = subAlts.exists { exported => - val params = exported.params - params.size > paramIndex && - params(paramIndex).hasDefault - } + def hasDefaultParam = subAlts.exists(_.hasDefaultAt(paramIndex)) val optCond = typeTest match { case 
PrimitiveTypeTest(tpe, _) => Some(js.IsInstanceOf(paramRef, tpe)) @@ -588,14 +585,14 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { js.BinaryOp(js.BinaryOp.===, paramRef, js.Undefined()))( jstpe.BooleanType) } - js.If(condOrUndef, genSubAlts, elsep)(jstpe.AnyType) + js.If(condOrUndef, genSubAlts, elsep)(tpe) } } } } } - private def reportCannotDisambiguateError(jsName: JSName, alts: List[Exported]): Unit = { + private def reportCannotDisambiguateError(jsName: JSName, alts: List[Symbol]): Unit = { val currentClass = currentClassSym.get /* Find a position that is in the current class for decent error reporting. @@ -604,21 +601,26 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { * same error in all compilers. */ val validPositions = alts.collect { - case alt if alt.sym.owner == currentClass => alt.pos + case alt if alt.owner == currentClass => alt.sourcePos } val pos: SourcePosition = if (validPositions.isEmpty) currentClass.sourcePos else validPositions.maxBy(_.point) val kind = - if (currentClass.isJSType) "method" - else "exported method" + if (alts.head.isJSGetter) "getter" + else if (alts.head.isJSSetter) "setter" + else "method" + + val fullKind = + if (currentClass.isJSType) kind + else "exported " + kind val displayName = jsName.displayName - val altsTypesInfo = alts.map(_.typeInfo).mkString("\n ") + val altsTypesInfo = alts.map(_.info.show).sorted.mkString("\n ") report.error( - s"Cannot disambiguate overloads for $kind $displayName with types\n $altsTypesInfo", + s"Cannot disambiguate overloads for $fullKind $displayName with types\n $altsTypesInfo", pos) } @@ -677,63 +679,54 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { implicit val pos = exported.pos - // Generate JS code to prepare arguments (repeated args, default getters and unboxes) - val jsArgPrep = genPrepareArgs(formalArgsRegistry, exported, static) - val jsArgPrepRefs = jsArgPrep.map(_.ref) + val varDefs = new mutable.ListBuffer[js.VarDef] 
- // Combine prep'ed formal arguments with captures - val allJSArgs = { - exported.captureParamsFront.map(_.ref) ::: - jsArgPrepRefs ::: - exported.captureParamsBack.map(_.ref) + for ((param, i) <- exported.params.zipWithIndex) { + val rhs = genScalaArg(exported, i, formalArgsRegistry, param, static, captures = Nil)( + prevArgsCount => varDefs.take(prevArgsCount).toList.map(_.ref)) + + varDefs += js.VarDef(freshLocalIdent("prep" + i), NoOriginalName, rhs.tpe, mutable = false, rhs) } - val jsResult = genResult(exported, allJSArgs, static) + val builtVarDefs = varDefs.result() + + val jsResult = genResult(exported, builtVarDefs.map(_.ref), static) - js.Block(jsArgPrep :+ jsResult) + js.Block(builtVarDefs :+ jsResult) } - /** Generate the necessary JavaScript code to prepare the arguments of an - * exported method (unboxing and default parameter handling) + /** Generates a Scala argument from dispatched JavaScript arguments + * (unboxing and default parameter handling). */ - private def genPrepareArgs(formalArgsRegistry: FormalArgsRegistry, exported: Exported, static: Boolean)( - implicit pos: SourcePosition): List[js.VarDef] = { - - val result = new mutable.ListBuffer[js.VarDef] + def genScalaArg(exported: Exported, paramIndex: Int, formalArgsRegistry: FormalArgsRegistry, + param: JSParamInfo, static: Boolean, captures: List[js.Tree])( + previousArgsValues: Int => List[js.Tree])( + implicit pos: SourcePosition): js.Tree = { - for ((param, i) <- exported.params.zipWithIndex) yield { - val verifiedOrDefault = if (param.isRepeated) { - genJSArrayToVarArgs(formalArgsRegistry.genVarargRef(i)) - } else { - val jsArg = formalArgsRegistry.genArgRef(i) + if (param.repeated) { + genJSArrayToVarArgs(formalArgsRegistry.genVarargRef(paramIndex)) + } else { + val jsArg = formalArgsRegistry.genArgRef(paramIndex) - // Unboxed argument (if it is defined) - val unboxedArg = unbox(jsArg, param.info) + // Unboxed argument (if it is defined) + val unboxedArg = unbox(jsArg, param.info) + 
if (exported.hasDefaultAt(paramIndex)) { // If argument is undefined and there is a default getter, call it - if (param.hasDefault) { - js.If(js.BinaryOp(js.BinaryOp.===, jsArg, js.Undefined()), { - genCallDefaultGetter(exported.sym, i, static) { - prevArgsCount => result.take(prevArgsCount).toList.map(_.ref) - } - }, { - // Otherwise, unbox the argument - unboxedArg - })(unboxedArg.tpe) - } else { - // Otherwise, it is always the unboxed argument + js.If(js.BinaryOp(js.BinaryOp.===, jsArg, js.Undefined()), { + genCallDefaultGetter(exported.sym, paramIndex, static, captures)(previousArgsValues) + }, { unboxedArg - } + })(unboxedArg.tpe) + } else { + // Otherwise, it is always the unboxed argument + unboxedArg } - - result += js.VarDef(freshLocalIdent("prep" + i), NoOriginalName, - verifiedOrDefault.tpe, mutable = false, verifiedOrDefault) } - - result.toList } - private def genCallDefaultGetter(sym: Symbol, paramIndex: Int, static: Boolean)( + def genCallDefaultGetter(sym: Symbol, paramIndex: Int, + static: Boolean, captures: List[js.Tree])( previousArgsValues: Int => List[js.Tree])( implicit pos: SourcePosition): js.Tree = { @@ -744,9 +737,33 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { assert(!defaultGetterDenot.isOverloaded, i"found overloaded default getter $defaultGetterDenot") val defaultGetter = defaultGetterDenot.symbol - val targetTree = - if (sym.isClassConstructor || static) genLoadModule(targetSym) - else js.This()(encodeClassType(targetSym)) + val targetTree = { + if (sym.isClassConstructor || static) { + if (targetSym.isStatic) { + assert(captures.isEmpty, i"expected empty captures for ${targetSym.fullName} at $pos") + genLoadModule(targetSym) + } else { + assert(captures.sizeIs == 1, "expected exactly one capture") + + // Find the module accessor. We cannot use memberBasedOnFlags because of scala-js/scala-js#4526. 
+ val outer = targetSym.originalOwner + val name = atPhase(typerPhase)(targetSym.name.unexpandedName).sourceModuleName + val modAccessor = outer.info.allMembers.find { denot => + denot.symbol.is(Module) && denot.name.unexpandedName == name + }.getOrElse { + throw new AssertionError(i"could not find module accessor for ${targetSym.fullName} at $pos") + }.symbol + + val receiver = captures.head + if (outer.isJSType) + genApplyJSClassMethod(receiver, modAccessor, Nil) + else + genApplyMethodMaybeStatically(receiver, modAccessor, Nil) + } + } else { + js.This()(encodeClassType(targetSym)) + } + } // Pass previous arguments to defaultGetter val defaultGetterArgs = previousArgsValues(defaultGetter.info.paramInfoss.head.size) @@ -757,6 +774,13 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { js.Undefined() else genApplyJSClassMethod(targetTree, defaultGetter, defaultGetterArgs) + } else if (defaultGetter.owner == targetSym) { + /* We get here if a non-native constructor has a native companion. + * This is reported on a per-class level. 
+ */ + assert(sym.isClassConstructor, + s"got non-constructor method $sym with default method in JS native companion") + js.Undefined() } else { report.error( "When overriding a native method with default arguments, " + @@ -774,7 +798,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { else sym.owner private def defaultGetterDenot(targetSym: Symbol, sym: Symbol, paramIndex: Int): Denotation = - targetSym.info.member(DefaultGetterName(sym.name.asTermName, paramIndex)) + targetSym.info.memberBasedOnFlags(DefaultGetterName(sym.name.asTermName, paramIndex), excluded = Bridge) private def defaultGetterDenot(sym: Symbol, paramIndex: Int): Denotation = defaultGetterDenot(targetSymForDefaultGetter(sym), sym, paramIndex) @@ -809,106 +833,33 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { private def genThrowTypeError(msg: String = "No matching overload")(implicit pos: Position): js.Tree = js.Throw(js.JSNew(js.JSGlobalRef("TypeError"), js.StringLiteral(msg) :: Nil)) - private final class ParamSpec(val info: Type, val isRepeated: Boolean, val hasDefault: Boolean) { - override def toString(): String = - i"ParamSpec($info, isRepeated = $isRepeated, hasDefault = $hasDefault)" - } - - private object ParamSpec { - def apply(methodSym: Symbol, infoAtElimRepeated: Type, infoAtElimEVT: Type, - methodHasDefaultParams: Boolean, paramIndex: Int): ParamSpec = { - val isRepeated = infoAtElimRepeated.isRepeatedParam - val info = - if (isRepeated) atPhase(elimRepeatedPhase)(infoAtElimRepeated.repeatedToSingle.widenDealias) - else infoAtElimEVT - val hasDefault = methodHasDefaultParams && defaultGetterDenot(methodSym, paramIndex).exists - new ParamSpec(info, isRepeated, hasDefault) - } - } - - // This is a case class because we rely on its structural equality - private final case class Exported(sym: Symbol) { - val isConstructorOfNestedJSClass = - sym.isClassConstructor && sym.owner.isNestedJSClass - - // params: List[ParamSpec] ; captureParams and 
captureParamsBack: List[js.ParamDef] - val (params, captureParamsFront, captureParamsBack) = { - val (paramNamesAtElimRepeated, paramInfosAtElimRepeated, methodHasDefaultParams) = - atPhase(elimRepeatedPhase)((sym.info.paramNamess.flatten, sym.info.paramInfoss.flatten, sym.hasDefaultParams)) - val (paramNamesAtElimEVT, paramInfosAtElimEVT) = - atPhase(elimErasedValueTypePhase)((sym.info.firstParamNames, sym.info.firstParamTypes)) - val (paramNamesNow, paramInfosNow) = - (sym.info.firstParamNames, sym.info.firstParamTypes) - - val formalParamCount = paramInfosAtElimRepeated.size - - def buildFormalParams(formalParamInfosAtElimEVT: List[Type]): IndexedSeq[ParamSpec] = { - (for { - (infoAtElimRepeated, infoAtElimEVT, paramIndex) <- - paramInfosAtElimRepeated.lazyZip(formalParamInfosAtElimEVT).lazyZip(0 until formalParamCount) - } yield { - ParamSpec(sym, infoAtElimRepeated, infoAtElimEVT, methodHasDefaultParams, paramIndex) - }).toIndexedSeq - } - - def buildCaptureParams(namesAndInfosNow: List[(TermName, Type)]): List[js.ParamDef] = { - implicit val pos: Position = sym.span - for ((name, info) <- namesAndInfosNow) yield { - js.ParamDef(freshLocalIdent(name.mangledString), NoOriginalName, toIRType(info), - mutable = false) - } - } + abstract class Exported( + val sym: Symbol, + // Parameters participating in overload resolution. 
+ val params: scala.collection.immutable.IndexedSeq[JSParamInfo] + ) { + assert(!params.exists(_.capture), "illegal capture params in Exported") - if (!isConstructorOfNestedJSClass) { - // Easy case: all params are formal params - assert(paramInfosAtElimEVT.size == formalParamCount && paramInfosNow.size == formalParamCount, - s"Found $formalParamCount params entering elimRepeated but ${paramInfosAtElimEVT.size} params entering " + - s"elimErasedValueType and ${paramInfosNow.size} params at the back-end for non-lifted symbol ${sym.fullName}") - val formalParams = buildFormalParams(paramInfosAtElimEVT) - (formalParams, Nil, Nil) - } else if (formalParamCount == 0) { - // Fast path: all params are capture params - val captureParams = buildCaptureParams(paramNamesNow.zip(paramInfosNow)) - (IndexedSeq.empty, Nil, captureParams) + private val paramsHasDefault = { + if (!atPhase(elimRepeatedPhase)(sym.hasDefaultParams)) { + Vector.empty } else { - /* Slow path: we have to isolate formal params (which were already present at elimRepeated) - * from capture params (which are later, typically by erasure and/or lambdalift). 
- */ - - def findStartOfFormalParamsIn(paramNames: List[TermName]): Int = { - val start = paramNames.indexOfSlice(paramNamesAtElimRepeated) - assert(start >= 0, s"could not find formal param names $paramNamesAtElimRepeated in $paramNames") - start - } - - // Find the infos of formal params at elimEVT - val startOfFormalParamsAtElimEVT = findStartOfFormalParamsIn(paramNamesAtElimEVT) - val formalParamInfosAtElimEVT = paramInfosAtElimEVT.drop(startOfFormalParamsAtElimEVT).take(formalParamCount) - - // Build the formal param specs from their infos at elimRepeated and elimEVT - val formalParams = buildFormalParams(formalParamInfosAtElimEVT) - - // Find the formal params now to isolate the capture params (before and after the formal params) - val startOfFormalParamsNow = findStartOfFormalParamsIn(paramNamesNow) - val paramNamesAndInfosNow = paramNamesNow.zip(paramInfosNow) - val (captureParamsFrontNow, restOfParamsNow) = paramNamesAndInfosNow.splitAt(startOfFormalParamsNow) - val captureParamsBackNow = restOfParamsNow.drop(formalParamCount) - - // Build the capture param defs from the isolated capture params - val captureParamsFront = buildCaptureParams(captureParamsFrontNow) - val captureParamsBack = buildCaptureParams(captureParamsBackNow) - - (formalParams, captureParamsFront, captureParamsBack) + val targetSym = targetSymForDefaultGetter(sym) + params.indices.map(i => defaultGetterDenot(targetSym, sym, i).exists) } } - val hasRepeatedParam = params.nonEmpty && params.last.isRepeated + def hasDefaultAt(paramIndex: Int): Boolean = + paramIndex < paramsHasDefault.size && paramsHasDefault(paramIndex) + + val hasRepeatedParam = params.nonEmpty && params.last.repeated val minArgc = { // Find the first default param or repeated param - val firstOptionalParamIndex = params.indexWhere(p => p.hasDefault || p.isRepeated) - if (firstOptionalParamIndex == -1) params.size - else firstOptionalParamIndex + params + .indices + .find(i => hasDefaultAt(i) || params(i).repeated) + 
.getOrElse(params.size) } val maxNonRepeatedArgc = if (hasRepeatedParam) params.size - 1 else params.size @@ -925,6 +876,15 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { } def typeInfo: String = sym.info.toString + + def genBody(formalArgsRegistry: FormalArgsRegistry): js.Tree + } + + private class ExportedSymbol(sym: Symbol, static: Boolean) + extends Exported(sym, sym.jsParamInfos.toIndexedSeq) { + + def genBody(formalArgsRegistry: FormalArgsRegistry): js.Tree = + genApplyForSingleExported(formalArgsRegistry, this, static) } // !!! Hash codes of RTTypeTest are meaningless because of InstanceOfTypeTest @@ -944,46 +904,14 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { private case object NoTypeTest extends RTTypeTest - private object RTTypeTest { - given PartialOrdering[RTTypeTest] with { - override def tryCompare(lhs: RTTypeTest, rhs: RTTypeTest): Option[Int] = { - if (lteq(lhs, rhs)) if (lteq(rhs, lhs)) Some(0) else Some(-1) - else if (lteq(rhs, lhs)) Some(1) else None - } - - override def lteq(lhs: RTTypeTest, rhs: RTTypeTest): Boolean = { - (lhs, rhs) match { - // NoTypeTest is always last - case (_, NoTypeTest) => true - case (NoTypeTest, _) => false - - case (PrimitiveTypeTest(_, rank1), PrimitiveTypeTest(_, rank2)) => - rank1 <= rank2 - - case (InstanceOfTypeTest(t1), InstanceOfTypeTest(t2)) => - t1 <:< t2 - - case (_: PrimitiveTypeTest, _: InstanceOfTypeTest) => true - case (_: InstanceOfTypeTest, _: PrimitiveTypeTest) => false - } - } - - override def equiv(lhs: RTTypeTest, rhs: RTTypeTest): Boolean = { - lhs == rhs - } - } - } - /** Very simple O(n²) topological sort for elements assumed to be distinct. 
*/ - private def topoSortDistinctsBy[A <: AnyRef, B](coll: List[A])(f: A => B)( - using ord: PartialOrdering[B]): List[A] = { - + private def topoSortDistinctsWith[A <: AnyRef](coll: List[A])(lteq: (A, A) => Boolean): List[A] = { @tailrec def loop(coll: List[A], acc: List[A]): List[A] = { if (coll.isEmpty) acc else if (coll.tail.isEmpty) coll.head :: acc else { - val (lhs, rhs) = coll.span(x => !coll.forall(y => (x eq y) || !ord.lteq(f(x), f(y)))) + val (lhs, rhs) = coll.span(x => !coll.forall(y => (x eq y) || !lteq(x, y))) assert(!rhs.isEmpty, s"cycle while ordering $coll") loop(lhs ::: rhs.tail, rhs.head :: acc) } @@ -1039,7 +967,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { m.toList } - private class FormalArgsRegistry(val minArgc: Int, needsRestParam: Boolean) { + class FormalArgsRegistry(val minArgc: Int, needsRestParam: Boolean) { private val fixedParamNames: scala.collection.immutable.IndexedSeq[jsNames.LocalName] = (0 until minArgc).toIndexedSeq.map(_ => freshLocalIdent("arg")(NoPosition).name) @@ -1083,14 +1011,14 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { js.VarRef(js.LocalIdent(restParamName))(jstpe.AnyType) } - def genAllArgsRefsForForwarder()(implicit pos: Position): List[js.Tree] = { + def genAllArgsRefsForForwarder()(implicit pos: Position): List[js.TreeOrJSSpread] = { val fixedArgRefs = fixedParamNames.toList.map { paramName => js.VarRef(js.LocalIdent(paramName))(jstpe.AnyType) } if (needsRestParam) { val restArgRef = js.VarRef(js.LocalIdent(restParamName))(jstpe.AnyType) - fixedArgRefs :+ restArgRef + fixedArgRefs :+ js.JSSpread(restArgRef) } else { fixedArgRefs } diff --git a/compiler/src/dotty/tools/backend/sjs/JSInterop.scala b/compiler/src/dotty/tools/backend/sjs/JSInterop.scala deleted file mode 100644 index c22af60bd179..000000000000 --- a/compiler/src/dotty/tools/backend/sjs/JSInterop.scala +++ /dev/null @@ -1,74 +0,0 @@ -package dotty.tools.backend.sjs - -import dotty.tools.dotc.core._ -import 
Contexts._ -import Flags._ -import Symbols._ -import NameOps._ -import StdNames._ -import Phases._ - -import dotty.tools.dotc.transform.sjs.JSSymUtils._ - -/** Management of the interoperability with JavaScript. - * - * This object only contains forwarders for extension methods in - * `transform.sjs.JSSymUtils`. They are kept to minimize changes in - * `JSCodeGen` in the short term, but it will eventually be removed. - */ -object JSInterop { - - /** Is this symbol a JavaScript type? */ - def isJSType(sym: Symbol)(using Context): Boolean = - sym.isJSType - - /** Should this symbol be translated into a JS getter? - * - * This is true for any parameterless method, i.e., defined without `()`. - * Unlike `SymDenotations.isGetter`, it applies to user-defined methods as - * much as *accessor* methods created for `val`s and `var`s. - */ - def isJSGetter(sym: Symbol)(using Context): Boolean = - sym.isJSGetter - - /** Should this symbol be translated into a JS setter? - * - * This is true for any method whose name ends in `_=`. - * Unlike `SymDenotations.isGetter`, it applies to user-defined methods as - * much as *accessor* methods created for `var`s. - */ - def isJSSetter(sym: Symbol)(using Context): Boolean = - sym.isJSSetter - - /** Should this symbol be translated into a JS bracket access? - * - * This is true for methods annotated with `@JSBracketAccess`. - */ - def isJSBracketAccess(sym: Symbol)(using Context): Boolean = - sym.isJSBracketAccess - - /** Should this symbol be translated into a JS bracket call? - * - * This is true for methods annotated with `@JSBracketCall`. - */ - def isJSBracketCall(sym: Symbol)(using Context): Boolean = - sym.isJSBracketCall - - /** Is this symbol a default param accessor for a JS method? - * - * For default param accessors of *constructors*, we need to test whether - * the companion *class* of the owner is a JS type; not whether the owner - * is a JS type. 
- */ - def isJSDefaultParam(sym: Symbol)(using Context): Boolean = - sym.isJSDefaultParam - - /** Gets the unqualified JS name of a symbol. - * - * If it is not explicitly specified with an `@JSName` annotation, the - * JS name is inferred from the Scala name. - */ - def jsNameOf(sym: Symbol)(using Context): JSName = - sym.jsName - -} diff --git a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala index 84b6304d60eb..454b5b5c3a5c 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala @@ -39,11 +39,11 @@ class JSPositions()(using Context) { private def sourceAndSpan2irPos(source: SourceFile, span: Span): ir.Position = { if (!span.exists) ir.Position.NoPosition else { - // dotty positions are 1-based but IR positions are 0-based + // dotty positions and IR positions are both 0-based val irSource = span2irPosCache.toIRSource(source) val point = span.point - val line = source.offsetToLine(point) - 1 - val column = source.column(point) - 1 + val line = source.offsetToLine(point) + val column = source.column(point) ir.Position(irSource, line, column) } } diff --git a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala index d4a96f29ca5c..ac7b492e3e1f 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala @@ -16,39 +16,42 @@ import scala.collection.mutable object JSPrimitives { - final val FirstJSPrimitiveCode = 300 + inline val FirstJSPrimitiveCode = 300 - final val DYNNEW = FirstJSPrimitiveCode + 1 // Instantiate a new JavaScript object + inline val DYNNEW = FirstJSPrimitiveCode + 1 // Instantiate a new JavaScript object - final val ARR_CREATE = DYNNEW + 1 // js.Array.apply (array literal syntax) + inline val ARR_CREATE = DYNNEW + 1 // js.Array.apply (array literal syntax) - final val TYPEOF = 
ARR_CREATE + 1 // typeof x - final val JS_NATIVE = TYPEOF + 1 // js.native. Marker method. Fails if tried to be emitted. + inline val TYPEOF = ARR_CREATE + 1 // typeof x + inline val JS_NATIVE = TYPEOF + 1 // js.native. Marker method. Fails if tried to be emitted. - final val UNITVAL = JS_NATIVE + 1 // () value, which is undefined + inline val UNITVAL = JS_NATIVE + 1 // () value, which is undefined - final val CONSTRUCTOROF = UNITVAL + 1 // runtime.constructorOf(clazz) - final val CREATE_INNER_JS_CLASS = CONSTRUCTOROF + 1 // runtime.createInnerJSClass - final val CREATE_LOCAL_JS_CLASS = CREATE_INNER_JS_CLASS + 1 // runtime.createLocalJSClass - final val WITH_CONTEXTUAL_JS_CLASS_VALUE = CREATE_LOCAL_JS_CLASS + 1 // runtime.withContextualJSClassValue - final val LINKING_INFO = WITH_CONTEXTUAL_JS_CLASS_VALUE + 1 // runtime.linkingInfo + inline val JS_IMPORT = UNITVAL + 1 // js.import.apply(specifier) + inline val JS_IMPORT_META = JS_IMPORT + 1 // js.import.meta - final val STRICT_EQ = LINKING_INFO + 1 // js.special.strictEquals - final val IN = STRICT_EQ + 1 // js.special.in - final val INSTANCEOF = IN + 1 // js.special.instanceof - final val DELETE = INSTANCEOF + 1 // js.special.delete - final val FORIN = DELETE + 1 // js.special.forin - final val DEBUGGER = FORIN + 1 // js.special.debugger + inline val CONSTRUCTOROF = JS_IMPORT_META + 1 // runtime.constructorOf(clazz) + inline val CREATE_INNER_JS_CLASS = CONSTRUCTOROF + 1 // runtime.createInnerJSClass + inline val CREATE_LOCAL_JS_CLASS = CREATE_INNER_JS_CLASS + 1 // runtime.createLocalJSClass + inline val WITH_CONTEXTUAL_JS_CLASS_VALUE = CREATE_LOCAL_JS_CLASS + 1 // runtime.withContextualJSClassValue + inline val LINKING_INFO = WITH_CONTEXTUAL_JS_CLASS_VALUE + 1 // runtime.linkingInfo - final val THROW = DEBUGGER + 1 + inline val STRICT_EQ = LINKING_INFO + 1 // js.special.strictEquals + inline val IN = STRICT_EQ + 1 // js.special.in + inline val INSTANCEOF = IN + 1 // js.special.instanceof + inline val DELETE = 
INSTANCEOF + 1 // js.special.delete + inline val FORIN = DELETE + 1 // js.special.forin + inline val DEBUGGER = FORIN + 1 // js.special.debugger - final val UNION_FROM = THROW + 1 // js.|.from - final val UNION_FROM_TYPE_CONSTRUCTOR = UNION_FROM + 1 // js.|.fromTypeConstructor + inline val THROW = DEBUGGER + 1 - final val REFLECT_SELECTABLE_SELECTDYN = UNION_FROM_TYPE_CONSTRUCTOR + 1 // scala.reflect.Selectable.selectDynamic - final val REFLECT_SELECTABLE_APPLYDYN = REFLECT_SELECTABLE_SELECTDYN + 1 // scala.reflect.Selectable.applyDynamic + inline val UNION_FROM = THROW + 1 // js.|.from + inline val UNION_FROM_TYPE_CONSTRUCTOR = UNION_FROM + 1 // js.|.fromTypeConstructor - final val LastJSPrimitiveCode = REFLECT_SELECTABLE_APPLYDYN + inline val REFLECT_SELECTABLE_SELECTDYN = UNION_FROM_TYPE_CONSTRUCTOR + 1 // scala.reflect.Selectable.selectDynamic + inline val REFLECT_SELECTABLE_APPLYDYN = REFLECT_SELECTABLE_SELECTDYN + 1 // scala.reflect.Selectable.applyDynamic + + inline val LastJSPrimitiveCode = REFLECT_SELECTABLE_APPLYDYN def isJSPrimitive(code: Int): Boolean = code >= FirstJSPrimitiveCode && code <= LastJSPrimitiveCode @@ -106,6 +109,9 @@ class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx) { addPrimitive(defn.BoxedUnit_UNIT, UNITVAL) + addPrimitive(jsdefn.JSImport_apply, JS_IMPORT) + addPrimitive(jsdefn.JSImport_meta, JS_IMPORT_META) + addPrimitive(jsdefn.Runtime_constructorOf, CONSTRUCTOROF) addPrimitive(jsdefn.Runtime_createInnerJSClass, CREATE_INNER_JS_CLASS) addPrimitive(jsdefn.Runtime_createLocalJSClass, CREATE_LOCAL_JS_CLASS) diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index e858e01efc48..3c73b1126889 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -56,9 +56,6 @@ class CompilationUnit protected (val source: SourceFile) { */ var needsQuotePickling: Boolean = false - /** A structure containing 
a temporary map for generating inline accessors */ - val inlineAccessors: InlineAccessors = new InlineAccessors - var suspended: Boolean = false var suspendedAtInliningPhase: Boolean = false @@ -66,7 +63,7 @@ class CompilationUnit protected (val source: SourceFile) { def isSuspendable: Boolean = true /** Suspends the compilation unit by thowing a SuspendException - * and recoring the suspended compilation unit + * and recording the suspended compilation unit */ def suspend()(using Context): Nothing = assert(isSuspendable) @@ -125,7 +122,7 @@ object CompilationUnit { NoSource } else if (!source.file.exists) { - report.error(s"not found: ${source.file.path}") + report.error(s"source file not found: ${source.file.path}") NoSource } else source diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index ef0acb58492b..1ddc626d2646 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -3,12 +3,13 @@ package dotc import core._ import Contexts._ -import typer.{FrontEnd, RefChecks} +import typer.{TyperPhase, RefChecks} +import parsing.Parser import Phases.Phase import transform._ -import dotty.tools.backend.jvm.{CollectSuperCalls, GenBCode} import dotty.tools.backend -import dotty.tools.dotc.transform.localopt.StringInterpolatorOpt +import backend.jvm.{CollectSuperCalls, GenBCode} +import localopt.StringInterpolatorOpt /** The central class of the dotc compiler. The job of a compiler is to create * runs, which process given `phases` in a given `rootContext`. 
@@ -36,7 +37,8 @@ class Compiler { /** Phases dealing with the frontend up to trees ready for TASTY pickling */ protected def frontendPhases: List[List[Phase]] = - List(new FrontEnd) :: // Compiler frontend: scanner, parser, namer, typer + List(new Parser) :: // Compiler frontend: scanner, parser + List(new TyperPhase) :: // Compiler frontend: namer, typer List(new YCheckPositions) :: // YCheck positions List(new sbt.ExtractDependencies) :: // Sends information on classes' dependencies to sbt via callbacks List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files @@ -62,27 +64,30 @@ class Compiler { new ElimPackagePrefixes, // Eliminate references to package prefixes in Select nodes new CookComments, // Cook the comments: expand variables, doc, etc. new CheckStatic, // Check restrictions that apply to @static members + new CheckLoopingImplicits, // Check that implicit defs do not call themselves in an infinite loop new BetaReduce, // Reduce closure applications new InlineVals, // Check right hand-sides of an `inline val`s new ExpandSAMs, // Expand single abstract method closures to anonymous classes - new init.Checker) :: // Check initialization of objects - List(new ElimRepeated, // Rewrite vararg parameters and arguments + new ElimRepeated, // Rewrite vararg parameters and arguments + new RefChecks) :: // Various checks mostly related to abstract members and overriding + List(new init.Checker) :: // Check initialization of objects + List(new CrossVersionChecks, // Check issues related to deprecated and experimental new ProtectedAccessors, // Add accessors for protected members new ExtensionMethods, // Expand methods of value classes with extension methods new UncacheGivenAliases, // Avoid caching RHS of simple parameterless given aliases - new ByNameClosures, // Expand arguments to by-name parameters to closures + new ElimByName, // Map by-name parameters to functions new HoistSuperArgs, // Hoist complex arguments of supercalls to enclosing 
scope + new ForwardDepChecks, // Check that there are no forward references to local vals new SpecializeApplyMethods, // Adds specialized methods to FunctionN - new RefChecks) :: // Various checks mostly related to abstract members and overriding - List(new ElimOpaque, // Turn opaque into normal aliases new TryCatchPatterns, // Compile cases in try/catch - new PatternMatcher, // Compile pattern matches + new PatternMatcher) :: // Compile pattern matches + List(new ElimOpaque, // Turn opaque into normal aliases new sjs.ExplicitJSClasses, // Make all JS classes explicit (Scala.js only) new ExplicitOuter, // Add accessors to outer classes from nested ones. new ExplicitSelf, // Make references to non-trivial self types explicit as casts - new ElimByName, // Expand by-name parameter references - new StringInterpolatorOpt) :: // Optimizes raw and s string interpolators by rewriting them to string concatenations + new StringInterpolatorOpt) :: // Optimizes raw and s and f string interpolators by rewriting them to string concatenations or formats List(new PruneErasedDefs, // Drop erased definitions from scopes and simplify erased expressions + new UninitializedDefs, // Replaces `compiletime.uninitialized` by `_` new InlinePatterns, // Remove placeholders of inlined patterns new VCInlineMethods, // Inlines calls to value class methods new SeqLiterals, // Express vararg arguments as arrays @@ -102,6 +107,7 @@ class Compiler { List(new ElimErasedValueType, // Expand erased value types to their underlying implmementation types new PureStats, // Remove pure stats from blocks new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations + new EtaReduce, // Reduce eta expansions of pure paths to the underlying function reference new ArrayApply, // Optimize `scala.Array.apply([....])` and `scala.Array.apply(..., [....])` into `[...]` new sjs.AddLocalJSFakeNews, // Adds fake new invocations to local JS classes in calls to `createLocalJSClass` 
new ElimPolyFunction, // Rewrite PolyFunction subclasses to FunctionN subclasses @@ -120,14 +126,15 @@ class Compiler { new ElimStaticThis, // Replace `this` references to static objects by global identifiers new CountOuterAccesses) :: // Identify outer accessors that can be dropped List(new DropOuterAccessors, // Drop unused outer accessors + new CheckNoSuperThis, // Check that supercalls don't contain references to `this` new Flatten, // Lift all inner classes to package scope - new RenameLifted, // Renames lifted classes to local numbering scheme new TransformWildcards, // Replace wildcards with default values new MoveStatics, // Move static methods from companion to the class itself new ExpandPrivate, // Widen private definitions accessed from nested classes new RestoreScopes, // Repair scopes rendered invalid by moving definitions in prior phases of the group new SelectStatic, // get rid of selects that would be compiled into GetStatic new sjs.JUnitBootstrappers, // Generate JUnit-specific bootstrapper classes for Scala.js (not enabled by default) + new CollectEntryPoints, // Collect all entry points and save them in the context new CollectSuperCalls, // Find classes that are called with super new RepeatableAnnotations) :: // Aggregate repeatable annotations Nil diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index 20ae4da58d18..17177e72a7de 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -82,7 +82,7 @@ class Driver { Positioned.init(using ictx) inContext(ictx) { - if !ctx.settings.YdropComments.value || ctx.mode.is(Mode.ReadComments) then + if !ctx.settings.YdropComments.value || ctx.settings.YreadComments.value then ictx.setProperty(ContextDoc, new ContextDocstrings) val fileNamesOrNone = command.checkUsage(summary, sourcesRequired)(using ctx.settings)(using ctx.settingsState) fileNamesOrNone.map { fileNames => diff --git 
a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 6b51908c37d7..a258036466ff 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -9,20 +9,20 @@ import Types._ import Scopes._ import Names.Name import Denotations.Denotation -import typer.Typer +import typer.{Typer, PrepareInlineable} import typer.ImportInfo._ import Decorators._ import io.{AbstractFile, PlainFile, VirtualFile} import Phases.unfusedPhases import util._ -import reporting.Reporter +import reporting.{Reporter, Suppression, Action} +import reporting.Diagnostic +import reporting.Diagnostic.Warning import rewrites.Rewrites import profile.Profiler import printing.XprintMode -import parsing.Parsers.Parser -import parsing.JavaParsers.JavaParser import typer.ImplicitRunInfo import config.Feature import StdNames.nme @@ -57,45 +57,66 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint */ @volatile var isCancelled = false - /** Produces the following contexts, from outermost to innermost - * - * bootStrap: A context with next available runId and a scope consisting of - * the RootPackage _root_ - * start A context with RootClass as owner and the necessary initializations - * for type checking. 
- * imports For each element of RootImports, an import context - */ - protected def rootContext(using Context): Context = { - ctx.initialize() - ctx.base.setPhasePlan(comp.phases) - val rootScope = new MutableScope - val bootstrap = ctx.fresh - .setPeriod(Period(comp.nextRunId, FirstPhaseId)) - .setScope(rootScope) - rootScope.enter(ctx.definitions.RootPackage)(using bootstrap) - var start = bootstrap.fresh - .setOwner(defn.RootClass) - .setTyper(new Typer) - .addMode(Mode.ImplicitsEnabled) - .setTyperState(ctx.typerState.fresh(ctx.reporter)) - if ctx.settings.YexplicitNulls.value && !Feature.enabledBySetting(nme.unsafeNulls) then - start = start.addMode(Mode.SafeNulls) - ctx.initialize()(using start) // re-initialize the base context with start - start.setRun(this) - } - private var compiling = false - private var myCtx = rootContext(using ictx) - - /** The context created for this run */ - given runContext[Dummy_so_its_a_def]: Context = myCtx - assert(runContext.runId <= Periods.MaxPossibleRunId) - private var myUnits: List[CompilationUnit] = _ private var myUnitsCached: List[CompilationUnit] = _ private var myFiles: Set[AbstractFile] = _ + // `@nowarn` annotations by source file, populated during typer + private val mySuppressions: mutable.LinkedHashMap[SourceFile, mutable.ListBuffer[Suppression]] = mutable.LinkedHashMap.empty + // source files whose `@nowarn` annotations are processed + private val mySuppressionsComplete: mutable.Set[SourceFile] = mutable.Set.empty + // warnings issued before a source file's `@nowarn` annotations are processed, suspended so that `@nowarn` can filter them + private val mySuspendedMessages: mutable.LinkedHashMap[SourceFile, mutable.LinkedHashSet[Warning]] = mutable.LinkedHashMap.empty + + object suppressions: + // When the REPL creates a new run (ReplDriver.compile), parsing is already done in the old context, with the + // previous Run. 
Parser warnings were suspended in the old run and need to be copied over so they are not lost. + // Same as scala/scala/commit/79ca1408c7. + def initSuspendedMessages(oldRun: Run) = if oldRun != null then + mySuspendedMessages.clear() + mySuspendedMessages ++= oldRun.mySuspendedMessages + + def suppressionsComplete(source: SourceFile) = source == NoSource || mySuppressionsComplete(source) + + def addSuspendedMessage(warning: Warning) = + mySuspendedMessages.getOrElseUpdate(warning.pos.source, mutable.LinkedHashSet.empty) += warning + + def nowarnAction(dia: Diagnostic): Action.Warning.type | Action.Verbose.type | Action.Silent.type = + mySuppressions.getOrElse(dia.pos.source, Nil).find(_.matches(dia)) match { + case Some(s) => + s.markUsed() + if (s.verbose) Action.Verbose + else Action.Silent + case _ => + Action.Warning + } + + def addSuppression(sup: Suppression): Unit = + val source = sup.annotPos.source + mySuppressions.getOrElseUpdate(source, mutable.ListBuffer.empty) += sup + + def reportSuspendedMessages(source: SourceFile)(using Context): Unit = { + // sort suppressions. 
they are not added in any particular order because of lazy type completion + for (sups <- mySuppressions.get(source)) + mySuppressions(source) = sups.sortBy(sup => 0 - sup.start) + mySuppressionsComplete += source + mySuspendedMessages.remove(source).foreach(_.foreach(ctx.reporter.issueIfNotSuppressed)) + } + + def runFinished(hasErrors: Boolean): Unit = + // report suspended messages (in case the run finished before typer) + mySuspendedMessages.keysIterator.toList.foreach(reportSuspendedMessages) + // report unused nowarns only if all phases are done + if !hasErrors && ctx.settings.WunusedHas.nowarn then + for { + source <- mySuppressions.keysIterator.toList + sups <- mySuppressions.remove(source) + sup <- sups.reverse + } if (!sup.used) + report.warning("@nowarn annotation does not suppress any warnings", sup.annotPos) + /** The compilation units currently being compiled, this may return different * results over time. */ @@ -212,6 +233,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint Stats.record(s"total trees at end of $phase", ast.Trees.ntrees) for (unit <- units) Stats.record(s"retained typed trees at end of $phase", unit.tpdTree.treeSize) + ctx.typerState.gc() } profiler.finished() @@ -221,7 +243,9 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint runCtx.setProfiler(Profiler()) unfusedPhases.foreach(_.initContext(runCtx)) runPhases(using runCtx) - if (!ctx.reporter.hasErrors) Rewrites.writeBack() + if (!ctx.reporter.hasErrors) + Rewrites.writeBack() + suppressions.runFinished(hasErrors = ctx.reporter.hasErrors) while (finalizeActions.nonEmpty) { val action = finalizeActions.remove(0) action() @@ -243,19 +267,13 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint .setCompilationUnit(unit) .withRootImports - def process()(using Context) = { - unit.untpdTree = - if (unit.isJava) new JavaParser(unit.source).parse() - else new Parser(unit.source).parse() - 
ctx.typer.lateEnter(unit.untpdTree) - def processUnit() = { - unit.tpdTree = ctx.typer.typedExpr(unit.untpdTree) - val phase = new transform.SetRootTree() - phase.run - } - if (typeCheck) - if (compiling) finalizeActions += (() => processUnit()) else processUnit() - } + def process()(using Context) = + ctx.typer.lateEnterUnit(doTypeCheck => + if typeCheck then + if compiling then finalizeActions += doTypeCheck + else doTypeCheck() + ) + process()(using unitCtx) } @@ -267,25 +285,23 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint val unit = ctx.compilationUnit val prevPhase = ctx.phase.prev // can be a mini-phase val fusedPhase = ctx.base.fusedContaining(prevPhase) - val treeString = unit.tpdTree.show(using ctx.withProperty(XprintMode, Some(()))) - - report.echo(s"result of $unit after $fusedPhase:") + val echoHeader = f"[[syntax trees at end of $fusedPhase%25s]] // ${unit.source}" + val tree = if ctx.isAfterTyper then unit.tpdTree else unit.untpdTree + val treeString = tree.show(using ctx.withProperty(XprintMode, Some(()))) last match { - case SomePrintedTree(phase, lastTreeSting) if lastTreeSting != treeString => - val msg = - if (!ctx.settings.XprintDiff.value && !ctx.settings.XprintDiffDel.value) treeString - else DiffUtil.mkColoredCodeDiff(treeString, lastTreeSting, ctx.settings.XprintDiffDel.value) - report.echo(msg) - SomePrintedTree(fusedPhase.toString, treeString) - - case SomePrintedTree(phase, lastTreeSting) => - report.echo(" Unchanged since " + phase) + case SomePrintedTree(phase, lastTreeString) if lastTreeString == treeString => + report.echo(s"$echoHeader: unchanged since $phase") last - case NoPrintedTree => - report.echo(treeString) - SomePrintedTree(fusedPhase.toString, treeString) + case SomePrintedTree(phase, lastTreeString) if ctx.settings.XprintDiff.value || ctx.settings.XprintDiffDel.value => + val diff = DiffUtil.mkColoredCodeDiff(treeString, lastTreeString, ctx.settings.XprintDiffDel.value) + 
report.echo(s"$echoHeader\n$diff\n") + SomePrintedTree(fusedPhase.phaseName, treeString) + + case _ => + report.echo(s"$echoHeader\n$treeString\n") + SomePrintedTree(fusedPhase.phaseName, treeString) } } @@ -320,4 +336,40 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint myUnits = null myUnitsCached = null } + + /** Produces the following contexts, from outermost to innermost + * + * bootStrap: A context with next available runId and a scope consisting of + * the RootPackage _root_ + * start A context with RootClass as owner and the necessary initializations + * for type checking. + * imports For each element of RootImports, an import context + */ + protected def rootContext(using Context): Context = { + ctx.initialize() + ctx.base.setPhasePlan(comp.phases) + val rootScope = new MutableScope(0) + val bootstrap = ctx.fresh + .setPeriod(Period(comp.nextRunId, FirstPhaseId)) + .setScope(rootScope) + rootScope.enter(ctx.definitions.RootPackage)(using bootstrap) + var start = bootstrap.fresh + .setOwner(defn.RootClass) + .setTyper(new Typer) + .addMode(Mode.ImplicitsEnabled) + .setTyperState(ctx.typerState.fresh(ctx.reporter)) + if ctx.settings.YexplicitNulls.value && !Feature.enabledBySetting(nme.unsafeNulls) then + start = start.addMode(Mode.SafeNulls) + ctx.initialize()(using start) // re-initialize the base context with start + + // `this` must be unchecked for safe initialization because by being passed to setRun during + // initialization, it is not yet considered fully initialized by the initialization checker + start.setRun(this: @unchecked) + } + + private var myCtx = rootContext(using ictx) + + /** The context created for this run */ + given runContext[Dummy_so_its_a_def]: Context = myCtx + assert(runContext.runId <= Periods.MaxPossibleRunId) } diff --git a/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled b/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled index 6eddf64fff15..6bf7530faf24 100644 --- 
a/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled +++ b/compiler/src/dotty/tools/dotc/ast/CheckTrees.scala.disabled @@ -217,7 +217,7 @@ object CheckTrees { optionArg.argTypesHi match { case Nil => optionArg :: Nil - case tupleArgs if defn.isTupleType(optionArg) => + case tupleArgs if defn.isTupleNType(optionArg) => tupleArgs } case _ => diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 27362bff418f..8f0abf9cd999 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -7,7 +7,7 @@ import util.Spans._, Types._, Contexts._, Constants._, Names._, NameOps._, Flags import Symbols._, StdNames._, Trees._, Phases._, ContextOps._ import Decorators._, transform.SymUtils._ import NameKinds.{UniqueName, EvidenceParamName, DefaultGetterName} -import typer.{FrontEnd, Namer, Checking} +import typer.{TyperPhase, Namer, Checking} import util.{Property, SourceFile, SourcePosition} import config.Feature.{sourceVersion, migrateTo3, enabled} import config.SourceVersion._ @@ -182,6 +182,7 @@ object desugar { tpt = TypeTree(defn.UnitType), rhs = setterRhs ).withMods((mods | Accessor) &~ (CaseAccessor | GivenOrImplicit | Lazy)) + .dropEndMarker() // the end marker should only appear on the getter definition Thicket(vdef1, setter) } else vdef1 @@ -519,7 +520,11 @@ object desugar { enumCases.last.pushAttachment(DesugarEnums.DefinesEnumLookupMethods, ()) val enumCompanionRef = TermRefTree() val enumImport = - Import(enumCompanionRef, enumCases.flatMap(caseIds).map(ImportSelector(_))) + Import(enumCompanionRef, enumCases.flatMap(caseIds).map( + enumCase => + ImportSelector(enumCase.withSpan(enumCase.span.startPos)) + ) + ) (enumImport :: enumStats, enumCases, enumCompanionRef) } else (stats, Nil, EmptyTree) @@ -782,7 +787,7 @@ object desugar { DefDef( className.toTermName, joinParams(constrTparams, defParamss), classTypeRef, creatorExpr) - 
.withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | Final) + .withSpan(cdef.span) :: Nil } @@ -808,7 +813,9 @@ object desugar { Nil } } - val classMods = if mods.is(Given) then mods &~ Given | Synthetic else mods + if mods.isAllOf(Given | Inline | Transparent) then + report.error("inline given instances cannot be transparent", cdef) + val classMods = if mods.is(Given) then mods &~ (Inline | Transparent) | Synthetic else mods cpy.TypeDef(cdef: TypeDef)( name = className, rhs = cpy.Template(impl)(constr, parents1, clsDerived, self1, @@ -883,6 +890,7 @@ object desugar { val clsTmpl = cpy.Template(impl)(self = clsSelf, body = impl.body) val cls = TypeDef(clsName, clsTmpl) .withMods(mods.toTypeFlags & RetainedModuleClassFlags | ModuleClassCreationFlags) + .withEndMarker(copyFrom = mdef) // copy over the end marker position to the module class def Thicket(modul, classDef(cls).withSpan(mdef.span)) } } @@ -946,7 +954,7 @@ object desugar { tree.withMods(mods) else if tree.name.startsWith("$") && !tree.isBackquoted then report.error( - """Quoted pattern variable names starting with $ are not suported anymore. + """Quoted pattern variable names starting with $ are not supported anymore. |Use lower cases type pattern name instead. 
|""".stripMargin, tree.srcPos) @@ -1091,6 +1099,16 @@ object desugar { case IdPattern(named, tpt) => derivedValDef(original, named, tpt, rhs, mods) case _ => + + def filterWildcardGivenBinding(givenPat: Bind): Boolean = + givenPat.name != nme.WILDCARD + + def errorOnGivenBinding(bind: Bind)(using Context): Boolean = + report.error( + em"""${hl("given")} patterns are not allowed in a ${hl("val")} definition, + |please bind to an identifier and use an alias given.""".stripMargin, bind) + false + def isTuplePattern(arity: Int): Boolean = pat match { case Tuple(pats) if pats.size == arity => pats.forall(isVarPattern) @@ -1106,13 +1124,23 @@ object desugar { // - `pat` is a tuple of N variables or wildcard patterns like `(x1, x2, ..., xN)` val tupleOptimizable = forallResults(rhs, isMatchingTuple) + val inAliasGenerator = original match + case _: GenAlias => true + case _ => false + val vars = if (tupleOptimizable) // include `_` - pat match { - case Tuple(pats) => - pats.map { case id: Ident => id -> TypeTree() } - } - else getVariables(pat) // no `_` + pat match + case Tuple(pats) => pats.map { case id: Ident => id -> TypeTree() } + else + getVariables( + tree = pat, + shouldAddGiven = + if inAliasGenerator then + filterWildcardGivenBinding + else + errorOnGivenBinding + ) // no `_` val ids = for ((named, _) <- vars) yield Ident(named.name) val caseDef = CaseDef(pat, EmptyTree, makeTuple(ids)) @@ -1130,7 +1158,7 @@ object desugar { mods & Lazy | Synthetic | (if (ctx.owner.isClass) PrivateLocal else EmptyFlags) val firstDef = ValDef(tmpName, TypeTree(), matchExpr) - .withSpan(pat.span.union(rhs.span)).withMods(patMods) + .withSpan(pat.span.startPos).withMods(patMods) val useSelectors = vars.length <= 22 def selector(n: Int) = if useSelectors then Select(Ident(tmpName), nme.selectorName(n)) @@ -1239,6 +1267,18 @@ object desugar { makeOp(left, right, Span(left.span.start, op.span.end, op.span.start)) } + /** Translate throws type `A throws E1 | ... 
| En` to + * $throws[... $throws[A, E1] ... , En]. + */ + def throws(tpt: Tree, op: Ident, excepts: Tree)(using Context): AppliedTypeTree = excepts match + case Parens(excepts1) => + throws(tpt, op, excepts1) + case InfixOp(l, bar @ Ident(tpnme.raw.BAR), r) => + throws(throws(tpt, op, l), bar, r) + case e => + AppliedTypeTree( + TypeTree(defn.throwsAlias.typeRef).withSpan(op.span), tpt :: excepts :: Nil) + /** Translate tuple expressions of arity <= 22 * * () ==> () @@ -1299,7 +1339,9 @@ object desugar { if (nestedStats.isEmpty) pdef else { val name = packageObjectName(ctx.source) - val grouped = ModuleDef(name, Template(emptyConstructor, Nil, Nil, EmptyValDef, nestedStats)) + val grouped = + ModuleDef(name, Template(emptyConstructor, Nil, Nil, EmptyValDef, nestedStats)) + .withMods(Modifiers(Synthetic)) cpy.PackageDef(pdef)(pdef.pid, topStats :+ grouped) } } @@ -1376,29 +1418,6 @@ object desugar { FunctionWithMods(params, body, Modifiers(mods)) } - /** Add annotation to tree: - * tree @fullName - * - * The annotation is usually represented as a TypeTree referring to the class - * with the given name `fullName`. However, if the annotation matches a file name - * that is still to be entered, the annotation is represented as a cascade of `Selects` - * following `fullName`. This is necessary so that we avoid reading an annotation from - * the classpath that is also compiled from source. 
- */ - def makeAnnotated(fullName: String, tree: Tree)(using Context): Annotated = { - val parts = fullName.split('.') - val ttree = typerPhase match { - case phase: FrontEnd if phase.stillToBeEntered(parts.last) => - val prefix = - parts.init.foldLeft(Ident(nme.ROOTPKG): Tree)((qual, name) => - Select(qual, name.toTermName)) - Select(prefix, parts.last.toTypeName) - case _ => - TypeTree(requiredClass(fullName).typeRef) - } - Annotated(tree, New(ttree, Nil)) - } - private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(using Context) = { val vdef = ValDef(named.name.asTermName, tpt, rhs) .withMods(mods) @@ -1554,14 +1573,21 @@ object desugar { } } + /** Is `pat` of the form `x`, `x T`, or `given T`? when used as the lhs of a generator, + * these are all considered irrefutable. + */ + def isVarBinding(pat: Tree): Boolean = pat match + case pat @ Bind(_, pat1) if pat.mods.is(Given) => isVarBinding(pat1) + case IdPattern(_) => true + case _ => false + def needsNoFilter(gen: GenFrom): Boolean = if (gen.checkMode == GenCheckMode.FilterAlways) // pattern was prefixed by `case` false - else ( - gen.checkMode != GenCheckMode.FilterNow || - IdPattern.unapply(gen.pat).isDefined || - isIrrefutable(gen.pat, gen.expr) - ) + else + gen.checkMode != GenCheckMode.FilterNow + || isVarBinding(gen.pat) + || isIrrefutable(gen.pat, gen.expr) /** rhs.name with a pattern filter on rhs unless `pat` is irrefutable when * matched against `rhs`. @@ -1604,6 +1630,8 @@ object desugar { def makePolyFunction(targs: List[Tree], body: Tree): Tree = body match { case Parens(body1) => makePolyFunction(targs, body1) + case Block(Nil, body1) => + makePolyFunction(targs, body1) case Function(vargs, res) => assert(targs.nonEmpty) // TODO: Figure out if we need a `PolyFunctionWithMods` instead. 
@@ -1787,16 +1815,21 @@ object desugar { /** Returns list of all pattern variables, possibly with their types, * without duplicates */ - private def getVariables(tree: Tree)(using Context): List[VarInfo] = { + private def getVariables(tree: Tree, shouldAddGiven: Context ?=> Bind => Boolean)(using Context): List[VarInfo] = { val buf = ListBuffer[VarInfo]() def seenName(name: Name) = buf exists (_._1.name == name) def add(named: NameTree, t: Tree): Unit = if (!seenName(named.name) && named.name.isTermName) buf += ((named, t)) def collect(tree: Tree): Unit = tree match { - case Bind(nme.WILDCARD, tree1) => + case tree @ Bind(nme.WILDCARD, tree1) => + if tree.mods.is(Given) then + val Typed(_, tpt) = tree1: @unchecked + if shouldAddGiven(tree) then + add(tree, tpt) collect(tree1) case tree @ Bind(_, Typed(tree1, tpt)) => - add(tree, tpt) + if !(tree.mods.is(Given) && !shouldAddGiven(tree)) then + add(tree, tpt) collect(tree1) case tree @ Bind(_, tree1) => add(tree, TypeTree()) @@ -1814,7 +1847,7 @@ object desugar { case SeqLiteral(elems, _) => elems foreach collect case Alternative(trees) => - for (tree <- trees; (vble, _) <- getVariables(tree)) + for (tree <- trees; (vble, _) <- getVariables(tree, shouldAddGiven)) report.error(IllegalVariableInPatternAlternative(), vble.srcPos) case Annotated(arg, _) => collect(arg) diff --git a/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala b/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala index def5cb39bee1..009aba09f2f6 100644 --- a/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala +++ b/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala @@ -104,7 +104,7 @@ object DesugarEnums { /** The following lists of definitions for an enum type E and known value cases e_0, ..., e_n: * - * private val $values = Array[E](this.e_0,...,this.e_n)(ClassTag[E](classOf[E])): @unchecked + * private val $values = Array[E](this.e_0,...,this.e_n)(ClassTag[E](classOf[E])) * def values = $values.clone * def valueOf($name: String) = $name 
match { * case "e_0" => this.e_0 @@ -118,15 +118,7 @@ object DesugarEnums { extension (tpe: NamedType) def ofRawEnum = AppliedTypeTree(ref(tpe), rawEnumClassRef) val privateValuesDef = - val uncheckedValues = - // Here we use an unchecked annotation to silence warnings from the init checker. Without it, we get a warning - // that simple enum cases are promoting this from warm to initialised. This is because we are populating the - // array by selecting enum values from `this`, a value under construction. - // Singleton enum values always construct a new anonymous class, which will not be checked by the init-checker, - // so this warning will always persist even if the implementation of the anonymous class is safe. - // TODO: remove @unchecked after https://github.com/lampepfl/dotty-feature-requests/issues/135 is resolved. - Annotated(ArrayLiteral(enumValues, rawEnumClassRef), New(ref(defn.UncheckedAnnot.typeRef), Nil)) - ValDef(nme.DOLLAR_VALUES, TypeTree(), uncheckedValues) + ValDef(nme.DOLLAR_VALUES, TypeTree(), ArrayLiteral(enumValues, rawEnumClassRef)) .withFlags(Private | Synthetic) val valuesDef = @@ -187,13 +179,12 @@ object DesugarEnums { * } */ private def enumValueCreator(using Context) = { - val fieldMethods = if isJavaEnum then Nil else ordinalMeth(Ident(nme.ordinalDollar_)) :: Nil val creator = New(Template( constr = emptyConstructor, parents = enumClassRef :: scalaRuntimeDot(tpnme.EnumValue) :: Nil, derived = Nil, self = EmptyValDef, - body = fieldMethods + body = Nil ).withAttachment(ExtendsSingletonMirror, ())) DefDef(nme.DOLLAR_NEW, List(List(param(nme.ordinalDollar_, defn.IntType), param(nme.nameDollar, defn.StringType))), @@ -278,8 +269,6 @@ object DesugarEnums { def param(name: TermName, typ: Type)(using Context): ValDef = param(name, TypeTree(typ)) def param(name: TermName, tpt: Tree)(using Context): ValDef = ValDef(name, tpt, EmptyTree).withFlags(Param) - private def isJavaEnum(using Context): Boolean = 
enumClass.derivesFrom(defn.JavaEnumClass) - def ordinalMeth(body: Tree)(using Context): DefDef = DefDef(nme.ordinal, Nil, TypeTree(defn.IntType), body).withAddedFlags(Synthetic) @@ -298,10 +287,8 @@ object DesugarEnums { expandSimpleEnumCase(name, mods, definesLookups, span) else { val (tag, scaffolding) = nextOrdinal(name, CaseKind.Object, definesLookups) - val impl1 = cpy.Template(impl)( - parents = impl.parents :+ scalaRuntimeDot(tpnme.EnumValue), - body = if isJavaEnum then Nil else ordinalMethLit(tag) :: Nil - ).withAttachment(ExtendsSingletonMirror, ()) + val impl1 = cpy.Template(impl)(parents = impl.parents :+ scalaRuntimeDot(tpnme.EnumValue), body = Nil) + .withAttachment(ExtendsSingletonMirror, ()) val vdef = ValDef(name, TypeTree(), New(impl1)).withMods(mods.withAddedFlags(EnumValue, span)) flatTree(vdef :: scaffolding).withSpan(span) } diff --git a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala index 9178478c399b..2eafeca16e39 100644 --- a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala +++ b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala @@ -94,8 +94,20 @@ object MainProxies { val body = Try(call, handler :: Nil, EmptyTree) val mainArg = ValDef(nme.args, TypeTree(defn.ArrayType.appliedTo(defn.StringType)), EmptyTree) .withFlags(Param) + /** Replace typed `Ident`s that have been typed with a TypeSplice with the reference to the symbol. + * The annotations will be retype-checked in another scope that may not have the same imports. 
+ */ + def insertTypeSplices = new TreeMap { + override def transform(tree: Tree)(using Context): Tree = tree match + case tree: tpd.Ident @unchecked => TypedSplice(tree) + case tree => super.transform(tree) + } + val annots = mainFun.annotations + .filterNot(_.matches(defn.MainAnnot)) + .map(annot => insertTypeSplices.transform(annot.tree)) val mainMeth = DefDef(nme.main, (mainArg :: Nil) :: Nil, TypeTree(defn.UnitType), body) .withFlags(JavaStatic) + .withAnnotations(annots) val mainTempl = Template(emptyConstructor, Nil, Nil, EmptyValDef, mainMeth :: Nil) val mainCls = TypeDef(mainFun.name.toTypeName, mainTempl) .withFlags(Final | Invisible) diff --git a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala index ffc7d04faa52..730149381487 100644 --- a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala +++ b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala @@ -5,6 +5,10 @@ import core.Contexts._ import core.Decorators._ import util.Spans._ import Trees.{MemberDef, DefTree, WithLazyField} +import dotty.tools.dotc.core.Annotations.ConcreteAnnotation +import dotty.tools.dotc.core.Types.AnnotatedType +import dotty.tools.dotc.core.Types.ImportType +import dotty.tools.dotc.core.Types.Type /** Utility functions to go from typed to untyped ASTs */ // TODO: Handle trees with mixed source files @@ -56,19 +60,19 @@ object NavigateAST { * the given `span`. */ def untypedPath(span: Span)(using Context): List[Positioned] = - pathTo(span, ctx.compilationUnit.untpdTree) + pathTo(span, List(ctx.compilationUnit.untpdTree)) - /** The reverse path from node `from` to the node that closest encloses `span`, + /** The reverse path from any node in `from` to the node that closest encloses `span`, * or `Nil` if no such path exists. If a non-empty path is returned it starts with - * the node closest enclosing `span` and ends with `from`. + * the node closest enclosing `span` and ends with one of the nodes in `from`. 
* * @param skipZeroExtent If true, skip over zero-extent nodes in the search. These nodes * do not correspond to code the user wrote since their start and * end point are the same, so this is useful when trying to reconcile * nodes with source code. */ - def pathTo(span: Span, from: Positioned, skipZeroExtent: Boolean = false)(using Context): List[Positioned] = { + def pathTo(span: Span, from: List[Positioned], skipZeroExtent: Boolean = false)(using Context): List[Positioned] = { def childPath(it: Iterator[Any], path: List[Positioned]): List[Positioned] = { var bestFit: List[Positioned] = path while (it.hasNext) { @@ -86,6 +90,18 @@ object NavigateAST { } bestFit } + /* + * Annotations trees are located in the Type + */ + def unpackAnnotations(t: Type, path: List[Positioned]): List[Positioned] = + t match { + case ann: AnnotatedType => + unpackAnnotations(ann.parent, childPath(ann.annot.tree.productIterator, path)) + case imp: ImportType => + childPath(imp.expr.productIterator, path) + case other => + path + } def singlePath(p: Positioned, path: List[Positioned]): List[Positioned] = if (p.span.exists && !(skipZeroExtent && p.span.isZeroExtent) && p.span.contains(span)) { // FIXME: We shouldn't be manually forcing trees here, we should replace @@ -98,7 +114,12 @@ object NavigateAST { } childPath(p.productIterator, p :: path) } - else path - singlePath(from, Nil) + else { + p match { + case t: untpd.TypeTree => unpackAnnotations(t.typeOpt, path) + case _ => path + } + } + childPath(from.iterator, Nil) } } diff --git a/compiler/src/dotty/tools/dotc/ast/Positioned.scala b/compiler/src/dotty/tools/dotc/ast/Positioned.scala index 0c1f43b8f292..964b9855ae13 100644 --- a/compiler/src/dotty/tools/dotc/ast/Positioned.scala +++ b/compiler/src/dotty/tools/dotc/ast/Positioned.scala @@ -23,6 +23,8 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src private var mySpan: Span = _ + private var mySource: SourceFile = src + /** A unique identifier in 
case -Yshow-tree-ids, or -Ydebug-tree-with-id * is set, -1 otherwise. */ @@ -36,7 +38,7 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src ids.put(this, ownId) if ownId == debugId then println(s"Debug tree (id=$debugId) creation \n$this\n") - Reporter.displayPrompt(Console.in, new PrintWriter(Console.err, true)) + Thread.dumpStack() allocateId() @@ -48,7 +50,8 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src span = envelope(src) - val source: SourceFile = src + def source: SourceFile = mySource + def sourcePos(using Context): SourcePosition = source.atSpan(span) /** This positioned item, widened to `SrcPos`. Used to make clear we only need the @@ -65,12 +68,11 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src if (span == mySpan) this else { val newpd: this.type = - if (mySpan.isSynthetic) { - if (!mySpan.exists && span.exists) - envelope(source, span.startPos) // fill in children spans + if !mySpan.exists then + if span.exists then envelope(source, span.startPos) // fill in children spans this - } - else cloneIn(source) + else + cloneIn(source) newpd.span = span newpd } @@ -128,7 +130,7 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src def cloneIn(src: SourceFile): this.type = { val newpd: this.type = clone.asInstanceOf[this.type] newpd.allocateId() - // assert(newpd.uniqueId != 2208, s"source = $this, ${this.uniqueId}, ${this.span}") + newpd.mySource = src newpd } diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 73f1585427ac..8592b6d2e647 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -113,11 +113,11 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] => case _ => 0 } - /** The (last) list of arguments of an application */ - def arguments(tree: Tree): List[Tree] = 
unsplice(tree) match { - case Apply(_, args) => args - case TypeApply(fn, _) => arguments(fn) - case Block(_, expr) => arguments(expr) + /** All term arguments of an application in a single flattened list */ + def allArguments(tree: Tree): List[Tree] = unsplice(tree) match { + case Apply(fn, args) => allArguments(fn) ::: args + case TypeApply(fn, _) => allArguments(fn) + case Block(_, expr) => allArguments(expr) case _ => Nil } @@ -569,6 +569,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => * This avoids the situation where we have a Select node that does not have a symbol. */ def constToLiteral(tree: Tree)(using Context): Tree = { + assert(!tree.isType) val tree1 = ConstFold(tree) tree1.tpe.widenTermRefExpr.dealias.normalized match { case ConstantType(Constant(_: Type)) if tree.isInstanceOf[Block] => @@ -958,7 +959,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => /** Extractors for splices */ object Spliced { - /** Extracts the content of a spliced expresion tree. + /** Extracts the content of a spliced expression tree. * The result can be the contents of a term splice, which * will return a term tree. */ diff --git a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala index 9e5b7f036aa0..3f4ff4687787 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala @@ -15,55 +15,16 @@ import scala.annotation.tailrec * * This incudes implicits defined in scope as well as imported implicits. */ -class TreeMapWithImplicits extends tpd.TreeMap { +class TreeMapWithImplicits extends tpd.TreeMapWithPreciseStatContexts { import tpd._ def transformSelf(vd: ValDef)(using Context): ValDef = cpy.ValDef(vd)(tpt = transform(vd.tpt)) - /** Transform statements, while maintaining import contexts and expression contexts - * in the same way as Typer does. 
The code addresses additional concerns: - * - be tail-recursive where possible - * - don't re-allocate trees where nothing has changed - */ - override def transformStats(stats: List[Tree], exprOwner: Symbol)(using Context): List[Tree] = { - - @tailrec def traverse(curStats: List[Tree])(using Context): List[Tree] = { - - def recur(stats: List[Tree], changed: Tree, rest: List[Tree])(using Context): List[Tree] = - if (stats eq curStats) { - val rest1 = transformStats(rest, exprOwner) - changed match { - case Thicket(trees) => trees ::: rest1 - case tree => tree :: rest1 - } - } - else stats.head :: recur(stats.tail, changed, rest) - - curStats match { - case stat :: rest => - val statCtx = stat match { - case stat: DefTree => ctx - case _ => ctx.exprContext(stat, exprOwner) - } - val restCtx = stat match { - case stat: Import => ctx.importContext(stat, stat.symbol) - case _ => ctx - } - val stat1 = transform(stat)(using statCtx) - if (stat1 ne stat) recur(stats, stat1, rest)(using restCtx) - else traverse(rest)(using restCtx) - case nil => - stats - } - } - traverse(stats) - } - private def nestedScopeCtx(defs: List[Tree])(using Context): Context = { val nestedCtx = ctx.fresh.setNewScope defs foreach { - case d: DefTree if d.symbol.isOneOf(GivenOrImplicit) => nestedCtx.enter(d.symbol) + case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => nestedCtx.enter(d.symbol) case _ => } nestedCtx @@ -74,7 +35,7 @@ class TreeMapWithImplicits extends tpd.TreeMap { new TreeTraverser { def traverse(tree: Tree)(using Context): Unit = { tree match { - case d: DefTree if d.symbol.isOneOf(GivenOrImplicit) => + case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => nestedCtx.enter(d.symbol) case _ => } diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index f80d126bc471..d38ce8ca6888 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ 
-7,7 +7,6 @@ import Types._, Contexts._, Flags._ import Symbols._, Annotations._, Trees._, Symbols._, Constants.Constant import Decorators._ import dotty.tools.dotc.transform.SymUtils._ -import core.tasty.TreePickler.Hole /** A map that applies three functions and a substitution together to a tree and * makes sure they are coordinated so that the result is well-typed. The functions are diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index d1de1094524c..7aa4491c31de 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -16,17 +16,10 @@ import annotation.internal.sharable import annotation.unchecked.uncheckedVariance import annotation.constructorOnly import Decorators._ -import dotty.tools.dotc.core.tasty.TreePickler.Hole object Trees { - // Note: it would be more logical to make Untyped = Nothing. - // However, this interacts in a bad way with Scala's current type inference. - // In fact, we cannot write something like Select(pre, name), where pre is - // of type Tree[Nothing]; type inference will treat the Nothing as an uninstantiated - // value and will not infer Nothing as the type parameter for Select. - // We should come back to this issue once type inference is changed. - type Untyped = Null + type Untyped = Nothing /** The total number of created tree nodes, maintained if Stats.enabled */ @sharable var ntrees: Int = 0 @@ -45,8 +38,7 @@ object Trees { * * - You can never observe a `tpe` which is `null` (throws an exception) * - So when creating a typed tree with `withType` we can re-use - * the existing tree transparently, assigning its `tpe` field, - * provided it was `null` before. + * the existing tree transparently, assigning its `tpe` field. * - It is impossible to embed untyped trees in typed ones. * - Typed trees can be embedded in untyped ones provided they are rooted * in a TypedSplice node. 
@@ -327,9 +319,58 @@ object Trees { extension (mdef: untpd.DefTree) def mods: untpd.Modifiers = mdef.rawMods - abstract class NamedDefTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] with DefTree[T] { + sealed trait WithEndMarker[-T >: Untyped]: + self: PackageDef[T] | NamedDefTree[T] => + + import WithEndMarker.* + + final def endSpan(using Context): Span = + if hasEndMarker then + val realName = srcName.stripModuleClassSuffix.lastPart + span.withStart(span.end - realName.length) + else + NoSpan + + /** The name in source code that represents this construct, + * and is the name that the user must write to create a valid + * end marker. + * e.g. a constructor definition is terminated in the source + * code by `end this`, so its `srcName` should return `this`. + */ + protected def srcName(using Context): Name + + final def withEndMarker(): self.type = + self.withAttachment(HasEndMarker, ()) + + final def withEndMarker(copyFrom: WithEndMarker[?]): self.type = + if copyFrom.hasEndMarker then + this.withEndMarker() + else + this + + final def dropEndMarker(): self.type = + self.removeAttachment(HasEndMarker) + this + + protected def hasEndMarker: Boolean = self.hasAttachment(HasEndMarker) + + object WithEndMarker: + /** Property key that signals the tree was terminated + * with an `end` marker in the source code + */ + private val HasEndMarker: Property.StickyKey[Unit] = Property.StickyKey() + + end WithEndMarker + + abstract class NamedDefTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) + extends NameTree[T] with DefTree[T] with WithEndMarker[T] { type ThisTree[-T >: Untyped] <: NamedDefTree[T] + protected def srcName(using Context): Name = + if name == nme.CONSTRUCTOR then nme.this_ + else if symbol.isPackageObject then symbol.owner.name + else name + /** The position of the name defined by this definition.
* This is a point position if the definition is synthetic, or a range position * if the definition comes from source. @@ -342,7 +383,7 @@ object Trees { val point = span.point if (rawMods.is(Synthetic) || span.isSynthetic || name.toTermName == nme.ERROR) Span(point) else { - val realName = name.stripModuleClassSuffix.lastPart + val realName = srcName.stripModuleClassSuffix.lastPart Span(point, point + realName.length, point) } } @@ -650,10 +691,12 @@ object Trees { s"TypeTree${if (hasType) s"[$typeOpt]" else ""}" } - /** A type tree that defines a new type variable. Its type is always a TypeVar. - * Every TypeVar is created as the type of one TypeVarBinder. + /** A type tree whose type is inferred. These trees appear in two contexts + * - as an argument of a TypeApply. In that case its type is always a TypeVar + * - as a (result-)type of an inferred ValDef or DefDef. + * Every TypeVar is created as the type of one InferredTypeTree. */ - class TypeVarBinder[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T] + class InferredTypeTree[-T >: Untyped](implicit @constructorOnly src: SourceFile) extends TypeTree[T] /** ref.type */ case class SingletonTypeTree[-T >: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) @@ -857,9 +900,10 @@ object Trees { /** package pid { stats } */ case class PackageDef[-T >: Untyped] private[ast] (pid: RefTree[T], stats: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] { + extends ProxyTree[T] with WithEndMarker[T] { type ThisTree[-T >: Untyped] = PackageDef[T] def forwardTo: RefTree[T] = pid + protected def srcName(using Context): Name = pid.name } /** arg @annot */ @@ -928,6 +972,15 @@ object Trees { def genericEmptyValDef[T >: Untyped]: ValDef[T] = theEmptyValDef.asInstanceOf[ValDef[T]] def genericEmptyTree[T >: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]] + /** Tree that replaces a splice in pickled quotes. 
+ * It is only used when pickling quotes (will never be in a TASTy file). + */ + case class Hole[-T >: Untyped](isTermHole: Boolean, idx: Int, args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) extends Tree[T] { + type ThisTree[-T >: Untyped] <: Hole[T] + override def isTerm: Boolean = isTermHole + override def isType: Boolean = !isTermHole + } + def flatten[T >: Untyped](trees: List[Tree[T]]): List[Tree[T]] = { def recur(buf: ListBuffer[Tree[T]], remaining: List[Tree[T]]): ListBuffer[Tree[T]] = remaining match { @@ -1029,6 +1082,7 @@ object Trees { type JavaSeqLiteral = Trees.JavaSeqLiteral[T] type Inlined = Trees.Inlined[T] type TypeTree = Trees.TypeTree[T] + type InferredTypeTree = Trees.InferredTypeTree[T] type SingletonTypeTree = Trees.SingletonTypeTree[T] type RefinedTypeTree = Trees.RefinedTypeTree[T] type AppliedTypeTree = Trees.AppliedTypeTree[T] @@ -1051,6 +1105,8 @@ object Trees { type Annotated = Trees.Annotated[T] type Thicket = Trees.Thicket[T] + type Hole = Trees.Hole[T] + @sharable val EmptyTree: Thicket = genericEmptyTree @sharable val EmptyValDef: ValDef = genericEmptyValDef @sharable val ContextualEmptyTree: Thicket = new EmptyTree() // an empty tree marking a contextual closure diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index ed60b817a11f..396d85a8c58a 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -7,6 +7,7 @@ import typer.ProtoTypes import transform.SymUtils._ import transform.TypeUtils._ import core._ +import Scopes.newScope import util.Spans._, Types._, Contexts._, Constants._, Names._, Flags._, NameOps._ import Symbols._, StdNames._, Annotations._, Trees._, Symbols._ import Decorators._, DenotTransformers._ @@ -41,15 +42,17 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def Super(qual: Tree, mixName: TypeName, mixinClass: Symbol = NoSymbol)(using Context): Super = Super(qual, if
(mixName.isEmpty) untpd.EmptyTypeIdent else untpd.Ident(mixName), mixinClass) - def Apply(fn: Tree, args: List[Tree])(using Context): Apply = { - assert(fn.isInstanceOf[RefTree] || fn.isInstanceOf[GenericApply] || fn.isInstanceOf[Inlined] || fn.isInstanceOf[tasty.TreePickler.Hole]) - ta.assignType(untpd.Apply(fn, args), fn, args) - } + def Apply(fn: Tree, args: List[Tree])(using Context): Apply = fn match + case Block(Nil, expr) => + Apply(expr, args) + case _: RefTree | _: GenericApply | _: Inlined | _: Hole => + ta.assignType(untpd.Apply(fn, args), fn, args) - def TypeApply(fn: Tree, args: List[Tree])(using Context): TypeApply = { - assert(fn.isInstanceOf[RefTree] || fn.isInstanceOf[GenericApply]) - ta.assignType(untpd.TypeApply(fn, args), fn, args) - } + def TypeApply(fn: Tree, args: List[Tree])(using Context): TypeApply = fn match + case Block(Nil, expr) => + TypeApply(expr, args) + case _: RefTree | _: GenericApply => + ta.assignType(untpd.TypeApply(fn, args), fn, args) def Literal(const: Constant)(using Context): Literal = ta.assignType(untpd.Literal(const)) @@ -119,9 +122,9 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { Closure(Nil, call, targetTpt)) } - /** A closure whole anonymous function has the given method type */ + /** A closure whose anonymous function has the given method type */ def Lambda(tpe: MethodType, rhsFn: List[Tree] => Tree)(using Context): Block = { - val meth = newSymbol(ctx.owner, nme.ANON_FUN, Synthetic | Method, tpe) + val meth = newAnonFun(ctx.owner, tpe) Closure(meth, tss => rhsFn(tss.head).changeOwner(ctx.owner, meth)) } @@ -203,8 +206,8 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def ValDef(sym: TermSymbol, rhs: LazyTree = EmptyTree)(using Context): ValDef = ta.assignType(untpd.ValDef(sym.name, TypeTree(sym.info), rhs), sym) - def SyntheticValDef(name: TermName, rhs: Tree)(using Context): ValDef = - ValDef(newSymbol(ctx.owner, name, Synthetic, rhs.tpe.widen, coord = rhs.span), rhs) + def 
SyntheticValDef(name: TermName, rhs: Tree, flags: FlagSet = EmptyFlags)(using Context): ValDef = + ValDef(newSymbol(ctx.owner, name, Synthetic | flags, rhs.tpe.widen, coord = rhs.span), rhs) def DefDef(sym: TermSymbol, paramss: List[List[Symbol]], resultType: Type, rhs: Tree)(using Context): DefDef = @@ -342,7 +345,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { } else parents val cls = newNormalizedClassSymbol(owner, tpnme.ANON_CLASS, Synthetic | Final, parents1, - coord = fns.map(_.span).reduceLeft(_ union _)) + newScope, coord = fns.map(_.span).reduceLeft(_ union _)) val constr = newConstructor(cls, Synthetic, Nil, Nil).entered def forwarder(fn: TermSymbol, name: TermName) = { val fwdMeth = fn.copy(cls, name, Synthetic | Method | Final).entered.asTerm @@ -961,8 +964,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { /** `tree.isInstanceOf[tp]`, with special treatment of singleton types */ def isInstance(tp: Type)(using Context): Tree = tp.dealias match { + case ConstantType(c) if c.tag == StringTag => + singleton(tp).equal(tree) case tp: SingletonType => - if (tp.widen.derivesFrom(defn.ObjectClass)) + if tp.widen.derivesFrom(defn.ObjectClass) then tree.ensureConforms(defn.ObjectType).select(defn.Object_eq).appliedTo(singleton(tp)) else singleton(tp).equal(tree) @@ -977,11 +982,13 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { } /** cast tree to `tp`, assuming no exception is raised, i.e the operation is pure */ - def cast(tp: Type)(using Context): Tree = { - assert(tp.isValueType, i"bad cast: $tree.asInstanceOf[$tp]") + def cast(tp: Type)(using Context): Tree = cast(TypeTree(tp)) + + /** cast tree to `tp`, assuming no exception is raised, i.e the operation is pure */ + def cast(tpt: TypeTree)(using Context): Tree = + assert(tpt.tpe.isValueType, i"bad cast: $tree.asInstanceOf[$tpt]") tree.select(if (ctx.erasedTypes) defn.Any_asInstanceOf else defn.Any_typeCast) - .appliedToType(tp) - } + .appliedToTypeTree(tpt) 
/** cast `tree` to `tp` (or its box/unbox/cast equivalent when after * erasure and value and non-value types are mixed), @@ -1098,6 +1105,21 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { if (sym.exists) sym.defTree = tree tree } + + def etaExpandCFT(using Context): Tree = + def expand(target: Tree, tp: Type)(using Context): Tree = tp match + case defn.ContextFunctionType(argTypes, resType, isErased) => + val anonFun = newAnonFun( + ctx.owner, + MethodType.companion(isContextual = true, isErased = isErased)(argTypes, resType), + coord = ctx.owner.coord) + def lambdaBody(refss: List[List[Tree]]) = + expand(target.select(nme.apply).appliedToArgss(refss), resType)( + using ctx.withOwner(anonFun)) + Closure(anonFun, lambdaBody) + case _ => + target + expand(tree, tree.tpe.widen) } inline val MapRecursionLimit = 10 @@ -1129,12 +1151,57 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case _ => tree1 :: rest1 case nil => nil recur(trees, 0) + + /** Transform statements while maintaining import contexts and expression contexts + * in the same way as Typer does. 
The code addresses additional concerns: + * - be tail-recursive where possible + * - don't re-allocate trees where nothing has changed + */ + inline def mapStatements(exprOwner: Symbol, inline op: Tree => Context ?=> Tree)(using Context): List[Tree] = + @tailrec + def loop(mapped: mutable.ListBuffer[Tree] | Null, unchanged: List[Tree], pending: List[Tree])(using Context): List[Tree] = + pending match + case stat :: rest => + val statCtx = stat match + case _: DefTree | _: ImportOrExport => ctx + case _ => ctx.exprContext(stat, exprOwner) + val stat1 = op(stat)(using statCtx) + val restCtx = stat match + case stat: Import => ctx.importContext(stat, stat.symbol) + case _ => ctx + if stat1 eq stat then + loop(mapped, unchanged, rest)(using restCtx) + else + val buf = if mapped == null then new mutable.ListBuffer[Tree] else mapped + var xc = unchanged + while xc ne pending do + buf += xc.head + xc = xc.tail + stat1 match + case Thicket(stats1) => buf ++= stats1 + case _ => buf += stat1 + loop(buf, rest, rest)(using restCtx) + case nil => + if mapped == null then unchanged + else mapped.prependToList(unchanged) + + loop(null, trees, trees) + end mapStatements end extension + /** A treemap that generates the same contexts as the original typer for statements. + * This means: + * - statements that are not definitions get the exprOwner as owner + * - imports are reflected in the contexts of subsequent statements + */ + class TreeMapWithPreciseStatContexts(cpy: TreeCopier = tpd.cpy) extends TreeMap(cpy): + override def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] = + trees.mapStatements(exprOwner, transform(_)) + /** Map Inlined nodes, NamedArgs, Blocks with no statements and local references to underlying arguments. * Also drops Inline and Block with no statements. 
*/ - class MapToUnderlying extends TreeMap { + private class MapToUnderlying extends TreeMap { override def transform(tree: Tree)(using Context): Tree = tree match { case tree: Ident if isBinding(tree.symbol) && skipLocal(tree.symbol) => tree.symbol.defTree match { @@ -1316,7 +1383,11 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { /** Recover identifier prefix (e.g. this) if it exists */ def desugarIdentPrefix(tree: Ident)(using Context): Tree = tree.tpe match { case TermRef(prefix: TermRef, _) => - ref(prefix) + prefix.info match + case mt: MethodType if mt.paramInfos.isEmpty && mt.resultType.typeSymbol.is(Module) => + ref(mt.resultType.typeSymbol.sourceModule) + case _ => + ref(prefix) case TermRef(prefix: ThisType, _) => This(prefix.cls) case _ => diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 40467dc5be3f..7e00972f354d 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -458,7 +458,11 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def AppliedTypeTree(tpt: Tree, arg: Tree)(implicit src: SourceFile): AppliedTypeTree = AppliedTypeTree(tpt, arg :: Nil) - def TypeTree(tpe: Type)(using Context): TypedSplice = TypedSplice(TypeTree().withTypeUnchecked(tpe)) + def TypeTree(tpe: Type)(using Context): TypedSplice = + TypedSplice(TypeTree().withTypeUnchecked(tpe)) + + def InferredTypeTree(tpe: Type)(using Context): TypedSplice = + TypedSplice(new InferredTypeTree().withTypeUnchecked(tpe)) def unitLiteral(implicit src: SourceFile): Literal = Literal(Constant(())) diff --git a/compiler/src/dotty/tools/dotc/config/CliCommand.scala b/compiler/src/dotty/tools/dotc/config/CliCommand.scala index e361519bb54d..5a648db2b504 100644 --- a/compiler/src/dotty/tools/dotc/config/CliCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CliCommand.scala @@ -7,7 +7,7 @@ import Settings._ import core.Contexts._ import 
Properties._ -import scala.collection.JavaConverters._ +import scala.PartialFunction.cond trait CliCommand: @@ -41,28 +41,15 @@ trait CliCommand: /** Distill arguments into summary detailing settings, errors and files to main */ def distill(args: Array[String], sg: Settings.SettingGroup)(ss: SettingsState = sg.defaultState)(using Context): ArgsSummary = - /** - * Expands all arguments starting with @ to the contents of the - * file named like each argument. - */ - def expandArg(arg: String): List[String] = - def stripComment(s: String) = s takeWhile (_ != '#') - val path = Paths.get(arg stripPrefix "@") - if (!Files.exists(path)) - report.error(s"Argument file ${path.getFileName} could not be found") - Nil - else - val lines = Files.readAllLines(path) // default to UTF-8 encoding - val params = lines.asScala map stripComment mkString " " - CommandLineParser.tokenize(params) // expand out @filename to the contents of that filename def expandedArguments = args.toList flatMap { - case x if x startsWith "@" => expandArg(x) + case x if x startsWith "@" => CommandLineParser.expandArg(x) case x => List(x) } sg.processArguments(expandedArguments, processAll = true, settingsState = ss) + end distill /** Creates a help message for a subset of options based on cond */ protected def availableOptionsMsg(cond: Setting[?] => Boolean)(using settings: ConcreteSettings)(using SettingsState): String = @@ -106,8 +93,9 @@ trait CliCommand: // For now, skip the default values that do not make sense for the end user. // For example 'false' for the version command. 
"" - s"${formatName(s.name)} ${formatDescription(s.description)}${formatSetting("Default", defaultValue)}${formatSetting("Choices", s.legalChoices)}" + s"${formatName(s.name)} ${formatDescription(shortHelp(s))}${formatSetting("Default", defaultValue)}${formatSetting("Choices", s.legalChoices)}" ss.map(helpStr).mkString("", "\n", s"\n${formatName("@")} ${formatDescription("A text file containing compiler arguments (options and source files).")}\n") + end availableOptionsMsg protected def shortUsage: String = s"Usage: $cmdName " @@ -121,25 +109,61 @@ trait CliCommand: prefix + "\n" + availableOptionsMsg(cond) protected def isStandard(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = - !isAdvanced(s) && !isPrivate(s) + !isVerbose(s) && !isWarning(s) && !isAdvanced(s) && !isPrivate(s) || s.name == "-Werror" || s.name == "-Wconf" + protected def isVerbose(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + s.name.startsWith("-V") && s.name != "-V" + protected def isWarning(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + s.name.startsWith("-W") && s.name != "-W" || s.name == "-Xlint" protected def isAdvanced(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = s.name.startsWith("-X") && s.name != "-X" protected def isPrivate(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = s.name.startsWith("-Y") && s.name != "-Y" + protected def shortHelp(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): String = + s.description.linesIterator.next() + protected def isHelping(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = + cond(s.value) { + case ss: List[?] 
if s.isMultivalue => ss.contains("help") + case s: String => "help" == s + } /** Messages explaining usage and options */ protected def usageMessage(using settings: ConcreteSettings)(using SettingsState) = createUsageMsg("where possible standard", shouldExplain = false, isStandard) + protected def vusageMessage(using settings: ConcreteSettings)(using SettingsState) = + createUsageMsg("Possible verbose", shouldExplain = true, isVerbose) + protected def wusageMessage(using settings: ConcreteSettings)(using SettingsState) = + createUsageMsg("Possible warning", shouldExplain = true, isWarning) protected def xusageMessage(using settings: ConcreteSettings)(using SettingsState) = createUsageMsg("Possible advanced", shouldExplain = true, isAdvanced) protected def yusageMessage(using settings: ConcreteSettings)(using SettingsState) = createUsageMsg("Possible private", shouldExplain = true, isPrivate) - protected def phasesMessage: String = - (new Compiler()).phases.map { - case List(single) => single.phaseName - case more => more.map(_.phaseName).mkString("{", ", ", "}") - }.mkString("\n") + /** Used for the formatted output of -Xshow-phases */ + protected def phasesMessage(using ctx: Context): String = + + val phases = new Compiler().phases + val nameLimit = 25 + val maxCol = ctx.settings.pageWidth.value + val maxName = phases.flatten.map(_.phaseName.length).max + val width = maxName.min(nameLimit) + val maxDesc = maxCol - (width + 6) + val fmt = s"%${width}.${width}s %.${maxDesc}s%n" + + val sb = new StringBuilder + sb ++= fmt.format("phase name", "description") + sb ++= fmt.format("----------", "-----------") + + phases.foreach { + case List(single) => + sb ++= fmt.format(single.phaseName, single.description) + case Nil => () + case more => + sb ++= fmt.format(s"{", "") + more.foreach { mini => sb ++= fmt.format(mini.phaseName, mini.description) } + sb ++= fmt.format(s"}", "") + } + sb.mkString + /** Provide usage feedback on argument summary, assuming that all settings 
* are already applied in context. diff --git a/compiler/src/dotty/tools/dotc/config/CommandLineParser.scala b/compiler/src/dotty/tools/dotc/config/CommandLineParser.scala index e3ca896d18d2..91017f7fc0c0 100644 --- a/compiler/src/dotty/tools/dotc/config/CommandLineParser.scala +++ b/compiler/src/dotty/tools/dotc/config/CommandLineParser.scala @@ -3,6 +3,8 @@ package dotty.tools.dotc.config import scala.annotation.tailrec import scala.collection.mutable.ArrayBuffer import java.lang.Character.isWhitespace +import java.nio.file.{Files, Paths} +import scala.collection.JavaConverters._ /** A simple enough command line parser. */ @@ -93,4 +95,19 @@ object CommandLineParser: def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x)) + /** + * Expands all arguments starting with @ to the contents of the + * file named like each argument. + */ + def expandArg(arg: String): List[String] = + def stripComment(s: String) = s takeWhile (_ != '#') + val path = Paths.get(arg stripPrefix "@") + if (!Files.exists(path)) + System.err.println(s"Argument file ${path.getFileName} could not be found") + Nil + else + val lines = Files.readAllLines(path) // default to UTF-8 encoding + val params = lines.asScala map stripComment mkString " " + tokenize(params) + class ParseException(msg: String) extends RuntimeException(msg) diff --git a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala index 049972365642..29a02b7f7a29 100644 --- a/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CompilerCommand.scala @@ -13,12 +13,19 @@ abstract class CompilerCommand extends CliCommand: type ConcreteSettings = ScalaSettings final def helpMsg(using settings: ScalaSettings)(using SettingsState, Context): String = - if (settings.help.value) usageMessage - else if (settings.Xhelp.value) xusageMessage - else if (settings.Yhelp.value) yusageMessage - else if 
(settings.showPlugins.value) ctx.base.pluginDescriptions - else if (settings.XshowPhases.value) phasesMessage - else "" + settings.allSettings.find(isHelping) match + case Some(s) => s.description + case _ => + if (settings.help.value) usageMessage + else if (settings.Vhelp.value) vusageMessage + else if (settings.Whelp.value) wusageMessage + else if (settings.Xhelp.value) xusageMessage + else if (settings.Yhelp.value) yusageMessage + else if (settings.showPlugins.value) ctx.base.pluginDescriptions + else if (settings.XshowPhases.value) phasesMessage + else "" final def isHelpFlag(using settings: ScalaSettings)(using SettingsState): Boolean = - Set(settings.help, settings.Xhelp, settings.Yhelp, settings.showPlugins, settings.XshowPhases) exists (_.value) + import settings._ + val flags = Set(help, Vhelp, Whelp, Xhelp, Yhelp, showPlugins, XshowPhases) + flags.exists(_.value) || allSettings.exists(isHelping) diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index c009372f19d3..ac1708378e73 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -24,7 +24,10 @@ object Config { inline val checkConstraintsNonCyclic = false /** Check that each constraint resulting from a subtype test - * is satisfiable. + * is satisfiable. Also check that a type variable instantiation + * satisfies its constraints. + * Note that this can fail when bad bounds are in scope, like in + * tests/neg/i4721a.scala. */ inline val checkConstraintsSatisfiable = false @@ -34,6 +37,15 @@ object Config { */ inline val checkConstraintsPropagated = false + /** Check that constraint bounds do not contain wildcard types */ + inline val checkNoWildcardsInConstraint = false + + /** If a constraint is over a type lambda `tl` and `tvar` is one of + * the type variables associated with `tl` in the constraint, check + * that the origin of `tvar` is a parameter of `tl`. 
+ */ + inline val checkConsistentVars = false + /** Check that constraints of globally committable typer states are closed. * NOTE: When enabled, the check can cause CyclicReference errors because * it traverses all elements of a type. Such failures were observed when diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 42522d38be51..ed592e76ace1 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -27,6 +27,7 @@ object Feature: val erasedDefinitions = experimental("erasedDefinitions") val symbolLiterals = deprecated("symbolLiterals") val fewerBraces = experimental("fewerBraces") + val saferExceptions = experimental("saferExceptions") /** Is `feature` enabled by by a command-line setting? The enabling setting is * @@ -90,29 +91,35 @@ object Feature: def warnOnMigration(msg: Message, pos: SrcPos, version: SourceVersion = defaultSourceVersion)(using Context): Boolean = if sourceVersion.isMigrating && sourceVersion.stable == version - || version == `3.0` && migrateTo3 + || (version == `3.0` || version == `3.1`) && migrateTo3 then report.migrationWarning(msg, pos) true else false - private val assumeExperimentalIn = Set("dotty.tools.vulpix.ParallelTesting") - - def checkExperimentalFeature(which: String, srcPos: SrcPos = NoSourcePosition)(using Context) = - def hasSpecialPermission = - new Exception().getStackTrace.exists(elem => - assumeExperimentalIn.exists(elem.getClassName().startsWith(_))) - if !(Properties.experimental || hasSpecialPermission) - || ctx.settings.YnoExperimental.value - then - //println(i"${new Exception().getStackTrace.map(_.getClassName).toList}%\n%") - report.error(i"Experimental feature$which may only be used with nightly or snapshot version of compiler", srcPos) + def checkExperimentalFeature(which: String, srcPos: SrcPos, note: => String = "")(using Context) = + if !isExperimentalEnabled then + 
report.error(i"Experimental $which may only be used with a nightly or snapshot version of the compiler$note", srcPos) + + def checkExperimentalDef(sym: Symbol, srcPos: SrcPos)(using Context) = + if !isExperimentalEnabled then + val symMsg = + if sym.hasAnnotation(defn.ExperimentalAnnot) then + i"$sym is marked @experimental" + else if sym.owner.hasAnnotation(defn.ExperimentalAnnot) then + i"${sym.owner} is marked @experimental" + else + i"$sym inherits @experimental" + report.error(s"$symMsg and therefore may only be used in an experimental scope.", srcPos) /** Check that experimental compiler options are only set for snapshot or nightly compiler versions. */ def checkExperimentalSettings(using Context): Unit = for setting <- ctx.settings.language.value if setting.startsWith("experimental.") && setting != "experimental.macros" - do checkExperimentalFeature(s" $setting") + do checkExperimentalFeature(s"feature $setting", NoSourcePosition) + + def isExperimentalEnabled(using Context): Boolean = + Properties.experimental && !ctx.settings.YnoExperimental.value end Feature \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/config/Printers.scala b/compiler/src/dotty/tools/dotc/config/Printers.scala index 27391154591f..8e13e50e59b7 100644 --- a/compiler/src/dotty/tools/dotc/config/Printers.scala +++ b/compiler/src/dotty/tools/dotc/config/Printers.scala @@ -20,7 +20,7 @@ object Printers { val debug = noPrinter val derive = noPrinter val desugar = noPrinter - val dottydoc = noPrinter + val scaladoc = noPrinter val exhaustivity = noPrinter val gadts = noPrinter val gadtsConstr = noPrinter diff --git a/compiler/src/dotty/tools/dotc/config/Properties.scala b/compiler/src/dotty/tools/dotc/config/Properties.scala index 3ec49d8eef68..ca5d94977690 100644 --- a/compiler/src/dotty/tools/dotc/config/Properties.scala +++ b/compiler/src/dotty/tools/dotc/config/Properties.scala @@ -84,11 +84,11 @@ trait PropertiesTrait { /** Whether the current version of compiler is 
experimental * - * 1. Snapshot and nightly releases are experimental. + * 1. Snapshot, nightly releases and non-bootstrapped compiler are experimental. * 2. Features supported by experimental versions of the compiler: * - research plugins */ - val experimental: Boolean = versionString.contains("SNAPSHOT") || versionString.contains("NIGHTLY") + val experimental: Boolean = versionString.contains("SNAPSHOT") || versionString.contains("NIGHTLY") || versionString.contains("nonbootstrapped") val copyrightString: String = scalaPropOrElse("copyright.string", "(c) 2002-2017 LAMP/EPFL") diff --git a/compiler/src/dotty/tools/dotc/config/ScalaRelease.scala b/compiler/src/dotty/tools/dotc/config/ScalaRelease.scala new file mode 100644 index 000000000000..acca9ce9298c --- /dev/null +++ b/compiler/src/dotty/tools/dotc/config/ScalaRelease.scala @@ -0,0 +1,19 @@ +package dotty.tools.dotc.config + +enum ScalaRelease(val majorVersion: Int, val minorVersion: Int) extends Ordered[ScalaRelease]: + case Release3_0 extends ScalaRelease(3, 0) + case Release3_1 extends ScalaRelease(3, 1) + + def show = s"$majorVersion.$minorVersion" + + def compare(that: ScalaRelease) = + val ord = summon[Ordering[(Int, Int)]] + ord.compare((majorVersion, minorVersion), (that.majorVersion, that.minorVersion)) + +object ScalaRelease: + def latest = Release3_1 + + def parse(name: String) = name match + case "3.0" => Some(Release3_0) + case "3.1" => Some(Release3_1) + case _ => None diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 9f346f97e742..4dccad86e98c 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -1,17 +1,37 @@ package dotty.tools.dotc package config +import dotty.tools.dotc.config.PathResolver.Defaults +import dotty.tools.dotc.config.Settings.{Setting, SettingGroup} import dotty.tools.dotc.core.Contexts._ -import dotty.tools.io.{ 
Directory, PlainDirectory, AbstractFile, JDK9Reflectors } -import PathResolver.Defaults -import rewrites.Rewrites -import Settings.Setting +import dotty.tools.dotc.rewrites.Rewrites +import dotty.tools.io.{AbstractFile, Directory, JDK9Reflectors, PlainDirectory} -/** Settings shared by compiler and scaladoc */ -trait CommonScalaSettings { self: Settings.SettingGroup => - protected def defaultClasspath: String = sys.env.getOrElse("CLASSPATH", ".") +import scala.util.chaining._ + +class ScalaSettings extends SettingGroup with AllScalaSettings + +object ScalaSettings: + // Keep synchronized with `classfileVersion` in `BCodeIdiomatic` + private val minTargetVersion = 8 + private val maxTargetVersion = 17 + + def supportedTargetVersions: List[String] = + (minTargetVersion to maxTargetVersion).toList.map(_.toString) + + def supportedReleaseVersions: List[String] = + if scala.util.Properties.isJavaAtLeast("9") then + val jdkVersion = JDK9Reflectors.runtimeVersionMajor(JDK9Reflectors.runtimeVersion()).intValue() + val maxVersion = Math.min(jdkVersion, maxTargetVersion) + (minTargetVersion to maxVersion).toList.map(_.toString) + else List(minTargetVersion).map(_.toString) + + def supportedScalaReleaseVersions: List[String] = + ScalaRelease.values.toList.map(_.show) + + def defaultClasspath: String = sys.env.getOrElse("CLASSPATH", ".") - protected def defaultPageWidth: Int = { + def defaultPageWidth: Int = { val defaultWidth = 80 val columnsVar = System.getenv("COLUMNS") if columnsVar != null then columnsVar.toInt @@ -23,7 +43,49 @@ trait CommonScalaSettings { self: Settings.SettingGroup => else defaultWidth } - /** Path related settings */ +trait AllScalaSettings extends CommonScalaSettings, PluginSettings, VerboseSettings, WarningSettings, XSettings, YSettings: + self: SettingGroup => + + /* Path related settings */ + val semanticdbTarget: Setting[String] = PathSetting("-semanticdb-target", "Specify an alternative output directory for SemanticDB files.", "") + + val 
source: Setting[String] = ChoiceSetting("-source", "source version", "source version", List("3.0", "3.1", "future", "3.0-migration", "future-migration"), "3.0", aliases = List("--source")) + val uniqid: Setting[Boolean] = BooleanSetting("-uniqid", "Uniquely tag all identifiers in debugging output.", aliases = List("--unique-id")) + val rewrite: Setting[Option[Rewrites]] = OptionSetting[Rewrites]("-rewrite", "When used in conjunction with a `...-migration` source version, rewrites sources to migrate to new version.", aliases = List("--rewrite")) + val fromTasty: Setting[Boolean] = BooleanSetting("-from-tasty", "Compile classes from tasty files. The arguments are .tasty or .jar files.", aliases = List("--from-tasty")) + + val newSyntax: Setting[Boolean] = BooleanSetting("-new-syntax", "Require `then` and `do` in control expressions.") + val oldSyntax: Setting[Boolean] = BooleanSetting("-old-syntax", "Require `(...)` around conditions.") + val indent: Setting[Boolean] = BooleanSetting("-indent", "Together with -rewrite, remove {...} syntax when possible due to significant indentation.") + val noindent: Setting[Boolean] = BooleanSetting("-no-indent", "Require classical {...} syntax, indentation is not significant.", aliases = List("-noindent")) + val YindentColons: Setting[Boolean] = BooleanSetting("-Yindent-colons", "(disabled: use -language:experimental.fewerBraces instead)") + + /* Decompiler settings */ + val printTasty: Setting[Boolean] = BooleanSetting("-print-tasty", "Prints the raw tasty.", aliases = List("--print-tasty")) + val printLines: Setting[Boolean] = BooleanSetting("-print-lines", "Show source code line numbers.", aliases = List("--print-lines")) + + /* Scala.js-related settings */ + val scalajsGenStaticForwardersForNonTopLevelObjects: Setting[Boolean] = BooleanSetting("-scalajs-genStaticForwardersForNonTopLevelObjects", "Generate static forwarders even for non-top-level objects (Scala.js only)") + val scalajsMapSourceURI: Setting[List[String]] = 
MultiStringSetting("-scalajs-mapSourceURI", "uri1[->uri2]", "rebases source URIs from uri1 to uri2 (or to a relative URI) for source maps (Scala.js only)") + + val projectUrl: Setting[String] = StringSetting ( + "-project-url", + "project repository homepage", + "The source repository of your project.", + "" + ) + + val wikiSyntax: Setting[Boolean] = BooleanSetting("-Xwiki-syntax", "Retains the Scala2 behavior of using Wiki Syntax in Scaladoc.") + + val jvmargs = PrefixSetting("-J", "-J", "Pass directly to the runtime system.") + val defines = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.") +end AllScalaSettings + +/** Settings shared by compiler and scaladoc */ +trait CommonScalaSettings: + self: SettingGroup => + + /* Path related settings */ val bootclasspath: Setting[String] = PathSetting("-bootclasspath", "Override location of bootstrap class files.", Defaults.scalaBootClassPath, aliases = List("--boot-class-path")) val extdirs: Setting[String] = PathSetting("-extdirs", "Override location of installed extensions.", Defaults.scalaExtDirs, aliases = List("--extension-directories")) val javabootclasspath: Setting[String] = PathSetting("-javabootclasspath", "Override java boot classpath.", Defaults.javaBootClassPath, aliases = List("--java-boot-class-path")) @@ -31,83 +93,122 @@ trait CommonScalaSettings { self: Settings.SettingGroup => val sourcepath: Setting[String] = PathSetting("-sourcepath", "Specify location(s) of source files.", Defaults.scalaSourcePath, aliases = List("--source-path")) val sourceroot: Setting[String] = PathSetting("-sourceroot", "Specify workspace root directory.", ".") - val classpath: Setting[String] = PathSetting("-classpath", "Specify where to find user class files.", defaultClasspath, aliases = List("-cp", "--class-path")) + val classpath: Setting[String] = PathSetting("-classpath", "Specify where to find user class files.", ScalaSettings.defaultClasspath, aliases = List("-cp", 
"--class-path")) val outputDir: Setting[AbstractFile] = OutputSetting("-d", "directory|jar", "Destination for generated classfiles.", new PlainDirectory(Directory("."))) val color: Setting[String] = ChoiceSetting("-color", "mode", "Colored output", List("always", "never"/*, "auto"*/), "always"/* "auto"*/, aliases = List("--color")) val verbose: Setting[Boolean] = BooleanSetting("-verbose", "Output messages about what the compiler is doing.", aliases = List("--verbose")) val version: Setting[Boolean] = BooleanSetting("-version", "Print product version and exit.", aliases = List("--version")) val help: Setting[Boolean] = BooleanSetting("-help", "Print a synopsis of standard options.", aliases = List("--help")) - val pageWidth: Setting[Int] = IntSetting("-pagewidth", "Set page width", defaultPageWidth, aliases = List("--page-width")) + val pageWidth: Setting[Int] = IntSetting("-pagewidth", "Set page width", ScalaSettings.defaultPageWidth, aliases = List("--page-width")) val silentWarnings: Setting[Boolean] = BooleanSetting("-nowarn", "Silence all warnings.", aliases = List("--no-warnings")) - /** Other settings */ + val release: Setting[String] = ChoiceSetting("-release", "release", "Compile code with classes specific to the given version of the Java platform available on the classpath and emit bytecode for this version.", ScalaSettings.supportedReleaseVersions, "", aliases = List("--release")) + val deprecation: Setting[Boolean] = BooleanSetting("-deprecation", "Emit warning and location for usages of deprecated APIs.", aliases = List("--deprecation")) + val feature: Setting[Boolean] = BooleanSetting("-feature", "Emit warning and location for usages of features that should be imported explicitly.", aliases = List("--feature")) + val explain: Setting[Boolean] = BooleanSetting("-explain", "Explain errors in more detail.", aliases = List("--explain")) + // -explain-types setting is necessary for cross compilation, since it is mentioned in sbt-tpolecat, for instance + // 
it is otherwise subsumed by -explain, and should be dropped as soon as we can. + val explainTypes: Setting[Boolean] = BooleanSetting("-explain-types", "Explain type errors in more detail (deprecated, use -explain instead).", aliases = List("--explain-types", "-explaintypes")) + val unchecked: Setting[Boolean] = BooleanSetting("-unchecked", "Enable additional warnings where generated code depends on assumptions.", initialValue = true, aliases = List("--unchecked")) + val language: Setting[List[String]] = MultiStringSetting("-language", "feature", "Enable one or more language features.", aliases = List("--language")) + + /* Other settings */ val encoding: Setting[String] = StringSetting("-encoding", "encoding", "Specify character encoding used by source files.", Properties.sourceEncoding, aliases = List("--encoding")) val usejavacp: Setting[Boolean] = BooleanSetting("-usejavacp", "Utilize the java.class.path in classpath resolution.", aliases = List("--use-java-class-path")) + val scalajs: Setting[Boolean] = BooleanSetting("-scalajs", "Compile in Scala.js mode (requires scalajs-library.jar on the classpath).", aliases = List("--scalajs")) +end CommonScalaSettings - /** Plugin-related setting */ - val plugin: Setting[List[String]] = MultiStringSetting ("-Xplugin", "paths", "Load a plugin from each classpath.") - val disable: Setting[List[String]] = MultiStringSetting ("-Xplugin-disable", "plugin", "Disable plugins by name.") - val require: Setting[List[String]] = MultiStringSetting ("-Xplugin-require", "plugin", "Abort if a named plugin is not loaded.") +/** -P "plugin" settings. Various tools might support plugins. 
*/ +private sealed trait PluginSettings: + self: SettingGroup => + val plugin: Setting[List[String]] = MultiStringSetting ("-Xplugin", "paths", "Load a plugin from each classpath.") + val disable: Setting[List[String]] = MultiStringSetting ("-Xplugin-disable", "plugin", "Disable plugins by name.") + val require: Setting[List[String]] = MultiStringSetting ("-Xplugin-require", "plugin", "Abort if a named plugin is not loaded.") val showPlugins: Setting[Boolean] = BooleanSetting ("-Xplugin-list", "Print a synopsis of loaded plugins.") - val pluginsDir: Setting[String] = StringSetting ("-Xpluginsdir", "path", "Path to search for plugin archives.", Defaults.scalaPluginPath) - val pluginOptions: Setting[List[String]] = MultiStringSetting ("-P", "plugin:opt", "Pass an option to a plugin, e.g. -P::") -} - -class ScalaSettings extends Settings.SettingGroup with CommonScalaSettings { - // Keep synchronized with `classfileVersion` in `BCodeIdiomatic` - private val minTargetVersion = 8 - private val maxTargetVersion = 17 - - private def supportedTargetVersions: List[String] = - (minTargetVersion to maxTargetVersion).toList.map(_.toString) - - protected def supportedReleaseVersions: List[String] = - if scala.util.Properties.isJavaAtLeast("9") then - val jdkVersion = JDK9Reflectors.runtimeVersionMajor(JDK9Reflectors.runtimeVersion()).intValue() - val maxVersion = Math.min(jdkVersion, maxTargetVersion) - (minTargetVersion to maxVersion).toList.map(_.toString) - else List() + val pluginsDir: Setting[String] = StringSetting ("-Xpluginsdir", "path", "Path to search for plugin archives.", Defaults.scalaPluginPath) + val pluginOptions: Setting[List[String]] = MultiStringSetting ("-P", "plugin:opt", "Pass an option to a plugin, e.g. 
-P::") - /** Path related settings */ - val semanticdbTarget: Setting[String] = PathSetting("-semanticdb-target", "Specify an alternative output directory for SemanticDB files.", "") +/** -V "Verbose" settings */ +private sealed trait VerboseSettings: + self: SettingGroup => + val Vhelp: Setting[Boolean] = BooleanSetting("-V", "Print a synopsis of verbose options.") + val Xprint: Setting[List[String]] = PhasesSetting("-Vprint", "Print out program after", aliases = List("-Xprint")) - val deprecation: Setting[Boolean] = BooleanSetting("-deprecation", "Emit warning and location for usages of deprecated APIs.", aliases = List("--deprecation")) - val explainTypes: Setting[Boolean] = BooleanSetting("-explain-types", "Explain type errors in more detail (deprecated, use -explain instead).", aliases = List("--explain-types")) - // this setting is necessary for cross compilation, since it is mentioned in sbt-tpolecat, for instance - // it is otherwise subsumed by -explain, and should be dropped as soon as we can. 
- val explain: Setting[Boolean] = BooleanSetting("-explain", "Explain errors in more detail.", aliases = List("--explain")) - val feature: Setting[Boolean] = BooleanSetting("-feature", "Emit warning and location for usages of features that should be imported explicitly.", aliases = List("--feature")) - val release: Setting[String] = ChoiceSetting("-release", "release", "Compile code with classes specific to the given version of the Java platform available on the classpath and emit bytecode for this version.", supportedReleaseVersions, "", aliases = List("--release")) - val source: Setting[String] = ChoiceSetting("-source", "source version", "source version", List("3.0", "future", "3.0-migration", "future-migration"), "3.0", aliases = List("--source")) - val scalajs: Setting[Boolean] = BooleanSetting("-scalajs", "Compile in Scala.js mode (requires scalajs-library.jar on the classpath).", aliases = List("--scalajs")) - val unchecked: Setting[Boolean] = BooleanSetting("-unchecked", "Enable additional warnings where generated code depends on assumptions.", aliases = List("--unchecked")) - val uniqid: Setting[Boolean] = BooleanSetting("-uniqid", "Uniquely tag all identifiers in debugging output.", aliases = List("--unique-id")) - val language: Setting[List[String]] = MultiStringSetting("-language", "feature", "Enable one or more language features.", aliases = List("--language")) - val rewrite: Setting[Option[Rewrites]] = OptionSetting[Rewrites]("-rewrite", "When used in conjunction with a `...-migration` source version, rewrites sources to migrate to new version.", aliases = List("--rewrite")) - val fromTasty: Setting[Boolean] = BooleanSetting("-from-tasty", "Compile classes from tasty files. 
The arguments are .tasty or .jar files.", aliases = List("--from-tasty")) +/** -W "Warnings" settings + */ +private sealed trait WarningSettings: + self: SettingGroup => + val Whelp: Setting[Boolean] = BooleanSetting("-W", "Print a synopsis of warning options.") + val XfatalWarnings: Setting[Boolean] = BooleanSetting("-Werror", "Fail the compilation if there are any warnings.", aliases = List("-Xfatal-warnings")) - val newSyntax: Setting[Boolean] = BooleanSetting("-new-syntax", "Require `then` and `do` in control expressions.") - val oldSyntax: Setting[Boolean] = BooleanSetting("-old-syntax", "Require `(...)` around conditions.") - val indent: Setting[Boolean] = BooleanSetting("-indent", "Together with -rewrite, remove {...} syntax when possible due to significant indentation.") - val noindent: Setting[Boolean] = BooleanSetting("-no-indent", "Require classical {...} syntax, indentation is not significant.", aliases = List("-noindent")) - val YindentColons: Setting[Boolean] = BooleanSetting("-Yindent-colons", "(disabled: use -language:experimental.fewerBraces instead)") + val Wunused: Setting[List[String]] = MultiChoiceSetting( + name = "-Wunused", + helpArg = "warning", + descr = "Enable or disable specific `unused` warnings", + choices = List("nowarn", "all"), + default = Nil + ) + object WunusedHas: + def allOr(s: String)(using Context) = Wunused.value.pipe(us => us.contains("all") || us.contains(s)) + def nowarn(using Context) = allOr("nowarn") - /** Decompiler settings */ - val printTasty: Setting[Boolean] = BooleanSetting("-print-tasty", "Prints the raw tasty.", aliases = List("--print-tasty")) - val printLines: Setting[Boolean] = BooleanSetting("-print-lines", "Show source code line numbers.", aliases = List("--print-lines")) + val Wconf: Setting[List[String]] = MultiStringSetting( + "-Wconf", + "patterns", + default = List(), + descr = + s"""Configure compiler warnings. + |Syntax: -Wconf::,:,... 
+ |multiple are combined with &, i.e., &...& + | + | + | - Any message: any + | + | - Message categories: cat=deprecation, cat=feature, cat=unchecked + | + | - Message content: msg=regex + | The regex need only match some part of the message, not all of it. + | + | - Message id: id=E129 + | The message id is printed with the warning. + | + | - Message name: name=PureExpressionInStatementPosition + | The message name is printed with the warning in verbose warning mode. + | + |In verbose warning mode the compiler prints matching filters for warnings. + |Verbose mode can be enabled globally using `-Wconf:any:verbose`, or locally + |using the @nowarn annotation (example: `@nowarn("v") def test = try 1`). + | + | + | - error / e + | - warning / w + | - verbose / v (emit warning, show additional help for writing `-Wconf` filters) + | - info / i (infos are not counted as warnings and not affected by `-Werror`) + | - silent / s + | + |The default configuration is empty. + | + |User-defined configurations are added to the left. The leftmost rule matching + |a warning message defines the action. 
+ | + |Examples: + | - change every warning into an error: -Wconf:any:error + | - silence deprecations: -Wconf:cat=deprecation:s + | + |Note: on the command-line you might need to quote configurations containing `*` or `&` + |to prevent the shell from expanding patterns.""".stripMargin, + ) - /** Scala.js-related settings */ - val scalajsGenStaticForwardersForNonTopLevelObjects: Setting[Boolean] = BooleanSetting("-scalajs-genStaticForwardersForNonTopLevelObjects", "Generate static forwarders even for non-top-level objects (Scala.js only)") - val scalajsMapSourceURI: Setting[List[String]] = MultiStringSetting("-scalajs-mapSourceURI", "uri1[->uri2]", "rebases source URIs from uri1 to uri2 (or to a relative URI) for source maps (Scala.js only)") +/** -X "Extended" or "Advanced" settings */ +private sealed trait XSettings: + self: SettingGroup => - /** -X "Advanced" settings */ val Xhelp: Setting[Boolean] = BooleanSetting("-X", "Print a synopsis of advanced options.") val XnoForwarders: Setting[Boolean] = BooleanSetting("-Xno-forwarders", "Do not generate static forwarders in mirror classes.") val XmaxInlines: Setting[Int] = IntSetting("-Xmax-inlines", "Maximal number of successive inlines.", 32) val XmaxInlinedTrees: Setting[Int] = IntSetting("-Xmax-inlined-trees", "Maximal number of inlined trees.", 2_000_000) val Xmigration: Setting[ScalaVersion] = VersionSetting("-Xmigration", "Warn about constructs whose behavior may have changed since version.") - val Xprint: Setting[List[String]] = PhasesSetting("-Xprint", "Print out program after") val XprintTypes: Setting[Boolean] = BooleanSetting("-Xprint-types", "Print tree types (debugging option).") val XprintDiff: Setting[Boolean] = BooleanSetting("-Xprint-diff", "Print changed parts of the tree since last print.") val XprintDiffDel: Setting[Boolean] = BooleanSetting("-Xprint-diff-del", "Print changed parts of the tree since last print including deleted parts.") @@ -116,13 +217,14 @@ class ScalaSettings extends 
Settings.SettingGroup with CommonScalaSettings { val Xprompt: Setting[Boolean] = BooleanSetting("-Xprompt", "Display a prompt after each error (debugging option).") val XshowPhases: Setting[Boolean] = BooleanSetting("-Xshow-phases", "Print all compiler phases.") val XreplDisableDisplay: Setting[Boolean] = BooleanSetting("-Xrepl-disable-display", "Do not display definitions in REPL.") - val XfatalWarnings: Setting[Boolean] = BooleanSetting("-Xfatal-warnings", "Fail the compilation if there are any warnings.") val XverifySignatures: Setting[Boolean] = BooleanSetting("-Xverify-signatures", "Verify generic signatures in generated bytecode.") val XignoreScala2Macros: Setting[Boolean] = BooleanSetting("-Xignore-scala2-macros", "Ignore errors when compiling code that calls Scala2 macros, these will fail at runtime.") val XimportSuggestionTimeout: Setting[Int] = IntSetting("-Ximport-suggestion-timeout", "Timeout (in ms) for searching for import suggestions when errors are reported.", 8000) val Xsemanticdb: Setting[Boolean] = BooleanSetting("-Xsemanticdb", "Store information in SemanticDB.", aliases = List("-Ysemanticdb")) - val Xtarget: Setting[String] = ChoiceSetting("-Xtarget", "target", "Emit bytecode for the specified version of the Java platform. This might produce bytecode that will break at runtime. When on JDK 9+, consider -release as a safer alternative.", supportedTargetVersions, "", aliases = List("--Xtarget")) + val Xtarget: Setting[String] = ChoiceSetting("-Xtarget", "target", "Emit bytecode for the specified version of the Java platform. This might produce bytecode that will break at runtime. 
When on JDK 9+, consider -release as a safer alternative.", ScalaSettings.supportedTargetVersions, "", aliases = List("--Xtarget")) val XcheckMacros: Setting[Boolean] = BooleanSetting("-Xcheck-macros", "Check some invariants of macro generated code while expanding macros", aliases = List("--Xcheck-macros")) + val XmainClass: Setting[String] = StringSetting("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d )", "") + val XimplicitSearchLimit: Setting[Int] = IntSetting("-Ximplicit-search-limit", "Maximal number of expressions to be generated in an implicit search", 50000) val XmixinForceForwarders = ChoiceSetting( name = "-Xmixin-force-forwarders", @@ -136,7 +238,13 @@ class ScalaSettings extends Settings.SettingGroup with CommonScalaSettings { def isAtLeastJunit(using Context) = isTruthy || XmixinForceForwarders.value == "junit" } - /** -Y "Private" settings */ + val XmacroSettings: Setting[List[String]] = MultiStringSetting("-Xmacro-settings", "setting1,setting2,..settingN", "List of settings which exposed to the macros") +end XSettings + +/** -Y "Forking" as in forked tongue or "Private" settings */ +private sealed trait YSettings: + self: SettingGroup => + val Yhelp: Setting[Boolean] = BooleanSetting("-Y", "Print a synopsis of private options.") val Ycheck: Setting[List[String]] = PhasesSetting("-Ycheck", "Check the tree at the end of") val YcheckMods: Setting[Boolean] = BooleanSetting("-Ycheck-mods", "Check that symbols and their defining trees have modifiers in sync.") @@ -165,7 +273,7 @@ class ScalaSettings extends Settings.SettingGroup with CommonScalaSettings { val YstopBefore: Setting[List[String]] = PhasesSetting("-Ystop-before", "Stop before") // stop before erasure as long as we have not debugged it fully val YshowSuppressedErrors: Setting[Boolean] = BooleanSetting("-Yshow-suppressed-errors", "Also show follow-on errors and warnings that are normally suppressed.") val YdetailedStats: Setting[Boolean] = 
BooleanSetting("-Ydetailed-stats", "Show detailed internal compiler stats (needs Stats.enabled to be set to true).") - val YkindProjector: Setting[Boolean] = BooleanSetting("-Ykind-projector", "Allow `*` as wildcard to be compatible with kind projector.") + val YkindProjector: Setting[String] = ChoiceSetting("-Ykind-projector", "[underscores, disable]", "Allow `*` as type lambda placeholder to be compatible with kind projector. When invoked as -Ykind-projector:underscores will repurpose `_` to be a type parameter placeholder, this will disable usage of underscore as a wildcard.", List("disable", "", "underscores"), "disable") val YprintPos: Setting[Boolean] = BooleanSetting("-Yprint-pos", "Show tree positions.") val YprintPosSyms: Setting[Boolean] = BooleanSetting("-Yprint-pos-syms", "Show symbol definitions positions.") val YnoDeepSubtypes: Setting[Boolean] = BooleanSetting("-Yno-deep-subtypes", "Throw an exception on deep subtyping call stacks.") @@ -174,11 +282,13 @@ class ScalaSettings extends Settings.SettingGroup with CommonScalaSettings { val YprintSyms: Setting[Boolean] = BooleanSetting("-Yprint-syms", "When printing trees print info in symbols instead of corresponding info in trees.") val YprintDebug: Setting[Boolean] = BooleanSetting("-Yprint-debug", "When printing trees, print some extra information useful for debugging.") val YprintDebugOwners: Setting[Boolean] = BooleanSetting("-Yprint-debug-owners", "When printing trees, print owners of definitions.") + val YprintLevel: Setting[Boolean] = BooleanSetting("-Yprint-level", "print nesting levels of symbols and type variables.") val YshowPrintErrors: Setting[Boolean] = BooleanSetting("-Yshow-print-errors", "Don't suppress exceptions thrown during tree printing.") val YtestPickler: Setting[Boolean] = BooleanSetting("-Ytest-pickler", "Self-test for pickling functionality; should be used with -Ystop-after:pickler.") val YcheckReentrant: Setting[Boolean] = BooleanSetting("-Ycheck-reentrant", "Check that 
compiled program does not contain vars that can be accessed from a global root.") - val YdropComments: Setting[Boolean] = BooleanSetting("-Ydrop-comments", "Drop comments when scanning source files.") - val YcookComments: Setting[Boolean] = BooleanSetting("-Ycook-comments", "Cook the comments (type check `@usecase`, etc.)") + val YdropComments: Setting[Boolean] = BooleanSetting("-Ydrop-docs", "Drop documentation when scanning source files.", aliases = List("-Ydrop-comments")) + val YcookComments: Setting[Boolean] = BooleanSetting("-Ycook-docs", "Cook the documentation (type check `@usecase`, etc.)", aliases = List("-Ycook-comments")) + val YreadComments: Setting[Boolean] = BooleanSetting("-Yread-docs", "Read documentation from tasty.") val YforceSbtPhases: Setting[Boolean] = BooleanSetting("-Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.") val YdumpSbtInc: Setting[Boolean] = BooleanSetting("-Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.") val YcheckAllPatmat: Setting[Boolean] = BooleanSetting("-Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm).") @@ -200,6 +310,7 @@ class ScalaSettings extends Settings.SettingGroup with CommonScalaSettings { val YexplicitNulls: Setting[Boolean] = BooleanSetting("-Yexplicit-nulls", "Make reference types non-nullable. Nullable types can be expressed with unions: e.g. 
String|Null.") val YcheckInit: Setting[Boolean] = BooleanSetting("-Ysafe-init", "Ensure safe initialization of objects") val YrequireTargetName: Setting[Boolean] = BooleanSetting("-Yrequire-targetName", "Warn if an operator is defined without a @targetName annotation") + val YscalaRelease: Setting[String] = ChoiceSetting("-Yscala-release", "release", "Emit TASTy files that can be consumed by specified version of the compiler. The compilation will fail if for any reason valid TASTy cannot be produced (e.g. the code contains references to some parts of the standard library API that are missing in the older stdlib or uses language features unexpressible in the older version of TASTy format)", ScalaSettings.supportedScalaReleaseVersions, "", aliases = List("--Yscala-release")) /** Area-specific debug output */ val YexplainLowlevel: Setting[Boolean] = BooleanSetting("-Yexplain-lowlevel", "When explaining type errors, show types at a lower level.") @@ -212,16 +323,4 @@ class ScalaSettings extends Settings.SettingGroup with CommonScalaSettings { val YinstrumentDefs: Setting[Boolean] = BooleanSetting("-Yinstrument-defs", "Add instrumentation code that counts method calls; needs -Yinstrument to be set, too.") val YforceInlineWhileTyping: Setting[Boolean] = BooleanSetting("-Yforce-inline-while-typing", "Make non-transparent inline methods inline when typing. 
Emulates the old inlining behavior of 3.0.0-M3.") - - /** Dottydoc specific settings that are not used in scaladoc */ - val docSnapshot: Setting[Boolean] = BooleanSetting("-doc-snapshot", "Generate a documentation snapshot for the current Dotty version") - - val projectUrl: Setting[String] = StringSetting ( - "-project-url", - "project repository homepage", - "The source repository of your project.", - "" - ) - - val wikiSyntax: Setting[Boolean] = BooleanSetting("-Xwiki-syntax", "Retains the Scala2 behavior of using Wiki Syntax in Scaladoc.") -} +end YSettings diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 7a0f18ab9e24..29a52defa4c6 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -11,7 +11,7 @@ import language.existentials import reflect.ClassTag import scala.util.{Success, Failure} -object Settings { +object Settings: val BooleanTag: ClassTag[Boolean] = ClassTag.Boolean val IntTag: ClassTag[Int] = ClassTag.Int @@ -21,25 +21,22 @@ object Settings { val OptionTag: ClassTag[Option[?]] = ClassTag(classOf[Option[?]]) val OutputTag: ClassTag[AbstractFile] = ClassTag(classOf[AbstractFile]) - class SettingsState(initialValues: Seq[Any]) { + class SettingsState(initialValues: Seq[Any]): private val values = ArrayBuffer(initialValues: _*) private var _wasRead: Boolean = false override def toString: String = s"SettingsState(values: ${values.toList})" - def value(idx: Int): Any = { + def value(idx: Int): Any = _wasRead = true values(idx) - } def update(idx: Int, x: Any): SettingsState = - if (_wasRead) - new SettingsState(values.toSeq).update(idx, x) - else { + if (_wasRead) then SettingsState(values.toSeq).update(idx, x) + else values(idx) = x this - } - } + end SettingsState case class ArgsSummary( sstate: SettingsState, @@ -59,7 +56,7 @@ object Settings { description: String, default: T, helpArg: String = "", - choices: 
Option[Seq[T]] = None, + choices: Option[Seq[?]] = None, prefix: String = "", aliases: List[String] = Nil, depends: List[(Setting[?], Any)] = Nil, @@ -67,18 +64,11 @@ object Settings { private var changed: Boolean = false - def valueIn(state: SettingsState): T = - state.value(idx).asInstanceOf[T] + def valueIn(state: SettingsState): T = state.value(idx).asInstanceOf[T] - def updateIn(state: SettingsState, x: Any): SettingsState = x match { + def updateIn(state: SettingsState, x: Any): SettingsState = x match case _: T => state.update(idx, x) - case _ => - // would like to do: - // throw new ClassCastException(s"illegal argument, found: $x of type ${x.getClass}, required: ${implicitly[ClassTag[T]]}") - // but this runs afoul of primitive types. Concretely: if T is Boolean, then x is a boxed Boolean and the test will fail. - // Maybe this is a bug in Scala 2.10? - state.update(idx, x.asInstanceOf[T]) - } + case _ => throw IllegalArgumentException(s"found: $x of type ${x.getClass.getName}, required: ${implicitly[ClassTag[T]]}") def isDefaultIn(state: SettingsState): Boolean = valueIn(state) == default @@ -94,7 +84,7 @@ object Settings { def tryToSet(state: ArgsSummary): ArgsSummary = { val ArgsSummary(sstate, arg :: args, errors, warnings) = state - def update(value: Any, args: List[String]) = + def update(value: Any, args: List[String]): ArgsSummary = var dangers = warnings val value1 = if changed && isMultivalue then @@ -107,16 +97,34 @@ object Settings { value changed = true ArgsSummary(updateIn(sstate, value1), args, errors, dangers) + end update + def fail(msg: String, args: List[String]) = ArgsSummary(sstate, args, errors :+ msg, warnings) + def missingArg = fail(s"missing argument for option $name", args) + def setString(argValue: String, args: List[String]) = choices match case Some(xs) if !xs.contains(argValue) => fail(s"$argValue is not a valid choice for $name", args) case _ => update(argValue, args) + + def setInt(argValue: String, args: List[String]) = + 
try + val x = argValue.toInt + choices match + case Some(r: Range) if x < r.head || r.last < x => + fail(s"$argValue is out of legal range ${r.head}..${r.last} for $name", args) + case Some(xs) if !xs.contains(x) => + fail(s"$argValue is not a valid choice for $name", args) + case _ => + update(x, args) + catch case _: NumberFormatException => + fail(s"$argValue is not an integer argument for $name", args) + def doSet(argRest: String) = ((implicitly[ClassTag[T]], args): @unchecked) match { case (BooleanTag, _) => update(true, args) @@ -124,8 +132,14 @@ object Settings { update(Some(propertyClass.get.getConstructor().newInstance()), args) case (ListTag, _) => if (argRest.isEmpty) missingArg - else update((argRest split ",").toList, args) - case (StringTag, _) if argRest.nonEmpty => + else + val strings = argRest.split(",").toList + choices match + case Some(valid) => strings.filterNot(valid.contains) match + case Nil => update(strings, args) + case invalid => fail(s"invalid choice(s) for $name: ${invalid.mkString(",")}", args) + case _ => update(strings, args) + case (StringTag, _) if argRest.nonEmpty || choices.exists(_.contains("")) => setString(argRest, args) case (StringTag, arg2 :: args2) => if (arg2 startsWith "-") missingArg @@ -139,22 +153,10 @@ object Settings { val output = if (isJar) JarArchive.create(path) else new PlainDirectory(path) update(output, args) } + case (IntTag, args) if argRest.nonEmpty => + setInt(argRest, args) case (IntTag, arg2 :: args2) => - try { - val x = arg2.toInt - choices match { - case Some(r: Range) if x < r.head || r.last < x => - fail(s"$arg2 is out of legal range ${r.head}..${r.last} for $name", args2) - case Some(xs) if !xs.contains(x) => - fail(s"$arg2 is not a valid choice for $name", args) - case _ => - update(x, args2) - } - } - catch { - case _: NumberFormatException => - fail(s"$arg2 is not an integer argument for $name", args2) - } + setInt(arg2, args2) case (VersionTag, _) => ScalaVersion.parse(argRest) match { case 
Success(v) => update(v, args) @@ -175,13 +177,11 @@ object Settings { } } - object Setting { - extension [T](setting: Setting[T]) { + object Setting: + extension [T](setting: Setting[T]) def value(using Context): T = setting.valueIn(ctx.settingsState) def update(x: T)(using Context): SettingsState = setting.updateIn(ctx.settingsState, x) def isDefault(using Context): Boolean = setting.isDefaultIn(ctx.settingsState) - } - } class SettingGroup { @@ -209,7 +209,7 @@ object Settings { /** Iterates over the arguments applying them to settings where applicable. * Then verifies setting dependencies are met. * - * This temporarily takes a boolean indicating whether to keep + * This takes a boolean indicating whether to keep * processing if an argument is seen which is not a command line option. * This is an expedience for the moment so that you can say * @@ -221,28 +221,27 @@ object Settings { * * to get their arguments. */ - def processArguments(state: ArgsSummary, processAll: Boolean, skipped: List[String]): ArgsSummary = { + @tailrec + final def processArguments(state: ArgsSummary, processAll: Boolean, skipped: List[String]): ArgsSummary = def stateWithArgs(args: List[String]) = ArgsSummary(state.sstate, args, state.errors, state.warnings) - state.arguments match { + state.arguments match case Nil => checkDependencies(stateWithArgs(skipped)) case "--" :: args => checkDependencies(stateWithArgs(skipped ++ args)) case x :: _ if x startsWith "-" => - @tailrec def loop(settings: List[Setting[?]]): ArgsSummary = settings match { + @tailrec def loop(settings: List[Setting[?]]): ArgsSummary = settings match case setting :: settings1 => val state1 = setting.tryToSet(state) - if (state1 ne state) processArguments(state1, processAll, skipped) + if state1 ne state then state1 else loop(settings1) case Nil => - processArguments(state.warn(s"bad option '$x' was ignored"), processAll, skipped) - } - loop(allSettings.toList) + state.warn(s"bad option '$x' was ignored") + 
processArguments(loop(allSettings.toList), processAll, skipped) case arg :: args => - if (processAll) processArguments(stateWithArgs(args), processAll, skipped :+ arg) + if processAll then processArguments(stateWithArgs(args), processAll, skipped :+ arg) else state - } - } + end processArguments def processArguments(arguments: List[String], processAll: Boolean, settingsState: SettingsState = defaultState): ArgsSummary = processArguments(ArgsSummary(settingsState, arguments, Nil, Nil), processAll, Nil) @@ -262,14 +261,17 @@ object Settings { def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String, aliases: List[String] = Nil): Setting[String] = publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) + def MultiChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: List[String], aliases: List[String] = Nil): Setting[List[String]] = + publish(Setting(name, descr, default, helpArg, Some(choices), aliases = aliases)) + def IntSetting(name: String, descr: String, default: Int, aliases: List[String] = Nil): Setting[Int] = publish(Setting(name, descr, default, aliases = aliases)) def IntChoiceSetting(name: String, descr: String, choices: Seq[Int], default: Int): Setting[Int] = publish(Setting(name, descr, default, choices = Some(choices))) - def MultiStringSetting(name: String, helpArg: String, descr: String, aliases: List[String] = Nil): Setting[List[String]] = - publish(Setting(name, descr, Nil, helpArg, aliases = aliases)) + def MultiStringSetting(name: String, helpArg: String, descr: String, default: List[String] = Nil, aliases: List[String] = Nil): Setting[List[String]] = + publish(Setting(name, descr, default, helpArg, aliases = aliases)) def OutputSetting(name: String, helpArg: String, descr: String, default: AbstractFile): Setting[AbstractFile] = publish(Setting(name, descr, default, helpArg)) @@ -289,4 +291,4 @@ object Settings { def OptionSetting[T: 
ClassTag](name: String, descr: String, aliases: List[String] = Nil): Setting[Option[T]] = publish(Setting(name, descr, None, propertyClass = Some(implicitly[ClassTag[T]].runtimeClass), aliases = aliases)) } -} +end Settings diff --git a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala index d336305bf5c4..7311beb07601 100644 --- a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala @@ -9,7 +9,7 @@ import core.Decorators.{_, given} import util.Property enum SourceVersion: - case `3.0-migration`, `3.0`, `future-migration`, `future` + case `3.0-migration`, `3.0`, `3.1`, `future-migration`, `future` val isMigrating: Boolean = toString.endsWith("-migration") diff --git a/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala b/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala index bfcba2438a0a..76fac09c8c87 100644 --- a/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala +++ b/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala @@ -2,8 +2,6 @@ package dotty.tools package dotc package config -import java.security.AccessControlException - /** For placing a wrapper function around property functions. * Motivated by places like google app engine throwing exceptions * on property lookups. 
@@ -29,6 +27,14 @@ trait WrappedProperties extends PropertiesTrait { object WrappedProperties { object AccessControl extends WrappedProperties { - def wrap[T](body: => T): Option[T] = try Some(body) catch { case _: AccessControlException => None } + def wrap[T](body: => T): Option[T] = + try Some(body) + catch { + // the actual exception we are concerned with is AccessControlException, + // but that's deprecated on JDK 17, so catching its superclass is a convenient + // way to avoid a deprecation warning + case _: SecurityException => + None + } } } diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala b/compiler/src/dotty/tools/dotc/core/Annotations.scala index 4245ae3ef0ec..b8d62210ce26 100644 --- a/compiler/src/dotty/tools/dotc/core/Annotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala @@ -7,6 +7,9 @@ import StdNames._ import dotty.tools.dotc.ast.tpd import scala.util.Try import util.Spans.Span +import printing.{Showable, Printer} +import printing.Texts.Text +import annotation.internal.sharable object Annotations { @@ -14,7 +17,7 @@ object Annotations { if (tree.symbol.isConstructor) tree.symbol.owner else tree.tpe.typeSymbol - abstract class Annotation { + abstract class Annotation extends Showable { def tree(using Context): Tree def symbol(using Context): Symbol = annotClass(tree) @@ -26,7 +29,8 @@ object Annotations { def derivedAnnotation(tree: Tree)(using Context): Annotation = if (tree eq this.tree) this else Annotation(tree) - def arguments(using Context): List[Tree] = ast.tpd.arguments(tree) + /** All arguments to this annotation in a single flat list */ + def arguments(using Context): List[Tree] = ast.tpd.allArguments(tree) def argument(i: Int)(using Context): Option[Tree] = { val args = arguments @@ -44,15 +48,48 @@ object Annotations { /** The tree evaluation has finished. */ def isEvaluated: Boolean = true + /** Normally, type map over all tree nodes of this annotation, but can + * be overridden. 
Returns EmptyAnnotation if the type map produces a range + * type, since ranges cannot be types of trees. + */ + def mapWith(tm: TypeMap)(using Context) = + val args = arguments + if args.isEmpty then this + else + val findDiff = new TreeAccumulator[Type]: + def apply(x: Type, tree: Tree)(using Context): Type = + if tm.isRange(x) then x + else + val tp1 = tm(tree.tpe) + foldOver(if tp1 =:= tree.tpe then x else tp1, tree) + val diff = findDiff(NoType, args) + if tm.isRange(diff) then EmptyAnnotation + else if diff.exists then derivedAnnotation(tm.mapOver(tree)) + else this + + /** Does this annotation refer to a parameter of `tl`? */ + def refersToParamOf(tl: TermLambda)(using Context): Boolean = + val args = arguments + if args.isEmpty then false + else tree.existsSubTree { + case id: Ident => id.tpe match + case TermParamRef(tl1, _) => tl eq tl1 + case _ => false + case _ => false + } + + /** A string representation of the annotation. Overridden in BodyAnnotation. + */ + def toText(printer: Printer): Text = printer.annotText(this) + def ensureCompleted(using Context): Unit = tree def sameAnnotation(that: Annotation)(using Context): Boolean = symbol == that.symbol && tree.sameTree(that.tree) } - case class ConcreteAnnotation(t: Tree) extends Annotation { + case class ConcreteAnnotation(t: Tree) extends Annotation: def tree(using Context): Tree = t - } abstract class LazyAnnotation extends Annotation { protected var mySym: Symbol | (Context ?=> Symbol) @@ -98,6 +135,7 @@ object Annotations { if (tree eq this.tree) this else ConcreteBodyAnnotation(tree) override def arguments(using Context): List[Tree] = Nil override def ensureCompleted(using Context): Unit = () + override def toText(printer: Printer): Text = "@Body" } class ConcreteBodyAnnotation(body: Tree) extends BodyAnnotation { @@ -194,6 +232,8 @@ object Annotations { apply(defn.SourceFileAnnot, Literal(Constant(path))) } + @sharable val EmptyAnnotation = Annotation(EmptyTree) + def ThrowsAnnotation(cls:
ClassSymbol)(using Context): Annotation = { val tref = cls.typeRef Annotation(defn.ThrowsAnnot.typeRef.appliedTo(tref), Ident(tref)) diff --git a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala index 460647cdaf7c..a83f35ccd65a 100644 --- a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala +++ b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala @@ -118,6 +118,7 @@ class CheckRealizable(using Context) { case tp => def isConcrete(tp: Type): Boolean = tp.dealias match { case tp: TypeRef => tp.symbol.isClass + case tp: TypeParamRef => false case tp: TypeProxy => isConcrete(tp.underlying) case tp: AndType => isConcrete(tp.tp1) && isConcrete(tp.tp2) case tp: OrType => isConcrete(tp.tp1) && isConcrete(tp.tp2) diff --git a/compiler/src/dotty/tools/dotc/core/Comments.scala b/compiler/src/dotty/tools/dotc/core/Comments.scala index 33585411fa6e..183b581ce2e7 100644 --- a/compiler/src/dotty/tools/dotc/core/Comments.scala +++ b/compiler/src/dotty/tools/dotc/core/Comments.scala @@ -143,7 +143,7 @@ object Comments { * @author Felix Mulder */ class CommentExpander { - import dotc.config.Printers.dottydoc + import dotc.config.Printers.scaladoc import scala.collection.mutable def expand(sym: Symbol, site: Symbol)(using Context): String = { @@ -203,7 +203,7 @@ object Comments { case None => // SI-8210 - The warning would be false negative when this symbol is a setter if (ownComment.indexOf("@inheritdoc") != -1 && ! 
sym.isSetter) - dottydoc.println(s"${sym.span}: the comment for ${sym} contains @inheritdoc, but no parent comment is available to inherit from.") + scaladoc.println(s"${sym.span}: the comment for ${sym} contains @inheritdoc, but no parent comment is available to inherit from.") ownComment.replace("@inheritdoc", "") case Some(sc) => if (ownComment == "") sc @@ -317,7 +317,7 @@ object Comments { val sectionTextBounds = extractSectionText(parent, section) cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2)) case None => - dottydoc.println(s"""${sym.span}: the """" + getSectionHeader + "\" annotation of the " + sym + + scaladoc.println(s"""${sym.span}: the """" + getSectionHeader + "\" annotation of the " + sym + " comment contains @inheritdoc, but the corresponding section in the parent is not defined.") "" } @@ -384,7 +384,7 @@ object Comments { lookupVariable(vname, site) match { case Some(replacement) => replaceWith(replacement) case None => - dottydoc.println(s"Variable $vname undefined in comment for $sym in $site") + scaladoc.println(s"Variable $vname undefined in comment for $sym in $site") } } } diff --git a/compiler/src/dotty/tools/dotc/core/Constants.scala b/compiler/src/dotty/tools/dotc/core/Constants.scala index 474dc0a8b508..f2e6f6fc489a 100644 --- a/compiler/src/dotty/tools/dotc/core/Constants.scala +++ b/compiler/src/dotty/tools/dotc/core/Constants.scala @@ -1,25 +1,27 @@ -package dotty.tools.dotc +package dotty.tools +package dotc package core import Types._, Symbols._, Contexts._ import printing.Printer import printing.Texts.Text +import Decorators._ object Constants { - final val NoTag = 0 - final val UnitTag = 1 - final val BooleanTag = 2 - final val ByteTag = 3 - final val ShortTag = 4 - final val CharTag = 5 - final val IntTag = 6 - final val LongTag = 7 - final val FloatTag = 8 - final val DoubleTag = 9 - final val StringTag = 10 - final val NullTag = 11 - final val ClazzTag = 12 + inline val NoTag = 0 + inline val 
UnitTag = 1 + inline val BooleanTag = 2 + inline val ByteTag = 3 + inline val ShortTag = 4 + inline val CharTag = 5 + inline val IntTag = 6 + inline val LongTag = 7 + inline val FloatTag = 8 + inline val DoubleTag = 9 + inline val StringTag = 10 + inline val NullTag = 11 + inline val ClazzTag = 12 class Constant(val value: Any, val tag: Int) extends printing.Showable with Product1[Any] { import java.lang.Double.doubleToRawLongBits @@ -161,25 +163,29 @@ object Constants { } case pt => pt } - val target = classBound(pt).typeSymbol - if (target == tpe.typeSymbol) - this - else if ((target == defn.ByteClass) && isByteRange) - Constant(byteValue) - else if (target == defn.ShortClass && isShortRange) - Constant(shortValue) - else if (target == defn.CharClass && isCharRange) - Constant(charValue) - else if (target == defn.IntClass && isIntRange) - Constant(intValue) - else if (target == defn.LongClass && isLongRange) - Constant(longValue) - else if (target == defn.FloatClass && isFloatRange) - Constant(floatValue) - else if (target == defn.DoubleClass && isNumeric) - Constant(doubleValue) - else - null + pt match + case ConstantType(value) if value == this => this + case _: SingletonType => null + case _ => + val target = classBound(pt).typeSymbol + if (target == tpe.typeSymbol) + this + else if ((target == defn.ByteClass) && isByteRange) + Constant(byteValue) + else if (target == defn.ShortClass && isShortRange) + Constant(shortValue) + else if (target == defn.CharClass && isCharRange) + Constant(charValue) + else if (target == defn.IntClass && isIntRange) + Constant(intValue) + else if (target == defn.LongClass && isLongRange) + Constant(longValue) + else if (target == defn.FloatClass && isFloatRange) + Constant(floatValue) + else if (target == defn.DoubleClass && isNumeric) + Constant(doubleValue) + else + null } def stringValue: String = value.toString diff --git a/compiler/src/dotty/tools/dotc/core/Constraint.scala 
b/compiler/src/dotty/tools/dotc/core/Constraint.scala index 81c108bc1241..c35c93886cd8 100644 --- a/compiler/src/dotty/tools/dotc/core/Constraint.scala +++ b/compiler/src/dotty/tools/dotc/core/Constraint.scala @@ -93,13 +93,15 @@ abstract class Constraint extends Showable { /** A constraint that includes the relationship `p1 <: p2`. * `<:` relationships between parameters ("edges") are propagated, but * non-parameter bounds are left alone. + * + * @param direction Must be set to `KeepParam1` or `KeepParam2` when + * `p2 <: p1` is already true depending on which parameter + * the caller intends to keep. This will avoid propagating + * bounds that will be redundant after `p1` and `p2` are + * unified. */ - def addLess(p1: TypeParamRef, p2: TypeParamRef)(using Context): This - - /** A constraint resulting from adding p2 = p1 to this constraint, and at the same - * time transferring all bounds of p2 to p1 - */ - def unify(p1: TypeParamRef, p2: TypeParamRef)(using Context): This + def addLess(p1: TypeParamRef, p2: TypeParamRef, + direction: UnificationDirection = UnificationDirection.NoUnification)(using Context): This /** A new constraint which is derived from this constraint by removing * the type parameter `param` from the domain and replacing all top-level occurrences @@ -118,8 +120,11 @@ abstract class Constraint extends Showable { /** A new constraint with all entries coming from `tl` removed. */ def remove(tl: TypeLambda)(using Context): This - /** A new constraint with entry `tl` renamed to a fresh type lambda */ - def rename(tl: TypeLambda)(using Context): This + /** A new constraint with entry `from` replaced with `to` + * References to `from` from within other constraint bounds are updated to `to`. + * Type variables are left alone. + */ + def subst(from: TypeLambda, to: TypeLambda)(using Context): This /** Gives for each instantiated type var that does not yet have its `inst` field * set, the instance value stored in the constraint. 
Storing instances in constraints @@ -149,13 +154,10 @@ abstract class Constraint extends Showable { */ def uninstVars: collection.Seq[TypeVar] - /** The weakest constraint that subsumes both this constraint and `other`. - * - * @param otherHasErrors If true, handle incompatible constraints by - * returning an approximate constraint, instead of - * failing with an exception + /** Whether `tl` is present in both `this` and `that` but is associated with + * different TypeVars there, meaning that the constraints cannot be merged. */ - def & (other: Constraint, otherHasErrors: Boolean)(using Context): Constraint + def hasConflictingTypeVarsFor(tl: TypeLambda, that: Constraint): Boolean /** Check that no constrained parameter contains itself as a bound */ def checkNonCyclic()(using Context): this.type @@ -169,6 +171,20 @@ abstract class Constraint extends Showable { /** Check that constraint only refers to TypeParamRefs bound by itself */ def checkClosed()(using Context): Unit - /** A string describing the constraint's contents without a header or trailer */ - def contentsToString(using Context): String + /** Check that every typevar in this constraint has as origin a type parameter + * of the type lambda that is associated with the typevar itself. + */ + def checkConsistentVars()(using Context): Unit } + +/** When calling `Constraint#addLess(p1, p2, ...)`, the caller might end up + * unifying one parameter with the other, this enum lets `addLess` know which + * direction the unification will take. + */ +enum UnificationDirection: + /** Neither p1 nor p2 will be instantiated. */ + case NoUnification + /** `p2 := p1`, p1 left uninstantiated. */ + case KeepParam1 + /** `p1 := p2`, p2 left uninstantiated. 
*/ + case KeepParam2 diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index e9ca12e3db9e..835da2176a33 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -10,7 +10,10 @@ import Flags._ import config.Config import config.Printers.typr import reporting.trace +import typer.ProtoTypes.{newTypeVar, representedParamRef} import StdNames.tpnme +import UnificationDirection.* +import NameKinds.AvoidNameKind /** Methods for adding constraints and solving them. * @@ -55,6 +58,24 @@ trait ConstraintHandling { */ protected var comparedTypeLambdas: Set[TypeLambda] = Set.empty + protected var myNecessaryConstraintsOnly = false + /** When collecting the constraints needed for a particular subtyping + * judgment to be true, we sometimes need to approximate the constraint + * set (see `TypeComparer#either` for example). + * + * Normally, this means adding extra constraints which may not be necessary + * for the subtyping judgment to be true, but if this variable is set to true + * we will instead under-approximate and keep only the constraints that must + * always be present for the subtyping judgment to hold. + * + * This is needed for GADT bounds inference to be sound, but it is also used + * when constraining a method call based on its expected type to avoid adding + * constraints that would later prevent us from typechecking method + * arguments, see or-inf.scala and and-inf.scala for examples. 
+ */ + protected def necessaryConstraintsOnly(using Context): Boolean = + ctx.mode.is(Mode.GadtConstraintInference) || myNecessaryConstraintsOnly + def checkReset() = assert(addConstraintInvocations == 0) assert(frozenConstraint == false) @@ -62,13 +83,43 @@ trait ConstraintHandling { assert(homogenizeArgs == false) assert(comparedTypeLambdas == Set.empty) + def nestingLevel(param: TypeParamRef) = constraint.typeVarOfParam(param) match + case tv: TypeVar => tv.nestingLevel + case _ => Int.MaxValue + + /** If `param` is nested deeper than `maxLevel`, try to instantiate it to a + * fresh type variable of level `maxLevel` and return the new variable. + * If this isn't possible, throw a TypeError. + */ + def atLevel(maxLevel: Int, param: TypeParamRef)(using Context): TypeParamRef = + if nestingLevel(param) <= maxLevel then return param + LevelAvoidMap(0, maxLevel)(param) match + case freshVar: TypeVar => freshVar.origin + case _ => throw new TypeError( + i"Could not decrease the nesting level of ${param} from ${nestingLevel(param)} to $maxLevel in $constraint") + def nonParamBounds(param: TypeParamRef)(using Context): TypeBounds = constraint.nonParamBounds(param) + /** The full lower bound of `param` includes both the `nonParamBounds` and the + * params in the constraint known to be `<: param`, except that + * params with a `nestingLevel` higher than `param` will be instantiated + * to a fresh param at a legal level. See the documentation of `TypeVar` + * for details. + */ def fullLowerBound(param: TypeParamRef)(using Context): Type = - constraint.minLower(param).foldLeft(nonParamBounds(param).lo)(_ | _) + val maxLevel = nestingLevel(param) + var loParams = constraint.minLower(param) + if maxLevel != Int.MaxValue then + loParams = loParams.mapConserve(atLevel(maxLevel, _)) + loParams.foldLeft(nonParamBounds(param).lo)(_ | _) + /** The full upper bound of `param`, see the documentation of `fullLowerBounds` above. 
*/ def fullUpperBound(param: TypeParamRef)(using Context): Type = - constraint.minUpper(param).foldLeft(nonParamBounds(param).hi)(_ & _) + val maxLevel = nestingLevel(param) + var hiParams = constraint.minUpper(param) + if maxLevel != Int.MaxValue then + hiParams = hiParams.mapConserve(atLevel(maxLevel, _)) + hiParams.foldLeft(nonParamBounds(param).hi)(_ & _) /** Full bounds of `param`, including other lower/upper params. * @@ -78,22 +129,125 @@ trait ConstraintHandling { def fullBounds(param: TypeParamRef)(using Context): TypeBounds = nonParamBounds(param).derivedTypeBounds(fullLowerBound(param), fullUpperBound(param)) - protected def addOneBound(param: TypeParamRef, bound: Type, isUpper: Boolean)(using Context): Boolean = + /** An approximating map that prevents types nested deeper than maxLevel as + * well as WildcardTypes from leaking into the constraint. + * Note that level-checking is turned off after typer and in uncommitable + * TyperState since these leaks should be safe. + */ + class LevelAvoidMap(topLevelVariance: Int, maxLevel: Int)(using Context) extends TypeOps.AvoidMap: + variance = topLevelVariance + + /** Are we allowed to refer to types of the given `level`? */ + private def levelOK(level: Int): Boolean = + level <= maxLevel || ctx.isAfterTyper || !ctx.typerState.isCommittable + + def toAvoid(tp: NamedType): Boolean = + tp.prefix == NoPrefix && !tp.symbol.isStatic && !levelOK(tp.symbol.nestingLevel) + + /** Return a (possibly fresh) type variable of a level no greater than `maxLevel` which is: + * - lower-bounded by `tp` if variance >= 0 + * - upper-bounded by `tp` if variance <= 0 + * If this isn't possible, return the empty range. 
+ */ + def legalVar(tp: TypeVar): Type = + val oldParam = tp.origin + val nameKind = + if variance > 0 then AvoidNameKind.UpperBound + else if variance < 0 then AvoidNameKind.LowerBound + else AvoidNameKind.BothBounds + + /** If it exists, return the first param in the list created in a previous call to `legalVar(tp)` + * with the appropriate level and variance. + */ + def findParam(params: List[TypeParamRef]): Option[TypeParamRef] = + params.find(p => + nestingLevel(p) <= maxLevel && representedParamRef(p) == oldParam && + (p.paramName.is(AvoidNameKind.BothBounds) || + variance != 0 && p.paramName.is(nameKind))) + + // First, check if we can reuse an existing parameter, this is more than an optimization + // since it avoids an infinite loop in tests/pos/i8900-cycle.scala + findParam(constraint.lower(oldParam)).orElse(findParam(constraint.upper(oldParam))) match + case Some(param) => + constraint.typeVarOfParam(param) + case _ => + // Otherwise, try to return a fresh type variable at `maxLevel` with + // the appropriate constraints. 
+ val name = nameKind(oldParam.paramName.toTermName).toTypeName + val freshVar = newTypeVar(TypeBounds.upper(tp.topType), name, + nestingLevel = maxLevel, represents = oldParam) + val ok = + if variance < 0 then + addLess(freshVar.origin, oldParam) + else if variance > 0 then + addLess(oldParam, freshVar.origin) + else + unify(freshVar.origin, oldParam) + if ok then freshVar else emptyRange + end legalVar + + override def apply(tp: Type): Type = tp match + case tp: TypeVar if !tp.isInstantiated && !levelOK(tp.nestingLevel) => + legalVar(tp) + // TypeParamRef can occur in tl bounds + case tp: TypeParamRef => + constraint.typeVarOfParam(tp) match + case tvar: TypeVar => + apply(tvar) + case _ => super.apply(tp) + case _ => + super.apply(tp) + + override def mapWild(t: WildcardType) = + if ctx.mode.is(Mode.TypevarsMissContext) then super.mapWild(t) + else + val tvar = newTypeVar(apply(t.effectiveBounds).toBounds, nestingLevel = maxLevel) + tvar + end LevelAvoidMap + + /** Approximate `rawBound` if needed to make it a legal bound of `param` by + * avoiding wildcards and types with a level strictly greater than its + * `nestingLevel`. + * + * Note that level-checking must be performed here and cannot be delayed + * until instantiation because if we allow level-incorrect bounds, then we + * might end up reasoning with bad bounds outside of the scope where they are + * defined. This can lead to level-correct but unsound instantiations as + * demonstrated by tests/neg/i8900.scala. + */ + protected def legalBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Type = + // Over-approximate for soundness. + var variance = if isUpper then -1 else 1 + // ...unless we can only infer necessary constraints, in which case we + // flip the variance to under-approximate. 
+ if necessaryConstraintsOnly then variance = -variance + + val approx = new LevelAvoidMap(variance, nestingLevel(param)): + override def legalVar(tp: TypeVar): Type = + // `legalVar` will create a type variable whose bounds depend on + // `variance`, but whether the variance is positive or negative, + // we can still infer necessary constraints since just creating a + // type variable doesn't reduce the set of possible solutions. + // Therefore, we can safely "unflip" the variance flipped above. + // This is necessary for i8900-unflip.scala to typecheck. + val v = if necessaryConstraintsOnly then -this.variance else this.variance + atVariance(v)(super.legalVar(tp)) + approx(rawBound) + end legalBound + + protected def addOneBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Boolean = if !constraint.contains(param) then true - else if !isUpper && param.occursIn(bound) then + else if !isUpper && param.occursIn(rawBound) then // We don't allow recursive lower bounds when defining a type, // so we shouldn't allow them as constraints either. false else + val bound = legalBound(param, rawBound, isUpper) val oldBounds @ TypeBounds(lo, hi) = constraint.nonParamBounds(param) val equalBounds = (if isUpper then lo else hi) eq bound - if equalBounds - && !bound.existsPart(bp => bp.isInstanceOf[WildcardType] || (bp eq param)) - then - // The narrowed bounds are equal and do not contain wildcards, + if equalBounds && !bound.existsPart(_ eq param, StopAt.Static) then + // The narrowed bounds are equal and not recursive, // so we can remove `param` from the constraint. 
- // (Handling wildcards requires choosing a bound, but we don't know which - // bound to choose here, this is handled in `ConstraintHandling#approximation`) constraint = constraint.replace(param, bound) true else @@ -183,18 +337,50 @@ trait ConstraintHandling { def location(using Context) = "" // i"in ${ctx.typerState.stateChainStr}" // use for debugging - /** Make p2 = p1, transfer all bounds of p2 to p1 - * @pre less(p1)(p2) + /** Unify p1 with p2: one parameter will be kept in the constraint, the + * other will be removed and its bounds transferred to the remaining one. + * + * If p1 and p2 have different `nestingLevel`, the parameter with the lowest + * level will be kept and the transferred bounds from the other parameter + * will be adjusted for level-correctness. */ private def unify(p1: TypeParamRef, p2: TypeParamRef)(using Context): Boolean = { constr.println(s"unifying $p1 $p2") - assert(constraint.isLess(p1, p2)) + if !constraint.isLess(p1, p2) then + constraint = constraint.addLess(p1, p2) + + val level1 = nestingLevel(p1) + val level2 = nestingLevel(p2) + val pKept = if level1 <= level2 then p1 else p2 + val pRemoved = if level1 <= level2 then p2 else p1 + + constraint = constraint.addLess(p2, p1, direction = if pKept eq p1 then KeepParam2 else KeepParam1) + + val boundKept = constraint.nonParamBounds(pKept).substParam(pRemoved, pKept) + var boundRemoved = constraint.nonParamBounds(pRemoved).substParam(pRemoved, pKept) + + if level1 != level2 then + boundRemoved = LevelAvoidMap(-1, math.min(level1, level2))(boundRemoved) + val TypeBounds(lo, hi) = boundRemoved + // After avoidance, the interval might be empty, e.g. 
in + // tests/pos/i8900-promote.scala: + // >: x.type <: Singleton + // becomes: + // >: Int <: Singleton + // In that case, we can still get a legal constraint + // by replacing the lower-bound to get: + // >: Int & Singleton <: Singleton + if !isSub(lo, hi) then + boundRemoved = TypeBounds(lo & hi, hi) + val down = constraint.exclusiveLower(p2, p1) val up = constraint.exclusiveUpper(p1, p2) - constraint = constraint.unify(p1, p2) - val bounds = constraint.nonParamBounds(p1) - val lo = bounds.lo - val hi = bounds.hi + + val newBounds = (boundKept & boundRemoved).bounds + constraint = constraint.updateEntry(pKept, newBounds).replace(pRemoved, pKept) + + val lo = newBounds.lo + val hi = newBounds.hi isSub(lo, hi) && down.forall(addOneBound(_, hi, isUpper = true)) && up.forall(addOneBound(_, lo, isUpper = false)) @@ -245,81 +431,12 @@ trait ConstraintHandling { * @pre `param` is in the constraint's domain. */ final def approximation(param: TypeParamRef, fromBelow: Boolean)(using Context): Type = - - /** Substitute wildcards with fresh TypeParamRefs, to be compared with - * other bound, so that they can be instantiated. - */ - object substWildcards extends TypeMap: - override def stopAtStatic = true - - var trackedPolis: List[PolyType] = Nil - def apply(tp: Type) = tp match - case tp: WildcardType => - val poly = PolyType(tpnme.EMPTY :: Nil)(pt => tp.bounds :: Nil, pt => defn.AnyType) - trackedPolis = poly :: trackedPolis - poly.paramRefs.head - case _ => - mapOver(tp) - end substWildcards - - /** Replace TypeParamRefs substituted for wildcards by `substWildCards` - * and any remaining wildcards by a safe approximation - */ - val replaceWildcards = new TypeMap: - override def stopAtStatic = true - - /** Try to instantiate a wildcard or TypeParamRef representing a wildcard - * to a type that is known to conform to it. - * This means: - * If fromBelow is true, we minimize the type overall - * Hence, if variance < 0, pick the maximal safe type: bounds.lo - * (i.e. 
the whole bounds range is over the type). - * If variance > 0, pick the minimal safe type: bounds.hi - * (i.e. the whole bounds range is under the type). - * If variance == 0, pick bounds.lo anyway (this is arbitrary but in line with - * the principle that we pick the smaller type when in doubt). - * If fromBelow is false, we maximize the type overall and reverse the bounds - * If variance != 0. For variance == 0, we still minimize. - * In summary we pick the bound given by this table: - * - * variance | -1 0 1 - * ------------------------ - * from below | lo lo hi - * from above | hi lo lo - */ - def pickOneBound(bounds: TypeBounds) = - if variance == 0 || fromBelow == (variance < 0) then bounds.lo - else bounds.hi - - def apply(tp: Type) = mapOver { - tp match - case tp: WildcardType => - pickOneBound(tp.bounds) - case tp: TypeParamRef if substWildcards.trackedPolis.contains(tp.binder) => - pickOneBound(fullBounds(tp)) - case _ => tp - } - end replaceWildcards - constraint.entry(param) match case entry: TypeBounds => + val maxLevel = nestingLevel(param) val useLowerBound = fromBelow || param.occursIn(entry.hi) - val rawBound = if useLowerBound then fullLowerBound(param) else fullUpperBound(param) - val bound = substWildcards(rawBound) - val inst = - if bound eq rawBound then bound - else - // Get rid of wildcards by mapping them to fresh TypeParamRefs - // with constraints derived from comparing both bounds, and then - // instantiating. See pos/i10161.scala for a test where this matters. 
- val saved = constraint - try - for poly <- substWildcards.trackedPolis do addToConstraint(poly, Nil) - if useLowerBound then bound <:< fullUpperBound(param) - else fullLowerBound(param) <:< bound - replaceWildcards(bound) - finally constraint = saved - typr.println(s"approx ${param.show}, from below = $fromBelow, bound = ${bound.show}, inst = ${inst.show}") + val inst = if useLowerBound then fullLowerBound(param) else fullUpperBound(param) + typr.println(s"approx ${param.show}, from below = $fromBelow, inst = ${inst.show}") inst case inst => assert(inst.exists, i"param = $param\nconstraint = $constraint") @@ -362,8 +479,15 @@ trait ConstraintHandling { dropped = dropped.tail recur(tp) + val saved = ctx.typerState.snapshot() val tpw = recur(tp) - if (tpw eq tp) || dropped.forall(_ frozen_<:< tpw) then tp else tpw + if (tpw eq tp) || dropped.forall(_ frozen_<:< tpw) then + // Rollback any constraint change that would lead to `tp` no longer + // being a valid solution. + ctx.typerState.resetTo(saved) + tp + else + tpw end dropTransparentTraits /** If `tp` is an applied match type alias which is also an unreducible application @@ -466,9 +590,10 @@ trait ConstraintHandling { // If `c2` has, compared to `pre`, instantiated a param and we iterated over params of `c2`, // we could miss that param being instantiated to an incompatible type in `c1`. 
pre.forallParams(p => - c1.contains(p) && - c2.upper(p).forall(c1.isLess(p, _)) && - isSubTypeWhenFrozen(c1.nonParamBounds(p), c2.nonParamBounds(p))) + c1.entry(p).exists + && c2.upper(p).forall(c1.isLess(p, _)) + && isSubTypeWhenFrozen(c1.nonParamBounds(p), c2.nonParamBounds(p)) + ) finally constraint = saved } diff --git a/compiler/src/dotty/tools/dotc/core/ContextOps.scala b/compiler/src/dotty/tools/dotc/core/ContextOps.scala index 34956d9294c9..594f103b8481 100644 --- a/compiler/src/dotty/tools/dotc/core/ContextOps.scala +++ b/compiler/src/dotty/tools/dotc/core/ContextOps.scala @@ -2,8 +2,8 @@ package dotty.tools.dotc package core import Contexts._, Symbols._, Types._, Flags._, Scopes._, Decorators._, NameOps._ -import Denotations._ -import SymDenotations.LazyType, Names.Name, StdNames.nme +import Denotations._, SymDenotations._ +import Names.Name, StdNames.nme import ast.untpd /** Extension methods for contexts where we want to keep the ctx. syntax */ @@ -34,7 +34,8 @@ object ContextOps: if (elem.name == name) return elem.sym.denot // return self } val pre = ctx.owner.thisType - pre.findMember(name, pre, required, excluded) + if ctx.isJava then javaFindMember(name, pre, required, excluded) + else pre.findMember(name, pre, required, excluded) } else // we are in the outermost context belonging to a class; self is invisible here. See inClassContext. ctx.owner.findMember(name, ctx.owner.thisType, required, excluded) @@ -42,6 +43,42 @@ object ContextOps: ctx.scope.denotsNamed(name).filterWithFlags(required, excluded).toDenot(NoPrefix) } + final def javaFindMember(name: Name, pre: Type, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags): Denotation = + assert(ctx.isJava) + inContext(ctx) { + + val preSym = pre.typeSymbol + + // 1. Try to search in current type and parents. + val directSearch = pre.findMember(name, pre, required, excluded) + + // 2. Try to search in companion class if current is an object. 
+ def searchCompanionClass = if preSym.is(Flags.Module) then + preSym.companionClass.thisType.findMember(name, pre, required, excluded) + else NoDenotation + + // 3. Try to search in companion objects of super classes. + // In Java code, static inner classes, which we model as members of the companion object, + // can be referenced from an ident in a subclass or by a selection prefixed by the subclass. + def searchSuperCompanionObjects = + val toSearch = if preSym.is(Flags.Module) then + if preSym.companionClass.exists then + preSym.companionClass.asClass.baseClasses + else Nil + else + preSym.asClass.baseClasses + + toSearch.iterator.map { bc => + val pre1 = bc.companionModule.namedType + pre1.findMember(name, pre1, required, excluded) + }.find(_.exists).getOrElse(NoDenotation) + + if preSym.isClass then + directSearch orElse searchCompanionClass orElse searchSuperCompanionObjects + else + directSearch + } + /** A fresh local context with given tree and owner. * Owner might not exist (can happen for self valdefs), in which case * no owner is set in result context diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 60f675ff0b0d..e9fbd6065261 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -13,7 +13,6 @@ import Scopes._ import Uniques._ import ast.Trees._ import ast.untpd -import Flags.GivenOrImplicit import util.{NoSource, SimpleIdentityMap, SourceFile, HashSet, ReusableInstance} import typer.{Implicits, ImportInfo, Inliner, SearchHistory, SearchRoot, TypeAssigner, Typer, Nullables} import Nullables._ @@ -25,7 +24,7 @@ import io.{AbstractFile, NoAbstractFile, PlainFile, Path} import scala.io.Codec import collection.mutable import printing._ -import config.{JavaPlatform, SJSPlatform, Platform, ScalaSettings} +import config.{JavaPlatform, SJSPlatform, Platform, ScalaSettings, ScalaRelease} import classfile.ReusableDataReader 
import StdNames.nme @@ -39,6 +38,9 @@ import xsbti.AnalysisCallback import plugins._ import java.util.concurrent.atomic.AtomicInteger import java.nio.file.InvalidPathException +import dotty.tools.tasty.TastyFormat +import dotty.tools.dotc.config.{ NoScalaVersion, SpecificScalaVersion, AnyScalaVersion, ScalaBuild } +import dotty.tools.dotc.core.tasty.TastyVersion object Contexts { @@ -271,6 +273,10 @@ object Contexts { if owner != null && owner.isClass then owner.asClass.unforcedDecls else scope + def nestingLevel: Int = + val sc = effectiveScope + if sc != null then sc.nestingLevel else 0 + /** Sourcefile corresponding to given abstract file, memoized */ def getSource(file: AbstractFile, codec: => Codec = Codec(settings.encoding.value)) = { util.Stats.record("Context.getSource") @@ -388,8 +394,9 @@ object Contexts { // to be used as a default value. compilationUnit != null && compilationUnit.isJava - /** Is current phase after FrontEnd? */ + /** Is current phase after TyperPhase? */ final def isAfterTyper = base.isAfterTyper(phase) + final def isTyper = base.isTyper(phase) /** Is this a context for the members of a class definition? 
*/ def isClassDefContext: Boolean = @@ -476,7 +483,22 @@ object Contexts { /** A new context that summarizes an import statement */ def importContext(imp: Import[?], sym: Symbol): FreshContext = - fresh.setImportInfo(ImportInfo(sym, imp.selectors, imp.expr)) + fresh.setImportInfo(ImportInfo(sym, imp.selectors, imp.expr)) + + def scalaRelease: ScalaRelease = + val releaseName = base.settings.YscalaRelease.value + if releaseName.nonEmpty then ScalaRelease.parse(releaseName).get else ScalaRelease.latest + + def tastyVersion: TastyVersion = + import math.Ordered.orderingToOrdered + val latestRelease = ScalaRelease.latest + val specifiedRelease = scalaRelease + if specifiedRelease < latestRelease then + // This is needed to make -Yscala-release a no-op when set to the latest release for unstable versions of the compiler + // (which might have the tasty format version numbers set to higher values before they're decreased during a release) + TastyVersion.fromStableScalaRelease(specifiedRelease.majorVersion, specifiedRelease.minorVersion) + else + TastyVersion.compilerVersion /** Is the debug option set? 
*/ def debug: Boolean = base.settings.Ydebug.value @@ -526,6 +548,12 @@ object Contexts { final def withOwner(owner: Symbol): Context = if (owner ne this.owner) fresh.setOwner(owner) else this + final def withTyperState(typerState: TyperState): Context = + if typerState ne this.typerState then fresh.setTyperState(typerState) else this + + final def withUncommittedTyperState: Context = + withTyperState(typerState.uncommittedAncestor) + final def withProperty[T](key: Key[T], value: Option[T]): Context = if (property(key) == value) this else value match { @@ -555,7 +583,7 @@ object Contexts { def platform: Platform = base.platform def pendingUnderlying: util.HashSet[Type] = base.pendingUnderlying def uniqueNamedTypes: Uniques.NamedTypeUniques = base.uniqueNamedTypes - def uniques: util.HashSet[Type] = base.uniques + def uniques: util.WeakHashSet[Type] = base.uniques def initialize()(using Context): Unit = base.initialize() } @@ -595,8 +623,8 @@ object Contexts { this.scope = newScope this def setTyperState(typerState: TyperState): this.type = { this.typerState = typerState; this } - def setNewTyperState(): this.type = setTyperState(typerState.fresh().setCommittable(true)) - def setExploreTyperState(): this.type = setTyperState(typerState.fresh().setCommittable(false)) + def setNewTyperState(): this.type = setTyperState(typerState.fresh(committable = true)) + def setExploreTyperState(): this.type = setTyperState(typerState.fresh(committable = false)) def setReporter(reporter: Reporter): this.type = setTyperState(typerState.fresh().setReporter(reporter)) def setTyper(typer: Typer): this.type = { this.scope = typer.scope; setTypeAssigner(typer) } def setGadt(gadt: GadtConstraint): this.type = @@ -922,6 +950,20 @@ object Contexts { private[core] var denotTransformers: Array[DenotTransformer] = _ + /** Flag to suppress inlining, set after overflow */ + private[dotc] var stopInlining: Boolean = false + + /** A variable that records that some error was reported in a globally 
committable context. + * The error will not necessarlily be emitted, since it could still be that + * the enclosing context will be aborted. The variable is used as a smoke test + * to turn off assertions that might be wrong if the program is erroneous. To + * just test for `ctx.reporter.errorsReported` is not always enough, since it + * could be that the context in which the assertion is tested is a completer context + * that's different from the context where the error was reported. See i13218.scala + * for a test. + */ + private[dotc] var errorsToBeReported = false + // Reporters state private[dotc] var indent: Int = 0 @@ -940,6 +982,8 @@ object Contexts { private[core] val reusableDataReader = ReusableInstance(new ReusableDataReader()) + private[dotc] var wConfCache: (List[String], WConf) = _ + def sharedCharArray(len: Int): Array[Char] = while len > charArray.length do charArray = new Array[Char](charArray.length * 2) @@ -951,6 +995,7 @@ object Contexts { uniqueNamedTypes.clear() emptyTypeBounds = null emptyWildcardBounds = null + errorsToBeReported = false errorTypeMsg.clear() sources.clear() files.clear() diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala index 809b76ba3118..3ea05af0292a 100644 --- a/compiler/src/dotty/tools/dotc/core/Decorators.scala +++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala @@ -74,7 +74,7 @@ object Decorators { NoSymbol } - final val MaxFilterRecursions = 10 + inline val MaxFilterRecursions = 10 /** Implements filterConserve, zipWithConserve methods * on lists that avoid duplication of list nodes where feasible. @@ -162,16 +162,29 @@ object Decorators { * `xs` to themselves. 
*/ def mapWithIndexConserve[U <: T](f: (T, Int) => U): List[U] = - def recur(xs: List[T], idx: Int): List[U] = - if xs.isEmpty then Nil - else - val x1 = f(xs.head, idx) - val xs1 = recur(xs.tail, idx + 1) - if (x1.asInstanceOf[AnyRef] eq xs.head.asInstanceOf[AnyRef]) - && (xs1 eq xs.tail) - then xs.asInstanceOf[List[U]] - else x1 :: xs1 - recur(xs, 0) + + @tailrec + def addAll(buf: ListBuffer[T], from: List[T], until: List[T]): ListBuffer[T] = + if from eq until then buf else addAll(buf += from.head, from.tail, until) + + @tailrec + def loopWithBuffer(buf: ListBuffer[U], explore: List[T], idx: Int): List[U] = explore match + case Nil => buf.toList + case t :: rest => loopWithBuffer(buf += f(t, idx), rest, idx + 1) + + @tailrec + def loop(keep: List[T], explore: List[T], idx: Int): List[U] = explore match + case Nil => keep.asInstanceOf[List[U]] + case t :: rest => + val u = f(t, idx) + if u.asInstanceOf[AnyRef] eq t.asInstanceOf[AnyRef] then + loop(keep, rest, idx + 1) + else + val buf = addAll(new ListBuffer[T], keep, explore).asInstanceOf[ListBuffer[U]] + loopWithBuffer(buf += u, rest, idx + 1) + + loop(xs, xs, 0) + end mapWithIndexConserve final def hasSameLengthAs[U](ys: List[U]): Boolean = { @tailrec def loop(xs: List[T], ys: List[U]): Boolean = @@ -278,4 +291,3 @@ object Decorators { def binarySearch(x: T): Int = java.util.Arrays.binarySearch(arr.asInstanceOf[Array[Object]], x) } - diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 70eebc750521..794119cd7a79 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -49,7 +49,10 @@ class Definitions { private def newPermanentClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, infoFn: ClassSymbol => Type) = newClassSymbol(owner, name, flags | Permanent | NoInits | Open, infoFn) - private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: 
FlagSet, parents: List[TypeRef], decls: Scope = newScope) = + private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef]): ClassSymbol = + enterCompleteClassSymbol(owner, name, flags, parents, newScope(owner.nestingLevel + 1)) + + private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef], decls: Scope) = newCompleteClassSymbol(owner, name, flags | Permanent | NoInits | Open, parents, decls).entered private def enterTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = @@ -133,8 +136,7 @@ class Definitions { ClassInfo(ScalaPackageClass.thisType, cls, ObjectType :: Nil, decls) } } - val flags0 = Trait | NoInits - val flags = if (name.isContextFunction) flags0 | Final else flags0 + val flags = Trait | NoInits newPermanentClassSymbol(ScalaPackageClass, name, flags, completer) } @@ -234,6 +236,7 @@ class Definitions { @tu lazy val Compiletime_constValue : Symbol = CompiletimePackageClass.requiredMethod("constValue") @tu lazy val Compiletime_constValueOpt: Symbol = CompiletimePackageClass.requiredMethod("constValueOpt") @tu lazy val Compiletime_summonFrom : Symbol = CompiletimePackageClass.requiredMethod("summonFrom") + @tu lazy val Compiletime_summonInline : Symbol = CompiletimePackageClass.requiredMethod("summonInline") @tu lazy val CompiletimeTestingPackage: Symbol = requiredPackage("scala.compiletime.testing") @tu lazy val CompiletimeTesting_typeChecks: Symbol = CompiletimeTestingPackage.requiredMethod("typeChecks") @tu lazy val CompiletimeTesting_typeCheckErrors: Symbol = CompiletimeTestingPackage.requiredMethod("typeCheckErrors") @@ -246,6 +249,9 @@ class Definitions { @tu lazy val CompiletimeOpsPackage: Symbol = requiredPackage("scala.compiletime.ops") @tu lazy val CompiletimeOpsAnyModuleClass: Symbol = requiredModule("scala.compiletime.ops.any").moduleClass @tu lazy val CompiletimeOpsIntModuleClass: Symbol = 
requiredModule("scala.compiletime.ops.int").moduleClass + @tu lazy val CompiletimeOpsLongModuleClass: Symbol = requiredModule("scala.compiletime.ops.long").moduleClass + @tu lazy val CompiletimeOpsFloatModuleClass: Symbol = requiredModule("scala.compiletime.ops.float").moduleClass + @tu lazy val CompiletimeOpsDoubleModuleClass: Symbol = requiredModule("scala.compiletime.ops.double").moduleClass @tu lazy val CompiletimeOpsStringModuleClass: Symbol = requiredModule("scala.compiletime.ops.string").moduleClass @tu lazy val CompiletimeOpsBooleanModuleClass: Symbol = requiredModule("scala.compiletime.ops.boolean").moduleClass @@ -376,18 +382,18 @@ class Definitions { * void meth8(T... args) {} * * // B.scala - * meth7(1) // OK - * meth8(1) // OK + * meth7(1) // OK (creates a reference array) + * meth8(1) // OK (creates a primitive array and copies it into a reference array at Erasure) * val ai = Array[Int](1) - * meth7(ai: _*) // OK (will copy the array) - * meth8(ai: _*) // OK (will copy the array) + * meth7(ai: _*) // OK (will copy the array at Erasure) + * meth8(ai: _*) // OK (will copy the array at Erasure) * * Java repeated arguments are erased to arrays, so it would be safe to treat * them in the same way: add an `& Object` to the parameter type to disallow * passing primitives, but that would be very inconvenient as it is common to * want to pass a primitive to an Object repeated argument (e.g. * `String.format("foo: %d", 1)`). 
So instead we type them _without_ adding the - * `& Object` and let `ElimRepeated` take care of doing any necessary adaptation + * `& Object` and let `ElimRepeated` and `Erasure` take care of doing any necessary adaptation * (note that adapting a primitive array to a reference array requires * copying the whole array, so this transformation only preserves semantics * if the callee does not try to mutate the varargs array which is a reasonable @@ -430,7 +436,7 @@ class Definitions { Any_toString, Any_##, Any_getClass, Any_isInstanceOf, Any_typeTest, Object_eq, Object_ne) @tu lazy val AnyKindClass: ClassSymbol = { - val cls = newCompleteClassSymbol(ScalaPackageClass, tpnme.AnyKind, AbstractFinal | Permanent, Nil) + val cls = newCompleteClassSymbol(ScalaPackageClass, tpnme.AnyKind, AbstractFinal | Permanent, Nil, newScope(0)) if (!ctx.settings.YnoKindPolymorphism.value) // Enable kind-polymorphism by exposing scala.AnyKind cls.entered @@ -441,12 +447,6 @@ class Definitions { @tu lazy val andType: TypeSymbol = enterBinaryAlias(tpnme.AND, AndType(_, _)) @tu lazy val orType: TypeSymbol = enterBinaryAlias(tpnme.OR, OrType(_, _, soft = false)) - /** Marker method to indicate an argument to a call-by-name parameter. 
- * Created by byNameClosures and elimByName, eliminated by Erasure, - */ - @tu lazy val cbnArg: TermSymbol = enterPolyMethod(OpsPackageClass, nme.cbnArg, 1, - pt => MethodType(List(FunctionOf(Nil, pt.paramRefs(0))), pt.paramRefs(0))) - /** Method representing a throw */ @tu lazy val throwMethod: TermSymbol = enterMethod(OpsPackageClass, nme.THROWkw, MethodType(List(ThrowableType), NothingType)) @@ -455,8 +455,9 @@ class Definitions { ScalaPackageClass, tpnme.Nothing, AbstractFinal, List(AnyType)) def NothingType: TypeRef = NothingClass.typeRef @tu lazy val NullClass: ClassSymbol = { - val parent = if ctx.explicitNulls then AnyType else ObjectType - enterCompleteClassSymbol(ScalaPackageClass, tpnme.Null, AbstractFinal, parent :: Nil) + // When explicit-nulls is enabled, Null becomes a direct subtype of Any and Matchable + val parents = if ctx.explicitNulls then AnyType :: MatchableType :: Nil else ObjectType :: Nil + enterCompleteClassSymbol(ScalaPackageClass, tpnme.Null, AbstractFinal, parents) } def NullType: TypeRef = NullClass.typeRef @@ -656,8 +657,11 @@ class Definitions { // in scalac modified to have Any as parent - @tu lazy val ThrowableType: TypeRef = requiredClassRef("java.lang.Throwable") - def ThrowableClass(using Context): ClassSymbol = ThrowableType.symbol.asClass + @tu lazy val ThrowableType: TypeRef = requiredClassRef("java.lang.Throwable") + def ThrowableClass(using Context): ClassSymbol = ThrowableType.symbol.asClass + @tu lazy val ExceptionClass: ClassSymbol = requiredClass("java.lang.Exception") + @tu lazy val RuntimeExceptionClass: ClassSymbol = requiredClass("java.lang.RuntimeException") + @tu lazy val SerializableType: TypeRef = JavaSerializableClass.typeRef def SerializableClass(using Context): ClassSymbol = SerializableType.symbol.asClass @@ -729,6 +733,7 @@ class Definitions { @tu lazy val NoneModule: Symbol = requiredModule("scala.None") @tu lazy val EnumClass: ClassSymbol = requiredClass("scala.reflect.Enum") + @tu lazy val 
Enum_ordinal: Symbol = EnumClass.requiredMethod(nme.ordinal) @tu lazy val EnumValueSerializationProxyClass: ClassSymbol = requiredClass("scala.runtime.EnumValueSerializationProxy") @tu lazy val EnumValueSerializationProxyConstructor: TermSymbol = @@ -761,6 +766,13 @@ class Definitions { @tu lazy val LanguageDeprecatedModule: Symbol = requiredModule("scala.language.deprecated") @tu lazy val NonLocalReturnControlClass: ClassSymbol = requiredClass("scala.runtime.NonLocalReturnControl") @tu lazy val SelectableClass: ClassSymbol = requiredClass("scala.Selectable") + @tu lazy val WithoutPreciseParameterTypesClass: Symbol = requiredClass("scala.Selectable.WithoutPreciseParameterTypes") + + @tu lazy val ManifestClass: ClassSymbol = requiredClass("scala.reflect.Manifest") + @tu lazy val ManifestFactoryModule: Symbol = requiredModule("scala.reflect.ManifestFactory") + @tu lazy val ClassManifestFactoryModule: Symbol = requiredModule("scala.reflect.ClassManifestFactory") + @tu lazy val OptManifestClass: ClassSymbol = requiredClass("scala.reflect.OptManifest") + @tu lazy val NoManifestModule: Symbol = requiredModule("scala.reflect.NoManifest") @tu lazy val ReflectPackageClass: Symbol = requiredPackage("scala.reflect.package").moduleClass @tu lazy val ClassTagClass: ClassSymbol = requiredClass("scala.reflect.ClassTag") @@ -822,6 +834,9 @@ class Definitions { val methodName = if CanEqualClass.name == tpnme.Eql then nme.eqlAny else nme.canEqualAny CanEqualClass.companionModule.requiredMethod(methodName) + @tu lazy val CanThrowClass: ClassSymbol = requiredClass("scala.CanThrow") + @tu lazy val throwsAlias: Symbol = ScalaRuntimePackageVal.requiredType(tpnme.THROWS) + @tu lazy val TypeBoxClass: ClassSymbol = requiredClass("scala.runtime.TypeBox") @tu lazy val TypeBox_CAP: TypeSymbol = TypeBoxClass.requiredType(tpnme.CAP) @@ -886,13 +901,16 @@ class Definitions { @tu lazy val BodyAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Body") @tu lazy val ChildAnnot: ClassSymbol 
= requiredClass("scala.annotation.internal.Child") @tu lazy val ContextResultCountAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ContextResultCount") + @tu lazy val ProvisionalSuperClassAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ProvisionalSuperClass") @tu lazy val DeprecatedAnnot: ClassSymbol = requiredClass("scala.deprecated") @tu lazy val ImplicitAmbiguousAnnot: ClassSymbol = requiredClass("scala.annotation.implicitAmbiguous") @tu lazy val ImplicitNotFoundAnnot: ClassSymbol = requiredClass("scala.annotation.implicitNotFound") @tu lazy val InlineParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.InlineParam") + @tu lazy val ErasedParamAnnot: ClassSymbol = requiredClass("scala.annotation.internal.ErasedParam") @tu lazy val InvariantBetweenAnnot: ClassSymbol = requiredClass("scala.annotation.internal.InvariantBetween") @tu lazy val MainAnnot: ClassSymbol = requiredClass("scala.main") @tu lazy val MigrationAnnot: ClassSymbol = requiredClass("scala.annotation.migration") + @tu lazy val NowarnAnnot: ClassSymbol = requiredClass("scala.annotation.nowarn") @tu lazy val TransparentTraitAnnot: ClassSymbol = requiredClass("scala.annotation.transparentTrait") @tu lazy val NativeAnnot: ClassSymbol = requiredClass("scala.native") @tu lazy val RepeatedAnnot: ClassSymbol = requiredClass("scala.annotation.internal.Repeated") @@ -909,6 +927,7 @@ class Definitions { @tu lazy val ConstructorOnlyAnnot: ClassSymbol = requiredClass("scala.annotation.constructorOnly") @tu lazy val CompileTimeOnlyAnnot: ClassSymbol = requiredClass("scala.annotation.compileTimeOnly") @tu lazy val SwitchAnnot: ClassSymbol = requiredClass("scala.annotation.switch") + @tu lazy val ExperimentalAnnot: ClassSymbol = requiredClass("scala.annotation.experimental") @tu lazy val ThrowsAnnot: ClassSymbol = requiredClass("scala.throws") @tu lazy val TransientAnnot: ClassSymbol = requiredClass("scala.transient") @tu lazy val UncheckedAnnot: ClassSymbol = 
requiredClass("scala.unchecked") @@ -923,6 +942,7 @@ class Definitions { @tu lazy val FunctionalInterfaceAnnot: ClassSymbol = requiredClass("java.lang.FunctionalInterface") @tu lazy val TargetNameAnnot: ClassSymbol = requiredClass("scala.annotation.targetName") @tu lazy val VarargsAnnot: ClassSymbol = requiredClass("scala.annotation.varargs") + @tu lazy val SinceAnnot: ClassSymbol = requiredClass("scala.annotation.since") @tu lazy val JavaRepeatableAnnot: ClassSymbol = requiredClass("java.lang.annotation.Repeatable") @@ -1007,6 +1027,7 @@ class Definitions { else ArrayType.appliedTo(elem :: Nil) def unapply(tp: Type)(using Context): Option[Type] = tp.dealias match { case AppliedType(at, arg :: Nil) if at.isRef(ArrayType.symbol) => Some(arg) + case JavaArrayType(tp) if ctx.erasedTypes => Some(tp) case _ => None } } @@ -1030,13 +1051,13 @@ class Definitions { /** An extractor for multi-dimensional arrays. * Note that this will also extract the high bound if an - * element type is a wildcard. E.g. + * element type is a wildcard upper-bounded by an array. E.g. * * Array[? <: Array[? 
<: Number]] * * would match * - * MultiArrayOf(, 2) + * MultiArrayOf(, 2) */ object MultiArrayOf { def apply(elem: Type, ndims: Int)(using Context): Type = @@ -1044,7 +1065,8 @@ class Definitions { def unapply(tp: Type)(using Context): Option[(Type, Int)] = tp match { case ArrayOf(elemtp) => def recur(elemtp: Type): Option[(Type, Int)] = elemtp.dealias match { - case TypeBounds(lo, hi) => recur(hi) + case tp @ TypeBounds(lo, hi @ MultiArrayOf(finalElemTp, n)) => + Some(finalElemTp, n) case MultiArrayOf(finalElemTp, n) => Some(finalElemTp, n + 1) case _ => Some(elemtp, 1) } @@ -1054,22 +1076,61 @@ class Definitions { } } + object ByNameFunction: + def apply(tp: Type)(using Context): Type = + defn.ContextFunction0.typeRef.appliedTo(tp :: Nil) + def unapply(tp: Type)(using Context): Option[Type] = tp match + case tp @ AppliedType(tycon, arg :: Nil) if defn.isByNameFunctionClass(tycon.typeSymbol) => + Some(arg) + case tp @ AnnotatedType(parent, _) => + unapply(parent) + case _ => + None + + final def isByNameFunctionClass(sym: Symbol): Boolean = + sym eq ContextFunction0 + + def isByNameFunction(tp: Type)(using Context): Boolean = tp match + case ByNameFunction(_) => true + case _ => false + final def isCompiletime_S(sym: Symbol)(using Context): Boolean = sym.name == tpnme.S && sym.owner == CompiletimeOpsIntModuleClass - private val compiletimePackageAnyTypes: Set[Name] = Set(tpnme.Equals, tpnme.NotEquals) - private val compiletimePackageIntTypes: Set[Name] = Set( + private val compiletimePackageAnyTypes: Set[Name] = Set( + tpnme.Equals, tpnme.NotEquals, tpnme.IsConst, tpnme.ToString + ) + private val compiletimePackageNumericTypes: Set[Name] = Set( tpnme.Plus, tpnme.Minus, tpnme.Times, tpnme.Div, tpnme.Mod, tpnme.Lt, tpnme.Gt, tpnme.Ge, tpnme.Le, - tpnme.Abs, tpnme.Negate, tpnme.Min, tpnme.Max, tpnme.ToString, + tpnme.Abs, tpnme.Negate, tpnme.Min, tpnme.Max + ) + private val compiletimePackageIntTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( + 
tpnme.ToString, // ToString is moved to ops.any and deprecated for ops.int + tpnme.NumberOfLeadingZeros, tpnme.ToLong, tpnme.ToFloat, tpnme.ToDouble, + tpnme.Xor, tpnme.BitwiseAnd, tpnme.BitwiseOr, tpnme.ASR, tpnme.LSL, tpnme.LSR + ) + private val compiletimePackageLongTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( + tpnme.NumberOfLeadingZeros, tpnme.ToInt, tpnme.ToFloat, tpnme.ToDouble, tpnme.Xor, tpnme.BitwiseAnd, tpnme.BitwiseOr, tpnme.ASR, tpnme.LSL, tpnme.LSR ) + private val compiletimePackageFloatTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( + tpnme.ToInt, tpnme.ToLong, tpnme.ToDouble + ) + private val compiletimePackageDoubleTypes: Set[Name] = compiletimePackageNumericTypes ++ Set[Name]( + tpnme.ToInt, tpnme.ToLong, tpnme.ToFloat + ) private val compiletimePackageBooleanTypes: Set[Name] = Set(tpnme.Not, tpnme.Xor, tpnme.And, tpnme.Or) - private val compiletimePackageStringTypes: Set[Name] = Set(tpnme.Plus) + private val compiletimePackageStringTypes: Set[Name] = Set( + tpnme.Plus, tpnme.Length, tpnme.Substring, tpnme.Matches + ) private val compiletimePackageOpTypes: Set[Name] = Set(tpnme.S) ++ compiletimePackageAnyTypes ++ compiletimePackageIntTypes + ++ compiletimePackageLongTypes + ++ compiletimePackageFloatTypes + ++ compiletimePackageDoubleTypes ++ compiletimePackageBooleanTypes ++ compiletimePackageStringTypes @@ -1079,6 +1140,9 @@ class Definitions { isCompiletime_S(sym) || sym.owner == CompiletimeOpsAnyModuleClass && compiletimePackageAnyTypes.contains(sym.name) || sym.owner == CompiletimeOpsIntModuleClass && compiletimePackageIntTypes.contains(sym.name) + || sym.owner == CompiletimeOpsLongModuleClass && compiletimePackageLongTypes.contains(sym.name) + || sym.owner == CompiletimeOpsFloatModuleClass && compiletimePackageFloatTypes.contains(sym.name) + || sym.owner == CompiletimeOpsDoubleModuleClass && compiletimePackageDoubleTypes.contains(sym.name) || sym.owner == CompiletimeOpsBooleanModuleClass && 
compiletimePackageBooleanTypes.contains(sym.name) || sym.owner == CompiletimeOpsStringModuleClass && compiletimePackageStringTypes.contains(sym.name) ) @@ -1192,10 +1256,16 @@ class Definitions { @tu lazy val topClasses: Set[Symbol] = Set(AnyClass, MatchableClass, ObjectClass, AnyValClass) + @tu lazy val untestableClasses: Set[Symbol] = Set(NothingClass, NullClass, SingletonClass) + @tu lazy val AbstractFunctionType: Array[TypeRef] = mkArityArray("scala.runtime.AbstractFunction", MaxImplementedFunctionArity, 0) val AbstractFunctionClassPerRun: PerRun[Array[Symbol]] = new PerRun(AbstractFunctionType.map(_.symbol.asClass)) def AbstractFunctionClass(n: Int)(using Context): Symbol = AbstractFunctionClassPerRun()(using ctx)(n) + @tu lazy val caseClassSynthesized: List[Symbol] = List( + Any_hashCode, Any_equals, Any_toString, Product_canEqual, Product_productArity, + Product_productPrefix, Product_productElement, Product_productElementName) + val LazyHolder: PerRun[Map[Symbol, Symbol]] = new PerRun({ def holderImpl(holderType: String) = requiredClass("scala.runtime." 
+ holderType) Map[Symbol, Symbol]( @@ -1237,10 +1307,12 @@ class Definitions { ).symbol.asClass @tu lazy val Function0_apply: Symbol = Function0.requiredMethod(nme.apply) + @tu lazy val ContextFunction0_apply: Symbol = ContextFunction0.requiredMethod(nme.apply) @tu lazy val Function0: Symbol = FunctionClass(0) @tu lazy val Function1: Symbol = FunctionClass(1) @tu lazy val Function2: Symbol = FunctionClass(2) + @tu lazy val ContextFunction0: Symbol = FunctionClass(0, isContextual = true) def FunctionType(n: Int, isContextual: Boolean = false, isErased: Boolean = false)(using Context): TypeRef = FunctionClass(n, isContextual && !ctx.erasedTypes, isErased).typeRef @@ -1327,23 +1399,6 @@ class Definitions { def isBoxedUnitClass(cls: Symbol): Boolean = cls.isClass && (cls.owner eq ScalaRuntimePackageClass) && cls.name == tpnme.BoxedUnit - /** Returns the erased class of the function class `cls` - * - FunctionN for N > 22 becomes FunctionXXL - * - FunctionN for 22 > N >= 0 remains as FunctionN - * - ContextFunctionN for N > 22 becomes FunctionXXL - * - ContextFunctionN for N <= 22 becomes FunctionN - * - ErasedFunctionN becomes Function0 - * - ImplicitErasedFunctionN becomes Function0 - * - anything else becomes a NoSymbol - */ - def erasedFunctionClass(cls: Symbol): Symbol = { - val arity = scalaClassName(cls).functionArity - if (cls.name.isErasedFunction) FunctionClass(0) - else if (arity > 22) FunctionXXLClass - else if (arity >= 0) FunctionClass(arity) - else NoSymbol - } - /** Returns the erased type of the function class `cls` * - FunctionN for N > 22 becomes FunctionXXL * - FunctionN for 22 > N >= 0 remains as FunctionN @@ -1353,20 +1408,12 @@ class Definitions { * - ImplicitErasedFunctionN becomes Function0 * - anything else becomes a NoType */ - def erasedFunctionType(cls: Symbol): Type = { + def functionTypeErasure(cls: Symbol): Type = val arity = scalaClassName(cls).functionArity - if (cls.name.isErasedFunction) FunctionType(0) - else if (arity > 22) 
FunctionXXLClass.typeRef - else if (arity >= 0) FunctionType(arity) + if cls.name.isErasedFunction then FunctionType(0) + else if arity > 22 then FunctionXXLClass.typeRef + else if arity >= 0 then FunctionType(arity) else NoType - } - - val predefClassNames: Set[Name] = - Set("Predef$", "DeprecatedPredef", "LowPriorityImplicits").map(_.toTypeName.unmangleClassName) - - /** Is `cls` the predef module class, or a class inherited by Predef? */ - def isPredefClass(cls: Symbol): Boolean = - (cls.owner eq ScalaPackageClass) && predefClassNames.contains(cls.name) private val JavaImportFns: List[RootRef] = List( RootRef(() => JavaLangPackageVal.termRef) @@ -1425,6 +1472,8 @@ class Definitions { @tu lazy val SpecialClassTagClasses: Set[Symbol] = Set(UnitClass, AnyClass, AnyValClass) + @tu lazy val SpecialManifestClasses: Set[Symbol] = Set(AnyClass, AnyValClass, ObjectClass, NullClass, NothingClass) + /** Classes that are known not to have an initializer irrespective of * whether NoInits is set. Note: FunctionXXLClass is in this set * because if it is compiled by Scala2, it does not get a NoInit flag. @@ -1440,7 +1489,11 @@ class Definitions { def isPolymorphicAfterErasure(sym: Symbol): Boolean = (sym eq Any_isInstanceOf) || (sym eq Any_asInstanceOf) || (sym eq Object_synchronized) - def isTupleType(tp: Type)(using Context): Boolean = { + /** Is this type a `TupleN` type? 
+ * + * @return true if the dealiased type of `tp` is `TupleN[T1, T2, ..., Tn]` + */ + def isTupleNType(tp: Type)(using Context): Boolean = { val arity = tp.dealias.argInfos.length arity <= MaxTupleArity && TupleType(arity) != null && tp.isRef(TupleType(arity).symbol) } @@ -1506,7 +1559,8 @@ class Definitions { new PerRun(Function2SpecializedReturnTypes.map(_.symbol)) def isSpecializableFunction(cls: ClassSymbol, paramTypes: List[Type], retType: Type)(using Context): Boolean = - paramTypes.length <= 2 && cls.derivesFrom(FunctionClass(paramTypes.length)) + paramTypes.length <= 2 + && (cls.derivesFrom(FunctionClass(paramTypes.length)) || isByNameFunctionClass(cls)) && isSpecializableFunctionSAM(paramTypes, retType) /** If the Single Abstract Method of a Function class has this type, is it specializable? */ @@ -1588,11 +1642,11 @@ class Definitions { /** If `cls` is Tuple1..Tuple22, add the corresponding *: type as last parent to `parents` */ def adjustForTuple(cls: ClassSymbol, tparams: List[TypeSymbol], parents: List[Type]): List[Type] = { - def syntheticParent(tparams: List[TypeSymbol]): Type = - if (tparams.isEmpty) TupleTypeRef - else TypeOps.nestedPairs(tparams.map(_.typeRef)) - if (isTupleClass(cls)) parents :+ syntheticParent(tparams) - else parents + if !isTupleClass(cls) then parents + else if tparams.isEmpty then parents :+ TupleTypeRef + else + assert(parents.head.typeSymbol == ObjectClass) + TypeOps.nestedPairs(tparams.map(_.typeRef)) :: parents.tail } /** If it is BoxedUnit, remove `java.io.Serializable` from `parents`. 
*/ @@ -1699,6 +1753,20 @@ class Definitions { else sys.error(s"Not a primitive value type: $tp") }.typeRef + def unboxedType(tp: Type)(using Context): TypeRef = { + val cls = tp.classSymbol + if (cls eq BoxedByteClass) ByteType + else if (cls eq BoxedShortClass) ShortType + else if (cls eq BoxedCharClass) CharType + else if (cls eq BoxedIntClass) IntType + else if (cls eq BoxedLongClass) LongType + else if (cls eq BoxedFloatClass) FloatType + else if (cls eq BoxedDoubleClass) DoubleType + else if (cls eq BoxedUnitClass) UnitType + else if (cls eq BoxedBooleanClass) BooleanType + else sys.error(s"Not a boxed primitive value type: $tp") + } + /** The JVM tag for `tp` if it's a primitive, `java.lang.Object` otherwise. */ def typeTag(tp: Type)(using Context): Name = typeTags(scalaClassName(tp)) @@ -1731,6 +1799,7 @@ class Definitions { .updated(SingletonClass, ObjectClass) .updated(TupleClass, ProductClass) .updated(NonEmptyTupleClass, ProductClass) + .updated(PairClass, ObjectClass) // ----- Initialization --------------------------------------------------- @@ -1774,7 +1843,7 @@ class Definitions { } def addSyntheticSymbolsComments(using Context): Unit = - def add(sym: Symbol, doc: String) = ctx.docCtx.get.addDocstring(sym, Some(Comment(NoSpan, doc))) + def add(sym: Symbol, doc: String) = ctx.docCtx.foreach(_.addDocstring(sym, Some(Comment(NoSpan, doc)))) add(AnyClass, """/** Class `Any` is the root of the Scala class hierarchy. 
Every class in a Scala diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index f3c547697dec..d97ae8ca2f6e 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -791,8 +791,6 @@ object Denotations { val currentPeriod = ctx.period val valid = myValidFor - def signalError() = println(s"error while transforming $this") - def assertNotPackage(d: SingleDenotation, transformer: DenotTransformer) = d match case d: ClassDenotation => assert(!d.is(Package), s"illegal transformation of package denotation by transformer $transformer") @@ -836,7 +834,7 @@ object Denotations { // To work correctly, we need to demand that the context with the new phase // is not retained in the result. catch case ex: CyclicReference => - signalError() + // println(s"error while transforming $this") throw ex finally mutCtx.setPeriod(savedPeriod) @@ -1023,7 +1021,7 @@ object Denotations { * erasure (see i8615b, i9109b), Erasure takes care of adding any necessary * bridge to make this work at runtime. */ - def matchesLoosely(other: SingleDenotation)(using Context): Boolean = + def matchesLoosely(other: SingleDenotation, alwaysCompareTypes: Boolean = false)(using Context): Boolean = if isType then true else val thisLanguage = SourceLanguage(symbol) @@ -1033,7 +1031,7 @@ object Denotations { val otherSig = other.signature(commonLanguage) sig.matchDegree(otherSig) match case FullMatch => - true + !alwaysCompareTypes || info.matches(other.info) case MethodNotAMethodMatch => !ctx.erasedTypes && { // A Scala zero-parameter method and a Scala non-method always match. 
diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 76cb45c1b28c..cb590e2384a0 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -233,7 +233,7 @@ object Flags { val (Param @ _, TermParam @ _, TypeParam @ _) = newFlags(8, "") /** Labeled with `implicit` modifier (implicit value) */ - val (Implicit @ _, ImplicitTerm @ _, _) = newFlags(9, "implicit") + val (Implicit @ _, ImplicitVal @ _, _) = newFlags(9, "implicit") /** Labeled with `lazy` (a lazy val) / a trait */ val (LazyOrTrait @ _, Lazy @ _, Trait @ _) = newFlags(10, "lazy", "") @@ -321,7 +321,7 @@ object Flags { val (Extension @ _, ExtensionMethod @ _, _) = newFlags(28, "") /** An inferable (`given`) parameter */ - val (Given @ _, _, _) = newFlags(29, "given") + val (Given @ _, GivenVal @ _, _) = newFlags(29, "given") /** Symbol is defined by a Java class */ val (JavaDefined @ _, JavaDefinedVal @ _, _) = newFlags(30, "") @@ -465,7 +465,7 @@ object Flags { Module, Package, Deferred, Method, Case, Enum, Param, ParamAccessor, Scala2SpecialFlags, MutableOrOpen, Opaque, Touched, JavaStatic, OuterOrCovariant, LabelOrContravariant, CaseAccessor, - Extension, NonMember, Implicit, Given, Permanent, Synthetic, + Extension, NonMember, Implicit, Given, Permanent, Synthetic, Exported, SuperParamAliasOrScala2x, Inline, Macro, ConstructorProxy, Invisible) /** Flags that are not (re)set when completing the denotation, or, if symbol is @@ -531,7 +531,7 @@ object Flags { val RetainedModuleClassFlags: FlagSet = RetainedModuleValAndClassFlags | Enum /** Flags retained in export forwarders */ - val RetainedExportFlags = Given | Implicit | Inline + val RetainedExportFlags = Given | Implicit | Inline | Transparent /** Flags that apply only to classes */ val ClassOnlyFlags = Sealed | Open | Abstract.toTypeFlags @@ -568,6 +568,7 @@ object Flags { val FinalOrSealed: FlagSet = Final | Sealed val GivenOrImplicit: 
FlagSet = Given | Implicit val GivenOrImplicitVal: FlagSet = GivenOrImplicit.toTermFlags + val GivenMethod: FlagSet = Given | Method val InlineOrProxy: FlagSet = Inline | InlineProxy // An inline method or inline argument proxy */ val InlineMethod: FlagSet = Inline | Method val InlineParam: FlagSet = Inline | Param @@ -600,7 +601,6 @@ object Flags { val Scala2Trait: FlagSet = Scala2x | Trait val SyntheticArtifact: FlagSet = Synthetic | Artifact val SyntheticCase: FlagSet = Synthetic | Case - val SyntheticGivenMethod: FlagSet = Synthetic | Given | Method val SyntheticModule: FlagSet = Synthetic | Module val SyntheticOpaque: FlagSet = Synthetic | Opaque val SyntheticParam: FlagSet = Synthetic | Param diff --git a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala index 7d84b9892057..fb1ed9acaa4f 100644 --- a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala @@ -11,6 +11,7 @@ import collection.mutable import printing._ import scala.annotation.internal.sharable +import scala.annotation.unused /** Represents GADT constraints currently in scope */ sealed abstract class GadtConstraint extends Showable { @@ -43,8 +44,8 @@ sealed abstract class GadtConstraint extends Showable { */ def contains(sym: Symbol)(using Context): Boolean - def isEmpty: Boolean - final def nonEmpty: Boolean = !isEmpty + /** GADT constraint narrows bounds of at least one variable */ + def isNarrowing: Boolean /** See [[ConstraintHandling.approximation]] */ def approximation(sym: Symbol, fromBelow: Boolean)(using Context): Type @@ -61,13 +62,15 @@ final class ProperGadtConstraint private( private var myConstraint: Constraint, private var mapping: SimpleIdentityMap[Symbol, TypeVar], private var reverseMapping: SimpleIdentityMap[TypeParamRef, Symbol], + private var wasConstrained: Boolean ) extends GadtConstraint with ConstraintHandling { import 
dotty.tools.dotc.config.Printers.{gadts, gadtsConstr} def this() = this( myConstraint = new OrderingConstraint(SimpleIdentityMap.empty, SimpleIdentityMap.empty, SimpleIdentityMap.empty), mapping = SimpleIdentityMap.empty, - reverseMapping = SimpleIdentityMap.empty + reverseMapping = SimpleIdentityMap.empty, + wasConstrained = false ) /** Exposes ConstraintHandling.subsumes */ @@ -79,6 +82,11 @@ final class ProperGadtConstraint private( subsumes(extractConstraint(left), extractConstraint(right), extractConstraint(pre)) } + override protected def legalBound(param: TypeParamRef, rawBound: Type, isUpper: Boolean)(using Context): Type = + // GADT constraints never involve wildcards and are not propagated outside + // the case where they're valid, so no approximating is needed. + rawBound + override def addToConstraint(params: List[Symbol])(using Context): Boolean = { import NameKinds.DepParamName @@ -149,20 +157,24 @@ final class ProperGadtConstraint private( if (ntTvar ne null) stripInternalTypeVar(ntTvar) else bound case _ => bound } - ( - internalizedBound match { - case boundTvar: TypeVar => - if (boundTvar eq symTvar) true - else if (isUpper) addLess(symTvar.origin, boundTvar.origin) - else addLess(boundTvar.origin, symTvar.origin) - case bound => - addBoundTransitively(symTvar.origin, bound, isUpper) - } - ).showing({ + + val saved = constraint + val result = internalizedBound match + case boundTvar: TypeVar => + if (boundTvar eq symTvar) true + else if (isUpper) addLess(symTvar.origin, boundTvar.origin) + else addLess(boundTvar.origin, symTvar.origin) + case bound => + addBoundTransitively(symTvar.origin, bound, isUpper) + + gadts.println { val descr = if (isUpper) "upper" else "lower" val op = if (isUpper) "<:" else ">:" i"adding $descr bound $sym $op $bound = $result" - }, gadts) + } + + if constraint ne saved then wasConstrained = true + result } override def isLess(sym1: Symbol, sym2: Symbol)(using Context): Boolean = @@ -193,6 +205,8 @@ final class 
ProperGadtConstraint private( override def contains(sym: Symbol)(using Context): Boolean = mapping(sym) ne null + def isNarrowing: Boolean = wasConstrained + override def approximation(sym: Symbol, fromBelow: Boolean)(using Context): Type = { val res = approximation(tvarOrError(sym).origin, fromBelow = fromBelow) gadts.println(i"approximating $sym ~> $res") @@ -202,7 +216,8 @@ final class ProperGadtConstraint private( override def fresh: GadtConstraint = new ProperGadtConstraint( myConstraint, mapping, - reverseMapping + reverseMapping, + wasConstrained ) def restore(other: GadtConstraint): Unit = other match { @@ -210,11 +225,10 @@ final class ProperGadtConstraint private( this.myConstraint = other.myConstraint this.mapping = other.mapping this.reverseMapping = other.reverseMapping + this.wasConstrained = other.wasConstrained case _ => ; } - override def isEmpty: Boolean = mapping.size == 0 - // ---- Protected/internal ----------------------------------------------- override protected def constraint = myConstraint @@ -293,7 +307,7 @@ final class ProperGadtConstraint private( override def isLess(sym1: Symbol, sym2: Symbol)(using Context): Boolean = unsupported("EmptyGadtConstraint.isLess") - override def isEmpty: Boolean = true + override def isNarrowing: Boolean = false override def contains(sym: Symbol)(using Context) = false @@ -304,7 +318,7 @@ final class ProperGadtConstraint private( override def fresh = new ProperGadtConstraint override def restore(other: GadtConstraint): Unit = - if (!other.isEmpty) sys.error("cannot restore a non-empty GADTMap") + assert(!other.isNarrowing, "cannot restore a non-empty GADTMap") override def debugBoundsDescription(using Context): String = "EmptyGadtConstraint" diff --git a/compiler/src/dotty/tools/dotc/core/Hashable.scala b/compiler/src/dotty/tools/dotc/core/Hashable.scala index 1a1550183f93..713555bed517 100644 --- a/compiler/src/dotty/tools/dotc/core/Hashable.scala +++ b/compiler/src/dotty/tools/dotc/core/Hashable.scala @@ 
-19,20 +19,20 @@ object Hashable { /** A hash value indicating that the underlying type is not * cached in uniques. */ - final val NotCached = 0 + inline val NotCached = 0 /** An alternative value returned from `hash` if the * computed hashCode would be `NotCached`. */ - private[core] final val NotCachedAlt = Int.MinValue + private[core] inline val NotCachedAlt = Int.MinValue /** A value that indicates that the hash code is unknown */ - private[core] final val HashUnknown = 1234 + private[core] inline val HashUnknown = 1234 /** An alternative value if computeHash would otherwise yield HashUnknown */ - private[core] final val HashUnknownAlt = 4321 + private[core] inline val HashUnknownAlt = 4321 } trait Hashable { diff --git a/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala b/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala index 261b1f26a64d..e56835d59c54 100644 --- a/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala +++ b/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala @@ -3,6 +3,7 @@ package dotty.tools.dotc.core import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.util.Property import dotty.tools.dotc.reporting.trace +import dotty.tools.io.ClassPath import scala.collection.mutable @@ -20,8 +21,9 @@ object MacroClassLoader { ctx.setProperty(MacroClassLoaderKey, makeMacroClassLoader(using ctx)) private def makeMacroClassLoader(using Context): ClassLoader = trace("new macro class loader") { - val urls = ctx.settings.classpath.value.split(java.io.File.pathSeparatorChar).map(cp => java.nio.file.Paths.get(cp).toUri.toURL) - val out = ctx.settings.outputDir.value.jpath.toUri.toURL // to find classes in case of suspended compilation - new java.net.URLClassLoader(urls :+ out, getClass.getClassLoader) + val entries = ClassPath.expandPath(ctx.settings.classpath.value, expandStar=true) + val urls = entries.map(cp => java.nio.file.Paths.get(cp).toUri.toURL).toArray + val out = Option(ctx.settings.outputDir.value.toURL) // to 
find classes in case of suspended compilation + new java.net.URLClassLoader(urls ++ out.toList, getClass.getClassLoader) } } diff --git a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala new file mode 100644 index 000000000000..bc08dbc36eea --- /dev/null +++ b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala @@ -0,0 +1,124 @@ +package dotty.tools +package dotc +package core + +import Types._, Contexts._, Symbols._, Decorators._ +import util.Property + +/** A utility module to produce match type reduction traces in error messages. + */ +object MatchTypeTrace: + + private enum TraceEntry: + case TryReduce(scrut: Type) + case NoMatches(scrut: Type, cases: List[Type]) + case Stuck(scrut: Type, stuckCase: Type, otherCases: List[Type]) + case EmptyScrutinee(scrut: Type) + import TraceEntry._ + + private class MatchTrace: + var entries: List[TraceEntry] = Nil + + private val MatchTrace = new Property.Key[MatchTrace] + + /** Execute `op` and if it involves a failed match type reduction + * return the trace of that reduction. Otherwise return the empty string. + */ + def record(op: Context ?=> Any)(using Context): String = + val trace = new MatchTrace + inContext(ctx.fresh.setProperty(MatchTrace, trace)) { + op + if trace.entries.isEmpty then "" + else + i""" + | + |Note: a match type could not be fully reduced: + | + |${trace.entries.reverse.map(explainEntry)}%\n%""" + } + + /** Are we running an operation that records a match type trace? */ + def isRecording(using Context): Boolean = + ctx.property(MatchTrace).isDefined + + private def matchTypeFail(entry: TraceEntry)(using Context) = + ctx.property(MatchTrace) match + case Some(trace) => + trace.entries match + case (e: TryReduce) :: es => trace.entries = entry :: trace.entries + case _ => + case _ => + + /** Record a failure that scrutinee `scrut` does not match any case in `cases`. + * Only the first failure is recorded. 
+ */ + def noMatches(scrut: Type, cases: List[Type])(using Context) = + matchTypeFail(NoMatches(scrut, cases)) + + /** Record a failure that scrutinee `scrut` does not match `stuckCase` but is + * not disjoint from it either, which means that the remaining cases `otherCases` + * cannot be visited. Only the first failure is recorded. + */ + def stuck(scrut: Type, stuckCase: Type, otherCases: List[Type])(using Context) = + matchTypeFail(Stuck(scrut, stuckCase, otherCases)) + + /** Record a failure that scrutinee `scrut` is provably empty. + * Only the first failure is recorded. + */ + def emptyScrutinee(scrut: Type)(using Context) = + matchTypeFail(EmptyScrutinee(scrut)) + + /** Record in the trace that we are trying to reduce `scrut` when performing `op` + * If `op` succeeds the entry is removed after exit. If `op` fails, it stays. + */ + def recurseWith(scrut: Type)(op: => Type)(using Context): Type = + ctx.property(MatchTrace) match + case Some(trace) => + val prev = trace.entries + trace.entries = TryReduce(scrut) :: prev + val res = op + if res.exists then trace.entries = prev + res + case _ => + op + + private def caseText(tp: Type)(using Context): String = tp match + case tp: HKTypeLambda => caseText(tp.resultType) + case defn.MatchCase(any, body) if any eq defn.AnyType => i"case _ => $body" + case defn.MatchCase(pat, body) => i"case $pat => $body" + case _ => i"case $tp" + + private def casesText(cases: List[Type])(using Context) = + i"${cases.map(caseText)}%\n %" + + private def explainEntry(entry: TraceEntry)(using Context): String = entry match + case TryReduce(scrut: Type) => + i" trying to reduce $scrut" + case NoMatches(scrut, cases) => + i""" failed since selector $scrut + | matches none of the cases + | + | ${casesText(cases)}""" + case EmptyScrutinee(scrut) => + i""" failed since selector $scrut + | is uninhabited (there are no values of that type).""" + case Stuck(scrut, stuckCase, otherCases) => + val msg = + i""" failed since selector $scrut + | 
does not match ${caseText(stuckCase)} + | and cannot be shown to be disjoint from it either.""" + if otherCases.length == 0 then msg + else + val s = if otherCases.length == 1 then "" else "s" + i"""$msg + | Therefore, reduction cannot advance to the remaining case$s + | + | ${casesText(otherCases)}""" + + def noMatchesText(scrut: Type, cases: List[Type])(using Context): String = + i"""failed since selector $scrut + |matches none of the cases + | + | ${casesText(cases)}""" + +end MatchTypeTrace diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index 05986682e924..9f5b8a9a1c05 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -102,9 +102,6 @@ object Mode { /** We are typing the body of an inline method */ val InlineableBody: Mode = newMode(21, "InlineableBody") - /** Read comments from definitions when unpickling from TASTY */ - val ReadComments: Mode = newMode(22, "ReadComments") - /** We are synthesizing the receiver of an extension method */ val SynthesizeExtMethodReceiver: Mode = newMode(23, "SynthesizeExtMethodReceiver") diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index e17e772de993..0e9ccecb18c8 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -358,14 +358,22 @@ object NameKinds { val ProtectedAccessorName: PrefixNameKind = new PrefixNameKind(PROTECTEDACCESSOR, "protected$") val InlineAccessorName: PrefixNameKind = new PrefixNameKind(INLINEACCESSOR, "inline$") + /** See `ConstraintHandling#LevelAvoidMap`. 
*/ + enum AvoidNameKind(tag: Int, prefix: String) extends PrefixNameKind(tag, prefix): + override def definesNewName = true + case UpperBound extends AvoidNameKind(AVOIDUPPER, "(upper)") + case LowerBound extends AvoidNameKind(AVOIDLOWER, "(lower)") + case BothBounds extends AvoidNameKind(AVOIDBOTH, "(avoid)") + val BodyRetainerName: SuffixNameKind = new SuffixNameKind(BODYRETAINER, "$retainedBody") val FieldName: SuffixNameKind = new SuffixNameKind(FIELD, "$$local") { override def mkString(underlying: TermName, info: ThisInfo) = underlying.toString } + val ExplicitFieldName: SuffixNameKind = new SuffixNameKind(EXPLICITFIELD, "$field") val ExtMethName: SuffixNameKind = new SuffixNameKind(EXTMETH, "$extension") val ParamAccessorName: SuffixNameKind = new SuffixNameKind(PARAMACC, "$accessor") val ModuleClassName: SuffixNameKind = new SuffixNameKind(OBJECTCLASS, "$", optInfoString = "ModuleClass") - val ImplMethName: SuffixNameKind = new SuffixNameKind(IMPLMETH, "$") + val DirectMethName: SuffixNameKind = new SuffixNameKind(DIRECT, "$direct") val AdaptedClosureName: SuffixNameKind = new SuffixNameKind(ADAPTEDCLOSURE, "$adapted") { override def definesNewName = true } val SyntheticSetterName: SuffixNameKind = new SuffixNameKind(SETTER, "_$eq") diff --git a/compiler/src/dotty/tools/dotc/core/NameOps.scala b/compiler/src/dotty/tools/dotc/core/NameOps.scala index d0b7e811eb4b..fb35ac0ac91f 100644 --- a/compiler/src/dotty/tools/dotc/core/NameOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NameOps.scala @@ -17,7 +17,7 @@ object NameOps { object compactify { lazy val md5: MessageDigest = MessageDigest.getInstance("MD5") - final val CLASSFILE_NAME_CHAR_LIMIT = 240 + inline val CLASSFILE_NAME_CHAR_LIMIT = 240 /** COMPACTIFY * diff --git a/compiler/src/dotty/tools/dotc/core/NameTags.scala b/compiler/src/dotty/tools/dotc/core/NameTags.scala index 63aea8853235..59dfaa3d437b 100644 --- a/compiler/src/dotty/tools/dotc/core/NameTags.scala +++ 
b/compiler/src/dotty/tools/dotc/core/NameTags.scala @@ -5,31 +5,40 @@ import dotty.tools.tasty.TastyFormat /** The possible tags of a NameKind */ object NameTags extends TastyFormat.NameTags { - final val FLATTENED = 5 // A flat name, generated by Flatten + inline val FLATTENED = 5 // A flat name, generated by Flatten - final val TRAITSETTER = 6 // A Scala-2 trait setter, generated by AugmentScala2Traits + inline val TRAITSETTER = 6 // A Scala-2 trait setter, generated by AugmentScala2Traits - final val OUTERSELECT = 13 // A name `_outer`, used by the inliner to indicate an + inline val OUTERSELECT = 13 // A name `_outer`, used by the inliner to indicate an // outer accessor that will be filled in by ExplicitOuter. // indicates the number of hops needed to select the outer field. - final val PROTECTEDACCESSOR = 24 // The name of a protected accesor `protected$` created by ProtectedAccessors. + inline val PROTECTEDACCESSOR = 24 // The name of a protected accesor `protected$` created by ProtectedAccessors. - final val INITIALIZER = 26 // A mixin initializer method + inline val INITIALIZER = 26 // A mixin initializer method - final val FIELD = 29 // Used by Memoize to tag the name of a class member field. + inline val FIELD = 29 // Used by Memoize to tag the name of a class member field. - final val EXTMETH = 30 // Used by ExtensionMethods for the name of an extension method + inline val EXTMETH = 30 // Used by ExtensionMethods for the name of an extension method // implementing a value class method. - final val ADAPTEDCLOSURE = 31 // Used in Erasure to adapt closures over primitive types. + inline val ADAPTEDCLOSURE = 31 // Used in Erasure to adapt closures over primitive types. - final val IMPLMETH = 32 // Used to define methods in implementation classes - // (can probably be removed). + inline val DIRECT = 32 // Used to define implementations of methods with + // erased context function results that can override some + // other method. 
- final val PARAMACC = 33 // Used for a private parameter alias + inline val PARAMACC = 33 // Used for a private parameter alias - final val SETTER = 34 // A synthesized += suffix. + inline val SETTER = 34 // A synthesized += suffix. + + // Name of type variables created by `ConstraintHandling#LevelAvoidMap`. + final val AVOIDUPPER = 35 + final val AVOIDLOWER = 36 + final val AVOIDBOTH = 37 + + inline val EXPLICITFIELD = 38 // An explicitly named field, introduce to avoid a clash + // with a regular field of the underlying name def nameTagToString(tag: Int): String = tag match { case UTF8 => "UTF8" @@ -48,7 +57,7 @@ object NameTags extends TastyFormat.NameTags { case INITIALIZER => "INITIALIZER" case FIELD => "FIELD" case EXTMETH => "EXTMETH" - case IMPLMETH => "IMPLMETH" + case DIRECT => "DIRECT" case PARAMACC => "PARAMACC" case ADAPTEDCLOSURE => "ADAPTEDCLOSURE" case OBJECTCLASS => "OBJECTCLASS" diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index ce525fc5d827..9444270ccb05 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -11,15 +11,14 @@ import ast.untpd /** Operations that are shared between Namer and TreeUnpickler */ object NamerOps: - /** The given type, unless `sym` is a constructor, in which case the - * type of the constructed instance is returned + /** The type of the constructed instance is returned + * + * @param ctor the constructor */ - def effectiveResultType(sym: Symbol, paramss: List[List[Symbol]], givenTp: Type)(using Context): Type = - if sym.name == nme.CONSTRUCTOR then - paramss match - case TypeSymbols(tparams) :: _ => sym.owner.typeRef.appliedTo(tparams.map(_.typeRef)) - case _ => sym.owner.typeRef - else givenTp + def effectiveResultType(ctor: Symbol, paramss: List[List[Symbol]])(using Context): Type = + paramss match + case TypeSymbols(tparams) :: _ => 
ctor.owner.typeRef.appliedTo(tparams.map(_.typeRef)) + case _ => ctor.owner.typeRef /** if isConstructor, make sure it has one leading non-implicit parameter list */ def normalizeIfConstructor(paramss: List[List[Symbol]], isConstructor: Boolean)(using Context): List[List[Symbol]] = @@ -76,11 +75,16 @@ object NamerOps: /** The flags of an `apply` method that serves as a constructor proxy */ val ApplyProxyFlags = Synthetic | ConstructorProxy | Inline | Method - /** Does symbol `cls` need constructor proxies to be generated? */ - def needsConstructorProxies(cls: Symbol)(using Context): Boolean = - cls.isClass - && !cls.flagsUNSAFE.isOneOf(NoConstructorProxyNeededFlags) - && !cls.isAnonymousClass + /** Does symbol `sym` need constructor proxies to be generated? */ + def needsConstructorProxies(sym: Symbol)(using Context): Boolean = + sym.isClass + && !sym.flagsUNSAFE.isOneOf(NoConstructorProxyNeededFlags) + && !sym.isAnonymousClass + || + sym.isType && sym.is(Exported) + && sym.info.loBound.underlyingClassRef(refinementOK = false).match + case tref: TypeRef => tref.prefix.isStable + case _ => false /** The completer of a constructor proxy apply method */ class ApplyProxyCompleter(constr: Symbol)(using Context) extends LazyType: @@ -114,7 +118,7 @@ object NamerOps: }.withSourceModule(modul) /** A new symbol that is the constructor companion for class `cls` */ - def constructorCompanion(cls: ClassSymbol)(using Context): TermSymbol = + def classConstructorCompanion(cls: ClassSymbol)(using Context): TermSymbol = val companion = newModuleSymbol( cls.owner, cls.name.toTermName, ConstructorCompanionFlags, ConstructorCompanionFlags, @@ -125,9 +129,13 @@ object NamerOps: cls.registerCompanion(companion.moduleClass) companion + def typeConstructorCompanion(tsym: Symbol, prefix: Type, proxy: Symbol)(using Context): TermSymbol = + newSymbol(tsym.owner, tsym.name.toTermName, + ConstructorCompanionFlags | StableRealizable | Method, ExprType(prefix.select(proxy)), coord = tsym.coord) 
+ /** Add all necesssary constructor proxy symbols for members of class `cls`. This means: * - * - if a member is a class that needs a constructor companion, add one, + * - if a member is a class, or type alias, that needs a constructor companion, add one, * provided no member with the same name exists. * - if `cls` is a companion object of a class that needs a constructor companion, * and `cls` does not already define or inherit an `apply` method, @@ -137,12 +145,21 @@ object NamerOps: def memberExists(cls: ClassSymbol, name: TermName): Boolean = cls.baseClasses.exists(_.info.decls.lookupEntry(name) != null) + for mbr <- cls.info.decls do - if needsConstructorProxies(mbr) - && !mbr.asClass.unforcedRegisteredCompanion.exists - && !memberExists(cls, mbr.name.toTermName) - then - constructorCompanion(mbr.asClass).entered + if needsConstructorProxies(mbr) then + mbr match + case mbr: ClassSymbol => + if !mbr.unforcedRegisteredCompanion.exists + && !memberExists(cls, mbr.name.toTermName) + then + classConstructorCompanion(mbr).entered + case _ => + mbr.info.loBound.underlyingClassRef(refinementOK = false) match + case ref: TypeRef => + val proxy = ref.symbol.registeredCompanion + if proxy.is(ConstructorProxy) && !memberExists(cls, mbr.name.toTermName) then + typeConstructorCompanion(mbr, ref.prefix, proxy).entered if cls.is(Module) && needsConstructorProxies(cls.linkedClass) diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala index acfbd411a0ce..f12aaecdd12d 100644 --- a/compiler/src/dotty/tools/dotc/core/Names.scala +++ b/compiler/src/dotty/tools/dotc/core/Names.scala @@ -245,7 +245,7 @@ object Names { myMangledString } - /** If this a qualified name, split it into underlyng, last part, and separator + /** If this a qualified name, split it into underlying, last part, and separator * Otherwise return an empty name, the name itself, and "") */ def split: (TermName, TermName, String) @@ -342,8 +342,7 @@ object 
Names { override def encode: SimpleName = { val dontEncode = - length >= 3 && - head == '<' && last == '>' && isIdentifierStart(apply(1)) + this == StdNames.nme.CONSTRUCTOR || this == StdNames.nme.STATIC_CONSTRUCTOR if (dontEncode) this else NameTransformer.encode(this) } diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index 9b1fbecc517d..1f83224cc3e7 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -24,11 +24,13 @@ object OrderingConstraint { type ParamOrdering = ArrayValuedMap[List[TypeParamRef]] /** A new constraint with given maps */ - private def newConstraint(boundsMap: ParamBounds, lowerMap: ParamOrdering, upperMap: ParamOrdering)(using Context) : OrderingConstraint = { - val result = new OrderingConstraint(boundsMap, lowerMap, upperMap) - ctx.run.recordConstraintSize(result, result.boundsMap.size) - result - } + private def newConstraint(boundsMap: ParamBounds, lowerMap: ParamOrdering, upperMap: ParamOrdering)(using Context) : OrderingConstraint = + if boundsMap.isEmpty && lowerMap.isEmpty && upperMap.isEmpty then + empty + else + val result = new OrderingConstraint(boundsMap, lowerMap, upperMap) + ctx.run.recordConstraintSize(result, result.boundsMap.size) + result /** A lens for updating a single entry array in one of the three constraint maps */ abstract class ConstraintLens[T <: AnyRef: ClassTag] { @@ -132,6 +134,8 @@ class OrderingConstraint(private val boundsMap: ParamBounds, private val lowerMap : ParamOrdering, private val upperMap : ParamOrdering) extends Constraint { + import UnificationDirection.* + type This = OrderingConstraint // ----------- Basic indices -------------------------------------------------- @@ -280,9 +284,11 @@ class OrderingConstraint(private val boundsMap: ParamBounds, var current = this val todos = new mutable.ListBuffer[(OrderingConstraint, 
TypeParamRef) => OrderingConstraint] var i = 0 + val dropWildcards = AvoidWildcardsMap() while (i < poly.paramNames.length) { val param = poly.paramRefs(i) - val stripped = stripParams(nonParamBounds(param), todos, isUpper = true) + val bounds = dropWildcards(nonParamBounds(param)) + val stripped = stripParams(bounds, todos, isUpper = true) current = updateEntry(current, param, stripped) while todos.nonEmpty do current = todos.head(current, param) @@ -309,8 +315,11 @@ class OrderingConstraint(private val boundsMap: ParamBounds, val r1 = recur(tp.tp1, fromBelow) val r2 = recur(tp.tp2, fromBelow) if (r1 eq tp.tp1) && (r2 eq tp.tp2) then tp - else if tp.isAnd then r1 & r2 - else r1 | r2 + else tp.match + case tp: OrType => + TypeComparer.lub(r1, r2, isSoft = tp.isSoft) + case _ => + r1 & r2 case tp: TypeParamRef => if tp eq param then if fromBelow then defn.NothingType else defn.AnyType @@ -343,13 +352,40 @@ class OrderingConstraint(private val boundsMap: ParamBounds, /** Add the fact `param1 <: param2` to the constraint `current` and propagate * `<:<` relationships between parameters ("edges") but not bounds. */ - private def order(current: This, param1: TypeParamRef, param2: TypeParamRef)(using Context): This = + def order(current: This, param1: TypeParamRef, param2: TypeParamRef, direction: UnificationDirection = NoUnification)(using Context): This = if (param1 == param2 || current.isLess(param1, param2)) this else { assert(contains(param1), i"$param1") assert(contains(param2), i"$param2") - val newUpper = param2 :: exclusiveUpper(param2, param1) - val newLower = param1 :: exclusiveLower(param1, param2) + val unifying = direction != NoUnification + val newUpper = { + val up = exclusiveUpper(param2, param1) + if unifying then + // Since param2 <:< param1 already holds now, filter out param1 to avoid adding + // duplicated orderings. + val filtered = up.filterNot(_ eq param1) + // Only add bounds for param2 if it will be kept in the constraint after unification. 
+ if direction == KeepParam2 then + param2 :: filtered + else + filtered + else + param2 :: up + } + val newLower = { + val lower = exclusiveLower(param1, param2) + if unifying then + // Similarly, filter out param2 from lowerly-ordered parameters + // to avoid duplicated orderings. + val filtered = lower.filterNot(_ eq param2) + // Only add bounds for param1 if it will be kept in the constraint after unification. + if direction == KeepParam1 then + param1 :: filtered + else + filtered + else + param1 :: lower + } val current1 = newLower.foldLeft(current)(upperLens.map(this, _, _, newUpper ::: _)) val current2 = newUpper.foldLeft(current1)(lowerLens.map(this, _, _, newLower ::: _)) current2 @@ -373,6 +409,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, Nil private def updateEntry(current: This, param: TypeParamRef, tp: Type)(using Context): This = { + if Config.checkNoWildcardsInConstraint then assert(!tp.containsWildcardTypes) var current1 = boundsLens.update(this, current, param, tp) tp match { case TypeBounds(lo, hi) => @@ -389,14 +426,8 @@ class OrderingConstraint(private val boundsMap: ParamBounds, def updateEntry(param: TypeParamRef, tp: Type)(using Context): This = updateEntry(this, param, ensureNonCyclic(param, tp)).checkNonCyclic() - def addLess(param1: TypeParamRef, param2: TypeParamRef)(using Context): This = - order(this, param1, param2).checkNonCyclic() - - def unify(p1: TypeParamRef, p2: TypeParamRef)(using Context): This = - val bound1 = nonParamBounds(p1).substParam(p2, p1) - val bound2 = nonParamBounds(p2).substParam(p2, p1) - val p1Bounds = bound1 & bound2 - updateEntry(p1, p1Bounds).replace(p2, p1) + def addLess(param1: TypeParamRef, param2: TypeParamRef, direction: UnificationDirection)(using Context): This = + order(this, param1, param2, direction).checkNonCyclic() // ---------- Replacements and Removals ------------------------------------- @@ -451,83 +482,23 @@ class OrderingConstraint(private val boundsMap: ParamBounds, // 
----------- Joins ----------------------------------------------------- - def & (other: Constraint, otherHasErrors: Boolean)(using Context): OrderingConstraint = { - - def merge[T](m1: ArrayValuedMap[T], m2: ArrayValuedMap[T], join: (T, T) => T): ArrayValuedMap[T] = { - var merged = m1 - def mergeArrays(xs1: Array[T], xs2: Array[T]) = { - val xs = xs1.clone - for (i <- xs.indices) xs(i) = join(xs1(i), xs2(i)) - xs - } - m2.foreachBinding { (poly, xs2) => - merged = merged.updated(poly, - if (m1.contains(poly)) mergeArrays(m1(poly), xs2) else xs2) - } - merged - } - - def mergeParams(ps1: List[TypeParamRef], ps2: List[TypeParamRef]) = - ps2.foldLeft(ps1)((ps1, p2) => if (ps1.contains(p2)) ps1 else p2 :: ps1) - - // Must be symmetric - def mergeEntries(e1: Type, e2: Type): Type = - (e1, e2) match { - case _ if e1 eq e2 => e1 - case (e1: TypeBounds, e2: TypeBounds) => e1 & e2 - case (e1: TypeBounds, _) if e1 contains e2 => e2 - case (_, e2: TypeBounds) if e2 contains e1 => e1 - case (tv1: TypeVar, tv2: TypeVar) if tv1 eq tv2 => e1 - case _ => - if (otherHasErrors) - e1 - else - throw new AssertionError(i"cannot merge $this with $other, mergeEntries($e1, $e2) failed") - } - - /** Ensure that constraint `c` does not associate different TypeVars for the - * same type lambda than this constraint. Do this by renaming type lambdas - * in `c` where necessary. - */ - def ensureNotConflicting(c: OrderingConstraint): OrderingConstraint = { - def hasConflictingTypeVarsFor(tl: TypeLambda) = - this.typeVarOfParam(tl.paramRefs(0)) ne c.typeVarOfParam(tl.paramRefs(0)) - // Note: Since TypeVars are allocated in bulk for each type lambda, we only - // have to check the first one to find out if some of them are different. 
- val conflicting = c.domainLambdas.find(tl => - this.contains(tl) && hasConflictingTypeVarsFor(tl)) - conflicting match { - case Some(tl) => ensureNotConflicting(c.rename(tl)) - case None => c - } - } - - val that = ensureNotConflicting(other.asInstanceOf[OrderingConstraint]) - - new OrderingConstraint( - merge(this.boundsMap, that.boundsMap, mergeEntries), - merge(this.lowerMap, that.lowerMap, mergeParams), - merge(this.upperMap, that.upperMap, mergeParams)) - }.showing(i"constraint merge $this with $other = $result", constr) + def hasConflictingTypeVarsFor(tl: TypeLambda, that: Constraint): Boolean = + contains(tl) && that.contains(tl) && + // Since TypeVars are allocated in bulk for each type lambda, we only have + // to check the first one to find out if some of them are different. + (this.typeVarOfParam(tl.paramRefs(0)) ne that.typeVarOfParam(tl.paramRefs(0))) - def rename(tl: TypeLambda)(using Context): OrderingConstraint = { - assert(contains(tl)) - val tl1 = ensureFresh(tl) - def swapKey[T](m: ArrayValuedMap[T]) = m.remove(tl).updated(tl1, m(tl)) + def subst(from: TypeLambda, to: TypeLambda)(using Context): OrderingConstraint = + def swapKey[T](m: ArrayValuedMap[T]) = m.remove(from).updated(to, m(from)) var current = newConstraint(swapKey(boundsMap), swapKey(lowerMap), swapKey(upperMap)) - def subst[T <: Type](x: T): T = x.subst(tl, tl1).asInstanceOf[T] + def subst[T <: Type](x: T): T = x.subst(from, to).asInstanceOf[T] current.foreachParam {(p, i) => current = boundsLens.map(this, current, p, i, subst) current = lowerLens.map(this, current, p, i, _.map(subst)) current = upperLens.map(this, current, p, i, _.map(subst)) } - current.foreachTypeVar { tvar => - val TypeParamRef(binder, n) = tvar.origin - if (binder eq tl) tvar.setOrigin(tl1.paramRefs(n)) - } constr.println(i"renamed $this to $current") current.checkNonCyclic() - } def instType(tvar: TypeVar): Type = entry(tvar.origin) match case _: TypeBounds => NoType @@ -547,6 +518,13 @@ class 
OrderingConstraint(private val boundsMap: ParamBounds, } else tl + def checkConsistentVars()(using Context): Unit = + for param <- domainParams do + typeVarOfParam(param) match + case tvar: TypeVar => + assert(tvar.origin == param, i"mismatch $tvar, $param") + case _ => + // ---------- Exploration -------------------------------------------------------- def domainLambdas: List[TypeLambda] = boundsMap.keys @@ -646,49 +624,10 @@ class OrderingConstraint(private val boundsMap: ParamBounds, upperMap.foreachBinding((_, paramss) => paramss.foreach(_.foreach(checkClosedType(_, "upper")))) end checkClosed -// ---------- toText ----------------------------------------------------- - - private def contentsToText(printer: Printer): Text = - //Printer.debugPrintUnique = true - def entryText(tp: Type) = tp match { - case tp: TypeBounds => - tp.toText(printer) - case _ => - " := " ~ tp.toText(printer) - } - val indent = 3 - val uninstVarsText = " uninstantiated variables: " ~ - Text(uninstVars.map(_.toText(printer)), ", ") - val constrainedText = - " constrained types: " ~ Text(domainLambdas map (_.toText(printer)), ", ") - val boundsText = - " bounds: " ~ { - val assocs = - for (param <- domainParams) - yield (" " * indent) ~ param.toText(printer) ~ entryText(entry(param)) - Text(assocs, "\n") - } - val orderingText = - " ordering: " ~ { - val deps = - for { - param <- domainParams - ups = minUpper(param) - if ups.nonEmpty - } - yield - (" " * indent) ~ param.toText(printer) ~ " <: " ~ - Text(ups.map(_.toText(printer)), ", ") - Text(deps, "\n") - } - //Printer.debugPrintUnique = false - Text.lines(List(uninstVarsText, constrainedText, boundsText, orderingText)) +// ---------- Printing ----------------------------------------------------- override def toText(printer: Printer): Text = - Text.lines(List("Constraint(", contentsToText(printer), ")")) - - def contentsToString(using Context): String = - contentsToText(ctx.printer).show + printer.toText(this) override def toString: 
String = { def entryText(tp: Type): String = tp match { @@ -697,7 +636,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, } val constrainedText = " constrained types = " + domainLambdas.mkString("\n") - val boundsText = domainLambdas + val boundsText = " bounds = " + { val assocs = for (param <- domainParams) diff --git a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala index 72914ebfee11..f928ad785e2c 100644 --- a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala +++ b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala @@ -73,17 +73,17 @@ trait PatternTypeConstrainer { self: TypeComparer => * scrutinee and pattern types. This does not apply if the pattern type is only applied to type variables, * in which case the subtyping relationship "heals" the type. */ - def constrainPatternType(pat: Type, scrut: Type, widenParams: Boolean = true): Boolean = trace(i"constrainPatternType($scrut, $pat)", gadts) { + def constrainPatternType(pat: Type, scrut: Type, forceInvariantRefinement: Boolean = false): Boolean = trace(i"constrainPatternType($scrut, $pat)", gadts) { def classesMayBeCompatible: Boolean = { import Flags._ - val patClassSym = pat.classSymbol - val scrutClassSym = scrut.classSymbol - !patClassSym.exists || !scrutClassSym.exists || { - if (patClassSym.is(Final)) patClassSym.derivesFrom(scrutClassSym) - else if (scrutClassSym.is(Final)) scrutClassSym.derivesFrom(patClassSym) - else if (!patClassSym.is(Flags.Trait) && !scrutClassSym.is(Flags.Trait)) - patClassSym.derivesFrom(scrutClassSym) || scrutClassSym.derivesFrom(patClassSym) + val patCls = pat.classSymbol + val scrCls = scrut.classSymbol + !patCls.exists || !scrCls.exists || { + if (patCls.is(Final)) patCls.derivesFrom(scrCls) + else if (scrCls.is(Final)) scrCls.derivesFrom(patCls) + else if (!patCls.is(Flags.Trait) && !scrCls.is(Flags.Trait)) + patCls.derivesFrom(scrCls) || 
scrCls.derivesFrom(patCls) else true } } @@ -93,6 +93,14 @@ trait PatternTypeConstrainer { self: TypeComparer => case tp => tp } + def tryConstrainSimplePatternType(pat: Type, scrut: Type) = { + val patCls = pat.classSymbol + val scrCls = scrut.classSymbol + patCls.exists && scrCls.exists + && (patCls.derivesFrom(scrCls) || scrCls.derivesFrom(patCls)) + && constrainSimplePatternType(pat, scrut, forceInvariantRefinement) + } + def constrainUpcasted(scrut: Type): Boolean = trace(i"constrainUpcasted($scrut)", gadts) { // Fold a list of types into an AndType def buildAndType(xs: List[Type]): Type = { @@ -113,7 +121,7 @@ trait PatternTypeConstrainer { self: TypeComparer => val andType = buildAndType(parents) !andType.exists || constrainPatternType(pat, andType) case scrut @ AppliedType(tycon: TypeRef, _) if tycon.symbol.isClass => - val patClassSym = pat.classSymbol + val patCls = pat.classSymbol // find all shared parents in the inheritance hierarchy between pat and scrut def allParentsSharedWithPat(tp: Type, tpClassSym: ClassSymbol): List[Symbol] = { var parents = tpClassSym.info.parents @@ -121,7 +129,7 @@ trait PatternTypeConstrainer { self: TypeComparer => parents = parents.tail parents flatMap { tp => val sym = tp.classSymbol.asClass - if patClassSym.derivesFrom(sym) then List(sym) + if patCls.derivesFrom(sym) then List(sym) else allParentsSharedWithPat(tp, sym) } } @@ -135,19 +143,31 @@ trait PatternTypeConstrainer { self: TypeComparer => case _ => NoType } if (upcasted.exists) - constrainSimplePatternType(pat, upcasted, widenParams) || constrainUpcasted(upcasted) + tryConstrainSimplePatternType(pat, upcasted) || constrainUpcasted(upcasted) else true } } - scrut.dealias match { + def dealiasDropNonmoduleRefs(tp: Type) = tp.dealias match { + case tp: TermRef => + // we drop TermRefs that don't have a class symbol, as they can't + // meaningfully participate in GADT reasoning and just get in the way. + // Their info could, for an example, be an AndType. 
One example where + // this is important is an enum case that extends its parent and an + // additional trait - argument-less enum cases desugar to vals. + // See run/enum-Tree.scala. + if tp.classSymbol.exists then tp else tp.info + case tp => tp + } + + dealiasDropNonmoduleRefs(scrut) match { case OrType(scrut1, scrut2) => either(constrainPatternType(pat, scrut1), constrainPatternType(pat, scrut2)) case AndType(scrut1, scrut2) => constrainPatternType(pat, scrut1) && constrainPatternType(pat, scrut2) case scrut: RefinedOrRecType => constrainPatternType(pat, stripRefinement(scrut)) - case scrut => pat.dealias match { + case scrut => dealiasDropNonmoduleRefs(pat) match { case OrType(pat1, pat2) => either(constrainPatternType(pat1, scrut), constrainPatternType(pat2, scrut)) case AndType(pat1, pat2) => @@ -155,22 +175,23 @@ trait PatternTypeConstrainer { self: TypeComparer => case pat: RefinedOrRecType => constrainPatternType(stripRefinement(pat), scrut) case pat => - constrainSimplePatternType(pat, scrut, widenParams) || classesMayBeCompatible && constrainUpcasted(scrut) + tryConstrainSimplePatternType(pat, scrut) + || classesMayBeCompatible && constrainUpcasted(scrut) } } } /** Constrain "simple" patterns (see `constrainPatternType`). * - * This function attempts to modify pattern and scrutinee type s.t. the pattern must be a subtype of the scrutinee, - * or otherwise it cannot possibly match. In order to do that, we: - * - * 1. Rely on `constrainPatternType` to break the actual scrutinee/pattern types into subcomponents - * 2. Widen type parameters of scrutinee type that are not invariantly refined (see below) by the pattern type. - * 3. Wrap the pattern type in a skolem to avoid overconstraining top-level abstract types in scrutinee type - * 4. Check that `WidenedScrutineeType <: NarrowedPatternType` + * This function expects to receive two types (scrutinee and pattern), both + * of which have class symbols, one of which is derived from another. 
If the + * type "being derived from" is an applied type, it will 1) "upcast" the + * deriving type to an applied type with the same constructor and 2) infer + * constraints for the applied types' arguments that follow from both + * types being inhabited by one value (the scrutinee). * - * Importantly, note that the pattern type may contain type variables. + * Importantly, note that the pattern type may contain type variables, which + * are used to infer type arguments to Unapply trees. * * ## Invariant refinement * Essentially, we say that `D[B] extends C[B]` s.t. refines parameter `A` of `trait C[A]` invariantly if @@ -194,8 +215,9 @@ trait PatternTypeConstrainer { self: TypeComparer => * case classes without also appropriately extending the relevant case class * (see `RefChecks#checkCaseClassInheritanceInvariant`). */ - def constrainSimplePatternType(patternTp: Type, scrutineeTp: Type, widenParams: Boolean): Boolean = { + def constrainSimplePatternType(patternTp: Type, scrutineeTp: Type, forceInvariantRefinement: Boolean): Boolean = { def refinementIsInvariant(tp: Type): Boolean = tp match { + case tp: SingletonType => true case tp: ClassInfo => tp.cls.is(Final) || tp.cls.is(Case) case tp: TypeProxy => refinementIsInvariant(tp.underlying) case _ => false @@ -211,13 +233,54 @@ trait PatternTypeConstrainer { self: TypeComparer => tp } - val widePt = - if migrateTo3 || refinementIsInvariant(patternTp) then scrutineeTp - else if widenParams then widenVariantParams(scrutineeTp) - else scrutineeTp - val narrowTp = SkolemType(patternTp) - trace(i"constraining simple pattern type $narrowTp <:< $widePt", gadts, res => s"$res\ngadt = ${ctx.gadt.debugBoundsDescription}") { - isSubType(narrowTp, widePt) + val patternCls = patternTp.classSymbol + val scrutineeCls = scrutineeTp.classSymbol + + // NOTE: we already know that there is a derives-from relationship in either direction + val upcastPattern = + patternCls.derivesFrom(scrutineeCls) + + val pt = if upcastPattern then 
patternTp.baseType(scrutineeCls) else patternTp + val tp = if !upcastPattern then scrutineeTp.baseType(patternCls) else scrutineeTp + + val assumeInvariantRefinement = + migrateTo3 || forceInvariantRefinement || refinementIsInvariant(patternTp) + + trace(i"constraining simple pattern type $tp >:< $pt", gadts, res => s"$res\ngadt = ${ctx.gadt.debugBoundsDescription}") { + (tp, pt) match { + case (AppliedType(tyconS, argsS), AppliedType(tyconP, argsP)) => + val saved = state.constraint + val savedGadt = ctx.gadt.fresh + val result = + tyconS.typeParams.lazyZip(argsS).lazyZip(argsP).forall { (param, argS, argP) => + val variance = param.paramVarianceSign + if variance != 0 && !assumeInvariantRefinement then true + else if argS.isInstanceOf[TypeBounds] || argP.isInstanceOf[TypeBounds] then + // This line was added here as a quick fix for issue #13998, + // to extract GADT constraints from wildcard type arguments. + // The proper fix would involve inspecting the bounds right here and performing the + // correct subtyping checks, the ones that are already performed by `isSubType` below, + // for the same reasons for which we stopped using `SkolemType` here to begin with + // (commit 10fe5374dc2d). + isSubType(SkolemType(patternTp), scrutineeTp) + else { + var res = true + if variance < 1 then res &&= isSubType(argS, argP) + if variance > -1 then res &&= isSubType(argP, argS) + res + } + } + if !result then + constraint = saved + ctx.gadt.restore(savedGadt) + result + case _ => + // Give up if we don't get AppliedType, e.g. if we upcasted to Any. + // Note that this doesn't mean that patternTp, scrutineeTp cannot possibly + // be co-inhabited, just that we cannot extract information out of them directly + // and should upcast. 
+ false + } } } } diff --git a/compiler/src/dotty/tools/dotc/core/Periods.scala b/compiler/src/dotty/tools/dotc/core/Periods.scala index d5944beedc2a..44d83dcb5278 100644 --- a/compiler/src/dotty/tools/dotc/core/Periods.scala +++ b/compiler/src/dotty/tools/dotc/core/Periods.scala @@ -126,18 +126,18 @@ object Periods { /** An ordinal number for compiler runs. First run has number 1. */ type RunId = Int - final val NoRunId = 0 - final val InitialRunId = 1 - final val RunWidth = java.lang.Integer.SIZE - PhaseWidth * 2 - 1/* sign */ - final val MaxPossibleRunId = (1 << RunWidth) - 1 + inline val NoRunId = 0 + inline val InitialRunId = 1 + inline val RunWidth = java.lang.Integer.SIZE - PhaseWidth * 2 - 1/* sign */ + inline val MaxPossibleRunId = (1 << RunWidth) - 1 /** An ordinal number for phases. First phase has number 1. */ type PhaseId = Int - final val NoPhaseId = 0 - final val FirstPhaseId = 1 + inline val NoPhaseId = 0 + inline val FirstPhaseId = 1 /** The number of bits needed to encode a phase identifier. 
*/ - final val PhaseWidth = 7 - final val PhaseMask = (1 << PhaseWidth) - 1 - final val MaxPossiblePhaseId = PhaseMask + inline val PhaseWidth = 7 + inline val PhaseMask = (1 << PhaseWidth) - 1 + inline val MaxPossiblePhaseId = PhaseMask } diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index 4646751192b4..623286d837b3 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -13,10 +13,12 @@ import scala.collection.mutable.ListBuffer import dotty.tools.dotc.transform.MegaPhase._ import dotty.tools.dotc.transform._ import Periods._ -import typer.{FrontEnd, RefChecks} +import parsing.{ Parser} +import typer.{TyperPhase, RefChecks} import typer.ImportInfo.withRootImports import ast.tpd import scala.annotation.internal.sharable +import scala.util.control.NonFatal object Phases { @@ -64,7 +66,6 @@ object Phases { YCheckAfter: List[String])(using Context): List[Phase] = { val fusedPhases = ListBuffer[Phase]() var prevPhases: Set[String] = Set.empty - val YCheckAll = YCheckAfter.contains("all") var stop = false @@ -106,7 +107,7 @@ object Phases { phase } fusedPhases += phaseToAdd - val shouldAddYCheck = YCheckAfter.containsPhase(phaseToAdd) || YCheckAll + val shouldAddYCheck = filteredPhases(i).exists(_.isCheckable) && YCheckAfter.containsPhase(phaseToAdd) if (shouldAddYCheck) { val checker = new TreeChecker fusedPhases += checker @@ -194,6 +195,7 @@ object Phases { config.println(s"nextDenotTransformerId = ${nextDenotTransformerId.toList}") } + private var myParserPhase: Phase = _ private var myTyperPhase: Phase = _ private var myPostTyperPhase: Phase = _ private var mySbtExtractDependenciesPhase: Phase = _ @@ -205,6 +207,7 @@ object Phases { private var myRefChecksPhase: Phase = _ private var myPatmatPhase: Phase = _ private var myElimRepeatedPhase: Phase = _ + private var myElimByNamePhase: Phase = _ private var myExtensionMethodsPhase: Phase = _ 
private var myExplicitOuterPhase: Phase = _ private var myGettersPhase: Phase = _ @@ -215,6 +218,7 @@ object Phases { private var myFlattenPhase: Phase = _ private var myGenBCodePhase: Phase = _ + final def parserPhase: Phase = myParserPhase final def typerPhase: Phase = myTyperPhase final def postTyperPhase: Phase = myPostTyperPhase final def sbtExtractDependenciesPhase: Phase = mySbtExtractDependenciesPhase @@ -226,6 +230,7 @@ object Phases { final def refchecksPhase: Phase = myRefChecksPhase final def patmatPhase: Phase = myPatmatPhase final def elimRepeatedPhase: Phase = myElimRepeatedPhase + final def elimByNamePhase: Phase = myElimByNamePhase final def extensionMethodsPhase: Phase = myExtensionMethodsPhase final def explicitOuterPhase: Phase = myExplicitOuterPhase final def gettersPhase: Phase = myGettersPhase @@ -239,7 +244,8 @@ object Phases { private def setSpecificPhases() = { def phaseOfClass(pclass: Class[?]) = phases.find(pclass.isInstance).getOrElse(NoPhase) - myTyperPhase = phaseOfClass(classOf[FrontEnd]) + myParserPhase = phaseOfClass(classOf[Parser]) + myTyperPhase = phaseOfClass(classOf[TyperPhase]) myPostTyperPhase = phaseOfClass(classOf[PostTyper]) mySbtExtractDependenciesPhase = phaseOfClass(classOf[sbt.ExtractDependencies]) myPicklerPhase = phaseOfClass(classOf[Pickler]) @@ -249,6 +255,7 @@ object Phases { myCollectNullableFieldsPhase = phaseOfClass(classOf[CollectNullableFields]) myRefChecksPhase = phaseOfClass(classOf[RefChecks]) myElimRepeatedPhase = phaseOfClass(classOf[ElimRepeated]) + myElimByNamePhase = phaseOfClass(classOf[ElimByName]) myExtensionMethodsPhase = phaseOfClass(classOf[ExtensionMethods]) myErasurePhase = phaseOfClass(classOf[Erasure]) myElimErasedValueTypePhase = phaseOfClass(classOf[ElimErasedValueType]) @@ -262,6 +269,7 @@ object Phases { } final def isAfterTyper(phase: Phase): Boolean = phase.id > typerPhase.id + final def isTyper(phase: Phase): Boolean = phase.id == typerPhase.id } abstract class Phase { @@ -290,7 
+298,7 @@ object Phases { /** If set, implicit search is enabled */ def allowsImplicitSearch: Boolean = false - /** List of names of phases that should precede this phase */ + /** List of names of phases that should precede this phase */ def runsAfter: Set[String] = Set.empty /** @pre `isRunnable` returns true */ @@ -313,8 +321,8 @@ object Phases { */ def checkPostCondition(tree: tpd.Tree)(using Context): Unit = () - /** Is this phase the standard typerphase? True for FrontEnd, but - * not for other first phases (such as FromTasty). The predicate + /** Is this phase the standard typerphase? True for TyperPhase, but + * not for other first phases (such as FromTasty or Parser). The predicate * is tested in some places that perform checks and corrections. It's * different from ctx.isAfterTyper (and cheaper to test). */ @@ -402,9 +410,17 @@ object Phases { final def iterator: Iterator[Phase] = Iterator.iterate(this)(_.next) takeWhile (_.hasNext) + final def monitor(doing: String)(body: => Unit)(using Context): Unit = + try body + catch + case NonFatal(ex) => + report.echo(s"exception occurred while $doing ${ctx.compilationUnit}") + throw ex + override def toString: String = phaseName } + def parserPhase(using Context): Phase = ctx.base.parserPhase def typerPhase(using Context): Phase = ctx.base.typerPhase def postTyperPhase(using Context): Phase = ctx.base.postTyperPhase def sbtExtractDependenciesPhase(using Context): Phase = ctx.base.sbtExtractDependenciesPhase @@ -414,6 +430,7 @@ object Phases { def firstTransformPhase(using Context): Phase = ctx.base.firstTransformPhase def refchecksPhase(using Context): Phase = ctx.base.refchecksPhase def elimRepeatedPhase(using Context): Phase = ctx.base.elimRepeatedPhase + def elimByNamePhase(using Context): Phase = ctx.base.elimByNamePhase def extensionMethodsPhase(using Context): Phase = ctx.base.extensionMethodsPhase def explicitOuterPhase(using Context): Phase = ctx.base.explicitOuterPhase def gettersPhase(using Context): 
Phase = ctx.base.gettersPhase diff --git a/compiler/src/dotty/tools/dotc/core/Scopes.scala b/compiler/src/dotty/tools/dotc/core/Scopes.scala index cb5f5c55e337..ecc61530601d 100644 --- a/compiler/src/dotty/tools/dotc/core/Scopes.scala +++ b/compiler/src/dotty/tools/dotc/core/Scopes.scala @@ -24,19 +24,19 @@ import collection.mutable object Scopes { /** Maximal fill factor of hash table */ - private final val FillFactor = 2.0/3.0 + private inline val FillFactor = 2.0/3.0 /** A hashtable is created once current size exceeds MinHash * FillFactor * The initial hash table has twice that size (i.e 16). * This value must be a power of two, so that the index of an element can * be computed as element.hashCode & (hashTable.length - 1) */ - final val MinHashedScopeSize = 8 + inline val MinHashedScopeSize = 8 /** The maximal permissible number of recursions when creating * a hashtable */ - private final val MaxRecursions = 1000 + private inline val MaxRecursions = 1000 /** A function that optionally produces synthesized symbols with * the given name in the given context. Returns `NoSymbol` if the @@ -75,7 +75,7 @@ object Scopes { */ def size: Int - /** The number of outer scopes from which symbols are inherited */ + /** The number of scopes enclosing this scope. */ def nestingLevel: Int /** The symbols in this scope in the order they were entered; @@ -193,7 +193,7 @@ object Scopes { * This is necessary because when run from reflection every scope needs to have a * SynchronizedScope as mixin. 
*/ - class MutableScope protected[Scopes](initElems: ScopeEntry, initSize: Int, val nestingLevel: Int = 0) + class MutableScope protected[Scopes](initElems: ScopeEntry, initSize: Int, val nestingLevel: Int) extends Scope { /** Scope shares elements with `base` */ @@ -201,7 +201,7 @@ object Scopes { this(base.lastEntry, base.size, base.nestingLevel + 1) ensureCapacity(MinHashedScopeSize) - def this() = this(null, 0, 0) + def this(nestingLevel: Int) = this(null, 0, nestingLevel) private[dotc] var lastEntry: ScopeEntry = initElems @@ -225,7 +225,7 @@ object Scopes { /** Use specified synthesize for this scope */ def useSynthesizer(s: SymbolSynthesizer): Unit = synthesize = s - protected def newScopeLikeThis(): MutableScope = new MutableScope() + protected def newScopeLikeThis(): MutableScope = new MutableScope(nestingLevel) /** Clone scope, taking care not to force the denotations of any symbols in the scope. */ @@ -257,10 +257,6 @@ object Scopes { e } - /** create and enter a scope entry */ - protected def newScopeEntry(sym: Symbol)(using Context): ScopeEntry = - newScopeEntry(sym.name, sym) - private def enterInHash(e: ScopeEntry)(using Context): Unit = { val idx = e.name.hashCode & (hashTable.length - 1) e.tail = hashTable(idx) @@ -273,7 +269,11 @@ object Scopes { if (sym.isType && ctx.phaseId <= typerPhase.id) assert(lookup(sym.name) == NoSymbol, s"duplicate ${sym.debugString}; previous was ${lookup(sym.name).debugString}") // !!! 
DEBUG - newScopeEntry(sym) + enter(sym.name, sym) + } + + final def enter[T <: Symbol](name: Name, sym: T)(using Context): T = { + newScopeEntry(name, sym) sym } @@ -375,7 +375,7 @@ object Scopes { } if ((e eq null) && (synthesize != null)) { val sym = synthesize(name) - if (sym.exists) newScopeEntry(sym) else e + if (sym.exists) newScopeEntry(sym.name, sym) else e } else e } @@ -411,7 +411,7 @@ object Scopes { var irefs = new mutable.ListBuffer[TermRef] var e = lastEntry while (e ne null) { - if (e.sym.isOneOf(GivenOrImplicit)) { + if (e.sym.isOneOf(GivenOrImplicitVal)) { val d = e.sym.denot irefs += TermRef(NoPrefix, d.symbol.asTerm).withDenot(d) } @@ -440,7 +440,10 @@ object Scopes { } /** Create a new scope */ - def newScope: MutableScope = new MutableScope() + def newScope(using Context): MutableScope = + new MutableScope(ctx.nestingLevel + 1) + + def newScope(nestingLevel: Int): MutableScope = new MutableScope(nestingLevel) /** Create a new scope nested in another one with which it shares its elements */ def newNestedScope(outer: Scope)(using Context): MutableScope = new MutableScope(outer) @@ -468,8 +471,4 @@ object Scopes { override def lookupEntry(name: Name)(using Context): ScopeEntry = null override def lookupNextEntry(entry: ScopeEntry)(using Context): ScopeEntry = null } - - /** A class for error scopes (mutable) - */ - class ErrorScope(owner: Symbol) extends MutableScope } diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 076010399651..f8c70176482c 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -13,33 +13,33 @@ object StdNames { /** Base strings from which synthetic names are derived. 
*/ object str { - final val SETTER_SUFFIX = "_=" - final val EXPAND_SEPARATOR = "$$" - final val TRAIT_SETTER_SEPARATOR = "$_setter_$" - final val SUPER_PREFIX = "super$" - final val INITIALIZER_PREFIX = "initial$" - final val AVOID_CLASH_SUFFIX = "$_avoid_name_clash_$" - final val MODULE_SUFFIX = "$" - final val TOPLEVEL_SUFFIX = "$package" - final val NAME_JOIN = "$" - final val DEFAULT_GETTER = "$default$" - final val LOCALDUMMY_PREFIX = "]""", """\$""") } @@ -210,29 +210,38 @@ object StdNames { final val IOOBException: N = "IndexOutOfBoundsException" final val FunctionXXL: N = "FunctionXXL" - final val Abs: N = "Abs" - final val And: N = "&&" - final val BitwiseAnd: N = "BitwiseAnd" - final val BitwiseOr: N = "BitwiseOr" - final val Div: N = "/" - final val Equals: N = "==" - final val Ge: N = ">=" - final val Gt: N = ">" - final val Le: N = "<=" - final val Lt: N = "<" - final val Max: N = "Max" - final val Min: N = "Min" - final val Minus: N = "-" - final val Mod: N = "%" - final val Negate: N = "Negate" - final val Not: N = "!" - final val NotEquals: N = "!=" - final val Or: N = "||" - final val Plus: N = "+" - final val S: N = "S" - final val Times: N = "*" - final val ToString: N = "ToString" - final val Xor: N = "^" + final val Abs: N = "Abs" + final val And: N = "&&" + final val BitwiseAnd: N = "BitwiseAnd" + final val BitwiseOr: N = "BitwiseOr" + final val Div: N = "/" + final val Equals: N = "==" + final val Ge: N = ">=" + final val Gt: N = ">" + final val IsConst: N = "IsConst" + final val Le: N = "<=" + final val Length: N = "Length" + final val Lt: N = "<" + final val Matches: N = "Matches" + final val Max: N = "Max" + final val Min: N = "Min" + final val Minus: N = "-" + final val Mod: N = "%" + final val Negate: N = "Negate" + final val Not: N = "!" 
+ final val NotEquals: N = "!=" + final val NumberOfLeadingZeros: N = "NumberOfLeadingZeros" + final val Or: N = "||" + final val Plus: N = "+" + final val S: N = "S" + final val Substring: N = "Substring" + final val Times: N = "*" + final val ToInt: N = "ToInt" + final val ToLong: N = "ToLong" + final val ToFloat: N = "ToFloat" + final val ToDouble: N = "ToDouble" + final val ToString: N = "ToString" + final val Xor: N = "^" final val ClassfileAnnotation: N = "ClassfileAnnotation" final val ClassManifest: N = "ClassManifest" @@ -304,6 +313,7 @@ object StdNames { val SPECIALIZED_INSTANCE: N = "specInstance$" val THIS: N = "_$this" val TRAIT_CONSTRUCTOR: N = "$init$" + val THROWS: N = "$throws" val U2EVT: N = "u2evt$" val ALLARGS: N = "$allArgs" @@ -366,7 +376,6 @@ object StdNames { val EnumValue: N = "EnumValue" val ExistentialTypeTree: N = "ExistentialTypeTree" val Flag : N = "Flag" - val floatHash: N = "floatHash" val Ident: N = "Ident" val Import: N = "Import" val Literal: N = "Literal" @@ -413,6 +422,7 @@ object StdNames { val argv : N = "argv" val arrayClass: N = "arrayClass" val arrayElementClass: N = "arrayElementClass" + val arrayType: N = "arrayType" val arrayValue: N = "arrayValue" val array_apply : N = "array_apply" val array_clone : N = "array_clone" @@ -435,10 +445,10 @@ object StdNames { val bytes: N = "bytes" val canEqual_ : N = "canEqual" val canEqualAny : N = "canEqualAny" - val cbnArg: N = "" val checkInitialized: N = "checkInitialized" val ClassManifestFactory: N = "ClassManifestFactory" val classOf: N = "classOf" + val classType: N = "classType" val clone_ : N = "clone" val common: N = "common" val compiletime : N = "compiletime" @@ -480,6 +490,7 @@ object StdNames { val find_ : N = "find" val flagsFromBits : N = "flagsFromBits" val flatMap: N = "flatMap" + val floatHash: N = "floatHash" val foreach: N = "foreach" val format: N = "format" val fromDigits: N = "fromDigits" @@ -599,6 +610,7 @@ object StdNames { val this_ : N = "this" val 
thisPrefix : N = "thisPrefix" val throw_ : N = "throw" + val throws: N = "throws" val toArray: N = "toArray" val toList: N = "toList" val toObjectArray : N = "toObjectArray" @@ -625,6 +637,7 @@ object StdNames { val values: N = "values" val view_ : N = "view" val wait_ : N = "wait" + val wildcardType: N = "wildcardType" val withFilter: N = "withFilter" val withFilterIfRefutable: N = "withFilterIfRefutable$" val WorksheetWrapper: N = "WorksheetWrapper" @@ -660,6 +673,12 @@ object StdNames { final val STAR : N = "*" final val TILDE: N = "~" + // kind-projector compat symbols + final val MINUS_STAR : N = "-*" + final val PLUS_STAR : N = "+*" + final val MINUS_USCORE: N = "-_" + final val PLUS_USCORE : N = "+_" + final val isUnary: Set[Name] = Set(MINUS, PLUS, TILDE, BANG) } diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 2d71bd350b3b..2f7e3debfa6f 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -93,10 +93,11 @@ object SymDenotations { if (myFlags.is(Trait)) NoInitsInterface & bodyFlags // no parents are initialized from a trait else NoInits & bodyFlags & parentFlags) - private def isCurrent(fs: FlagSet) = - fs <= ( - if (myInfo.isInstanceOf[SymbolLoader]) FromStartFlags - else AfterLoadFlags) + def isCurrent(fs: FlagSet)(using Context): Boolean = + def knownFlags(info: Type): FlagSet = info match + case _: SymbolLoader | _: ModuleCompleter => FromStartFlags + case _ => AfterLoadFlags + !myInfo.isInstanceOf[LazyType] || fs <= knownFlags(myInfo) final def relevantFlagsFor(fs: FlagSet)(using Context) = if (isCurrent(fs)) myFlags else flags @@ -407,7 +408,7 @@ object SymDenotations { * @param tparams The type parameters with which the right-hand side bounds should be abstracted * */ - def opaqueToBounds(info: Type, rhs: tpd.Tree, tparams: List[TypeParamInfo])(using Context): Type = + def 
opaqueToBounds(info: Type, rhs: tpd.Tree, tparams: List[TypeSymbol])(using Context): Type = def setAlias(tp: Type) = def recur(self: Type): Unit = self match @@ -432,8 +433,8 @@ object SymDenotations { TypeBounds.empty info match - case TypeAlias(alias) if isOpaqueAlias && owner.isClass => - setAlias(alias) + case info: AliasingBounds if isOpaqueAlias && owner.isClass => + setAlias(info.alias) HKTypeLambda.boundsFromParams(tparams, bounds(rhs)) case _ => info @@ -573,7 +574,7 @@ object SymDenotations { case _ => // Otherwise, no completion is necessary, see the preconditions of `markAbsent()`. (myInfo `eq` NoType) - || is(Invisible) && !ctx.isAfterTyper + || is(Invisible) && ctx.isTyper || is(ModuleVal, butNot = Package) && moduleClass.isAbsent(canForce) } @@ -711,6 +712,19 @@ object SymDenotations { } ) + /** Do this symbol and `cls` represent a pair of a given or implicit method and + * its associated class that were defined by a single definition? + * This can mean one of two things: + * - the method and class are defined in a structural given instance, or + * - the class is an implicit class and the method is its implicit conversion. + */ + final def isCoDefinedGiven(cls: Symbol)(using Context): Boolean = + is(Method) && isOneOf(GivenOrImplicit) + && ( is(Synthetic) // previous scheme used in 3.0 + || cls.isOneOf(GivenOrImplicit) // new scheme from 3.1 + ) + && name == cls.name.toTermName && owner == cls.owner + /** Is this a denotation of a stable term (or an arbitrary type)? * Terms are stable if they are idempotent (as in TreeInfo.Idempotent): that is, they always return the same value, * if any. 
@@ -916,7 +930,7 @@ object SymDenotations { def hasDefaultParams(using Context): Boolean = if ctx.erasedTypes then false else if is(HasDefaultParams) then true - else if is(NoDefaultParams) then false + else if is(NoDefaultParams) || !is(Method) then false else val result = rawParamss.nestedExists(_.is(HasDefault)) @@ -1096,6 +1110,11 @@ object SymDenotations { enclClass(symbol, false) } + /** Skips symbol that are not owned by a class */ + def skipLocalOwners(using Context): Symbol = + if symbol.owner.isClass then symbol + else symbol.owner.skipLocalOwners + /** A class that in source code would be lexically enclosing */ final def lexicallyEnclosingClass(using Context): Symbol = if (!exists || isClass) symbol else owner.lexicallyEnclosingClass @@ -1140,11 +1159,10 @@ object SymDenotations { else NoSymbol /** The closest enclosing extension method containing this definition, - * provided the extension method appears in the same class. + * including methods outside the current class. */ final def enclosingExtensionMethod(using Context): Symbol = if this.is(ExtensionMethod) then symbol - else if this.isClass then NoSymbol else if this.exists then owner.enclosingExtensionMethod else NoSymbol @@ -1180,11 +1198,13 @@ object SymDenotations { */ final def companionModule(using Context): Symbol = if (is(Module)) sourceModule + else if registeredCompanion.isAbsent() then NoSymbol else registeredCompanion.sourceModule private def companionType(using Context): Symbol = if (is(Package)) NoSymbol else if (is(ModuleVal)) moduleClass.denot.companionType + else if registeredCompanion.isAbsent() then NoSymbol else registeredCompanion /** The class with the same (type-) name as this module or module class, @@ -1457,14 +1477,6 @@ object SymDenotations { else if is(Contravariant) then Contravariant else EmptyFlags - /** The length of the owner chain of this symbol. 
1 for _root_, 0 for NoSymbol */ - def nestingLevel(using Context): Int = - @tailrec def recur(d: SymDenotation, n: Int): Int = d match - case NoDenotation => n - case d: ClassDenotation => d.nestingLevel + n // profit from the cache in ClassDenotation - case _ => recur(d.owner, n + 1) - recur(this, 0) - /** The flags to be used for a type parameter owned by this symbol. * Overridden by ClassDenotation. */ @@ -1591,10 +1603,10 @@ object SymDenotations { // children that are defined in the same file as their parents. def maybeChild(sym: Symbol) = (sym.isClass && !this.is(JavaDefined) || sym.originDenotation.is(EnumVal)) - && !owner.is(Package) + && (!owner.is(Package) || sym.originDenotation.infoOrCompleter.match case _: SymbolLoaders.SecondCompleter => sym.associatedFile == this.symbol.associatedFile - case _ => false + case _ => false) if owner.isClass then for c <- owner.info.decls.toList if maybeChild(c) do @@ -1612,6 +1624,66 @@ object SymDenotations { annotations.collect { case Annotation.Child(child) => child }.reverse end children + + /** Recursively assemble all children of this symbol, Preserves order of insertion. + */ + final def sealedStrictDescendants(using Context): List[Symbol] = + + @tailrec + def findLvlN( + explore: mutable.ArrayDeque[Symbol], + seen: util.HashSet[Symbol], + acc: mutable.ListBuffer[Symbol] + ): List[Symbol] = + if explore.isEmpty then + acc.toList + else + val sym = explore.head + val explore1 = explore.dropInPlace(1) + val lvlN = sym.children + val notSeen = lvlN.filterConserve(!seen.contains(_)) + if notSeen.isEmpty then + findLvlN(explore1, seen, acc) + else + findLvlN(explore1 ++= notSeen, {seen ++= notSeen; seen}, acc ++= notSeen) + end findLvlN + + /** Scans through `explore` to see if there are recursive children. + * If a symbol in `explore` has children that are not contained in + * `lvl1`, fallback to `findLvlN`, or else return `lvl1`. 
+ */ + @tailrec + def findLvl2( + lvl1: List[Symbol], explore: List[Symbol], seenOrNull: util.HashSet[Symbol] | Null + ): List[Symbol] = explore match + case sym :: explore1 => + val lvl2 = sym.children + if lvl2.isEmpty then // no children, scan rest of explore1 + findLvl2(lvl1, explore1, seenOrNull) + else // check if we have seen the children before + val seen = // initialise the seen set if not already + if seenOrNull != null then seenOrNull + else util.HashSet.from(lvl1) + val notSeen = lvl2.filterConserve(!seen.contains(_)) + if notSeen.isEmpty then // we found children, but we had already seen them, scan the rest of explore1 + findLvl2(lvl1, explore1, seen) + else // found unseen recursive children, we should fallback to the loop + findLvlN( + explore = mutable.ArrayDeque.from(explore1).appendAll(notSeen), + seen = {seen ++= notSeen; seen}, + acc = mutable.ListBuffer.from(lvl1).appendAll(notSeen) + ) + case nil => + lvl1 + end findLvl2 + + val lvl1 = children + findLvl2(lvl1, lvl1, seenOrNull = null) + end sealedStrictDescendants + + /** Same as `sealedStrictDescendants` but prepends this symbol as well. + */ + final def sealedDescendants(using Context): List[Symbol] = this.symbol :: sealedStrictDescendants } /** The contents of a class definition during a period @@ -1804,6 +1876,11 @@ object SymDenotations { def baseClasses(implicit onBehalf: BaseData, ctx: Context): List[ClassSymbol] = baseData._1 + /** Like `baseClasses.length` but more efficient. 
*/ + def baseClassesLength(using BaseData, Context): Int = + // `+ 1` because the baseClassSet does not include the current class unlike baseClasses + baseClassSet.classIds.length + 1 + /** A bitset that contains the superId's of all base classes */ private def baseClassSet(implicit onBehalf: BaseData, ctx: Context): BaseClassSet = baseData._2 @@ -1982,7 +2059,10 @@ object SymDenotations { override final def findMember(name: Name, pre: Type, required: FlagSet, excluded: FlagSet)(using Context): Denotation = val raw = if excluded.is(Private) then nonPrivateMembersNamed(name) else membersNamed(name) - raw.filterWithFlags(required, excluded).asSeenFrom(pre).toDenot(pre) + val pre1 = pre match + case pre: OrType => pre.widenUnion + case _ => pre + raw.filterWithFlags(required, excluded).asSeenFrom(pre1).toDenot(pre1) final def findMemberNoShadowingBasedOnFlags(name: Name, pre: Type, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags)(using Context): Denotation = @@ -2162,7 +2242,7 @@ object SymDenotations { if (keepOnly eq implicitFilter) if (this.is(Package)) Iterator.empty // implicits in package objects are added by the overriding `memberNames` in `PackageClassDenotation` - else info.decls.iterator.filter(_.isOneOf(GivenOrImplicit)) + else info.decls.iterator.filter(_.isOneOf(GivenOrImplicitVal)) else info.decls.iterator for (sym <- ownSyms) maybeAdd(sym.name) names @@ -2235,12 +2315,6 @@ object SymDenotations { override def registeredCompanion_=(c: Symbol) = myCompanion = c - - private var myNestingLevel = -1 - - override def nestingLevel(using Context) = - if myNestingLevel == -1 then myNestingLevel = owner.nestingLevel + 1 - myNestingLevel } /** The denotation of a package class. 
@@ -2479,7 +2553,7 @@ object SymDenotations { } private[SymDenotations] def stillValidInOwner(denot: SymDenotation)(using Context): Boolean = try - val owner = denot.owner.denot + val owner = denot.maybeOwner.denot stillValid(owner) && ( !owner.isClass diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 26e6e659779e..83198ab65857 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -221,7 +221,7 @@ object SymbolLoaders { /** The scope of a package. This is different from a normal scope * in that names of scope entries are kept in mangled form. */ - final class PackageScope extends MutableScope { + final class PackageScope extends MutableScope(0) { override def newScopeEntry(name: Name, sym: Symbol)(using Context): ScopeEntry = super.newScopeEntry(name.mangled, sym) diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index d8e107852947..7f0969d55f07 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -46,7 +46,7 @@ object Symbols { * @param coord The coordinates of the symbol (a position or an index) * @param id A unique identifier of the symbol (unique per ContextBase) */ - class Symbol private[Symbols] (private var myCoord: Coord, val id: Int) + class Symbol private[Symbols] (private var myCoord: Coord, val id: Int, val nestingLevel: Int) extends Designator, ParamInfo, SrcPos, printing.Showable { type ThisName <: Name @@ -206,7 +206,7 @@ object Symbols { } /** Enter this symbol in its class owner after given `phase`. Create a fresh - * denotation for its owner class if the class has not yet already one + * denotation for its owner class if the class does not already have one * that starts being valid after `phase`. 
* @pre Symbol is a class member */ @@ -232,7 +232,7 @@ object Symbols { } /** Remove symbol from scope of owning class after given `phase`. Create a fresh - * denotation for its owner class if the class has not yet already one that starts being valid after `phase`. + * denotation for its owner class if the class does not already have one that starts being valid after `phase`. * @pre Symbol is a class member */ def dropAfter(phase: DenotTransformer)(using Context): Unit = @@ -368,8 +368,8 @@ object Symbols { type TermSymbol = Symbol { type ThisName = TermName } type TypeSymbol = Symbol { type ThisName = TypeName } - class ClassSymbol private[Symbols] (coord: Coord, val assocFile: AbstractFile, id: Int) - extends Symbol(coord, id) { + class ClassSymbol private[Symbols] (coord: Coord, val assocFile: AbstractFile, id: Int, nestingLevel: Int) + extends Symbol(coord, id, nestingLevel) { type ThisName = TypeName @@ -459,7 +459,7 @@ object Symbols { override protected def prefixString: String = "ClassSymbol" } - @sharable object NoSymbol extends Symbol(NoCoord, 0) { + @sharable object NoSymbol extends Symbol(NoCoord, 0, 0) { override def associatedFile(using Context): AbstractFile = NoSource.file override def recomputeDenot(lastd: SymDenotation)(using Context): SymDenotation = NoDenotation } @@ -506,21 +506,6 @@ object Symbols { def MutableSymbolMap[T](): EqHashMap[Symbol, T] = EqHashMap[Symbol, T]() def MutableSymbolMap[T](initialCapacity: Int): EqHashMap[Symbol, T] = EqHashMap[Symbol, T](initialCapacity) -// ---- Factory methods for symbol creation ---------------------- -// -// All symbol creations should be done via the next two methods. - - /** Create a symbol without a denotation. - * Note this uses a cast instead of a direct type refinement because - * it's debug-friendlier not to create an anonymous class here. 
- */ - def newNakedSymbol[N <: Name](coord: Coord = NoCoord)(using Context): Symbol { type ThisName = N } = - new Symbol(coord, ctx.base.nextSymId).asInstanceOf[Symbol { type ThisName = N }] - - /** Create a class symbol without a denotation. */ - def newNakedClassSymbol(coord: Coord = NoCoord, assocFile: AbstractFile = null)(using Context): ClassSymbol = - new ClassSymbol(coord, assocFile, ctx.base.nextSymId) - // ---- Symbol creation methods ---------------------------------- /** Create a symbol from its fields (info may be lazy) */ @@ -531,20 +516,12 @@ object Symbols { info: Type, privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord)(using Context): Symbol { type ThisName = N } = { - val sym = newNakedSymbol[N](coord) + val sym = new Symbol(coord, ctx.base.nextSymId, ctx.nestingLevel).asInstanceOf[Symbol { type ThisName = N }] val denot = SymDenotation(sym, owner, name, flags, info, privateWithin) sym.denot = denot sym } - /** Create a class symbol from a function producing its denotation */ - def newClassSymbolDenoting(denotFn: ClassSymbol => SymDenotation, - coord: Coord = NoCoord, assocFile: AbstractFile = null)(using Context): ClassSymbol = { - val cls = newNakedClassSymbol(coord, assocFile) - cls.denot = denotFn(cls) - cls - } - /** Create a class symbol from its non-info fields and a function * producing its info (the produced info may be lazy). 
*/ @@ -557,7 +534,7 @@ object Symbols { coord: Coord = NoCoord, assocFile: AbstractFile = null)(using Context): ClassSymbol = { - val cls = newNakedClassSymbol(coord, assocFile) + val cls = new ClassSymbol(coord, assocFile, ctx.base.nextSymId, ctx.nestingLevel) val denot = SymDenotation(cls, owner, name, flags, infoFn(cls), privateWithin) cls.denot = denot cls @@ -569,7 +546,7 @@ object Symbols { name: TypeName, flags: FlagSet, parents: List[TypeRef], - decls: Scope = newScope, + decls: Scope, selfInfo: Type = NoType, privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord, @@ -587,7 +564,7 @@ object Symbols { name: TypeName, flags: FlagSet, parentTypes: List[Type], - decls: Scope = newScope, + decls: Scope, selfInfo: Type = NoType, privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord, @@ -603,7 +580,7 @@ object Symbols { } def newRefinedClassSymbol(coord: Coord = NoCoord)(using Context): ClassSymbol = - newCompleteClassSymbol(ctx.owner, tpnme.REFINE_CLASS, NonMember, parents = Nil, coord = coord) + newCompleteClassSymbol(ctx.owner, tpnme.REFINE_CLASS, NonMember, parents = Nil, newScope, coord = coord) /** Create a module symbol with associated module class * from its non-info fields and a function producing the info @@ -620,19 +597,15 @@ object Symbols { assocFile: AbstractFile = null)(using Context): TermSymbol = { val base = owner.thisType - val module = newNakedSymbol[TermName](coord) - val modcls = newNakedClassSymbol(coord, assocFile) val modclsFlags = clsFlags | ModuleClassCreationFlags val modclsName = name.toTypeName.adjustIfModuleClass(modclsFlags) - val cdenot = SymDenotation( - modcls, owner, modclsName, modclsFlags, - infoFn(module, modcls), privateWithin) - val mdenot = SymDenotation( - module, owner, name, modFlags | ModuleValCreationFlags, - if (cdenot.isCompleted) TypeRef(owner.thisType, modcls) - else new ModuleCompleter(modcls)) - module.denot = mdenot - modcls.denot = cdenot + val module = newSymbol( + owner, name, modFlags | 
ModuleValCreationFlags, NoCompleter, privateWithin, coord) + val modcls = newClassSymbol( + owner, modclsName, modclsFlags, infoFn(module, _), privateWithin, coord, assocFile) + module.info = + if (modcls.isCompleted) TypeRef(owner.thisType, modcls) + else new ModuleCompleter(modcls) module } @@ -673,7 +646,7 @@ object Symbols { name: TermName, modFlags: FlagSet = EmptyFlags, clsFlags: FlagSet = EmptyFlags, - decls: Scope = newScope)(using Context): TermSymbol = + decls: Scope = newScope(0))(using Context): TermSymbol = newCompleteModuleSymbol( owner, name, modFlags | PackageCreationFlags, clsFlags | PackageCreationFlags, @@ -739,6 +712,10 @@ object Symbols { coord: Coord = NoCoord)(using Context): TermSymbol = newSymbol(cls, nme.CONSTRUCTOR, flags | Method, MethodType(paramNames, paramTypes, cls.typeRef), privateWithin, coord) + /** Create an anonymous function symbol */ + def newAnonFun(owner: Symbol, info: Type, coord: Coord = NoCoord)(using Context): TermSymbol = + newSymbol(owner, nme.ANON_FUN, Synthetic | Method, info, coord = coord) + /** Create an empty default constructor symbol for given class `cls`. 
*/ def newDefaultConstructor(cls: ClassSymbol)(using Context): TermSymbol = newConstructor(cls, EmptyFlags, Nil, Nil) @@ -802,12 +779,13 @@ object Symbols { originals else { val copies: List[Symbol] = for (original <- originals) yield - original match { - case original: ClassSymbol => - newNakedClassSymbol(original.coord, original.assocFile) - case _ => - newNakedSymbol[original.ThisName](original.coord) - } + val odenot = original.denot + original.copy( + owner = ttmap.mapOwner(odenot.owner), + flags = odenot.flags &~ Touched, + info = NoCompleter, + privateWithin = ttmap.mapOwner(odenot.privateWithin), + coord = original.coord) val ttmap1 = ttmap.withSubstitution(originals, copies) originals.lazyZip(copies) foreach { (original, copy) => val odenot = original.denot @@ -818,9 +796,10 @@ object Symbols { val oinfo = original.info match case ClassInfo(pre, _, parents, decls, selfInfo) => assert(original.isClass) + val parents1 = parents.mapConserve(ttmap.mapType) val otypeParams = original.typeParams if otypeParams.isEmpty then - ClassInfo(pre, copy.asClass, parents, decls.cloneScope, selfInfo) + ClassInfo(pre, copy.asClass, parents1, decls.cloneScope, selfInfo) else // copy type params, enter other definitions unchanged // type parameters need to be copied early, since other type @@ -829,11 +808,11 @@ object Symbols { val newTypeParams = mapSymbols(original.typeParams, ttmap1, mapAlways = true) newTypeParams.foreach(decls1.enter) for sym <- decls do if !sym.is(TypeParam) then decls1.enter(sym) - val parents1 = parents.map(_.substSym(otypeParams, newTypeParams)) + val parents2 = parents1.map(_.substSym(otypeParams, newTypeParams)) val selfInfo1 = selfInfo match case selfInfo: Type => selfInfo.substSym(otypeParams, newTypeParams) case _ => selfInfo - ClassInfo(pre, copy.asClass, parents1, decls1, selfInfo1) + ClassInfo(pre, copy.asClass, parents2, decls1, selfInfo1) case oinfo => oinfo denot.info = oinfo // needed as otherwise we won't be able to go from Sym -> 
parents & etc @@ -844,13 +823,7 @@ object Symbols { end completer - copy.denot = odenot.copySymDenotation( - symbol = copy, - owner = ttmap1.mapOwner(odenot.owner), - initFlags = odenot.flags &~ Touched, - info = completer, - privateWithin = ttmap1.mapOwner(odenot.privateWithin), // since this refers to outer symbols, need not include copies (from->to) in ownermap here. - annotations = odenot.annotations) + copy.info = completer copy.denot match case cd: ClassDenotation => cd.registeredCompanion = cd.unforcedRegisteredCompanion.subst(originals, copies) diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index 4e360224a18f..95ad0b95b335 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -53,7 +53,7 @@ object TypeApplications { || { val paramRefs = tparams.map(_.paramRef) tp.typeParams.corresponds(tparams) { (param1, param2) => - param2.paramInfo <:< param1.paramInfo.substParams(tp, paramRefs) + param2.paramInfo frozen_<:< param1.paramInfo.substParams(tp, paramRefs) } } @@ -231,6 +231,16 @@ class TypeApplications(val self: Type) extends AnyVal { (alias ne self) && alias.hasSimpleKind } + /** The top type with the same kind as `self`. */ + def topType(using Context): Type = + if self.hasSimpleKind then + defn.AnyType + else EtaExpand(self.typeParams) match + case tp: HKTypeLambda => + tp.derivedLambdaType(resType = tp.resultType.topType) + case _ => + defn.AnyKindType + /** If self type is higher-kinded, its result type, otherwise NoType. * Note: The hkResult of an any-kinded type is again AnyKind. 
*/ @@ -466,7 +476,8 @@ class TypeApplications(val self: Type) extends AnyVal { def translateJavaArrayElementType(using Context): Type = // A type parameter upper-bounded solely by `FromJavaObject` has `ObjectClass` as its classSymbol if self.typeSymbol.isAbstractOrParamType && (self.classSymbol eq defn.ObjectClass) then - AndType(self, defn.ObjectType) + // The order is important here since Java intersections erase to their first operand + AndType(defn.ObjectType, self) else self diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index 62b06aea39a7..8b4eab685f2a 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -4,7 +4,7 @@ package core import Types._, Contexts._, Symbols._, Flags._, Names._, NameOps._, Denotations._ import Decorators._ -import Phases.gettersPhase +import Phases.{gettersPhase, elimByNamePhase} import StdNames.nme import TypeOps.refineUsingParent import collection.mutable @@ -63,8 +63,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling private var myInstance: TypeComparer = this def currentInstance: TypeComparer = myInstance - private var useNecessaryEither = false - /** Is a subtype check in progress? 
In that case we may not * permanently instantiate type variables, because the corresponding * constraint might still be retracted and the instantiation should @@ -134,10 +132,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling } def necessarySubType(tp1: Type, tp2: Type): Boolean = - val saved = useNecessaryEither - useNecessaryEither = true + val saved = myNecessaryConstraintsOnly + myNecessaryConstraintsOnly = true try topLevelSubType(tp1, tp2) - finally useNecessaryEither = saved + finally myNecessaryConstraintsOnly = saved def testSubType(tp1: Type, tp2: Type): CompareResult = GADTused = false @@ -162,13 +160,24 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling private inline def inFrozenGadtIf[T](cond: Boolean)(inline op: T): T = { val savedFrozenGadt = frozenGadt - frozenGadt = cond + frozenGadt ||= cond try op finally frozenGadt = savedFrozenGadt } private inline def inFrozenGadtAndConstraint[T](inline op: T): T = inFrozenGadtIf(true)(inFrozenConstraint(op)) + extension (sym: Symbol) + private inline def onGadtBounds(inline op: TypeBounds => Boolean): Boolean = + val bounds = gadtBounds(sym) + bounds != null && op(bounds) + + private inline def comparingTypeLambdas(tl1: TypeLambda, tl2: TypeLambda)(op: => Boolean): Boolean = + val saved = comparedTypeLambdas + comparedTypeLambdas += tl1 + comparedTypeLambdas += tl2 + try op finally comparedTypeLambdas = saved + protected def isSubType(tp1: Type, tp2: Type, a: ApproxState): Boolean = { val savedApprox = approx val savedLeftRoot = leftRoot @@ -201,7 +210,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * types (as when we go from an abstract type to one of its bounds). In that case * one should use `isSubType(_, _, a)` where `a` defines the kind of approximation. 
* - * Note: Logicaly, `recur` could be nested in `isSubType`, which would avoid + * Note: Logically, `recur` could be nested in `isSubType`, which would avoid * the instance state consisting `approx` and `leftRoot`. But then the implemented * code would have two extra parameters for each of the many calls that go from * one sub-part of isSubType to another. @@ -465,19 +474,15 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case AndType(tp21, tp22) => constrainRHSVars(tp21) && constrainRHSVars(tp22) case _ => true - // An & on the left side loses information. We compensate by also trying the join. - // This is less ad-hoc than it looks since we produce joins in type inference, - // and then need to check that they are indeed supertypes of the original types - // under -Ycheck. Test case is i7965.scala. - def containsAnd(tp: Type): Boolean = tp.dealiasKeepRefiningAnnots match - case tp: AndType => true - case OrType(tp1, tp2) => containsAnd(tp1) || containsAnd(tp2) - case _ => false - widenOK || joinOK || (tp1.isSoft || constrainRHSVars(tp2)) && recur(tp11, tp2) && recur(tp12, tp2) || containsAnd(tp1) && inFrozenGadt(recur(tp1.join, tp2)) + // An & on the left side loses information. We compensate by also trying the join. + // This is less ad-hoc than it looks since we produce joins in type inference, + // and then need to check that they are indeed supertypes of the original types + // under -Ycheck. Test case is i7965.scala. 
+ case tp1: MatchType => val reduced = tp1.reduced if (reduced.exists) recur(reduced, tp2) else thirdTry @@ -489,11 +494,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def thirdTryNamed(tp2: NamedType): Boolean = tp2.info match { case info2: TypeBounds => - def compareGADT: Boolean = { - val gbounds2 = gadtBounds(tp2.symbol) - (gbounds2 != null) && - (isSubTypeWhenFrozen(tp1, gbounds2.lo) || - (tp1 match { + def compareGADT: Boolean = + tp2.symbol.onGadtBounds(gbounds2 => + isSubTypeWhenFrozen(tp1, gbounds2.lo) + || tp1.match case tp1: NamedType if ctx.gadt.contains(tp1.symbol) => // Note: since we approximate constrained types only with their non-param bounds, // we need to manually handle the case when we're comparing two constrained types, @@ -502,10 +506,9 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // comparing two constrained types, and that case will be handled here first. ctx.gadt.isLess(tp1.symbol, tp2.symbol) && GADTusage(tp1.symbol) && GADTusage(tp2.symbol) case _ => false - }) || - narrowGADTBounds(tp2, tp1, approx, isUpper = false)) && - { isBottom(tp1) || GADTusage(tp2.symbol) } - } + || narrowGADTBounds(tp2, tp1, approx, isUpper = false)) + && (isBottom(tp1) || GADTusage(tp2.symbol)) + isSubApproxHi(tp1, info2.lo) || compareGADT || tryLiftedToThis2 || fourthTry case _ => @@ -559,31 +562,35 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp2: TypeParamRef => compareTypeParamRef(tp2) case tp2: RefinedType => - def compareRefinedSlow: Boolean = { + def compareRefinedSlow: Boolean = val name2 = tp2.refinedName - recur(tp1, tp2.parent) && - (name2 == nme.WILDCARD || hasMatchingMember(name2, tp1, tp2)) - } - def compareRefined: Boolean = { + recur(tp1, tp2.parent) + && (name2 == nme.WILDCARD || hasMatchingMember(name2, tp1, tp2)) + + def compareRefined: Boolean = val tp1w = tp1.widen val skipped2 = skipMatching(tp1w, tp2) - if ((skipped2 
eq tp2) || !Config.fastPathForRefinedSubtype) - tp1 match { - case tp1: AndType => - // Delay calling `compareRefinedSlow` because looking up a member - // of an `AndType` can lead to a cascade of subtyping checks - // This twist is needed to make collection/generic/ParFactory.scala compile - fourthTry || compareRefinedSlow - case tp1: HKTypeLambda => - // HKTypeLambdas do not have members. - fourthTry - case _ => - compareRefinedSlow || fourthTry - } + if (skipped2 eq tp2) || !Config.fastPathForRefinedSubtype then + if containsAnd(tp1) then + tp2.parent match + case _: RefinedType | _: AndType => + // maximally decompose RHS to limit the bad effects of the `either` that is necessary + // since LHS contains an AndType + recur(tp1, decomposeRefinements(tp2, Nil)) + case _ => + // Delay calling `compareRefinedSlow` because looking up a member + // of an `AndType` can lead to a cascade of subtyping checks + // This twist is needed to make collection/generic/ParFactory.scala compile + fourthTry || compareRefinedSlow + else if tp1.isInstanceOf[HKTypeLambda] then + // HKTypeLambdas do not have members. + fourthTry + else + compareRefinedSlow || fourthTry else // fast path, in particular for refinements resulting from parameterization. 
isSubRefinements(tp1w.asInstanceOf[RefinedType], tp2, skipped2) && recur(tp1, skipped2) - } + compareRefined case tp2: RecType => def compareRec = tp1.safeDealias match { @@ -616,12 +623,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling migrateTo3 || tp1.typeParams.corresponds(tp2.typeParams)((tparam1, tparam2) => isSubType(tparam2.paramInfo.subst(tp2, tp1), tparam1.paramInfo)) - val saved = comparedTypeLambdas - comparedTypeLambdas += tp1 - comparedTypeLambdas += tp2 - val variancesOK = variancesConform(tp1.typeParams, tp2.typeParams) - try variancesOK && boundsOK && isSubType(tp1.resType, tp2.resType.subst(tp2, tp1)) - finally comparedTypeLambdas = saved + comparingTypeLambdas(tp1, tp2) { + val variancesOK = variancesConform(tp1.typeParams, tp2.typeParams) + variancesOK && boundsOK && isSubType(tp1.resType, tp2.resType.subst(tp2, tp1)) + } case _ => val tparams1 = tp1.typeParams if (tparams1.nonEmpty) @@ -691,9 +696,11 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp2: PolyType => def comparePoly = tp1 match { case tp1: PolyType => - (tp1.signature consistentParams tp2.signature) && - matchingPolyParams(tp1, tp2) && - isSubType(tp1.resultType, tp2.resultType.subst(tp2, tp1)) + comparingTypeLambdas(tp1, tp2) { + (tp1.signature consistentParams tp2.signature) + && matchingPolyParams(tp1, tp2) + && isSubType(tp1.resultType, tp2.resultType.subst(tp2, tp1)) + } case _ => false } comparePoly @@ -751,13 +758,12 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp1: TypeRef => tp1.info match { case TypeBounds(_, hi1) => - def compareGADT = { - val gbounds1 = gadtBounds(tp1.symbol) - (gbounds1 != null) && - (isSubTypeWhenFrozen(gbounds1.hi, tp2) || - narrowGADTBounds(tp1, tp2, approx, isUpper = true)) && - { tp2.isAny || GADTusage(tp1.symbol) } - } + def compareGADT = + tp1.symbol.onGadtBounds(gbounds1 => + isSubTypeWhenFrozen(gbounds1.hi, tp2) + || 
narrowGADTBounds(tp1, tp2, approx, isUpper = true)) + && (tp2.isAny || GADTusage(tp1.symbol)) + isSubType(hi1, tp2, approx.addLow) || compareGADT || tryLiftedToThis1 case _ => def isNullable(tp: Type): Boolean = tp.widenDealias match { @@ -840,7 +846,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case _ => tp2.isAnyRef } compareJavaArray - case tp1: ExprType if ctx.phase.id > gettersPhase.id => + case tp1: ExprType if ctx.phaseId > gettersPhase.id => // getters might have converted T to => T, need to compensate. recur(tp1.widenExpr, tp2) case _ => @@ -848,11 +854,15 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling } /** When called from `pre1.A <:< pre2.A` does `pre1` relate to `pre2` so that - * the subtype test is true? This is the case if `pre1 <:< pre2`, or - * `pre1` and `pre2` are both this-types of related classes. Here, two classes - * are related if each of them has a self type that derives from the other. + * the subtype test is true? This is the case if + * + * 1. `pre1 <:< pre2`, or + * 2. One of `pre1` and `pre2` refers to a package and the other to a + * package object in that package, or + * 3. `pre1` and `pre2` are both this-types of related classes. * - * This criterion is a bit dubious. I.e. in the test + * Here, two classes are related if each of them has a self type that derives from the other. + * The third criterion is a bit dubious. I.e. 
in the test * * A.this.T <:< B.this.T * @@ -872,18 +882,32 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling |* does not conform to dotty.tools.dotc.util.Property.Key[Typer.this.Deriver & Namer.this.Deriver] */ def isSubPrefix(pre1: Type, pre2: Type): Boolean = + def samePkg(sym1: Symbol, sym2: Symbol) = + sym2.is(Package) && sym1.isPackageObject && sym1.owner == sym2.moduleClass + || sym1.is(Package) && sym2.isPackageObject && sym2.owner == sym1.moduleClass pre1 match case pre1: ThisType => pre2 match case pre2: ThisType => + if samePkg(pre1.cls, pre2.cls) then return true if pre1.cls.classInfo.selfType.derivesFrom(pre2.cls) && pre2.cls.classInfo.selfType.derivesFrom(pre1.cls) then subtyping.println(i"assume equal prefixes $pre1 $pre2") return true + case pre2: TermRef => + if samePkg(pre1.cls, pre2.symbol) then return true + case _ => + case pre1: TermRef => + pre2 match + case pre2: TermRef => + if samePkg(pre1.symbol, pre2.symbol) then return true + case pre2: ThisType => + if samePkg(pre1.symbol, pre2.cls) then return true case _ => case _ => isSubType(pre1, pre2) + end isSubPrefix /** Compare `tycon[args]` with `other := otherTycon[otherArgs]`, via `>:>` if fromBelow is true, `<:<` otherwise * (we call this relationship `~:~` in the rest of this comment). @@ -973,7 +997,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** True if `tp1` and `tp2` have compatible type constructors and their * corresponding arguments are subtypes relative to their variance (see `isSubArgs`). */ - def isMatchingApply(tp1: Type): Boolean = tp1 match { + def isMatchingApply(tp1: Type): Boolean = tp1.widen match { case tp1 @ AppliedType(tycon1, args1) => // We intentionally do not automatically dealias `tycon1` or `tycon2` here. 
// `TypeApplications#appliedTo` already takes care of dealiasing type @@ -1033,17 +1057,12 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling var touchedGADTs = false var gadtIsInstantiated = false - def byGadtBounds(sym: Symbol, tp: Type, fromAbove: Boolean): Boolean = { - touchedGADTs = true - val b = gadtBounds(sym) - def boundsDescr = if b == null then "null" else b.show - b != null && inFrozenGadt { - if fromAbove then isSubType(b.hi, tp) else isSubType(tp, b.lo) - } && { - gadtIsInstantiated = b.isInstanceOf[TypeAlias] - true - } - } + + extension (sym: Symbol) + inline def byGadtBounds(inline op: TypeBounds => Boolean): Boolean = + touchedGADTs = true + sym.onGadtBounds( + b => op(b) && { gadtIsInstantiated = b.isInstanceOf[TypeAlias]; true }) def byGadtOrdering: Boolean = ctx.gadt.contains(tycon1sym) @@ -1052,8 +1071,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val res = ( tycon1sym == tycon2sym && isSubPrefix(tycon1.prefix, tycon2.prefix) - || byGadtBounds(tycon1sym, tycon2, fromAbove = true) - || byGadtBounds(tycon2sym, tycon1, fromAbove = false) + || tycon1sym.byGadtBounds(b => isSubTypeWhenFrozen(b.hi, tycon2)) + || tycon2sym.byGadtBounds(b => isSubTypeWhenFrozen(tycon1, b.lo)) || byGadtOrdering ) && { // There are two cases in which we can assume injectivity. 
@@ -1142,6 +1161,17 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling else fallback(tycon2bounds.lo) + def byGadtBounds: Boolean = + { + tycon2 match + case tycon2: TypeRef => + val tycon2sym = tycon2.symbol + tycon2sym.onGadtBounds { bounds2 => + inFrozenGadt { compareLower(bounds2, tyconIsTypeRef = false) } + } + case _ => false + } && { GADTused = true; true } + tycon2 match { case param2: TypeParamRef => isMatchingApply(tp1) || @@ -1149,13 +1179,14 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling compareLower(bounds(param2), tyconIsTypeRef = false) case tycon2: TypeRef => isMatchingApply(tp1) || + byGadtBounds || defn.isCompiletimeAppliedType(tycon2.symbol) && compareCompiletimeAppliedType(tp2, tp1, fromBelow = true) || { tycon2.info match { case info2: TypeBounds => compareLower(info2, tyconIsTypeRef = true) case info2: ClassInfo => tycon2.name.startsWith("Tuple") && - defn.isTupleType(tp2) && recur(tp1, tp2.toNestedPairs) || + defn.isTupleNType(tp2) && recur(tp1, tp2.toNestedPairs) || tryBaseType(info2.cls) case _ => fourthTry @@ -1188,11 +1219,18 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling isSubType(bounds(param1).hi.applyIfParameterized(args1), tp2, approx.addLow) case tycon1: TypeRef => val sym = tycon1.symbol + + def byGadtBounds: Boolean = + sym.onGadtBounds { bounds1 => + inFrozenGadt { isSubType(bounds1.hi.applyIfParameterized(args1), tp2, approx.addLow) } + } && { GADTused = true; true } + + !sym.isClass && { defn.isCompiletimeAppliedType(sym) && compareCompiletimeAppliedType(tp1, tp2, fromBelow = false) || recur(tp1.superType, tp2) || tryLiftedToThis1 - } + }|| byGadtBounds case tycon1: TypeProxy => recur(tp1.superType, tp2) case _ => @@ -1260,14 +1298,17 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling else if tp1 eq tp2 then true else val saved = constraint + val savedGadt = ctx.gadt.fresh + inline def 
restore() = + state.constraint = saved + ctx.gadt.restore(savedGadt) val savedSuccessCount = successCount try recCount += 1 if recCount >= Config.LogPendingSubTypesThreshold then monitored = true val result = if monitored then monitoredIsSubType else firstTry recCount -= 1 - if !result then - state.constraint = saved + if !result then restore() else if recCount == 0 && needsGc then state.gc() needsGc = false @@ -1276,7 +1317,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling catch case NonFatal(ex) => if ex.isInstanceOf[AssertionError] then showGoal(tp1, tp2) recCount -= 1 - state.constraint = saved + restore() successCount = savedSuccessCount throw ex } @@ -1363,7 +1404,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling */ def canCompare(ts: Set[Type]) = ctx.phase.isTyper - || !ts.exists(_.existsPart(_.isInstanceOf[SkolemType], stopAtStatic = true)) + || !ts.exists(_.existsPart(_.isInstanceOf[SkolemType], StopAt.Static)) def verified(result: Boolean): Boolean = if Config.checkAtomsComparisons then @@ -1407,7 +1448,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def recurArgs(args1: List[Type], args2: List[Type], tparams2: List[ParamInfo]): Boolean = if (args1.isEmpty) args2.isEmpty - else args2.nonEmpty && { + else args2.nonEmpty && tparams2.nonEmpty && { val tparam = tparams2.head val v = tparam.paramVarianceSign @@ -1438,15 +1479,16 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling */ def compareCaptured(arg1: TypeBounds, arg2: Type) = tparam match { case tparam: Symbol => - if (leftRoot.isStable || (ctx.isAfterTyper || ctx.mode.is(Mode.TypevarsMissContext)) - && leftRoot.member(tparam.name).exists) { + if (leftRoot.isStable || ctx.isAfterTyper || ctx.mode.is(Mode.TypevarsMissContext)) + && leftRoot.isValueType + && leftRoot.member(tparam.name).exists + then val captured = TypeRef(leftRoot, tparam) try isSubArg(captured, 
arg2) catch case ex: TypeError => // The captured reference could be illegal and cause a // TypeError to be thrown in argDenot false - } else if (v > 0) isSubType(paramBounds(tparam).hi, arg2) else if (v < 0) @@ -1457,7 +1499,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling false } - def isSubArg(arg1: Type, arg2: Type): Boolean = arg2 match { + def isSubArg(arg1: Type, arg2: Type): Boolean = arg2 match case arg2: TypeBounds => val arg1norm = arg1 match { case arg1: TypeBounds => @@ -1468,15 +1510,23 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case _ => arg1 } arg2.contains(arg1norm) + case ExprType(arg2res) + if ctx.phaseId > elimByNamePhase.id && !ctx.erasedTypes + && defn.isByNameFunction(arg1.dealias) => + // ElimByName maps `=> T` to `()? => T`, but only in method parameters. It leaves + // embedded `=> T` arguments alone. This clause needs to compensate for that. + isSubArg(arg1.dealias.argInfos.head, arg2res) case _ => - arg1 match { + arg1 match case arg1: TypeBounds => compareCaptured(arg1, arg2) + case ExprType(arg1res) + if ctx.phaseId > elimByNamePhase.id && !ctx.erasedTypes + && defn.isByNameFunction(arg2.dealias) => + isSubArg(arg1res, arg2.argInfos.head) case _ => (v > 0 || isSubType(arg2, arg1)) && (v < 0 || isSubType(arg1, arg2)) - } - } isSubArg(args1.head, args2.head) } && recurArgs(args1.tail, args2.tail, tparams2.tail) @@ -1526,23 +1576,16 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** Returns true iff the result of evaluating either `op1` or `op2` is true and approximates resulting constraints. * - * If we're inferring GADT bounds or constraining a method based on its - * expected type, we infer only the _necessary_ constraints, this means we - * keep the smaller constraint if any, or no constraint at all. This is - * necessary for GADT bounds inference to be sound. 
When constraining a - * method, this avoid committing of constraints that would later prevent us - * from typechecking method arguments, see or-inf.scala and and-inf.scala for - * examples. + * If `necessaryConstraintsOnly` is true, we keep the smaller constraint if + * any, or no constraint at all. * * Otherwise, we infer _sufficient_ constraints: we try to keep the smaller of * the two constraints, but if never is smaller than the other, we just pick * the first one. - * - * @see [[necessaryEither]] for the GADT / result type case - * @see [[sufficientEither]] for the normal case */ protected def either(op1: => Boolean, op2: => Boolean): Boolean = - if ctx.mode.is(Mode.GadtConstraintInference) || useNecessaryEither then + Stats.record("TypeComparer.either") + if necessaryConstraintsOnly then necessaryEither(op1, op2) else sufficientEither(op1, op2) @@ -1618,7 +1661,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * T1 & T2 <:< T3 * T1 <:< T2 | T3 * - * Unlike [[sufficientEither]], this method is used in GADTConstraintInference mode, when we are attempting + * But this method is used when `useNecessaryEither` is true, like when we are attempting * to infer GADT constraints that necessarily follow from the subtyping relationship. For instance, if we have * * enum Expr[T] { @@ -1690,6 +1733,26 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling else op2 end necessaryEither + /** Decompose into conjunction of types each of which has only a single refinement */ + def decomposeRefinements(tp: Type, refines: List[(Name, Type)]): Type = tp match + case RefinedType(parent, rname, rinfo) => + decomposeRefinements(parent, (rname, rinfo) :: refines) + case AndType(tp1, tp2) => + AndType(decomposeRefinements(tp1, refines), decomposeRefinements(tp2, refines)) + case _ => + refines.map(RefinedType(tp, _, _): Type).reduce(AndType(_, _)) + + /** Can comparing this type on the left lead to an either? 
This is the case if + * the type is and AndType or contains embedded occurrences of AndTypes + */ + def containsAnd(tp: Type): Boolean = tp match + case tp: AndType => true + case OrType(tp1, tp2) => containsAnd(tp1) || containsAnd(tp2) + case tp: TypeParamRef => containsAnd(bounds(tp).hi) + case tp: TypeRef => containsAnd(tp.info.hiBound) || tp.symbol.onGadtBounds(gbounds => containsAnd(gbounds.hi)) + case tp: TypeProxy => containsAnd(tp.superType) + case _ => false + /** Does type `tp1` have a member with name `name` whose normalized type is a subtype of * the normalized type of the refinement `tp2`? * Normalization is as follows: If `tp2` contains a skolem to its refinement type, @@ -1697,40 +1760,74 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling */ protected def hasMatchingMember(name: Name, tp1: Type, tp2: RefinedType): Boolean = trace(i"hasMatchingMember($tp1 . $name :? ${tp2.refinedInfo}), mbr: ${tp1.member(name).info}", subtyping) { - val rinfo2 = tp2.refinedInfo - - // If the member is an abstract type and the prefix is a path, compare the member itself - // instead of its bounds. This case is needed situations like: - // - // class C { type T } - // val foo: C - // foo.type <: C { type T {= , <: , >:} foo.T } - // - // or like: - // - // class C[T] - // C[?] <: C[TV] - // - // where TV is a type variable. See i2397.scala for an example of the latter. 
- def matchAbstractTypeMember(info1: Type) = info1 match { - case TypeBounds(lo, hi) if lo ne hi => - tp2.refinedInfo match { - case rinfo2: TypeBounds if tp1.isStable => - val ref1 = tp1.widenExpr.select(name) - isSubType(rinfo2.lo, ref1) && isSubType(ref1, rinfo2.hi) - case _ => - false - } - case _ => false - } - def qualifies(m: SingleDenotation) = - isSubType(m.info.widenExpr, rinfo2.widenExpr) || matchAbstractTypeMember(m.info) + def qualifies(m: SingleDenotation): Boolean = + // If the member is an abstract type and the prefix is a path, compare the member itself + // instead of its bounds. This case is needed situations like: + // + // class C { type T } + // val foo: C + // foo.type <: C { type T {= , <: , >:} foo.T } + // + // or like: + // + // class C[T] + // C[?] <: C[TV] + // + // where TV is a type variable. See i2397.scala for an example of the latter. + def matchAbstractTypeMember(info1: Type): Boolean = info1 match { + case TypeBounds(lo, hi) if lo ne hi => + tp2.refinedInfo match { + case rinfo2: TypeBounds if tp1.isStable => + val ref1 = tp1.widenExpr.select(name) + isSubType(rinfo2.lo, ref1) && isSubType(ref1, rinfo2.hi) + case _ => + false + } + case _ => false + } - tp1.member(name) match { // inlined hasAltWith for performance + // An additional check for type member matching: If the refinement of the + // supertype `tp2` does not refer to a member symbol defined in the parent of `tp2`. + // then the symbol referred to in the subtype must have a signature that coincides + // in its parameters with the refinement's signature. The reason for the check + // is that if the refinement does not refer to a member symbol, we will have to + // resort to reflection to invoke the member. And Java reflection needs to know exact + // erased parameter types. See neg/i12211.scala. Other reflection algorithms could + // conceivably dispatch without knowning precise parameter signatures. 
One can signal + // this by inheriting from the `scala.reflect.SignatureCanBeImprecise` marker trait, + // in which case the signature test is elided. + def sigsOK(symInfo: Type, info2: Type) = + tp2.underlyingClassRef(refinementOK = true).member(name).exists + || tp2.derivesFrom(defn.WithoutPreciseParameterTypesClass) + || symInfo.isInstanceOf[MethodType] + && symInfo.signature.consistentParams(info2.signature) + + // A relaxed version of isSubType, which compares method types + // under the standard arrow rule which is contravarient in the parameter types, + // but under the condition that signatures might have to match (see sigsOK) + // This relaxed version is needed to correctly compare dependent function types. + // See pos/i12211.scala. + def isSubInfo(info1: Type, info2: Type, symInfo: Type): Boolean = + info2 match + case info2: MethodType => + info1 match + case info1: MethodType => + val symInfo1 = symInfo.stripPoly + matchingMethodParams(info1, info2, precise = false) + && isSubInfo(info1.resultType, info2.resultType.subst(info2, info1), symInfo1.resultType) + && sigsOK(symInfo1, info2) + case _ => isSubType(info1, info2) + case _ => isSubType(info1, info2) + + val info1 = m.info.widenExpr + isSubInfo(info1, tp2.refinedInfo.widenExpr, m.symbol.info.orElse(info1)) + || matchAbstractTypeMember(m.info) + end qualifies + + tp1.member(name) match // inlined hasAltWith for performance case mbr: SingleDenotation => qualifies(mbr) case mbr => mbr hasAltWith qualifies - } } final def ensureStableSingleton(tp: Type): SingletonType = tp.stripTypeVar match { @@ -1840,15 +1937,20 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling } /** Do the parameter types of `tp1` and `tp2` match in a way that allows `tp1` - * to override `tp2` ? This is the case if they're pairwise `=:=`. + * to override `tp2` ? Two modes: precise or not. + * If `precise` is set (which is the default) this is the case if they're pairwise `=:=`. 
+ * Otherwise parameters in `tp2` must be subtypes of corresponding parameters in `tp1`. */ - def matchingMethodParams(tp1: MethodType, tp2: MethodType): Boolean = { + def matchingMethodParams(tp1: MethodType, tp2: MethodType, precise: Boolean = true): Boolean = { def loop(formals1: List[Type], formals2: List[Type]): Boolean = formals1 match { case formal1 :: rest1 => formals2 match { case formal2 :: rest2 => val formal2a = if (tp2.isParamDependent) formal2.subst(tp2, tp1) else formal2 - isSameTypeWhenFrozen(formal1, formal2a) && loop(rest1, rest2) + val paramsMatch = + if precise then isSameTypeWhenFrozen(formal1, formal2a) + else isSubTypeWhenFrozen(formal2a, formal1) + paramsMatch && loop(rest1, rest2) case nil => false } @@ -1933,43 +2035,40 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling else if (!tp2.exists) tp1 else if tp1.isAny && !tp2.isLambdaSub || tp1.isAnyKind || isBottom(tp2) then tp2 else if tp2.isAny && !tp1.isLambdaSub || tp2.isAnyKind || isBottom(tp1) then tp1 - else tp2 match { // normalize to disjunctive normal form if possible. + else tp2 match case tp2: LazyRef => glb(tp1, tp2.ref) - case OrType(tp21, tp22) => - tp1 & tp21 | tp1 & tp22 case _ => - tp1 match { + tp1 match case tp1: LazyRef => glb(tp1.ref, tp2) - case OrType(tp11, tp12) => - tp11 & tp2 | tp12 & tp2 case _ => val tp1a = dropIfSuper(tp1, tp2) - if (tp1a ne tp1) glb(tp1a, tp2) - else { + if tp1a ne tp1 then glb(tp1a, tp2) + else val tp2a = dropIfSuper(tp2, tp1) - if (tp2a ne tp2) glb(tp1, tp2a) - else tp1 match { - case tp1: ConstantType => - tp2 match { - case tp2: ConstantType => - // Make use of the fact that the intersection of two constant types - // types which are not subtypes of each other is known to be empty. - // Note: The same does not apply to singleton types in general. - // E.g. we could have a pattern match against `x.type & y.type` - // which might succeed if `x` and `y` happen to be the same ref - // at run time. 
It would not work to replace that with `Nothing`. - // However, maybe we can still apply the replacement to - // types which are not explicitly written. - NothingType + if tp2a ne tp2 then glb(tp1, tp2a) + else tp2 match // normalize to disjunctive normal form if possible. + case tp2 @ OrType(tp21, tp22) => + lub(tp1 & tp21, tp1 & tp22, isSoft = tp2.isSoft) + case _ => + tp1 match + case tp1 @ OrType(tp11, tp12) => + lub(tp11 & tp2, tp12 & tp2, isSoft = tp1.isSoft) + case tp1: ConstantType => + tp2 match + case tp2: ConstantType => + // Make use of the fact that the intersection of two constant types + // types which are not subtypes of each other is known to be empty. + // Note: The same does not apply to singleton types in general. + // E.g. we could have a pattern match against `x.type & y.type` + // which might succeed if `x` and `y` happen to be the same ref + // at run time. It would not work to replace that with `Nothing`. + // However, maybe we can still apply the replacement to + // types which are not explicitly written. + NothingType + case _ => andType(tp1, tp2) case _ => andType(tp1, tp2) - } - case _ => andType(tp1, tp2) - } - } - } - } } def widenInUnions(using Context): Boolean = @@ -1977,10 +2076,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** The least upper bound of two types * @param canConstrain If true, new constraints might be added to simplify the lub. + * @param isSoft If the lub is a union, this determines whether it's a soft union. * @note We do not admit singleton types in or-types as lubs. 
*/ - def lub(tp1: Type, tp2: Type, canConstrain: Boolean = false): Type = /*>|>*/ trace(s"lub(${tp1.show}, ${tp2.show}, canConstrain=$canConstrain)", subtyping, show = true) /*<|<*/ { - + def lub(tp1: Type, tp2: Type, canConstrain: Boolean = false, isSoft: Boolean = true): Type = /*>|>*/ trace(s"lub(${tp1.show}, ${tp2.show}, canConstrain=$canConstrain, isSoft=$isSoft)", subtyping, show = true) /*<|<*/ { if (tp1 eq tp2) tp1 else if (!tp1.exists) tp1 else if (!tp2.exists) tp2 @@ -2006,8 +2105,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def widen(tp: Type) = if (widenInUnions) tp.widen else tp.widenIfUnstable val tp1w = widen(tp1) val tp2w = widen(tp2) - if ((tp1 ne tp1w) || (tp2 ne tp2w)) lub(tp1w, tp2w, canConstrain) - else orType(tp1w, tp2w) // no need to check subtypes again + if ((tp1 ne tp1w) || (tp2 ne tp2w)) lub(tp1w, tp2w, canConstrain = canConstrain, isSoft = isSoft) + else orType(tp1w, tp2w, isSoft = isSoft) // no need to check subtypes again } mergedLub(tp1.stripLazyRef, tp2.stripLazyRef) } @@ -2116,11 +2215,11 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp2 @ OrType(tp21, tp22) => val higher1 = mergeIfSuper(tp1, tp21, canConstrain) if (higher1 eq tp21) tp2 - else if (higher1.exists) higher1 | tp22 + else if (higher1.exists) lub(higher1, tp22, isSoft = tp2.isSoft) else { val higher2 = mergeIfSuper(tp1, tp22, canConstrain) if (higher2 eq tp22) tp2 - else if (higher2.exists) tp21 | higher2 + else if (higher2.exists) lub(tp21, higher2, isSoft = tp2.isSoft) else NoType } case _ => @@ -2157,7 +2256,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * opportunistically merged. 
*/ final def andType(tp1: Type, tp2: Type, isErased: Boolean = ctx.erasedTypes): Type = - andTypeGen(tp1, tp2, AndType(_, _), isErased = isErased) + andTypeGen(tp1, tp2, AndType.balanced(_, _), isErased = isErased) final def simplifyAndTypeWithFallback(tp1: Type, tp2: Type, fallback: Type): Type = andTypeGen(tp1, tp2, (_, _) => fallback) @@ -2168,17 +2267,18 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * ExprType, LambdaType). Also, when forming an `|`, * instantiated TypeVars are dereferenced and annotations are stripped. * + * @param isSoft If the result is a union, this determines whether it's a soft union. * @param isErased Apply erasure semantics. If erased is true, instead of creating * an OrType, the lub will be computed using TypeCreator#erasedLub. */ - final def orType(tp1: Type, tp2: Type, isErased: Boolean = ctx.erasedTypes): Type = { - val t1 = distributeOr(tp1, tp2) + final def orType(tp1: Type, tp2: Type, isSoft: Boolean = true, isErased: Boolean = ctx.erasedTypes): Type = { + val t1 = distributeOr(tp1, tp2, isSoft) if (t1.exists) t1 else { - val t2 = distributeOr(tp2, tp1) + val t2 = distributeOr(tp2, tp1, isSoft) if (t2.exists) t2 else if (isErased) erasedLub(tp1, tp2) - else liftIfHK(tp1, tp2, OrType(_, _, soft = true), _ | _, _ & _) + else liftIfHK(tp1, tp2, OrType.balanced(_, _, soft = isSoft), _ | _, _ & _) } } @@ -2266,18 +2366,18 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * * The rhs is a proper supertype of the lhs. 
*/ - private def distributeOr(tp1: Type, tp2: Type): Type = tp1 match { + private def distributeOr(tp1: Type, tp2: Type, isSoft: Boolean = true): Type = tp1 match { case ExprType(rt1) => tp2 match { case ExprType(rt2) => - ExprType(rt1 | rt2) + ExprType(lub(rt1, rt2, isSoft = isSoft)) case _ => NoType } case tp1: TypeVar if tp1.isInstantiated => - tp1.underlying | tp2 + lub(tp1.underlying, tp2, isSoft = isSoft) case tp1: AnnotatedType if !tp1.isRefining => - tp1.underlying | tp2 + lub(tp1.underlying, tp2, isSoft = isSoft) case _ => NoType } @@ -2329,19 +2429,21 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling } /** Show subtype goal that led to an assertion failure */ - def showGoal(tp1: Type, tp2: Type)(using Context): Unit = { - report.echo(i"assertion failure for ${show(tp1)} <:< ${show(tp2)}, frozen = $frozenConstraint") - def explainPoly(tp: Type) = tp match { - case tp: TypeParamRef => report.echo(s"TypeParamRef ${tp.show} found in ${tp.binder.show}") - case tp: TypeRef if tp.symbol.exists => report.echo(s"typeref ${tp.show} found in ${tp.symbol.owner.show}") - case tp: TypeVar => report.echo(s"typevar ${tp.show}, origin = ${tp.origin}") - case _ => report.echo(s"${tp.show} is a ${tp.getClass}") - } - if (Config.verboseExplainSubtype) { - explainPoly(tp1) - explainPoly(tp2) - } - } + def showGoal(tp1: Type, tp2: Type)(using Context): Unit = + try + report.echo(i"assertion failure for ${show(tp1)} <:< ${show(tp2)}, frozen = $frozenConstraint") + def explainPoly(tp: Type) = tp match { + case tp: TypeParamRef => report.echo(s"TypeParamRef ${tp.show} found in ${tp.binder.show}") + case tp: TypeRef if tp.symbol.exists => report.echo(s"typeref ${tp.show} found in ${tp.symbol.owner.show}") + case tp: TypeVar => report.echo(s"typevar ${tp.show}, origin = ${tp.origin}") + case _ => report.echo(s"${tp.show} is a ${tp.getClass}") + } + if (Config.verboseExplainSubtype) { + explainPoly(tp1) + explainPoly(tp2) + } + catch case 
NonFatal(ex) => + report.echo(s"assertion failure [[cannot display since $ex was thrown]]") /** Record statistics about the total number of subtype checks * and the number of "successful" subtype checks, i.e. checks @@ -2408,29 +2510,41 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def provablyDisjoint(tp1: Type, tp2: Type)(using Context): Boolean = trace(i"provable disjoint $tp1, $tp2", matchTypes) { // println(s"provablyDisjoint(${tp1.show}, ${tp2.show})") - def isEnumValueOrModule(ref: TermRef): Boolean = + def isEnumValue(ref: TermRef): Boolean = val sym = ref.termSymbol - sym.isAllOf(EnumCase, butNot=JavaDefined) || sym.is(Module) + sym.isAllOf(EnumCase, butNot=JavaDefined) - /** Can we enumerate all instantiations of this type? */ - def isClosedSum(tp: Symbol): Boolean = - tp.is(Sealed) && tp.isOneOf(AbstractOrTrait) && !tp.hasAnonymousChild - - /** Splits a closed type into a disjunction of smaller types. - * It should hold that `tp` and `decompose(tp).reduce(_ or _)` - * denote the same set of values. 
- */ - def decompose(sym: Symbol, tp: Type): List[Type] = - sym.children.map(x => refineUsingParent(tp, x)).filter(_.exists) + def isEnumValueOrModule(ref: TermRef): Boolean = + isEnumValue(ref) || ref.termSymbol.is(Module) || (ref.info match { + case tp: TermRef => isEnumValueOrModule(tp) + case _ => false + }) + + def fullyInstantiated(tp: Type): Boolean = new TypeAccumulator[Boolean] { + override def apply(x: Boolean, t: Type) = + x && { + t match { + case tp: TypeRef if tp.symbol.isAbstractOrParamType => false + case _: SkolemType | _: TypeVar | _: TypeParamRef => false + case _ => foldOver(x, t) + } + } + }.apply(true, tp) (tp1.dealias, tp2.dealias) match { - case (tp1: TypeRef, tp2: TypeRef) if tp1.symbol == defn.SingletonClass || tp2.symbol == defn.SingletonClass => + case (tp1: TypeRef, _) if tp1.symbol == defn.SingletonClass => + false + case (_, tp2: TypeRef) if tp2.symbol == defn.SingletonClass => false case (tp1: ConstantType, tp2: ConstantType) => tp1 != tp2 case (tp1: TypeRef, tp2: TypeRef) if tp1.symbol.isClass && tp2.symbol.isClass => val cls1 = tp1.classSymbol val cls2 = tp2.classSymbol + def isDecomposable(tp: Symbol): Boolean = + tp.is(Sealed) && !tp.hasAnonymousChild + def decompose(sym: Symbol, tp: Type): List[Type] = + sym.children.map(x => refineUsingParent(tp, x)).filter(_.exists) if (cls1.derivesFrom(cls2) || cls2.derivesFrom(cls1)) false else @@ -2443,9 +2557,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // subtype, so they must be unrelated by single inheritance // of classes. true - else if (isClosedSum(cls1)) + else if (isDecomposable(cls1)) + // At this point, !cls1.derivesFrom(cls2): we know that direct + // instantiations of `cls1` (terms of the form `new cls1`) are not + // of type `tp2`. Therefore, we can safely decompose `cls1` using + // `.children`, even if `cls1` is non abstract. 
decompose(cls1, tp1).forall(x => provablyDisjoint(x, tp2)) - else if (isClosedSum(cls2)) + else if (isDecomposable(cls2)) decompose(cls2, tp2).forall(x => provablyDisjoint(x, tp1)) else false @@ -2458,31 +2576,20 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def covariantDisjoint(tp1: Type, tp2: Type, tparam: TypeParamInfo): Boolean = provablyDisjoint(tp1, tp2) && typeparamCorrespondsToField(tycon1, tparam) - // In the invariant case, we used a weaker version of disjointness: - // we consider types not equal with respect to =:= to be disjoint + // In the invariant case, we also use a stronger notion of disjointness: + // we consider fully instantiated types not equal wrt =:= to be disjoint // (under any context). This is fine because it matches the runtime // semantics of pattern matching. To implement a pattern such as // `case Inv[T] => ...`, one needs a type tag for `T` and the compiler // is used at runtime to check it the scrutinee's type is =:= to `T`. - // Note that this is currently a theoretical concern since we Dotty + // Note that this is currently a theoretical concern since Dotty // doesn't have type tags, meaning that users cannot write patterns // that do type tests on higher kinded types. def invariantDisjoint(tp1: Type, tp2: Type, tparam: TypeParamInfo): Boolean = - covariantDisjoint(tp1, tp2, tparam) || !isSameType(tp1, tp2) && { - // We can only trust a "no" from `isSameType` when both - // `tp1` and `tp2` are fully instantiated. 
- def fullyInstantiated(tp: Type): Boolean = new TypeAccumulator[Boolean] { - override def apply(x: Boolean, t: Type) = - x && { - t match { - case tp: TypeRef if tp.symbol.isAbstractOrParamType => false - case _: SkolemType | _: TypeVar => false - case _ => foldOver(x, t) - } - } - }.apply(true, tp) - fullyInstantiated(tp1) && fullyInstantiated(tp2) - } + provablyDisjoint(tp1, tp2) || + !isSameType(tp1, tp2) && + fullyInstantiated(tp1) && // We can only trust a "no" from `isSameType` when + fullyInstantiated(tp2) // both `tp1` and `tp2` are fully instantiated. args1.lazyZip(args2).lazyZip(tycon1.typeParams).exists { (arg1, arg2, tparam) => @@ -2519,9 +2626,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling provablyDisjoint(tp1, gadtBounds(tp2.symbol).hi) || provablyDisjoint(tp1, tp2.superType) case (tp1: TermRef, tp2: TermRef) if isEnumValueOrModule(tp1) && isEnumValueOrModule(tp2) => tp1.termSymbol != tp2.termSymbol - case (tp1: Type, tp2: Type) if defn.isTupleType(tp1) => + case (tp1: TermRef, tp2: TypeRef) if isEnumValue(tp1) => + fullyInstantiated(tp2) && !tp1.classSymbols.exists(_.derivesFrom(tp2.symbol)) + case (tp1: TypeRef, tp2: TermRef) if isEnumValue(tp2) => + fullyInstantiated(tp1) && !tp2.classSymbols.exists(_.derivesFrom(tp1.symbol)) + case (tp1: Type, tp2: Type) if defn.isTupleNType(tp1) => provablyDisjoint(tp1.toNestedPairs, tp2) - case (tp1: Type, tp2: Type) if defn.isTupleType(tp2) => + case (tp1: Type, tp2: Type) if defn.isTupleNType(tp2) => provablyDisjoint(tp1, tp2.toNestedPairs) case (tp1: TypeProxy, tp2: TypeProxy) => provablyDisjoint(tp1.superType, tp2) || provablyDisjoint(tp1, tp2.superType) @@ -2632,8 +2743,8 @@ object TypeComparer { def matchingMethodParams(tp1: MethodType, tp2: MethodType)(using Context): Boolean = comparing(_.matchingMethodParams(tp1, tp2)) - def lub(tp1: Type, tp2: Type, canConstrain: Boolean = false)(using Context): Type = - comparing(_.lub(tp1, tp2, canConstrain)) + def lub(tp1: 
Type, tp2: Type, canConstrain: Boolean = false, isSoft: Boolean = true)(using Context): Type = + comparing(_.lub(tp1, tp2, canConstrain = canConstrain, isSoft = isSoft)) /** The least upper bound of a list of types */ final def lub(tps: List[Type])(using Context): Type = @@ -2649,8 +2760,8 @@ object TypeComparer { def glb(tps: List[Type])(using Context): Type = tps.foldLeft(defn.AnyType: Type)(glb) - def orType(using Context)(tp1: Type, tp2: Type, isErased: Boolean = ctx.erasedTypes): Type = - comparing(_.orType(tp1, tp2, isErased)) + def orType(using Context)(tp1: Type, tp2: Type, isSoft: Boolean = true, isErased: Boolean = ctx.erasedTypes): Type = + comparing(_.orType(tp1, tp2, isSoft = isSoft, isErased = isErased)) def andType(using Context)(tp1: Type, tp2: Type, isErased: Boolean = ctx.erasedTypes): Type = comparing(_.andType(tp1, tp2, isErased)) @@ -2696,8 +2807,8 @@ object TypeComparer { def dropTransparentTraits(tp: Type, bound: Type)(using Context): Type = comparing(_.dropTransparentTraits(tp, bound)) - def constrainPatternType(pat: Type, scrut: Type, widenParams: Boolean = true)(using Context): Boolean = - comparing(_.constrainPatternType(pat, scrut, widenParams)) + def constrainPatternType(pat: Type, scrut: Type, forceInvariantRefinement: Boolean = false)(using Context): Boolean = + comparing(_.constrainPatternType(pat, scrut, forceInvariantRefinement)) def explained[T](op: ExplainingTypeComparer => T, header: String = "Subtype trace:")(using Context): String = comparing(_.explained(op, header)) @@ -2785,7 +2896,7 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { caseLambda match { case caseLambda: HKTypeLambda => val instances = paramInstances(new Array(caseLambda.paramNames.length), pat) - instantiateParams(instances)(body) + instantiateParams(instances)(body).simplified case _ => body } @@ -2798,10 +2909,19 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { Some(NoType) } - def recur(cases: 
List[Type]): Type = cases match { - case cas :: cases1 => matchCase(cas).getOrElse(recur(cases1)) - case Nil => NoType - } + def recur(remaining: List[Type]): Type = remaining match + case cas :: remaining1 => + matchCase(cas) match + case None => + recur(remaining1) + case Some(NoType) => + MatchTypeTrace.stuck(scrut, cas, remaining1) + NoType + case Some(tp) => + tp + case Nil => + val casesText = MatchTypeTrace.noMatchesText(scrut, cases) + throw new TypeError(s"Match type reduction $casesText") inFrozenConstraint { // Empty types break the basic assumption that if a scrutinee and a @@ -2818,6 +2938,7 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { // we need to be careful that `provablyEmpty` covers all the conditions // used to conclude disjointness in `provablyDisjoint`. if (provablyEmpty(scrut)) + MatchTypeTrace.emptyScrutinee(scrut) NoType else recur(cases) @@ -2842,9 +2963,10 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { if (skipped) op else { indent += 2 - b.append("\n").append(" " * indent).append("==> ").append(str) + val str1 = str.replace('\n', ' ') + b.append("\n").append(" " * indent).append("==> ").append(str1) val res = op - b.append("\n").append(" " * indent).append("<== ").append(str).append(" = ").append(show(res)) + b.append("\n").append(" " * indent).append("<== ").append(str1).append(" = ").append(show(res)) indent -= 2 res } @@ -2852,17 +2974,13 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { private def frozenNotice: String = if frozenConstraint then " in frozen constraint" else "" - override def isSubType(tp1: Type, tp2: Type, approx: ApproxState): Boolean = + override def recur(tp1: Type, tp2: Type): Boolean = def moreInfo = if Config.verboseExplainSubtype || ctx.settings.verbose.value then s" ${tp1.getClass} ${tp2.getClass}" else "" + val approx = approxState traceIndented(s"${show(tp1)} <: 
${show(tp2)}$moreInfo${approx.show}$frozenNotice") { - super.isSubType(tp1, tp2, approx) - } - - override def recur(tp1: Type, tp2: Type): Boolean = - traceIndented(s"${show(tp1)} <: ${show(tp2)} (recurring)$frozenNotice") { super.recur(tp1, tp2) } @@ -2871,9 +2989,9 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { super.hasMatchingMember(name, tp1, tp2) } - override def lub(tp1: Type, tp2: Type, canConstrain: Boolean = false): Type = - traceIndented(s"lub(${show(tp1)}, ${show(tp2)}, canConstrain=$canConstrain)") { - super.lub(tp1, tp2, canConstrain) + override def lub(tp1: Type, tp2: Type, canConstrain: Boolean, isSoft: Boolean): Type = + traceIndented(s"lub(${show(tp1)}, ${show(tp2)}, canConstrain=$canConstrain, isSoft=$isSoft)") { + super.lub(tp1, tp2, canConstrain, isSoft) } override def glb(tp1: Type, tp2: Type): Type = @@ -2882,7 +3000,7 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { } override def addConstraint(param: TypeParamRef, bound: Type, fromBelow: Boolean)(using Context): Boolean = - traceIndented(i"add constraint $param ${if (fromBelow) ">:" else "<:"} $bound $frozenConstraint, constraint = ${ctx.typerState.constraint}") { + traceIndented(s"add constraint ${show(param)} ${if (fromBelow) ">:" else "<:"} ${show(bound)} $frozenNotice, constraint = ${show(ctx.typerState.constraint)}") { super.addConstraint(param, bound, fromBelow) } diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index 5ce82de63eed..1473bcc559e2 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -238,6 +238,7 @@ object TypeErasure { if (defn.isPolymorphicAfterErasure(sym)) eraseParamBounds(sym.info.asInstanceOf[PolyType]) else if (sym.isAbstractType) TypeAlias(WildcardType) + else if sym.is(ConstructorProxy) then NoType else if (sym.isConstructor) 
outer.addParam(sym.owner.asClass, erase(tp)(using preErasureCtx)) else if (sym.is(Label)) erase.eraseResult(sym.info)(using preErasureCtx) else erase.eraseInfo(tp, sym)(using preErasureCtx) match { @@ -426,10 +427,6 @@ object TypeErasure { * This operation has the following the properties: * - Associativity and commutativity, because this method acts as the minimum * of the total order induced by `compareErasedGlb`. - * - Java compatibility: intersections that would be valid in Java code are - * erased like javac would erase them (a Java intersection is composed of - * exactly one class and one or more interfaces and always erases to the - * class). */ def erasedGlb(tp1: Type, tp2: Type)(using Context): Type = if compareErasedGlb(tp1, tp2) <= 0 then tp1 else tp2 @@ -527,6 +524,15 @@ object TypeErasure { case tp: OrType => hasStableErasure(tp.tp1) && hasStableErasure(tp.tp2) case _ => false } + + /** The erasure of `PolyFunction { def apply: $applyInfo }` */ + def erasePolyFunctionApply(applyInfo: Type)(using Context): Type = + assert(applyInfo.isInstanceOf[PolyType]) + val res = applyInfo.resultType + val paramss = res.paramNamess + assert(paramss.length == 1) + erasure(defn.FunctionType(paramss.head.length, + isContextual = res.isImplicitMethod, isErased = res.isErasedMethod)) } import TypeErasure._ @@ -584,7 +590,7 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst val sym = tp.symbol if (!sym.isClass) this(tp.translucentSuperType) else if (semiEraseVCs && isDerivedValueClass(sym)) eraseDerivedValueClass(tp) - else if (defn.isSyntheticFunctionClass(sym)) defn.erasedFunctionType(sym) + else if (defn.isSyntheticFunctionClass(sym)) defn.functionTypeErasure(sym) else eraseNormalClassRef(tp) case tp: AppliedType => val tycon = tp.tycon @@ -600,15 +606,13 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst case ExprType(rt) => defn.FunctionType(0) case RefinedType(parent, nme.apply, refinedInfo) if 
parent.typeSymbol eq defn.PolyFunctionClass => - assert(refinedInfo.isInstanceOf[PolyType]) - val res = refinedInfo.resultType - val paramss = res.paramNamess - assert(paramss.length == 1) - this(defn.FunctionType(paramss.head.length, isContextual = res.isImplicitMethod, isErased = res.isErasedMethod)) + erasePolyFunctionApply(refinedInfo) case tp: TypeProxy => this(tp.underlying) case tp @ AndType(tp1, tp2) => - if sourceLanguage.isScala2 then + if sourceLanguage.isJava then + this(tp1) + else if sourceLanguage.isScala2 then this(Scala2Erasure.intersectionDominator(Scala2Erasure.flattenedParents(tp))) else erasedGlb(this(tp1), this(tp2)) @@ -655,7 +659,14 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst tr1 :: trs1.filterNot(_.isAnyRef) case nil => nil } - val erasedDecls = decls.filteredScope(sym => !sym.isType || sym.isClass) + var erasedDecls = decls.filteredScope(sym => !sym.isType || sym.isClass).openForMutations + for dcl <- erasedDecls.iterator do + if dcl.lastKnownDenotation.unforcedAnnotation(defn.TargetNameAnnot).isDefined + && dcl.targetName != dcl.name + then + if erasedDecls eq decls then erasedDecls = erasedDecls.cloneScope + erasedDecls.unlink(dcl) + erasedDecls.enter(dcl.targetName, dcl) val selfType1 = if cls.is(Module) then cls.sourceModule.termRef else NoType tp.derivedClassInfo(NoPrefix, erasedParents, erasedDecls, selfType1) // can't replace selftype by NoType because this would lose the sourceModule link @@ -698,7 +709,7 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst // See doc comment for ElimByName for speculation how we could improve this. 
else MethodType(Nil, Nil, - eraseResult(sym.info.finalResultType.translateFromRepeated(toArray = sourceLanguage.isJava))) + eraseResult(rt.translateFromRepeated(toArray = sourceLanguage.isJava))) case tp1: PolyType => eraseResult(tp1.resultType) match case rt: MethodType => rt @@ -726,7 +737,7 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst // erased like `Array[A]` as seen from its definition site, no matter // the `X` (same if `A` is bounded). // - // The binary compatibility is checked by sbt-dotty/sbt-test/scala2-compat/i8001 + // The binary compatibility is checked by sbt-test/scala2-compat/i8001 val erasedValueClass = if erasedUnderlying.isPrimitiveValueType && !genericUnderlying.isPrimitiveValueType then defn.boxedType(erasedUnderlying) @@ -785,7 +796,7 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst if (erasedVCRef.exists) return sigName(erasedVCRef) } if (defn.isSyntheticFunctionClass(sym)) - sigName(defn.erasedFunctionType(sym)) + sigName(defn.functionTypeErasure(sym)) else val cls = normalizeClass(sym.asClass) val fullName = @@ -821,10 +832,10 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst sigName(this(tp)) case tp: TypeProxy => sigName(tp.underlying) - case _: ErrorType | WildcardType | NoType => - tpnme.WILDCARD case tp: WildcardType => - sigName(tp.optBounds) + tpnme.Uninstantiated + case _: ErrorType | NoType => + tpnme.ERROR case _ => val erasedTp = this(tp) assert(erasedTp ne tp, tp) diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index 950963497fbc..c9ca98f65f5e 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -141,7 +141,7 @@ class CyclicReference private (val denot: SymDenotation) extends TypeError { } // Give up and give generic errors. 
- else if (cycleSym.isOneOf(GivenOrImplicit, butNot = Method) && cycleSym.owner.isTerm) + else if (cycleSym.isOneOf(GivenOrImplicitVal, butNot = Method) && cycleSym.owner.isTerm) CyclicReferenceInvolvingImplicit(cycleSym) else CyclicReferenceInvolving(denot) diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index eb353744ae82..2d5c2a6da88a 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -6,7 +6,6 @@ import Contexts._, Types._, Symbols._, Names._, Flags._ import SymDenotations._ import util.Spans._ import util.Stats -import NameKinds.DepParamName import Decorators._ import StdNames._ import collection.mutable @@ -138,6 +137,8 @@ object TypeOps: tp2 case tp1 => tp1 } + case defn.MatchCase(pat, body) => + defn.MatchCase(simplify(pat, theMap), body) case tp: AppliedType => tp.tycon match case tycon: TypeRef if tycon.info.isInstanceOf[MatchAlias] => @@ -163,14 +164,22 @@ object TypeOps: // with Nulls (which have no base classes). Under -Yexplicit-nulls, we take // corrective steps, so no widening is wanted. simplify(l, theMap) | simplify(r, theMap) - case AnnotatedType(parent, annot) - if !ctx.mode.is(Mode.Type) && annot.symbol == defn.UncheckedVarianceAnnot => - simplify(parent, theMap) + case tp @ AnnotatedType(parent, annot) => + val parent1 = simplify(parent, theMap) + if annot.symbol == defn.UncheckedVarianceAnnot + && !ctx.mode.is(Mode.Type) + && !theMap.isInstanceOf[SimplifyKeepUnchecked] + then parent1 + else tp.derivedAnnotatedType(parent1, annot) case _: MatchType => val normed = tp.tryNormalize if (normed.exists) normed else mapOver case tp: MethodicType => tp // See documentation of `Types#simplified` + case tp: SkolemType => + // Mapping over a skolem creates a new skolem which by definition won't + // be =:= to the original one. 
+ tp case _ => mapOver } @@ -180,6 +189,8 @@ object TypeOps: def apply(tp: Type): Type = simplify(tp, this) } + class SimplifyKeepUnchecked(using Context) extends SimplifyMap + /** Approximate union type by intersection of its dominators. * That is, replace a union type Tn | ... | Tn * by the smallest intersection type of base-class instances of T1,...,Tn. @@ -343,7 +354,12 @@ object TypeOps: tp match { case tp: OrType => - approximateOr(tp.tp1, tp.tp2) + (tp.tp1.dealias, tp.tp2.dealias) match + case (tp1 @ AppliedType(tycon1, args1), tp2 @ AppliedType(tycon2, args2)) + if tycon1.typeSymbol == tycon2.typeSymbol && (tycon1 =:= tycon2) => + mergeRefinedOrApplied(tp1, tp2) + case (tp1, tp2) => + approximateOr(tp1, tp2) case _ => tp } @@ -394,93 +410,109 @@ object TypeOps: } } - /** An upper approximation of the given type `tp` that does not refer to any symbol in `symsToAvoid`. + /** An approximating map that drops NamedTypes matching `toAvoid` and wildcard types. */ + abstract class AvoidMap(using Context) extends AvoidWildcardsMap: + @threadUnsafe lazy val localParamRefs = util.HashSet[Type]() + + def toAvoid(tp: NamedType): Boolean + + /** True iff all NamedTypes on this prefix are static */ + override def isStaticPrefix(pre: Type)(using Context): Boolean = pre match + case pre: NamedType => + val sym = pre.currentSymbol + sym.is(Package) || sym.isStatic && isStaticPrefix(pre.prefix) + case _ => true + + override def apply(tp: Type): Type = tp match + case tp: TermRef + if toAvoid(tp) => + tp.info.widenExpr.dealias match { + case info: SingletonType => apply(info) + case info => range(defn.NothingType, apply(info)) + } + case tp: TypeRef if toAvoid(tp) => + tp.info match { + case info: AliasingBounds => + apply(info.alias) + case TypeBounds(lo, hi) => + range(atVariance(-variance)(apply(lo)), apply(hi)) + case info: ClassInfo => + range(defn.NothingType, apply(classBound(info))) + case _ => + emptyRange // should happen only in error cases + } + case tp: ThisType 
=> + // ThisType is only used inside a class. + // Therefore, either they don't appear in the type to be avoided, or + // it must be a class that encloses the block whose type is to be avoided. + tp + case tp: LazyRef => + if localParamRefs.contains(tp.ref) then tp + else if isExpandingBounds then emptyRange + else mapOver(tp) + case tl: HKTypeLambda => + localParamRefs ++= tl.paramRefs + mapOver(tl) + case _ => + super.apply(tp) + end apply + + /** Three deviations from standard derivedSelect: + * 1. We first try a widening conversion to the type's info with + * the original prefix. Since the original prefix is known to + * be a subtype of the returned prefix, this can improve results. + * 2. Then, if the approximation result is a singleton reference C#x.type, we + * replace by the widened type, which is usually more natural. + * 3. Finally, we need to handle the case where the prefix type does not have a member + * named `tp.name` anymmore. In that case, we need to fall back to Bot..Top. + */ + override def derivedSelect(tp: NamedType, pre: Type) = + if (pre eq tp.prefix) + tp + else tryWiden(tp, tp.prefix).orElse { + if (tp.isTerm && variance > 0 && !pre.isSingleton) + apply(tp.info.widenExpr) + else if (upper(pre).member(tp.name).exists) + super.derivedSelect(tp, pre) + else + range(defn.NothingType, defn.AnyType) + } + end AvoidMap + + /** An upper approximation of the given type `tp` that does not refer to any symbol in `symsToAvoid` + * and does not contain any WildcardType. * We need to approximate with ranges: * * term references to symbols in `symsToAvoid`, * term references that have a widened type of which some part refers * to a symbol in `symsToAvoid`, * type references to symbols in `symsToAvoid`, - * this types of classes in `symsToAvoid`. * * Type variables that would be interpolated to a type that * needs to be widened are replaced by the widened interpolation instance. 
+ * + * TODO: Could we replace some or all usages of this method by + * `LevelAvoidMap` instead? It would be good to investigate this in details + * but when I tried it, avoidance for inlined trees broke because `TreeMap` + * does not update `ctx.nestingLevel` when entering a block so I'm leaving + * this as Future Work™. */ def avoid(tp: Type, symsToAvoid: => List[Symbol])(using Context): Type = { - val widenMap = new ApproximatingTypeMap { + val widenMap = new AvoidMap { @threadUnsafe lazy val forbidden = symsToAvoid.toSet - @threadUnsafe lazy val localParamRefs = util.HashSet[Type]() - def toAvoid(sym: Symbol) = !sym.isStatic && forbidden.contains(sym) - def partsToAvoid = new NamedPartsAccumulator(tp => toAvoid(tp.symbol)) + def toAvoid(tp: NamedType) = + val sym = tp.symbol + !sym.isStatic && forbidden.contains(sym) - /** True iff all NamedTypes on this prefix are static */ - override def isStaticPrefix(pre: Type)(using Context): Boolean = pre match - case pre: NamedType => - val sym = pre.currentSymbol - sym.is(Package) || sym.isStatic && isStaticPrefix(pre.prefix) - case _ => true - - def apply(tp: Type): Type = tp match { - case tp: TermRef - if toAvoid(tp.symbol) || partsToAvoid(Nil, tp.info).nonEmpty => - tp.info.widenExpr.dealias match { - case info: SingletonType => apply(info) - case info => range(defn.NothingType, apply(info)) - } - case tp: TypeRef if toAvoid(tp.symbol) => - tp.info match { - case info: AliasingBounds => - apply(info.alias) - case TypeBounds(lo, hi) => - range(atVariance(-variance)(apply(lo)), apply(hi)) - case info: ClassInfo => - range(defn.NothingType, apply(classBound(info))) - case _ => - emptyRange // should happen only in error cases - } - case tp: ThisType => - // ThisType is only used inside a class. - // Therefore, either they don't appear in the type to be avoided, or - // it must be a class that encloses the block whose type is to be avoided. 
- tp - case tp: SkolemType if partsToAvoid(Nil, tp.info).nonEmpty => - range(defn.NothingType, apply(tp.info)) + override def apply(tp: Type): Type = tp match case tp: TypeVar if mapCtx.typerState.constraint.contains(tp) => val lo = TypeComparer.instanceType( tp.origin, fromBelow = variance > 0 || variance == 0 && tp.hasLowerBound)(using mapCtx) val lo1 = apply(lo) if (lo1 ne lo) lo1 else tp - case tp: LazyRef => - if localParamRefs.contains(tp.ref) then tp - else if isExpandingBounds then emptyRange - else mapOver(tp) - case tl: HKTypeLambda => - localParamRefs ++= tl.paramRefs - mapOver(tl) case _ => - mapOver(tp) - } - - /** Three deviations from standard derivedSelect: - * 1. We first try a widening conversion to the type's info with - * the original prefix. Since the original prefix is known to - * be a subtype of the returned prefix, this can improve results. - * 2. Then, if the approximation result is a singleton reference C#x.type, we - * replace by the widened type, which is usually more natural. - * 3. Finally, we need to handle the case where the prefix type does not have a member - * named `tp.name` anymmore. In that case, we need to fall back to Bot..Top. - */ - override def derivedSelect(tp: NamedType, pre: Type) = - if (pre eq tp.prefix) - tp - else tryWiden(tp, tp.prefix).orElse { - if (tp.isTerm && variance > 0 && !pre.isSingleton) - apply(tp.info.widenExpr) - else if (upper(pre).member(tp.name).exists) - super.derivedSelect(tp, pre) - else - range(defn.NothingType, defn.AnyType) - } + super.apply(tp) + end apply } widenMap(tp) @@ -685,14 +717,28 @@ object TypeOps: */ private def instantiateToSubType(tp1: NamedType, tp2: Type)(using Context): Type = { // In order for a child type S to qualify as a valid subtype of the parent - // T, we need to test whether it is possible S <: T. Therefore, we replace - // type parameters in T with tvars, and see if the subtyping is true. 
- val approximateTypeParams = new TypeMap { + // T, we need to test whether it is possible S <: T. + // + // The check is different from subtype checking due to type parameters and + // `this`. We perform the following operations to approximate the parameters: + // + // 1. Replace type parameters in T with tvars + // 2. Replace `A.this.C` with `A#C` (see tests/patmat/i12681.scala) + // 3. Replace non-reducing MatchType with its bound + // + val approximateParent = new TypeMap { val boundTypeParams = util.HashMap[TypeRef, TypeVar]() def apply(tp: Type): Type = tp.dealias match { - case _: MatchType => - tp // break cycles + case tp: MatchType => + val reduced = tp.reduced + if reduced.exists then tp // break cycles + else mapOver(tp.bound) // if the match type doesn't statically reduce + // then to avoid it failing the <:< + // we'll approximate by widening to its bounds + + case ThisType(tref: TypeRef) if !tref.symbol.isStaticOwner => + tref case tp: TypeRef if !tp.symbol.isClass => def lo = LazyRef.of(apply(tp.underlying.loBound)) @@ -710,7 +756,7 @@ object TypeOps: tv end if - case AppliedType(tycon: TypeRef, _) if !tycon.dealias.typeSymbol.isClass => + case tp @ AppliedType(tycon: TypeRef, _) if !tycon.dealias.typeSymbol.isClass && !tp.isMatchAlias => // In tests/patmat/i3645g.scala, we need to tell whether it's possible // that K1 <: K[Foo]. If yes, we issue a warning; otherwise, no @@ -762,7 +808,8 @@ object TypeOps: this(tref) else { prefixTVar = WildcardType // prevent recursive call from assigning it - prefixTVar = newTypeVar(TypeBounds.upper(this(tref))) + val tref2 = this(tref.applyIfParameterized(tref.typeParams.map(_ => TypeBounds.empty))) + prefixTVar = newTypeVar(TypeBounds.upper(tref2)) prefixTVar } case tp => mapOver(tp) @@ -779,7 +826,7 @@ object TypeOps: // we manually patch subtyping check instead of changing TypeComparer. 
// See tests/patmat/i3645b.scala def parentQualify(tp1: Type, tp2: Type) = tp1.classSymbol.info.parents.exists { parent => - parent.argInfos.nonEmpty && approximateTypeParams(parent) <:< tp2 + parent.argInfos.nonEmpty && approximateParent(parent) <:< tp2 } def instantiate(): Type = { @@ -789,8 +836,8 @@ object TypeOps: if (protoTp1 <:< tp2) instantiate() else { - val protoTp2 = approximateTypeParams(tp2) - if (protoTp1 <:< protoTp2 || parentQualify(protoTp1, protoTp2)) instantiate() + val approxTp2 = approximateParent(tp2) + if (protoTp1 <:< approxTp2 || parentQualify(protoTp1, approxTp2)) instantiate() else NoType } } @@ -798,4 +845,11 @@ object TypeOps: def nestedPairs(ts: List[Type])(using Context): Type = ts.foldRight(defn.EmptyTupleModule.termRef: Type)(defn.PairClass.typeRef.appliedTo(_, _)) + class StripTypeVarsMap(using Context) extends TypeMap: + def apply(tp: Type) = mapOver(tp).stripTypeVar + + /** Apply [[Type.stripTypeVar]] recursively. */ + def stripTypeVars(tp: Type)(using Context): Type = + new StripTypeVarsMap().apply(tp) + end TypeOps diff --git a/compiler/src/dotty/tools/dotc/core/TyperState.scala b/compiler/src/dotty/tools/dotc/core/TyperState.scala index 8b4b6a476d1b..6163eebfbef4 100644 --- a/compiler/src/dotty/tools/dotc/core/TyperState.scala +++ b/compiler/src/dotty/tools/dotc/core/TyperState.scala @@ -22,6 +22,22 @@ object TyperState { .init(null, OrderingConstraint.empty) .setReporter(new ConsoleReporter()) .setCommittable(true) + + opaque type Snapshot = (Constraint, TypeVars, TypeVars) + + extension (ts: TyperState) + def snapshot()(using Context): Snapshot = + var previouslyInstantiated: TypeVars = SimpleIdentitySet.empty + for tv <- ts.ownedVars do if tv.inst.exists then previouslyInstantiated += tv + (ts.constraint, ts.ownedVars, previouslyInstantiated) + + def resetTo(state: Snapshot)(using Context): Unit = + val (c, tvs, previouslyInstantiated) = state + for tv <- tvs do + if tv.inst.exists && !previouslyInstantiated.contains(tv) 
then + tv.resetInst(ts) + ts.ownedVars = tvs + ts.constraint = c } class TyperState() { @@ -44,6 +60,8 @@ class TyperState() { def constraint_=(c: Constraint)(using Context): Unit = { if (Config.debugCheckConstraintsClosed && isGlobalCommittable) c.checkClosed() myConstraint = c + if Config.checkConsistentVars && !ctx.reporter.errorsReported then + c.checkConsistentVars() } private var previousConstraint: Constraint = _ @@ -61,7 +79,12 @@ class TyperState() { private var isCommitted: Boolean = _ - /** The set of uninstantiated type variables which have this state as their owning state */ + /** The set of uninstantiated type variables which have this state as their owning state. + * + * Invariant: + * if `tstate.isCommittable` then + * `tstate.ownedVars.contains(tvar)` iff `tvar.owningState.get eq tstate` + */ private var myOwnedVars: TypeVars = _ def ownedVars: TypeVars = myOwnedVars def ownedVars_=(vs: TypeVars): Unit = myOwnedVars = vs @@ -80,11 +103,12 @@ class TyperState() { this /** A fresh typer state with the same constraint as this one. 
*/ - def fresh(reporter: Reporter = StoreReporter(this.reporter)): TyperState = + def fresh(reporter: Reporter = StoreReporter(this.reporter, fromTyperState = true), + committable: Boolean = this.isCommittable): TyperState = util.Stats.record("TyperState.fresh") TyperState().init(this, this.constraint) .setReporter(reporter) - .setCommittable(this.isCommittable) + .setCommittable(committable) /** The uninstantiated variables */ def uninstVars: collection.Seq[TypeVar] = constraint.uninstVars @@ -115,24 +139,110 @@ class TyperState() { */ def commit()(using Context): Unit = { Stats.record("typerState.commit") - assert(isCommittable) + assert(isCommittable, s"$this is not committable") + assert(!isCommitted, s"$this is already committed") + reporter.flush() + setCommittable(false) val targetState = ctx.typerState + + // Committing into an already committed TyperState usually doesn't make + // sense since it means the constraints we're committing won't be propagated + // further, but it can happen if the targetState gets captured in a reported + // Message, because forcing that Message might involve creating and + // committing new TyperStates into the captured one after its been committed. 
+ assert(!targetState.isCommitted || targetState.reporter.hasErrors || targetState.reporter.hasWarnings, + s"Attempt to commit $this into already committed $targetState") + if constraint ne targetState.constraint then Stats.record("typerState.commit.new constraint") constr.println(i"committing $this to $targetState, fromConstr = $constraint, toConstr = ${targetState.constraint}") - if targetState.constraint eq previousConstraint then targetState.constraint = constraint - else targetState.mergeConstraintWith(this) - if !ownedVars.isEmpty then - for tvar <- ownedVars do - tvar.owningState = new WeakReference(targetState) - targetState.ownedVars ++= ownedVars + if targetState.constraint eq previousConstraint then + targetState.constraint = constraint + if !ownedVars.isEmpty then ownedVars.foreach(targetState.includeVar) + else + targetState.mergeConstraintWith(this) targetState.gc() - reporter.flush() isCommitted = true + ownedVars = SimpleIdentitySet.empty } - def mergeConstraintWith(that: TyperState)(using Context): Unit = - constraint = constraint & (that.constraint, otherHasErrors = that.reporter.errorsReported) + /** Ensure that this constraint does not associate different TypeVars for the + * same type lambda than the `other` constraint. Do this by renaming type lambdas + * in this constraint and its predecessors where necessary. + */ + def ensureNotConflicting(other: Constraint)(using Context): Unit = + val conflicting = constraint.domainLambdas.filter(constraint.hasConflictingTypeVarsFor(_, other)) + for tl <- conflicting do + val tl1 = constraint.ensureFresh(tl) + for case (tvar: TypeVar, pref1) <- tl.paramRefs.map(constraint.typeVarOfParam).lazyZip(tl1.paramRefs) do + tvar.setOrigin(pref1) + var ts = this + while ts.constraint.domainLambdas.contains(tl) do + ts.constraint = ts.constraint.subst(tl, tl1) + ts = ts.previous + + /** Integrate the constraints from `that` into this TyperState. 
+ * + * @pre If `this` and `that` are committable, `that` must not contain any type variable which + * does not exist in `this` (in other words, all its type variables must + * be owned by a common parent of `this` and `that`). + */ + def mergeConstraintWith(that: TyperState)(using Context): this.type = + if this eq that then return this + + that.ensureNotConflicting(constraint) + + val comparingCtx = ctx.withTyperState(this) + + inContext(comparingCtx)(comparing(typeComparer => + val other = that.constraint + val res = other.domainLambdas.forall(tl => + // Integrate the type lambdas from `other` + constraint.contains(tl) || other.isRemovable(tl) || { + val tvars = tl.paramRefs.map(other.typeVarOfParam(_)).collect { case tv: TypeVar => tv } + if this.isCommittable then + tvars.foreach(tvar => if !tvar.inst.exists && !isOwnedAnywhere(this, tvar) then includeVar(tvar)) + typeComparer.addToConstraint(tl, tvars) + }) && + // Integrate the additional constraints on type variables from `other` + constraint.uninstVars.forall(tv => + val p = tv.origin + val otherLos = other.lower(p) + val otherHis = other.upper(p) + val otherEntry = other.entry(p) + ( (otherLos eq constraint.lower(p)) || otherLos.forall(_ <:< p)) && + ( (otherHis eq constraint.upper(p)) || otherHis.forall(p <:< _)) && + ((otherEntry eq constraint.entry(p)) || otherEntry.match + case NoType => + true + case tp: TypeBounds => + tp.contains(tv) + case tp => + tv =:= tp + ) + ) + assert(res || ctx.reporter.errorsReported, i"cannot merge $constraint with $other.") + )) + + for tl <- constraint.domainLambdas do + if constraint.isRemovable(tl) then constraint = constraint.remove(tl) + this + end mergeConstraintWith + + /** Take ownership of `tvar`. + * + * @pre `tvar` is not owned by a committable TyperState. This ensures + * each tvar can only be instantiated by one TyperState. 
+ */ + private def includeVar(tvar: TypeVar)(using Context): Unit = + val oldState = tvar.owningState.get + assert(oldState == null || !oldState.isCommittable, + i"$this attempted to take ownership of $tvar which is already owned by committable $oldState") + tvar.owningState = new WeakReference(this) + ownedVars += tvar + + private def isOwnedAnywhere(ts: TyperState, tvar: TypeVar): Boolean = + ts.ownedVars.contains(tvar) || ts.previous != null && isOwnedAnywhere(ts.previous, tvar) /** Make type variable instances permanent by assigning to `inst` field if * type variable instantiation cannot be retracted anymore. Then, remove @@ -143,14 +253,15 @@ class TyperState() { Stats.record("typerState.gc") val toCollect = new mutable.ListBuffer[TypeLambda] for tvar <- ownedVars do - if !tvar.inst.exists then - val inst = constraint.instType(tvar) - if inst.exists then - tvar.inst = inst - val lam = tvar.origin.binder - if constraint.isRemovable(lam) then toCollect += lam - for poly <- toCollect do - constraint = constraint.remove(poly) + assert(tvar.owningState.get eq this, s"Inconsistent state in $this: it owns $tvar whose owningState is ${tvar.owningState.get}") + assert(!tvar.inst.exists, s"Inconsistent state in $this: it owns $tvar which is already instantiated") + val inst = constraint.instType(tvar) + if inst.exists then + tvar.setInst(inst) + val tl = tvar.origin.binder + if constraint.isRemovable(tl) then toCollect += tl + for tl <- toCollect do + constraint = constraint.remove(tl) override def toString: String = { def ids(state: TyperState): List[String] = diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index f9cbf932f9c8..b4a6172a6659 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -168,7 +168,11 @@ object Types { case _: SingletonType | NoPrefix => true case tp: RefinedOrRecType => tp.parent.isStable case tp: ExprType => 
tp.resultType.isStable - case tp: AnnotatedType => tp.parent.isStable + case tp: AnnotatedType => + // NOTE UncheckedStableAnnot was originally meant to be put on fields, + // not on types. Allowing it on types is a Scala 3 extension. See: + // https://www.scala-lang.org/files/archive/spec/2.11/11-annotations.html#scala-compiler-annotations + tp.annot.symbol == defn.UncheckedStableAnnot || tp.parent.isStable case tp: AndType => // TODO: fix And type check when tp contains type parames for explicit-nulls flow-typing // see: tests/explicit-nulls/pos/flow-stable.scala.disabled @@ -295,7 +299,13 @@ object Types { loop(this) } - def isFromJavaObject(using Context): Boolean = typeSymbol eq defn.FromJavaObjectSymbol + def isFromJavaObject(using Context): Boolean = + isRef(defn.ObjectClass) && (typeSymbol eq defn.FromJavaObjectSymbol) + + def containsFromJavaObject(using Context): Boolean = this match + case tp: OrType => tp.tp1.containsFromJavaObject || tp.tp2.containsFromJavaObject + case tp: AndType => tp.tp1.containsFromJavaObject && tp.tp2.containsFromJavaObject + case _ => isFromJavaObject /** True iff `symd` is a denotation of a class type parameter and the reference * `
 . ` is an actual argument reference, i.e. `pre` is not the
@@ -423,18 +433,23 @@ object Types {
     def isMatch(using Context): Boolean = stripped match {
       case _: MatchType => true
       case tp: HKTypeLambda => tp.resType.isMatch
+      case tp: AppliedType => tp.isMatchAlias
       case _ => false
     }
 
     /** Is this a higher-kinded type lambda with given parameter variances? */
     def isDeclaredVarianceLambda: Boolean = false
 
+    /** Does this type contain wildcard types? */
+    final def containsWildcardTypes(using Context) =
+      existsPart(_.isInstanceOf[WildcardType], StopAt.Static, forceLazy = false)
+
 // ----- Higher-order combinators -----------------------------------
 
     /** Returns true if there is a part of this type that satisfies predicate `p`.
      */
-    final def existsPart(p: Type => Boolean, stopAtStatic: Boolean = false, forceLazy: Boolean = true)(using Context): Boolean =
-      new ExistsAccumulator(p, stopAtStatic, forceLazy).apply(false, this)
+    final def existsPart(p: Type => Boolean, stopAt: StopAt = StopAt.None, forceLazy: Boolean = true)(using Context): Boolean =
+      new ExistsAccumulator(p, stopAt, forceLazy).apply(false, this)
 
     /** Returns true if all parts of this type satisfy predicate `p`.
      */
@@ -442,8 +457,8 @@ object Types {
       !existsPart(!p(_))
 
     /** Performs operation on all parts of this type */
-    final def foreachPart(p: Type => Unit, stopAtStatic: Boolean = false)(using Context): Unit =
-      new ForeachAccumulator(p, stopAtStatic).apply((), this)
+    final def foreachPart(p: Type => Unit, stopAt: StopAt = StopAt.None)(using Context): Unit =
+      new ForeachAccumulator(p, stopAt).apply((), this)
 
     /** The parts of this type which are type or term refs and which
      *  satisfy predicate `p`.
@@ -838,18 +853,23 @@ object Types {
       def goAnd(l: Type, r: Type) =
         go(l).meet(go(r), pre, safeIntersection = ctx.base.pendingMemberSearches.contains(name))
 
-      def goOr(tp: OrType) = tp match {
-        case OrNull(tp1) if Nullables.unsafeNullsEnabled =>
-          // Selecting `name` from a type `T | Null` is like selecting `name` from `T`, if
-          // unsafeNulls is enabled. This can throw at runtime, but we trade soundness for usability.
-          tp1.findMember(name, pre.stripNull, required, excluded)
-        case _ =>
+      def goOr(tp: OrType) =
+        inline def searchAfterJoin =
           // we need to keep the invariant that `pre <: tp`. Branch `union-types-narrow-prefix`
           // achieved that by narrowing `pre` to each alternative, but it led to merge errors in
           // lots of places. The present strategy is instead of widen `tp` using `join` to be a
           // supertype of `pre`.
           go(tp.join)
-      }
+
+        if Nullables.unsafeNullsEnabled then tp match
+          case OrNull(tp1) if tp1 <:< defn.ObjectType  =>
+            // Selecting `name` from a type `T | Null` is like selecting `name` from `T`, if
+            // unsafeNulls is enabled and T is a subtype of AnyRef.
+            // This can throw at runtime, but we trade soundness for usability.
+            tp1.findMember(name, pre.stripNull, required, excluded)
+          case _ =>
+            searchAfterJoin
+        else searchAfterJoin
 
       val recCount = ctx.base.findMemberCount
       if (recCount >= Config.LogPendingFindMemberThreshold)
@@ -1038,17 +1058,15 @@ object Types {
       TypeComparer.isSameTypeWhenFrozen(this, that)
 
     /** Is this type a primitive value type which can be widened to the primitive value type `that`? */
-    def isValueSubType(that: Type)(using Context): Boolean = widen match {
+    def isValueSubType(that: Type)(using Context): Boolean = widenDealias match
       case self: TypeRef if self.symbol.isPrimitiveValueClass =>
-        that.widenExpr match {
+        that.widenExpr.dealias match
           case that: TypeRef if that.symbol.isPrimitiveValueClass =>
             defn.isValueSubClass(self.symbol, that.symbol)
           case _ =>
             false
-        }
       case _ =>
         false
-    }
 
     def relaxed_<:<(that: Type)(using Context): Boolean =
       (this <:< that) || (this isValueSubType that)
@@ -1272,13 +1290,10 @@ object Types {
       case tp =>
         tp.widenUnionWithoutNull
 
+    /** Overridden in OrType */
     def widenUnionWithoutNull(using Context): Type = widen match
-      case tp @ OrType(lhs, rhs) if tp.isSoft =>
-        TypeComparer.lub(lhs.widenUnionWithoutNull, rhs.widenUnionWithoutNull, canConstrain = true) match
-          case union: OrType => union.join
-          case res => res
-      case tp: AndOrType =>
-        tp.derivedAndOrType(tp.tp1.widenUnionWithoutNull, tp.tp2.widenUnionWithoutNull)
+      case tp: AndType =>
+        tp.derivedAndType(tp.tp1.widenUnionWithoutNull, tp.tp2.widenUnionWithoutNull)
       case tp: RefinedType =>
         tp.derivedRefinedType(tp.parent.widenUnion, tp.refinedName, tp.refinedInfo)
       case tp: RecType =>
@@ -1348,25 +1363,26 @@ object Types {
             case Atoms.Unknown => Atoms.Unknown
         case _ => Atoms.Unknown
 
-    private def dealias1(keep: AnnotatedType => Context ?=> Boolean)(using Context): Type = this match {
+    private def dealias1(keep: AnnotatedType => Context ?=> Boolean, keepOpaques: Boolean)(using Context): Type = this match {
       case tp: TypeRef =>
         if (tp.symbol.isClass) tp
         else tp.info match {
-          case TypeAlias(alias) => alias.dealias1(keep)
+          case TypeAlias(alias) if !(keepOpaques && tp.symbol.is(Opaque)) =>
+            alias.dealias1(keep, keepOpaques)
           case _ => tp
         }
       case app @ AppliedType(tycon, _) =>
-        val tycon1 = tycon.dealias1(keep)
-        if (tycon1 ne tycon) app.superType.dealias1(keep)
+        val tycon1 = tycon.dealias1(keep, keepOpaques)
+        if (tycon1 ne tycon) app.superType.dealias1(keep, keepOpaques)
         else this
       case tp: TypeVar =>
         val tp1 = tp.instanceOpt
-        if (tp1.exists) tp1.dealias1(keep) else tp
+        if (tp1.exists) tp1.dealias1(keep, keepOpaques) else tp
       case tp: AnnotatedType =>
-        val tp1 = tp.parent.dealias1(keep)
+        val tp1 = tp.parent.dealias1(keep, keepOpaques)
         if keep(tp) then tp.derivedAnnotatedType(tp1, tp.annot) else tp1
       case tp: LazyRef =>
-        tp.ref.dealias1(keep)
+        tp.ref.dealias1(keep, keepOpaques)
       case _ => this
     }
 
@@ -1374,16 +1390,33 @@ object Types {
      *  TypeVars until type is no longer alias type, annotated type, LazyRef,
      *  or instantiated type variable.
      */
-    final def dealias(using Context): Type = dealias1(keepNever)
+    final def dealias(using Context): Type = dealias1(keepNever, keepOpaques = false)
 
     /** Follow aliases and dereferences LazyRefs and instantiated TypeVars until type
      *  is no longer alias type, LazyRef, or instantiated type variable.
      *  Goes through annotated types and rewraps annotations on the result.
      */
-    final def dealiasKeepAnnots(using Context): Type = dealias1(keepAlways)
+    final def dealiasKeepAnnots(using Context): Type = dealias1(keepAlways, keepOpaques = false)
 
     /** Like `dealiasKeepAnnots`, but keeps only refining annotations */
-    final def dealiasKeepRefiningAnnots(using Context): Type = dealias1(keepIfRefining)
+    final def dealiasKeepRefiningAnnots(using Context): Type = dealias1(keepIfRefining, keepOpaques = false)
+
+    /** Follow non-opaque aliases and dereferences LazyRefs, annotated types and instantiated
+     *  TypeVars until type is no longer alias type, annotated type, LazyRef,
+     *  or instantiated type variable.
+     */
+    final def dealiasKeepOpaques(using Context): Type = dealias1(keepNever, keepOpaques = true)
+
+    /** Approximate this type with a type that does not contain skolem types. */
+    final def deskolemized(using Context): Type =
+      val deskolemizer = new ApproximatingTypeMap {
+        def apply(tp: Type) = /*trace(i"deskolemize($tp) at $variance", show = true)*/
+          tp match {
+            case tp: SkolemType => range(defn.NothingType, atVariance(1)(apply(tp.info)))
+            case _ => mapOver(tp)
+          }
+      }
+      deskolemizer(this)
 
     /** The result of normalization using `tryNormalize`, or the type itself if
      *  tryNormlize yields NoType
@@ -1400,7 +1433,7 @@ object Types {
     def tryNormalize(using Context): Type = NoType
 
     private def widenDealias1(keep: AnnotatedType => Context ?=> Boolean)(using Context): Type = {
-      val res = this.widen.dealias1(keep)
+      val res = this.widen.dealias1(keep, keepOpaques = false)
       if (res eq this) res else res.widenDealias1(keep)
     }
 
@@ -1641,6 +1674,20 @@ object Types {
       case _ => resultType
     }
 
+    /** Determine the expected function type from the prototype. If multiple
+     *  function types are found in a union or intersection, their intersection
+     *  is returned. If no function type is found, Any is returned.
+     */
+    def findFunctionType(using Context): Type = dealias match
+      case tp: AndOrType =>
+        tp.tp1.findFunctionType & tp.tp2.findFunctionType
+      case t if defn.isNonRefinedFunction(t) =>
+        t
+      case t @ SAMType(_) =>
+        t
+      case _ =>
+        defn.AnyType
+
     /** This type seen as a TypeBounds */
     final def bounds(using Context): TypeBounds = this match {
       case tp: TypeBounds => tp
@@ -1688,6 +1735,14 @@ object Types {
     /** If this is a proto type, WildcardType, otherwise the type itself */
     def dropIfProto: Type = this
 
+    /** If this is an AndType, the number of factors, 1 for all other types */
+    def andFactorCount: Int = 1
+
+    /** If this is an OrType, the number of factors that match `soft`,
+     *  1 for all other types.
+     */
+    def orFactorCount(soft: Boolean): Int = 1
+
 // ----- Substitutions -----------------------------------------------------
 
     /** Substitute all types that refer in their symbol attribute to
@@ -1888,7 +1943,15 @@ object Types {
       case st => st
     }
 
-    /** Same as superType, except that opaque types are treated as transparent aliases */
+    /** Same as superType, except for two differences:
+     *   - opaque types are treated as transparent aliases
+     *   - applied types are matchtype-reduced if possible
+     *
+     *  Note: the reason to reduce match type aliases here and not in `superType`
+     *  is that `superType` is context-independent and cached, whereas matchtype
+     *  reduction depends on context and should not be cached (at least not without
+     *  the very specific cache invalidation condition for matchtypes).
+     */
     def translucentSuperType(using Context): Type = superType
   }
 
@@ -2216,7 +2279,7 @@ object Types {
       if (!d.exists && !allowPrivate && ctx.mode.is(Mode.Interactive))
         // In the IDE we might change a public symbol to private, and would still expect to find it.
         d = memberDenot(prefix, name, true)
-      if (!d.exists && ctx.phaseId > FirstPhaseId && lastDenotation.isInstanceOf[SymDenotation])
+      if (!d.exists && ctx.isAfterTyper && lastDenotation.isInstanceOf[SymDenotation])
         // name has changed; try load in earlier phase and make current
         d = atPhase(ctx.phaseId - 1)(memberDenot(name, allowPrivate)).current
       if (d.isOverloaded)
@@ -2808,12 +2871,16 @@ object Types {
     private var myRef: Type = null
     private var computed = false
 
+    override def tryNormalize(using Context): Type = ref.tryNormalize
+
     def ref(using Context): Type =
       if computed then
         if myRef == null then
           // if errors were reported previously handle this by throwing a CyclicReference
           // instead of crashing immediately. A test case is neg/i6057.scala.
-          assert(ctx.mode.is(Mode.CheckCyclic) || ctx.reporter.errorsReported)
+          assert(ctx.mode.is(Mode.CheckCyclic)
+              || ctx.mode.is(Mode.Printing)
+              || ctx.reporter.errorsReported)
           throw CyclicReference(NoDenotation)
       else
         computed = true
@@ -3070,6 +3137,12 @@ object Types {
       myBaseClasses
     }
 
+    private var myFactorCount = 0
+    override def andFactorCount =
+      if myFactorCount == 0 then
+      	myFactorCount = tp1.andFactorCount + tp2.andFactorCount
+      myFactorCount
+
     def derivedAndType(tp1: Type, tp2: Type)(using Context): Type =
       if ((tp1 eq this.tp1) && (tp2 eq this.tp2)) this
       else AndType.make(tp1, tp2, checkValid = true)
@@ -3095,6 +3168,23 @@ object Types {
       unchecked(tp1, tp2)
     }
 
+    def balanced(tp1: Type, tp2: Type)(using Context): AndType =
+      tp1 match
+        case AndType(tp11, tp12) if tp1.andFactorCount > tp2.andFactorCount * 2 =>
+          if tp11.andFactorCount < tp12.andFactorCount then
+            return apply(tp12, balanced(tp11, tp2))
+          else
+            return apply(tp11, balanced(tp12, tp2))
+        case _ =>
+      tp2 match
+        case AndType(tp21, tp22) if tp2.andFactorCount > tp1.andFactorCount * 2 =>
+          if tp22.andFactorCount < tp21.andFactorCount then
+            return apply(balanced(tp1, tp22), tp21)
+          else
+            return apply(balanced(tp1, tp21), tp22)
+        case _ =>
+      apply(tp1, tp2)
+
     def unchecked(tp1: Type, tp2: Type)(using Context): AndType = {
       assertUnerased()
       unique(new CachedAndType(tp1, tp2))
@@ -3141,6 +3231,14 @@ object Types {
       myBaseClasses
     }
 
+    private var myFactorCount = 0
+    override def orFactorCount(soft: Boolean) =
+      if this.isSoft == soft then
+        if myFactorCount == 0 then
+          myFactorCount = tp1.orFactorCount(soft) + tp2.orFactorCount(soft)
+        myFactorCount
+      else 1
+
     assert(tp1.isValueTypeOrWildcard &&
            tp2.isValueTypeOrWildcard, s"$tp1 $tp2")
 
@@ -3158,6 +3256,20 @@ object Types {
       myJoin
     }
 
+    private var myUnion: Type = _
+    private var myUnionPeriod: Period = Nowhere
+
+    override def widenUnionWithoutNull(using Context): Type =
+      if myUnionPeriod != ctx.period then
+        myUnion =
+          if isSoft then
+            TypeComparer.lub(tp1.widenUnionWithoutNull, tp2.widenUnionWithoutNull, canConstrain = true, isSoft = isSoft) match
+              case union: OrType => union.join
+              case res => res
+          else derivedOrType(tp1.widenUnionWithoutNull, tp2.widenUnionWithoutNull, soft = isSoft)
+        if !isProvisional then myUnionPeriod = ctx.period
+      myUnion
+
     private var atomsRunId: RunId = NoRunId
     private var myAtoms: Atoms = _
     private var myWidened: Type = _
@@ -3170,7 +3282,7 @@ object Types {
           else tp1.atoms | tp2.atoms
         val tp1w = tp1.widenSingletons
         val tp2w = tp2.widenSingletons
-        myWidened = if ((tp1 eq tp1w) && (tp2 eq tp2w)) this else tp1w | tp2w
+        myWidened = if ((tp1 eq tp1w) && (tp2 eq tp2w)) this else TypeComparer.lub(tp1w, tp2w, isSoft = isSoft)
         atomsRunId = ctx.runId
 
     override def atoms(using Context): Atoms =
@@ -3198,10 +3310,29 @@ object Types {
   final class CachedOrType(tp1: Type, tp2: Type, override val isSoft: Boolean) extends OrType(tp1, tp2)
 
   object OrType {
+
     def apply(tp1: Type, tp2: Type, soft: Boolean)(using Context): OrType = {
       assertUnerased()
       unique(new CachedOrType(tp1, tp2, soft))
     }
+
+    def balanced(tp1: Type, tp2: Type, soft: Boolean)(using Context): OrType =
+      tp1 match
+        case OrType(tp11, tp12) if tp1.orFactorCount(soft) > tp2.orFactorCount(soft) * 2 =>
+          if tp11.orFactorCount(soft) < tp12.orFactorCount(soft) then
+            return apply(tp12, balanced(tp11, tp2, soft), soft)
+          else
+            return apply(tp11, balanced(tp12, tp2, soft), soft)
+        case _ =>
+      tp2 match
+        case OrType(tp21, tp22) if tp2.orFactorCount(soft) > tp1.orFactorCount(soft) * 2 =>
+          if tp22.orFactorCount(soft) < tp21.orFactorCount(soft) then
+            return apply(balanced(tp1, tp22, soft), tp21, soft)
+          else
+            return apply(balanced(tp1, tp21, soft), tp22, soft)
+        case _ =>
+      apply(tp1, tp2, soft)
+
     def make(tp1: Type, tp2: Type, soft: Boolean)(using Context): Type =
       if (tp1 eq tp2) tp1
       else apply(tp1, tp2, soft)
@@ -3495,6 +3626,9 @@ object Types {
           case tp: AppliedType => tp.fold(status, compute(_, _, theAcc))
           case tp: TypeVar if !tp.isInstantiated => combine(status, Provisional)
           case tp: TermParamRef if tp.binder eq thisLambdaType => TrueDeps
+          case AnnotatedType(parent, ann) =>
+            if ann.refersToParamOf(thisLambdaType) then TrueDeps
+            else compute(status, parent, theAcc)
           case _: ThisType | _: BoundType | NoPrefix => status
           case _ =>
             (if theAcc != null then theAcc else DepAcc()).foldOver(status, tp)
@@ -3547,8 +3681,10 @@ object Types {
       if (isResultDependent) {
         val dropDependencies = new ApproximatingTypeMap {
           def apply(tp: Type) = tp match {
-            case tp @ TermParamRef(thisLambdaType, _) =>
+            case tp @ TermParamRef(`thisLambdaType`, _) =>
               range(defn.NothingType, atVariance(1)(apply(tp.underlying)))
+            case AnnotatedType(parent, ann) if ann.refersToParamOf(thisLambdaType) =>
+              mapOver(parent)
             case _ => mapOver(tp)
           }
         }
@@ -3640,9 +3776,15 @@ object Types {
         case ExprType(resType) => ExprType(AnnotatedType(resType, Annotation(defn.InlineParamAnnot)))
         case _ => AnnotatedType(tp, Annotation(defn.InlineParamAnnot))
       }
+      def translateErased(tp: Type): Type = tp match {
+        case ExprType(resType) => ExprType(AnnotatedType(resType, Annotation(defn.ErasedParamAnnot)))
+        case _ => AnnotatedType(tp, Annotation(defn.ErasedParamAnnot))
+      }
       def paramInfo(param: Symbol) = {
-        val paramType = param.info.annotatedToRepeated
-        if (param.is(Inline)) translateInline(paramType) else paramType
+        var paramType = param.info.annotatedToRepeated
+        if (param.is(Inline)) paramType = translateInline(paramType)
+        if (param.is(Erased)) paramType = translateErased(paramType)
+        paramType
       }
 
       apply(params.map(_.name.asTermName))(
@@ -3735,9 +3877,6 @@ object Types {
     val paramInfos: List[TypeBounds] = paramInfosExp(this)
     val resType: Type = resultTypeExp(this)
 
-    assert(resType.isInstanceOf[TermType], this)
-    assert(paramNames.nonEmpty)
-
     private def setVariances(tparams: List[LambdaParam], vs: List[Variance]): Unit =
       if tparams.nonEmpty then
         tparams.head.declaredVariance = vs.head
@@ -3764,6 +3903,8 @@ object Types {
                   x.declaredVariance == y.declaredVariance))
         && {
           val bs1 = new BinderPairs(this, that, bs)
+          // `paramInfos` and `resType` might still be uninstantiated at this point
+          paramInfos != null && resType != null &&
           paramInfos.equalElements(that.paramInfos, bs1) &&
           resType.equals(that.resType, bs1)
         }
@@ -3787,6 +3928,9 @@ object Types {
       if isDeclaredVarianceLambda then
         s"HKTypeLambda($paramNames, $paramInfos, $resType, ${declaredVariances.map(_.flagsString)})"
       else super.toString
+
+    assert(resType.isInstanceOf[TermType], this)
+    assert(paramNames.nonEmpty)
   }
 
   /** The type of a polymorphic method. It has the same form as HKTypeLambda,
@@ -3924,7 +4068,7 @@ object Types {
   // ----- Type application: LambdaParam, AppliedType ---------------------
 
   /** The parameter of a type lambda */
-  case class LambdaParam(tl: TypeLambda, n: Int) extends ParamInfo {
+  case class LambdaParam(tl: TypeLambda, n: Int) extends ParamInfo, printing.Showable {
     type ThisName = TypeName
 
     def isTypeParam(using Context): Boolean = tl.paramNames.head.isTypeName
@@ -3969,6 +4113,8 @@ object Types {
           case _ =>
             myVariance = Invariant
       myVariance
+
+    def toText(printer: Printer): Text = printer.toText(this)
   }
 
   /** A type application `C[T_1, ..., T_n]` */
@@ -3988,24 +4134,23 @@ object Types {
 
     override def underlying(using Context): Type = tycon
 
-    override def superType(using Context): Type = {
-      if (ctx.period != validSuper) {
-        cachedSuper = tycon match {
+    override def superType(using Context): Type =
+      if ctx.period != validSuper then
+        validSuper = if (tycon.isProvisional) Nowhere else ctx.period
+        cachedSuper = tycon match
           case tycon: HKTypeLambda => defn.AnyType
           case tycon: TypeRef if tycon.symbol.isClass => tycon
-          case tycon: TypeProxy => tycon.superType.applyIfParameterized(args)
+          case tycon: TypeProxy =>
+            if isMatchAlias then validSuper = Nowhere
+            tycon.superType.applyIfParameterized(args).normalized
           case _ => defn.AnyType
-        }
-        validSuper = if (tycon.isProvisional) Nowhere else ctx.period
-      }
       cachedSuper
-    }
 
     override def translucentSuperType(using Context): Type = tycon match {
       case tycon: TypeRef if tycon.symbol.isOpaqueAlias =>
         tycon.translucentSuperType.applyIfParameterized(args)
       case _ =>
-        superType
+        tryNormalize.orElse(superType)
     }
 
     inline def map(inline op: Type => Type)(using Context) =
@@ -4025,7 +4170,9 @@ object Types {
         def tryMatchAlias = tycon.info match {
           case MatchAlias(alias) =>
             trace(i"normalize $this", typr, show = true) {
-              alias.applyIfParameterized(args).tryNormalize
+              MatchTypeTrace.recurseWith(this) {
+                alias.applyIfParameterized(args.map(_.normalized)).tryNormalize
+              }
             }
           case _ =>
             NoType
@@ -4055,85 +4202,233 @@ object Types {
 
     def tryCompiletimeConstantFold(using Context): Type = tycon match {
       case tycon: TypeRef if defn.isCompiletimeAppliedType(tycon.symbol) =>
-        def constValue(tp: Type): Option[Any] = tp.dealias match {
+        extension (tp: Type) def fixForEvaluation: Type =
+          tp.normalized.dealias match {
+            // enable operations for constant singleton terms. E.g.:
+            // ```
+            // final val one = 1
+            // type Two = one.type + one.type
+            // ```
+            case tp: TermRef => tp.underlying
+            case tp => tp
+          }
+
+        def constValue(tp: Type): Option[Any] = tp.fixForEvaluation match {
           case ConstantType(Constant(n)) => Some(n)
           case _ => None
         }
 
-        def boolValue(tp: Type): Option[Boolean] = tp.dealias match {
+        def boolValue(tp: Type): Option[Boolean] = tp.fixForEvaluation match {
           case ConstantType(Constant(n: Boolean)) => Some(n)
           case _ => None
         }
 
-        def intValue(tp: Type): Option[Int] = tp.dealias match {
+        def intValue(tp: Type): Option[Int] = tp.fixForEvaluation match {
           case ConstantType(Constant(n: Int)) => Some(n)
           case _ => None
         }
 
-        def stringValue(tp: Type): Option[String] = tp.dealias match {
+        def longValue(tp: Type): Option[Long] = tp.fixForEvaluation match {
+          case ConstantType(Constant(n: Long)) => Some(n)
+          case _ => None
+        }
+
+        def floatValue(tp: Type): Option[Float] = tp.fixForEvaluation match {
+          case ConstantType(Constant(n: Float)) => Some(n)
+          case _ => None
+        }
+
+        def doubleValue(tp: Type): Option[Double] = tp.fixForEvaluation match {
+          case ConstantType(Constant(n: Double)) => Some(n)
+          case _ => None
+        }
+
+        def stringValue(tp: Type): Option[String] = tp.fixForEvaluation match {
           case ConstantType(Constant(n: String)) => Some(n)
           case _ => None
         }
 
+        // Returns Some(true) if the type is a constant.
+        // Returns Some(false) if the type is not a constant.
+        // Returns None if there is not enough information to determine if the type is a constant.
+        // The type is a constant if it is a constant type or a type operation composition of constant types.
+        // If we get a type reference for an argument, then the result is not yet known.
+        def isConst(tp: Type): Option[Boolean] = tp.dealias match {
+          // known to be constant
+          case ConstantType(_) => Some(true)
+          // currently not a concrete known type
+          case TypeRef(NoPrefix,_) => None
+          // currently not a concrete known type
+          case _: TypeParamRef => None
+          // constant if the term is constant
+          case t: TermRef => isConst(t.underlying)
+          // an operation type => recursively check all argument compositions
+          case applied: AppliedType if defn.isCompiletimeAppliedType(applied.typeSymbol) =>
+            val argsConst = applied.args.map(isConst)
+            if (argsConst.exists(_.isEmpty)) None
+            else Some(argsConst.forall(_.get))
+          // all other types are considered not to be constant
+          case _ => Some(false)
+        }
+
+        def expectArgsNum(expectedNum: Int): Unit =
+        // We can use assert instead of a compiler type error because this error should not
+        // occur since the type signature of the operation enforces the proper number of args.
+          assert(args.length == expectedNum, s"Type operation expects $expectedNum arguments but found ${args.length}")
+
         def natValue(tp: Type): Option[Int] = intValue(tp).filter(n => n >= 0 && n < Int.MaxValue)
 
+        // Runs the op and returns the result as a constant type.
+        // If the op throws an exception, then this exception is converted into a type error.
+        def runConstantOp(op: => Any): Type =
+          val result = try {
+            op
+          } catch {
+            case e: Throwable =>
+              throw new TypeError(e.getMessage)
+          }
+          ConstantType(Constant(result))
+
         def constantFold1[T](extractor: Type => Option[T], op: T => Any): Option[Type] =
-          extractor(args.head.normalized).map(a => ConstantType(Constant(op(a))))
+          expectArgsNum(1)
+          extractor(args.head).map(a => runConstantOp(op(a)))
 
         def constantFold2[T](extractor: Type => Option[T], op: (T, T) => Any): Option[Type] =
+          constantFold2AB(extractor, extractor, op)
+
+        def constantFold2AB[TA, TB](extractorA: Type => Option[TA], extractorB: Type => Option[TB], op: (TA, TB) => Any): Option[Type] =
+          expectArgsNum(2)
           for {
-            a <- extractor(args.head.normalized)
-            b <- extractor(args.tail.head.normalized)
-          } yield ConstantType(Constant(op(a, b)))
+            a <- extractorA(args(0))
+            b <- extractorB(args(1))
+          } yield runConstantOp(op(a, b))
+
+        def constantFold3[TA, TB, TC](
+          extractorA: Type => Option[TA],
+          extractorB: Type => Option[TB],
+          extractorC: Type => Option[TC],
+          op: (TA, TB, TC) => Any
+        ): Option[Type] =
+          expectArgsNum(3)
+          for {
+            a <- extractorA(args(0))
+            b <- extractorB(args(1))
+            c <- extractorC(args(2))
+          } yield runConstantOp(op(a, b, c))
 
         trace(i"compiletime constant fold $this", typr, show = true) {
           val name = tycon.symbol.name
           val owner = tycon.symbol.owner
-          val nArgs = args.length
           val constantType =
             if (defn.isCompiletime_S(tycon.symbol)) {
-              if (nArgs == 1) constantFold1(natValue, _ + 1)
-              else None
+              constantFold1(natValue, _ + 1)
             } else if (owner == defn.CompiletimeOpsAnyModuleClass) name match {
-              case tpnme.Equals    if nArgs == 2 => constantFold2(constValue, _ == _)
-              case tpnme.NotEquals if nArgs == 2 => constantFold2(constValue, _ != _)
+              case tpnme.Equals     => constantFold2(constValue, _ == _)
+              case tpnme.NotEquals  => constantFold2(constValue, _ != _)
+              case tpnme.ToString   => constantFold1(constValue, _.toString)
+              case tpnme.IsConst    => isConst(args.head).map(b => ConstantType(Constant(b)))
               case _ => None
             } else if (owner == defn.CompiletimeOpsIntModuleClass) name match {
-              case tpnme.Abs      if nArgs == 1 => constantFold1(intValue, _.abs)
-              case tpnme.Negate   if nArgs == 1 => constantFold1(intValue, x => -x)
-              case tpnme.ToString if nArgs == 1 => constantFold1(intValue, _.toString)
-              case tpnme.Plus     if nArgs == 2 => constantFold2(intValue, _ + _)
-              case tpnme.Minus    if nArgs == 2 => constantFold2(intValue, _ - _)
-              case tpnme.Times    if nArgs == 2 => constantFold2(intValue, _ * _)
-              case tpnme.Div if nArgs == 2 => constantFold2(intValue, {
-                case (_, 0) => throw new TypeError("Division by 0")
-                case (a, b) => a / b
-              })
-              case tpnme.Mod if nArgs == 2 => constantFold2(intValue, {
-                case (_, 0) => throw new TypeError("Modulo by 0")
-                case (a, b) => a % b
-              })
-              case tpnme.Lt  if nArgs == 2 => constantFold2(intValue, _ < _)
-              case tpnme.Gt  if nArgs == 2 => constantFold2(intValue, _ > _)
-              case tpnme.Ge  if nArgs == 2 => constantFold2(intValue, _ >= _)
-              case tpnme.Le  if nArgs == 2 => constantFold2(intValue, _ <= _)
-              case tpnme.Xor if nArgs == 2 => constantFold2(intValue, _ ^ _)
-              case tpnme.BitwiseAnd if nArgs == 2 => constantFold2(intValue, _ & _)
-              case tpnme.BitwiseOr  if nArgs == 2 => constantFold2(intValue, _ | _)
-              case tpnme.ASR if nArgs == 2 => constantFold2(intValue, _ >> _)
-              case tpnme.LSL if nArgs == 2 => constantFold2(intValue, _ << _)
-              case tpnme.LSR if nArgs == 2 => constantFold2(intValue, _ >>> _)
-              case tpnme.Min if nArgs == 2 => constantFold2(intValue, _ min _)
-              case tpnme.Max if nArgs == 2 => constantFold2(intValue, _ max _)
+              case tpnme.Abs        => constantFold1(intValue, _.abs)
+              case tpnme.Negate     => constantFold1(intValue, x => -x)
+              // ToString is deprecated for ops.int, and moved to ops.any
+              case tpnme.ToString   => constantFold1(intValue, _.toString)
+              case tpnme.Plus       => constantFold2(intValue, _ + _)
+              case tpnme.Minus      => constantFold2(intValue, _ - _)
+              case tpnme.Times      => constantFold2(intValue, _ * _)
+              case tpnme.Div        => constantFold2(intValue, _ / _)
+              case tpnme.Mod        => constantFold2(intValue, _ % _)
+              case tpnme.Lt         => constantFold2(intValue, _ < _)
+              case tpnme.Gt         => constantFold2(intValue, _ > _)
+              case tpnme.Ge         => constantFold2(intValue, _ >= _)
+              case tpnme.Le         => constantFold2(intValue, _ <= _)
+              case tpnme.Xor        => constantFold2(intValue, _ ^ _)
+              case tpnme.BitwiseAnd => constantFold2(intValue, _ & _)
+              case tpnme.BitwiseOr  => constantFold2(intValue, _ | _)
+              case tpnme.ASR        => constantFold2(intValue, _ >> _)
+              case tpnme.LSL        => constantFold2(intValue, _ << _)
+              case tpnme.LSR        => constantFold2(intValue, _ >>> _)
+              case tpnme.Min        => constantFold2(intValue, _ min _)
+              case tpnme.Max        => constantFold2(intValue, _ max _)
+              case tpnme.NumberOfLeadingZeros => constantFold1(intValue, Integer.numberOfLeadingZeros(_))
+              case tpnme.ToLong     => constantFold1(intValue, _.toLong)
+              case tpnme.ToFloat    => constantFold1(intValue, _.toFloat)
+              case tpnme.ToDouble   => constantFold1(intValue, _.toDouble)
+              case _ => None
+            } else if (owner == defn.CompiletimeOpsLongModuleClass) name match {
+              case tpnme.Abs        => constantFold1(longValue, _.abs)
+              case tpnme.Negate     => constantFold1(longValue, x => -x)
+              case tpnme.Plus       => constantFold2(longValue, _ + _)
+              case tpnme.Minus      => constantFold2(longValue, _ - _)
+              case tpnme.Times      => constantFold2(longValue, _ * _)
+              case tpnme.Div        => constantFold2(longValue, _ / _)
+              case tpnme.Mod        => constantFold2(longValue, _ % _)
+              case tpnme.Lt         => constantFold2(longValue, _ < _)
+              case tpnme.Gt         => constantFold2(longValue, _ > _)
+              case tpnme.Ge         => constantFold2(longValue, _ >= _)
+              case tpnme.Le         => constantFold2(longValue, _ <= _)
+              case tpnme.Xor        => constantFold2(longValue, _ ^ _)
+              case tpnme.BitwiseAnd => constantFold2(longValue, _ & _)
+              case tpnme.BitwiseOr  => constantFold2(longValue, _ | _)
+              case tpnme.ASR        => constantFold2(longValue, _ >> _)
+              case tpnme.LSL        => constantFold2(longValue, _ << _)
+              case tpnme.LSR        => constantFold2(longValue, _ >>> _)
+              case tpnme.Min        => constantFold2(longValue, _ min _)
+              case tpnme.Max        => constantFold2(longValue, _ max _)
+              case tpnme.NumberOfLeadingZeros =>
+                constantFold1(longValue, java.lang.Long.numberOfLeadingZeros(_))
+              case tpnme.ToInt      => constantFold1(longValue, _.toInt)
+              case tpnme.ToFloat    => constantFold1(longValue, _.toFloat)
+              case tpnme.ToDouble   => constantFold1(longValue, _.toDouble)
+              case _ => None
+            } else if (owner == defn.CompiletimeOpsFloatModuleClass) name match {
+              case tpnme.Abs        => constantFold1(floatValue, _.abs)
+              case tpnme.Negate     => constantFold1(floatValue, x => -x)
+              case tpnme.Plus       => constantFold2(floatValue, _ + _)
+              case tpnme.Minus      => constantFold2(floatValue, _ - _)
+              case tpnme.Times      => constantFold2(floatValue, _ * _)
+              case tpnme.Div        => constantFold2(floatValue, _ / _)
+              case tpnme.Mod        => constantFold2(floatValue, _ % _)
+              case tpnme.Lt         => constantFold2(floatValue, _ < _)
+              case tpnme.Gt         => constantFold2(floatValue, _ > _)
+              case tpnme.Ge         => constantFold2(floatValue, _ >= _)
+              case tpnme.Le         => constantFold2(floatValue, _ <= _)
+              case tpnme.Min        => constantFold2(floatValue, _ min _)
+              case tpnme.Max        => constantFold2(floatValue, _ max _)
+              case tpnme.ToInt      => constantFold1(floatValue, _.toInt)
+              case tpnme.ToLong     => constantFold1(floatValue, _.toLong)
+              case tpnme.ToDouble   => constantFold1(floatValue, _.toDouble)
+              case _ => None
+            } else if (owner == defn.CompiletimeOpsDoubleModuleClass) name match {
+              case tpnme.Abs        => constantFold1(doubleValue, _.abs)
+              case tpnme.Negate     => constantFold1(doubleValue, x => -x)
+              case tpnme.Plus       => constantFold2(doubleValue, _ + _)
+              case tpnme.Minus      => constantFold2(doubleValue, _ - _)
+              case tpnme.Times      => constantFold2(doubleValue, _ * _)
+              case tpnme.Div        => constantFold2(doubleValue, _ / _)
+              case tpnme.Mod        => constantFold2(doubleValue, _ % _)
+              case tpnme.Lt         => constantFold2(doubleValue, _ < _)
+              case tpnme.Gt         => constantFold2(doubleValue, _ > _)
+              case tpnme.Ge         => constantFold2(doubleValue, _ >= _)
+              case tpnme.Le         => constantFold2(doubleValue, _ <= _)
+              case tpnme.Min        => constantFold2(doubleValue, _ min _)
+              case tpnme.Max        => constantFold2(doubleValue, _ max _)
+              case tpnme.ToInt      => constantFold1(doubleValue, _.toInt)
+              case tpnme.ToLong     => constantFold1(doubleValue, _.toLong)
+              case tpnme.ToFloat    => constantFold1(doubleValue, _.toFloat)
               case _ => None
             } else if (owner == defn.CompiletimeOpsStringModuleClass) name match {
-              case tpnme.Plus if nArgs == 2 => constantFold2(stringValue, _ + _)
+              case tpnme.Plus       => constantFold2(stringValue, _ + _)
+              case tpnme.Length     => constantFold1(stringValue, _.length)
+              case tpnme.Matches    => constantFold2(stringValue, _ matches _)
+              case tpnme.Substring  =>
+                constantFold3(stringValue, intValue, intValue, (s, b, e) => s.substring(b, e))
               case _ => None
             } else if (owner == defn.CompiletimeOpsBooleanModuleClass) name match {
-              case tpnme.Not if nArgs == 1 => constantFold1(boolValue, x => !x)
-              case tpnme.And if nArgs == 2 => constantFold2(boolValue, _ && _)
-              case tpnme.Or  if nArgs == 2 => constantFold2(boolValue, _ || _)
-              case tpnme.Xor if nArgs == 2 => constantFold2(boolValue, _ ^ _)
+              case tpnme.Not        => constantFold1(boolValue, x => !x)
+              case tpnme.And        => constantFold2(boolValue, _ && _)
+              case tpnme.Or         => constantFold2(boolValue, _ || _)
+              case tpnme.Xor        => constantFold2(boolValue, _ ^ _)
               case _ => None
             } else None
 
@@ -4300,6 +4595,8 @@ object Types {
 
   private final class RecThisImpl(binder: RecType) extends RecThis(binder)
 
+  // @sharable private var skid: Int = 0
+
   // ----- Skolem types -----------------------------------------------
 
   /** A skolem type reference with underlying type `info`.
@@ -4317,6 +4614,10 @@ object Types {
 
     def withName(name: Name): this.type = { myRepr = name; this }
 
+    //skid += 1
+    //val id = skid
+    //assert(id != 10)
+
     private var myRepr: Name = null
     def repr(using Context): Name = {
       if (myRepr == null) myRepr = SkolemName.fresh()
@@ -4356,9 +4657,17 @@ object Types {
    *
    *  @param  origin        The parameter that's tracked by the type variable.
    *  @param  creatorState  The typer state in which the variable was created.
+   *  @param  nestingLevel  Symbols with a nestingLevel strictly greater than this
+   *                        will not appear in the instantiation of this type variable.
+   *                        This is enforced in `ConstraintHandling` by:
+   *                        - Maintaining the invariant that the `nonParamBounds`
+   *                          of a type variable never refer to a type with a
+   *                          greater `nestingLevel` (see `legalBound` for the reason
+   *                          why this cannot be delayed until instantiation).
+   *                        - On instantiation, replacing any param in the param bound
+   *                          with a level greater than nestingLevel (see `fullLowerBound`).
    */
-  final class TypeVar private(initOrigin: TypeParamRef, creatorState: TyperState, nestingLevel: Int) extends CachedProxyType with ValueType {
-
+  final class TypeVar private(initOrigin: TypeParamRef, creatorState: TyperState, val nestingLevel: Int) extends CachedProxyType with ValueType {
     private var currentOrigin = initOrigin
 
     def origin: TypeParamRef = currentOrigin
@@ -4372,13 +4681,17 @@ object Types {
     private var myInst: Type = NoType
 
     private[core] def inst: Type = myInst
-    private[core] def inst_=(tp: Type): Unit = {
+    private[core] def setInst(tp: Type): Unit =
       myInst = tp
-      if (tp.exists && (owningState ne null)) {
-        owningState.get.ownedVars -= this
-        owningState = null // no longer needed; null out to avoid a memory leak
-      }
-    }
+      if tp.exists && owningState != null then
+        val owningState1 = owningState.get
+        if owningState1 != null then
+          owningState1.ownedVars -= this
+          owningState = null // no longer needed; null out to avoid a memory leak
+
+    private[core] def resetInst(ts: TyperState): Unit =
+      myInst = NoType
+      owningState = new WeakReference(ts)
 
     /** The state owning the variable. This is at first `creatorState`, but it can
      *  be changed to an enclosing state on a commit.
@@ -4395,42 +4708,18 @@ object Types {
     /** Is the variable already instantiated? */
     def isInstantiated(using Context): Boolean = instanceOpt.exists
 
-    /** Avoid term references in `tp` to parameters or local variables that
-     *  are nested more deeply than the type variable itself.
-     */
-    private def avoidCaptures(tp: Type)(using Context): Type =
-      val problemSyms = new TypeAccumulator[Set[Symbol]]:
-        def apply(syms: Set[Symbol], t: Type): Set[Symbol] = t match
-          case ref @ TermRef(NoPrefix, _)
-          // AVOIDANCE TODO: Are there other problematic kinds of references?
-          // Our current tests only give us these, but we might need to generalize this.
-          if ref.symbol.maybeOwner.nestingLevel > nestingLevel =>
-            syms + ref.symbol
-          case _ =>
-            foldOver(syms, t)
-      val problems = problemSyms(Set.empty, tp)
-      if problems.isEmpty then tp
-      else
-        val atp = TypeOps.avoid(tp, problems.toList)
-        def msg = i"Inaccessible variables captured in instantation of type variable $this.\n$tp was fixed to $atp"
-        typr.println(msg)
-        val bound = TypeComparer.fullUpperBound(origin)
-        if !(atp <:< bound) then
-          throw new TypeError(s"$msg,\nbut the latter type does not conform to the upper bound $bound")
-        atp
-      // AVOIDANCE TODO: This really works well only if variables are instantiated from below
-      // If we hit a problematic symbol while instantiating from above, then avoidance
-      // will widen the instance type further. This could yield an alias, which would be OK.
-      // But it also could yield a true super type which would then fail the bounds check
-      // and throw a TypeError. The right thing to do instead would be to avoid "downwards".
-      // To do this, we need first test cases for that situation.
-
     /** Instantiate variable with given type */
     def instantiateWith(tp: Type)(using Context): Type = {
-      assert(tp ne this, s"self instantiation of ${tp.show}, constraint = ${ctx.typerState.constraint.show}")
-      typr.println(s"instantiating ${this.show} with ${tp.show}")
+      assert(tp ne this, i"self instantiation of $origin, constraint = ${ctx.typerState.constraint}")
+      assert(!myInst.exists, i"$origin is already instantiated to $myInst but we attempted to instantiate it to $tp")
+      typr.println(i"instantiating $this with $tp")
+
+      if Config.checkConstraintsSatisfiable then
+        assert(currentEntry.bounds.contains(tp),
+          i"$origin is constrained to be $currentEntry but attempted to instantiate it to $tp")
+
       if ((ctx.typerState eq owningState.get) && !TypeComparer.subtypeCheckInProgress)
-        inst = tp
+        setInst(tp)
       ctx.typerState.constraint = ctx.typerState.constraint.replace(origin, tp)
       tp
     }
@@ -4443,15 +4732,22 @@ object Types {
      *  is also a singleton type.
      */
     def instantiate(fromBelow: Boolean)(using Context): Type =
-      instantiateWith(avoidCaptures(TypeComparer.instanceType(origin, fromBelow)))
+      val tp = TypeComparer.instanceType(origin, fromBelow)
+      if myInst.exists then // The line above might have triggered instantiation of the current type variable
+        myInst
+      else
+        instantiateWith(tp)
+
+    /** For uninstantiated type variables: the entry in the constraint (either bounds or
+     *  provisional instance value)
+     */
+    private def currentEntry(using Context): Type = ctx.typerState.constraint.entry(origin)
 
     /** For uninstantiated type variables: Is the lower bound different from Nothing? */
-    def hasLowerBound(using Context): Boolean =
-      !ctx.typerState.constraint.entry(origin).loBound.isExactlyNothing
+    def hasLowerBound(using Context): Boolean = !currentEntry.loBound.isExactlyNothing
 
     /** For uninstantiated type variables: Is the upper bound different from Any? */
-    def hasUpperBound(using Context): Boolean =
-      !ctx.typerState.constraint.entry(origin).hiBound.isRef(defn.AnyClass)
+    def hasUpperBound(using Context): Boolean = !currentEntry.hiBound.isRef(defn.AnyClass)
 
     /** Unwrap to instance (if instantiated) or origin (if not), until result
      *  is no longer a TypeVar
@@ -4478,8 +4774,8 @@ object Types {
     }
   }
   object TypeVar:
-    def apply(initOrigin: TypeParamRef, creatorState: TyperState)(using Context) =
-      new TypeVar(initOrigin, creatorState, ctx.owner.nestingLevel)
+    def apply(using Context)(initOrigin: TypeParamRef, creatorState: TyperState, nestingLevel: Int = ctx.nestingLevel) =
+      new TypeVar(initOrigin, creatorState, nestingLevel)
 
   type TypeVars = SimpleIdentitySet[TypeVar]
 
@@ -4509,7 +4805,12 @@ object Types {
     private var myReduced: Type = null
     private var reductionContext: util.MutableMap[Type, Type] = null
 
-    override def tryNormalize(using Context): Type = reduced.normalized
+    override def tryNormalize(using Context): Type =
+      try
+        reduced.normalized
+      catch
+        case ex: Throwable =>
+          handleRecursive("normalizing", s"${scrutinee.show} match ..." , ex)
 
     def reduced(using Context): Type = {
 
@@ -4537,19 +4838,28 @@ object Types {
         }
 
       record("MatchType.reduce called")
-      if (!Config.cacheMatchReduced || myReduced == null || !isUpToDate) {
+      if !Config.cacheMatchReduced
+          || myReduced == null
+          || !isUpToDate
+          || MatchTypeTrace.isRecording
+      then
         record("MatchType.reduce computed")
         if (myReduced != null) record("MatchType.reduce cache miss")
         myReduced =
           trace(i"reduce match type $this $hashCode", matchTypes, show = true) {
             def matchCases(cmp: TrackingTypeComparer): Type =
+              val saved = ctx.typerState.snapshot()
               try cmp.matchCases(scrutinee.normalized, cases)
               catch case ex: Throwable =>
                 handleRecursive("reduce type ", i"$scrutinee match ...", ex)
-              finally updateReductionContext(cmp.footprint)
+              finally
+                updateReductionContext(cmp.footprint)
+                ctx.typerState.resetTo(saved)
+                  // this drops caseLambdas in constraint and undoes any typevar
+                  // instantiations during matchtype reduction
+
             TypeComparer.tracked(matchCases)
           }
-      }
       myReduced
     }
 
@@ -4749,8 +5059,23 @@ object Types {
     }
 
     def & (that: TypeBounds)(using Context): TypeBounds =
-      if ((this.lo frozen_<:< that.lo) && (that.hi frozen_<:< this.hi)) that
-      else if ((that.lo frozen_<:< this.lo) && (this.hi frozen_<:< that.hi)) this
+      // This will try to preserve the FromJavaObjects type in upper bounds.
+      // For example, (? <: FromJavaObjects | Null) & (? <: Any),
+      // we want to get (? <: FromJavaObjects | Null) intead of (? <: Any),
+      // because we may check the result <:< (? <: Object | Null) later.
+      if this.hi.containsFromJavaObject
+        && (this.hi frozen_<:< that.hi)
+        && (that.lo frozen_<:< this.lo) then
+        // FromJavaObject in tp1.hi guarantees tp2.hi <:< tp1.hi
+        // prefer tp1 if FromJavaObject is in its hi
+        this
+      else if that.hi.containsFromJavaObject
+        && (that.hi frozen_<:< this.hi)
+        && (this.lo frozen_<:< that.lo) then
+        // Similarly, prefer tp2 if FromJavaObject is in its hi
+        that
+      else if (this.lo frozen_<:< that.lo) && (that.hi frozen_<:< this.hi) then that
+      else if (that.lo frozen_<:< this.lo) && (this.hi frozen_<:< that.hi) then this
       else TypeBounds(this.lo | that.lo, this.hi & that.hi)
 
     def | (that: TypeBounds)(using Context): TypeBounds =
@@ -4970,6 +5295,11 @@ object Types {
 
   /** Wildcard type, possibly with bounds */
   abstract case class WildcardType(optBounds: Type) extends CachedGroundType with TermType {
+
+    def effectiveBounds(using Context): TypeBounds = optBounds match
+      case bounds: TypeBounds => bounds
+      case _ => TypeBounds.empty
+
     def derivedWildcardType(optBounds: Type)(using Context): WildcardType =
       if (optBounds eq this.optBounds) this
       else if (!optBounds.exists) WildcardType
@@ -5043,7 +5373,7 @@ object Types {
         NoType
     }
     def isInstantiatable(tp: Type)(using Context): Boolean = zeroParamClass(tp) match {
-      case cinfo: ClassInfo =>
+      case cinfo: ClassInfo if !cinfo.cls.isOneOf(FinalOrSealed) =>
         val selfType = cinfo.selfType.asSeenFrom(tp, cinfo.cls)
         tp <:< selfType
       case _ =>
@@ -5111,6 +5441,12 @@ object Types {
 
   // ----- TypeMaps --------------------------------------------------------------------
 
+  /** Where a traversal should stop */
+  enum StopAt:
+    case None    // traverse everything
+    case Package // stop at package references
+    case Static  // stop at static references
+
   /** Common base class of TypeMap and TypeAccumulator */
   abstract class VariantTraversal:
     protected[core] var variance: Int = 1
@@ -5123,7 +5459,7 @@ object Types {
       res
     }
 
-    protected def stopAtStatic: Boolean = true
+    protected def stopAt: StopAt = StopAt.Static
 
     /** Can the prefix of this static reference be omitted if the reference
      *  itself can be omitted? Overridden in TypeOps#avoid.
@@ -5132,7 +5468,11 @@ object Types {
 
     protected def stopBecauseStaticOrLocal(tp: NamedType)(using Context): Boolean =
       (tp.prefix eq NoPrefix)
-      || stopAtStatic && tp.currentSymbol.isStatic && isStaticPrefix(tp.prefix)
+      || {
+        val stop = stopAt
+        stop == StopAt.Static && tp.currentSymbol.isStatic && isStaticPrefix(tp.prefix)
+        || stop == StopAt.Package && tp.currentSymbol.is(Package)
+      }
   end VariantTraversal
 
   abstract class TypeMap(implicit protected var mapCtx: Context)
@@ -5195,6 +5535,8 @@ object Types {
       variance = saved
       derivedLambdaType(tp)(ptypes1, this(restpe))
 
+    def isRange(tp: Type): Boolean = tp.isInstanceOf[Range]
+
     /** Map this function over given type */
     def mapOver(tp: Type): Type = {
       record(s"TypeMap mapOver ${getClass}")
@@ -5238,8 +5580,9 @@ object Types {
 
         case tp @ AnnotatedType(underlying, annot) =>
           val underlying1 = this(underlying)
-          if (underlying1 eq underlying) tp
-          else derivedAnnotatedType(tp, underlying1, mapOver(annot))
+          val annot1 = annot.mapWith(this)
+          if annot1 eq EmptyAnnotation then underlying1
+          else derivedAnnotatedType(tp, underlying1, annot1)
 
         case _: ThisType
           | _: BoundType
@@ -5311,9 +5654,6 @@ object Types {
       else newScopeWith(elems1: _*)
     }
 
-    def mapOver(annot: Annotation): Annotation =
-      annot.derivedAnnotation(mapOver(annot.tree))
-
     def mapOver(tree: Tree): Tree = treeTypeMap(tree)
 
     /** Can be overridden. By default, only the prefix is mapped. */
@@ -5321,7 +5661,7 @@ object Types {
       derivedClassInfo(tp, this(tp.prefix))
 
     def andThen(f: Type => Type): TypeMap = new TypeMap {
-      override def stopAtStatic = thisMap.stopAtStatic
+      override def stopAt = thisMap.stopAt
       def apply(tp: Type) = f(thisMap(tp))
     }
   }
@@ -5360,8 +5700,6 @@ object Types {
 
     protected def emptyRange = range(defn.NothingType, defn.AnyType)
 
-    protected def isRange(tp: Type): Boolean = tp.isInstanceOf[Range]
-
     protected def lower(tp: Type): Type = tp match {
       case tp: Range => tp.lo
       case _ => tp
@@ -5464,7 +5802,7 @@ object Types {
             case Range(infoLo: TypeBounds, infoHi: TypeBounds) =>
               assert(variance == 0)
               if (!infoLo.isTypeAlias && !infoHi.isTypeAlias) propagate(infoLo, infoHi)
-              else range(defn.NothingType, tp.parent)
+              else range(defn.NothingType, parent)
             case Range(infoLo, infoHi) =>
               propagate(infoLo, infoHi)
             case _ =>
@@ -5509,7 +5847,7 @@ object Types {
               tp.derivedAppliedType(tycon, args.map(rangeToBounds)) match
                 case tp1: AppliedType if tp1.isUnreducibleWild =>
                   // don't infer a type that would trigger an error later in
-                  // Checling.checkAppliedType; fall through to default handling instead
+                  // Checking.checkAppliedType; fall through to default handling instead
                 case tp1 =>
                   return tp1
             end if
@@ -5518,7 +5856,7 @@ object Types {
             // non-range arguments L1, ..., Ln and H1, ..., Hn such that
             // C[L1, ..., Ln] <: C[H1, ..., Hn] by taking the right limits of
             // ranges that appear in as co- or contravariant arguments.
-            // Fail for non-variant argument ranges.
+            // Fail for non-variant argument ranges (see use-site else branch below).
             // If successful, the L-arguments are in loBut, the H-arguments in hiBuf.
             // @return  operation succeeded for all arguments.
             def distributeArgs(args: List[Type], tparams: List[ParamInfo]): Boolean = args match {
@@ -5539,11 +5877,18 @@ object Types {
             if (distributeArgs(args, tp.tyconTypeParams))
               range(tp.derivedAppliedType(tycon, loBuf.toList),
                     tp.derivedAppliedType(tycon, hiBuf.toList))
-            else range(defn.NothingType, defn.AnyType)
-              // TODO: can we give a better bound than `topType`?
+            else if tycon.isLambdaSub || args.exists(isRangeOfNonTermTypes) then
+              range(defn.NothingType, defn.AnyType)
+            else
+              // See lampepfl/dotty#14152
+              range(defn.NothingType, tp.derivedAppliedType(tycon, args.map(rangeToBounds)))
           else tp.derivedAppliedType(tycon, args)
       }
 
+    private def isRangeOfNonTermTypes(tp: Type): Boolean = tp match
+      case Range(lo, hi) => !lo.isInstanceOf[TermType] || !hi.isInstanceOf[TermType]
+      case _             => false
+
     override protected def derivedAndType(tp: AndType, tp1: Type, tp2: Type): Type =
       if (isRange(tp1) || isRange(tp2)) range(lower(tp1) & lower(tp2), upper(tp1) & upper(tp2))
       else tp.derivedAndType(tp1, tp2)
@@ -5572,12 +5917,13 @@ object Types {
             case Range(lo, hi) => range(bound.bounds.lo, bound.bounds.hi)
             case _ => tp.derivedMatchType(bound, scrutinee, cases)
 
-    override protected def derivedSkolemType(tp: SkolemType, info: Type): Type = info match {
-      case Range(lo, hi) =>
-        range(tp.derivedSkolemType(lo), tp.derivedSkolemType(hi))
-      case _ =>
-        tp.derivedSkolemType(info)
-    }
+    override protected def derivedSkolemType(tp: SkolemType, info: Type): Type =
+      if info eq tp.info then tp
+      // By definition, a skolem is neither a subtype nor a supertype of a
+      // different skolem. So, regardless of `variance`, we cannot return a
+      // fresh skolem when approximating an existing skolem, we can only return
+      // a range.
+      else range(defn.NothingType, info)
 
     override protected def derivedClassInfo(tp: ClassInfo, pre: Type): Type = {
       assert(!isRange(pre))
@@ -5591,7 +5937,12 @@ object Types {
         case Range(lo, hi) =>
           range(derivedLambdaType(tp)(formals, lo), derivedLambdaType(tp)(formals, hi))
         case _ =>
-          tp.derivedLambdaType(tp.paramNames, formals, restpe)
+          if formals.exists(isRange) then
+            range(
+              derivedLambdaType(tp)(formals.map(upper(_).asInstanceOf[tp.PInfo]), restpe),
+              derivedLambdaType(tp)(formals.map(lower(_).asInstanceOf[tp.PInfo]), restpe))
+          else
+            tp.derivedLambdaType(tp.paramNames, formals, restpe)
       }
 
     protected def reapply(tp: Type): Type = apply(tp)
@@ -5608,6 +5959,15 @@ object Types {
       lo.toText(printer) ~ ".." ~ hi.toText(printer)
   }
 
+  /** Approximate wildcards by their bounds */
+  class AvoidWildcardsMap(using Context) extends ApproximatingTypeMap:
+    protected def mapWild(t: WildcardType) =
+      val bounds = t.effectiveBounds
+      range(atVariance(-variance)(apply(bounds.lo)), apply(bounds.hi))
+    def apply(t: Type): Type = t match
+      case t: WildcardType => mapWild(t)
+      case _ => mapOver(t)
+
   // ----- TypeAccumulators ----------------------------------------------------
 
   abstract class TypeAccumulator[T](implicit protected val accCtx: Context)
@@ -5729,12 +6089,12 @@ object Types {
 
   class ExistsAccumulator(
       p: Type => Boolean,
-      override val stopAtStatic: Boolean,
+      override val stopAt: StopAt,
       forceLazy: Boolean)(using Context) extends TypeAccumulator[Boolean]:
     def apply(x: Boolean, tp: Type): Boolean =
       x || p(tp) || (forceLazy || !tp.isInstanceOf[LazyRef]) && foldOver(x, tp)
 
-  class ForeachAccumulator(p: Type => Unit, override val stopAtStatic: Boolean)(using Context) extends TypeAccumulator[Unit] {
+  class ForeachAccumulator(p: Type => Unit, override val stopAt: StopAt)(using Context) extends TypeAccumulator[Unit] {
     def apply(x: Unit, tp: Type): Unit = foldOver(p(tp), tp)
   }
 
@@ -5894,7 +6254,7 @@ object Types {
   }
 
   object takeAllFilter extends NameFilter {
-    def apply(pre: Type, name: Name)(using Context): Boolean = true
+    def apply(pre: Type, name: Name)(using Context): Boolean = name != nme.CONSTRUCTOR
     def isStable = true
   }
 
diff --git a/compiler/src/dotty/tools/dotc/core/Uniques.scala b/compiler/src/dotty/tools/dotc/core/Uniques.scala
index 5b1ae1a499e9..d706875f58dd 100644
--- a/compiler/src/dotty/tools/dotc/core/Uniques.scala
+++ b/compiler/src/dotty/tools/dotc/core/Uniques.scala
@@ -4,9 +4,11 @@ package core
 import Types._, Contexts._, util.Stats._, Hashable._, Names._
 import config.Config
 import Decorators._
-import util.{HashSet, Stats}
+import util.{WeakHashSet, Stats}
+import WeakHashSet.Entry
+import scala.annotation.tailrec
 
-class Uniques extends HashSet[Type](Config.initialUniquesCapacity):
+class Uniques extends WeakHashSet[Type](Config.initialUniquesCapacity):
   override def hash(x: Type): Int = x.hash
   override def isEqual(x: Type, y: Type) = x.eql(y)
 
@@ -32,7 +34,7 @@ object Uniques:
     if tp.hash == NotCached then tp
     else ctx.uniques.put(tp).asInstanceOf[T]
 
-  final class NamedTypeUniques extends HashSet[NamedType](Config.initialUniquesCapacity * 4) with Hashable:
+  final class NamedTypeUniques extends WeakHashSet[NamedType](Config.initialUniquesCapacity * 4) with Hashable:
     override def hash(x: NamedType): Int = x.hash
 
     def enterIfNew(prefix: Type, designator: Designator, isTerm: Boolean)(using Context): NamedType =
@@ -43,17 +45,25 @@ object Uniques:
         else new CachedTypeRef(prefix, designator, h)
       if h == NotCached then newType
       else
+        // Inlined from WeakHashSet#put
         Stats.record(statsItem("put"))
-        var idx = index(h)
-        var e = entryAt(idx)
-        while e != null do
-          if (e.prefix eq prefix) && (e.designator eq designator) && (e.isTerm == isTerm) then return e
-          idx = nextIndex(idx)
-          e = entryAt(idx)
-        addEntryAt(idx, newType)
+        removeStaleEntries()
+        val bucket = index(h)
+        val oldHead = table(bucket)
+
+        @tailrec
+        def linkedListLoop(entry: Entry[NamedType]): NamedType = entry match
+          case null                    => addEntryAt(bucket, newType, h, oldHead)
+          case _                       =>
+            val e = entry.get
+            if e != null && (e.prefix eq prefix) && (e.designator eq designator) && (e.isTerm == isTerm) then e
+            else linkedListLoop(entry.tail)
+
+        linkedListLoop(oldHead)
+      end if
   end NamedTypeUniques
 
-  final class AppliedUniques extends HashSet[AppliedType](Config.initialUniquesCapacity * 2) with Hashable:
+  final class AppliedUniques extends WeakHashSet[AppliedType](Config.initialUniquesCapacity * 2) with Hashable:
     override def hash(x: AppliedType): Int = x.hash
 
     def enterIfNew(tycon: Type, args: List[Type]): AppliedType =
@@ -62,13 +72,21 @@ object Uniques:
       if monitored then recordCaching(h, classOf[CachedAppliedType])
       if h == NotCached then newType
       else
+        // Inlined from WeakHashSet#put
         Stats.record(statsItem("put"))
-        var idx = index(h)
-        var e = entryAt(idx)
-        while e != null do
-          if (e.tycon eq tycon) && e.args.eqElements(args) then return e
-          idx = nextIndex(idx)
-          e = entryAt(idx)
-        addEntryAt(idx, newType)
+        removeStaleEntries()
+        val bucket = index(h)
+        val oldHead = table(bucket)
+
+        @tailrec
+        def linkedListLoop(entry: Entry[AppliedType]): AppliedType = entry match
+          case null                    => addEntryAt(bucket, newType, h, oldHead)
+          case _                       =>
+            val e = entry.get
+            if e != null && (e.tycon eq tycon) && e.args.eqElements(args) then e
+            else linkedListLoop(entry.tail)
+
+        linkedListLoop(oldHead)
+      end if
   end AppliedUniques
 end Uniques
diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala
index 93603e8a9d37..3b05ee351b86 100644
--- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala
+++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala
@@ -6,11 +6,11 @@ import scala.annotation.switch
 
 object ClassfileConstants {
 
-  final val JAVA_MAGIC = 0xCAFEBABE
-  final val JAVA_MAJOR_VERSION = 45
-  final val JAVA_MINOR_VERSION = 3
+  inline val JAVA_MAGIC = 0xCAFEBABE
+  inline val JAVA_MAJOR_VERSION = 45
+  inline val JAVA_MINOR_VERSION = 3
 
-  final val JAVA8_MAJOR_VERSION = 52
+  inline val JAVA8_MAJOR_VERSION = 52
 
   /** (see http://java.sun.com/docs/books/jvms/second_edition/jvms-clarify.html)
    *
@@ -38,298 +38,298 @@ object ClassfileConstants {
    *    https://groups.google.com/forum/?hl=en#!topic/jvm-languages/jVhzvq8-ZIk
    *
    */                                        // Class   Field   Method
-  final val JAVA_ACC_PUBLIC       = 0x0001   //   X       X        X
-  final val JAVA_ACC_PRIVATE      = 0x0002   //           X        X
-  final val JAVA_ACC_PROTECTED    = 0x0004   //           X        X
-  final val JAVA_ACC_STATIC       = 0x0008   //           X        X
-  final val JAVA_ACC_FINAL        = 0x0010   //   X       X        X
-  final val JAVA_ACC_SUPER        = 0x0020   //   X
-  final val JAVA_ACC_SYNCHRONIZED = 0x0020   //                    X
-  final val JAVA_ACC_VOLATILE     = 0x0040   //           X
-  final val JAVA_ACC_BRIDGE       = 0x0040   //                    X
-  final val JAVA_ACC_TRANSIENT    = 0x0080   //           X
-  final val JAVA_ACC_VARARGS      = 0x0080   //                    X
-  final val JAVA_ACC_NATIVE       = 0x0100   //                    X
-  final val JAVA_ACC_INTERFACE    = 0x0200   //   X
-  final val JAVA_ACC_ABSTRACT     = 0x0400   //   X                X
-  final val JAVA_ACC_STRICT       = 0x0800   //                    X
-  final val JAVA_ACC_SYNTHETIC    = 0x1000   //   X       X        X
-  final val JAVA_ACC_ANNOTATION   = 0x2000   //   X
-  final val JAVA_ACC_ENUM         = 0x4000   //   X       X
+  inline val JAVA_ACC_PUBLIC       = 0x0001   //   X       X        X
+  inline val JAVA_ACC_PRIVATE      = 0x0002   //           X        X
+  inline val JAVA_ACC_PROTECTED    = 0x0004   //           X        X
+  inline val JAVA_ACC_STATIC       = 0x0008   //           X        X
+  inline val JAVA_ACC_FINAL        = 0x0010   //   X       X        X
+  inline val JAVA_ACC_SUPER        = 0x0020   //   X
+  inline val JAVA_ACC_SYNCHRONIZED = 0x0020   //                    X
+  inline val JAVA_ACC_VOLATILE     = 0x0040   //           X
+  inline val JAVA_ACC_BRIDGE       = 0x0040   //                    X
+  inline val JAVA_ACC_TRANSIENT    = 0x0080   //           X
+  inline val JAVA_ACC_VARARGS      = 0x0080   //                    X
+  inline val JAVA_ACC_NATIVE       = 0x0100   //                    X
+  inline val JAVA_ACC_INTERFACE    = 0x0200   //   X
+  inline val JAVA_ACC_ABSTRACT     = 0x0400   //   X                X
+  inline val JAVA_ACC_STRICT       = 0x0800   //                    X
+  inline val JAVA_ACC_SYNTHETIC    = 0x1000   //   X       X        X
+  inline val JAVA_ACC_ANNOTATION   = 0x2000   //   X
+  inline val JAVA_ACC_ENUM         = 0x4000   //   X       X
 
   // tags describing the type of a literal in the constant pool
-  final val CONSTANT_UTF8          =  1
-  final val CONSTANT_UNICODE       =  2
-  final val CONSTANT_INTEGER       =  3
-  final val CONSTANT_FLOAT         =  4
-  final val CONSTANT_LONG          =  5
-  final val CONSTANT_DOUBLE        =  6
-  final val CONSTANT_CLASS         =  7
-  final val CONSTANT_STRING        =  8
-  final val CONSTANT_FIELDREF      =  9
-  final val CONSTANT_METHODREF     = 10
-  final val CONSTANT_INTFMETHODREF = 11
-  final val CONSTANT_NAMEANDTYPE   = 12
-
-  final val CONSTANT_METHODHANDLE  = 15
-  final val CONSTANT_METHODTYPE    = 16
-  final val CONSTANT_INVOKEDYNAMIC = 18
+  inline val CONSTANT_UTF8          =  1
+  inline val CONSTANT_UNICODE       =  2
+  inline val CONSTANT_INTEGER       =  3
+  inline val CONSTANT_FLOAT         =  4
+  inline val CONSTANT_LONG          =  5
+  inline val CONSTANT_DOUBLE        =  6
+  inline val CONSTANT_CLASS         =  7
+  inline val CONSTANT_STRING        =  8
+  inline val CONSTANT_FIELDREF      =  9
+  inline val CONSTANT_METHODREF     = 10
+  inline val CONSTANT_INTFMETHODREF = 11
+  inline val CONSTANT_NAMEANDTYPE   = 12
+
+  inline val CONSTANT_METHODHANDLE  = 15
+  inline val CONSTANT_METHODTYPE    = 16
+  inline val CONSTANT_INVOKEDYNAMIC = 18
 
   // tags describing the type of a literal in attribute values
-  final val BYTE_TAG   = 'B'
-  final val CHAR_TAG   = 'C'
-  final val DOUBLE_TAG = 'D'
-  final val FLOAT_TAG  = 'F'
-  final val INT_TAG    = 'I'
-  final val LONG_TAG   = 'J'
-  final val SHORT_TAG  = 'S'
-  final val BOOL_TAG   = 'Z'
-  final val STRING_TAG = 's'
-  final val ENUM_TAG   = 'e'
-  final val CLASS_TAG  = 'c'
-  final val ARRAY_TAG  = '['
-  final val VOID_TAG   = 'V'
-  final val TVAR_TAG   = 'T'
-  final val OBJECT_TAG = 'L'
-  final val ANNOTATION_TAG = '@'
-  final val SCALA_NOTHING = "scala.runtime.Nothing$"
-  final val SCALA_NULL = "scala.runtime.Null$"
+  inline val BYTE_TAG   = 'B'
+  inline val CHAR_TAG   = 'C'
+  inline val DOUBLE_TAG = 'D'
+  inline val FLOAT_TAG  = 'F'
+  inline val INT_TAG    = 'I'
+  inline val LONG_TAG   = 'J'
+  inline val SHORT_TAG  = 'S'
+  inline val BOOL_TAG   = 'Z'
+  inline val STRING_TAG = 's'
+  inline val ENUM_TAG   = 'e'
+  inline val CLASS_TAG  = 'c'
+  inline val ARRAY_TAG  = '['
+  inline val VOID_TAG   = 'V'
+  inline val TVAR_TAG   = 'T'
+  inline val OBJECT_TAG = 'L'
+  inline val ANNOTATION_TAG = '@'
+  inline val SCALA_NOTHING = "scala.runtime.Nothing$"
+  inline val SCALA_NULL = "scala.runtime.Null$"
 
 
   // tags describing the type of newarray
-  final val T_BOOLEAN = 4
-  final val T_CHAR    = 5
-  final val T_FLOAT   = 6
-  final val T_DOUBLE  = 7
-  final val T_BYTE    = 8
-  final val T_SHORT   = 9
-  final val T_INT     = 10
-  final val T_LONG    = 11
+  inline val T_BOOLEAN = 4
+  inline val T_CHAR    = 5
+  inline val T_FLOAT   = 6
+  inline val T_DOUBLE  = 7
+  inline val T_BYTE    = 8
+  inline val T_SHORT   = 9
+  inline val T_INT     = 10
+  inline val T_LONG    = 11
 
   // JVM mnemonics
-  final val nop         = 0x00
-  final val aconst_null = 0x01
-  final val iconst_m1   = 0x02
-
-  final val iconst_0    = 0x03
-  final val iconst_1    = 0x04
-  final val iconst_2    = 0x05
-  final val iconst_3    = 0x06
-  final val iconst_4    = 0x07
-  final val iconst_5    = 0x08
-
-  final val lconst_0    = 0x09
-  final val lconst_1    = 0x0a
-  final val fconst_0    = 0x0b
-  final val fconst_1    = 0x0c
-  final val fconst_2    = 0x0d
-  final val dconst_0    = 0x0e
-  final val dconst_1    = 0x0f
-
-  final val bipush      = 0x10
-  final val sipush      = 0x11
-  final val ldc         = 0x12
-  final val ldc_w       = 0x13
-  final val ldc2_w      = 0x14
-
-  final val iload       = 0x15
-  final val lload       = 0x16
-  final val fload       = 0x17
-  final val dload       = 0x18
-  final val aload       = 0x19
-
-  final val iload_0     = 0x1a
-  final val iload_1     = 0x1b
-  final val iload_2     = 0x1c
-  final val iload_3     = 0x1d
-  final val lload_0     = 0x1e
-  final val lload_1     = 0x1f
-  final val lload_2     = 0x20
-  final val lload_3     = 0x21
-  final val fload_0     = 0x22
-  final val fload_1     = 0x23
-  final val fload_2     = 0x24
-  final val fload_3     = 0x25
-  final val dload_0     = 0x26
-  final val dload_1     = 0x27
-  final val dload_2     = 0x28
-  final val dload_3     = 0x29
-  final val aload_0     = 0x2a
-  final val aload_1     = 0x2b
-  final val aload_2     = 0x2c
-  final val aload_3     = 0x2d
-  final val iaload      = 0x2e
-  final val laload      = 0x2f
-  final val faload      = 0x30
-  final val daload      = 0x31
-  final val aaload      = 0x32
-  final val baload      = 0x33
-  final val caload      = 0x34
-  final val saload      = 0x35
-
-  final val istore      = 0x36
-  final val lstore      = 0x37
-  final val fstore      = 0x38
-  final val dstore      = 0x39
-  final val astore      = 0x3a
-  final val istore_0    = 0x3b
-  final val istore_1    = 0x3c
-  final val istore_2    = 0x3d
-  final val istore_3    = 0x3e
-  final val lstore_0    = 0x3f
-  final val lstore_1    = 0x40
-  final val lstore_2    = 0x41
-  final val lstore_3    = 0x42
-  final val fstore_0    = 0x43
-  final val fstore_1    = 0x44
-  final val fstore_2    = 0x45
-  final val fstore_3    = 0x46
-  final val dstore_0    = 0x47
-  final val dstore_1    = 0x48
-  final val dstore_2    = 0x49
-  final val dstore_3    = 0x4a
-  final val astore_0    = 0x4b
-  final val astore_1    = 0x4c
-  final val astore_2    = 0x4d
-  final val astore_3    = 0x4e
-  final val iastore     = 0x4f
-  final val lastore     = 0x50
-  final val fastore     = 0x51
-  final val dastore     = 0x52
-  final val aastore     = 0x53
-  final val bastore     = 0x54
-  final val castore     = 0x55
-  final val sastore     = 0x56
-
-  final val pop         = 0x57
-  final val pop2        = 0x58
-  final val dup         = 0x59
-  final val dup_x1      = 0x5a
-  final val dup_x2      = 0x5b
-  final val dup2        = 0x5c
-  final val dup2_x1     = 0x5d
-  final val dup2_x2     = 0x5e
-  final val swap        = 0x5f
-
-  final val iadd        = 0x60
-  final val ladd        = 0x61
-  final val fadd        = 0x62
-  final val dadd        = 0x63
-  final val isub        = 0x64
-  final val lsub        = 0x65
-  final val fsub        = 0x66
-  final val dsub        = 0x67
-  final val imul        = 0x68
-  final val lmul        = 0x69
-  final val fmul        = 0x6a
-  final val dmul        = 0x6b
-  final val idiv        = 0x6c
-  final val ldiv        = 0x6d
-  final val fdiv        = 0x6e
-  final val ddiv        = 0x6f
-  final val irem        = 0x70
-  final val lrem        = 0x71
-  final val frem        = 0x72
-  final val drem        = 0x73
-
-  final val ineg        = 0x74
-  final val lneg        = 0x75
-  final val fneg        = 0x76
-  final val dneg        = 0x77
-
-  final val ishl        = 0x78
-  final val lshl        = 0x79
-  final val ishr        = 0x7a
-  final val lshr        = 0x7b
-  final val iushr       = 0x7c
-  final val lushr       = 0x7d
-  final val iand        = 0x7e
-  final val land        = 0x7f
-  final val ior         = 0x80
-  final val lor         = 0x81
-  final val ixor        = 0x82
-  final val lxor        = 0x83
-  final val iinc        = 0x84
-
-  final val i2l         = 0x85
-  final val i2f         = 0x86
-  final val i2d         = 0x87
-  final val l2i         = 0x88
-  final val l2f         = 0x89
-  final val l2d         = 0x8a
-  final val f2i         = 0x8b
-  final val f2l         = 0x8c
-  final val f2d         = 0x8d
-  final val d2i         = 0x8e
-  final val d2l         = 0x8f
-  final val d2f         = 0x90
-  final val i2b         = 0x91
-  final val i2c         = 0x92
-  final val i2s         = 0x93
-
-  final val lcmp        = 0x94
-  final val fcmpl       = 0x95
-  final val fcmpg       = 0x96
-  final val dcmpl       = 0x97
-  final val dcmpg       = 0x98
-
-  final val ifeq        = 0x99
-  final val ifne        = 0x9a
-  final val iflt        = 0x9b
-  final val ifge        = 0x9c
-  final val ifgt        = 0x9d
-  final val ifle        = 0x9e
-  final val if_icmpeq   = 0x9f
-  final val if_icmpne   = 0xa0
-  final val if_icmplt   = 0xa1
-  final val if_icmpge   = 0xa2
-  final val if_icmpgt   = 0xa3
-  final val if_icmple   = 0xa4
-  final val if_acmpeq   = 0xa5
-  final val if_acmpne   = 0xa6
-  final val goto        = 0xa7
-  final val jsr         = 0xa8
-  final val ret         = 0xa9
-  final val tableswitch = 0xaa
-  final val lookupswitch = 0xab
-  final val ireturn     = 0xac
-  final val lreturn     = 0xad
-  final val freturn     = 0xae
-  final val dreturn     = 0xaf
-  final val areturn     = 0xb0
-  final val return_     = 0xb1
-
-  final val getstatic   = 0xb2
-  final val putstatic   = 0xb3
-  final val getfield    = 0xb4
-  final val putfield    = 0xb5
-
-  final val invokevirtual   = 0xb6
-  final val invokespecial   = 0xb7
-  final val invokestatic    = 0xb8
-  final val invokeinterface = 0xb9
-  final val xxxunusedxxxx   = 0xba
-
-  final val new_          = 0xbb
-  final val newarray      = 0xbc
-  final val anewarray     = 0xbd
-  final val arraylength   = 0xbe
-  final val athrow        = 0xbf
-  final val checkcast     = 0xc0
-  final val instanceof    = 0xc1
-  final val monitorenter  = 0xc2
-  final val monitorexit   = 0xc3
-  final val wide          = 0xc4
-  final val multianewarray = 0xc5
-  final val ifnull        = 0xc6
-  final val ifnonnull     = 0xc7
-  final val goto_w        = 0xc8
-  final val jsr_w         = 0xc9
+  inline val nop         = 0x00
+  inline val aconst_null = 0x01
+  inline val iconst_m1   = 0x02
+
+  inline val iconst_0    = 0x03
+  inline val iconst_1    = 0x04
+  inline val iconst_2    = 0x05
+  inline val iconst_3    = 0x06
+  inline val iconst_4    = 0x07
+  inline val iconst_5    = 0x08
+
+  inline val lconst_0    = 0x09
+  inline val lconst_1    = 0x0a
+  inline val fconst_0    = 0x0b
+  inline val fconst_1    = 0x0c
+  inline val fconst_2    = 0x0d
+  inline val dconst_0    = 0x0e
+  inline val dconst_1    = 0x0f
+
+  inline val bipush      = 0x10
+  inline val sipush      = 0x11
+  inline val ldc         = 0x12
+  inline val ldc_w       = 0x13
+  inline val ldc2_w      = 0x14
+
+  inline val iload       = 0x15
+  inline val lload       = 0x16
+  inline val fload       = 0x17
+  inline val dload       = 0x18
+  inline val aload       = 0x19
+
+  inline val iload_0     = 0x1a
+  inline val iload_1     = 0x1b
+  inline val iload_2     = 0x1c
+  inline val iload_3     = 0x1d
+  inline val lload_0     = 0x1e
+  inline val lload_1     = 0x1f
+  inline val lload_2     = 0x20
+  inline val lload_3     = 0x21
+  inline val fload_0     = 0x22
+  inline val fload_1     = 0x23
+  inline val fload_2     = 0x24
+  inline val fload_3     = 0x25
+  inline val dload_0     = 0x26
+  inline val dload_1     = 0x27
+  inline val dload_2     = 0x28
+  inline val dload_3     = 0x29
+  inline val aload_0     = 0x2a
+  inline val aload_1     = 0x2b
+  inline val aload_2     = 0x2c
+  inline val aload_3     = 0x2d
+  inline val iaload      = 0x2e
+  inline val laload      = 0x2f
+  inline val faload      = 0x30
+  inline val daload      = 0x31
+  inline val aaload      = 0x32
+  inline val baload      = 0x33
+  inline val caload      = 0x34
+  inline val saload      = 0x35
+
+  inline val istore      = 0x36
+  inline val lstore      = 0x37
+  inline val fstore      = 0x38
+  inline val dstore      = 0x39
+  inline val astore      = 0x3a
+  inline val istore_0    = 0x3b
+  inline val istore_1    = 0x3c
+  inline val istore_2    = 0x3d
+  inline val istore_3    = 0x3e
+  inline val lstore_0    = 0x3f
+  inline val lstore_1    = 0x40
+  inline val lstore_2    = 0x41
+  inline val lstore_3    = 0x42
+  inline val fstore_0    = 0x43
+  inline val fstore_1    = 0x44
+  inline val fstore_2    = 0x45
+  inline val fstore_3    = 0x46
+  inline val dstore_0    = 0x47
+  inline val dstore_1    = 0x48
+  inline val dstore_2    = 0x49
+  inline val dstore_3    = 0x4a
+  inline val astore_0    = 0x4b
+  inline val astore_1    = 0x4c
+  inline val astore_2    = 0x4d
+  inline val astore_3    = 0x4e
+  inline val iastore     = 0x4f
+  inline val lastore     = 0x50
+  inline val fastore     = 0x51
+  inline val dastore     = 0x52
+  inline val aastore     = 0x53
+  inline val bastore     = 0x54
+  inline val castore     = 0x55
+  inline val sastore     = 0x56
+
+  inline val pop         = 0x57
+  inline val pop2        = 0x58
+  inline val dup         = 0x59
+  inline val dup_x1      = 0x5a
+  inline val dup_x2      = 0x5b
+  inline val dup2        = 0x5c
+  inline val dup2_x1     = 0x5d
+  inline val dup2_x2     = 0x5e
+  inline val swap        = 0x5f
+
+  inline val iadd        = 0x60
+  inline val ladd        = 0x61
+  inline val fadd        = 0x62
+  inline val dadd        = 0x63
+  inline val isub        = 0x64
+  inline val lsub        = 0x65
+  inline val fsub        = 0x66
+  inline val dsub        = 0x67
+  inline val imul        = 0x68
+  inline val lmul        = 0x69
+  inline val fmul        = 0x6a
+  inline val dmul        = 0x6b
+  inline val idiv        = 0x6c
+  inline val ldiv        = 0x6d
+  inline val fdiv        = 0x6e
+  inline val ddiv        = 0x6f
+  inline val irem        = 0x70
+  inline val lrem        = 0x71
+  inline val frem        = 0x72
+  inline val drem        = 0x73
+
+  inline val ineg        = 0x74
+  inline val lneg        = 0x75
+  inline val fneg        = 0x76
+  inline val dneg        = 0x77
+
+  inline val ishl        = 0x78
+  inline val lshl        = 0x79
+  inline val ishr        = 0x7a
+  inline val lshr        = 0x7b
+  inline val iushr       = 0x7c
+  inline val lushr       = 0x7d
+  inline val iand        = 0x7e
+  inline val land        = 0x7f
+  inline val ior         = 0x80
+  inline val lor         = 0x81
+  inline val ixor        = 0x82
+  inline val lxor        = 0x83
+  inline val iinc        = 0x84
+
+  inline val i2l         = 0x85
+  inline val i2f         = 0x86
+  inline val i2d         = 0x87
+  inline val l2i         = 0x88
+  inline val l2f         = 0x89
+  inline val l2d         = 0x8a
+  inline val f2i         = 0x8b
+  inline val f2l         = 0x8c
+  inline val f2d         = 0x8d
+  inline val d2i         = 0x8e
+  inline val d2l         = 0x8f
+  inline val d2f         = 0x90
+  inline val i2b         = 0x91
+  inline val i2c         = 0x92
+  inline val i2s         = 0x93
+
+  inline val lcmp        = 0x94
+  inline val fcmpl       = 0x95
+  inline val fcmpg       = 0x96
+  inline val dcmpl       = 0x97
+  inline val dcmpg       = 0x98
+
+  inline val ifeq        = 0x99
+  inline val ifne        = 0x9a
+  inline val iflt        = 0x9b
+  inline val ifge        = 0x9c
+  inline val ifgt        = 0x9d
+  inline val ifle        = 0x9e
+  inline val if_icmpeq   = 0x9f
+  inline val if_icmpne   = 0xa0
+  inline val if_icmplt   = 0xa1
+  inline val if_icmpge   = 0xa2
+  inline val if_icmpgt   = 0xa3
+  inline val if_icmple   = 0xa4
+  inline val if_acmpeq   = 0xa5
+  inline val if_acmpne   = 0xa6
+  inline val goto        = 0xa7
+  inline val jsr         = 0xa8
+  inline val ret         = 0xa9
+  inline val tableswitch = 0xaa
+  inline val lookupswitch = 0xab
+  inline val ireturn     = 0xac
+  inline val lreturn     = 0xad
+  inline val freturn     = 0xae
+  inline val dreturn     = 0xaf
+  inline val areturn     = 0xb0
+  inline val return_     = 0xb1
+
+  inline val getstatic   = 0xb2
+  inline val putstatic   = 0xb3
+  inline val getfield    = 0xb4
+  inline val putfield    = 0xb5
+
+  inline val invokevirtual   = 0xb6
+  inline val invokespecial   = 0xb7
+  inline val invokestatic    = 0xb8
+  inline val invokeinterface = 0xb9
+  inline val xxxunusedxxxx   = 0xba
+
+  inline val new_          = 0xbb
+  inline val newarray      = 0xbc
+  inline val anewarray     = 0xbd
+  inline val arraylength   = 0xbe
+  inline val athrow        = 0xbf
+  inline val checkcast     = 0xc0
+  inline val instanceof    = 0xc1
+  inline val monitorenter  = 0xc2
+  inline val monitorexit   = 0xc3
+  inline val wide          = 0xc4
+  inline val multianewarray = 0xc5
+  inline val ifnull        = 0xc6
+  inline val ifnonnull     = 0xc7
+  inline val goto_w        = 0xc8
+  inline val jsr_w         = 0xc9
 
   // reserved opcodes
-  final val breakpoint    = 0xca
-  final val impdep1       = 0xfe
-  final val impdep2       = 0xff
+  inline val breakpoint    = 0xca
+  inline val impdep1       = 0xfe
+  inline val impdep2       = 0xff
 
   import Flags._
   abstract class FlagTranslation {
diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
index bc64f34d8d7a..9920fc060142 100644
--- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
+++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
@@ -3,7 +3,7 @@ package dotc
 package core
 package classfile
 
-import dotty.tools.tasty.{ TastyReader, TastyHeaderUnpickler }
+import dotty.tools.tasty.{ TastyFormat, TastyReader, TastyHeaderUnpickler }
 
 import Contexts._, Symbols._, Types._, Names._, StdNames._, NameOps._, Scopes._, Decorators._
 import SymDenotations._, unpickleScala2.Scala2Unpickler._, Constants._, Annotations._, util.Spans._
@@ -20,6 +20,7 @@ import java.util.UUID
 import scala.collection.immutable
 import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
 import scala.annotation.switch
+import tasty.TastyVersion
 import typer.Checking.checkNonCyclic
 import io.{AbstractFile, PlainFile, ZipArchive}
 import scala.util.control.NonFatal
@@ -62,8 +63,8 @@ class ClassfileParser(
 
   protected val staticModule: Symbol = moduleRoot.sourceModule(using ictx)
 
-  protected val instanceScope: MutableScope = newScope     // the scope of all instance definitions
-  protected val staticScope: MutableScope = newScope       // the scope of all static definitions
+  protected val instanceScope: MutableScope = newScope(0)     // the scope of all instance definitions
+  protected val staticScope: MutableScope = newScope(0)       // the scope of all static definitions
   protected var pool: ConstantPool = _              // the classfile's constant pool
 
   protected var currentClassName: SimpleName = _      // JVM name of the current class
@@ -884,7 +885,7 @@ class ClassfileParser(
       }
 
       def unpickleTASTY(bytes: Array[Byte]): Some[Embedded]  = {
-        val unpickler = new tasty.DottyUnpickler(bytes)
+        val unpickler = new tasty.DottyUnpickler(bytes, ctx.tastyVersion)
         unpickler.enter(roots = Set(classRoot, moduleRoot, moduleRoot.sourceModule))(using ctx.withSource(util.NoSource))
         Some(unpickler)
       }
@@ -950,9 +951,37 @@ class ClassfileParser(
           if (tastyBytes.nonEmpty) {
             val reader = new TastyReader(bytes, 0, 16)
             val expectedUUID = new UUID(reader.readUncompressedLong(), reader.readUncompressedLong())
-            val tastyUUID = new TastyHeaderUnpickler(tastyBytes).readHeader()
+            val tastyHeader = new TastyHeaderUnpickler(tastyBytes).readFullHeader()
+            val fileTastyVersion = TastyVersion(tastyHeader.majorVersion, tastyHeader.minorVersion, tastyHeader.experimentalVersion)
+            val tastyUUID = tastyHeader.uuid
             if (expectedUUID != tastyUUID)
               report.warning(s"$classfile is out of sync with its TASTy file. Loaded TASTy file. Try cleaning the project to fix this issue", NoSourcePosition)
+
+            val tastyFilePath = classfile.path.stripSuffix(".class") + ".tasty"
+
+            def reportWrongTasty(reason: String, highestAllowed: TastyVersion) =
+              report.error(s"""The class ${classRoot.symbol.showFullName} cannot be loaded from file ${tastyFilePath} because $reason:
+                              |highest allowed: ${highestAllowed.show}
+                              |found:           ${fileTastyVersion.show}
+              """.stripMargin)
+
+            val isTastyReadable = fileTastyVersion.isCompatibleWith(TastyVersion.compilerVersion)
+            if !isTastyReadable then
+              reportWrongTasty("its TASTy format cannot be read by the compiler", TastyVersion.compilerVersion)
+            else
+              def isStdlibClass(cls: ClassDenotation): Boolean =
+                ctx.platform.classPath.findClassFile(cls.fullName.mangledString) match {
+                  case Some(entry: ZipArchive#Entry) =>
+                    entry.underlyingSource.map(_.name.startsWith("scala3-library_")).getOrElse(false)
+                  case _ => false
+                }
+              // While emitting older TASTy the newer standard library used by the compiler will still be on the class path so trying to read its TASTy files should not cause a crash.
+              // This is OK however because references to elements of stdlib API are validated according to the values of their `@since` annotations.
+              // This should guarantee that the code won't crash at runtime when used with the stdlib provided by an older compiler.
+              val isTastyCompatible = fileTastyVersion.isCompatibleWith(ctx.tastyVersion) || isStdlibClass(classRoot)
+              if !isTastyCompatible then
+                reportWrongTasty(s"its TASTy format is not compatible with the one of the targeted Scala release (${ctx.scalaRelease.show})", ctx.tastyVersion)
+
             return unpickleTASTY(tastyBytes)
           }
         }
@@ -1061,6 +1090,8 @@ class ClassfileParser(
           }
         else if (sym == classRoot.symbol)
           instanceScope.lookup(name)
+        else if (sym == classRoot.symbol.owner && name == classRoot.name)
+          classRoot.symbol
         else
           sym.info.member(name).symbol
 
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala
index ffcab8dc7a90..7396adf76991 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala
@@ -39,7 +39,7 @@ object DottyUnpickler {
  *  @param bytes         the bytearray containing the Tasty file from which we unpickle
  *  @param mode          the tasty file contains package (TopLevel), an expression (Term) or a type (TypeTree)
  */
-class DottyUnpickler(bytes: Array[Byte], mode: UnpickleMode = UnpickleMode.TopLevel) extends ClassfileParser.Embedded with tpd.TreeProvider {
+class DottyUnpickler(bytes: Array[Byte], maximalTastyVersion: TastyVersion, mode: UnpickleMode = UnpickleMode.TopLevel) extends ClassfileParser.Embedded with tpd.TreeProvider {
   import tpd._
   import DottyUnpickler._
 
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala
index 7181687d2a99..ad0c051e1b7b 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala
@@ -91,7 +91,7 @@ class PositionPickler(
           | _: Trees.PackageDef[?]
           // holes can change source files when filled, which means
           // they might lose their position
-          | _: TreePickler.Hole => true
+          | _: Trees.Hole[?] => true
       case _ => false
     }
 
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala
index 3aeb7e6f35c9..74b8b357e00c 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala
@@ -7,6 +7,8 @@ import dotty.tools.tasty.{TastyBuffer, TastyFormat, TastyHash}
 import TastyFormat._
 import TastyBuffer._
 
+import Contexts._
+
 import collection.mutable
 import core.Symbols.{Symbol, ClassSymbol}
 import ast.tpd
@@ -30,7 +32,7 @@ class TastyPickler(val rootCls: ClassSymbol) {
   def newSection(name: String, buf: TastyBuffer): Unit =
     sections += ((nameBuffer.nameIndex(name.toTermName), buf))
 
-  def assembleParts(): Array[Byte] = {
+  def assembleParts()(using Context): Array[Byte] = {
     def lengthWithLength(buf: TastyBuffer) =
       buf.length + natSize(buf.length)
 
@@ -40,6 +42,8 @@ class TastyPickler(val rootCls: ClassSymbol) {
     val nameBufferHash = TastyHash.pjwHash64(nameBuffer.bytes)
     val treeSectionHash +: otherSectionHashes = sections.map(x => TastyHash.pjwHash64(x._2.bytes))
 
+    val tastyVersion = ctx.tastyVersion
+
     // Hash of name table and tree
     val uuidLow: Long = nameBufferHash ^ treeSectionHash
     // Hash of positions, comments and any additional section
@@ -48,9 +52,9 @@ class TastyPickler(val rootCls: ClassSymbol) {
     val headerBuffer = {
       val buf = new TastyBuffer(header.length + TastyPickler.versionStringBytes.length + 32)
       for (ch <- header) buf.writeByte(ch.toByte)
-      buf.writeNat(MajorVersion)
-      buf.writeNat(MinorVersion)
-      buf.writeNat(ExperimentalVersion)
+      buf.writeNat(tastyVersion.major)
+      buf.writeNat(tastyVersion.minor)
+      buf.writeNat(tastyVersion.experimental)
       buf.writeNat(TastyPickler.versionStringBytes.length)
       buf.writeBytes(TastyPickler.versionStringBytes, TastyPickler.versionStringBytes.length)
       buf.writeUncompressedLong(uuidLow)
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala
index 3a482309e0c3..d1c9904cf6cb 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala
@@ -11,14 +11,52 @@ import TastyUnpickler._
 import util.Spans.offsetToInt
 import printing.Highlighting._
 import dotty.tools.tasty.TastyFormat.{ASTsSection, PositionsSection, CommentsSection}
+import java.nio.file.{Files, Paths}
+import dotty.tools.io.{JarArchive, Path}
 
 object TastyPrinter:
-  def show(bytes: Array[Byte])(using Context): String =
+
+  def showContents(bytes: Array[Byte], noColor: Boolean): String =
     val printer =
-      if ctx.settings.color.value == "never" then new TastyPrinter(bytes)
+      if noColor then new TastyPrinter(bytes)
       else new TastyAnsiiPrinter(bytes)
     printer.showContents()
 
+  def main(args: Array[String]): Unit = {
+    // TODO: Decouple CliCommand from Context and use CliCommand.distill?
+    val lineWidth = 80
+    val line = "-" * lineWidth
+    val noColor = args.contains("-color:never")
+    var printLastLine = false
+    def printTasty(fileName: String, bytes: Array[Byte]): Unit =
+      println(line)
+      println(fileName)
+      println(line)
+      println(showContents(bytes, noColor))
+      println()
+      printLastLine = true
+    for arg <- args do
+      if arg == "-color:never" then () // skip
+      else if arg.startsWith("-") then println(s"bad option '$arg' was ignored")
+      else if arg.endsWith(".tasty") then {
+        val path = Paths.get(arg)
+        if Files.exists(path) then printTasty(arg, Files.readAllBytes(path))
+        else println("File not found: " + arg)
+      }
+      else if arg.endsWith(".jar") then {
+        val jar = JarArchive.open(Path(arg), create = false)
+        try
+          for file <- jar.iterator() if file.name.endsWith(".tasty") do
+            printTasty(s"$arg ${file.path}", file.toByteArray)
+        finally jar.close()
+
+      }
+      else println(s"Not a '.tasty' or '.jar' file: $arg")
+
+    if printLastLine then
+      println(line)
+  }
+
 class TastyPrinter(bytes: Array[Byte]) {
 
   private val sb: StringBuilder = new StringBuilder
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala
index 7bb71aa6deee..1a7e1d50c981 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala
@@ -3,7 +3,7 @@ package core
 package tasty
 
 import dotty.tools.tasty.{TastyFormat, TastyBuffer, TastyReader, TastyHeaderUnpickler}
-import TastyFormat.NameTags._
+import TastyFormat.NameTags._, TastyFormat.nameTagToString
 import TastyBuffer.NameRef
 
 import scala.collection.mutable
@@ -79,8 +79,10 @@ class TastyUnpickler(reader: TastyReader) {
         val original = readName()
         val target = readName()
         readSignedRest(original, target)
-      case _ =>
+      case SUPERACCESSOR | INLINEACCESSOR | BODYRETAINER | OBJECTCLASS =>
         simpleNameKindOfTag(tag)(readName())
+      case _ =>
+        throw MatchError(s"unknown name tag ${nameTagToString(tag)}")
     }
     assert(currentAddr == end, s"bad name $result $start $currentAddr $end")
     result
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyVersion.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyVersion.scala
new file mode 100644
index 000000000000..9c417563a1e3
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyVersion.scala
@@ -0,0 +1,23 @@
+package dotty.tools.dotc.core.tasty
+
+import dotty.tools.tasty.TastyFormat
+
+case class TastyVersion(major: Int, minor: Int, experimental: Int) {
+  def show = s"$major.$minor-$experimental"
+
+  def isCompatibleWith(that: TastyVersion): Boolean = TastyFormat.isVersionCompatible(
+    this.major, this.minor, this.experimental,
+    that.major, that.minor, that.experimental
+  )
+}
+
+object TastyVersion {
+  def compilerVersion = TastyVersion(TastyFormat.MajorVersion, TastyFormat.MinorVersion, TastyFormat.ExperimentalVersion)
+
+  def fromStableScalaRelease(majorVersion: Int, minorVersion: Int) = {
+    val tastyMajor = majorVersion + 25
+    val tastyMinor = minorVersion
+    TastyVersion(tastyMajor, tastyMinor, 0)
+  }
+
+}
\ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala
index 268b02b4389f..a3dedaaec685 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala
@@ -13,7 +13,7 @@ import ast.untpd.Tree
 
 class TreeBuffer extends TastyBuffer(50000) {
 
-  private final val ItemsOverOffsets = 2
+  private inline val ItemsOverOffsets = 2
   private val initialOffsetSize = bytes.length / (AddrWidth * ItemsOverOffsets)
   private var offsets = new Array[Int](initialOffsetSize)
   private var isRelative = new Array[Boolean](initialOffsetSize)
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala
index 06ba83ae15bf..8f5910c3dd56 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala
@@ -22,21 +22,10 @@ import annotation.constructorOnly
 import collection.mutable
 import dotty.tools.tasty.TastyFormat.ASTsSection
 
-object TreePickler {
-
-  case class Hole(isTermHole: Boolean, idx: Int, args: List[tpd.Tree])(implicit @constructorOnly src: SourceFile) extends tpd.Tree {
-    override def isTerm: Boolean = isTermHole
-    override def isType: Boolean = !isTermHole
-    override def fallbackToText(printer: Printer): Text =
-      if isTermHole then s"{{{ $idx |" ~~ printer.toTextGlobal(tpe) ~~ "|" ~~ printer.toTextGlobal(args, ", ") ~~ "}}}"
-      else s"[[[ $idx |" ~~ printer.toTextGlobal(tpe) ~~ "|" ~~ printer.toTextGlobal(args, ", ") ~~ "]]]"
-  }
-}
 
 class TreePickler(pickler: TastyPickler) {
   val buf: TreeBuffer = new TreeBuffer
   pickler.newSection(ASTsSection, buf)
-  import TreePickler._
   import buf._
   import pickler.nameBuffer.nameIndex
   import tpd._
@@ -100,11 +89,6 @@ class TreePickler(pickler: TastyPickler) {
     case Some(label) =>
       if (label != NoAddr) writeRef(label) else pickleForwardSymRef(sym)
     case None =>
-      // See pos/t1957.scala for an example where this can happen.
-      // I believe it's a bug in typer: the type of an implicit argument refers
-      // to a closure parameter outside the closure itself. TODO: track this down, so that we
-      // can eliminate this case.
-      report.log(i"pickling reference to as yet undefined $sym in ${sym.owner}", sym.srcPos)
       pickleForwardSymRef(sym)
   }
 
@@ -217,6 +201,8 @@ class TreePickler(pickler: TastyPickler) {
       }
       else if (tpe.prefix == NoPrefix) {
         writeByte(if (tpe.isType) TYPEREFdirect else TERMREFdirect)
+        if !symRefs.contains(sym) && !sym.isPatternBound && !sym.hasAnnotation(defn.QuotedRuntimePatterns_patternTypeAnnot) then
+          report.error(i"pickling reference to as yet undefined $tpe with symbol ${sym}", sym.srcPos)
         pickleSymRef(sym)
       }
       else tpe.designator match {
@@ -332,6 +318,7 @@ class TreePickler(pickler: TastyPickler) {
 
   def pickleDef(tag: Int, mdef: MemberDef, tpt: Tree, rhs: Tree = EmptyTree, pickleParams: => Unit = ())(using Context): Unit = {
     val sym = mdef.symbol
+
     assert(symRefs(sym) == NoAddr, sym)
     registerDef(sym)
     writeByte(tag)
@@ -355,8 +342,7 @@ class TreePickler(pickler: TastyPickler) {
   def pickleParam(tree: Tree)(using Context): Unit = {
     registerTreeAddr(tree)
     tree match {
-      case tree: ValDef => pickleDef(PARAM, tree, tree.tpt)
-      case tree: DefDef => pickleDef(PARAM, tree, tree.tpt, tree.rhs)
+      case tree: ValDef  => pickleDef(PARAM, tree, tree.tpt)
       case tree: TypeDef => pickleDef(TYPEPARAM, tree, tree.rhs)
     }
   }
@@ -411,7 +397,7 @@ class TreePickler(pickler: TastyPickler) {
               var ename = tree.symbol.targetName
               val selectFromQualifier =
                 name.isTypeName
-                || qual.isInstanceOf[TreePickler.Hole] // holes have no symbol
+                || qual.isInstanceOf[Hole] // holes have no symbol
                 || sig == Signature.NotAMethod // no overload resolution necessary
                 || !tree.denot.symbol.exists // polymorphic function type
                 || tree.denot.asSingleDenotation.isRefinedMethod // refined methods have no defining class symbol
@@ -731,9 +717,10 @@ class TreePickler(pickler: TastyPickler) {
     if flags.is(Infix) then writeModTag(INFIX)
     if flags.is(Invisible) then writeModTag(INVISIBLE)
     if (flags.is(Erased)) writeModTag(ERASED)
+    if (flags.is(Exported)) writeModTag(EXPORTED)
+    if (flags.is(Given)) writeModTag(GIVEN)
+    if (flags.is(Implicit)) writeModTag(IMPLICIT)
     if (isTerm) {
-      if (flags.is(Implicit)) writeModTag(IMPLICIT)
-      if (flags.is(Given)) writeModTag(GIVEN)
       if (flags.is(Lazy, butNot = Module)) writeModTag(LAZY)
       if (flags.is(AbsOverride)) { writeModTag(ABSTRACT); writeModTag(OVERRIDE) }
       if (flags.is(Mutable)) writeModTag(MUTABLE)
@@ -744,7 +731,6 @@ class TreePickler(pickler: TastyPickler) {
       if (flags.is(Extension)) writeModTag(EXTENSION)
       if (flags.is(ParamAccessor)) writeModTag(PARAMsetter)
       if (flags.is(SuperParamAlias)) writeModTag(PARAMalias)
-      if (flags.is(Exported)) writeModTag(EXPORTED)
       assert(!(flags.is(Label)))
     }
     else {
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
index 944d3016bd27..ac315f4cef89 100644
--- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
@@ -634,7 +634,6 @@ class TreeUnpickler(reader: TastyReader,
         }
         nextByte match {
           case PRIVATE => addFlag(Private)
-          case INTERNAL => ??? // addFlag(Internal)
           case PROTECTED => addFlag(Protected)
           case ABSTRACT =>
             readByte()
@@ -832,7 +831,11 @@ class TreeUnpickler(reader: TastyReader,
           val tpt = readTpt()(using localCtx)
           val paramss = normalizeIfConstructor(
               paramDefss.nestedMap(_.symbol), name == nme.CONSTRUCTOR)
-          val resType = effectiveResultType(sym, paramss, tpt.tpe)
+          val resType =
+            if name == nme.CONSTRUCTOR then
+              effectiveResultType(sym, paramss)
+            else
+              tpt.tpe
           sym.info = methodType(paramss, resType)
           DefDef(paramDefss, tpt)
         case VALDEF =>
@@ -857,36 +860,45 @@ class TreeUnpickler(reader: TastyReader,
             sym.info = TypeBounds.empty // needed to avoid cyclic references when unpickling rhs, see i3816.scala
             sym.setFlag(Provisional)
             val rhs = readTpt()(using localCtx)
-            sym.info = new NoCompleter {
+
+            sym.info = new NoCompleter:
               override def completerTypeParams(sym: Symbol)(using Context) =
                 rhs.tpe.typeParams
-            }
-            sym.info = sym.opaqueToBounds(
-              checkNonCyclic(sym, rhs.tpe.toBounds, reportErrors = false),
-              rhs, rhs.tpe.typeParams)
-            if sym.isOpaqueAlias then sym.typeRef.recomputeDenot() // make sure we see the new bounds from now on
+
+            def opaqueToBounds(info: Type): Type =
+              val tparamSyms = rhs match
+                case LambdaTypeTree(tparams, body) => tparams.map(_.symbol.asType)
+                case _ => Nil
+              sym.opaqueToBounds(info, rhs, tparamSyms)
+
+            val info = checkNonCyclic(sym, rhs.tpe.toBounds, reportErrors = false)
+            if sym.isOpaqueAlias then
+              sym.info = opaqueToBounds(info)
+              sym.typeRef.recomputeDenot() // make sure we see the new bounds from now on
+            else
+              sym.info = info
+
             sym.resetFlag(Provisional)
             TypeDef(rhs)
           }
         case PARAM =>
           val tpt = readTpt()(using localCtx)
-          if (nothingButMods(end)) {
-            sym.info = tpt.tpe
-            ValDef(tpt)
-          }
-          else {
-            sym.info = ExprType(tpt.tpe)
-            pickling.println(i"reading param alias $name -> $currentAddr")
-            DefDef(Nil, tpt)
-          }
+          assert(nothingButMods(end))
+          sym.info = tpt.tpe
+          ValDef(tpt)
       }
       goto(end)
       setSpan(start, tree)
-      if (!sym.isType) // Only terms might have leaky aliases, see the documentation of `checkNoPrivateLeaks`
+
+      // Dealias any non-accessible type alias in the type of `sym`. This can be
+      // skipped for types (see `checkNoPrivateLeaks` for why) as well as for
+      // param accessors since they can't refer to an inaccessible type member of
+      // the class.
+      if !sym.isType && !sym.is(ParamAccessor) then
         sym.info = ta.avoidPrivateLeaks(sym)
 
-      if (ctx.mode.is(Mode.ReadComments)) {
-        assert(ctx.docCtx.isDefined, "Mode is `ReadComments`, but no `docCtx` is set.")
+      if (ctx.settings.YreadComments.value) {
+        assert(ctx.docCtx.isDefined, "`-Yread-docs` enabled, but no `docCtx` is set.")
         commentUnpicklerOpt.foreach { commentUnpickler =>
           val comment = commentUnpickler.commentAt(start)
           ctx.docCtx.get.addDocstring(tree.symbol, comment)
@@ -1061,12 +1073,10 @@ class TreeUnpickler(reader: TastyReader,
       def makeSelect(qual: Tree, name: Name, denot: Denotation): Select =
         var qualType = qual.tpe.widenIfUnstable
         val owner = denot.symbol.maybeOwner
-        if (owner.isPackageObject && qualType.termSymbol.is(Package))
-          qualType = qualType.select(owner.sourceModule)
-        val tpe = name match {
+        val tpe0 = name match
           case name: TypeName => TypeRef(qualType, name, denot)
           case name: TermName => TermRef(qualType, name, denot)
-        }
+        val tpe = TypeOps.makePackageObjPrefixExplicit(tpe0)
         ConstFold.Select(untpd.Select(qual, name).withType(tpe))
 
       def completeSelect(name: Name, sig: Signature, target: Name): Select =
@@ -1199,8 +1209,8 @@ class TreeUnpickler(reader: TastyReader,
                *  or an override has been removed.
                *
                *  This is tested in
-               *  - sbt-dotty/sbt-test/tasty-compat/remove-override
-               *  - sbt-dotty/sbt-test/tasty-compat/move-method
+               *  - sbt-test/tasty-compat/remove-override
+               *  - sbt-test/tasty-compat/move-method
                */
               def lookupInSuper =
                 val cls = ownerTpe.classSymbol
@@ -1278,7 +1288,7 @@ class TreeUnpickler(reader: TastyReader,
               val idx = readNat()
               val tpe = readType()
               val args = until(end)(readTerm())
-              TreePickler.Hole(true, idx, args).withType(tpe)
+              Hole(true, idx, args).withType(tpe)
             case _ =>
               readPathTerm()
           }
@@ -1287,8 +1297,6 @@ class TreeUnpickler(reader: TastyReader,
       }
 
       val tree = if (tag < firstLengthTreeTag) readSimpleTerm() else readLengthTerm()
-      if (!tree.isInstanceOf[TypTree]) // FIXME: Necessary to avoid self-type cyclic reference in tasty_tools
-        tree.overwriteType(tree.tpe.simplified)
       setSpan(start, tree)
     }
 
@@ -1314,7 +1322,7 @@ class TreeUnpickler(reader: TastyReader,
           val idx = readNat()
           val tpe = readType()
           val args = until(end)(readTerm())
-          TreePickler.Hole(false, idx, args).withType(tpe)
+          Hole(false, idx, args).withType(tpe)
         case _ =>
           if (isTypeTreeTag(nextByte)) readTerm()
           else {
@@ -1353,7 +1361,9 @@ class TreeUnpickler(reader: TastyReader,
     def readLaterWithOwner[T <: AnyRef](end: Addr, op: TreeReader => Context ?=> T)(using Context): Symbol => Trees.Lazy[T] = {
       val localReader = fork
       goto(end)
-      owner => new LazyReader(localReader, owner, ctx.mode, ctx.source, op)
+      val mode = ctx.mode
+      val source = ctx.source
+      owner => new LazyReader(localReader, owner, mode, source, op)
     }
 
 // ------ Setting positions ------------------------------------------------
@@ -1397,7 +1407,7 @@ class TreeUnpickler(reader: TastyReader,
       if (path.nonEmpty) {
         val sourceFile = ctx.getSource(path)
         posUnpicklerOpt match
-          case Some(posUnpickler) =>
+          case Some(posUnpickler) if !sourceFile.initialized =>
             sourceFile.setLineIndicesFromLineSizes(posUnpickler.lineSizes)
           case _ =>
         pickling.println(i"source change at $addr: $path")
@@ -1510,9 +1520,9 @@ object TreeUnpickler {
 
   /** An enumeration indicating which subtrees should be added to an OwnerTree. */
   type MemberDefMode = Int
-  final val MemberDefsOnly = 0   // add only member defs; skip other statements
-  final val NoMemberDefs = 1     // add only statements that are not member defs
-  final val AllDefs = 2          // add everything
+  inline val MemberDefsOnly = 0   // add only member defs; skip other statements
+  inline val NoMemberDefs = 1     // add only statements that are not member defs
+  inline val AllDefs = 2          // add everything
 
   class TreeWithoutOwner extends Exception
 }
diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala
index fc06ce4da8a3..2aeb1bdeefcc 100644
--- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala
+++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala
@@ -189,9 +189,9 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
 
 object PickleBuffer {
 
-  private final val ScalaFlagEnd = 48
-  private final val ChunkBits = 8
-  private final val ChunkSize = 1 << ChunkBits
+  private inline val ScalaFlagEnd = 48
+  private inline val ChunkBits = 8
+  private inline val ChunkSize = 1 << ChunkBits
   private type FlagMap = Array[Array[Long]]
 
   private val (scalaTermFlagMap, scalaTypeFlagMap) = {
diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleFormat.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleFormat.scala
index a9a64a9bb7ee..f135de7e43e9 100644
--- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleFormat.scala
+++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleFormat.scala
@@ -119,109 +119,109 @@ object PickleFormat {
   val MajorVersion: Int = 5
   val MinorVersion: Int = 2
 
-  final val TERMname = 1
-  final val TYPEname = 2
-  final val NONEsym = 3
-  final val TYPEsym = 4
-  final val ALIASsym = 5
-  final val CLASSsym = 6
-  final val MODULEsym = 7
-  final val VALsym = 8
-  final val EXTref = 9
-  final val EXTMODCLASSref = 10
-  final val NOtpe = 11
-  final val NOPREFIXtpe = 12
-  final val THIStpe = 13
-  final val SINGLEtpe = 14
-  final val CONSTANTtpe = 15
-  final val TYPEREFtpe = 16
-  final val TYPEBOUNDStpe = 17
-  final val REFINEDtpe = 18
-  final val CLASSINFOtpe = 19
-  final val METHODtpe = 20
-  final val POLYtpe = 21
-  final val IMPLICITMETHODtpe = 22    // no longer generated
+  inline val TERMname = 1
+  inline val TYPEname = 2
+  inline val NONEsym = 3
+  inline val TYPEsym = 4
+  inline val ALIASsym = 5
+  inline val CLASSsym = 6
+  inline val MODULEsym = 7
+  inline val VALsym = 8
+  inline val EXTref = 9
+  inline val EXTMODCLASSref = 10
+  inline val NOtpe = 11
+  inline val NOPREFIXtpe = 12
+  inline val THIStpe = 13
+  inline val SINGLEtpe = 14
+  inline val CONSTANTtpe = 15
+  inline val TYPEREFtpe = 16
+  inline val TYPEBOUNDStpe = 17
+  inline val REFINEDtpe = 18
+  inline val CLASSINFOtpe = 19
+  inline val METHODtpe = 20
+  inline val POLYtpe = 21
+  inline val IMPLICITMETHODtpe = 22    // no longer generated
 
-  final val LITERAL = 23   // base line for literals
-  final val LITERALunit = 24
-  final val LITERALboolean = 25
-  final val LITERALbyte = 26
-  final val LITERALshort = 27
-  final val LITERALchar = 28
-  final val LITERALint = 29
-  final val LITERALlong = 30
-  final val LITERALfloat = 31
-  final val LITERALdouble = 32
-  final val LITERALstring = 33
-  final val LITERALnull = 34
-  final val LITERALclass = 35
-  final val LITERALenum = 36
-  final val SYMANNOT = 40
-  final val CHILDREN = 41
-  final val ANNOTATEDtpe = 42
-  final val ANNOTINFO = 43
-  final val ANNOTARGARRAY = 44
+  inline val LITERAL = 23   // base line for literals
+  inline val LITERALunit = 24
+  inline val LITERALboolean = 25
+  inline val LITERALbyte = 26
+  inline val LITERALshort = 27
+  inline val LITERALchar = 28
+  inline val LITERALint = 29
+  inline val LITERALlong = 30
+  inline val LITERALfloat = 31
+  inline val LITERALdouble = 32
+  inline val LITERALstring = 33
+  inline val LITERALnull = 34
+  inline val LITERALclass = 35
+  inline val LITERALenum = 36
+  inline val SYMANNOT = 40
+  inline val CHILDREN = 41
+  inline val ANNOTATEDtpe = 42
+  inline val ANNOTINFO = 43
+  inline val ANNOTARGARRAY = 44
 
-  final val SUPERtpe = 46
-  final val DEBRUIJNINDEXtpe = 47   // no longer generated
-  final val EXISTENTIALtpe = 48
+  inline val SUPERtpe = 46
+  inline val DEBRUIJNINDEXtpe = 47   // no longer generated
+  inline val EXISTENTIALtpe = 48
 
-  final val TREE = 49      // prefix code that means a tree is coming
-    final val EMPTYtree = 1
-    final val PACKAGEtree = 2
-    final val CLASStree = 3
-    final val MODULEtree = 4
-    final val VALDEFtree = 5
-    final val DEFDEFtree = 6
-    final val TYPEDEFtree = 7
-    final val LABELtree = 8
-    final val IMPORTtree = 9
-    final val DOCDEFtree = 11
-    final val TEMPLATEtree = 12
-    final val BLOCKtree = 13
-    final val CASEtree = 14
+  inline val TREE = 49      // prefix code that means a tree is coming
+    inline val EMPTYtree = 1
+    inline val PACKAGEtree = 2
+    inline val CLASStree = 3
+    inline val MODULEtree = 4
+    inline val VALDEFtree = 5
+    inline val DEFDEFtree = 6
+    inline val TYPEDEFtree = 7
+    inline val LABELtree = 8
+    inline val IMPORTtree = 9
+    inline val DOCDEFtree = 11
+    inline val TEMPLATEtree = 12
+    inline val BLOCKtree = 13
+    inline val CASEtree = 14
     // This node type has been removed.
-    // final val SEQUENCEtree = 15
-    final val ALTERNATIVEtree = 16
-    final val STARtree = 17
-    final val BINDtree = 18
-    final val UNAPPLYtree = 19
-    final val ARRAYVALUEtree = 20
-    final val FUNCTIONtree = 21
-    final val ASSIGNtree = 22
-    final val IFtree = 23
-    final val MATCHtree = 24
-    final val RETURNtree = 25
-    final val TREtree = 26
-    final val THROWtree = 27
-    final val NEWtree = 28
-    final val TYPEDtree = 29
-    final val TYPEAPPLYtree = 30
-    final val APPLYtree = 31
-    final val APPLYDYNAMICtree = 32
-    final val SUPERtree = 33
-    final val THIStree = 34
-    final val SELECTtree = 35
-    final val IDENTtree = 36
-    final val LITERALtree = 37
-    final val TYPEtree = 38
-    final val ANNOTATEDtree = 39
-    final val SINGLETONTYPEtree = 40
-    final val SELECTFROMTYPEtree = 41
-    final val COMPOUNDTYPEtree = 42
-    final val APPLIEDTYPEtree = 43
-    final val TYPEBOUNDStree = 44
-    final val EXISTENTIALTYPEtree = 45
+    // inline val SEQUENCEtree = 15
+    inline val ALTERNATIVEtree = 16
+    inline val STARtree = 17
+    inline val BINDtree = 18
+    inline val UNAPPLYtree = 19
+    inline val ARRAYVALUEtree = 20
+    inline val FUNCTIONtree = 21
+    inline val ASSIGNtree = 22
+    inline val IFtree = 23
+    inline val MATCHtree = 24
+    inline val RETURNtree = 25
+    inline val TREtree = 26
+    inline val THROWtree = 27
+    inline val NEWtree = 28
+    inline val TYPEDtree = 29
+    inline val TYPEAPPLYtree = 30
+    inline val APPLYtree = 31
+    inline val APPLYDYNAMICtree = 32
+    inline val SUPERtree = 33
+    inline val THIStree = 34
+    inline val SELECTtree = 35
+    inline val IDENTtree = 36
+    inline val LITERALtree = 37
+    inline val TYPEtree = 38
+    inline val ANNOTATEDtree = 39
+    inline val SINGLETONTYPEtree = 40
+    inline val SELECTFROMTYPEtree = 41
+    inline val COMPOUNDTYPEtree = 42
+    inline val APPLIEDTYPEtree = 43
+    inline val TYPEBOUNDStree = 44
+    inline val EXISTENTIALTYPEtree = 45
 
-  final val MODIFIERS = 50
+  inline val MODIFIERS = 50
 
-  final val firstSymTag = NONEsym
-  final val lastSymTag = VALsym
-  final val lastExtSymTag = EXTMODCLASSref
+  inline val firstSymTag = NONEsym
+  inline val lastSymTag = VALsym
+  inline val lastExtSymTag = EXTMODCLASSref
 }
 
 
   //The following two are no longer accurate, because ANNOTATEDtpe,
   //SUPERtpe, ... are not in the same range as the other types
-  //final val firstTypeTag = NOtpe
-  //final val lastTypeTag = POLYtpe
+  //inline val firstTypeTag = NOtpe
+  //inline val lastTypeTag = POLYtpe
diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala
index 5dde517c4864..f43735d16630 100644
--- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala
+++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala
@@ -29,7 +29,7 @@ object Scala2Erasure:
    *  these issues because they get flattened into a single RefinedType with
    *  three parents, cf `flattenedParents`.
    *
-   *  See sbt-dotty/sbt-test/scala2-compat/erasure/changes/Main.scala for examples.
+   *  See sbt-test/scala2-compat/erasure/changes/Main.scala for examples.
    *
    *  @throws TypeError if this type is unsupported.
    */
@@ -215,7 +215,7 @@ object Scala2Erasure:
    *  is only specified for class symbols. Therefore, the accuracy of this
    *  method cannot be guaranteed, the best we can do is make sure it works on
    *  as many test cases as possible which can be run from sbt using:
-   *  > sbt-dotty/scripted scala2-compat/erasure
+   *  > sbt-test/scripted scala2-compat/erasure
    *
    *  The body of this method is made to look as much as the Scala 2 version as
    *  possible to make them easier to compare, cf:
diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Flags.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Flags.scala
index 5749706270c9..6cf08b3384d9 100644
--- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Flags.scala
+++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Flags.scala
@@ -12,96 +12,96 @@ package unpickleScala2
 /** Scala2 flags, adapted from https://github.com/scala/scala/blob/2.11.x/src/reflect/scala/reflect/internal/Flags.scala
  */
 object Scala2Flags {
-  final val IMPLICIT      = 1 << 9
-  final val FINAL         = 1 << 5    // May not be overridden. Note that java final implies much more than scala final.
-  final val PRIVATE       = 1 << 2
-  final val PROTECTED     = 1 << 0
+  inline val IMPLICIT      = 1 << 9
+  inline val FINAL         = 1 << 5    // May not be overridden. Note that java final implies much more than scala final.
+  inline val PRIVATE       = 1 << 2
+  inline val PROTECTED     = 1 << 0
 
-  final val SEALED        = 1 << 10
-  final val OVERRIDE      = 1 << 1
-  final val CASE          = 1 << 11
-  final val ABSTRACT      = 1 << 3        // abstract class, or used in conjunction with abstract override.
+  inline val SEALED        = 1 << 10
+  inline val OVERRIDE      = 1 << 1
+  inline val CASE          = 1 << 11
+  inline val ABSTRACT      = 1 << 3        // abstract class, or used in conjunction with abstract override.
                                           // Note difference to DEFERRED!
-  final val DEFERRED      = 1 << 4        // was `abstract' for members | trait is virtual
-  final val INTERFACE     = 1 << 7        // symbol is an interface (i.e. a trait which defines only abstract methods)
-  final val MUTABLE       = 1 << 12       // symbol is a mutable variable.
-  final val PARAM         = 1 << 13       // symbol is a (value or type) parameter to a method
-  final val MACRO         = 1 << 15       // symbol is a macro definition
+  inline val DEFERRED      = 1 << 4        // was `abstract' for members | trait is virtual
+  inline val INTERFACE     = 1 << 7        // symbol is an interface (i.e. a trait which defines only abstract methods)
+  inline val MUTABLE       = 1 << 12       // symbol is a mutable variable.
+  inline val PARAM         = 1 << 13       // symbol is a (value or type) parameter to a method
+  inline val MACRO         = 1 << 15       // symbol is a macro definition
 
-  final val COVARIANT     = 1 << 16       // symbol is a covariant type variable
-  final val BYNAMEPARAM   = 1 << 16       // parameter is by name
-  final val CONTRAVARIANT = 1 << 17       // symbol is a contravariant type variable
-  final val ABSOVERRIDE   = 1 << 18       // combination of abstract & override
-  final val LOCAL         = 1 << 19       // symbol is local to current class (i.e. private[this] or protected
+  inline val COVARIANT     = 1 << 16       // symbol is a covariant type variable
+  inline val BYNAMEPARAM   = 1 << 16       // parameter is by name
+  inline val CONTRAVARIANT = 1 << 17       // symbol is a contravariant type variable
+  inline val ABSOVERRIDE   = 1 << 18       // combination of abstract & override
+  inline val LOCAL         = 1 << 19       // symbol is local to current class (i.e. private[this] or protected
                                           // pre: PRIVATE or PROTECTED are also set
-  final val JAVA          = 1 << 20       // symbol was defined by a Java class
-  final val STATIC        = 1 << 23       // static field, method or class
-  final val CASEACCESSOR  = 1 << 24       // symbol is a case parameter (or its accessor, or a GADT skolem)
-  final val TRAIT         = 1 << 25       // symbol is a trait
-  final val DEFAULTPARAM  = 1 << 25       // the parameter has a default value
-  final val PARAMACCESSOR = 1 << 29       // for field definitions generated for primary constructor
+  inline val JAVA          = 1 << 20       // symbol was defined by a Java class
+  inline val STATIC        = 1 << 23       // static field, method or class
+  inline val CASEACCESSOR  = 1 << 24       // symbol is a case parameter (or its accessor, or a GADT skolem)
+  inline val TRAIT         = 1 << 25       // symbol is a trait
+  inline val DEFAULTPARAM  = 1 << 25       // the parameter has a default value
+  inline val PARAMACCESSOR = 1 << 29       // for field definitions generated for primary constructor
                                           //   parameters (no matter if it's a 'val' parameter or not)
                                           // for parameters of a primary constructor ('val' or not)
                                           // for the accessor methods generated for 'val' or 'var' parameters
-  final val LAZY          = 1L << 31      // symbol is a lazy val. can't have MUTABLE unless transformed by typer
-  final val PRESUPER      = 1L << 37      // value is evaluated before super call
-  final val DEFAULTINIT   = 1L << 41      // symbol is initialized to the default value: used by -Xcheckinit
-  final val ARTIFACT      = 1L << 46      // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode
+  inline val LAZY          = 1L << 31      // symbol is a lazy val. can't have MUTABLE unless transformed by typer
+  inline val PRESUPER      = 1L << 37      // value is evaluated before super call
+  inline val DEFAULTINIT   = 1L << 41      // symbol is initialized to the default value: used by -Xcheckinit
+  inline val ARTIFACT      = 1L << 46      // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode
                                           // to see which symbols are marked as ARTIFACT, see scaladocs for FlagValues.ARTIFACT
-  final val DEFAULTMETHOD = 1L << 47      // symbol is a java default method
-  final val ENUM          = 1L << 48      // symbol is an enum
+  inline val DEFAULTMETHOD = 1L << 47      // symbol is a java default method
+  inline val ENUM          = 1L << 48      // symbol is an enum
 
-  final val PrivateLocal   = PRIVATE | LOCAL
-  final val ProtectedLocal = PROTECTED | LOCAL
-  final val AccessFlags    = PRIVATE | PROTECTED | LOCAL
+  inline val PrivateLocal   = PRIVATE | LOCAL
+  inline val ProtectedLocal = PROTECTED | LOCAL
+  inline val AccessFlags    = PRIVATE | PROTECTED | LOCAL
 
-  final val METHOD        = 1 << 6        // a method
-  final val MODULE        = 1 << 8        // symbol is module or class implementing a module
-  final val PACKAGE       = 1 << 14       // symbol is a java package
+  inline val METHOD        = 1 << 6        // a method
+  inline val MODULE        = 1 << 8        // symbol is module or class implementing a module
+  inline val PACKAGE       = 1 << 14       // symbol is a java package
 
-  final val CAPTURED      = 1 << 16       // variable is accessed from nested function.  Set by LambdaLift.
-  final val LABEL         = 1 << 17       // method symbol is a label. Set by TailCall
-  final val INCONSTRUCTOR = 1 << 17       // class symbol is defined in this/superclass constructor.
-  final val SYNTHETIC     = 1 << 21       // symbol is compiler-generated (compare with ARTIFACT)
-  final val STABLE        = 1 << 22       // functions that are assumed to be stable
+  inline val CAPTURED      = 1 << 16       // variable is accessed from nested function.  Set by LambdaLift.
+  inline val LABEL         = 1 << 17       // method symbol is a label. Set by TailCall
+  inline val INCONSTRUCTOR = 1 << 17       // class symbol is defined in this/superclass constructor.
+  inline val SYNTHETIC     = 1 << 21       // symbol is compiler-generated (compare with ARTIFACT)
+  inline val STABLE        = 1 << 22       // functions that are assumed to be stable
                                           // (typically, access methods for valdefs)
                                           // or classes that do not contain abstract types.
-  final val BRIDGE        = 1 << 26       // function is a bridge method. Set by Erasure
-  final val ACCESSOR      = 1 << 27       // a value or variable accessor (getter or setter)
+  inline val BRIDGE        = 1 << 26       // function is a bridge method. Set by Erasure
+  inline val ACCESSOR      = 1 << 27       // a value or variable accessor (getter or setter)
 
-  final val SUPERACCESSOR = 1 << 28       // a super accessor
-  final val MODULEVAR     = 1 << 30       // for variables: is the variable caching a module value
+  inline val SUPERACCESSOR = 1 << 28       // a super accessor
+  inline val MODULEVAR     = 1 << 30       // for variables: is the variable caching a module value
 
-  final val IS_ERROR      = 1L << 32      // symbol is an error symbol
-  final val OVERLOADED    = 1L << 33      // symbol is overloaded
-  final val LIFTED        = 1L << 34      // class has been lifted out to package level
+  inline val IS_ERROR      = 1L << 32      // symbol is an error symbol
+  inline val OVERLOADED    = 1L << 33      // symbol is overloaded
+  inline val LIFTED        = 1L << 34      // class has been lifted out to package level
                                           // local value has been lifted out to class level
                                           // todo: make LIFTED = latePRIVATE?
-  final val MIXEDIN       = 1L << 35      // term member has been mixed in
-  final val EXISTENTIAL   = 1L << 35      // type is an existential parameter or skolem
-  final val EXPANDEDNAME  = 1L << 36      // name has been expanded with class suffix
-  final val TRANS_FLAG    = 1L << 38      // transient flag guaranteed to be reset after each phase.
+  inline val MIXEDIN       = 1L << 35      // term member has been mixed in
+  inline val EXISTENTIAL   = 1L << 35      // type is an existential parameter or skolem
+  inline val EXPANDEDNAME  = 1L << 36      // name has been expanded with class suffix
+  inline val TRANS_FLAG    = 1L << 38      // transient flag guaranteed to be reset after each phase.
 
-  final val LOCKED        = 1L << 39      // temporary flag to catch cyclic dependencies
-  final val SPECIALIZED   = 1L << 40      // symbol is a generated specialized member
-  final val VBRIDGE       = 1L << 42      // symbol is a varargs bridge
+  inline val LOCKED        = 1L << 39      // temporary flag to catch cyclic dependencies
+  inline val SPECIALIZED   = 1L << 40      // symbol is a generated specialized member
+  inline val VBRIDGE       = 1L << 42      // symbol is a varargs bridge
 
-  final val VARARGS       = 1L << 43      // symbol is a Java-style varargs method
-  final val TRIEDCOOKING  = 1L << 44      // `Cooking` has been tried on this symbol
+  inline val VARARGS       = 1L << 43      // symbol is a Java-style varargs method
+  inline val TRIEDCOOKING  = 1L << 44      // `Cooking` has been tried on this symbol
                                           // A Java method's type is `cooked` by transforming raw types to existentials
 
-  final val SYNCHRONIZED  = 1L << 45      // symbol is a method which should be marked ACC_SYNCHRONIZED
+  inline val SYNCHRONIZED  = 1L << 45      // symbol is a method which should be marked ACC_SYNCHRONIZED
 
-  final val IMPLICIT_PKL   = (1 << 0)
-  final val FINAL_PKL      = (1 << 1)
-  final val PRIVATE_PKL    = (1 << 2)
-  final val PROTECTED_PKL  = (1 << 3)
-  final val SEALED_PKL     = (1 << 4)
-  final val OVERRIDE_PKL   = (1 << 5)
-  final val CASE_PKL       = (1 << 6)
-  final val ABSTRACT_PKL   = (1 << 7)
-  final val DEFERRED_PKL   = (1 << 8)
-  final val METHOD_PKL     = (1 << 9)
-  final val MODULE_PKL     = (1 << 10)
-  final val INTERFACE_PKL  = (1 << 11)
+  inline val IMPLICIT_PKL   = (1 << 0)
+  inline val FINAL_PKL      = (1 << 1)
+  inline val PRIVATE_PKL    = (1 << 2)
+  inline val PROTECTED_PKL  = (1 << 3)
+  inline val SEALED_PKL     = (1 << 4)
+  inline val OVERRIDE_PKL   = (1 << 5)
+  inline val CASE_PKL       = (1 << 6)
+  inline val ABSTRACT_PKL   = (1 << 7)
+  inline val DEFERRED_PKL   = (1 << 8)
+  inline val METHOD_PKL     = (1 << 9)
+  inline val MODULE_PKL     = (1 << 10)
+  inline val INTERFACE_PKL  = (1 << 11)
 }
diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
index 54a7ad91e434..b5b8c4715ebc 100644
--- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
+++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
@@ -233,7 +233,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
   }
 
   /** The `decls` scope associated with given symbol */
-  protected def symScope(sym: Symbol): Scope = symScopes.getOrElseUpdate(sym, newScope)
+  protected def symScope(sym: Symbol): Scope = symScopes.getOrElseUpdate(sym, newScope(0))
 
   /** Does entry represent an (internal) symbol */
   protected def isSymbolEntry(i: Int)(using Context): Boolean = {
@@ -434,12 +434,16 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
     var name = at(nameref, () => readName()(using ctx))
     val owner = readSymbolRef()
 
-    if (name eq nme.getClass_) && defn.hasProblematicGetClass(owner.name) then
+    var flags = unpickleScalaFlags(readLongNat(), name.isTypeName)
+
+    if (name eq nme.getClass_) && defn.hasProblematicGetClass(owner.name)
+       // Scala 2 sometimes pickles the same type parameter symbol multiple times
+       // (see i11173 for an example), but we should only unpickle it once.
+       || tag == TYPEsym && flags.is(TypeParam) && symScope(owner).lookup(name.asTypeName).exists
+    then
       // skip this member
       return NoSymbol
 
-    var flags = unpickleScalaFlags(readLongNat(), name.isTypeName)
-
     name = name.adjustIfModuleClass(flags)
     if (flags.is(Method))
       name =
@@ -491,7 +495,27 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
         sym.setFlag(Scala2x)
       if (!(isRefinementClass(sym) || isUnpickleRoot(sym) || sym.is(Scala2Existential))) {
         val owner = sym.owner
-        if (owner.isClass)
+        val canEnter =
+          owner.isClass &&
+          (!sym.is(TypeParam) ||
+            owner.infoOrCompleter.match
+              case completer: ClassUnpickler =>
+                // Type parameters seen after class initialization are not
+                // actually type parameters of the current class but of some
+                // external class because of the bizarre way in which Scala 2
+                // pickles them (see
+                // https://github.com/scala/scala/blob/aa31e3e6bb945f5d69740d379ede1cd514904109/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala#L181-L197).
+                // Make sure we don't enter them in the class otherwise the
+                // compiler will get very confused (testcase in sbt-test/scala2-compat/i12641).
+                // Note: I don't actually know if these stray type parameters
+                // can also show up before initialization, if that's the case
+                // we'll need to study more closely how Scala 2 handles type
+                // parameter unpickling and try to emulate it.
+                !completer.isInitialized
+              case _ =>
+                true)
+
+        if (canEnter)
           owner.asClass.enter(sym, symScope(owner))
       }
       sym
@@ -621,23 +645,30 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
   object localMemberUnpickler extends LocalUnpickler
 
   class ClassUnpickler(infoRef: Int) extends LocalUnpickler with TypeParamsCompleter {
-    private def readTypeParams()(using Context): List[TypeSymbol] = {
+    private var myTypeParams: List[TypeSymbol] = null
+
+    private def readTypeParams()(using Context): Unit = {
       val tag = readByte()
       val end = readNat() + readIndex
-      if (tag == POLYtpe) {
-        val unusedRestpeRef = readNat()
-        until(end, () => readSymbolRef()(using ctx)).asInstanceOf[List[TypeSymbol]]
-      }
-      else Nil
+      myTypeParams =
+        if (tag == POLYtpe) {
+          val unusedRestpeRef = readNat()
+          until(end, () => readSymbolRef()(using ctx)).asInstanceOf[List[TypeSymbol]]
+        } else Nil
     }
-    private def loadTypeParams(using Context) =
+    private def loadTypeParams()(using Context) =
       atReadPos(index(infoRef), () => readTypeParams()(using ctx))
 
+    /** Have the type params of this class already been unpickled? */
+    def isInitialized: Boolean = myTypeParams ne null
+
     /** Force reading type params early, we need them in setClassInfo of subclasses. */
-    def init()(using Context): List[TypeSymbol] = loadTypeParams
+    def init()(using Context): List[TypeSymbol] =
+      if !isInitialized then loadTypeParams()
+      myTypeParams
 
     override def completerTypeParams(sym: Symbol)(using Context): List[TypeSymbol] =
-      loadTypeParams
+      init()
   }
 
   def rootClassUnpickler(start: Coord, cls: Symbol, module: Symbol, infoRef: Int): ClassUnpickler =
@@ -669,7 +700,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
     }
     // Cannot use standard `existsPart` method because it calls `lookupRefined`
     // which can cause CyclicReference errors.
-    val isBoundAccumulator = new ExistsAccumulator(isBound, stopAtStatic = true, forceLazy = true):
+    val isBoundAccumulator = new ExistsAccumulator(isBound, StopAt.Static, forceLazy = true):
       override def foldOver(x: Boolean, tp: Type): Boolean = tp match
         case tp: TypeRef => applyToPrefix(x, tp)
         case _ => super.foldOver(x, tp)
@@ -708,7 +739,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
       val anyTypes = boundSyms map (_ => defn.AnyType)
       val boundBounds = boundSyms map (_.info.bounds.hi)
       val tp2 = tp1.subst(boundSyms, boundBounds).subst(boundSyms, anyTypes)
-      report.warning(FailureToEliminateExistential(tp, tp1, tp2, boundSyms))
+      report.warning(FailureToEliminateExistential(tp, tp1, tp2, boundSyms, classRoot.symbol))
       tp2
     }
     else tp1
@@ -816,6 +847,12 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
         val maker = MethodType.companion(
           isImplicit = tag == IMPLICITMETHODtpe || params.nonEmpty && params.head.is(Implicit))
         val result = maker.fromSymbols(params, restpe)
+        result.resType match
+          case restpe1: MethodType if restpe1 ne restpe =>
+            val prevResParams = paramsOfMethodType.remove(restpe)
+            if prevResParams != null then
+              paramsOfMethodType.put(restpe1, prevResParams)
+          case _ =>
         if params.nonEmpty then paramsOfMethodType.put(result, params)
         result
       case POLYtpe =>
@@ -922,33 +959,33 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
   /** Read an annotation argument, which is pickled either
    *  as a Constant or a Tree.
    */
-  protected def readAnnotArg(i: Int)(using Context): Tree = bytes(index(i)) match {
+  protected def readAnnotArg(i: Int)(using Context): untpd.Tree = untpd.TypedSplice(bytes(index(i)) match
     case TREE => at(i, () => readTree())
     case _ => at(i, () =>
       readConstant() match
         case c: Constant => Literal(c)
         case tp: TermRef => ref(tp)
     )
-  }
+  )
 
   /** Read a ClassfileAnnotArg (argument to a classfile annotation)
    */
-  private def readArrayAnnotArg()(using Context): Tree = {
+  private def readArrayAnnotArg()(using Context): untpd.Tree = {
     readByte() // skip the `annotargarray` tag
     val end = readNat() + readIndex
     // array elements are trees representing instances of scala.annotation.Annotation
-    SeqLiteral(
+    untpd.JavaSeqLiteral(
       until(end, () => readClassfileAnnotArg(readNat())),
-      TypeTree(defn.AnnotationClass.typeRef))
+      untpd.TypeTree())
   }
 
-  private def readAnnotInfoArg()(using Context): Tree = {
+  private def readAnnotInfoArg()(using Context): untpd.Tree = untpd.TypedSplice {
     readByte() // skip the `annotinfo` tag
     val end = readNat() + readIndex
     readAnnotationContents(end)
   }
 
-  protected def readClassfileAnnotArg(i: Int)(using Context): Tree = bytes(index(i)) match {
+  protected def readClassfileAnnotArg(i: Int)(using Context): untpd.Tree = bytes(index(i)) match {
     case ANNOTINFO => at(i, () => readAnnotInfoArg())
     case ANNOTARGARRAY => at(i, () => readArrayAnnotArg())
     case _ => readAnnotArg(i)
@@ -960,7 +997,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
   protected def readAnnotationContents(end: Int)(using Context): Tree = {
     val atp = readTypeRef()
     val args = {
-      val t = new ListBuffer[Tree]
+      val t = new ListBuffer[untpd.Tree]
 
       while (readIndex != end) {
         val argref = readNat()
@@ -968,14 +1005,14 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
           if (isNameEntry(argref)) {
             val name = at(argref, () => readName())
             val arg = readClassfileAnnotArg(readNat())
-            NamedArg(name.asTermName, arg)
+            untpd.NamedArg(name.asTermName, arg)
           }
           else readAnnotArg(argref)
         }
       }
       t.toList
     }
-    resolveConstructor(atp, args)
+    untpd.resolveConstructor(atp, args)
   }
 
   /** Read an annotation and as a side effect store it into
@@ -1007,9 +1044,9 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
    */
   protected def deferredAnnot(end: Int)(using Context): Annotation = {
     val start = readIndex
-    val atp = readTypeRef()
     val phase = ctx.phase
-    Annotation.deferred(atp.typeSymbol)(
+    Annotation.deferredSymAndTree(
+        atReadPos(start, () => atPhase(phase)(readTypeRef().typeSymbol)))(
         atReadPos(start, () => atPhase(phase)(readAnnotationContents(end))))
   }
 
@@ -1333,4 +1370,4 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
       case other =>
         errorBadSignature("expected an TypeDef (" + other + ")")
     }
-}
\ No newline at end of file
+}
diff --git a/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala b/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala
index 978ebb4416e3..55e6e54787a2 100644
--- a/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala
+++ b/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala
@@ -40,12 +40,8 @@ class DecompilationPrinter extends Phase {
 
   private def printToOutput(out: PrintStream)(using Context): Unit = {
     val unit = ctx.compilationUnit
-    if (ctx.settings.printTasty.value)
-      println(TastyPrinter.show(unit.pickled.head._2()))
-    else {
-      val unitFile = unit.source.toString.replace("\\", "/").replace(".class", ".tasty")
-      out.println(s"/** Decompiled from $unitFile */")
-      out.println(QuotesImpl.showDecompiledTree(unit.tpdTree))
-    }
+    val unitFile = unit.source.toString.replace("\\", "/").replace(".class", ".tasty")
+    out.println(s"/** Decompiled from $unitFile */")
+    out.println(QuotesImpl.showDecompiledTree(unit.tpdTree))
   }
 }
diff --git a/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala b/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala
index 3e8e0f5fb86d..2e3c67150d11 100644
--- a/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala
+++ b/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala
@@ -16,7 +16,8 @@ import scala.quoted.runtime.impl.QuotesImpl
 class IDEDecompilerDriver(val settings: List[String]) extends dotc.Driver {
 
   private val myInitCtx: Context = {
-    val rootCtx = initCtx.fresh.addMode(Mode.Interactive | Mode.ReadPositions | Mode.ReadComments)
+    val rootCtx = initCtx.fresh.addMode(Mode.Interactive | Mode.ReadPositions)
+    rootCtx.setSetting(rootCtx.settings.YreadComments, true)
     rootCtx.setSetting(rootCtx.settings.YretainTrees, true)
     rootCtx.setSetting(rootCtx.settings.fromTasty, true)
     val ctx = setup(settings.toArray :+ "dummy.scala", rootCtx).get._2
diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala
index 2d2d41ae50ed..1c8ba2359623 100644
--- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala
+++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala
@@ -4,6 +4,7 @@ package fromtasty
 
 import io.{JarArchive, AbstractFile, Path}
 import core.Contexts._
+import java.io.File
 
 class TASTYRun(comp: Compiler, ictx: Context) extends Run(comp, ictx) {
   override def compile(files: List[AbstractFile]): Unit = {
@@ -17,9 +18,10 @@ class TASTYRun(comp: Compiler, ictx: Context) extends Run(comp, ictx) {
     val classNames = files.flatMap { file =>
       file.extension match
         case "jar" =>
-          JarArchive.open(Path(file.path), create = false).iterator()
-            .filter(e => e.extension == "tasty" && !fromTastyIgnoreList(e.name))
-            .map(e => e.name.stripSuffix(".tasty").replace("/", "."))
+          JarArchive.open(Path(file.path), create = false).allFileNames()
+            .map(_.stripPrefix(File.separator)) // change paths from absolute to relative
+            .filter(e => Path.extension(e) == "tasty" && !fromTastyIgnoreList(e))
+            .map(e => e.stripSuffix(".tasty").replace(File.separator, "."))
             .toList
         case "tasty" => TastyFileUtil.getClassName(file)
         case _ =>
diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala
index bb552191fc36..1f26b50b6cce 100644
--- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala
+++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala
@@ -24,6 +24,7 @@ import dotty.tools.dotc.printing.Texts._
 import dotty.tools.dotc.util.{NameTransformer, NoSourcePosition, SourcePosition}
 
 import scala.collection.mutable
+import scala.util.control.NonFatal
 
 /**
  * One of the results of a completion query.
@@ -61,6 +62,8 @@ object Completion {
    */
   def completionMode(path: List[Tree], pos: SourcePosition): Mode =
     path match {
+      case Ident(_) :: Import(_, _) :: _ =>
+        Mode.Import
       case (ref: RefTree) :: _ =>
         if (ref.name.isTermName) Mode.Term
         else if (ref.name.isTypeName) Mode.Type
@@ -112,10 +115,14 @@ object Completion {
     val completer = new Completer(mode, prefix, pos)
 
     val completions = path match {
-        case Select(qual, _) :: _                              => completer.selectionCompletions(qual)
-        case Import(expr, _) :: _                              => completer.directMemberCompletions(expr)
-        case (_: untpd.ImportSelector) :: Import(expr, _) :: _ => completer.directMemberCompletions(expr)
-        case _                                                 => completer.scopeCompletions
+        // Ignore synthetic select from `This` because in code it was `Ident`
+        // See example in dotty.tools.languageserver.CompletionTest.syntheticThis
+        case Select(qual @ This(_), _) :: _ if qual.span.isSynthetic  => completer.scopeCompletions
+        case Select(qual, _) :: _           if qual.tpe.hasSimpleKind => completer.selectionCompletions(qual)
+        case Select(qual, _) :: _                                     => Map.empty
+        case Import(expr, _) :: _                                     => completer.directMemberCompletions(expr)
+        case (_: untpd.ImportSelector) :: Import(expr, _) :: _        => completer.directMemberCompletions(expr)
+        case _                                                        => completer.scopeCompletions
       }
 
     val describedCompletions = describeCompletions(completions)
@@ -197,14 +204,45 @@ object Completion {
 
       mappings.foreach { (name, denotss) =>
         val first = denotss.head
+
+        // import a.c
+        def isSingleImport =  denotss.length < 2
+        // import a.C
+        // locally {  import b.C }
+        def isImportedInDifferentScope =  first.ctx.scope ne denotss(1).ctx.scope
+        // import a.C
+        // import a.C
+        def isSameSymbolImportedDouble =  denotss.forall(_.denots == first.denots)
+
+        def isScalaPackage(scopedDenots: ScopedDenotations) =
+          scopedDenots.denots.exists(_.info.typeSymbol.owner == defn.ScalaPackageClass)
+
+        def isJavaLangPackage(scopedDenots: ScopedDenotations) =
+          scopedDenots.denots.exists(_.info.typeSymbol.owner == defn.JavaLangPackageClass)
+
+        // For example
+        // import java.lang.annotation
+        //    is shadowed by
+        // import scala.annotation
+        def isJavaLangAndScala =
+          try
+            denotss.forall(denots => isScalaPackage(denots) || isJavaLangPackage(denots))
+          catch
+            case NonFatal(_) => false
+
         denotss.find(!_.ctx.isImportContext) match {
           // most deeply nested member or local definition if not shadowed by an import
           case Some(local) if local.ctx.scope == first.ctx.scope =>
             resultMappings += name -> local.denots
 
-          // most deeply nested import if not shadowed by another import
-          case None if denotss.length < 2 || (denotss(1).ctx.scope ne first.ctx.scope) =>
+          case None if isSingleImport || isImportedInDifferentScope || isSameSymbolImportedDouble =>
             resultMappings += name -> first.denots
+          case None if isJavaLangAndScala =>
+            denotss.foreach{
+              denots =>
+                if isScalaPackage(denots) then
+                  resultMappings += name -> denots.denots
+            }
 
           case _ =>
         }
@@ -300,7 +338,8 @@ object Completion {
               case name: TermName if include(denot, name) => Some((denot, name))
               case _ => None
 
-        types.flatMap { tpe =>
+        types.flatMap { tp =>
+          val tpe = tp.widenExpr
           tpe.membersBasedOnFlags(required = ExtensionMethod, excluded = EmptyFlags)
             .collect { case DenotWithMatchingName(denot, name) => TermRef(tpe, denot.symbol) -> name }
         }
@@ -322,7 +361,7 @@ object Completion {
       val extMethodsFromImplicitScope = extractMemberExtensionMethods(implicitScopeCompanions)
 
       // 4. The reference is of the form r.m and the extension method is defined in some given instance in the implicit scope of the type of r.
-      val givensInImplicitScope = implicitScopeCompanions.flatMap(_.membersBasedOnFlags(required = Given, excluded = EmptyFlags)).map(_.info)
+      val givensInImplicitScope = implicitScopeCompanions.flatMap(_.membersBasedOnFlags(required = GivenVal, excluded = EmptyFlags)).map(_.info)
       val extMethodsFromGivensInImplicitScope = extractMemberExtensionMethods(givensInImplicitScope)
 
       val availableExtMethods = extMethodsFromGivensInImplicitScope ++ extMethodsFromImplicitScope ++ extMethodsFromGivensInScope ++ extMethodsInScope
diff --git a/compiler/src/dotty/tools/dotc/interactive/Interactive.scala b/compiler/src/dotty/tools/dotc/interactive/Interactive.scala
index aa2359f33eb2..cb41650c0b57 100644
--- a/compiler/src/dotty/tools/dotc/interactive/Interactive.scala
+++ b/compiler/src/dotty/tools/dotc/interactive/Interactive.scala
@@ -252,14 +252,14 @@ object Interactive {
    *  the tree closest enclosing `pos` and ends with an element of `trees`.
    */
   def pathTo(trees: List[SourceTree], pos: SourcePosition)(using Context): List[Tree] =
-    trees.find(_.pos.contains(pos)) match {
-      case Some(tree) => pathTo(tree.tree, pos.span)
-      case None => Nil
-    }
+    pathTo(trees.map(_.tree), pos.span)
 
   def pathTo(tree: Tree, span: Span)(using Context): List[Tree] =
-    if (tree.span.contains(span))
-      NavigateAST.pathTo(span, tree, skipZeroExtent = true)
+    pathTo(List(tree), span)
+
+  private def pathTo(trees: List[Tree], span: Span)(using Context): List[Tree] =
+    if (trees.exists(_.span.contains(span)))
+      NavigateAST.pathTo(span, trees, skipZeroExtent = true)
         .collect { case t: untpd.Tree => t }
         .dropWhile(!_.hasType).asInstanceOf[List[tpd.Tree]]
     else Nil
@@ -293,8 +293,10 @@ object Interactive {
             // in subsequent parameter sections
           localCtx
         case tree: MemberDef =>
-          assert(tree.symbol.exists)
-          outer.localContext(tree, tree.symbol)
+          if (tree.symbol.exists)
+            outer.localContext(tree, tree.symbol)
+          else
+            outer
         case tree @ Block(stats, expr) =>
           val localCtx = outer.fresh.setNewScope
           stats.foreach {
@@ -310,7 +312,7 @@ object Interactive {
           }
           localCtx
         case tree @ Template(constr, parents, self, _) =>
-          if ((constr :: self :: parents).contains(nested)) ctx
+          if ((constr :: self :: parents).contains(nested)) outer
           else contextOfStat(tree.body, nested, tree.symbol, outer.inClassContext(self.symbol))
         case _ =>
           outer
diff --git a/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala b/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala
index b3ef40dec8a3..38a93125a342 100644
--- a/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala
+++ b/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala
@@ -4,6 +4,7 @@ package interactive
 
 import core._
 import Phases._
+import parsing._
 import typer._
 
 class InteractiveCompiler extends Compiler {
@@ -12,7 +13,8 @@ class InteractiveCompiler extends Compiler {
   // This could be improved by reporting errors back to the IDE
   // after each phase group instead of waiting for the pipeline to finish.
   override def phases: List[List[Phase]] = List(
-    List(new FrontEnd),
+    List(new Parser),
+    List(new TyperPhase),
     List(new transform.SetRootTree),
     List(new transform.CookComments)
   )
diff --git a/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala b/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala
index aa45b2080ee9..c0584c51b842 100644
--- a/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala
+++ b/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala
@@ -29,9 +29,10 @@ class InteractiveDriver(val settings: List[String]) extends Driver {
   override def sourcesRequired: Boolean = false
 
   private val myInitCtx: Context = {
-    val rootCtx = initCtx.fresh.addMode(Mode.ReadPositions).addMode(Mode.Interactive).addMode(Mode.ReadComments)
+    val rootCtx = initCtx.fresh.addMode(Mode.ReadPositions).addMode(Mode.Interactive)
     rootCtx.setSetting(rootCtx.settings.YretainTrees, true)
     rootCtx.setSetting(rootCtx.settings.YcookComments, true)
+    rootCtx.setSetting(rootCtx.settings.YreadComments, true)
     val ctx = setup(settings.toArray, rootCtx) match
       case Some((_, ctx)) => ctx
       case None => rootCtx
@@ -166,6 +167,7 @@ class InteractiveDriver(val settings: List[String]) extends Driver {
       cleanup(t)
       myOpenedTrees(uri) = topLevelTrees(t, source)
       myCompilationUnits(uri) = unit
+      myCtx = myCtx.fresh.setPhase(myInitCtx.base.typerPhase)
 
       reporter.removeBufferedMessages
     }
diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala
index 44236c168b66..e5b0b24bdd2f 100644
--- a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala
@@ -359,19 +359,6 @@ object JavaParsers {
       * but instead we skip entire annotation silently.
       */
     def annotation(): Option[Tree] = {
-      object LiteralT:
-        def unapply(token: Token) = Option(token match {
-          case TRUE      => true
-          case FALSE     => false
-          case CHARLIT   => in.name(0)
-          case INTLIT    => in.intVal(false).toInt
-          case LONGLIT   => in.intVal(false)
-          case FLOATLIT  => in.floatVal(false).toFloat
-          case DOUBLELIT => in.floatVal(false)
-          case STRINGLIT => in.name.toString
-          case _         => null
-        }).map(Constant(_))
-
       def classOrId(): Tree =
         val id = qualId()
         if in.lookaheadToken == CLASS then
@@ -398,17 +385,17 @@ object JavaParsers {
         }
 
       def argValue(): Option[Tree] =
-        val tree = in.token match {
-          case LiteralT(c) =>
-            val tree = atSpan(in.offset)(Literal(c))
-            in.nextToken()
-            Some(tree)
-          case AT =>
-            in.nextToken()
-            annotation()
-          case IDENTIFIER => Some(classOrId())
-          case LBRACE => array()
-          case _ => None
+        val tree = tryConstant match {
+          case Some(c) =>
+            Some(atSpan(in.offset)(Literal(c)))
+          case _ => in.token match {
+            case AT =>
+              in.nextToken()
+              annotation()
+            case IDENTIFIER => Some(classOrId())
+            case LBRACE => array()
+            case _ => None
+          }
         }
         if in.token == COMMA || in.token == RBRACE || in.token == RPAREN then
           tree
@@ -716,11 +703,7 @@ object JavaParsers {
 
         in.nextToken() // EQUALS
         if (mods.is(Flags.JavaStatic) && mods.is(Flags.Final)) {
-          val neg = in.token match {
-            case MINUS | BANG => in.nextToken(); true
-            case _ => false
-          }
-          tryLiteral(neg).map(forConst).getOrElse(tpt1)
+          tryConstant.map(forConst).getOrElse(tpt1)
         }
         else tpt1
       }
@@ -751,35 +734,8 @@ object JavaParsers {
           makeTemplate(List(), statics, List(), false)).withMods((cdef.mods & Flags.RetainedModuleClassFlags).toTermFlags)
       }
 
-    def importCompanionObject(cdef: TypeDef): Tree =
-      Import(
-        Ident(cdef.name.toTermName).withSpan(NoSpan),
-        ImportSelector(Ident(nme.WILDCARD)) :: Nil)
-
-    // Importing the companion object members cannot be done uncritically: see
-    // ticket #2377 wherein a class contains two static inner classes, each of which
-    // has a static inner class called "Builder" - this results in an ambiguity error
-    // when each performs the import in the enclosing class's scope.
-    //
-    // To address this I moved the import Companion._ inside the class, as the first
-    // statement.  This should work without compromising the enclosing scope, but may (?)
-    // end up suffering from the same issues it does in scala - specifically that this
-    // leaves auxiliary constructors unable to access members of the companion object
-    // as unqualified identifiers.
-    def addCompanionObject(statics: List[Tree], cdef: TypeDef): List[Tree] = {
-      // if there are no statics we can use the original cdef, but we always
-      // create the companion so import A._ is not an error (see ticket #1700)
-      val cdefNew =
-        if (statics.isEmpty) cdef
-        else {
-          val template = cdef.rhs.asInstanceOf[Template]
-          cpy.TypeDef(cdef)(cdef.name,
-            cpy.Template(template)(body = importCompanionObject(cdef) :: template.body))
-              .withMods(cdef.mods)
-        }
-
-      List(makeCompanionObject(cdefNew, statics), cdefNew)
-    }
+    def addCompanionObject(statics: List[Tree], cdef: TypeDef): List[Tree] =
+      List(makeCompanionObject(cdef, statics), cdef)
 
     def importDecl(): List[Tree] = {
       val start = in.offset
@@ -901,16 +857,7 @@ object JavaParsers {
             members) ++= decls
         }
       }
-      def forwarders(sdef: Tree): List[Tree] = sdef match {
-        case TypeDef(name, _) if (parentToken == INTERFACE) =>
-          var rhs: Tree = Select(Ident(parentName.toTermName), name)
-          List(TypeDef(name, rhs).withMods(Modifiers(Flags.Protected)))
-        case _ =>
-          List()
-      }
-      val sdefs = statics.toList
-      val idefs = members.toList ::: (sdefs flatMap forwarders)
-      (sdefs, idefs)
+      (statics.toList, members.toList)
     }
     def annotationParents: List[Select] = List(
       scalaAnnotationDot(tpnme.Annotation),
@@ -1012,7 +959,11 @@ object JavaParsers {
       case _         => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree)
     }
 
-    def tryLiteral(negate: Boolean = false): Option[Constant] = {
+    def tryConstant: Option[Constant] = {
+      val negate = in.token match {
+        case MINUS | BANG => in.nextToken(); true
+        case _ => false
+      }
       val l = in.token match {
         case TRUE      => !negate
         case FALSE     => negate
@@ -1062,7 +1013,9 @@ object JavaParsers {
       }
       val unit = atSpan(start) { PackageDef(pkg, buf.toList) }
       accept(EOF)
-      unit
+      unit match
+        case PackageDef(Ident(nme.EMPTY_PACKAGE), Nil) => EmptyTree
+        case _ => unit
     }
   }
 
diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala b/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala
index 79dd549cdf0d..3e73b6d95adb 100644
--- a/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala
@@ -5,88 +5,88 @@ package parsing
 import collection.immutable.BitSet
 
 object JavaTokens extends TokensCommon {
-  final val minToken = EMPTY
+  inline val minToken = EMPTY
   final def maxToken: Int = DOUBLE
 
   final val javaOnlyKeywords: TokenSet = tokenRange(INSTANCEOF, ASSERT)
   final val sharedKeywords: BitSet = BitSet( IF, FOR, ELSE, THIS, NULL, NEW, SUPER, ABSTRACT, FINAL, PRIVATE, PROTECTED,
-    OVERRIDE, EXTENDS, TRUE, FALSE, CLASS, IMPORT, PACKAGE, DO, THROW, TRY, CATCH, FINALLY, WHILE, RETURN )
+    EXTENDS, TRUE, FALSE, CLASS, IMPORT, PACKAGE, DO, THROW, TRY, CATCH, FINALLY, WHILE, RETURN )
   final val primTypes: TokenSet = tokenRange(VOID, DOUBLE)
   final val keywords: BitSet = sharedKeywords | javaOnlyKeywords | primTypes
 
   /** keywords */
-  final val INSTANCEOF = 101;       enter(INSTANCEOF, "instanceof")
-  final val CONST = 102;            enter(CONST, "const")
+  inline val INSTANCEOF = 101;       enter(INSTANCEOF, "instanceof")
+  inline val CONST = 102;            enter(CONST, "const")
 
   /** templates */
-  final val INTERFACE = 105;        enter(INTERFACE, "interface")
-  final val ENUM = 106;             enter(ENUM, "enum")
-  final val IMPLEMENTS = 107;       enter(IMPLEMENTS, "implements")
+  inline val INTERFACE = 105;        enter(INTERFACE, "interface")
+  inline val ENUM = 106;             enter(ENUM, "enum")
+  inline val IMPLEMENTS = 107;       enter(IMPLEMENTS, "implements")
 
   /** modifiers */
-  final val PUBLIC = 110;           enter(PUBLIC, "public")
-  final val DEFAULT = 111;          enter(DEFAULT, "default")
-  final val STATIC = 112;           enter(STATIC, "static")
-  final val TRANSIENT = 113;        enter(TRANSIENT, "transient")
-  final val VOLATILE = 114;         enter(VOLATILE, "volatile")
-  final val SYNCHRONIZED = 115;     enter(SYNCHRONIZED, "synchronized")
-  final val NATIVE = 116;           enter(NATIVE, "native")
-  final val STRICTFP = 117;         enter(STRICTFP, "strictfp")
-  final val THROWS = 118;           enter(THROWS, "throws")
+  inline val PUBLIC = 110;           enter(PUBLIC, "public")
+  inline val DEFAULT = 111;          enter(DEFAULT, "default")
+  inline val STATIC = 112;           enter(STATIC, "static")
+  inline val TRANSIENT = 113;        enter(TRANSIENT, "transient")
+  inline val VOLATILE = 114;         enter(VOLATILE, "volatile")
+  inline val SYNCHRONIZED = 115;     enter(SYNCHRONIZED, "synchronized")
+  inline val NATIVE = 116;           enter(NATIVE, "native")
+  inline val STRICTFP = 117;         enter(STRICTFP, "strictfp")
+  inline val THROWS = 118;           enter(THROWS, "throws")
 
   /** control structures */
-  final val BREAK = 130;            enter(BREAK, "break")
-  final val CONTINUE = 131;         enter(CONTINUE, "continue")
-  final val GOTO = 132;             enter(GOTO, "goto")
-  final val SWITCH = 133;           enter(SWITCH, "switch")
-  final val ASSERT = 134;           enter(ASSERT, "assert")
+  inline val BREAK = 130;            enter(BREAK, "break")
+  inline val CONTINUE = 131;         enter(CONTINUE, "continue")
+  inline val GOTO = 132;             enter(GOTO, "goto")
+  inline val SWITCH = 133;           enter(SWITCH, "switch")
+  inline val ASSERT = 134;           enter(ASSERT, "assert")
 
   /** special symbols */
-  final val EQEQ = 140
-  final val BANGEQ = 141
-  final val LT = 142
-  final val GT = 143
-  final val LTEQ = 144
-  final val GTEQ = 145
-  final val BANG = 146
-  final val QMARK = 147
-  final val AMP = 148
-  final val BAR = 149
-  final val PLUS = 150
-  final val MINUS = 151
-  final val ASTERISK = 152
-  final val SLASH = 153
-  final val PERCENT = 154
-  final val HAT = 155
-  final val LTLT = 156
-  final val GTGT = 157
-  final val GTGTGT = 158
-  final val AMPAMP = 159
-  final val BARBAR = 160
-  final val PLUSPLUS = 161
-  final val MINUSMINUS = 162
-  final val TILDE = 163
-  final val DOTDOTDOT = 164
-  final val AMPEQ = 165
-  final val BAREQ = 166
-  final val PLUSEQ = 167
-  final val MINUSEQ = 168
-  final val ASTERISKEQ = 169
-  final val SLASHEQ = 170
-  final val PERCENTEQ = 171
-  final val HATEQ = 172
-  final val LTLTEQ = 173
-  final val GTGTEQ = 174
-  final val GTGTGTEQ = 175
+  inline val EQEQ = 140
+  inline val BANGEQ = 141
+  inline val LT = 142
+  inline val GT = 143
+  inline val LTEQ = 144
+  inline val GTEQ = 145
+  inline val BANG = 146
+  inline val QMARK = 147
+  inline val AMP = 148
+  inline val BAR = 149
+  inline val PLUS = 150
+  inline val MINUS = 151
+  inline val ASTERISK = 152
+  inline val SLASH = 153
+  inline val PERCENT = 154
+  inline val HAT = 155
+  inline val LTLT = 156
+  inline val GTGT = 157
+  inline val GTGTGT = 158
+  inline val AMPAMP = 159
+  inline val BARBAR = 160
+  inline val PLUSPLUS = 161
+  inline val MINUSMINUS = 162
+  inline val TILDE = 163
+  inline val DOTDOTDOT = 164
+  inline val AMPEQ = 165
+  inline val BAREQ = 166
+  inline val PLUSEQ = 167
+  inline val MINUSEQ = 168
+  inline val ASTERISKEQ = 169
+  inline val SLASHEQ = 170
+  inline val PERCENTEQ = 171
+  inline val HATEQ = 172
+  inline val LTLTEQ = 173
+  inline val GTGTEQ = 174
+  inline val GTGTGTEQ = 175
 
   /** primitive types */
-  final val VOID = 180;             enter(VOID, "void")
-  final val BOOLEAN = 181;          enter(BOOLEAN, "boolean")
-  final val BYTE = 182;             enter(BYTE, "byte")
-  final val SHORT = 183;            enter(SHORT, "short")
-  final val CHAR = 184;             enter(CHAR, "char")
-  final val INT = 185;              enter(INT, "int")
-  final val LONG = 186;             enter(LONG, "long")
-  final val FLOAT = 187;            enter(FLOAT, "float")
-  final val DOUBLE = 188;           enter(DOUBLE, "double")
+  inline val VOID = 180;             enter(VOID, "void")
+  inline val BOOLEAN = 181;          enter(BOOLEAN, "boolean")
+  inline val BYTE = 182;             enter(BYTE, "byte")
+  inline val SHORT = 183;            enter(SHORT, "short")
+  inline val CHAR = 184;             enter(CHAR, "char")
+  inline val INT = 185;              enter(INT, "int")
+  inline val LONG = 186;             enter(LONG, "long")
+  inline val FLOAT = 187;            enter(FLOAT, "float")
+  inline val DOUBLE = 188;           enter(DOUBLE, "double")
 }
diff --git a/compiler/src/dotty/tools/dotc/parsing/ParserPhase.scala b/compiler/src/dotty/tools/dotc/parsing/ParserPhase.scala
new file mode 100644
index 000000000000..d5918b5179ca
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/parsing/ParserPhase.scala
@@ -0,0 +1,63 @@
+package dotty.tools.dotc.parsing
+
+import dotty.tools.dotc.ast.Trees
+import dotty.tools.dotc.config.Config
+import dotty.tools.dotc.config.Printers.{ default, typr }
+import dotty.tools.dotc.core.Contexts.{ Context, ctx }
+import dotty.tools.dotc.core.Phases.Phase
+import dotty.tools.dotc.core.Symbols.defn
+import dotty.tools.dotc.typer.ImportInfo.withRootImports
+import dotty.tools.dotc.{ CompilationUnit, ast, report }
+import dotty.tools.dotc.util.{ NoSourcePosition, SourcePosition }
+import dotty.tools.dotc.util.Stats.record
+import dotty.tools.unsupported
+
+class Parser extends Phase {
+
+  override def phaseName: String = Parser.name
+  override def description: String = Parser.description
+
+  // We run TreeChecker only after type checking
+  override def isCheckable: Boolean = false
+
+  /** The position of the first XML literal encountered while parsing,
+   *  NoSourcePosition if there were no XML literals.
+   */
+  private[dotc] var firstXmlPos: SourcePosition = NoSourcePosition
+
+  def parse(using Context) = monitor("parser") {
+    val unit = ctx.compilationUnit
+    unit.untpdTree =
+      if (unit.isJava) new JavaParsers.JavaParser(unit.source).parse()
+      else {
+        val p = new Parsers.Parser(unit.source)
+        //  p.in.debugTokenStream = true
+        val tree = p.parse()
+        if (p.firstXmlPos.exists && !firstXmlPos.exists)
+          firstXmlPos = p.firstXmlPos
+        tree
+      }
+    if (Config.checkPositions)
+      unit.untpdTree.checkPos(nonOverlapping = !unit.isJava && !ctx.reporter.hasErrors)
+  }
+
+
+  override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = {
+    val unitContexts =
+      for unit <- units yield
+        report.inform(s"parsing ${unit.source}")
+        ctx.fresh.setCompilationUnit(unit).withRootImports
+
+    unitContexts.foreach(parse(using _))
+    record("parsedTrees", ast.Trees.ntrees)
+
+    unitContexts.map(_.compilationUnit)
+  }
+
+  def run(using Context): Unit = unsupported("run")
+}
+
+object Parser{
+  val name: String = "parser"
+  val description: String = "scan and parse sources"
+}
diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
index f5db937f8684..366ed459a6a0 100644
--- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
@@ -56,7 +56,8 @@ object Parsers {
   object StageKind {
     val None = 0
     val Quoted = 1
-    val Spliced = 2
+    val Spliced = 1 << 1
+    val QuotedPattern = 1 << 2
   }
 
   extension (buf: ListBuffer[Tree])
@@ -72,8 +73,9 @@ object Parsers {
     if source.isSelfContained then new ScriptParser(source)
     else new Parser(source)
 
-  private val InCase: Region => Region = Scanners.InCase.apply
-  private val InCond: Region => Region = Scanners.InBraces.apply
+  private val InCase: Region => Region = Scanners.InCase(_)
+  private val InCond: Region => Region = Scanners.InParens(LPAREN, _)
+  private val InFor : Region => Region = Scanners.InBraces(_)
 
   abstract class ParserCommon(val source: SourceFile)(using Context) {
 
@@ -132,11 +134,11 @@ object Parsers {
      *  and update lastErrorOffset.
      */
     def syntaxError(msg: Message, offset: Int = in.offset): Unit =
-      if (offset > lastErrorOffset) {
-        val length = if (offset == in.offset && in.name != null) in.name.show.length else 0
+      if offset > lastErrorOffset then
+        val length = if offset == in.offset && in.name != null then in.name.show.length else 0
         syntaxError(msg, Span(offset, offset + length))
         lastErrorOffset = in.offset
-      }
+      end if
 
     /** Unconditionally issue an error at given span, without
      *  updating lastErrorOffset.
@@ -167,6 +169,7 @@ object Parsers {
   class Parser(source: SourceFile)(using Context) extends ParserCommon(source) {
 
     val in: Scanner = new Scanner(source)
+    //in.debugTokenStream = true    // uncomment to see the token stream of the standard scanner, but not syntax highlighting
 
     /** This is the general parse entry point.
      *  Overridden by ScriptParser
@@ -233,15 +236,6 @@ object Parsers {
 
 /* ------------- ERROR HANDLING ------------------------------------------- */
 
-    /** The offset of the last time when a statement on a new line was definitely
-     *  encountered in the current scope or an outer scope.
-     */
-    private var lastStatOffset = -1
-
-    def setLastStatOffset(): Unit =
-      if (mustStartStat && in.isAfterLineEnd)
-        lastStatOffset = in.offset
-
     /** Is offset1 less or equally indented than offset2?
      *  This is the case if the characters between the preceding end-of-line and offset1
      *  are a prefix of the characters between the preceding end-of-line and offset2.
@@ -255,13 +249,14 @@ object Parsers {
 
     /** Skip on error to next safe point.
      */
-    protected def skip(): Unit =
+    protected def skip(stopAtComma: Boolean): Unit =
       val lastRegion = in.currentRegion
       def atStop =
         in.token == EOF
-        || skipStopTokens.contains(in.token) && (in.currentRegion eq lastRegion)
+        || ((stopAtComma && in.token == COMMA) || skipStopTokens.contains(in.token)) && (in.currentRegion eq lastRegion)
       while !atStop do
         in.nextToken()
+      lastErrorOffset = in.offset
 
     def warning(msg: Message, sourcePos: SourcePosition): Unit =
       report.warning(msg, sourcePos)
@@ -281,11 +276,9 @@ object Parsers {
      */
     def syntaxErrorOrIncomplete(msg: Message, offset: Int = in.offset): Unit =
       if (in.token == EOF) incompleteInputError(msg)
-      else {
+      else
         syntaxError(msg, offset)
-        skip()
-        lastErrorOffset = in.offset
-      }
+        skip(stopAtComma = true)
 
     /** Consume one token of the specified type, or
       * signal an error if it is not there.
@@ -320,22 +313,45 @@ object Parsers {
     def acceptStatSep(): Unit =
       if in.isNewLine then in.nextToken() else accept(SEMI)
 
-    def acceptStatSepUnlessAtEnd[T <: Tree](stats: ListBuffer[T], altEnd: Token = EOF): Unit =
-      def skipEmptyStats(): Unit =
-        while (in.token == SEMI || in.token == NEWLINE || in.token == NEWLINES) do in.nextToken()
-
-      in.observeOutdented()
-      in.token match
-        case SEMI | NEWLINE | NEWLINES =>
-          skipEmptyStats()
+    /** Parse statement separators and end markers. Ensure that there is at least
+     *  one statement separator unless the next token terminates a statement sequence.
+     *  @param   stats      the statements parsed so far
+     *  @param   noPrevStat true if there was no immediately preceding statement parsed
+     *  @param   what       a string indicating what kind of statement is parsed
+     *  @param   altEnd     a token that is also considered as a terminator of the statement
+     *                      sequence (the default `EOF` already assumes to terminate a statement
+     *                      sequence).
+     *  @return  true if the statement sequence continues, false if it terminates.
+     */
+    def statSepOrEnd[T <: Tree](stats: ListBuffer[T], noPrevStat: Boolean = false, what: String = "statement", altEnd: Token = EOF): Boolean =
+      def recur(sepSeen: Boolean, endSeen: Boolean): Boolean =
+        if isStatSep then
+          in.nextToken()
+          recur(true, endSeen)
+        else if in.token == END then
+          if endSeen then syntaxError("duplicate end marker")
           checkEndMarker(stats)
-          skipEmptyStats()
-        case `altEnd` =>
-        case _ =>
-          if !isStatSeqEnd then
-            syntaxError(i"end of statement expected but ${showToken(in.token)} found")
+          recur(sepSeen, endSeen = true)
+        else if isStatSeqEnd || in.token == altEnd then
+          false
+        else if sepSeen || endSeen then
+          true
+        else
+          val found = in.token
+          val statFollows = mustStartStatTokens.contains(found)
+          syntaxError(
+            if noPrevStat then IllegalStartOfStatement(what, isModifier, statFollows)
+            else i"end of $what expected but ${showToken(found)} found")
+          if mustStartStatTokens.contains(found) then
+            false // it's a statement that might be legal in an outer context
+          else
             in.nextToken() // needed to ensure progress; otherwise we might cycle forever
-            accept(SEMI)
+            skip(stopAtComma=false)
+            true
+
+      in.observeOutdented()
+      recur(false, false)
+    end statSepOrEnd
 
     def rewriteNotice(version: String = "3.0", additionalOption: String = "") = {
       val optionStr = if (additionalOption.isEmpty) "" else " " ++ additionalOption
@@ -427,6 +443,9 @@ object Parsers {
     def convertToParam(tree: Tree, mods: Modifiers, expected: String = "formal parameter"): ValDef = tree match {
       case id @ Ident(name) =>
         makeParameter(name.asTermName, TypeTree(), mods, isBackquoted = isBackquoted(id)).withSpan(tree.span)
+      case Typed(_, tpt: TypeBoundsTree) =>
+        syntaxError(s"not a legal $expected", tree.span)
+        makeParameter(nme.ERROR, tree, mods)
       case Typed(id @ Ident(name), tpt) =>
         makeParameter(name.asTermName, tpt, mods, isBackquoted = isBackquoted(id)).withSpan(tree.span)
       case Typed(Splice(Ident(name)), tpt) =>
@@ -522,22 +541,17 @@ object Parsers {
     def inBrackets[T](body: => T): T = enclosed(LBRACKET, body)
 
     def inBracesOrIndented[T](body: => T, rewriteWithColon: Boolean = false): T =
-      if (in.token == INDENT) {
-        val rewriteToBraces =
-          in.rewriteNoIndent &&
-          !testChars(in.lastOffset - 3, " =>") // braces are always optional after `=>` so none should be inserted
-        if (rewriteToBraces) indentedToBraces(body)
+      if in.token == INDENT then
+        val rewriteToBraces = in.rewriteNoIndent
+          && !testChars(in.lastOffset - 3, " =>") // braces are always optional after `=>` so none should be inserted
+        if rewriteToBraces then indentedToBraces(body)
         else enclosed(INDENT, body)
-      }
       else
-        if (in.rewriteToIndent) bracesToIndented(body, rewriteWithColon)
+        if in.rewriteToIndent then bracesToIndented(body, rewriteWithColon)
         else inBraces(body)
 
-    def inDefScopeBraces[T](body: => T, rewriteWithColon: Boolean = false): T = {
-      val saved = lastStatOffset
-      try inBracesOrIndented(body, rewriteWithColon)
-      finally lastStatOffset = saved
-    }
+    def inDefScopeBraces[T](body: => T, rewriteWithColon: Boolean = false): T =
+      inBracesOrIndented(body, rewriteWithColon)
 
     /** part { `separator` part }
      */
@@ -623,26 +637,18 @@ object Parsers {
       else idx
 
     /** Parse indentation region `body` and rewrite it to be in braces instead */
-    def indentedToBraces[T](body: => T): T = {
-      val enclRegion = in.currentRegion.enclosing
-      def indentWidth = enclRegion.indentWidth
+    def indentedToBraces[T](body: => T): T =
+      val enclRegion   = in.currentRegion.enclosing          // capture on entry
+      def indentWidth  = enclRegion.indentWidth
       val followsColon = testChar(in.lastOffset - 1, ':')
-      val startOpening =
-        if (followsColon)
-          if (testChar(in.lastOffset - 2, ' ')) in.lastOffset - 2
-          else in.lastOffset - 1
-        else in.lastOffset
-      val endOpening = in.lastOffset
-
-      val t = enclosed(INDENT, body)
 
       /** Is `expr` a tree that lacks a final `else`? Put such trees in `{...}` to make
        *  sure we don't accidentally merge them with a following `else`.
        */
       def isPartialIf(expr: Tree): Boolean = expr match {
         case If(_, _, EmptyTree) => true
-        case If(_, _, e) => isPartialIf(e)
-        case _ => false
+        case If(_, _, e)         => isPartialIf(e)
+        case _                   => false
       }
 
       /** Is `expr` a (possibly curried) function that has a multi-statement block
@@ -650,41 +656,43 @@ object Parsers {
        *  a `=>` in braces.
        */
       def isBlockFunction[T](expr: T): Boolean = expr match {
-        case Function(_, body) => isBlockFunction(body)
+        case Function(_, body)  => isBlockFunction(body)
         case Block(stats, expr) => stats.nonEmpty || isBlockFunction(expr)
-        case _ => false
+        case _                  => false
       }
 
       /** Start of first line after in.lastOffset that does not have a comment
        *  at indent width greater than the indent width of the closing brace.
        */
       def closingOffset(lineStart: Offset): Offset =
-        if (in.lineOffset >= 0 && lineStart >= in.lineOffset) in.lineOffset
-        else {
-          val candidate = source.nextLine(lineStart)
+        if in.lineOffset >= 0 && lineStart >= in.lineOffset then in.lineOffset
+        else
           val commentStart = skipBlanks(lineStart)
-          if (testChar(commentStart, '/') && indentWidth < in.indentWidth(commentStart))
-            closingOffset(source.nextLine(lineStart))
-          else
-            lineStart
-        }
+          if testChar(commentStart, '/') && indentWidth < in.indentWidth(commentStart)
+          then closingOffset(source.nextLine(lineStart))
+          else lineStart
 
       def needsBraces(t: Any): Boolean = t match {
         case Match(EmptyTree, _) => true
-        case Block(stats, expr) =>
-          stats.nonEmpty || needsBraces(expr)
-        case expr: Tree =>
-          followsColon ||
-          isPartialIf(expr) && in.token == ELSE ||
-          isBlockFunction(expr)
-        case _ => true
-      }
-      if (needsBraces(t)) {
+        case Block(stats, expr)  => stats.nonEmpty || needsBraces(expr)
+        case expr: Tree          => followsColon
+                                 || isPartialIf(expr) && in.token == ELSE
+                                 || isBlockFunction(expr)
+        case _                   => true
+      }
+      // begin indentedToBraces
+      val startOpening =
+        if followsColon then
+          if testChar(in.lastOffset - 2, ' ') then in.lastOffset - 2
+          else in.lastOffset - 1
+        else in.lastOffset
+      val endOpening = in.lastOffset
+      val t = enclosed(INDENT, body)
+      if needsBraces(t) then
         patch(source, Span(startOpening, endOpening), " {")
         patch(source, Span(closingOffset(source.nextLine(in.lastOffset))), indentWidth.toPrefix ++ "}\n")
-      }
       t
-    }
+    end indentedToBraces
 
     /** The region to eliminate when replacing an opening `(` or `{` that ends a line.
      *  The `(` or `{` is at in.offset.
@@ -873,7 +881,8 @@ object Parsers {
       val next = in.lookahead.token
       next == LBRACKET || next == LPAREN
 
-    /** Is current ident a `*`, and is it followed by a `)` or `, )`? */
+    /** Is current ident a `*`, and is it followed by a `)`, `, )`, `,EOF`? The latter two are not
+        syntactically valid, but we need to include them here for error recovery. */
     def followingIsVararg(): Boolean =
       in.isIdent(nme.raw.STAR) && {
         val lookahead = in.LookaheadScanner()
@@ -882,7 +891,7 @@ object Parsers {
         || lookahead.token == COMMA
            && {
              lookahead.nextToken()
-             lookahead.token == RPAREN
+             lookahead.token == RPAREN || lookahead.token == EOF
            }
       }
 
@@ -966,6 +975,10 @@ object Parsers {
     def ident(): TermName =
       if (isIdent) {
         val name = in.name
+        if name == nme.CONSTRUCTOR || name == nme.STATIC_CONSTRUCTOR then
+          report.error(
+            i"""Illegal backquoted identifier: `<init>` and `<clinit>` are forbidden""",
+            in.sourcePos())
         in.nextToken()
         name
       }
@@ -1150,10 +1163,11 @@ object Parsers {
           }
           else
             if !in.featureEnabled(Feature.symbolLiterals) then
+              val name = in.name // capture name (not `in`) in the warning message closure
               report.errorOrMigrationWarning(
-                em"""symbol literal '${in.name} is no longer supported,
-                    |use a string literal "${in.name}" or an application Symbol("${in.name}") instead,
-                    |or enclose in braces '{${in.name}} if you want a quoted expression.
+                em"""symbol literal '$name is no longer supported,
+                    |use a string literal "$name" or an application Symbol("$name") instead,
+                    |or enclose in braces '{$name} if you want a quoted expression.
                     |For now, you can also `import language.deprecated.symbolLiterals` to accept
                     |the idiom, but this possibility might no longer be available in the future.""",
                 in.sourcePos())
@@ -1254,7 +1268,7 @@ object Parsers {
     def possibleTemplateStart(isNew: Boolean = false): Unit =
       in.observeColonEOL()
       if in.token == COLONEOL then
-        if in.lookahead.isIdent(nme.end) then in.token = NEWLINE
+        if in.lookahead.token == END then in.token = NEWLINE
         else
           in.nextToken()
           if in.token != INDENT && in.token != LBRACE then
@@ -1264,7 +1278,13 @@ object Parsers {
 
     def checkEndMarker[T <: Tree](stats: ListBuffer[T]): Unit =
 
-      def matches(stat: Tree): Boolean = stat match
+      def updateSpanOfLast(last: T): Unit =
+        last match
+          case last: WithEndMarker[t] => last.withEndMarker()
+          case _ =>
+        last.span = last.span.withEnd(in.lastCharOffset)
+
+      def matches(stat: T): Boolean = stat match
         case stat: MemberDef if !stat.name.isEmpty =>
           if stat.name == nme.CONSTRUCTOR then in.token == THIS
           else in.isIdent && in.name == stat.name.toTermName
@@ -1272,8 +1292,6 @@ object Parsers {
           in.token == IDENTIFIER && in.name == nme.extension
         case PackageDef(pid: RefTree, _) =>
           in.isIdent && in.name == pid.name
-        case PatDef(_, IdPattern(id, _) :: Nil, _, _) =>
-         in.isIdent && in.name == id.name
         case stat: MemberDef if stat.mods.is(Given) => in.token == GIVEN
         case _: PatDef => in.token == VAL
         case _: If => in.token == IF
@@ -1284,25 +1302,23 @@ object Parsers {
         case _: (ForYield | ForDo) => in.token == FOR
         case _ => false
 
-      if isIdent(nme.end) then
-        val start = in.offset
-        val isEndMarker =
-          val endLine = source.offsetToLine(start)
-          val lookahead = in.LookaheadScanner()
-          lookahead.nextToken()
-          source.offsetToLine(lookahead.offset) == endLine
-          && endMarkerTokens.contains(in.token)
-          && {
-            lookahead.nextToken()
-            lookahead.token == EOF
-            || source.offsetToLine(lookahead.offset) > endLine
-          }
-        if isEndMarker then
-          in.nextToken()
-          if stats.isEmpty || !matches(stats.last) then
-            syntaxError("misaligned end marker", Span(start, in.lastCharOffset))
-          in.token = IDENTIFIER // Leaving it as the original token can confuse newline insertion
-          in.nextToken()
+      def endName = if in.token == IDENTIFIER then in.name.toString else tokenString(in.token)
+
+      def matchesAndSetEnd(last: T): Boolean =
+        val didMatch = matches(last)
+        if didMatch then
+          updateSpanOfLast(last)
+        didMatch
+
+      if in.token == END then
+        val start = in.skipToken()
+        if stats.isEmpty || !matchesAndSetEnd(stats.last) then
+          syntaxError("misaligned end marker", Span(start, in.lastCharOffset))
+        else if overlapsPatch(source, Span(start, start)) then
+          patch(source, Span(start, start), "")
+          patch(source, Span(start, in.lastCharOffset), s"} // end $endName")
+        in.token = IDENTIFIER // Leaving it as the original token can confuse newline insertion
+        in.nextToken()
     end checkEndMarker
 
 /* ------------- TYPES ------------------------------------------------------ */
@@ -1314,6 +1330,7 @@ object Parsers {
 
     private def isFunction(tree: Tree): Boolean = tree match {
       case Parens(tree1) => isFunction(tree1)
+      case Block(Nil, tree1) => isFunction(tree1)
       case _: Function => true
       case _ => false
     }
@@ -1357,7 +1374,7 @@ object Parsers {
             if imods.is(Given) && params.isEmpty then
               syntaxError("context function types require at least one parameter", paramSpan)
             new FunctionWithMods(params, t, imods)
-          else if ctx.settings.YkindProjector.value then
+          else if !ctx.settings.YkindProjector.isDefault then
             val (newParams :+ newT, tparams) = replaceKindProjectorPlaceholders(params :+ t)
 
             lambdaAbstract(tparams, Function(newParams, newT))
@@ -1448,20 +1465,31 @@ object Parsers {
       }
     }
 
-    private def makeKindProjectorTypeDef(name: TypeName): TypeDef =
-      TypeDef(name, WildcardTypeBoundsTree()).withFlags(Param)
+    private def makeKindProjectorTypeDef(name: TypeName): TypeDef = {
+      val isVarianceAnnotated = name.startsWith("+") || name.startsWith("-")
+      // We remove the variance marker from the name without passing along the specified variance at all
+      // The real variance will be inferred at a later stage but may contradict the variance specified,
+      // This is ok, because `-Ykind-projector` is for cross-compiling existing Scala 2 code, not for writing new code,
+      // we may assume that variance annotations have already been checked by the Scala 2 compiler.
+      val unannotatedName = if (isVarianceAnnotated) name.mapLast(_.drop(1)) else name
+      TypeDef(unannotatedName, WildcardTypeBoundsTree()).withFlags(Param)
+    }
 
     /** Replaces kind-projector's `*` in a list of types arguments with synthetic names,
      *  returning the new argument list and the synthetic type definitions.
      */
     private def replaceKindProjectorPlaceholders(params: List[Tree]): (List[Tree], List[TypeDef]) = {
       val tparams = new ListBuffer[TypeDef]
+      def addParam() = {
+        val name = WildcardParamName.fresh().toTypeName
+        tparams += makeKindProjectorTypeDef(name)
+        Ident(name)
+      }
 
+      val uscores = ctx.settings.YkindProjector.value == "underscores"
       val newParams = params.mapConserve {
-        case param @ Ident(tpnme.raw.STAR) =>
-          val name = WildcardParamName.fresh().toTypeName
-          tparams += makeKindProjectorTypeDef(name)
-          Ident(name)
+        case param @ Ident(tpnme.raw.STAR | tpnme.raw.MINUS_STAR | tpnme.raw.PLUS_STAR) => addParam()
+        case param @ Ident(tpnme.USCOREkw | tpnme.raw.MINUS_USCORE | tpnme.raw.PLUS_USCORE) if uscores => addParam()
         case other => other
       }
 
@@ -1538,20 +1566,21 @@ object Parsers {
       else t
 
     /** The block in a quote or splice */
-    def stagedBlock() =
-      val saved = lastStatOffset
-      try inBraces(block(simplify = true))
-      finally lastStatOffset = saved
+    def stagedBlock() = inBraces(block(simplify = true))
 
-    /** SimpleEpxr  ::=  spliceId | ‘$’ ‘{’ Block ‘}’)
-     *  SimpleType  ::=  spliceId | ‘$’ ‘{’ Block ‘}’)
+    /** SimpleExpr  ::=  spliceId | ‘$’ ‘{’ Block ‘}’)  unless inside quoted pattern
+     *  SimpleType  ::=  spliceId | ‘$’ ‘{’ Block ‘}’)  unless inside quoted pattern
+     *
+     *  SimpleExpr  ::=  spliceId | ‘$’ ‘{’ Pattern ‘}’)  when inside quoted pattern
+     *  SimpleType  ::=  spliceId | ‘$’ ‘{’ Pattern ‘}’)  when inside quoted pattern
      */
     def splice(isType: Boolean): Tree =
       atSpan(in.offset) {
         val expr =
           if (in.name.length == 1) {
             in.nextToken()
-            withinStaged(StageKind.Spliced)(stagedBlock())
+            val inPattern = (staged & StageKind.QuotedPattern) != 0
+            withinStaged(StageKind.Spliced)(if (inPattern) inBraces(pattern()) else stagedBlock())
           }
           else atSpan(in.offset + 1) {
             val id = Ident(in.name.drop(1))
@@ -1571,16 +1600,25 @@ object Parsers {
       if isSimpleLiteral then
         SingletonTypeTree(simpleLiteral())
       else if in.token == USCORE then
-        if sourceVersion.isAtLeast(future) then
-          deprecationWarning(em"`_` is deprecated for wildcard arguments of types: use `?` instead")
-          patch(source, Span(in.offset, in.offset + 1), "?")
+        if ctx.settings.YkindProjector.value == "underscores" then
+          val start = in.skipToken()
+          Ident(tpnme.USCOREkw).withSpan(Span(start, in.lastOffset, start))
+        else
+          if sourceVersion.isAtLeast(future) then
+            deprecationWarning(em"`_` is deprecated for wildcard arguments of types: use `?` instead")
+            patch(source, Span(in.offset, in.offset + 1), "?")
+          val start = in.skipToken()
+          typeBounds().withSpan(Span(start, in.lastOffset, start))
+      // Allow symbols -_ and +_ through for compatibility with code written using kind-projector in Scala 3 underscore mode.
+      // While these signify variant type parameters in Scala 2 + kind-projector, we ignore their variance markers since variance is inferred.
+      else if (isIdent(nme.MINUS) || isIdent(nme.PLUS)) && in.lookahead.token == USCORE && ctx.settings.YkindProjector.value == "underscores" then
+        val identName = in.name.toTypeName ++ nme.USCOREkw
         val start = in.skipToken()
-        typeBounds().withSpan(Span(start, in.lastOffset, start))
+        in.nextToken()
+        Ident(identName).withSpan(Span(start, in.lastOffset, start))
       else if isIdent(nme.?) then
         val start = in.skipToken()
         typeBounds().withSpan(Span(start, in.lastOffset, start))
-      else if isIdent(nme.*) && ctx.settings.YkindProjector.value then
-        typeIdent()
       else
         def singletonArgs(t: Tree): Tree =
           if in.token == LPAREN && in.featureEnabled(Feature.dependent)
@@ -1625,11 +1663,11 @@ object Parsers {
         val applied = rejectWildcardType(t)
         val args = typeArgs(namedOK = false, wildOK = true)
 
-        if (ctx.settings.YkindProjector.value) {
+        if (!ctx.settings.YkindProjector.isDefault) {
           def fail(): Tree = {
             syntaxError(
               "λ requires a single argument of the form X => ... or (X, Y) => ...",
-              Span(t.span.start, in.lastOffset)
+              Span(startOffset(t), in.lastOffset)
             )
             AppliedTypeTree(applied, args)
           }
@@ -1657,7 +1695,7 @@ object Parsers {
         }
       })
       case _ =>
-        if (ctx.settings.YkindProjector.value) {
+        if (!ctx.settings.YkindProjector.isDefault) {
           t match {
             case Tuple(params) =>
               val (newParams, tparams) = replaceKindProjectorPlaceholders(params)
@@ -1792,7 +1830,7 @@ object Parsers {
 
     def typeDependingOn(location: Location): Tree =
       if location.inParens then typ()
-      else if location.inPattern then refinedType()
+      else if location.inPattern then rejectWildcardType(refinedType())
       else infixType()
 
 /* ----------- EXPRESSIONS ------------------------------------------------ */
@@ -1801,14 +1839,14 @@ object Parsers {
      *  the initially parsed (...) region?
      */
     def toBeContinued(altToken: Token): Boolean =
-      if in.isNewLine || migrateTo3 then
-        false // a newline token means the expression is finished
-      else if !in.canStartStatTokens.contains(in.token)
-              || in.isLeadingInfixOperator(inConditional = true)
-      then
-        true
-      else
-        followedByToken(altToken) // scan ahead to see whether we find a `then` or `do`
+      inline def canContinue =
+        !in.canStartStatTokens.contains(in.token)  // not statement, so take as continued expr
+      || followedByToken(altToken)                 // scan ahead to see whether we find a `then` or `do`
+
+      !in.isNewLine       // a newline token means the expression is finished
+      && !migrateTo3      // old syntax
+      && canContinue
+    end toBeContinued
 
     def condExpr(altToken: Token): Tree =
       val t: Tree =
@@ -1838,6 +1876,7 @@ object Parsers {
       t
 
     /** Expr              ::=  [`implicit'] FunParams (‘=>’ | ‘?=>’) Expr
+     *                      |  HkTypeParamClause ‘=>’ Expr
      *                      |  Expr1
      *  FunParams         ::=  Bindings
      *                      |  id
@@ -1845,6 +1884,7 @@ object Parsers {
      *  ExprInParens      ::=  PostfixExpr `:' Type
      *                      |  Expr
      *  BlockResult       ::=  [‘implicit’] FunParams (‘=>’ | ‘?=>’) Block
+     *                      |  HkTypeParamClause ‘=>’ Block
      *                      |  Expr1
      *  Expr1             ::=  [‘inline’] `if' `(' Expr `)' {nl} Expr [[semi] else Expr]
      *                      |  [‘inline’] `if' Expr `then' Expr [[semi] else Expr]
@@ -1855,8 +1895,8 @@ object Parsers {
      *                      |  `throw' Expr
      *                      |  `return' [Expr]
      *                      |  ForExpr
-     *                      |  HkTypeParamClause ‘=>’ Expr
      *                      |  [SimpleExpr `.'] id `=' Expr
+     *                      |  PrefixOperator SimpleExpr `=' Expr
      *                      |  SimpleExpr1 ArgumentExprs `=' Expr
      *                      |  PostfixExpr [Ascription]
      *                      |  ‘inline’ InfixExpr MatchClause
@@ -1876,28 +1916,41 @@ object Parsers {
     def expr(location: Location): Tree = {
       val start = in.offset
       def isSpecialClosureStart = in.lookahead.isIdent(nme.erased) && in.erasedEnabled
-      if in.token == IMPLICIT then
-        closure(start, location, modifiers(BitSet(IMPLICIT)))
-      else if in.token == LPAREN && isSpecialClosureStart then
-        closure(start, location, Modifiers())
-      else {
-        val saved = placeholderParams
-        placeholderParams = Nil
-
-        def wrapPlaceholders(t: Tree) = try
-          if (placeholderParams.isEmpty) t
-          else new WildcardFunction(placeholderParams.reverse, t)
-        finally placeholderParams = saved
-
-        val t = expr1(location)
-        if in.isArrow then
-          placeholderParams = Nil // don't interpret `_' to the left of `=>` as placeholder
-          wrapPlaceholders(closureRest(start, location, convertToParams(t)))
-        else if isWildcard(t) then
-          placeholderParams = placeholderParams ::: saved
-          t
-        else wrapPlaceholders(t)
-      }
+      in.token match
+        case IMPLICIT =>
+          closure(start, location, modifiers(BitSet(IMPLICIT)))
+        case LPAREN if isSpecialClosureStart =>
+          closure(start, location, Modifiers())
+        case LBRACKET =>
+          val start = in.offset
+          val tparams = typeParamClause(ParamOwner.TypeParam)
+          val arrowOffset = accept(ARROW)
+          val body = expr(location)
+          atSpan(start, arrowOffset) {
+            if (isFunction(body))
+              PolyFunction(tparams, body)
+            else {
+              syntaxError("Implementation restriction: polymorphic function literals must have a value parameter", arrowOffset)
+              errorTermTree
+            }
+          }
+        case _ =>
+          val saved = placeholderParams
+          placeholderParams = Nil
+
+          def wrapPlaceholders(t: Tree) = try
+            if (placeholderParams.isEmpty) t
+            else new WildcardFunction(placeholderParams.reverse, t)
+          finally placeholderParams = saved
+
+          val t = expr1(location)
+          if in.isArrow then
+            placeholderParams = Nil // don't interpret `_' to the left of `=>` as placeholder
+            wrapPlaceholders(closureRest(start, location, convertToParams(t)))
+          else if isWildcard(t) then
+            placeholderParams = placeholderParams ::: saved
+            t
+          else wrapPlaceholders(t)
     }
 
     def expr1(location: Location = Location.ElseWhere): Tree = in.token match
@@ -1981,19 +2034,6 @@ object Parsers {
         }
       case FOR =>
         forExpr()
-      case LBRACKET =>
-        val start = in.offset
-        val tparams = typeParamClause(ParamOwner.TypeParam)
-        val arrowOffset = accept(ARROW)
-        val body = expr()
-        atSpan(start, arrowOffset) {
-          if (isFunction(body))
-            PolyFunction(tparams, body)
-          else {
-            syntaxError("Implementation restriction: polymorphic function literals must have a value parameter", arrowOffset)
-            errorTermTree
-          }
-        }
       case _ =>
         if isIdent(nme.inline)
            && !in.inModifierPosition()
@@ -2016,7 +2056,7 @@ object Parsers {
     def expr1Rest(t: Tree, location: Location): Tree = in.token match
       case EQUALS =>
         t match
-          case Ident(_) | Select(_, _) | Apply(_, _) =>
+          case Ident(_) | Select(_, _) | Apply(_, _) | PrefixOp(_, _) =>
             atSpan(startOffset(t), in.skipToken()) {
               val loc = if location.inArgs then location else Location.ElseWhere
               Assign(t, subPart(() => expr(loc)))
@@ -2080,14 +2120,14 @@ object Parsers {
     /**    MatchClause ::= `match' `{' CaseClauses `}'
      */
     def matchClause(t: Tree): Match =
-      atSpan(t.span.start, in.skipToken()) {
+      atSpan(startOffset(t), in.skipToken()) {
         Match(t, inBracesOrIndented(caseClauses(() => caseClause())))
       }
 
     /**    `match' `{' TypeCaseClauses `}'
      */
     def matchType(t: Tree): MatchTypeTree =
-      atSpan(t.span.start, accept(MATCH)) {
+      atSpan(startOffset(t), accept(MATCH)) {
         MatchTypeTree(EmptyTree, t, inBracesOrIndented(caseClauses(typeCaseClause)))
       }
 
@@ -2177,17 +2217,19 @@ object Parsers {
         isOperator = !(location.inArgs && followingIsVararg()),
         maybePostfix = true)
 
-    /** PrefixExpr   ::= [`-' | `+' | `~' | `!'] SimpleExpr
-    */
+    /** PrefixExpr       ::= [PrefixOperator] SimpleExpr
+     *  PrefixOperator   ::=  ‘-’ | ‘+’ | ‘~’ | ‘!’
+     */
     val prefixExpr: Location => Tree = location =>
-      if (isIdent && nme.raw.isUnary(in.name)) {
+      if isIdent && nme.raw.isUnary(in.name)
+         && in.canStartExprTokens.contains(in.lookahead.token)
+      then
         val start = in.offset
         val op = termIdent()
         if (op.name == nme.raw.MINUS && isNumericLit)
           simpleExprRest(literal(start), location, canApply = true)
         else
           atSpan(start) { PrefixOp(op, simpleExpr(location)) }
-      }
       else simpleExpr(location)
 
     /** SimpleExpr    ::= ‘new’ ConstrApp {`with` ConstrApp} [TemplateBody]
@@ -2235,7 +2277,7 @@ object Parsers {
           blockExpr()
         case QUOTE =>
           atSpan(in.skipToken()) {
-            withinStaged(StageKind.Quoted) {
+            withinStaged(StageKind.Quoted | (if (location.inPattern) StageKind.QuotedPattern else 0)) {
               Quote {
                 if (in.token == LBRACKET) inBrackets(typ())
                 else stagedBlock()
@@ -2445,7 +2487,7 @@ object Parsers {
       else Nil
 
     /** Enumerator  ::=  Generator
-     *                |  Guard
+     *                |  Guard {Guard}
      *                |  Pattern1 `=' Expr
      */
     def enumerator(): Tree =
@@ -2473,9 +2515,9 @@ object Parsers {
         GenFrom(pat, subExpr(), checkMode)
       }
 
-    /** ForExpr  ::= `for' (`(' Enumerators `)' | `{' Enumerators `}')
-     *                {nl} [`yield'] Expr
-     *            |  `for' Enumerators (`do' Expr | `yield' Expr)
+    /** ForExpr  ::=  ‘for’ ‘(’ Enumerators ‘)’ {nl} [‘do’ | ‘yield’] Expr
+     *             |  ‘for’ ‘{’ Enumerators ‘}’ {nl} [‘do’ | ‘yield’] Expr
+     *             |  ‘for’     Enumerators          (‘do’ | ‘yield’) Expr
      */
     def forExpr(): Tree =
       atSpan(in.skipToken()) {
@@ -2522,7 +2564,7 @@ object Parsers {
             if (in.token == INDENT)
               inBracesOrIndented(enumerators())
             else {
-              val ts = inSepRegion(InCond)(enumerators())
+              val ts = inSepRegion(InFor)(enumerators())
               if (rewriteToOldSyntax(Span(start)) && ts.nonEmpty)
                 if (ts.head.sourcePos.startLine != ts.last.sourcePos.startLine) {
                   patch(source, Span(forEnd), " {")
@@ -2574,15 +2616,22 @@ object Parsers {
       })
     }
 
-    /** TypeCaseClause     ::= ‘case’ InfixType ‘=>’ Type [nl]
+    /** TypeCaseClause     ::= ‘case’ (InfixType | ‘_’) ‘=>’ Type [semi]
      */
     def typeCaseClause(): CaseDef = atSpan(in.offset) {
       val pat = inSepRegion(InCase) {
         accept(CASE)
-        infixType()
+        in.token match {
+          case USCORE if in.lookahead.isArrow =>
+            val start = in.skipToken()
+            Ident(tpnme.WILDCARD).withSpan(Span(start, in.lastOffset, start))
+          case _ =>
+            rejectWildcardType(infixType())
+        }
       }
       CaseDef(pat, EmptyTree, atSpan(accept(ARROW)) {
-        val t = typ()
+        val t = rejectWildcardType(typ())
+        if in.token == SEMI then in.nextToken()
         newLinesOptWhenFollowedBy(CASE)
         t
       })
@@ -2677,7 +2726,7 @@ object Parsers {
       case LPAREN =>
         atSpan(in.offset) { makeTupleOrParens(inParens(patternsOpt())) }
       case QUOTE =>
-        simpleExpr(Location.ElseWhere)
+        simpleExpr(Location.InPattern)
       case XMLSTART =>
         xmlLiteralPattern()
       case GIVEN =>
@@ -3080,10 +3129,6 @@ object Parsers {
       languageImport(tree) match
         case Some(prefix) =>
           in.languageImportContext = in.languageImportContext.importContext(imp, NoSymbol)
-          if prefix == nme.experimental
-             && selectors.exists(sel => Feature.experimental(sel.name) != Feature.scala2macros)
-          then
-            Feature.checkExperimentalFeature("s", imp.srcPos)
           for
             case ImportSelector(id @ Ident(imported), EmptyTree, _) <- selectors
             if allSourceVersionNames.contains(imported)
@@ -3439,7 +3484,11 @@ object Parsers {
           givenDef(start, mods, atSpan(in.skipToken()) { Mod.Given() })
         case _ =>
           syntaxErrorOrIncomplete(ExpectedStartOfTopLevelDefinition())
-          EmptyTree
+          mods.annotations match {
+            case head :: Nil => head
+            case Nil => EmptyTree
+            case all => Block(all, errorTermTree)
+          }
       }
 
     /** ClassDef ::= id ClassConstr TemplateOpt
@@ -3577,7 +3626,7 @@ object Parsers {
             ValDef(name, parents.head, subExpr())
           else
             DefDef(name, joinParams(tparams, vparamss), parents.head, subExpr())
-        else if in.token != WITH && parentsIsType then
+        else if (isStatSep || isStatSeqEnd) && parentsIsType then
           if name.isEmpty then
             syntaxError(em"anonymous given cannot be abstract")
           DefDef(name, joinParams(tparams, vparamss), parents.head, EmptyTree)
@@ -3585,14 +3634,17 @@ object Parsers {
           val tparams1 = tparams.map(tparam => tparam.withMods(tparam.mods | PrivateLocal))
           val vparamss1 = vparamss.map(_.map(vparam =>
             vparam.withMods(vparam.mods &~ Param | ParamAccessor | Protected)))
-          val templ = withTemplate(makeConstructor(tparams1, vparamss1), parents)
-          if noParams then ModuleDef(name, templ)
+          val constr = makeConstructor(tparams1, vparamss1)
+          val templ =
+            if isStatSep || isStatSeqEnd then Template(constr, parents, Nil, EmptyValDef, Nil)
+            else withTemplate(constr, parents)
+          if noParams && !mods.is(Inline) then ModuleDef(name, templ)
           else TypeDef(name.toTypeName, templ)
       end gdef
       finalizeDef(gdef, mods1, start)
     }
 
-    /** Extension  ::=  ‘extension’ [DefTypeParamClause] ‘(’ DefParam ‘)’
+    /** Extension  ::=  ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefParam ‘)’
      *                  {UsingParamClause} ExtMethods
      */
     def extension(): ExtMethods =
@@ -3616,7 +3668,7 @@ object Parsers {
           in.observeIndented()
           newLineOptWhenFollowedBy(LBRACE)
           if in.isNestedStart then inDefScopeBraces(extMethods(nparams))
-          else { syntaxError("Extension without extension methods"); Nil }
+          else { syntaxErrorOrIncomplete("Extension without extension methods") ; Nil }
       val result = atSpan(start)(ExtMethods(joinParams(tparams, leadParamss.toList), methods))
       val comment = in.getDocComment(start)
       if comment.isDefined then
@@ -3637,12 +3689,16 @@ object Parsers {
      */
     def extMethods(numLeadParams: Int): List[DefDef] = checkNoEscapingPlaceholders {
       val meths = new ListBuffer[DefDef]
-      val exitOnError = false
-      while !isStatSeqEnd && !exitOnError do
-        setLastStatOffset()
-        meths += extMethod(numLeadParams)
-        acceptStatSepUnlessAtEnd(meths)
-      if meths.isEmpty then syntaxError("`def` expected")
+      while
+        val start = in.offset
+        val mods = defAnnotsMods(modifierTokens)
+        in.token != EOF && {
+          accept(DEF)
+          meths += defDefOrDcl(start, mods, numLeadParams)
+          in.token != EOF && statSepOrEnd(meths, what = "extension method")
+        }
+      do ()
+      if meths.isEmpty then syntaxErrorOrIncomplete("`def` expected")
       meths.toList
     }
 
@@ -3703,11 +3759,11 @@ object Parsers {
         }
         else Nil
       possibleTemplateStart()
-      if (isEnum) {
-        val (self, stats) = withinEnum(templateBody())
+      if isEnum then
+        val (self, stats) = withinEnum(templateBody(parents))
         Template(constr, parents, derived, self, stats)
-      }
-      else templateBodyOpt(constr, parents, derived)
+      else
+        templateBodyOpt(constr, parents, derived)
     }
 
     /** TemplateOpt = [Template]
@@ -3730,15 +3786,15 @@ object Parsers {
     def templateBodyOpt(constr: DefDef, parents: List[Tree], derived: List[Tree]): Template =
       val (self, stats) =
         if in.isNestedStart then
-          templateBody()
+          templateBody(parents)
         else
           checkNextNotIndented()
           (EmptyValDef, Nil)
       Template(constr, parents, derived, self, stats)
 
-    def templateBody(rewriteWithColon: Boolean = true): (ValDef, List[Tree]) =
+    def templateBody(parents: List[Tree], rewriteWithColon: Boolean = true): (ValDef, List[Tree]) =
       val r = inDefScopeBraces(templateStatSeq(), rewriteWithColon)
-      if in.token == WITH then
+      if in.token == WITH && parents.isEmpty then
         syntaxError(EarlyDefinitionsNotSupported())
         in.nextToken()
         template(emptyConstructor)
@@ -3747,7 +3803,7 @@ object Parsers {
     /** with Template, with EOL  interpreted */
     def withTemplate(constr: DefDef, parents: List[Tree]): Template =
       accept(WITH)
-      val (self, stats) = templateBody(rewriteWithColon = false)
+      val (self, stats) = templateBody(parents, rewriteWithColon = false)
       Template(constr, parents, Nil, self, stats)
         .withSpan(Span(constr.span.orElse(parents.head.span).start, in.lastOffset))
 
@@ -3777,8 +3833,8 @@ object Parsers {
      */
     def topStatSeq(outermost: Boolean = false): List[Tree] = {
       val stats = new ListBuffer[Tree]
-      while (!isStatSeqEnd) {
-        setLastStatOffset()
+      while
+        var empty = false
         if (in.token == PACKAGE) {
           val start = in.skipToken()
           if (in.token == OBJECT) {
@@ -3795,13 +3851,10 @@ object Parsers {
           stats += extension()
         else if isDefIntro(modifierTokens) then
           stats +++= defOrDcl(in.offset, defAnnotsMods(modifierTokens))
-        else if !isStatSep then
-          if (in.token == CASE)
-            syntaxErrorOrIncomplete(OnlyCaseClassOrCaseObjectAllowed())
-          else
-            syntaxErrorOrIncomplete(ExpectedToplevelDef())
-        acceptStatSepUnlessAtEnd(stats)
-      }
+        else
+          empty = true
+        statSepOrEnd(stats, empty, "toplevel definition")
+      do ()
       stats.toList
     }
 
@@ -3819,9 +3872,9 @@ object Parsers {
     def templateStatSeq(): (ValDef, List[Tree]) = checkNoEscapingPlaceholders {
       var self: ValDef = EmptyValDef
       val stats = new ListBuffer[Tree]
-      if (isExprIntro && !isDefIntro(modifierTokens)) {
+      if isExprIntro && !isDefIntro(modifierTokens) then
         val first = expr1()
-        if (in.token == ARROW) {
+        if in.token == ARROW then
           first match {
             case Typed(tree @ This(EmptyTypeIdent), tpt) =>
               self = makeSelfDef(nme.WILDCARD, tpt).withSpan(first.span)
@@ -3832,15 +3885,12 @@ object Parsers {
           }
           in.token = SELFARROW // suppresses INDENT insertion after `=>`
           in.nextToken()
-        }
-        else {
+        else
           stats += first
-          acceptStatSepUnlessAtEnd(stats)
-        }
-      }
-      var exitOnError = false
-      while (!isStatSeqEnd && !exitOnError) {
-        setLastStatOffset()
+          statSepOrEnd(stats)
+      end if
+      while
+        var empty = false
         if (in.token == IMPORT)
           stats ++= importClause(IMPORT, mkImport())
         else if (in.token == EXPORT)
@@ -3851,13 +3901,11 @@ object Parsers {
           stats +++= defOrDcl(in.offset, defAnnotsMods(modifierTokens))
         else if (isExprIntro)
           stats += expr1()
-        else if (!isStatSep) {
-          exitOnError = mustStartStat
-          syntaxErrorOrIncomplete("illegal start of definition")
-        }
-        acceptStatSepUnlessAtEnd(stats)
-      }
-      (self, if (stats.isEmpty) List(EmptyTree) else stats.toList)
+        else
+          empty = true
+        statSepOrEnd(stats, empty)
+      do ()
+      (self, if stats.isEmpty then List(EmptyTree) else stats.toList)
     }
 
     /** RefineStatSeq    ::=  RefineStat {semi RefineStat}
@@ -3870,6 +3918,9 @@ object Parsers {
       val stats = new ListBuffer[Tree]
       def checkLegal(tree: Tree): List[Tree] =
         val problem = tree match
+          case tree: ValDef if tree.mods.is(Mutable) =>
+            i"""refinement cannot be a mutable var.
+               |You can use an explicit getter ${tree.name} and setter ${tree.name}_= instead"""
           case tree: MemberDef if !(tree.mods.flags & ModifierFlags).isEmpty =>
             i"refinement cannot be ${(tree.mods.flags & ModifierFlags).flagStrings().mkString("`", "`, `", "`")}"
           case tree: DefDef if tree.termParamss.nestedExists(!_.rhs.isEmpty) =>
@@ -3885,16 +3936,14 @@ object Parsers {
         if problem.isEmpty then tree :: Nil
         else { syntaxError(problem, tree.span); Nil }
 
-      while (!isStatSeqEnd) {
-        if (isDclIntro)
+      while
+        val dclFound = isDclIntro
+        if dclFound then
           stats ++= checkLegal(defOrDcl(in.offset, Modifiers()))
-        else if (!isStatSep)
-          syntaxErrorOrIncomplete(
-            "illegal start of declaration" +
-            (if (inFunReturnType) " (possible cause: missing `=` in front of current method body)"
-             else ""))
-        acceptStatSepUnlessAtEnd(stats)
-      }
+        var what = "declaration"
+        if inFunReturnType then what += " (possible cause: missing `=` in front of current method body)"
+        statSepOrEnd(stats, !dclFound, what)
+      do ()
       stats.toList
     }
 
@@ -3918,9 +3967,8 @@ object Parsers {
      */
     def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
       val stats = new ListBuffer[Tree]
-      var exitOnError = false
-      while (!isStatSeqEnd && in.token != CASE && !exitOnError) {
-        setLastStatOffset()
+      while
+        var empty = false
         if (in.token == IMPORT)
           stats ++= importClause(IMPORT, mkImport())
         else if (isExprIntro)
@@ -3931,12 +3979,10 @@ object Parsers {
           stats += extension()
         else if isDefIntro(localModifierTokens, excludedSoftModifiers = Set(nme.`opaque`)) then
           stats +++= localDef(in.offset)
-        else if (!isStatSep && (in.token != CASE)) {
-          exitOnError = mustStartStat
-          syntaxErrorOrIncomplete(IllegalStartOfStatement(isModifier))
-        }
-        acceptStatSepUnlessAtEnd(stats, CASE)
-      }
+        else
+          empty = true
+        statSepOrEnd(stats, empty, altEnd = CASE)
+      do ()
       stats.toList
     }
 
@@ -3953,7 +3999,7 @@ object Parsers {
             in.nextToken()
             ts += objectDef(start, Modifiers(Package))
             if (in.token != EOF) {
-              acceptStatSepUnlessAtEnd(ts)
+              statSepOrEnd(ts, what = "toplevel definition")
               ts ++= topStatSeq()
             }
           }
@@ -3970,7 +4016,7 @@ object Parsers {
               acceptStatSep()
               ts += makePackaging(start, pkg, topstats())
             if continue then
-              acceptStatSepUnlessAtEnd(ts)
+              statSepOrEnd(ts, what = "toplevel definition")
               ts ++= topStatSeq()
         }
         else
@@ -4000,7 +4046,7 @@ object Parsers {
       EmptyTree
     }
 
-    override def templateBody(rewriteWithColon: Boolean): (ValDef, List[Thicket]) = {
+    override def templateBody(parents: List[Tree], rewriteWithColon: Boolean): (ValDef, List[Thicket]) = {
       skipBraces()
       (EmptyValDef, List(EmptyTree))
     }
diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
index 15b3f6e63134..d6ef2646eacf 100644
--- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
@@ -135,14 +135,19 @@ object Scanners {
       */
     protected def putChar(c: Char): Unit = litBuf.append(c)
 
-    /** Clear buffer and set name and token */
-    def finishNamed(idtoken: Token = IDENTIFIER, target: TokenData = this): Unit = {
+    /** Finish an IDENTIFIER with `this.name`. */
+    inline def finishNamed(): Unit = finishNamedToken(IDENTIFIER, this)
+
+    /** Clear buffer and set name and token.
+     *  If `target` is different from `this`, don't treat identifiers as end tokens.
+     */
+    def finishNamedToken(idtoken: Token, target: TokenData): Unit =
       target.name = termName(litBuf.chars, 0, litBuf.length)
       litBuf.clear()
       target.token = idtoken
-      if (idtoken == IDENTIFIER)
-        target.token = toToken(target.name)
-    }
+      if idtoken == IDENTIFIER then
+        val converted = toToken(target.name)
+        if converted != END || (target eq this) then target.token = converted
 
     /** The token for given `name`. Either IDENTIFIER or a keyword. */
     def toToken(name: SimpleName): Token
@@ -240,23 +245,18 @@ object Scanners {
     /** A buffer for comments */
     private val commentBuf = CharBuffer()
 
-    private def handleMigration(keyword: Token): Token =
-      if scala3keywords.contains(keyword) && migrateTo3 then treatAsIdent()
-      else keyword
-
-    private def treatAsIdent(): Token =
-      report.errorOrMigrationWarning(
-        i"$name is now a keyword, write `$name` instead of $name to keep it as an identifier",
-        sourcePos())
-      patch(source, Span(offset), "`")
-      patch(source, Span(offset + name.length), "`")
-      IDENTIFIER
-
-    def toToken(name: SimpleName): Token = {
-      val idx = name.start
+    def toToken(identifier: SimpleName): Token =
+      def handleMigration(keyword: Token): Token =
+        if scala3keywords.contains(keyword) && migrateTo3 then
+          val what = tokenString(keyword)
+          report.errorOrMigrationWarning(i"$what is now a keyword, write `$what` instead of $what to keep it as an identifier", sourcePos())
+          patch(source, Span(offset), "`")
+          patch(source, Span(offset + identifier.length), "`")
+          IDENTIFIER
+        else keyword
+      val idx = identifier.start
       if (idx >= 0 && idx <= lastKeywordStart) handleMigration(kwArray(idx))
       else IDENTIFIER
-    }
 
     def newTokenData: TokenData = new TokenData {}
 
@@ -367,6 +367,8 @@ object Scanners {
       *   - it does not follow a blank line, and
       *   - it is followed by at least one whitespace character and a
       *     token that can start an expression.
+      *   - if the operator appears on its own line, the next line must have at least
+      *     the same indentation width as the operator. See pos/i12395 for a test where this matters.
       *  If a leading infix operator is found and the source version is `3.0-migration`, emit a change warning.
       */
     def isLeadingInfixOperator(nextWidth: IndentWidth = indentWidth(offset), inConditional: Boolean = true) =
@@ -377,8 +379,8 @@ object Scanners {
       && {
         // Is current lexeme  assumed to start an expression?
         // This is the case if the lexime is one of the tokens that
-        // starts an expression. Furthermore, if the previous token is
-        // in backticks, the lexeme may not be a binary operator.
+        // starts an expression or it is a COLONEOL. Furthermore, if
+        // the previous token is in backticks, the lexeme may not be a binary operator.
         // I.e. in
         //
         //   a
@@ -388,14 +390,16 @@ object Scanners {
         // in backticks and is a binary operator. Hence, `x` is not classified as a
         // leading infix operator.
         def assumeStartsExpr(lexeme: TokenData) =
-          canStartExprTokens.contains(lexeme.token)
+          (canStartExprTokens.contains(lexeme.token) || lexeme.token == COLONEOL)
           && (!lexeme.isOperator || nme.raw.isUnary(lexeme.name))
         val lookahead = LookaheadScanner()
         lookahead.allowLeadingInfixOperators = false
           // force a NEWLINE a after current token if it is on its own line
         lookahead.nextToken()
         assumeStartsExpr(lookahead)
-        || lookahead.token == NEWLINE && assumeStartsExpr(lookahead.next)
+        || lookahead.token == NEWLINE
+           && assumeStartsExpr(lookahead.next)
+           && indentWidth(offset) <= indentWidth(lookahead.next.offset)
       }
       && {
         currentRegion match
@@ -535,7 +539,7 @@ object Scanners {
            || nextWidth == lastWidth && (indentPrefix == MATCH || indentPrefix == CATCH) && token != CASE then
           if currentRegion.isOutermost then
             if nextWidth < lastWidth then currentRegion = topLevelRegion(nextWidth)
-          else if !isLeadingInfixOperator(nextWidth) && !statCtdTokens.contains(lastToken) then
+          else if !isLeadingInfixOperator(nextWidth) && !statCtdTokens.contains(lastToken) && lastToken != INDENT then
             currentRegion match
               case r: Indented =>
                 currentRegion = r.enclosing
@@ -595,7 +599,9 @@ object Scanners {
       case r: Indented
       if !r.isOutermost
          && closingRegionTokens.contains(token)
-         && !(token == CASE && r.prefix == MATCH) =>
+         && !(token == CASE && r.prefix == MATCH)
+         && next.token == EMPTY  // can be violated for ill-formed programs, e.g. neg/i12605.scala
+      =>
         currentRegion = r.enclosing
         insert(OUTDENT, offset)
       case _ =>
@@ -604,6 +610,7 @@ object Scanners {
       prev.copyFrom(this)
       lastOffset = lastCharOffset
       fetchToken()
+      if token == END && !isEndMarker then token = IDENTIFIER
     }
 
     def reset() = {
@@ -652,10 +659,10 @@ object Scanners {
                  && (token == RPAREN || token == RBRACKET || token == RBRACE || token == OUTDENT)
               then
                 () /* skip the trailing comma */
-              else if token == EOF then // e.g. when the REPL is parsing "val List(x, y, _*,"
-                () /* skip the trailing comma */
               else
                 reset()
+        case END =>
+          if !isEndMarker then token = IDENTIFIER
         case COLON =>
           if fewerBracesEnabled then observeColonEOL()
         case RBRACE | RPAREN | RBRACKET =>
@@ -666,6 +673,21 @@ object Scanners {
       }
     }
 
+    protected def isEndMarker: Boolean =
+      if indentSyntax && isAfterLineEnd then
+        val endLine = source.offsetToLine(offset)
+        val lookahead = new LookaheadScanner():
+          override def isEndMarker = false
+        lookahead.nextToken()
+        if endMarkerTokens.contains(lookahead.token)
+          && source.offsetToLine(lookahead.offset) == endLine
+        then
+          lookahead.nextToken()
+          if lookahead.token == EOF
+          || source.offsetToLine(lookahead.offset) > endLine
+          then return true
+      false
+
     /** Is there a blank line between the current token and the last one?
      *  A blank line consists only of characters <= ' '.
      *  @pre  afterLineEnd().
@@ -975,7 +997,7 @@ object Scanners {
       getLitChars('`')
       if (ch == '`') {
         nextChar()
-        finishNamed(BACKQUOTED_IDENT)
+        finishNamedToken(BACKQUOTED_IDENT, target = this)
         if (name.length == 0)
           error("empty quoted identifier")
         else if (name == nme.WILDCARD)
@@ -1141,10 +1163,12 @@ object Scanners {
             nextRawChar()
             ch != SU && Character.isUnicodeIdentifierPart(ch)
           do ()
-          finishNamed(target = next)
+          finishNamedToken(IDENTIFIER, target = next)
         }
         else
-          error("invalid string interpolation: `$$`, `$\"`, `$`ident or `$`BlockExpr expected")
+          error("invalid string interpolation: `$$`, `$\"`, `$`ident or `$`BlockExpr expected", off = charOffset - 2)
+          putChar('$')
+          getStringPart(multiLine)
       }
       else {
         val isUnclosedLiteral = !isUnicodeEscape && (ch == SU || (!multiLine && (ch == CR || ch == LF)))
@@ -1227,7 +1251,7 @@ object Scanners {
               nextChar()
             }
           }
-          val alt = if oct == LF then raw"\n" else f"\u$oct%04x"
+          val alt = if oct == LF then raw"\n" else f"${"\\"}u$oct%04x"
           error(s"octal escape literals are unsupported: use $alt instead", start)
           putChar(oct.toChar)
         }
diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
index 27f533408deb..55f428cef5a4 100644
--- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
@@ -30,107 +30,107 @@ abstract class TokensCommon {
   }
 
   /** special tokens */
-  final val EMPTY = 0;             enter(EMPTY, "") // a missing token, used in lookahead
-  final val ERROR = 1;             enter(ERROR, "erroneous token") // an erroneous token
-  final val EOF = 2;               enter(EOF, "eof")
+  inline val EMPTY = 0;             enter(EMPTY, "") // a missing token, used in lookahead
+  inline val ERROR = 1;             enter(ERROR, "erroneous token") // an erroneous token
+  inline val EOF = 2;               enter(EOF, "eof")
 
   /** literals */
-  final val CHARLIT = 3;           enter(CHARLIT, "character literal")
-  final val INTLIT = 4;            enter(INTLIT, "integer literal")
-  final val DECILIT = 5;           enter(DECILIT, "number literal")  // with decimal point
-  final val EXPOLIT = 6;           enter(EXPOLIT, "number literal with exponent")
-  final val LONGLIT = 7;           enter(LONGLIT, "long literal")
-  final val FLOATLIT = 8;          enter(FLOATLIT, "float literal")
-  final val DOUBLELIT = 9;         enter(DOUBLELIT, "double literal")
-  final val STRINGLIT = 10;         enter(STRINGLIT, "string literal")
-  final val STRINGPART = 11;       enter(STRINGPART, "string literal", "string literal part")
-  //final val INTERPOLATIONID = 12;  enter(INTERPOLATIONID, "string interpolator")
-  //final val QUOTEID = 13;        enter(QUOTEID, "quoted identifier") // TODO: deprecate
+  inline val CHARLIT = 3;           enter(CHARLIT, "character literal")
+  inline val INTLIT = 4;            enter(INTLIT, "integer literal")
+  inline val DECILIT = 5;           enter(DECILIT, "number literal")  // with decimal point
+  inline val EXPOLIT = 6;           enter(EXPOLIT, "number literal with exponent")
+  inline val LONGLIT = 7;           enter(LONGLIT, "long literal")
+  inline val FLOATLIT = 8;          enter(FLOATLIT, "float literal")
+  inline val DOUBLELIT = 9;         enter(DOUBLELIT, "double literal")
+  inline val STRINGLIT = 10;         enter(STRINGLIT, "string literal")
+  inline val STRINGPART = 11;       enter(STRINGPART, "string literal", "string literal part")
+  //inline val INTERPOLATIONID = 12;  enter(INTERPOLATIONID, "string interpolator")
+  //inline val QUOTEID = 13;        enter(QUOTEID, "quoted identifier") // TODO: deprecate
 
   /** identifiers */
-  final val IDENTIFIER = 14;       enter(IDENTIFIER, "identifier")
-  //final val BACKQUOTED_IDENT = 15; enter(BACKQUOTED_IDENT, "identifier", "backquoted ident")
+  inline val IDENTIFIER = 14;       enter(IDENTIFIER, "identifier")
+  //inline val BACKQUOTED_IDENT = 15; enter(BACKQUOTED_IDENT, "identifier", "backquoted ident")
 
   /** alphabetic keywords */
-  final val IF = 20;               enter(IF, "if")
-  final val FOR = 21;              enter(FOR, "for")
-  final val ELSE = 22;             enter(ELSE, "else")
-  final val THIS = 23;             enter(THIS, "this")
-  final val NULL = 24;             enter(NULL, "null")
-  final val NEW = 25;              enter(NEW, "new")
-  //final val WITH = 26;             enter(WITH, "with")
-  final val SUPER = 27;            enter(SUPER, "super")
-  //final val CASE = 28;             enter(CASE, "case")
-  //final val CASECLASS = 29;        enter(CASECLASS, "case class")
-  //final val CASEOBJECT = 30;       enter(CASEOBJECT, "case object")
-  //final val VAL = 31;              enter(VAL, "val")
-  final val ABSTRACT = 32;         enter(ABSTRACT, "abstract")
-  final val FINAL = 33;            enter(FINAL, "final")
-  final val PRIVATE = 34;          enter(PRIVATE, "private")
-  final val PROTECTED = 35;        enter(PROTECTED, "protected")
-  final val OVERRIDE = 36;         enter(OVERRIDE, "override")
-  //final val IMPLICIT = 37;         enter(IMPLICIT, "implicit")
-  //final val VAR = 38;              enter(VAR, "var")
-  //final val DEF = 39;              enter(DEF, "def")
-  //final val TYPE = 40;             enter(TYPE, "type")
-  final val EXTENDS = 41;          enter(EXTENDS, "extends")
-  final val TRUE = 42;             enter(TRUE, "true")
-  final val FALSE = 43;            enter(FALSE, "false")
-  //final val OBJECT = 44;           enter(OBJECT, "object")
-  final val CLASS = 45;            enter(CLASS, "class")
-  final val IMPORT = 46;           enter(IMPORT, "import")
-  final val PACKAGE = 47;          enter(PACKAGE, "package")
-  //final val YIELD = 48;            enter(YIELD, "yield")
-  final val DO = 49;               enter(DO, "do")
-  //final val TRAIT = 50;            enter(TRAIT, "trait")
-  //final val SEALED = 51;           enter(SEALED, "sealed")
-  final val THROW = 52;            enter(THROW, "throw")
-  final val TRY = 53;              enter(TRY, "try")
-  final val CATCH = 54;            enter(CATCH, "catch")
-  final val FINALLY = 55;          enter(FINALLY, "finally")
-  final val WHILE = 56;            enter(WHILE, "while")
-  final val RETURN = 57;           enter(RETURN, "return")
-  //final val MATCH = 58;            enter(MATCH, "match")
-  //final val LAZY = 59;             enter(LAZY, "lazy")
-  //final val THEN = 60;             enter(THEN, "then")
-  //final val FORSOME = 61;          enter(FORSOME, "forSome") // TODO: deprecate
-  //final val ENUM = 62;             enter(ENUM, "enum")
+  inline val IF = 20;               enter(IF, "if")
+  inline val FOR = 21;              enter(FOR, "for")
+  inline val ELSE = 22;             enter(ELSE, "else")
+  inline val THIS = 23;             enter(THIS, "this")
+  inline val NULL = 24;             enter(NULL, "null")
+  inline val NEW = 25;              enter(NEW, "new")
+  //inline val WITH = 26;             enter(WITH, "with")
+  inline val SUPER = 27;            enter(SUPER, "super")
+  //inline val CASE = 28;             enter(CASE, "case")
+  //inline val CASECLASS = 29;        enter(CASECLASS, "case class")
+  //inline val CASEOBJECT = 30;       enter(CASEOBJECT, "case object")
+  //inline val VAL = 31;              enter(VAL, "val")
+  inline val ABSTRACT = 32;         enter(ABSTRACT, "abstract")
+  inline val FINAL = 33;            enter(FINAL, "final")
+  inline val PRIVATE = 34;          enter(PRIVATE, "private")
+  inline val PROTECTED = 35;        enter(PROTECTED, "protected")
+  inline val OVERRIDE = 36;         enter(OVERRIDE, "override")
+  //inline val IMPLICIT = 37;         enter(IMPLICIT, "implicit")
+  //inline val VAR = 38;              enter(VAR, "var")
+  //inline val DEF = 39;              enter(DEF, "def")
+  //inline val TYPE = 40;             enter(TYPE, "type")
+  inline val EXTENDS = 41;          enter(EXTENDS, "extends")
+  inline val TRUE = 42;             enter(TRUE, "true")
+  inline val FALSE = 43;            enter(FALSE, "false")
+  //inline val OBJECT = 44;           enter(OBJECT, "object")
+  inline val CLASS = 45;            enter(CLASS, "class")
+  inline val IMPORT = 46;           enter(IMPORT, "import")
+  inline val PACKAGE = 47;          enter(PACKAGE, "package")
+  //inline val YIELD = 48;            enter(YIELD, "yield")
+  inline val DO = 49;               enter(DO, "do")
+  //inline val TRAIT = 50;            enter(TRAIT, "trait")
+  //inline val SEALED = 51;           enter(SEALED, "sealed")
+  inline val THROW = 52;            enter(THROW, "throw")
+  inline val TRY = 53;              enter(TRY, "try")
+  inline val CATCH = 54;            enter(CATCH, "catch")
+  inline val FINALLY = 55;          enter(FINALLY, "finally")
+  inline val WHILE = 56;            enter(WHILE, "while")
+  inline val RETURN = 57;           enter(RETURN, "return")
+  //inline val MATCH = 58;            enter(MATCH, "match")
+  //inline val LAZY = 59;             enter(LAZY, "lazy")
+  //inline val THEN = 60;             enter(THEN, "then")
+  //inline val FORSOME = 61;          enter(FORSOME, "forSome") // TODO: deprecate
+  //inline val ENUM = 62;             enter(ENUM, "enum")
 
   /** special symbols */
-  final val COMMA = 70;            enter(COMMA, "','")
-  final val SEMI = 71;             enter(SEMI, "';'")
-  final val DOT = 72;              enter(DOT, "'.'")
-  //final val NEWLINE = 78;          enter(NEWLINE, "end of statement", "new line")
-  //final val NEWLINES = 79;         enter(NEWLINES, "end of statement", "new lines")
+  inline val COMMA = 70;            enter(COMMA, "','")
+  inline val SEMI = 71;             enter(SEMI, "';'")
+  inline val DOT = 72;              enter(DOT, "'.'")
+  //inline val NEWLINE = 78;          enter(NEWLINE, "end of statement", "new line")
+  //inline val NEWLINES = 79;         enter(NEWLINES, "end of statement", "new lines")
 
   /** special keywords */
-  //final val USCORE = 73;           enter(USCORE, "_")
-  final val COLON = 74;            enter(COLON, ":")
-  final val EQUALS = 75;           enter(EQUALS, "=")
-  //final val LARROW = 76;           enter(LARROW, "<-")
-  //final val ARROW = 77;            enter(ARROW, "=>")
-  //final val SUBTYPE = 80;          enter(SUBTYPE, "<:")
-  //final val SUPERTYPE = 81;        enter(SUPERTYPE, ">:")
-  //final val HASH = 82;             enter(HASH, "#")
-  final val AT = 83;               enter(AT, "@")
-  //final val VIEWBOUND = 84;        enter(VIEWBOUND, "<%")
+  //inline val USCORE = 73;           enter(USCORE, "_")
+  inline val COLON = 74;            enter(COLON, ":")
+  inline val EQUALS = 75;           enter(EQUALS, "=")
+  //inline val LARROW = 76;           enter(LARROW, "<-")
+  //inline val ARROW = 77;            enter(ARROW, "=>")
+  //inline val SUBTYPE = 80;          enter(SUBTYPE, "<:")
+  //inline val SUPERTYPE = 81;        enter(SUPERTYPE, ">:")
+  //inline val HASH = 82;             enter(HASH, "#")
+  inline val AT = 83;               enter(AT, "@")
+  //inline val VIEWBOUND = 84;        enter(VIEWBOUND, "<%")
 
   val keywords: TokenSet
 
   def isKeyword(token: Token): Boolean = keywords contains token
 
   /** parentheses */
-  final val LPAREN = 91;           enter(LPAREN, "'('")
-  final val RPAREN = 92;           enter(RPAREN, "')'")
-  final val LBRACKET = 93;         enter(LBRACKET, "'['")
-  final val RBRACKET = 94;         enter(RBRACKET, "']'")
-  final val LBRACE = 95;           enter(LBRACE, "'{'")
-  final val RBRACE = 96;           enter(RBRACE, "'}'")
-  final val INDENT = 97;           enter(INDENT, "indent")
-  final val OUTDENT = 98;          enter(OUTDENT, "unindent")
-
-  final val firstParen = LPAREN
-  final val lastParen = OUTDENT
+  inline val LPAREN = 91;           enter(LPAREN, "'('")
+  inline val RPAREN = 92;           enter(RPAREN, "')'")
+  inline val LBRACKET = 93;         enter(LBRACKET, "'['")
+  inline val RBRACKET = 94;         enter(RBRACKET, "']'")
+  inline val LBRACE = 95;           enter(LBRACE, "'{'")
+  inline val RBRACE = 96;           enter(RBRACE, "'}'")
+  inline val INDENT = 97;           enter(INDENT, "indent")
+  inline val OUTDENT = 98;          enter(OUTDENT, "unindent")
+
+  inline val firstParen = LPAREN
+  inline val lastParen = OUTDENT
 
   def buildKeywordArray(keywords: TokenSet): (Int, Array[Int]) = {
     def start(tok: Token) = tokenString(tok).toTermName.asSimpleName.start
@@ -148,13 +148,13 @@ abstract class TokensCommon {
 }
 
 object Tokens extends TokensCommon {
-  final val minToken = EMPTY
+  inline val minToken = EMPTY
   final def maxToken: Int = XMLSTART
 
-  final val INTERPOLATIONID = 12;  enter(INTERPOLATIONID, "string interpolator")
-  final val QUOTEID = 13;          enter(QUOTEID, "quoted identifier") // TODO: deprecate
+  inline val INTERPOLATIONID = 12;  enter(INTERPOLATIONID, "string interpolator")
+  inline val QUOTEID = 13;          enter(QUOTEID, "quoted identifier") // TODO: deprecate
 
-  final val BACKQUOTED_IDENT = 15; enter(BACKQUOTED_IDENT, "identifier", "backquoted ident")
+  inline val BACKQUOTED_IDENT = 15; enter(BACKQUOTED_IDENT, "identifier", "backquoted ident")
 
   final val identifierTokens: TokenSet = BitSet(IDENTIFIER, BACKQUOTED_IDENT)
 
@@ -162,52 +162,53 @@ object Tokens extends TokensCommon {
     token >= IDENTIFIER && token <= BACKQUOTED_IDENT
 
   /** alphabetic keywords */
-  final val WITH = 26;             enter(WITH, "with")
-  final val CASE = 28;             enter(CASE, "case")
-  final val CASECLASS = 29;        enter(CASECLASS, "case class")
-  final val CASEOBJECT = 30;       enter(CASEOBJECT, "case object")
-  final val VAL = 31;              enter(VAL, "val")
-  final val IMPLICIT = 37;         enter(IMPLICIT, "implicit")
-  final val VAR = 38;              enter(VAR, "var")
-  final val DEF = 39;              enter(DEF, "def")
-  final val TYPE = 40;             enter(TYPE, "type")
-  final val OBJECT = 44;           enter(OBJECT, "object")
-  final val YIELD = 48;            enter(YIELD, "yield")
-  final val TRAIT = 50;            enter(TRAIT, "trait")
-  final val SEALED = 51;           enter(SEALED, "sealed")
-  final val MATCH = 58;            enter(MATCH, "match")
-  final val LAZY = 59;             enter(LAZY, "lazy")
-  final val THEN = 60;             enter(THEN, "then")
-  final val FORSOME = 61;          enter(FORSOME, "forSome") // TODO: deprecate
-  final val ENUM = 62;             enter(ENUM, "enum")
-  final val GIVEN = 63;            enter(GIVEN, "given")
-  final val EXPORT = 64;           enter(EXPORT, "export")
-  final val MACRO = 65;            enter(MACRO, "macro") // TODO: remove
+  inline val WITH = 26;             enter(WITH, "with")
+  inline val CASE = 28;             enter(CASE, "case")
+  inline val CASECLASS = 29;        enter(CASECLASS, "case class")
+  inline val CASEOBJECT = 30;       enter(CASEOBJECT, "case object")
+  inline val VAL = 31;              enter(VAL, "val")
+  inline val IMPLICIT = 37;         enter(IMPLICIT, "implicit")
+  inline val VAR = 38;              enter(VAR, "var")
+  inline val DEF = 39;              enter(DEF, "def")
+  inline val TYPE = 40;             enter(TYPE, "type")
+  inline val OBJECT = 44;           enter(OBJECT, "object")
+  inline val YIELD = 48;            enter(YIELD, "yield")
+  inline val TRAIT = 50;            enter(TRAIT, "trait")
+  inline val SEALED = 51;           enter(SEALED, "sealed")
+  inline val MATCH = 58;            enter(MATCH, "match")
+  inline val LAZY = 59;             enter(LAZY, "lazy")
+  inline val THEN = 60;             enter(THEN, "then")
+  inline val FORSOME = 61;          enter(FORSOME, "forSome") // TODO: deprecate
+  inline val ENUM = 62;             enter(ENUM, "enum")
+  inline val GIVEN = 63;            enter(GIVEN, "given")
+  inline val EXPORT = 64;           enter(EXPORT, "export")
+  inline val MACRO = 65;            enter(MACRO, "macro") // TODO: remove
+  inline val END = 66;              enter(END, "end")
 
   /** special symbols */
-  final val NEWLINE = 78;          enter(NEWLINE, "end of statement", "new line")
-  final val NEWLINES = 79;         enter(NEWLINES, "end of statement", "new lines")
+  inline val NEWLINE = 78;          enter(NEWLINE, "end of statement", "new line")
+  inline val NEWLINES = 79;         enter(NEWLINES, "end of statement", "new lines")
 
   /** special keywords */
-  final val USCORE = 73;           enter(USCORE, "_")
-  final val LARROW = 76;           enter(LARROW, "<-")
-  final val ARROW = 77;            enter(ARROW, "=>")
-  final val SUBTYPE = 80;          enter(SUBTYPE, "<:")
-  final val SUPERTYPE = 81;        enter(SUPERTYPE, ">:")
-  final val HASH = 82;             enter(HASH, "#")
-  final val VIEWBOUND = 84;        enter(VIEWBOUND, "<%")
-  final val TLARROW = 85;          enter(TLARROW, "=>>")
-  final val CTXARROW = 86;         enter(CTXARROW, "?=>")
+  inline val USCORE = 73;           enter(USCORE, "_")
+  inline val LARROW = 76;           enter(LARROW, "<-")
+  inline val ARROW = 77;            enter(ARROW, "=>")
+  inline val SUBTYPE = 80;          enter(SUBTYPE, "<:")
+  inline val SUPERTYPE = 81;        enter(SUPERTYPE, ">:")
+  inline val HASH = 82;             enter(HASH, "#")
+  inline val VIEWBOUND = 84;        enter(VIEWBOUND, "<%")
+  inline val TLARROW = 85;          enter(TLARROW, "=>>")
+  inline val CTXARROW = 86;         enter(CTXARROW, "?=>")
 
-  final val QUOTE = 87;            enter(QUOTE, "'")
+  inline val QUOTE = 87;            enter(QUOTE, "'")
 
-  final val COLONEOL = 88;         enter(COLONEOL, ":", ": at eol")
-  final val SELFARROW = 89;        enter(SELFARROW, "=>") // reclassified ARROW following self-type
+  inline val COLONEOL = 88;         enter(COLONEOL, ":", ": at eol")
+  inline val SELFARROW = 89;        enter(SELFARROW, "=>") // reclassified ARROW following self-type
 
   /** XML mode */
-  final val XMLSTART = 99;         enter(XMLSTART, "$XMLSTART$<") // TODO: deprecate
+  inline val XMLSTART = 99;         enter(XMLSTART, "$XMLSTART$<") // TODO: deprecate
 
-  final val alphaKeywords: TokenSet = tokenRange(IF, MACRO)
+  final val alphaKeywords: TokenSet = tokenRange(IF, END)
   final val symbolicKeywords: TokenSet = tokenRange(USCORE, CTXARROW)
   final val keywords: TokenSet = alphaKeywords | symbolicKeywords
 
@@ -254,9 +255,9 @@ object Tokens extends TokensCommon {
   final val mustStartStatTokens: TokenSet = defIntroTokens | modifierTokens | BitSet(IMPORT, EXPORT, PACKAGE)
 
   final val canStartStatTokens2: TokenSet = canStartExprTokens2 | mustStartStatTokens | BitSet(
-    AT, CASE)
+    AT, CASE, END) // END is included since it might be tested before being converted back to IDENTIFIER
   final val canStartStatTokens3: TokenSet = canStartExprTokens3 | mustStartStatTokens | BitSet(
-    AT, CASE)
+    AT, CASE, END)
 
   final val canEndStatTokens: TokenSet = atomicExprTokens | BitSet(TYPE, GIVEN, RPAREN, RBRACE, RBRACKET, OUTDENT)
 
diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala
index a4db7ef44a86..14cb83fb6c6c 100644
--- a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala
+++ b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala
@@ -3,7 +3,7 @@ package plugins
 
 import core._
 import Contexts._
-import config.{ PathResolver, Properties }
+import config.{ PathResolver, Feature }
 import dotty.tools.io._
 import Phases._
 import config.Printers.plugins.{ println => debug }
@@ -125,7 +125,7 @@ trait Plugins {
     val updatedPlan = Plugins.schedule(plan, pluginPhases)
 
     // add research plugins
-    if (Properties.experimental)
+    if (Feature.isExperimentalEnabled)
       plugins.collect { case p: ResearchPlugin => p }.foldRight(updatedPlan) {
         (plug, plan) => plug.init(options(plug), plan)
       }
diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala
index be25469c549d..845283f69a0f 100644
--- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala
+++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala
@@ -36,7 +36,7 @@ object Formatting {
               case _ => ex.getMessage
             s"[cannot display due to $msg, raw string = ${arg.toString}]"
         }
-      case _ => arg.toString
+      case _ => String.valueOf(arg)
     }
 
     private def treatArg(arg: Any, suffix: String)(using Context): (Any, String) = arg match {
diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala
index 618f7be6dac3..157b424b6251 100644
--- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala
+++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala
@@ -12,7 +12,6 @@ import typer.Implicits._
 import typer.ImportInfo
 import Variances.varianceSign
 import util.SourcePosition
-import java.lang.Integer.toOctalString
 import scala.util.control.NonFatal
 import scala.annotation.switch
 
@@ -30,6 +29,7 @@ class PlainPrinter(_ctx: Context) extends Printer {
   protected def maxToTextRecursions: Int = 100
 
   protected def showUniqueIds = ctx.settings.uniqid.value || Printer.debugPrintUnique
+  protected def showNestingLevel = ctx.settings.YprintLevel.value
 
   protected final def limiter: MessageLimiter = ctx.property(MessageLimiter).get
 
@@ -66,6 +66,8 @@ class PlainPrinter(_ctx: Context) extends Printer {
         case tp @ AppliedType(tycon, args) =>
           if (defn.isCompiletimeAppliedType(tycon.typeSymbol)) tp.tryCompiletimeConstantFold
           else tycon.dealias.appliedTo(args)
+        case tp: NamedType =>
+          tp.reduceProjection
         case _ =>
           tp
       }
@@ -156,7 +158,12 @@ class PlainPrinter(_ctx: Context) extends Printer {
       case tp: TermParamRef =>
         ParamRefNameString(tp) ~ lambdaHash(tp.binder) ~ ".type"
       case tp: TypeParamRef =>
-        ParamRefNameString(tp) ~ lambdaHash(tp.binder)
+        val suffix =
+          if showNestingLevel then
+            val tvar = ctx.typerState.constraint.typeVarOfParam(tp)
+            if tvar.exists then s"#${tvar.asInstanceOf[TypeVar].nestingLevel.toString}" else ""
+          else ""
+        ParamRefNameString(tp) ~ lambdaHash(tp.binder) ~ suffix
       case tp: SingletonType =>
         toTextSingleton(tp)
       case AppliedType(tycon, args) =>
@@ -178,13 +185,14 @@ class PlainPrinter(_ctx: Context) extends Printer {
       case MatchType(bound, scrutinee, cases) =>
         changePrec(GlobalPrec) {
           def caseText(tp: Type): Text = tp match {
+            case tp: HKTypeLambda => caseText(tp.resultType)
             case defn.MatchCase(pat, body) => "case " ~ toText(pat) ~ " => " ~ toText(body)
             case _ => "case " ~ toText(tp)
           }
           def casesText = Text(cases.map(caseText), "\n")
-            atPrec(InfixPrec) { toText(scrutinee) } ~
-            keywordStr(" match ") ~ "{" ~ casesText ~ "}" ~
-            (" <: " ~ toText(bound) provided !bound.isAny)
+          atPrec(InfixPrec) { toText(scrutinee) } ~
+          keywordStr(" match ") ~ "{" ~ casesText ~ "}" ~
+          (" <: " ~ toText(bound) provided !bound.isAny)
         }.close
       case tp: PreviousErrorType if ctx.settings.XprintTypes.value =>
         "" // do not print previously reported error message because they may try to print this error type again recuresevely
@@ -219,7 +227,7 @@ class PlainPrinter(_ctx: Context) extends Printer {
           toTextGlobal(tp.resultType)
         }
       case AnnotatedType(tpe, annot) =>
-        if annot.symbol == defn.InlineParamAnnot then toText(tpe)
+        if annot.symbol == defn.InlineParamAnnot || annot.symbol == defn.ErasedParamAnnot then toText(tpe)
         else toTextLocal(tpe) ~ " " ~ toText(annot)
       case tp: TypeVar =>
         if (tp.isInstantiated)
@@ -252,11 +260,11 @@ class PlainPrinter(_ctx: Context) extends Printer {
 
   protected def paramsText(lam: LambdaType): Text = {
     def paramText(name: Name, tp: Type) =
-      toText(name) ~ lambdaHash(lam) ~ toTextRHS(tp)
+      toText(name) ~ lambdaHash(lam) ~ toTextRHS(tp, isParameter = true)
     Text(lam.paramNames.lazyZip(lam.paramInfos).map(paramText), ", ")
   }
 
-  protected def ParamRefNameString(name: Name): String = name.toString
+  protected def ParamRefNameString(name: Name): String = nameString(name)
 
   protected def ParamRefNameString(param: ParamRef): String =
     ParamRefNameString(param.binder.paramNames(param.paramNum))
@@ -271,9 +279,13 @@ class PlainPrinter(_ctx: Context) extends Printer {
       catch { case ex: NullPointerException => "" }
     else ""
 
-  /** If -uniqid is set, the unique id of symbol, after a # */
+  /** A string to append to a symbol composed of:
+   *  - if -uniqid is set, its unique id after a #.
+   *  - if -Yprint-level, its nesting level after a %.
+   */
   protected def idString(sym: Symbol): String =
-    if (showUniqueIds || Printer.debugPrintUnique) "#" + sym.id else ""
+    (if (showUniqueIds || Printer.debugPrintUnique) "#" + sym.id else "") +
+    (if (showNestingLevel) "%" + sym.nestingLevel else "")
 
   def nameString(sym: Symbol): String =
     simpleNameString(sym) + idString(sym) // + "<" + (if (sym.exists) sym.owner else "") + ">"
@@ -348,15 +360,17 @@ class PlainPrinter(_ctx: Context) extends Printer {
     case None => "?"
   }
 
-  protected def decomposeLambdas(bounds: TypeBounds): (String, TypeBounds) =
-    def decompose(tp: Type) = tp.stripTypeVar match
+  protected def decomposeLambdas(bounds: TypeBounds): (Text, TypeBounds) =
+    def decompose(tp: Type): (Text, Type) = tp.stripTypeVar match
       case lam: HKTypeLambda =>
         val names =
           if lam.isDeclaredVarianceLambda then
             lam.paramNames.lazyZip(lam.declaredVariances).map((name, v) =>
               varianceSign(v) + name)
-          else lam.paramNames
-        (names.mkString("[", ", ", "]"), lam.resType)
+          else lam.paramNames.map(_.toString)
+        val infos = lam.paramInfos.map(toText)
+        val tparams = names.zip(infos).map(_ ~ _)
+        ("[" ~ Text(tparams, ",") ~ "]", lam.resType)
       case _ =>
         ("", tp)
     bounds match
@@ -370,7 +384,7 @@ class PlainPrinter(_ctx: Context) extends Printer {
   end decomposeLambdas
 
   /** String representation of a definition's type following its name */
-  protected def toTextRHS(tp: Type): Text = controlled {
+  protected def toTextRHS(tp: Type, isParameter: Boolean = false): Text = controlled {
     homogenize(tp) match {
       case tp: TypeBounds =>
         val (tparamStr, rhs) = decomposeLambdas(tp)
@@ -400,7 +414,12 @@ class PlainPrinter(_ctx: Context) extends Printer {
       case mt: MethodType =>
         toTextGlobal(mt)
       case tp: ExprType =>
-        ": => " ~ toTextGlobal(tp.widenExpr)
+        // parameterless methods require special treatment, see #11201
+        (if (isParameter) ": => " else ": ") ~ toTextGlobal(tp.widenExpr)
+      case tp: PolyType =>
+        "[" ~ paramsText(tp) ~ "]"
+        ~ (Str(": ") provided !tp.resultType.isInstanceOf[MethodType])
+        ~ toTextGlobal(tp.resultType)
       case tp =>
         ": " ~ toTextGlobal(tp)
     }
@@ -523,7 +542,7 @@ class PlainPrinter(_ctx: Context) extends Printer {
     case '"' => "\\\""
     case '\'' => "\\\'"
     case '\\' => "\\\\"
-    case _ => if (ch.isControl) "\\0" + toOctalString(ch) else String.valueOf(ch)
+    case _ => if ch.isControl then f"${"\\"}u${ch.toInt}%04x" else String.valueOf(ch)
   }
 
   def toText(const: Constant): Text = const.tag match {
@@ -536,7 +555,16 @@ class PlainPrinter(_ctx: Context) extends Printer {
     case _ => literalText(String.valueOf(const.value))
   }
 
-  def toText(annot: Annotation): Text = s"@${annot.symbol.name}" // for now
+  /** Usual target for `Annotation#toText`, overridden in RefinedPrinter */
+  def annotText(annot: Annotation): Text = s"@${annot.symbol.name}"
+
+  def toText(annot: Annotation): Text = annot.toText(this)
+
+  def toText(param: LambdaParam): Text =
+    varianceSign(param.paramVariance)
+    ~ toText(param.paramName)
+    ~ (if param.isTypeParam then "" else ": ")
+    ~ toText(param.paramInfo)
 
   protected def escapedString(str: String): String = str flatMap escapedChar
 
@@ -561,7 +589,7 @@ class PlainPrinter(_ctx: Context) extends Printer {
         Text()
 
     nodeName ~ "(" ~ elems ~ tpSuffix ~ ")" ~ (Str(tree.sourcePos.toString) provided printDebug)
-  }.close // todo: override in refined printer
+  }.close
 
   def toText(pos: SourcePosition): Text =
     if (!pos.exists) ""
@@ -591,6 +619,47 @@ class PlainPrinter(_ctx: Context) extends Printer {
       case _ => "{...}"
     s"import $exprStr.$selectorStr"
 
+  def toText(c: OrderingConstraint): Text =
+    val savedConstraint = ctx.typerState.constraint
+    try
+      // The current TyperState constraint determines how type variables are printed
+      ctx.typerState.constraint = c
+      def entryText(tp: Type) = tp match {
+        case tp: TypeBounds =>
+          toText(tp)
+        case _ =>
+          " := " ~ toText(tp)
+      }
+      val indent = 3
+      val uninstVarsText = " uninstantiated variables: " ~
+        Text(c.uninstVars.map(toText), ", ")
+      val constrainedText =
+        " constrained types: " ~ Text(c.domainLambdas.map(toText), ", ")
+      val boundsText =
+        " bounds: " ~ {
+          val assocs =
+            for (param <- c.domainParams)
+            yield (" " * indent) ~ toText(param) ~ entryText(c.entry(param))
+          Text(assocs, "\n")
+        }
+      val orderingText =
+        " ordering: " ~ {
+          val deps =
+            for {
+              param <- c.domainParams
+              ups = c.minUpper(param)
+              if ups.nonEmpty
+            }
+            yield
+              (" " * indent) ~ toText(param) ~ " <: " ~
+                Text(ups.map(toText), ", ")
+          Text(deps, "\n")
+        }
+      //Printer.debugPrintUnique = false
+      Text.lines(List(uninstVarsText, constrainedText, boundsText, orderingText))
+    finally
+      ctx.typerState.constraint = savedConstraint
+
   def plain: PlainPrinter = this
 
   protected def keywordStr(text: String): String = coloredStr(text, SyntaxHighlighting.KeywordColor)
diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala
index 8584c889eeda..550bdb94af4f 100644
--- a/compiler/src/dotty/tools/dotc/printing/Printer.scala
+++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala
@@ -4,7 +4,8 @@ package printing
 
 import core._
 import Texts._, ast.Trees._
-import Types.{Type, SingletonType}, Symbols.Symbol, Scopes.Scope, Constants.Constant,
+import Types.{Type, SingletonType, LambdaParam},
+       Symbols.Symbol, Scopes.Scope, Constants.Constant,
        Names.Name, Denotations._, Annotations.Annotation
 import typer.Implicits.SearchResult
 import util.SourcePosition
@@ -118,6 +119,9 @@ abstract class Printer {
   /** A description of sym's location */
   def extendedLocationText(sym: Symbol): Text
 
+  /** Textual description of regular annotation in terms of its tree */
+  def annotText(annot: Annotation): Text
+
   /** Textual representation of denotation */
   def toText(denot: Denotation): Text
 
@@ -130,6 +134,9 @@ abstract class Printer {
   /** Textual representation of type */
   def toText(tp: Type): Text
 
+  /** Textual representation of lambda param */
+  def toText(tree: LambdaParam): Text
+
   /** Textual representation of all symbols in given list,
    *  using `dclText` for displaying each.
    */
@@ -150,6 +157,9 @@ abstract class Printer {
   /** Textual representation of info relating to an import clause */
   def toText(result: ImportInfo): Text
 
+  /** Textual representation of a constraint */
+  def toText(c: OrderingConstraint): Text
+
   /** Render element within highest precedence */
   def toTextLocal(elem: Showable): Text =
     atPrec(DotPrec) { elem.toText(this) }
diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala
index cf60a152547b..cf5942a178f0 100644
--- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala
+++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala
@@ -1,4 +1,5 @@
-package dotty.tools.dotc
+package dotty.tools
+package dotc
 package printing
 
 import core._
@@ -20,6 +21,7 @@ import typer.ProtoTypes._
 import Trees._
 import TypeApplications._
 import Decorators._
+import NameKinds.{WildcardParamName, DefaultGetterName}
 import util.Chars.isOperatorPart
 import transform.TypeUtils._
 import transform.SymUtils._
@@ -76,9 +78,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
   }
 
   override def nameString(name: Name): String =
-    if ctx.settings.YdebugNames.value
-    then name.debugString
-    else super.nameString(name)
+    def strippedName = if printDebug then name else name.stripModuleClassSuffix
+    if ctx.settings.YdebugNames.value then strippedName.debugString
+    else if name.isTypeName && name.is(WildcardParamName) && !printDebug then "_"
+    else super.nameString(strippedName)
 
   override protected def simpleNameString(sym: Symbol): String =
     nameString(if (ctx.property(XprintMode).isEmpty) sym.initial.name else sym.name)
@@ -105,15 +108,16 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
 
   override def toTextPrefix(tp: Type): Text = controlled {
     def isOmittable(sym: Symbol) =
-      if (printDebug) false
-      else if (homogenizedView) isEmptyPrefix(sym) // drop  and anonymous classes, but not scala, Predef.
+      if printDebug then false
+      else if homogenizedView then isEmptyPrefix(sym) // drop  and anonymous classes, but not scala, Predef.
+      else if sym.isPackageObject then isOmittablePrefix(sym.owner)
       else isOmittablePrefix(sym)
     tp match {
       case tp: ThisType if isOmittable(tp.cls) =>
         ""
       case tp @ TermRef(pre, _) =>
         val sym = tp.symbol
-        if (sym.isPackageObject && !homogenizedView) toTextPrefix(pre)
+        if sym.isPackageObject && !homogenizedView && !printDebug then toTextPrefix(pre)
         else if (isOmittable(sym)) ""
         else super.toTextPrefix(tp)
       case _ => super.toTextPrefix(tp)
@@ -143,7 +147,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
       changePrec(GlobalPrec) {
         val argStr: Text =
           if args.length == 2
-             && !defn.isTupleType(args.head)
+             && !defn.isTupleNType(args.head)
              && !isGiven && !isErased
           then
             atPrec(InfixPrec) { argText(args.head) }
@@ -155,22 +159,41 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
         argStr ~ " " ~ arrow(isGiven) ~ " " ~ argText(args.last)
       }
 
-    def toTextDependentFunction(appType: MethodType): Text =
-      "("
-      ~ keywordText("erased ").provided(appType.isErasedMethod)
-      ~ paramsText(appType)
-      ~ ") "
-      ~ arrow(appType.isImplicitMethod)
-      ~ " "
-      ~ toText(appType.resultType)
+    def toTextMethodAsFunction(info: Type): Text = info match
+      case info: MethodType =>
+        changePrec(GlobalPrec) {
+          "("
+          ~ keywordText("erased ").provided(info.isErasedMethod)
+          ~ ( if info.isParamDependent || info.isResultDependent
+              then paramsText(info)
+              else argsText(info.paramInfos)
+            )
+          ~ ") "
+          ~ arrow(info.isImplicitMethod)
+          ~ " "
+          ~ toTextMethodAsFunction(info.resultType)
+        }
+      case info: PolyType =>
+        changePrec(GlobalPrec) {
+          "["
+          ~ paramsText(info)
+          ~ "] => "
+          ~ toTextMethodAsFunction(info.resultType)
+        }
+      case _ =>
+        toText(info)
 
-    def isInfixType(tp: Type): Boolean = tp match {
+    def isInfixType(tp: Type): Boolean = tp match
       case AppliedType(tycon, args) =>
-        args.length == 2 &&
-        tycon.typeSymbol.getAnnotation(defn.ShowAsInfixAnnot).map(_.argumentConstant(0).forall(_.booleanValue))
-          .getOrElse(!Character.isUnicodeIdentifierStart(tycon.typeSymbol.name.toString.head))
+        args.length == 2
+        && {
+          val sym = tycon.typeSymbol
+          sym.is(Infix)
+          || sym.getAnnotation(defn.ShowAsInfixAnnot)
+              .exists(_.argumentConstant(0).forall(_.booleanValue))
+          || !Character.isUnicodeIdentifierStart(tycon.typeSymbol.name.toString.head)
+        }
       case _ => false
-    }
 
     def tyconName(tp: Type): Name = tp.typeSymbol.name
     def checkAssocMismatch(tp: Type, isRightAssoc: Boolean) = tp match {
@@ -222,8 +245,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
       if !printDebug && appliedText(tp.asInstanceOf[HKLambda].resType).isEmpty =>
         // don't eta contract if the application would be printed specially
         toText(tycon)
-      case tp: RefinedType if defn.isFunctionType(tp) && !printDebug =>
-        toTextDependentFunction(tp.refinedInfo.asInstanceOf[MethodType])
+      case tp: RefinedType
+      if (defn.isFunctionType(tp) || (tp.parent.typeSymbol eq defn.PolyFunctionClass))
+          && !printDebug =>
+        toTextMethodAsFunction(tp.refinedInfo)
       case tp: TypeRef =>
         if (tp.symbol.isAnonymousClass && !showUniqueIds)
           toText(tp.info)
@@ -237,9 +262,16 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
       case ErasedValueType(tycon, underlying) =>
         "ErasedValueType(" ~ toText(tycon) ~ ", " ~ toText(underlying) ~ ")"
       case tp: ClassInfo =>
+        if tp.cls.derivesFrom(defn.PolyFunctionClass) then
+          tp.member(nme.apply).info match
+            case info: PolyType => return toTextMethodAsFunction(info)
+            case _ =>
         toTextParents(tp.parents) ~~ "{...}"
       case JavaArrayType(elemtp) =>
         toText(elemtp) ~ "[]"
+      case tp: LazyRef if !printDebug =>
+        try toText(tp.ref)
+        catch case ex: Throwable => "..."
       case tp: SelectionProto =>
         "?{ " ~ toText(tp.name) ~
            (Str(" ") provided !tp.name.toSimpleName.last.isLetterOrDigit) ~
@@ -490,6 +522,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
         ""
       case TypeTree() =>
         typeText(toText(tree.typeOpt))
+        ~ Str("(inf)").provided(tree.isInstanceOf[InferredTypeTree] && printDebug)
       case SingletonTypeTree(ref) =>
         toTextLocal(ref) ~ "." ~ keywordStr("type")
       case RefinedTypeTree(tpt, refines) =>
@@ -499,6 +532,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
           changePrec(OrTypePrec) { toText(args(0)) ~ " | " ~ atPrec(OrTypePrec + 1) { toText(args(1)) } }
         else if (tpt.symbol == defn.andType && args.length == 2)
           changePrec(AndTypePrec) { toText(args(0)) ~ " & " ~ atPrec(AndTypePrec + 1) { toText(args(1)) } }
+        else if defn.isFunctionClass(tpt.symbol)
+            && tpt.isInstanceOf[TypeTree] && tree.hasType && !printDebug
+        then changePrec(GlobalPrec) { toText(tree.typeOpt) }
         else args match
           case arg :: _ if arg.isTerm =>
             toTextLocal(tpt) ~ "(" ~ Text(args.map(argText), ", ") ~ ")"
@@ -571,7 +607,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
       case tree: Template =>
         toTextTemplate(tree)
       case Annotated(arg, annot) =>
-        toTextLocal(arg) ~~ annotText(annot)
+        toTextLocal(arg) ~~ annotText(annot.symbol.enclosingClass, annot)
       case EmptyTree =>
         ""
       case TypedSplice(t) =>
@@ -663,6 +699,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
         "Thicket {" ~~ toTextGlobal(trees, "\n") ~~ "}"
       case MacroTree(call) =>
         keywordStr("macro ") ~ toTextGlobal(call)
+      case Hole(isTermHole, idx, args) =>
+        val (prefix, postfix) = if isTermHole then ("{{{ ", " }}}") else ("[[[ ", " ]]]")
+        val argsText = toTextGlobal(args, ", ")
+        prefix ~~ idx.toString ~~ "|" ~~ argsText ~~ postfix
       case _ =>
         tree.fallbackToText(this)
     }
@@ -758,7 +798,15 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
   protected def optAscription[T >: Untyped](tpt: Tree[T]): Text = optText(tpt)(": " ~ _)
 
   private def idText(tree: untpd.Tree): Text =
-    if showUniqueIds && tree.hasType && tree.symbol.exists then s"#${tree.symbol.id}" else ""
+    (if showUniqueIds && tree.hasType && tree.symbol.exists then s"#${tree.symbol.id}" else "") ~
+    (if showNestingLevel then tree.typeOpt match
+      case tp: NamedType if !tp.symbol.isStatic => s"%${tp.symbol.nestingLevel}"
+      case tp: TypeVar => s"%${tp.nestingLevel}"
+      case tp: TypeParamRef => ctx.typerState.constraint.typeVarOfParam(tp) match
+        case tvar: TypeVar => s"%${tvar.nestingLevel}"
+        case _ => ""
+      case _ => ""
+     else "")
 
   private def useSymbol(tree: untpd.Tree) =
     tree.hasType && tree.symbol.exists && ctx.settings.YprintSyms.value
@@ -924,14 +972,22 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
     keywordStr("package ") ~ toTextPackageId(tree.pid) ~ bodyText
   }
 
+  /** Textual representation of an instance creation expression without the leading `new` */
   protected def constrText(tree: untpd.Tree): Text = toTextLocal(tree).stripPrefix(keywordStr("new ")) // DD
 
-  protected def annotText(tree: untpd.Tree): Text = "@" ~ constrText(tree) // DD
-
-  override def annotsText(sym: Symbol): Text =
-    Text(sym.annotations.map(ann =>
-      if ann.symbol == defn.BodyAnnot then Str(simpleNameString(ann.symbol))
-      else annotText(ann.tree)))
+  protected def annotText(sym: Symbol, tree: untpd.Tree): Text =
+    def recur(t: untpd.Tree): Text = t match
+      case Apply(fn, Nil) => recur(fn)
+      case Apply(fn, args) =>
+        val explicitArgs = args.filterNot(_.symbol.name.is(DefaultGetterName))
+        recur(fn) ~ "(" ~ toTextGlobal(explicitArgs, ", ") ~ ")"
+      case TypeApply(fn, args) => recur(fn) ~ "[" ~ toTextGlobal(args, ", ") ~ "]"
+      case Select(qual, nme.CONSTRUCTOR) => recur(qual)
+      case New(tpt) => recur(tpt)
+      case _ =>
+        val annotSym = sym.orElse(tree.symbol.enclosingClass)
+        s"@${if annotSym.exists then annotSym.name.toString else t.show}"
+    recur(tree)
 
   protected def modText(mods: untpd.Modifiers, sym: Symbol, kw: String, isType: Boolean): Text = { // DD
     val suppressKw = if (enclDefIsClass) mods.isAllOf(LocalParam) else mods.is(Param)
@@ -944,12 +1000,16 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
     if (rawFlags.is(Param)) flagMask = flagMask &~ Given &~ Erased
     val flags = rawFlags & flagMask
     var flagsText = toTextFlags(sym, flags)
-    val annotations =
-      if (sym.exists) sym.annotations.filterNot(ann => dropAnnotForModText(ann.symbol)).map(_.tree)
-      else mods.annotations.filterNot(tree => dropAnnotForModText(tree.symbol))
-    Text(annotations.map(annotText), " ") ~~ flagsText ~~ (Str(kw) provided !suppressKw)
+    val annotTexts =
+      if sym.exists then
+        sym.annotations.filterNot(ann => dropAnnotForModText(ann.symbol)).map(toText)
+      else
+        mods.annotations.filterNot(tree => dropAnnotForModText(tree.symbol)).map(annotText(NoSymbol, _))
+    Text(annotTexts, " ") ~~ flagsText ~~ (Str(kw) provided !suppressKw)
   }
 
+  override def annotText(annot: Annotation): Text = annotText(annot.symbol, annot.tree)
+
   def optText(name: Name)(encl: Text => Text): Text =
     if (name.isEmpty) "" else encl(toText(name))
 
@@ -959,9 +1019,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
   def optText[T >: Untyped](tree: List[Tree[T]])(encl: Text => Text): Text =
     if (tree.exists(!_.isEmpty)) encl(blockText(tree)) else ""
 
-  override protected def ParamRefNameString(name: Name): String =
-    name.toString
-
   override protected def treatAsTypeParam(sym: Symbol): Boolean = sym.is(TypeParam)
 
   override protected def treatAsTypeArg(sym: Symbol): Boolean =
diff --git a/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala b/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala
index cce2c6d00859..623f540bd721 100644
--- a/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala
+++ b/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala
@@ -17,7 +17,7 @@ import java.util.Arrays
 object SyntaxHighlighting {
 
   /** if true, log erroneous positions being highlighted */
-  private final val debug = true
+  private inline val debug = true
 
   // Keep in sync with SyntaxHighlightingTests
   val NoColor: String         = Console.RESET
diff --git a/compiler/src/dotty/tools/dotc/printing/Texts.scala b/compiler/src/dotty/tools/dotc/printing/Texts.scala
index 7df464ae74b8..9a3aac39ed18 100644
--- a/compiler/src/dotty/tools/dotc/printing/Texts.scala
+++ b/compiler/src/dotty/tools/dotc/printing/Texts.scala
@@ -106,11 +106,13 @@ object Texts {
       case Str(s, lines) =>
         if (numberWidth != 0) {
           val ln = lines.show
-          val pad = (numberWidth - ln.length - 1)
-          assert(pad >= 0)
-          sb.append(" " * pad)
-          sb.append(ln)
-          sb.append("|")
+          if (ln.nonEmpty) {
+            val pad = (numberWidth - ln.length - 1)
+            assert(pad >= 0)
+            sb.append(" " * pad)
+            sb.append(ln)
+            sb.append("|")
+          }
         }
         sb.append(s)
       case _ =>
diff --git a/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala b/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala
index 90a208814f2a..628afe4504bd 100644
--- a/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala
+++ b/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala
@@ -14,6 +14,6 @@ object MacroExpansion {
     ctx.property(MacroExpansionPosition)
 
   def context(inlinedFrom: tpd.Tree)(using Context): Context =
-    ctx.fresh.setProperty(MacroExpansionPosition, SourcePosition(inlinedFrom.source, inlinedFrom.span)).setTypeAssigner(new Typer).withSource(inlinedFrom.source)
+    QuotesCache.init(ctx.fresh).setProperty(MacroExpansionPosition, SourcePosition(inlinedFrom.source, inlinedFrom.span)).setTypeAssigner(new Typer(ctx.nestingLevel + 1)).withSource(inlinedFrom.source)
 }
 
diff --git a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala
index fd118eb43bb5..8c72177a76d0 100644
--- a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala
+++ b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala
@@ -11,7 +11,6 @@ import dotty.tools.dotc.core.NameKinds
 import dotty.tools.dotc.core.Mode
 import dotty.tools.dotc.core.Symbols._
 import dotty.tools.dotc.core.Types._
-import dotty.tools.dotc.core.tasty.TreePickler.Hole
 import dotty.tools.dotc.core.tasty.{ PositionPickler, TastyPickler, TastyPrinter }
 import dotty.tools.dotc.core.tasty.DottyUnpickler
 import dotty.tools.dotc.core.tasty.TreeUnpickler.UnpickleMode
@@ -92,11 +91,6 @@ object PickledQuotes {
               val quotedType = typeHole(idx, reifiedArgs)
               PickledQuotes.quotedTypeToTree(quotedType)
           }
-        case tree: Select =>
-          // Retain selected members
-          val qual = transform(tree.qualifier)
-          qual.select(tree.symbol).withSpan(tree.span)
-
         case tree =>
           if tree.isDef then
             tree.symbol.annotations = tree.symbol.annotations.map {
@@ -175,31 +169,44 @@ object PickledQuotes {
       positionWarnings.foreach(report.warning(_))
 
     val pickled = pickler.assembleParts()
-    quotePickling.println(s"**** pickled quote\n${TastyPrinter.show(pickled)}")
+    quotePickling.println(s"**** pickled quote\n${TastyPrinter.showContents(pickled, ctx.settings.color.value == "never")}")
     pickled
   }
 
   /** Unpickle TASTY bytes into it's tree */
   private def unpickle(pickled: String | List[String], isType: Boolean)(using Context): Tree = {
-    val bytes = pickled match
-      case pickled: String => TastyString.unpickle(pickled)
-      case pickled: List[String] => TastyString.unpickle(pickled)
+    QuotesCache.getTree(pickled) match
+      case Some(tree) =>
+        quotePickling.println(s"**** Using cached quote for TASTY\n$tree")
+        treeOwner(tree) match
+          case Some(owner) =>
+            // Copy the cached tree to make sure all the definitions are unique.
+            TreeTypeMap(oldOwners = List(owner), newOwners = List(owner)).apply(tree)
+          case _ =>
+            tree
+
+      case _ =>
+        val bytes = pickled match
+          case pickled: String => TastyString.unpickle(pickled)
+          case pickled: List[String] => TastyString.unpickle(pickled)
 
-    quotePickling.println(s"**** unpickling quote from TASTY\n${TastyPrinter.show(bytes)}")
+        quotePickling.println(s"**** unpickling quote from TASTY\n${TastyPrinter.showContents(bytes, ctx.settings.color.value == "never")}")
 
-    val mode = if (isType) UnpickleMode.TypeTree else UnpickleMode.Term
-    val unpickler = new DottyUnpickler(bytes, mode)
-    unpickler.enter(Set.empty)
+        val mode = if (isType) UnpickleMode.TypeTree else UnpickleMode.Term
+        val unpickler = new DottyUnpickler(bytes, ctx.tastyVersion, mode)
+        unpickler.enter(Set.empty)
 
-    val tree = unpickler.tree
+        val tree = unpickler.tree
+        QuotesCache(pickled) = tree
 
-    // Make sure trees and positions are fully loaded
-    new TreeTraverser {
-      def traverse(tree: Tree)(using Context): Unit = traverseChildren(tree)
-    }.traverse(tree)
+        // Make sure trees and positions are fully loaded
+        new TreeTraverser {
+          def traverse(tree: Tree)(using Context): Unit = traverseChildren(tree)
+        }.traverse(tree)
 
-    quotePickling.println(i"**** unpickled quote\n$tree")
-    tree
+        quotePickling.println(i"**** unpickled quote\n$tree")
+
+        tree
   }
 
 }
diff --git a/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala b/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala
new file mode 100644
index 000000000000..996eb8a76fbb
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala
@@ -0,0 +1,28 @@
+package dotty.tools.dotc.quoted
+
+import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.util.Property
+import dotty.tools.dotc.reporting.trace
+import dotty.tools.dotc.ast.tpd
+
+import scala.collection.mutable
+
+object QuotesCache {
+  import tpd._
+
+  /** A key to be used in a context property that caches the unpickled trees */
+  private val QuotesCacheKey = new Property.Key[collection.mutable.Map[String | List[String], Tree]]
+
+
+  /** Get the cached tree of the quote */
+  def getTree(pickled: String | List[String])(using Context): Option[Tree] =
+    ctx.property(QuotesCacheKey).get.get(pickled)
+
+  /** Update the cached tree of the quote */
+  def update(pickled: String | List[String], tree: Tree)(using Context): Unit =
+    ctx.property(QuotesCacheKey).get.update(pickled, tree)
+
+  /** Context with a cache for quote trees and tasty bytes */
+  def init(ctx: FreshContext): ctx.type =
+    ctx.setProperty(QuotesCacheKey, collection.mutable.Map.empty)
+}
diff --git a/compiler/src/dotty/tools/dotc/report.scala b/compiler/src/dotty/tools/dotc/report.scala
index 912ae9af52aa..36c857b94e37 100644
--- a/compiler/src/dotty/tools/dotc/report.scala
+++ b/compiler/src/dotty/tools/dotc/report.scala
@@ -21,15 +21,7 @@ object report:
     ctx.reporter.report(new Info(msg, pos.sourcePos))
 
   private def issueWarning(warning: Warning)(using Context): Unit =
-    if (!ctx.settings.silentWarnings.value)
-      if (ctx.settings.XfatalWarnings.value)
-        warning match {
-          case warning: ConditionalWarning if !warning.enablingOption.value =>
-            ctx.reporter.report(warning) // conditional warnings that are not enabled are not fatal
-          case _ =>
-            ctx.reporter.report(warning.toError)
-        }
-      else ctx.reporter.report(warning)
+    ctx.reporter.report(warning)
 
   def deprecationWarning(msg: Message, pos: SrcPos = NoSourcePosition)(using Context): Unit =
     issueWarning(new DeprecationWarning(msg, pos.sourcePos))
diff --git a/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala
index c3d6d042e379..0d2292318a71 100644
--- a/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala
@@ -12,32 +12,26 @@ import Diagnostic.{ Error, ConditionalWarning }
 class ConsoleReporter(
   reader: BufferedReader = Console.in,
   writer: PrintWriter = new PrintWriter(Console.err, true)
-) extends AbstractReporter {
+) extends ConsoleReporter.AbstractConsoleReporter {
+  override def printMessage(msg: String): Unit = { writer.print(msg + "\n"); writer.flush() }
+  override def flush()(using Context): Unit    = writer.flush()
 
-  import Diagnostic._
+  override def doReport(dia: Diagnostic)(using Context): Unit = {
+    super.doReport(dia)
+    dia match
+      case dia: Error if ctx.settings.Xprompt.value => Reporter.displayPrompt(reader, writer)
+      case _                                        =>
+  }
+}
 
-  /** Prints the message. */
-  def printMessage(msg: String): Unit = { writer.print(msg + "\n"); writer.flush() }
+object ConsoleReporter {
+  abstract class AbstractConsoleReporter extends AbstractReporter {
+    /** Prints the message. */
+    def printMessage(msg: String): Unit
 
-  /** Prints the message with the given position indication. */
-  def doReport(dia: Diagnostic)(using Context): Unit = {
-    val didPrint = dia match {
-      case dia: Error =>
-        printMessage(messageAndPos(dia.msg, dia.pos, diagnosticLevel(dia)))
-        if (ctx.settings.Xprompt.value) Reporter.displayPrompt(reader, writer)
-        true
-      case dia: ConditionalWarning if !dia.enablingOption.value =>
-        false
-      case dia =>
-        printMessage(messageAndPos(dia.msg, dia.pos, diagnosticLevel(dia)))
-        true
+    /** Prints the message with the given position indication. */
+    def doReport(dia: Diagnostic)(using Context): Unit = {
+      printMessage(messageAndPos(dia))
     }
-
-    if (didPrint && shouldExplain(dia))
-      printMessage(explanation(dia.msg))
-    else if (didPrint && dia.msg.canExplain)
-      printMessage("\nlonger explanation available when compiling with `-explain`")
   }
-
-  override def flush()(using Context): Unit = { writer.flush() }
 }
diff --git a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala
index fbb7145152d6..3f498b432f98 100644
--- a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala
@@ -2,12 +2,13 @@ package dotty.tools
 package dotc
 package reporting
 
-import util.SourcePosition
-import core.Contexts._
-import config.Settings.Setting
-import interfaces.Diagnostic.{ERROR, INFO, WARNING}
+import dotty.tools.dotc.config.Settings.Setting
+import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.interfaces.Diagnostic.{ERROR, INFO, WARNING}
+import dotty.tools.dotc.util.SourcePosition
 
 import java.util.Optional
+import scala.util.chaining._
 
 object Diagnostic:
 
@@ -35,7 +36,9 @@ object Diagnostic:
     msg: Message,
     pos: SourcePosition
   ) extends Diagnostic(msg, pos, WARNING) {
-    def toError: Error = new Error(msg, pos)
+    def toError: Error = new Error(msg, pos).tap(e => if isVerbose then e.setVerbose())
+    def toInfo: Info = new Info(msg, pos).tap(e => if isVerbose then e.setVerbose())
+    def isSummarizedConditional(using Context): Boolean = false
   }
 
   class Info(
@@ -48,6 +51,7 @@ object Diagnostic:
     pos: SourcePosition
   ) extends Warning(msg, pos) {
     def enablingOption(using Context): Setting[Boolean]
+    override def isSummarizedConditional(using Context): Boolean = !enablingOption.value
   }
 
   class FeatureWarning(
@@ -81,6 +85,12 @@ class Diagnostic(
   val pos: SourcePosition,
   val level: Int
 ) extends Exception with interfaces.Diagnostic:
+  private var verbose: Boolean = false
+  def isVerbose: Boolean = verbose
+  def setVerbose(): this.type =
+    verbose = true
+    this
+
   override def position: Optional[interfaces.SourcePosition] =
     if (pos.exists && pos.source.exists) Optional.of(pos) else Optional.empty()
   override def message: String =
diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala
index d73e799ddf3c..f55196f82a8e 100644
--- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala
@@ -1,7 +1,7 @@
 package dotty.tools.dotc.reporting
 
 /** Unique IDs identifying the messages */
-enum ErrorMessageID extends java.lang.Enum[ErrorMessageID] {
+enum ErrorMessageID extends java.lang.Enum[ErrorMessageID]:
 
   // IMPORTANT: Add new IDs only at the end and never remove IDs
   case
@@ -87,8 +87,8 @@ enum ErrorMessageID extends java.lang.Enum[ErrorMessageID] {
     ValueClassesMayNotWrapAnotherValueClassID,
     ValueClassParameterMayNotBeAVarID,
     ValueClassNeedsExactlyOneValParamID,
-    OnlyCaseClassOrCaseObjectAllowedID,
-    ExpectedTopLevelDefID,
+    UNUSED1,
+    UNUSED2,
     AnonymousFunctionMissingParamTypeID,
     SuperCallsNotAllowedInlineableID,
     NotAPathID,
@@ -173,7 +173,15 @@ enum ErrorMessageID extends java.lang.Enum[ErrorMessageID] {
     CaseClassInInlinedCodeID,
     OverrideTypeMismatchErrorID,
     OverrideErrorID,
-    MatchableWarningID
+    MatchableWarningID,
+    CannotExtendFunctionID,
+    LossyWideningConstantConversionID,
+    ImplicitSearchTooLargeID
 
   def errorNumber = ordinal - 2
-}
+
+object ErrorMessageID:
+  def fromErrorNumber(n: Int): Option[ErrorMessageID] =
+    val enumId = n + 2
+    if enumId >= 2 && enumId < ErrorMessageID.values.length then Some(fromOrdinal(enumId))
+    else None
diff --git a/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala
index e7b636cddf02..9255820140d8 100644
--- a/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala
@@ -7,7 +7,7 @@ import core.Contexts.Context
 import Diagnostic._
 
 /** A re-usable Reporter used in Contexts#test */
-class ExploringReporter extends StoreReporter(null):
+class ExploringReporter extends StoreReporter(null, fromTyperState = false):
   infos = new mutable.ListBuffer[Diagnostic]
 
   override def hasUnreportedErrors: Boolean =
diff --git a/compiler/src/dotty/tools/dotc/reporting/Message.scala b/compiler/src/dotty/tools/dotc/reporting/Message.scala
index 03dc0426c582..cd7456cafcc2 100644
--- a/compiler/src/dotty/tools/dotc/reporting/Message.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/Message.scala
@@ -1,7 +1,7 @@
 package dotty.tools
 package dotc
 package reporting
-
+import scala.annotation.threadUnsafe
 import util.SourcePosition
 
 object Message {
@@ -12,8 +12,7 @@ object Message {
     * not yet been ported to the new scheme. Comment out this `implicit def` to
     * see where old errors still exist
     */
-  implicit def toNoExplanation(str: => String): Message =
-    NoExplanation(str)
+  implicit def toNoExplanation(str: => String): Message = NoExplanation(str)
 }
 
 /** A `Message` contains all semantic information necessary to easily
@@ -58,6 +57,9 @@ abstract class Message(val errorId: ErrorMessageID) { self =>
     */
   protected def explain: String
 
+  /** A message suffix that can be added for certain subclasses */
+  protected def msgSuffix: String = ""
+
   /** Does this message have an explanation?
    *  This is normally the same as `explain.nonEmpty` but can be overridden
    *  if we need a way to return `true` without actually calling the
@@ -82,10 +84,10 @@ abstract class Message(val errorId: ErrorMessageID) { self =>
   def rawMessage = message
 
   /** The message to report.  tags are filtered out */
-  lazy val message: String = dropNonSensical(msg)
+  @threadUnsafe lazy val message: String = dropNonSensical(msg + msgSuffix)
 
   /** The explanation to report.  tags are filtered out */
-  lazy val explanation: String = dropNonSensical(explain)
+  @threadUnsafe lazy val explanation: String = dropNonSensical(explain)
 
   /** A message is non-sensical if it contains references to 
    *  tags.  Such tags are inserted by the error diagnostic framework if a
@@ -122,6 +124,13 @@ abstract class Message(val errorId: ErrorMessageID) { self =>
     def explain    = self.explain ++ suffix
     override def canExplain = true
 
+  /** Override with `true` for messages that should always be shown even if their
+   *  position overlaps another message of a different class. On the other hand
+   *  multiple messages of the same class with overlapping positions will lead
+   *  to only a single message of that class to be issued.
+   */
+  def showAlways = false
+
   override def toString = msg
 }
 
diff --git a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala
index 57b409109961..c2ddd535b728 100644
--- a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala
@@ -16,6 +16,8 @@ import scala.annotation.switch
 import scala.collection.mutable
 
 trait MessageRendering {
+  import Highlight.*
+  import Offsets.*
 
   /** Remove ANSI coloring from `str`, useful for getting real length of
     * strings
@@ -25,15 +27,9 @@ trait MessageRendering {
   def stripColor(str: String): String =
     str.replaceAll("\u001b\\[.*?m", "")
 
-  /** When inlining a method call, if there's an error we'd like to get the
-    * outer context and the `pos` at which the call was inlined.
-    *
-    * @return a list of strings with inline locations
-    */
-  def outer(pos: SourcePosition, prefix: String)(using Context): List[String] =
-    if (pos.outer.exists)
-       i"$prefix| This location contains code that was inlined from $pos" ::
-       outer(pos.outer, prefix)
+  /** List of all the inline calls that surround the position */
+  def inlinePosStack(pos: SourcePosition): List[SourcePosition] =
+    if pos.outer != null && pos.outer.exists then pos :: inlinePosStack(pos.outer)
     else Nil
 
   /** Get the sourcelines before and after the position, as well as the offset
@@ -41,15 +37,15 @@ trait MessageRendering {
     *
     * @return (lines before error, lines after error, line numbers offset)
     */
-  def sourceLines(pos: SourcePosition, diagnosticLevel: String)(using Context): (List[String], List[String], Int) = {
+  private def sourceLines(pos: SourcePosition)(using Context, Level, Offset): (List[String], List[String], Int) = {
     assert(pos.exists && pos.source.file.exists)
     var maxLen = Int.MinValue
     def render(offsetAndLine: (Int, String)): String = {
-      val (offset, line) = offsetAndLine
-      val lineNbr = pos.source.offsetToLine(offset)
-      val prefix = s"${lineNbr + 1} |"
+      val (offset1, line) = offsetAndLine
+      val lineNbr = (pos.source.offsetToLine(offset1) + 1).toString
+      val prefix = String.format(s"%${offset - 2}s |", lineNbr)
       maxLen = math.max(maxLen, prefix.length)
-      val lnum = hl(diagnosticLevel)(" " * math.max(0, maxLen - prefix.length) + prefix)
+      val lnum = hl(" " * math.max(0, maxLen - prefix.length - 1) + prefix)
       lnum + line.stripLineEnd
     }
 
@@ -77,23 +73,76 @@ trait MessageRendering {
     )
   }
 
-  /** The column markers aligned under the error */
-  def columnMarker(pos: SourcePosition, offset: Int, diagnosticLevel: String)(using Context): String = {
-    val prefix = " " * (offset - 1)
+  /** Generate box containing the report title
+   *
+   *  ```
+   *  -- Error: source.scala ---------------------
+   *  ```
+   */
+  private def boxTitle(title: String)(using Context, Level, Offset): String =
+    val pageWidth = ctx.settings.pageWidth.value
+    val line = "-" * (pageWidth - title.length - 4)
+    hl(s"-- $title $line")
+
+  /** The position markers aligned under the error
+   *
+   *  ```
+   *    |         ^^^^^
+   *  ```
+   */
+  private def positionMarker(pos: SourcePosition)(using Context, Level, Offset): String = {
     val padding = pos.startColumnPadding
-    val carets = hl(diagnosticLevel) {
+    val carets =
       if (pos.startLine == pos.endLine)
         "^" * math.max(1, pos.endColumn - pos.startColumn)
       else "^"
-    }
-    s"$prefix|$padding$carets"
+    hl(s"$offsetBox$padding$carets")
   }
 
+  /** The horizontal line with the given offset
+   *
+   *  ```
+   *    |
+   *  ```
+   */
+  private def offsetBox(using Context, Level, Offset): String =
+    val prefix = " " * (offset - 1)
+    hl(s"$prefix|")
+
+  /** The start of a new box section
+   *
+   *  ```
+   *    |---------------
+   *  ```
+   *  Or, if `soft` is true,
+   *  ```
+   *    |- - - - - - - -
+   *  ```
+   */
+  private def newBox(soft: Boolean = false)(using Context, Level, Offset): String =
+    val pageWidth = ctx.settings.pageWidth.value
+    val prefix = " " * (offset - 1)
+    val lineWidth = (pageWidth - offset)
+    val line = if soft then ("- " * ((lineWidth + 1) / 2)).trim else "-" * lineWidth
+    hl(s"$prefix|$line")
+
+  /** The end of a box section
+   *
+   *  ```
+   *     ----------------
+   *  ```
+   */
+  private def endBox(using Context, Level, Offset): String =
+    val pageWidth = ctx.settings.pageWidth.value
+    val prefix = " " * (offset - 1)
+    val line = "-" * (pageWidth - offset)
+    hl(s"${prefix} $line")
+
   /** The error message (`msg`) aligned under `pos`
     *
     * @return aligned error message
     */
-  def errorMsg(pos: SourcePosition, msg: String, offset: Int)(using Context): String = {
+  private def errorMsg(pos: SourcePosition, msg: String)(using Context, Level, Offset): String = {
     val padding = msg.linesIterator.foldLeft(pos.startColumnPadding) { (pad, line) =>
       val lineLength = stripColor(line).length
       val maxPad = math.max(0, ctx.settings.pageWidth.value - offset - lineLength) - offset
@@ -103,31 +152,35 @@ trait MessageRendering {
     }
 
     msg.linesIterator
-      .map { line => " " * (offset - 1) + "|" + (if line.isEmpty then "" else padding + line) }
+      .map { line => offsetBox + (if line.isEmpty then "" else padding + line) }
       .mkString(EOL)
   }
 
+  /** The source file path, line and column numbers from the given SourcePosition */
+  protected def posFileStr(pos: SourcePosition): String =
+    val path = pos.source.file.path
+    if pos.exists then s"$path:${pos.line + 1}:${pos.column}" else path
+
   /** The separator between errors containing the source file and error type
     *
     * @return separator containing error location and kind
     */
-  def posStr(pos: SourcePosition, diagnosticLevel: String, message: Message)(using Context): String =
-    if (pos.source != NoSourcePosition.source) hl(diagnosticLevel)({
-      val pos1 = pos.nonInlined
-      val file = if !pos.exists then pos1.source.file.toString else
-        s"${pos1.source.file.toString}:${pos1.line + 1}:${pos1.column}"
+  private def posStr(pos: SourcePosition, message: Message, diagnosticString: String)(using Context, Level, Offset): String =
+    if (pos.source != NoSourcePosition.source) hl({
+      val realPos = pos.nonInlined
+      val fileAndPos = posFileStr(realPos)
       val errId =
         if (message.errorId ne ErrorMessageID.NoExplanationID) {
           val errorNumber = message.errorId.errorNumber
           s"[E${"0" * (3 - errorNumber.toString.length) + errorNumber}] "
         } else ""
       val kind =
-        if (message.kind == "") diagnosticLevel
-        else s"${message.kind} $diagnosticLevel"
-      val prefix = s"-- ${errId}${kind}: $file "
-
-      prefix +
-        ("-" * math.max(ctx.settings.pageWidth.value - stripColor(prefix).length, 0))
+        if (message.kind == "") diagnosticString
+        else s"${message.kind} $diagnosticString"
+      val title =
+        if fileAndPos.isEmpty then s"$errId$kind:" // this happens in dotty.tools.repl.ScriptedTests // TODO add name of source or remove `:` (and update test files)
+        else s"$errId$kind: $fileAndPos"
+      boxTitle(title)
     }) else ""
 
   /** Explanation rendered under "Explanation" header */
@@ -138,45 +191,120 @@ trait MessageRendering {
           |${Blue("===========").show}""".stripMargin
     )
     sb.append(EOL).append(m.explanation)
-    if (m.explanation.lastOption != Some(EOL)) sb.append(EOL)
+    if (!m.explanation.endsWith(EOL)) sb.append(EOL)
     sb.toString
   }
 
+  private def appendFilterHelp(dia: Diagnostic, sb: mutable.StringBuilder): Unit =
+    import dia._
+    val hasId = msg.errorId.errorNumber >= 0
+    val category = dia match {
+      case _: UncheckedWarning => "unchecked"
+      case _: DeprecationWarning => "deprecation"
+      case _: FeatureWarning => "feature"
+      case _ => ""
+    }
+    if (hasId || category.nonEmpty)
+      sb.append(EOL).append("Matching filters for @nowarn or -Wconf:")
+      if (hasId)
+        sb.append(EOL).append("  - id=E").append(msg.errorId.errorNumber)
+        sb.append(EOL).append("  - name=").append(msg.errorId.productPrefix.stripSuffix("ID"))
+      if (category.nonEmpty)
+        sb.append(EOL).append("  - cat=").append(category)
+
   /** The whole message rendered from `msg` */
-  def messageAndPos(msg: Message, pos: SourcePosition, diagnosticLevel: String)(using Context): String = {
+  def messageAndPos(dia: Diagnostic)(using Context): String = {
+    import dia._
+    val pos1 = pos.nonInlined
+    val inlineStack = inlinePosStack(pos).filter(_ != pos1)
+    val maxLineNumber =
+      if pos.exists then (pos1 :: inlineStack).map(_.endLine).max + 1
+      else 0
+    given Level = Level(level)
+    given Offset = Offset(maxLineNumber.toString.length + 2)
     val sb = mutable.StringBuilder()
-    val posString = posStr(pos, diagnosticLevel, msg)
+    val posString = posStr(pos, msg, diagnosticLevel(dia))
     if (posString.nonEmpty) sb.append(posString).append(EOL)
     if (pos.exists) {
       val pos1 = pos.nonInlined
       if (pos1.exists && pos1.source.file.exists) {
-        val (srcBefore, srcAfter, offset) = sourceLines(pos1, diagnosticLevel)
-        val marker = columnMarker(pos1, offset, diagnosticLevel)
-        val err = errorMsg(pos1, msg.message, offset)
-        sb.append((srcBefore ::: marker :: err :: outer(pos, " " * (offset - 1)) ::: srcAfter).mkString(EOL))
+        val (srcBefore, srcAfter, offset) = sourceLines(pos1)
+        val marker = positionMarker(pos1)
+        val err = errorMsg(pos1, msg.message)
+        sb.append((srcBefore ::: marker :: err :: srcAfter).mkString(EOL))
+
+        if inlineStack.nonEmpty then
+          sb.append(EOL).append(newBox())
+          sb.append(EOL).append(offsetBox).append(i"Inline stack trace")
+          for inlinedPos <- inlineStack if inlinedPos != pos1 do
+            sb.append(EOL).append(newBox(soft = true))
+            sb.append(EOL).append(offsetBox).append(i"This location contains code that was inlined from $pos")
+            if inlinedPos.source.file.exists then
+              val (srcBefore, srcAfter, _) = sourceLines(inlinedPos)
+              val marker = positionMarker(inlinedPos)
+              sb.append(EOL).append((srcBefore ::: marker :: srcAfter).mkString(EOL))
+          sb.append(EOL).append(endBox)
       }
       else sb.append(msg.message)
     }
     else sb.append(msg.message)
+    if (dia.isVerbose)
+      appendFilterHelp(dia, sb)
+
+    if Diagnostic.shouldExplain(dia) then
+      sb.append(EOL).append(newBox())
+      sb.append(EOL).append(offsetBox).append(" Explanation (enabled by `-explain`)")
+      sb.append(EOL).append(newBox(soft = true))
+      dia.msg.explanation.split(raw"\R").foreach { line =>
+        sb.append(EOL).append(offsetBox).append(if line.isEmpty then "" else " ").append(line)
+      }
+      sb.append(EOL).append(endBox)
+    else if dia.msg.canExplain then
+      sb.append(EOL).append(offsetBox)
+      sb.append(EOL).append(offsetBox).append(" longer explanation available when compiling with `-explain`")
+
     sb.toString
   }
 
-  def hl(diagnosticLevel: String)(str: String)(using Context): String = diagnosticLevel match {
-    case "Info" => Blue(str).show
-    case "Error" => Red(str).show
-    case _ =>
-      assert(diagnosticLevel.contains("Warning"))
-      Yellow(str).show
-  }
+  private  def hl(str: String)(using Context, Level): String =
+    summon[Level].value match
+      case interfaces.Diagnostic.ERROR   => Red(str).show
+      case interfaces.Diagnostic.WARNING => Yellow(str).show
+      case interfaces.Diagnostic.INFO    => Blue(str).show
 
-  def diagnosticLevel(dia: Diagnostic): String =
+  private def diagnosticLevel(dia: Diagnostic): String =
     dia match {
-      case dia: Error => "Error"
       case dia: FeatureWarning => "Feature Warning"
       case dia: DeprecationWarning => "Deprecation Warning"
       case dia: UncheckedWarning => "Unchecked Warning"
       case dia: MigrationWarning => "Migration Warning"
-      case dia: Warning => "Warning"
-      case dia: Info => "Info"
+      case _ => dia.level match // Diagnostic isn't sealed (e.g. created in the REPL) so provide a fallback
+        case interfaces.Diagnostic.ERROR   => "Error"
+        case interfaces.Diagnostic.WARNING => "Warning"
+        case interfaces.Diagnostic.INFO    => "Info"
     }
+
+}
+
+private object Highlight {
+  opaque type Level = Int
+  extension (level: Level) def value: Int = level
+  object Level:
+    def apply(level: Int): Level = level
+}
+
+/** Size of the left offset added by the box
+ *
+ *  ```
+ *  -- Error: ... ------------
+ *  4 |  foo
+ *    |  ^^^
+ *  ^^^ // size of this offset
+ *  ```
+ */
+private object Offsets {
+  opaque type Offset = Int
+  def offset(using o: Offset): Int = o
+  object Offset:
+    def apply(level: Int): Offset = level
 }
diff --git a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala
index 437963af9161..421d8f52b43a 100644
--- a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala
@@ -2,30 +2,25 @@ package dotty.tools
 package dotc
 package reporting
 
+import dotty.tools.dotc.ast.{Trees, tpd}
+import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Decorators._
+import dotty.tools.dotc.core.Mode
+import dotty.tools.dotc.core.Symbols.{NoSymbol, Symbol}
+import dotty.tools.dotc.reporting.Diagnostic._
+import dotty.tools.dotc.reporting.Message._
+import dotty.tools.dotc.util.NoSourcePosition
+
+import java.io.{BufferedReader, PrintWriter}
 import scala.annotation.internal.sharable
-
-import core.Contexts._
-import core.Decorators._
-import collection.mutable
-import core.Mode
-import dotty.tools.dotc.core.Symbols.{Symbol, NoSymbol}
-import Diagnostic._
-import ast.{tpd, Trees}
-import Message._
-import core.Decorators._
-import util.NoSourcePosition
-
-import java.io.{ BufferedReader, PrintWriter }
+import scala.collection.mutable
+import scala.util.chaining._
 
 object Reporter {
   /** Convert a SimpleReporter into a real Reporter */
   def fromSimpleReporter(simple: interfaces.SimpleReporter): Reporter =
     new Reporter with UniqueMessagePositions with HideNonSensicalMessages {
-      override def doReport(dia: Diagnostic)(using Context): Unit = dia match {
-        case dia: ConditionalWarning if !dia.enablingOption.value =>
-        case _ =>
-          simple.report(dia)
-      }
+      override def doReport(dia: Diagnostic)(using Context): Unit = simple.report(dia)
     }
 
   /** A reporter that ignores reports, and doesn't record errors */
@@ -95,6 +90,8 @@ abstract class Reporter extends interfaces.ReporterResult {
     finally incompleteHandler = saved
   }
 
+  private def isIncompleteChecking = incompleteHandler ne defaultIncompleteHandler
+
   private var _errorCount = 0
   private var _warningCount = 0
 
@@ -140,25 +137,68 @@ abstract class Reporter extends interfaces.ReporterResult {
 
   var unreportedWarnings: Map[String, Int] = Map.empty
 
-  def report(dia: Diagnostic)(using Context): Unit =
-    val isSummarized = dia match
-      case dia: ConditionalWarning => !dia.enablingOption.value
-      case _ => false
-    if isSummarized  // avoid isHidden test for summarized warnings so that message is not forced
-       || !isHidden(dia)
-    then
-      withMode(Mode.Printing)(doReport(dia))
+  def addUnreported(key: String, n: Int): Unit =
+    val count = unreportedWarnings.getOrElse(key, 0)
+    unreportedWarnings = unreportedWarnings.updated(key, count + n)
+
+  /** Issue the diagnostic, ignoring `-Wconf` and `@nowarn` configurations,
+   *  but still honouring `-nowarn`, `-Werror`, and conditional warnings. */
+  def issueUnconfigured(dia: Diagnostic)(using Context): Unit = dia match
+    case w: Warning if ctx.settings.silentWarnings.value    =>
+    case w: ConditionalWarning if w.isSummarizedConditional =>
+      val key = w.enablingOption.name
+      addUnreported(key, 1)
+    case _                                                  =>
+      // conditional warnings that are not enabled are not fatal
+      val d = dia match
+        case w: Warning if ctx.settings.XfatalWarnings.value => w.toError
+        case _                                               => dia
+      if !isHidden(d) then // avoid isHidden test for summarized warnings so that message is not forced
+        withMode(Mode.Printing)(doReport(d))
+        d match {
+          case _: Warning => _warningCount += 1
+          case e: Error   =>
+            errors = e :: errors
+            _errorCount += 1
+            if ctx.typerState.isGlobalCommittable then
+              ctx.base.errorsToBeReported = true
+          case _: Info    => // nothing to do here
+          // match error if d is something else
+        }
+  end issueUnconfigured
+
+  def issueIfNotSuppressed(dia: Diagnostic)(using Context): Unit =
+    def go() =
+      import Action._
       dia match
-        case dia: ConditionalWarning if !dia.enablingOption.value =>
-          val key = dia.enablingOption.name
-          unreportedWarnings =
-            unreportedWarnings.updated(key, unreportedWarnings.getOrElse(key, 0) + 1)
-        case dia: Warning => _warningCount += 1
-        case dia: Error =>
-          errors = dia :: errors
-          _errorCount += 1
-        case dia: Info => // nothing to do here
-        // match error if d is something else
+        case w: Warning => WConf.parsed.action(w) match
+          case Error   => issueUnconfigured(w.toError)
+          case Warning => issueUnconfigured(w)
+          case Verbose => issueUnconfigured(w.setVerbose())
+          case Info    => issueUnconfigured(w.toInfo)
+          case Silent  =>
+        case _ => issueUnconfigured(dia)
+
+    // `ctx.run` can be null in test, also in the repl when parsing the first line. The parser runs early, the Run is
+    // only created in ReplDriver.compile when a line is submitted. This means that `@nowarn` doesn't work on parser
+    // warnings in the first line.
+    dia match
+      case w: Warning if ctx.run != null =>
+        val sup = ctx.run.suppressions
+        if sup.suppressionsComplete(w.pos.source) then sup.nowarnAction(w) match
+          case Action.Warning => go()
+          case Action.Verbose => w.setVerbose(); go()
+          case Action.Silent =>
+        else
+          // ParseResult.isIncomplete creates a new source file and reporter to check if the input is complete.
+          // The reporter's warnings are discarded, and we should not add them to the run's suspended messages,
+          // otherwise they are later reported.
+          if !isIncompleteChecking then
+            sup.addSuspendedMessage(w)
+      case _ => go()
+  end issueIfNotSuppressed
+
+  def report(dia: Diagnostic)(using Context): Unit = issueIfNotSuppressed(dia)
 
   def incomplete(dia: Diagnostic)(using Context): Unit =
     incompleteHandler(dia, ctx)
@@ -204,6 +244,8 @@ abstract class Reporter extends interfaces.ReporterResult {
   def flush()(using Context): Unit =
     val msgs = removeBufferedMessages
     if msgs.nonEmpty then msgs.foreach(ctx.reporter.report)
+    for (key, count) <- unreportedWarnings do
+      ctx.reporter.addUnreported(key, count)
 
   /** If this reporter buffers messages, all buffered messages, otherwise Nil */
   def pendingMessages(using Context): List[Diagnostic] = Nil
diff --git a/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala
index 8b6cd6ea5a0d..db39ed6527d1 100644
--- a/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala
@@ -17,7 +17,7 @@ import Diagnostic._
   * - The reporter is not flushed and the message containers capture a
   *   `Context` (about 4MB)
   */
-class StoreReporter(outer: Reporter = Reporter.NoReporter) extends Reporter {
+class StoreReporter(outer: Reporter = Reporter.NoReporter, fromTyperState: Boolean = false) extends Reporter {
 
   protected var infos: mutable.ListBuffer[Diagnostic] = null
 
@@ -37,7 +37,14 @@ class StoreReporter(outer: Reporter = Reporter.NoReporter) extends Reporter {
     if (infos != null) try infos.toList finally infos = null
     else Nil
 
-  override def pendingMessages(using Context): List[Diagnostic] = infos.toList
+  override def pendingMessages(using Context): List[Diagnostic] = if (infos != null) infos.toList else Nil
 
   override def errorsReported: Boolean = hasErrors || (outer != null && outer.errorsReported)
+
+  // If this is a TyperState buffering reporter then buffer the messages,
+  // so that only when the messages are unbuffered (when the reporter is flushed)
+  // do they go through -Wconf, and possibly then buffered on the Run as a suspended message
+  override def report(dia: Diagnostic)(using Context): Unit =
+    if fromTyperState then issueUnconfigured(dia)
+    else super.report(dia)
 }
diff --git a/compiler/src/dotty/tools/dotc/reporting/TestReporter.scala b/compiler/src/dotty/tools/dotc/reporting/TestReporter.scala
index 8ad5b525de5c..a3d84e462bf0 100644
--- a/compiler/src/dotty/tools/dotc/reporting/TestReporter.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/TestReporter.scala
@@ -6,7 +6,7 @@ import collection.mutable
 import Diagnostic._
 
 /** A re-usable Reporter used in Contexts#test */
-class TestingReporter extends StoreReporter(null):
+class TestingReporter extends StoreReporter(null, fromTyperState = false):
   infos = new mutable.ListBuffer[Diagnostic]
   override def hasUnreportedErrors: Boolean = infos.exists(_.isInstanceOf[Error])
   def reset(): Unit = infos.clear()
diff --git a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala
index fb4c92c12f83..d745d8d20081 100644
--- a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala
@@ -10,19 +10,24 @@ import core.Contexts._
   * are suppressed, unless they are of increasing severity. */
 trait UniqueMessagePositions extends Reporter {
 
-  private val positions = new mutable.HashMap[(SourceFile, Int), Int]
+  private val positions = new mutable.HashMap[(SourceFile, Integer), Diagnostic]
 
   /** Logs a position and returns true if it was already logged.
    *  @note  Two positions are considered identical for logging if they have the same point.
    */
   override def isHidden(dia: Diagnostic)(using Context): Boolean =
+    extension (dia1: Diagnostic) def hides(dia2: Diagnostic): Boolean =
+      if dia2.msg.showAlways then dia1.msg.getClass == dia2.msg.getClass
+      else dia1.level >= dia2.level
     super.isHidden(dia) || {
-      dia.pos.exists && !ctx.settings.YshowSuppressedErrors.value && {
+      dia.pos.exists
+      && !ctx.settings.YshowSuppressedErrors.value
+      && {
         var shouldHide = false
         for (pos <- dia.pos.start to dia.pos.end)
           positions get (ctx.source, pos) match {
-            case Some(level) if level >= dia.level => shouldHide = true
-            case _ => positions((ctx.source, pos)) = dia.level
+            case Some(dia1) if dia1.hides(dia) => shouldHide = true
+            case _ => positions((ctx.source, pos)) = dia
           }
         shouldHide
       }
diff --git a/compiler/src/dotty/tools/dotc/reporting/WConf.scala b/compiler/src/dotty/tools/dotc/reporting/WConf.scala
new file mode 100644
index 000000000000..34a47fa3db9d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/reporting/WConf.scala
@@ -0,0 +1,124 @@
+package dotty.tools
+package dotc
+package reporting
+
+import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.util.SourcePosition
+
+import java.util.regex.PatternSyntaxException
+import scala.annotation.internal.sharable
+import scala.collection.mutable.ListBuffer
+import scala.util.matching.Regex
+
+enum MessageFilter:
+  def matches(message: Diagnostic): Boolean = this match
+    case Any => true
+    case Deprecated => message.isInstanceOf[Diagnostic.DeprecationWarning]
+    case Feature => message.isInstanceOf[Diagnostic.FeatureWarning]
+    case Unchecked => message.isInstanceOf[Diagnostic.UncheckedWarning]
+    case MessagePattern(pattern) =>
+      val noHighlight = message.msg.rawMessage.replaceAll("\\e\\[[\\d;]*[^\\d;]","")
+      pattern.findFirstIn(noHighlight).nonEmpty
+    case MessageID(errorId) => message.msg.errorId == errorId
+    case None => false
+
+  case Any, Deprecated, Feature, Unchecked, None
+  case MessagePattern(pattern: Regex)
+  case MessageID(errorId: ErrorMessageID)
+
+enum Action:
+  case Error, Warning, Verbose, Info, Silent
+
+final case class WConf(confs: List[(List[MessageFilter], Action)]):
+  def action(message: Diagnostic): Action = confs.collectFirst {
+    case (filters, action) if filters.forall(_.matches(message)) => action
+  }.getOrElse(Action.Warning)
+
+object WConf:
+  import Action._
+  import MessageFilter._
+
+  private type Conf = (List[MessageFilter], Action)
+
+  def parseAction(s: String): Either[List[String], Action] = s match
+    case "error" | "e"            => Right(Error)
+    case "warning" | "w"          => Right(Warning)
+    case "verbose" | "v"          => Right(Verbose)
+    case "info" | "i"             => Right(Info)
+    case "silent" | "s"           => Right(Silent)
+    case _                        => Left(List(s"unknown action: `$s`"))
+
+  private def regex(s: String) =
+    try Right(s.r)
+    catch case e: PatternSyntaxException => Left(s"invalid pattern `$s`: ${e.getMessage}")
+
+  @sharable val Splitter = raw"([^=]+)=(.+)".r
+  @sharable val ErrorId = raw"E?(\d+)".r
+
+  def parseFilters(s: String): Either[List[String], List[MessageFilter]] =
+    // TODO: don't split on escaped \&
+    val (parseErrors, filters) = s.split('&').toList.partitionMap(parseFilter)
+    if parseErrors.nonEmpty then Left(parseErrors)
+    else if filters.isEmpty then Left(List("no filters or no action defined"))
+    else Right(filters)
+
+  def parseFilter(s: String): Either[String, MessageFilter] = s match
+    case "any" => Right(Any)
+    case Splitter(filter, conf) => filter match
+      case "msg" => regex(conf).map(MessagePattern.apply)
+      case "id" => conf match
+        case ErrorId(num) =>
+          ErrorMessageID.fromErrorNumber(num.toInt) match
+            case Some(errId) => Right(MessageID(errId))
+            case _ => Left(s"unknown error message number: E$num")
+        case _ =>
+          Left(s"invalid error message id: $conf")
+      case "name" =>
+        try Right(MessageID(ErrorMessageID.valueOf(conf + "ID")))
+        catch case _: IllegalArgumentException => Left(s"unknown error message name: $conf")
+
+      case "cat" => conf match
+        case "deprecation" => Right(Deprecated)
+        case "feature"     => Right(Feature)
+        case "unchecked"   => Right(Unchecked)
+        case _             => Left(s"unknown category: $conf")
+      case _ => Left(s"unknown filter: $filter")
+    case _ => Left(s"unknown filter: $s")
+
+  def parsed(using Context): WConf =
+    val setting = ctx.settings.Wconf.value
+    def cached = ctx.base.wConfCache
+    if cached == null || cached._1 != setting then
+      val conf = fromSettings(setting)
+      ctx.base.wConfCache = (setting, conf.getOrElse(WConf(Nil)))
+      conf.swap.foreach(msgs =>
+        val multiHelp =
+          if setting.sizeIs > 1 then
+            """
+              |Note: for multiple filters, use `-Wconf:filter1:action1,filter2:action2`
+              |      or alternatively          `-Wconf:filter1:action1 -Wconf:filter2:action2`""".stripMargin
+          else ""
+        report.warning(s"Failed to parse `-Wconf` configuration: ${ctx.settings.Wconf.value.mkString(",")}\n${msgs.mkString("\n")}$multiHelp"))
+    cached._2
+
+  def fromSettings(settings: List[String]): Either[List[String], WConf] =
+    if (settings.isEmpty) Right(WConf(Nil))
+    else
+      val parsedConfs: List[Either[List[String], (List[MessageFilter], Action)]] = settings.map(conf =>
+        val filtersAndAction = conf.split(':')
+        if filtersAndAction.length != 2 then Left(List("exactly one `:` expected (&...&:)"))
+        else
+          parseFilters(filtersAndAction(0)).flatMap(filters =>
+            parseAction(filtersAndAction(1)).map((filters, _))))
+      val (parseErrorss, configs) = parsedConfs.partitionMap(identity)
+      if (parseErrorss.nonEmpty) Left(parseErrorss.flatten)
+      else Right(WConf(configs))
+
+class Suppression(val annotPos: SourcePosition, filters: List[MessageFilter], val start: Int, end: Int, val verbose: Boolean):
+  private[this] var _used = false
+  def used: Boolean = _used
+  def markUsed(): Unit = { _used = true }
+
+  def matches(dia: Diagnostic): Boolean =
+    val pos = dia.pos
+    pos.exists && start <= pos.start && pos.end <= end && filters.forall(_.matches(dia))
diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala
index b32f00dde083..4a1efab782a1 100644
--- a/compiler/src/dotty/tools/dotc/reporting/messages.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala
@@ -16,8 +16,9 @@ import printing.Formatting
 import ErrorMessageID._
 import ast.Trees
 import config.{Feature, ScalaVersion}
-import typer.ErrorReporting.err
+import typer.ErrorReporting.{err, matchReductionAddendum}
 import typer.ProtoTypes.ViewProto
+import typer.Implicits.Candidate
 import scala.util.control.NonFatal
 import StdNames.nme
 import printing.Formatting.hl
@@ -45,7 +46,11 @@ import transform.SymUtils._
   abstract class TypeMsg(errorId: ErrorMessageID) extends Message(errorId):
     def kind = "Type"
 
-  abstract class TypeMismatchMsg(found: Type, expected: Type)(errorId: ErrorMessageID)(using Context) extends Message(errorId):
+  trait ShowMatchTrace(tps: Type*)(using Context) extends Message:
+    override def msgSuffix: String = matchReductionAddendum(tps*)
+
+  abstract class TypeMismatchMsg(found: Type, expected: Type)(errorId: ErrorMessageID)(using Context)
+  extends Message(errorId), ShowMatchTrace(found, expected):
     def kind = "Type Mismatch"
     def explain = err.whyNoMatchStr(found, expected)
     override def canExplain = true
@@ -145,7 +150,6 @@ import transform.SymUtils._
   }
 
   class AnonymousFunctionMissingParamType(param: untpd.ValDef,
-                                          args: List[untpd.Tree],
                                           tree: untpd.Function,
                                           pt: Type)
                                           (using Context)
@@ -153,7 +157,7 @@ import transform.SymUtils._
     def msg = {
       val ofFun =
         if param.name.is(WildcardParamName)
-           || (MethodType.syntheticParamNames(args.length + 1) contains param.name)
+           || (MethodType.syntheticParamNames(tree.args.length + 1) contains param.name)
         then i" of expanded function:\n$tree"
         else ""
 
@@ -238,7 +242,7 @@ import transform.SymUtils._
     }
   }
 
-  class TypeMismatch(found: Type, expected: Type, addenda: => String*)(using Context)
+  class TypeMismatch(found: Type,  expected: Type, inTree: Option[untpd.Tree],  addenda: => String*)(using Context)
     extends TypeMismatchMsg(found, expected)(TypeMismatchID):
 
     // replace constrained TypeParamRefs and their typevars by their bounds where possible
@@ -278,10 +282,16 @@ import transform.SymUtils._
       s"""|Found:    $foundStr
           |Required: $expectedStr""".stripMargin
         + whereSuffix + postScript
+
+    override def explain =
+      val treeStr = inTree.map(x => s"\nTree: ${x.show}").getOrElse("")
+      treeStr + "\n" + super.explain
+
+
   end TypeMismatch
 
   class NotAMember(site: Type, val name: Name, selected: String, addendum: => String = "")(using Context)
-  extends NotFoundMsg(NotAMemberID) {
+  extends NotFoundMsg(NotAMemberID), ShowMatchTrace(site) {
     //println(i"site = $site, decls = ${site.decls}, source = ${site.typeSymbol.sourceFile}") //DEBUG
 
     def msg = {
@@ -289,16 +299,16 @@ import transform.SymUtils._
       val maxDist = 3  // maximal number of differences to be considered for a hint
       val missing = name.show
 
-      // The names of all non-synthetic, non-private members of `site`
+      // The symbols of all non-synthetic, non-private members of `site`
       // that are of the same type/term kind as the missing member.
-      def candidates: Set[String] =
+      def candidates: Set[Symbol] =
         for
           bc <- site.widen.baseClasses.toSet
           sym <- bc.info.decls.filter(sym =>
             sym.isType == name.isTypeName
             && !sym.isConstructor
             && !sym.flagsUNSAFE.isOneOf(Synthetic | Private))
-        yield sym.name.show
+        yield sym
 
       // Calculate Levenshtein distance
       def distance(s1: String, s2: String): Int =
@@ -314,13 +324,13 @@ import transform.SymUtils._
             else (dist(j - 1)(i) min dist(j)(i - 1) min dist(j - 1)(i - 1)) + 1
         dist(s2.length)(s1.length)
 
-      // A list of possible candidate strings with their Levenstein distances
+      // A list of possible candidate symbols with their Levenstein distances
       // to the name of the missing member
-      def closest: List[(Int, String)] = candidates
+      def closest: List[(Int, Symbol)] = candidates
         .toList
-        .map(n => (distance(n, missing), n))
-        .filter((d, n) => d <= maxDist && d < missing.length && d < n.length)
-        .sorted  // sort by distance first, alphabetically second
+        .map(sym => (distance(sym.name.show, missing), sym))
+        .filter((d, sym) => d <= maxDist && d < missing.length && d < sym.name.show.length)
+        .sortBy((d, sym) => (d, sym.name.show))  // sort by distance first, alphabetically second
 
       val enumClause =
         if ((name eq nme.values) || (name eq nme.valueOf)) && site.classSymbol.companionClass.isEnumClass then
@@ -339,11 +349,15 @@ import transform.SymUtils._
       val finalAddendum =
         if addendum.nonEmpty then prefixEnumClause(addendum)
         else closest match
-          case (d, n) :: _ =>
+          case (d, sym) :: _ =>
             val siteName = site match
               case site: NamedType => site.name.show
               case site => i"$site"
-            s" - did you mean $siteName.$n?$enumClause"
+            val showName =
+              // Add .type to the name if it is a module
+              if sym.is(ModuleClass) then s"${sym.name.show}.type"
+              else sym.name.show
+            s" - did you mean $siteName.$showName?$enumClause"
           case Nil => prefixEnumClause("")
 
       ex"$selected $name is not a member of ${site.widen}$finalAddendum"
@@ -794,6 +808,13 @@ import transform.SymUtils._
            |"""
   }
 
+  class LossyWideningConstantConversion(sourceType: Type, targetType: Type)(using Context)
+  extends Message(LossyWideningConstantConversionID):
+    def kind = "Lossy Conversion"
+    def msg = em"""|Widening conversion from $sourceType to $targetType loses precision.
+                   |Write `.to$targetType` instead.""".stripMargin
+    def explain = ""
+
   class PatternMatchExhaustivity(uncoveredFn: => String, hasMore: Boolean)(using Context)
   extends Message(PatternMatchExhaustivityID) {
     def kind = "Pattern Match Exhaustivity"
@@ -834,7 +855,7 @@ import transform.SymUtils._
 
   class MatchCaseOnlyNullWarning()(using Context)
   extends PatternMatchMsg(MatchCaseOnlyNullWarningID) {
-    def msg = em"""Only ${hl("null")} is matched. Consider using ${hl("case null =>")} instead."""
+    def msg = em"""Unreachable case except for ${hl("null")} (if this is intentional, consider writing ${hl("case null =>")} instead)."""
     def explain = ""
   }
 
@@ -1558,6 +1579,12 @@ import transform.SymUtils._
       def explain = ""
     }
 
+  class CannotExtendContextFunction(sym: Symbol)(using Context)
+    extends SyntaxMsg(CannotExtendFunctionID) {
+      def msg = em"""$sym cannot extend a context function class"""
+      def explain = ""
+    }
+
   class JavaEnumParentArgs(parent: Type)(using Context)
     extends TypeMsg(JavaEnumParentArgsID) {
       def msg = em"""not enough arguments for constructor Enum: ${hl("(name: String, ordinal: Int)")}: ${hl(parent.show)}"""
@@ -1646,18 +1673,6 @@ import transform.SymUtils._
     def explain = ""
   }
 
-  class OnlyCaseClassOrCaseObjectAllowed()(using Context)
-    extends SyntaxMsg(OnlyCaseClassOrCaseObjectAllowedID) {
-    def msg = em"""Only ${hl("case class")} or ${hl("case object")} allowed"""
-    def explain = ""
-  }
-
-  class ExpectedToplevelDef()(using Context)
-    extends SyntaxMsg(ExpectedTopLevelDefID) {
-    def msg = "Expected a toplevel definition"
-    def explain = ""
-  }
-
   class SuperCallsNotAllowedInlineable(symbol: Symbol)(using Context)
     extends SyntaxMsg(SuperCallsNotAllowedInlineableID) {
     def msg = em"Super call not allowed in inlineable $symbol"
@@ -1758,13 +1773,13 @@ import transform.SymUtils._
     def explain = ""
   }
 
-  class FailureToEliminateExistential(tp: Type, tp1: Type, tp2: Type, boundSyms: List[Symbol])(using Context)
+  class FailureToEliminateExistential(tp: Type, tp1: Type, tp2: Type, boundSyms: List[Symbol], classRoot: Symbol)(using Context)
     extends Message(FailureToEliminateExistentialID) {
     def kind: String = "Compatibility"
     def msg =
       val originalType = ctx.printer.dclsText(boundSyms, "; ").show
-      em"""An existential type that came from a Scala-2 classfile cannot be
-          |mapped accurately to to a Scala-3 equivalent.
+      em"""An existential type that came from a Scala-2 classfile for $classRoot
+          |cannot be mapped accurately to a Scala-3 equivalent.
           |original type    : $tp forSome ${originalType}
           |reduces to       : $tp1
           |type used instead: $tp2
@@ -1814,12 +1829,16 @@ import transform.SymUtils._
     def explain = ""
   }
 
-  class IllegalStartOfStatement(isModifier: Boolean)(using Context) extends SyntaxMsg(IllegalStartOfStatementID) {
-    def msg = {
-      val addendum = if (isModifier) ": no modifiers allowed here" else ""
-      "Illegal start of statement" + addendum
-    }
-    def explain = "A statement is either an import, a definition or an expression."
+  class IllegalStartOfStatement(what: String, isModifier: Boolean, isStat: Boolean)(using Context) extends SyntaxMsg(IllegalStartOfStatementID) {
+    def msg =
+      if isStat then
+        "this kind of statement is not allowed here"
+      else
+        val addendum = if isModifier then ": this modifier is not allowed here" else ""
+        s"Illegal start of $what$addendum"
+    def explain =
+      i"""A statement is an import or export, a definition or an expression.
+         |Some statements are only allowed in certain contexts"""
   }
 
   class TraitIsExpected(symbol: Symbol)(using Context) extends SyntaxMsg(TraitIsExpectedID) {
@@ -1860,11 +1879,15 @@ import transform.SymUtils._
         i" in ${conflicting.associatedFile}"
       else if conflicting.owner == owner then ""
       else i" in ${conflicting.owner}"
+    private def note =
+      if owner.is(Method) || conflicting.is(Method) then
+        "\n\nNote that overloaded methods must all be defined in the same group of toplevel definitions"
+      else ""
     def msg =
       if conflicting.isTerm != name.isTermName then
         em"$name clashes with $conflicting$where; the two must be defined together"
       else
-        em"$name is already defined as $conflicting$where"
+        em"$name is already defined as $conflicting$where$note"
     def explain = ""
 
   class PackageNameAlreadyDefined(pkg: Symbol)(using Context) extends NamingMsg(PackageNameAlreadyDefinedID) {
@@ -2493,3 +2516,26 @@ import transform.SymUtils._
           |Inlining such definition would multiply this footprint for each call site.
           |""".stripMargin
   }
+
+  class ImplicitSearchTooLargeWarning(limit: Int, openSearchPairs: List[(Candidate, Type)])(using Context)
+    extends TypeMsg(ImplicitSearchTooLargeID):
+    override def showAlways = true
+    def showQuery(query: (Candidate, Type)): String =
+      i"  ${query._1.ref.symbol.showLocated}  for  ${query._2}"
+    def msg =
+      em"""Implicit search problem too large.
+          |An implicit search was terminated with failure after trying $limit expressions.
+          |The root candidate for the search was:
+          |
+          |${showQuery(openSearchPairs.last)}
+          |
+          |You can change the behavior by setting the `-Ximplicit-search-limit` value.
+          |Smaller values cause the search to fail faster.
+          |Larger values might make a very large search problem succeed.
+          |"""
+    def explain =
+      em"""The overflow happened with the following lists of tried expressions and target types,
+          |starting with the root query:
+          |
+          |${openSearchPairs.reverse.map(showQuery)}%\n%
+        """
diff --git a/compiler/src/dotty/tools/dotc/reporting/trace.scala b/compiler/src/dotty/tools/dotc/reporting/trace.scala
index 9036ba1c1dc7..804188b20780 100644
--- a/compiler/src/dotty/tools/dotc/reporting/trace.scala
+++ b/compiler/src/dotty/tools/dotc/reporting/trace.scala
@@ -7,6 +7,12 @@ import config.Config
 import config.Printers
 import core.Mode
 
+/** Exposes the {{{ trace("question") { op } }}} syntax.
+ *
+ * Traced operations will print indented messages if enabled.
+ * Tracing depends on [[Config.tracingEnabled]] and [[dotty.tools.dotc.config.ScalaSettings.Ylog]].
+ * Tracing can be forced by replacing [[trace]] with [[trace.force]] or [[trace.log]] (see below).
+ */
 object trace extends TraceSyntax:
   inline def isEnabled = Config.tracingEnabled
   protected val isForced = false
@@ -14,6 +20,10 @@ object trace extends TraceSyntax:
   object force extends TraceSyntax:
     inline def isEnabled: true = true
     protected val isForced = true
+
+  object log extends TraceSyntax:
+    inline def isEnabled: true = true
+    protected val isForced = false
 end trace
 
 /** This module is carefully optimized to give zero overhead if Config.tracingEnabled
@@ -33,7 +43,7 @@ trait TraceSyntax:
       apply(question, if cond then Printers.default else Printers.noPrinter, show)(op)
     else op
 
-  inline def apply[T](inline question: String, inline printer: Printers.Printer, inline showOp: Any => String)(inline op: T)(using Context): T =
+  inline def apply[T, U >: T](inline question: String, inline printer: Printers.Printer, inline showOp: U => String)(inline op: T)(using Context): T =
     inline if isEnabled then
       doTrace[T](question, printer, showOp)(op)
     else op
@@ -60,20 +70,20 @@ trait TraceSyntax:
 
   private def doTrace[T](question: => String,
                          printer: Printers.Printer = Printers.default,
-                         showOp: Any => String = alwaysToString)
+                         showOp: T => String = alwaysToString)
                         (op: => T)(using Context): T =
     if ctx.mode.is(Mode.Printing) || !isForced && (printer eq Printers.noPrinter) then op
     else
       // Avoid evaluating question multiple time, since each evaluation
       // may cause some extra logging output.
-      val q = question
+      val q = question.replace('\n', ' ')
       val leading = s"==> $q?"
-      val trailing = (res: Any) => s"<== $q = ${showOp(res)}"
+      val trailing = (res: T) => s"<== $q = ${showOp(res)}"
       var finalized = false
       var logctx = ctx
       while logctx.reporter.isInstanceOf[StoreReporter] do logctx = logctx.outer
       def margin = ctx.base.indentTab * ctx.base.indent
-      def doLog(s: String) = if isForced then println(s) else report.log(s)
+      def doLog(s: String) = if isForced then println(s) else report.log(s)(using logctx)
       def finalize(msg: String) =
         if !finalized then
           ctx.base.indent -= 1
diff --git a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala
index 5eeee5dc1dd3..9c85d2f5bd1d 100644
--- a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala
+++ b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala
@@ -20,13 +20,16 @@ object Rewrites {
     private[Rewrites] val pbuf = new mutable.ListBuffer[Patch]()
 
     def addPatch(span: Span, replacement: String): Unit =
-      pbuf += Patch(span, replacement)
+      pbuf.indexWhere(p => p.span.start == span.start && p.span.end == span.end) match {
+        case i if i >= 0 => pbuf.update(i, Patch(span, replacement))
+        case _           => pbuf += Patch(span, replacement)
+      }
 
     def apply(cs: Array[Char]): Array[Char] = {
       val delta = pbuf.map(_.delta).sum
       val patches = pbuf.toList.sortBy(_.span.start)
       if (patches.nonEmpty)
-        patches reduceLeft {(p1, p2) =>
+        patches.reduceLeft {(p1, p2) =>
           assert(p1.span.end <= p2.span.start, s"overlapping patches in $source: $p1 and $p2")
           p2
         }
@@ -64,11 +67,11 @@ object Rewrites {
    *  given by `span` in `source` by `replacement`
    */
   def patch(source: SourceFile, span: Span, replacement: String)(using Context): Unit =
-    if (ctx.reporter != Reporter.NoReporter) // NoReporter is used for syntax highlighting
-      for (rewrites <- ctx.settings.rewrite.value)
-        rewrites.patched
-          .getOrElseUpdate(source, new Patches(source))
-          .addPatch(span, replacement)
+    if ctx.reporter != Reporter.NoReporter // NoReporter is used for syntax highlighting
+    then ctx.settings.rewrite.value.foreach(_.patched
+         .getOrElseUpdate(source, new Patches(source))
+         .addPatch(span, replacement)
+    )
 
   /** Patch position in `ctx.compilationUnit.source`. */
   def patch(span: Span, replacement: String)(using Context): Unit =
@@ -96,6 +99,3 @@ class Rewrites {
   import Rewrites._
   private val patched = new PatchedFiles
 }
-
-
-
diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
index cf8b3e01822a..3e442aab5b68 100644
--- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
+++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala
@@ -24,6 +24,8 @@ import java.io.PrintWriter
 import xsbti.api.DefinitionType
 
 import scala.collection.mutable
+import scala.util.hashing.MurmurHash3
+import scala.util.chaining.*
 
 /** This phase sends a representation of the API of classes to sbt via callbacks.
  *
@@ -41,7 +43,10 @@ import scala.collection.mutable
  *  @see ExtractDependencies
  */
 class ExtractAPI extends Phase {
-  override def phaseName: String = "sbt-api"
+
+  override def phaseName: String = ExtractAPI.name
+
+  override def description: String = ExtractAPI.description
 
   override def isRunnable(using Context): Boolean = {
     def forceRun = ctx.settings.YdumpSbtInc.value || ctx.settings.YforceSbtPhases.value
@@ -85,6 +90,10 @@ class ExtractAPI extends Phase {
   }
 }
 
+object ExtractAPI:
+  val name: String = "sbt-api"
+  val description: String = "sends a representation of the API of classes to sbt"
+
 /** Extracts full (including private members) API representation out of Symbols and Types.
  *
  *  The exact representation used for each type is not important: the only thing
@@ -140,9 +149,21 @@ private class ExtractAPICollector(using Context) extends ThunkHolder {
   /** This cache is necessary to avoid unstable name hashing when `typeCache` is present,
    *  see the comment in the `RefinedType` case in `computeType`
    *  The cache key is (api of RefinedType#parent, api of RefinedType#refinedInfo).
-    */
+   */
   private val refinedTypeCache = new mutable.HashMap[(api.Type, api.Definition), api.Structure]
 
+  /** This cache is necessary to avoid infinite loops when hashing an inline "Body" annotation.
+   *  Its values are transitively seen inline references within a call chain starting from a single "origin" inline
+   *  definition. Avoid hashing an inline "Body" annotation if its associated definition is already in the cache.
+   *  Precondition: the cache is empty whenever we hash a new "origin" inline "Body" annotation.
+   */
+  private val seenInlineCache = mutable.HashSet.empty[Symbol]
+
+  /** This cache is optional, it avoids recomputing hashes of inline "Body" annotations,
+   *  e.g. when a concrete inline method is inherited by a subclass.
+   */
+  private val inlineBodyCache = mutable.HashMap.empty[Symbol, Int]
+
   private val allNonLocalClassesInSrc = new mutable.HashSet[xsbti.api.ClassLike]
   private val _mainClasses = new mutable.HashSet[String]
 
@@ -219,9 +240,9 @@ private class ExtractAPICollector(using Context) extends ThunkHolder {
     val structure = apiClassStructure(sym)
     val acc = apiAccess(sym)
     val modifiers = apiModifiers(sym)
-    val anns = apiAnnotations(sym).toArray
+    val anns = apiAnnotations(sym, inlineOrigin = NoSymbol).toArray
     val topLevel = sym.isTopLevelClass
-    val childrenOfSealedClass = sym.children.sorted(classFirstSort).map(c =>
+    val childrenOfSealedClass = sym.sealedDescendants.sorted(classFirstSort).map(c =>
       if (c.isClass)
         apiType(c.typeRef)
       else
@@ -320,54 +341,100 @@ private class ExtractAPICollector(using Context) extends ThunkHolder {
     }
   }
 
-  def apiDefinitions(defs: List[Symbol]): List[api.ClassDefinition] = {
-    defs.sorted(classFirstSort).map(apiDefinition)
-  }
+  def apiDefinitions(defs: List[Symbol]): List[api.ClassDefinition] =
+    defs.sorted(classFirstSort).map(apiDefinition(_, inlineOrigin = NoSymbol))
 
-  def apiDefinition(sym: Symbol): api.ClassDefinition = {
+  /** `inlineOrigin` denotes an optional inline method that we are
+   *  currently hashing the body of. If it exists, include extra information
+   *  that is missing after erasure
+   */
+  def apiDefinition(sym: Symbol, inlineOrigin: Symbol): api.ClassDefinition = {
     if (sym.isClass) {
       apiClass(sym.asClass)
     } else if (sym.isType) {
       apiTypeMember(sym.asType)
     } else if (sym.is(Mutable, butNot = Accessor)) {
       api.Var.of(sym.name.toString, apiAccess(sym), apiModifiers(sym),
-        apiAnnotations(sym).toArray, apiType(sym.info))
+        apiAnnotations(sym, inlineOrigin).toArray, apiType(sym.info))
     } else if (sym.isStableMember && !sym.isRealMethod) {
       api.Val.of(sym.name.toString, apiAccess(sym), apiModifiers(sym),
-        apiAnnotations(sym).toArray, apiType(sym.info))
+        apiAnnotations(sym, inlineOrigin).toArray, apiType(sym.info))
     } else {
-      apiDef(sym.asTerm)
+      apiDef(sym.asTerm, inlineOrigin)
     }
   }
 
-  def apiDef(sym: TermSymbol): api.Def = {
+  /** `inlineOrigin` denotes an optional inline method that we are
+   *  currently hashing the body of. If it exists, include extra information
+   *  that is missing after erasure
+   */
+  def apiDef(sym: TermSymbol, inlineOrigin: Symbol): api.Def = {
+
+    var seenInlineExtras = false
+    var inlineExtras = 41
+
+    def mixInlineParam(p: Symbol): Unit =
+      if inlineOrigin.exists && p.is(Inline) then
+        seenInlineExtras = true
+        inlineExtras = hashInlineParam(p, inlineExtras)
+
+    def inlineExtrasAnnot: Option[api.Annotation] =
+      val h = inlineExtras
+      Option.when(seenInlineExtras) {
+        marker(s"${MurmurHash3.finalizeHash(h, "inlineExtras".hashCode)}")
+      }
+
+    def tparamList(pt: TypeLambda): List[api.TypeParameter] =
+      pt.paramNames.lazyZip(pt.paramInfos).map((pname, pbounds) =>
+        apiTypeParameter(pname.toString, 0, pbounds.lo, pbounds.hi)
+      )
+
+    def paramList(mt: MethodType, params: List[Symbol]): api.ParameterList =
+      val apiParams = params.lazyZip(mt.paramInfos).map((param, ptype) =>
+        mixInlineParam(param)
+        api.MethodParameter.of(
+          param.name.toString, apiType(ptype), param.is(HasDefault), api.ParameterModifier.Plain))
+      api.ParameterList.of(apiParams.toArray, mt.isImplicitMethod)
+
     def paramLists(t: Type, paramss: List[List[Symbol]]): List[api.ParameterList] = t match {
       case pt: TypeLambda =>
         paramLists(pt.resultType, paramss.drop(1))
       case mt @ MethodTpe(pnames, ptypes, restpe) =>
         assert(paramss.nonEmpty && paramss.head.hasSameLengthAs(pnames),
           i"mismatch for $sym, ${sym.info}, ${sym.paramSymss}")
-        val apiParams = paramss.head.lazyZip(ptypes).map((param, ptype) =>
-          api.MethodParameter.of(param.name.toString, apiType(ptype),
-          param.is(HasDefault), api.ParameterModifier.Plain))
-        api.ParameterList.of(apiParams.toArray, mt.isImplicitMethod)
-          :: paramLists(restpe, paramss.tail)
+        paramList(mt, paramss.head) :: paramLists(restpe, paramss.tail)
       case _ =>
         Nil
     }
 
-    val tparams = sym.info match {
+    /** returns list of pairs of 1: the position in all parameter lists, and 2: a type parameter list */
+    def tparamLists(t: Type, index: Int): List[(Int, List[api.TypeParameter])] = t match
       case pt: TypeLambda =>
-        pt.paramNames.lazyZip(pt.paramInfos).map((pname, pbounds) =>
-          apiTypeParameter(pname.toString, 0, pbounds.lo, pbounds.hi))
+        (index, tparamList(pt)) :: tparamLists(pt.resultType, index + 1)
+      case mt: MethodType =>
+        tparamLists(mt.resultType, index + 1)
       case _ =>
         Nil
-    }
+
+    val (tparams, tparamsExtras) = sym.info match
+      case pt: TypeLambda =>
+        (tparamList(pt), tparamLists(pt.resultType, index = 1))
+      case mt: MethodType =>
+        (Nil, tparamLists(mt.resultType, index = 1))
+      case _ =>
+        (Nil, Nil)
+
     val vparamss = paramLists(sym.info, sym.paramSymss)
     val retTp = sym.info.finalResultType.widenExpr
 
-    api.Def.of(sym.name.toString, apiAccess(sym), apiModifiers(sym),
-      apiAnnotations(sym).toArray, tparams.toArray, vparamss.toArray, apiType(retTp))
+    val tparamsExtraAnnot = Option.when(tparamsExtras.nonEmpty) {
+      marker(s"${hashTparamsExtras(tparamsExtras)("tparamsExtra".hashCode)}")
+    }
+
+    val annotations = inlineExtrasAnnot ++: tparamsExtraAnnot ++: apiAnnotations(sym, inlineOrigin)
+
+    api.Def.of(sym.zincMangledName.toString, apiAccess(sym), apiModifiers(sym),
+      annotations.toArray, tparams.toArray, vparamss.toArray, apiType(retTp))
   }
 
   def apiTypeMember(sym: TypeSymbol): api.TypeMember = {
@@ -375,7 +442,7 @@ private class ExtractAPICollector(using Context) extends ThunkHolder {
     val name = sym.name.toString
     val access = apiAccess(sym)
     val modifiers = apiModifiers(sym)
-    val as = apiAnnotations(sym)
+    val as = apiAnnotations(sym, inlineOrigin = NoSymbol)
     val tpe = sym.info
 
     if (sym.isAliasType)
@@ -585,23 +652,29 @@ private class ExtractAPICollector(using Context) extends ThunkHolder {
       sym.isOneOf(GivenOrImplicit), sym.is(Lazy), sym.is(Macro), sym.isSuperAccessor)
   }
 
-  def apiAnnotations(s: Symbol): List[api.Annotation] = {
+  /** `inlineOrigin` denotes an optional inline method that we are
+   *  currently hashing the body of.
+   */
+  def apiAnnotations(s: Symbol, inlineOrigin: Symbol): List[api.Annotation] = {
     val annots = new mutable.ListBuffer[api.Annotation]
     val inlineBody = Inliner.bodyToInline(s)
-    if (!inlineBody.isEmpty) {
+    if !inlineBody.isEmpty then
       // If the body of an inline def changes, all the reverse dependencies of
       // this method need to be recompiled. sbt has no way of tracking method
       // bodies, so we include the hash of the body of the method as part of the
       // signature we send to sbt.
-      //
-      // FIXME: The API of a class we send to Zinc includes the signatures of
-      // inherited methods, which means that we repeatedly compute the hash of
-      // an inline def in every class that extends its owner. To avoid this we
-      // could store the hash as an annotation when pickling an inline def
-      // and retrieve it here instead of computing it on the fly.
-      val inlineBodyHash = treeHash(inlineBody)
-      annots += marker(inlineBodyHash.toString)
-    }
+
+      def hash[U](inlineOrigin: Symbol): Int =
+        assert(seenInlineCache.add(s)) // will fail if already seen, guarded by treeHash
+        treeHash(inlineBody, inlineOrigin)
+
+      val inlineHash =
+        if inlineOrigin.exists then hash(inlineOrigin)
+        else inlineBodyCache.getOrElseUpdate(s, hash(inlineOrigin = s).tap(_ => seenInlineCache.clear()))
+
+      annots += marker(inlineHash.toString)
+
+    end if
 
     // In the Scala2 ExtractAPI phase we only extract annotations that extend
     // StaticAnnotation, but in Dotty we currently pickle all annotations so we
@@ -619,15 +692,66 @@ private class ExtractAPICollector(using Context) extends ThunkHolder {
   /** Produce a hash for a tree that is as stable as possible:
    *  it should stay the same across compiler runs, compiler instances,
    *  JVMs, etc.
+   *
+   * `inlineOrigin` denotes an optional inline method that we are hashing the body of, where `tree` could be
+   * its body, or the body of another method referenced in a call chain leading to `inlineOrigin`.
+   *
+   * If `inlineOrigin` is NoSymbol, then tree is the tree of an annotation.
    */
-  def treeHash(tree: Tree): Int =
-    import scala.util.hashing.MurmurHash3
+  def treeHash(tree: Tree, inlineOrigin: Symbol): Int =
+    import core.Constants.*
+
+    def nameHash(n: Name, initHash: Int): Int =
+      val h =
+        if n.isTermName then
+          MurmurHash3.mix(initHash, TermNameHash)
+        else
+          MurmurHash3.mix(initHash, TypeNameHash)
+
+      // The hashCode of the name itself is not stable across compiler instances
+      MurmurHash3.mix(h, n.toString.hashCode)
+    end nameHash
+
+    def constantHash(c: Constant, initHash: Int): Int =
+      var h = MurmurHash3.mix(initHash, c.tag)
+      c.tag match
+        case NullTag =>
+          // No value to hash, the tag is enough.
+        case ClazzTag =>
+          // Go through `apiType` to get a value with a stable hash, it'd
+          // be better to use Murmur here too instead of relying on
+          // `hashCode`, but that would essentially mean duplicating
+          // https://github.com/sbt/zinc/blob/develop/internal/zinc-apiinfo/src/main/scala/xsbt/api/HashAPI.scala
+          // and at that point we might as well do type hashing on our own
+          // representation.
+          h = MurmurHash3.mix(h, apiType(c.typeValue).hashCode)
+        case _ =>
+          h = MurmurHash3.mix(h, c.value.hashCode)
+      h
+    end constantHash
+
+    def cannotHash(what: String, elem: Any, pos: Positioned): Unit =
+      internalError(i"Don't know how to produce a stable hash for $what", pos.sourcePos)
 
     def positionedHash(p: ast.Positioned, initHash: Int): Int =
+      var h = initHash
+
       p match
         case p: WithLazyField[?] =>
           p.forceIfLazy
         case _ =>
+
+      if inlineOrigin.exists then
+        p match
+          case ref: RefTree @unchecked =>
+            val sym = ref.symbol
+            if sym.is(Inline, butNot = Param) && !seenInlineCache.contains(sym) then
+              // An inline method that calls another inline method will eventually inline the call
+              // at a non-inline callsite, in this case if the implementation of the nested call
+              // changes, then the callsite will have a different API, we should hash the definition
+              h = MurmurHash3.mix(h, apiDefinition(sym, inlineOrigin).hashCode)
+          case _ =>
+
       // FIXME: If `p` is a tree we should probably take its type into account
       // when hashing it, but producing a stable hash for a type is not trivial
       // since the same type might have multiple representations, for method
@@ -635,12 +759,11 @@ private class ExtractAPICollector(using Context) extends ThunkHolder {
       // in Zinc that generates hashes from that, if we can reliably produce
       // stable hashes for types ourselves then we could bypass all that and
       // send Zinc hashes directly.
-      val h = MurmurHash3.mix(initHash, p.productPrefix.hashCode)
+      h = MurmurHash3.mix(h, p.productPrefix.hashCode)
       iteratorHash(p.productIterator, h)
     end positionedHash
 
     def iteratorHash(it: Iterator[Any], initHash: Int): Int =
-      import core.Constants._
       var h = initHash
       while it.hasNext do
         it.next() match
@@ -649,30 +772,11 @@ private class ExtractAPICollector(using Context) extends ThunkHolder {
           case xs: List[?] =>
             h = iteratorHash(xs.iterator, h)
           case c: Constant =>
-            h = MurmurHash3.mix(h, c.tag)
-            c.tag match
-              case NullTag =>
-                // No value to hash, the tag is enough.
-              case ClazzTag =>
-                // Go through `apiType` to get a value with a stable hash, it'd
-                // be better to use Murmur here too instead of relying on
-                // `hashCode`, but that would essentially mean duplicating
-                // https://github.com/sbt/zinc/blob/develop/internal/zinc-apiinfo/src/main/scala/xsbt/api/HashAPI.scala
-                // and at that point we might as well do type hashing on our own
-                // representation.
-                val apiValue = apiType(c.typeValue)
-                h = MurmurHash3.mix(h, apiValue.hashCode)
-              case _ =>
-                h = MurmurHash3.mix(h, c.value.hashCode)
+            h = constantHash(c, h)
           case n: Name =>
-            // The hashCode of the name itself is not stable across compiler instances
-            h = MurmurHash3.mix(h, n.toString.hashCode)
+            h = nameHash(n, h)
           case elem =>
-            internalError(
-              i"Don't know how to produce a stable hash for `$elem` of unknown class ${elem.getClass}",
-              tree.sourcePos)
-
-            h = MurmurHash3.mix(h, elem.toString.hashCode)
+            cannotHash(what = i"`$elem` of unknown class ${elem.getClass}", elem, tree)
       h
     end iteratorHash
 
@@ -681,6 +785,38 @@ private class ExtractAPICollector(using Context) extends ThunkHolder {
     MurmurHash3.finalizeHash(h, 0)
   end treeHash
 
+  /** Hash secondary type parameters in separate marker annotation.
+   *  We hash them separately because the position of type parameters is important.
+   */
+  private def hashTparamsExtras(tparamsExtras: List[(Int, List[api.TypeParameter])])(initHash: Int): Int =
+
+    def mixTparams(tparams: List[api.TypeParameter])(initHash: Int) =
+      var h = initHash
+      var elems = tparams
+      while elems.nonEmpty do
+        h = MurmurHash3.mix(h, elems.head.hashCode)
+        elems = elems.tail
+      h
+
+    def mixIndexAndTparams(index: Int, tparams: List[api.TypeParameter])(initHash: Int) =
+      mixTparams(tparams)(MurmurHash3.mix(initHash, index))
+
+    var h = initHash
+    var extras = tparamsExtras
+    var len = 0
+    while extras.nonEmpty do
+      h = mixIndexAndTparams(index = extras.head(0), tparams = extras.head(1))(h)
+      extras = extras.tail
+      len += 1
+    MurmurHash3.finalizeHash(h, len)
+  end hashTparamsExtras
+
+  /** Mix in the name hash also because otherwise switching which
+   *  parameter is inline will not affect the hash.
+   */
+  private def hashInlineParam(p: Symbol, h: Int) =
+    MurmurHash3.mix(p.name.toString.hashCode, MurmurHash3.mix(h, InlineParamHash))
+
   def apiAnnotation(annot: Annotation): api.Annotation = {
     // Like with inline defs, the whole body of the annotation and not just its
     // type is part of its API so we need to store its hash, but Zinc wants us
@@ -691,6 +827,6 @@ private class ExtractAPICollector(using Context) extends ThunkHolder {
     // annotated @org.junit.Test).
     api.Annotation.of(
       apiType(annot.tree.tpe), // Used by sbt to find tests to run
-      Array(api.AnnotationArgument.of("TREE_HASH", treeHash(annot.tree).toString)))
+      Array(api.AnnotationArgument.of("TREE_HASH", treeHash(annot.tree, inlineOrigin = NoSymbol).toString)))
   }
 }
diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala
index 860e9e3e6c4c..0dd30cd27ced 100644
--- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala
+++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala
@@ -14,6 +14,7 @@ import dotty.tools.dotc.core.Names._
 import dotty.tools.dotc.core.Phases._
 import dotty.tools.dotc.core.StdNames._
 import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Denotations.StaleSymbol
 import dotty.tools.dotc.core.Types._
 import dotty.tools.dotc.transform.SymUtils._
 import dotty.tools.dotc.util.{SrcPos, NoSourcePosition}
@@ -48,7 +49,9 @@ import scala.collection.{Set, mutable}
 class ExtractDependencies extends Phase {
   import ExtractDependencies._
 
-  override def phaseName: String = "sbt-deps"
+  override def phaseName: String = ExtractDependencies.name
+
+  override def description: String = ExtractDependencies.description
 
   override def isRunnable(using Context): Boolean = {
     def forceRun = ctx.settings.YdumpSbtInc.value || ctx.settings.YforceSbtPhases.value
@@ -179,6 +182,9 @@ class ExtractDependencies extends Phase {
 }
 
 object ExtractDependencies {
+  val name: String = "sbt-deps"
+  val description: String = "sends information on classes' dependencies to sbt"
+
   def classNameAsString(sym: Symbol)(using Context): String =
     sym.fullName.stripModuleClassSuffix.toString
 
@@ -310,14 +316,6 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT
     }
   }
 
-  /** Mangle a JVM symbol name in a format better suited for internal uses by sbt. */
-  private def mangledName(sym: Symbol)(using Context): Name = {
-    def constructorName = sym.owner.fullName ++ ";init;"
-
-    if (sym.isConstructor) constructorName
-    else sym.name.stripModuleClassSuffix
-  }
-
   private def addMemberRefDependency(sym: Symbol)(using Context): Unit =
     if (!ignoreDependency(sym)) {
       val enclOrModuleClass = if (sym.is(ModuleVal)) sym.moduleClass else sym.enclosingClass
@@ -327,7 +325,7 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT
       if (fromClass.exists) { // can happen when visiting imports
         assert(fromClass.isClass)
 
-        addUsedName(fromClass, mangledName(sym), UseScope.Default)
+        addUsedName(fromClass, sym.zincMangledName, UseScope.Default)
         // packages have class symbol. Only record them as used names but not dependency
         if (!sym.is(Package)) {
           _dependencies += ClassDependency(fromClass, enclOrModuleClass, DependencyByMemberRef)
@@ -348,12 +346,17 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT
     else DependencyByInheritance
 
   private def ignoreDependency(sym: Symbol)(using Context) =
-    !sym.exists ||
-    sym.isAbsent(canForce = false) || // ignore dependencies that have a symbol but do not exist.
-                                      // e.g. java.lang.Object companion object
-    sym.isEffectiveRoot ||
-    sym.isAnonymousFunction ||
-    sym.isAnonymousClass
+    try
+      !sym.exists ||
+      sym.isAbsent(canForce = false) || // ignore dependencies that have a symbol but do not exist.
+                                        // e.g. java.lang.Object companion object
+      sym.isEffectiveRoot ||
+      sym.isAnonymousFunction ||
+      sym.isAnonymousClass
+    catch case ex: StaleSymbol =>
+      // can happen for constructor proxies. Test case is pos-macros/i13532.
+      true
+
 
   /** Traverse the tree of a source file and record the dependencies and used names which
    *  can be retrieved using `dependencies` and`usedNames`.
@@ -455,18 +458,20 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT
   private abstract class TypeDependencyTraverser(using Context) extends TypeTraverser() {
     protected def addDependency(symbol: Symbol): Unit
 
-    val seen = new mutable.HashSet[Type]
+    // Avoid cycles by remembering both the types (testcase:
+    // tests/run/enum-values.scala) and the symbols of named types (testcase:
+    // tests/pos-java-interop/i13575) we've seen before.
+    val seen = new mutable.HashSet[Symbol | Type]
     def traverse(tp: Type): Unit = if (!seen.contains(tp)) {
       seen += tp
       tp match {
         case tp: NamedType =>
           val sym = tp.symbol
-          if (!sym.is(Package)) {
+          if !seen.contains(sym) && !sym.is(Package) then
+            seen += sym
             addDependency(sym)
-            if (!sym.isClass)
-              traverse(tp.info)
+            if !sym.isClass then traverse(tp.info)
             traverse(tp.prefix)
-          }
         case tp: ThisType =>
           traverse(tp.underlying)
         case tp: ConstantType =>
@@ -490,7 +495,7 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT
     val traverser = new TypeDependencyTraverser {
       def addDependency(symbol: Symbol) =
         if (!ignoreDependency(symbol) && symbol.is(Sealed)) {
-          val usedName = mangledName(symbol)
+          val usedName = symbol.zincMangledName
           addUsedName(usedName, UseScope.PatMatTarget)
         }
     }
diff --git a/compiler/src/dotty/tools/dotc/sbt/package.scala b/compiler/src/dotty/tools/dotc/sbt/package.scala
new file mode 100644
index 000000000000..379a2e45ce40
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/sbt/package.scala
@@ -0,0 +1,20 @@
+package dotty.tools.dotc.sbt
+
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.Symbols.Symbol
+import dotty.tools.dotc.core.NameOps.stripModuleClassSuffix
+import dotty.tools.dotc.core.Names.Name
+
+inline val TermNameHash = 1987 // 300th prime
+inline val TypeNameHash = 1993 // 301st prime
+inline val InlineParamHash = 1997 // 302nd prime
+
+extension (sym: Symbol)
+
+  def constructorName(using Context) =
+    sym.owner.fullName ++ ";init;"
+
+  /** Mangle a JVM symbol name in a format better suited for internal uses by sbt. */
+  def zincMangledName(using Context): Name =
+    if (sym.isConstructor) constructorName
+    else sym.name.stripModuleClassSuffix
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Access.scala b/compiler/src/dotty/tools/dotc/semanticdb/Access.scala
new file mode 100644
index 000000000000..6fb67be23ac7
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Access.scala
@@ -0,0 +1,651 @@
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
+
+package dotty.tools.dotc.semanticdb
+import dotty.tools.dotc.semanticdb.internal._
+import scala.annotation.internal.sharable
+
+sealed trait Access  extends SemanticdbGeneratedSealedOneof  derives CanEqual {
+  type MessageType = dotty.tools.dotc.semanticdb.AccessMessage
+  final def isEmpty = this.isInstanceOf[dotty.tools.dotc.semanticdb.Access.Empty.type]
+  final def isDefined = !isEmpty
+  final def asMessage: dotty.tools.dotc.semanticdb.AccessMessage = dotty.tools.dotc.semanticdb.Access.AccessTypeMapper.toBase(this)
+  final def asNonEmpty: Option[dotty.tools.dotc.semanticdb.Access.NonEmpty] = if (isEmpty) None else Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Access.NonEmpty])
+}
+
+object Access {
+  case object Empty extends dotty.tools.dotc.semanticdb.Access
+  
+  sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Access
+  def defaultInstance: dotty.tools.dotc.semanticdb.Access = Empty
+  
+  implicit val AccessTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.AccessMessage, dotty.tools.dotc.semanticdb.Access] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.AccessMessage, dotty.tools.dotc.semanticdb.Access] {
+    override def toCustom(__base: dotty.tools.dotc.semanticdb.AccessMessage): dotty.tools.dotc.semanticdb.Access = __base.sealedValue match {
+      case __v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateAccess => __v.value
+      case __v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateThisAccess => __v.value
+      case __v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateWithinAccess => __v.value
+      case __v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.ProtectedAccess => __v.value
+      case __v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.ProtectedThisAccess => __v.value
+      case __v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.ProtectedWithinAccess => __v.value
+      case __v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PublicAccess => __v.value
+      case dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty => Empty
+    }
+    override def toBase(__custom: dotty.tools.dotc.semanticdb.Access): dotty.tools.dotc.semanticdb.AccessMessage = dotty.tools.dotc.semanticdb.AccessMessage(__custom match {
+      case __v: dotty.tools.dotc.semanticdb.PrivateAccess => dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateAccess(__v)
+      case __v: dotty.tools.dotc.semanticdb.PrivateThisAccess => dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateThisAccess(__v)
+      case __v: dotty.tools.dotc.semanticdb.PrivateWithinAccess => dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateWithinAccess(__v)
+      case __v: dotty.tools.dotc.semanticdb.ProtectedAccess => dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.ProtectedAccess(__v)
+      case __v: dotty.tools.dotc.semanticdb.ProtectedThisAccess => dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.ProtectedThisAccess(__v)
+      case __v: dotty.tools.dotc.semanticdb.ProtectedWithinAccess => dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.ProtectedWithinAccess(__v)
+      case __v: dotty.tools.dotc.semanticdb.PublicAccess => dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PublicAccess(__v)
+      case Empty => dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty
+    })
+  }
+}
+@SerialVersionUID(0L)
+final case class AccessMessage(
+    sealedValue: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty
+    )  extends SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      if (sealedValue.privateAccess.isDefined) {
+        val __value = sealedValue.privateAccess.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.privateThisAccess.isDefined) {
+        val __value = sealedValue.privateThisAccess.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.privateWithinAccess.isDefined) {
+        val __value = sealedValue.privateWithinAccess.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.protectedAccess.isDefined) {
+        val __value = sealedValue.protectedAccess.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.protectedThisAccess.isDefined) {
+        val __value = sealedValue.protectedThisAccess.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.protectedWithinAccess.isDefined) {
+        val __value = sealedValue.protectedWithinAccess.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.publicAccess.isDefined) {
+        val __value = sealedValue.publicAccess.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      sealedValue.privateAccess.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(1, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.privateThisAccess.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(2, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.privateWithinAccess.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(3, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.protectedAccess.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(4, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.protectedThisAccess.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(5, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.protectedWithinAccess.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(6, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.publicAccess.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(7, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def getPrivateAccess: dotty.tools.dotc.semanticdb.PrivateAccess = sealedValue.privateAccess.getOrElse(dotty.tools.dotc.semanticdb.PrivateAccess.defaultInstance)
+    def withPrivateAccess(__v: dotty.tools.dotc.semanticdb.PrivateAccess): AccessMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateAccess(__v))
+    def getPrivateThisAccess: dotty.tools.dotc.semanticdb.PrivateThisAccess = sealedValue.privateThisAccess.getOrElse(dotty.tools.dotc.semanticdb.PrivateThisAccess.defaultInstance)
+    def withPrivateThisAccess(__v: dotty.tools.dotc.semanticdb.PrivateThisAccess): AccessMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateThisAccess(__v))
+    def getPrivateWithinAccess: dotty.tools.dotc.semanticdb.PrivateWithinAccess = sealedValue.privateWithinAccess.getOrElse(dotty.tools.dotc.semanticdb.PrivateWithinAccess.defaultInstance)
+    def withPrivateWithinAccess(__v: dotty.tools.dotc.semanticdb.PrivateWithinAccess): AccessMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateWithinAccess(__v))
+    def getProtectedAccess: dotty.tools.dotc.semanticdb.ProtectedAccess = sealedValue.protectedAccess.getOrElse(dotty.tools.dotc.semanticdb.ProtectedAccess.defaultInstance)
+    def withProtectedAccess(__v: dotty.tools.dotc.semanticdb.ProtectedAccess): AccessMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.ProtectedAccess(__v))
+    def getProtectedThisAccess: dotty.tools.dotc.semanticdb.ProtectedThisAccess = sealedValue.protectedThisAccess.getOrElse(dotty.tools.dotc.semanticdb.ProtectedThisAccess.defaultInstance)
+    def withProtectedThisAccess(__v: dotty.tools.dotc.semanticdb.ProtectedThisAccess): AccessMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.ProtectedThisAccess(__v))
+    def getProtectedWithinAccess: dotty.tools.dotc.semanticdb.ProtectedWithinAccess = sealedValue.protectedWithinAccess.getOrElse(dotty.tools.dotc.semanticdb.ProtectedWithinAccess.defaultInstance)
+    def withProtectedWithinAccess(__v: dotty.tools.dotc.semanticdb.ProtectedWithinAccess): AccessMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.ProtectedWithinAccess(__v))
+    def getPublicAccess: dotty.tools.dotc.semanticdb.PublicAccess = sealedValue.publicAccess.getOrElse(dotty.tools.dotc.semanticdb.PublicAccess.defaultInstance)
+    def withPublicAccess(__v: dotty.tools.dotc.semanticdb.PublicAccess): AccessMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PublicAccess(__v))
+    def clearSealedValue: AccessMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty)
+    def withSealedValue(__v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue): AccessMessage = copy(sealedValue = __v)
+    
+    
+    
+    
+    def toAccess: dotty.tools.dotc.semanticdb.Access = dotty.tools.dotc.semanticdb.Access.AccessTypeMapper.toCustom(this)
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Access])
+}
+
+object AccessMessage  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.AccessMessage] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.AccessMessage] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.AccessMessage = {
+    var __sealedValue: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateAccess(__sealedValue.privateAccess.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.PrivateAccess](_input__))(LiteParser.readMessage(_input__, _)))
+        case 18 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateThisAccess(__sealedValue.privateThisAccess.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.PrivateThisAccess](_input__))(LiteParser.readMessage(_input__, _)))
+        case 26 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateWithinAccess(__sealedValue.privateWithinAccess.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.PrivateWithinAccess](_input__))(LiteParser.readMessage(_input__, _)))
+        case 34 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.ProtectedAccess(__sealedValue.protectedAccess.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ProtectedAccess](_input__))(LiteParser.readMessage(_input__, _)))
+        case 42 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.ProtectedThisAccess(__sealedValue.protectedThisAccess.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ProtectedThisAccess](_input__))(LiteParser.readMessage(_input__, _)))
+        case 50 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.ProtectedWithinAccess(__sealedValue.protectedWithinAccess.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ProtectedWithinAccess](_input__))(LiteParser.readMessage(_input__, _)))
+        case 58 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PublicAccess(__sealedValue.publicAccess.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.PublicAccess](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.AccessMessage(
+        sealedValue = __sealedValue
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.AccessMessage(
+    sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty
+  )
+  sealed trait SealedValue  extends SemanticdbGeneratedOneof  derives CanEqual {
+    def isEmpty: _root_.scala.Boolean = false
+    def isDefined: _root_.scala.Boolean = true
+    def isPrivateAccess: _root_.scala.Boolean = false
+    def isPrivateThisAccess: _root_.scala.Boolean = false
+    def isPrivateWithinAccess: _root_.scala.Boolean = false
+    def isProtectedAccess: _root_.scala.Boolean = false
+    def isProtectedThisAccess: _root_.scala.Boolean = false
+    def isProtectedWithinAccess: _root_.scala.Boolean = false
+    def isPublicAccess: _root_.scala.Boolean = false
+    def privateAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.PrivateAccess] = _root_.scala.None
+    def privateThisAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.PrivateThisAccess] = _root_.scala.None
+    def privateWithinAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.PrivateWithinAccess] = _root_.scala.None
+    def protectedAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.ProtectedAccess] = _root_.scala.None
+    def protectedThisAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.ProtectedThisAccess] = _root_.scala.None
+    def protectedWithinAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.ProtectedWithinAccess] = _root_.scala.None
+    def publicAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.PublicAccess] = _root_.scala.None
+  }
+  object SealedValue {
+    @SerialVersionUID(0L)
+    case object Empty extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue {
+      type ValueType = _root_.scala.Nothing
+      override def isEmpty: _root_.scala.Boolean = true
+      override def isDefined: _root_.scala.Boolean = false
+      override def number: _root_.scala.Int = 0
+      override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value")
+    }
+  
+    @SerialVersionUID(0L)
+    final case class PrivateAccess(value: dotty.tools.dotc.semanticdb.PrivateAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.PrivateAccess
+      override def isPrivateAccess: _root_.scala.Boolean = true
+      override def privateAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.PrivateAccess] = Some(value)
+      override def number: _root_.scala.Int = 1
+    }
+    @SerialVersionUID(0L)
+    final case class PrivateThisAccess(value: dotty.tools.dotc.semanticdb.PrivateThisAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.PrivateThisAccess
+      override def isPrivateThisAccess: _root_.scala.Boolean = true
+      override def privateThisAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.PrivateThisAccess] = Some(value)
+      override def number: _root_.scala.Int = 2
+    }
+    @SerialVersionUID(0L)
+    final case class PrivateWithinAccess(value: dotty.tools.dotc.semanticdb.PrivateWithinAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.PrivateWithinAccess
+      override def isPrivateWithinAccess: _root_.scala.Boolean = true
+      override def privateWithinAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.PrivateWithinAccess] = Some(value)
+      override def number: _root_.scala.Int = 3
+    }
+    @SerialVersionUID(0L)
+    final case class ProtectedAccess(value: dotty.tools.dotc.semanticdb.ProtectedAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.ProtectedAccess
+      override def isProtectedAccess: _root_.scala.Boolean = true
+      override def protectedAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.ProtectedAccess] = Some(value)
+      override def number: _root_.scala.Int = 4
+    }
+    @SerialVersionUID(0L)
+    final case class ProtectedThisAccess(value: dotty.tools.dotc.semanticdb.ProtectedThisAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.ProtectedThisAccess
+      override def isProtectedThisAccess: _root_.scala.Boolean = true
+      override def protectedThisAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.ProtectedThisAccess] = Some(value)
+      override def number: _root_.scala.Int = 5
+    }
+    @SerialVersionUID(0L)
+    final case class ProtectedWithinAccess(value: dotty.tools.dotc.semanticdb.ProtectedWithinAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.ProtectedWithinAccess
+      override def isProtectedWithinAccess: _root_.scala.Boolean = true
+      override def protectedWithinAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.ProtectedWithinAccess] = Some(value)
+      override def number: _root_.scala.Int = 6
+    }
+    @SerialVersionUID(0L)
+    final case class PublicAccess(value: dotty.tools.dotc.semanticdb.PublicAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.PublicAccess
+      override def isPublicAccess: _root_.scala.Boolean = true
+      override def publicAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.PublicAccess] = Some(value)
+      override def number: _root_.scala.Int = 7
+    }
+  }
+  final val PRIVATE_ACCESS_FIELD_NUMBER = 1
+  final val PRIVATE_THIS_ACCESS_FIELD_NUMBER = 2
+  final val PRIVATE_WITHIN_ACCESS_FIELD_NUMBER = 3
+  final val PROTECTED_ACCESS_FIELD_NUMBER = 4
+  final val PROTECTED_THIS_ACCESS_FIELD_NUMBER = 5
+  final val PROTECTED_WITHIN_ACCESS_FIELD_NUMBER = 6
+  final val PUBLIC_ACCESS_FIELD_NUMBER = 7
+  def of(
+    sealedValue: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue
+  ): _root_.dotty.tools.dotc.semanticdb.AccessMessage = _root_.dotty.tools.dotc.semanticdb.AccessMessage(
+    sealedValue
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Access])
+}
+
+@SerialVersionUID(0L)
+final case class PrivateAccess(
+    )  extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    final override def serializedSize: _root_.scala.Int = 0
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+    }
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PrivateAccess])
+}
+
+object PrivateAccess  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateAccess] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateAccess] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.PrivateAccess = {
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.PrivateAccess(
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.PrivateAccess(
+  )
+  def of(
+  ): _root_.dotty.tools.dotc.semanticdb.PrivateAccess = _root_.dotty.tools.dotc.semanticdb.PrivateAccess(
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateAccess])
+}
+
+@SerialVersionUID(0L)
+final case class PrivateThisAccess(
+    )  extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    final override def serializedSize: _root_.scala.Int = 0
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+    }
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PrivateThisAccess])
+}
+
+object PrivateThisAccess  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateThisAccess] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateThisAccess] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.PrivateThisAccess = {
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.PrivateThisAccess(
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.PrivateThisAccess(
+  )
+  def of(
+  ): _root_.dotty.tools.dotc.semanticdb.PrivateThisAccess = _root_.dotty.tools.dotc.semanticdb.PrivateThisAccess(
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateThisAccess])
+}
+
+@SerialVersionUID(0L)
+final case class PrivateWithinAccess(
+    symbol: _root_.scala.Predef.String = ""
+    )  extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = symbol
+        if (!__value.isEmpty) {
+          __size += SemanticdbOutputStream.computeStringSize(1, __value)
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = symbol
+        if (!__v.isEmpty) {
+          _output__.writeString(1, __v)
+        }
+      };
+    }
+    def withSymbol(__v: _root_.scala.Predef.String): PrivateWithinAccess = copy(symbol = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PrivateWithinAccess])
+}
+
+object PrivateWithinAccess  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateWithinAccess] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateWithinAccess] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.PrivateWithinAccess = {
+    var __symbol: _root_.scala.Predef.String = ""
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __symbol = _input__.readStringRequireUtf8()
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.PrivateWithinAccess(
+        symbol = __symbol
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.PrivateWithinAccess(
+    symbol = ""
+  )
+  final val SYMBOL_FIELD_NUMBER = 1
+  def of(
+    symbol: _root_.scala.Predef.String
+  ): _root_.dotty.tools.dotc.semanticdb.PrivateWithinAccess = _root_.dotty.tools.dotc.semanticdb.PrivateWithinAccess(
+    symbol
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateWithinAccess])
+}
+
+@SerialVersionUID(0L)
+final case class ProtectedAccess(
+    )  extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    final override def serializedSize: _root_.scala.Int = 0
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+    }
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ProtectedAccess])
+}
+
+object ProtectedAccess  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedAccess] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedAccess] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ProtectedAccess = {
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.ProtectedAccess(
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ProtectedAccess(
+  )
+  def of(
+  ): _root_.dotty.tools.dotc.semanticdb.ProtectedAccess = _root_.dotty.tools.dotc.semanticdb.ProtectedAccess(
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedAccess])
+}
+
+@SerialVersionUID(0L)
+final case class ProtectedThisAccess(
+    )  extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    final override def serializedSize: _root_.scala.Int = 0
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+    }
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ProtectedThisAccess])
+}
+
+// Companion/parser for the field-less `ProtectedThisAccess` message.
+object ProtectedThisAccess  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedThisAccess] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedThisAccess] = this
+  // No declared fields: read tags until 0 (end of input), skipping unknown fields.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ProtectedThisAccess = {
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.ProtectedThisAccess(
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ProtectedThisAccess(
+  )
+  def of(
+  ): _root_.dotty.tools.dotc.semanticdb.ProtectedThisAccess = _root_.dotty.tools.dotc.semanticdb.ProtectedThisAccess(
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedThisAccess])
+}
+
+@SerialVersionUID(0L)
+// `protected[symbol]` access modifier carrying the qualifier symbol (proto field 1).
+final case class ProtectedWithinAccess(
+    symbol: _root_.scala.Predef.String = ""
+    )  extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    // Cached wire size; 0 means "not yet computed" (an actually-empty message is
+    // cheap to recompute, so the sentinel is harmless).
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = symbol
+        // proto3 semantics: default ("") fields are omitted from the wire format.
+        if (!__value.isEmpty) {
+          __size += SemanticdbOutputStream.computeStringSize(1, __value)
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = symbol
+        if (!__v.isEmpty) {
+          _output__.writeString(1, __v)
+        }
+      };
+    }
+    // Immutable update helper (generated builder-style setter).
+    def withSymbol(__v: _root_.scala.Predef.String): ProtectedWithinAccess = copy(symbol = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ProtectedWithinAccess])
+}
+
+// Companion/parser for `ProtectedWithinAccess` (single string field, number 1).
+object ProtectedWithinAccess  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedWithinAccess] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedWithinAccess] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ProtectedWithinAccess = {
+    var __symbol: _root_.scala.Predef.String = ""
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        // tag 10 = field 1, wire type 2 (length-delimited): the `symbol` string.
+        case 10 =>
+          __symbol = _input__.readStringRequireUtf8()
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.ProtectedWithinAccess(
+        symbol = __symbol
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ProtectedWithinAccess(
+    symbol = ""
+  )
+  final val SYMBOL_FIELD_NUMBER = 1
+  def of(
+    symbol: _root_.scala.Predef.String
+  ): _root_.dotty.tools.dotc.semanticdb.ProtectedWithinAccess = _root_.dotty.tools.dotc.semanticdb.ProtectedWithinAccess(
+    symbol
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedWithinAccess])
+}
+
+@SerialVersionUID(0L)
+// Field-less `public` access modifier: one member of the `Access` oneof.
+// Serializes to zero bytes (no fields), hence the constant serializedSize of 0.
+final case class PublicAccess(
+    )  extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    final override def serializedSize: _root_.scala.Int = 0
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+    }
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PublicAccess])
+}
+
+// Companion/parser for the field-less `PublicAccess` message.
+object PublicAccess  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PublicAccess] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PublicAccess] = this
+  // No declared fields: read tags until 0 (end of input), skipping unknown fields.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.PublicAccess = {
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.PublicAccess(
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.PublicAccess(
+  )
+  def of(
+  ): _root_.dotty.tools.dotc.semanticdb.PublicAccess = _root_.dotty.tools.dotc.semanticdb.PublicAccess(
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PublicAccess])
+}
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Annotation.scala b/compiler/src/dotty/tools/dotc/semanticdb/Annotation.scala
new file mode 100644
index 000000000000..214354bd4cf8
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Annotation.scala
@@ -0,0 +1,90 @@
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
+
+package dotty.tools.dotc.semanticdb
+import dotty.tools.dotc.semanticdb.internal._
+import scala.annotation.internal.sharable
+
+@SerialVersionUID(0L)
+// SemanticDB annotation: wraps a single `tpe` field (proto field 1).
+// `tpe` is stored as the custom sealed-oneof type `Type`; the companion's
+// `_typemapper_tpe` converts to/from the wire-level `TypeMessage` representation.
+final case class Annotation(
+    tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+    )  extends SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    // Cached wire size; 0 means "not yet computed".
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toBase(tpe)
+        // proto3 semantics: a default-valued message field is omitted from the wire.
+        if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toBase(tpe)
+        if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          // field 1, wire type 2 (length-delimited), preceded by the payload length.
+          _output__.writeTag(1, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+    }
+    // Immutable update helper (generated builder-style setter).
+    def withTpe(__v: dotty.tools.dotc.semanticdb.Type): Annotation = copy(tpe = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Annotation])
+}
+
+// Companion/parser for `Annotation`; hosts the Type<->TypeMessage mapper.
+object Annotation  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Annotation] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Annotation] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Annotation = {
+    var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        // tag 10 = field 1, wire type 2: nested TypeMessage. If the field appears
+        // more than once, later occurrences are merged into the earlier value.
+        case 10 =>
+          __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.Annotation(
+        tpe = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.Annotation(
+    tpe = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+  )
+  final val TPE_FIELD_NUMBER = 1
+  @transient @sharable
+  // Bridges the wire-level TypeMessage and the ergonomic sealed-oneof Type;
+  // resolved once from the implicit scope and reused by the class above.
+  private[semanticdb] val _typemapper_tpe: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    tpe: dotty.tools.dotc.semanticdb.Type
+  ): _root_.dotty.tools.dotc.semanticdb.Annotation = _root_.dotty.tools.dotc.semanticdb.Annotation(
+    tpe
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Annotation])
+}
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Constant.scala b/compiler/src/dotty/tools/dotc/semanticdb/Constant.scala
new file mode 100644
index 000000000000..1f8f7778494d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Constant.scala
@@ -0,0 +1,1171 @@
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
+
+package dotty.tools.dotc.semanticdb
+import dotty.tools.dotc.semanticdb.internal._
+import scala.annotation.internal.sharable
+
+// Ergonomic ADT view of the protobuf `Constant` oneof: `Constant.Empty` models
+// an unset oneof, every concrete constant extends `Constant.NonEmpty`, and
+// `asMessage` converts to the wire-level `ConstantMessage` via the type mapper.
+sealed trait Constant  extends SemanticdbGeneratedSealedOneof  derives CanEqual {
+  type MessageType = dotty.tools.dotc.semanticdb.ConstantMessage
+  final def isEmpty = this.isInstanceOf[dotty.tools.dotc.semanticdb.Constant.Empty.type]
+  final def isDefined = !isEmpty
+  final def asMessage: dotty.tools.dotc.semanticdb.ConstantMessage = dotty.tools.dotc.semanticdb.Constant.ConstantTypeMapper.toBase(this)
+  final def asNonEmpty: Option[dotty.tools.dotc.semanticdb.Constant.NonEmpty] = if (isEmpty) None else Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Constant.NonEmpty])
+}
+
+// Companion of the `Constant` ADT: defines Empty/NonEmpty and the bidirectional
+// mapper between the ADT and the wire-level `ConstantMessage` oneof wrapper.
+object Constant {
+  case object Empty extends dotty.tools.dotc.semanticdb.Constant
+  
+  sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Constant
+  def defaultInstance: dotty.tools.dotc.semanticdb.Constant = Empty
+  
+  implicit val ConstantTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.ConstantMessage, dotty.tools.dotc.semanticdb.Constant] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.ConstantMessage, dotty.tools.dotc.semanticdb.Constant] {
+    // Wire -> ADT: unwrap whichever SealedValue case is set (Empty when unset).
+    override def toCustom(__base: dotty.tools.dotc.semanticdb.ConstantMessage): dotty.tools.dotc.semanticdb.Constant = __base.sealedValue match {
+      case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.UnitConstant => __v.value
+      case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.BooleanConstant => __v.value
+      case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.ByteConstant => __v.value
+      case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.ShortConstant => __v.value
+      case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.CharConstant => __v.value
+      case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.IntConstant => __v.value
+      case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.LongConstant => __v.value
+      case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.FloatConstant => __v.value
+      case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.DoubleConstant => __v.value
+      case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.StringConstant => __v.value
+      case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.NullConstant => __v.value
+      case dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty => Empty
+    }
+    // ADT -> wire: re-wrap each constant in its SealedValue case (inverse of toCustom).
+    override def toBase(__custom: dotty.tools.dotc.semanticdb.Constant): dotty.tools.dotc.semanticdb.ConstantMessage = dotty.tools.dotc.semanticdb.ConstantMessage(__custom match {
+      case __v: dotty.tools.dotc.semanticdb.UnitConstant => dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.UnitConstant(__v)
+      case __v: dotty.tools.dotc.semanticdb.BooleanConstant => dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.BooleanConstant(__v)
+      case __v: dotty.tools.dotc.semanticdb.ByteConstant => dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.ByteConstant(__v)
+      case __v: dotty.tools.dotc.semanticdb.ShortConstant => dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.ShortConstant(__v)
+      case __v: dotty.tools.dotc.semanticdb.CharConstant => dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.CharConstant(__v)
+      case __v: dotty.tools.dotc.semanticdb.IntConstant => dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.IntConstant(__v)
+      case __v: dotty.tools.dotc.semanticdb.LongConstant => dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.LongConstant(__v)
+      case __v: dotty.tools.dotc.semanticdb.FloatConstant => dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.FloatConstant(__v)
+      case __v: dotty.tools.dotc.semanticdb.DoubleConstant => dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.DoubleConstant(__v)
+      case __v: dotty.tools.dotc.semanticdb.StringConstant => dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.StringConstant(__v)
+      case __v: dotty.tools.dotc.semanticdb.NullConstant => dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.NullConstant(__v)
+      case Empty => dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty
+    })
+  }
+}
+@SerialVersionUID(0L)
+// Wire-level wrapper for the `Constant` oneof: exactly one of the eleven
+// constant variants (fields 1..11) may be set in `sealedValue`; Empty means unset.
+final case class ConstantMessage(
+    sealedValue: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty
+    )  extends SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    // Cached wire size; 0 means "not yet computed".
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    // At most one of the branches below fires, since SealedValue is a oneof:
+    // each adds tag byte + length-prefix size + nested payload size.
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      if (sealedValue.unitConstant.isDefined) {
+        val __value = sealedValue.unitConstant.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.booleanConstant.isDefined) {
+        val __value = sealedValue.booleanConstant.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.byteConstant.isDefined) {
+        val __value = sealedValue.byteConstant.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.shortConstant.isDefined) {
+        val __value = sealedValue.shortConstant.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.charConstant.isDefined) {
+        val __value = sealedValue.charConstant.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.intConstant.isDefined) {
+        val __value = sealedValue.intConstant.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.longConstant.isDefined) {
+        val __value = sealedValue.longConstant.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.floatConstant.isDefined) {
+        val __value = sealedValue.floatConstant.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.doubleConstant.isDefined) {
+        val __value = sealedValue.doubleConstant.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.stringConstant.isDefined) {
+        val __value = sealedValue.stringConstant.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.nullConstant.isDefined) {
+        val __value = sealedValue.nullConstant.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    // Writes the single set variant as a length-delimited submessage
+    // (wire type 2) with its field number 1..11 as the tag.
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      sealedValue.unitConstant.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(1, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.booleanConstant.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(2, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.byteConstant.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(3, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.shortConstant.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(4, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.charConstant.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(5, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.intConstant.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(6, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.longConstant.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(7, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.floatConstant.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(8, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.doubleConstant.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(9, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.stringConstant.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(10, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.nullConstant.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(11, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    // Generated per-variant accessors: getX falls back to X.defaultInstance when
+    // another variant is set; withX replaces the whole oneof with that variant.
+    def getUnitConstant: dotty.tools.dotc.semanticdb.UnitConstant = sealedValue.unitConstant.getOrElse(dotty.tools.dotc.semanticdb.UnitConstant.defaultInstance)
+    def withUnitConstant(__v: dotty.tools.dotc.semanticdb.UnitConstant): ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.UnitConstant(__v))
+    def getBooleanConstant: dotty.tools.dotc.semanticdb.BooleanConstant = sealedValue.booleanConstant.getOrElse(dotty.tools.dotc.semanticdb.BooleanConstant.defaultInstance)
+    def withBooleanConstant(__v: dotty.tools.dotc.semanticdb.BooleanConstant): ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.BooleanConstant(__v))
+    def getByteConstant: dotty.tools.dotc.semanticdb.ByteConstant = sealedValue.byteConstant.getOrElse(dotty.tools.dotc.semanticdb.ByteConstant.defaultInstance)
+    def withByteConstant(__v: dotty.tools.dotc.semanticdb.ByteConstant): ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.ByteConstant(__v))
+    def getShortConstant: dotty.tools.dotc.semanticdb.ShortConstant = sealedValue.shortConstant.getOrElse(dotty.tools.dotc.semanticdb.ShortConstant.defaultInstance)
+    def withShortConstant(__v: dotty.tools.dotc.semanticdb.ShortConstant): ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.ShortConstant(__v))
+    def getCharConstant: dotty.tools.dotc.semanticdb.CharConstant = sealedValue.charConstant.getOrElse(dotty.tools.dotc.semanticdb.CharConstant.defaultInstance)
+    def withCharConstant(__v: dotty.tools.dotc.semanticdb.CharConstant): ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.CharConstant(__v))
+    def getIntConstant: dotty.tools.dotc.semanticdb.IntConstant = sealedValue.intConstant.getOrElse(dotty.tools.dotc.semanticdb.IntConstant.defaultInstance)
+    def withIntConstant(__v: dotty.tools.dotc.semanticdb.IntConstant): ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.IntConstant(__v))
+    def getLongConstant: dotty.tools.dotc.semanticdb.LongConstant = sealedValue.longConstant.getOrElse(dotty.tools.dotc.semanticdb.LongConstant.defaultInstance)
+    def withLongConstant(__v: dotty.tools.dotc.semanticdb.LongConstant): ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.LongConstant(__v))
+    def getFloatConstant: dotty.tools.dotc.semanticdb.FloatConstant = sealedValue.floatConstant.getOrElse(dotty.tools.dotc.semanticdb.FloatConstant.defaultInstance)
+    def withFloatConstant(__v: dotty.tools.dotc.semanticdb.FloatConstant): ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.FloatConstant(__v))
+    def getDoubleConstant: dotty.tools.dotc.semanticdb.DoubleConstant = sealedValue.doubleConstant.getOrElse(dotty.tools.dotc.semanticdb.DoubleConstant.defaultInstance)
+    def withDoubleConstant(__v: dotty.tools.dotc.semanticdb.DoubleConstant): ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.DoubleConstant(__v))
+    def getStringConstant: dotty.tools.dotc.semanticdb.StringConstant = sealedValue.stringConstant.getOrElse(dotty.tools.dotc.semanticdb.StringConstant.defaultInstance)
+    def withStringConstant(__v: dotty.tools.dotc.semanticdb.StringConstant): ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.StringConstant(__v))
+    def getNullConstant: dotty.tools.dotc.semanticdb.NullConstant = sealedValue.nullConstant.getOrElse(dotty.tools.dotc.semanticdb.NullConstant.defaultInstance)
+    def withNullConstant(__v: dotty.tools.dotc.semanticdb.NullConstant): ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.NullConstant(__v))
+    def clearSealedValue: ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty)
+    def withSealedValue(__v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue): ConstantMessage = copy(sealedValue = __v)
+    
+    
+    
+    
+    // Convert to the ergonomic ADT view via the companion's type mapper.
+    def toConstant: dotty.tools.dotc.semanticdb.Constant = dotty.tools.dotc.semanticdb.Constant.ConstantTypeMapper.toCustom(this)
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Constant])
+}
+
+object ConstantMessage  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ConstantMessage] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ConstantMessage] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ConstantMessage = {
+    var __sealedValue: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.UnitConstant(__sealedValue.unitConstant.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.UnitConstant](_input__))(LiteParser.readMessage(_input__, _)))
+        case 18 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.BooleanConstant(__sealedValue.booleanConstant.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.BooleanConstant](_input__))(LiteParser.readMessage(_input__, _)))
+        case 26 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.ByteConstant(__sealedValue.byteConstant.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ByteConstant](_input__))(LiteParser.readMessage(_input__, _)))
+        case 34 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.ShortConstant(__sealedValue.shortConstant.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ShortConstant](_input__))(LiteParser.readMessage(_input__, _)))
+        case 42 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.CharConstant(__sealedValue.charConstant.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.CharConstant](_input__))(LiteParser.readMessage(_input__, _)))
+        case 50 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.IntConstant(__sealedValue.intConstant.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.IntConstant](_input__))(LiteParser.readMessage(_input__, _)))
+        case 58 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.LongConstant(__sealedValue.longConstant.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.LongConstant](_input__))(LiteParser.readMessage(_input__, _)))
+        case 66 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.FloatConstant(__sealedValue.floatConstant.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.FloatConstant](_input__))(LiteParser.readMessage(_input__, _)))
+        case 74 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.DoubleConstant(__sealedValue.doubleConstant.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.DoubleConstant](_input__))(LiteParser.readMessage(_input__, _)))
+        case 82 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.StringConstant(__sealedValue.stringConstant.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.StringConstant](_input__))(LiteParser.readMessage(_input__, _)))
+        case 90 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.NullConstant(__sealedValue.nullConstant.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.NullConstant](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.ConstantMessage(
+        sealedValue = __sealedValue
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ConstantMessage(
+    sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty
+  )
+  sealed trait SealedValue  extends SemanticdbGeneratedOneof  derives CanEqual {
+    def isEmpty: _root_.scala.Boolean = false
+    def isDefined: _root_.scala.Boolean = true
+    def isUnitConstant: _root_.scala.Boolean = false
+    def isBooleanConstant: _root_.scala.Boolean = false
+    def isByteConstant: _root_.scala.Boolean = false
+    def isShortConstant: _root_.scala.Boolean = false
+    def isCharConstant: _root_.scala.Boolean = false
+    def isIntConstant: _root_.scala.Boolean = false
+    def isLongConstant: _root_.scala.Boolean = false
+    def isFloatConstant: _root_.scala.Boolean = false
+    def isDoubleConstant: _root_.scala.Boolean = false
+    def isStringConstant: _root_.scala.Boolean = false
+    def isNullConstant: _root_.scala.Boolean = false
+    def unitConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.UnitConstant] = _root_.scala.None
+    def booleanConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.BooleanConstant] = _root_.scala.None
+    def byteConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.ByteConstant] = _root_.scala.None
+    def shortConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.ShortConstant] = _root_.scala.None
+    def charConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.CharConstant] = _root_.scala.None
+    def intConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.IntConstant] = _root_.scala.None
+    def longConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.LongConstant] = _root_.scala.None
+    def floatConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.FloatConstant] = _root_.scala.None
+    def doubleConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.DoubleConstant] = _root_.scala.None
+    def stringConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.StringConstant] = _root_.scala.None
+    def nullConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.NullConstant] = _root_.scala.None
+  }
+  object SealedValue {
+    @SerialVersionUID(0L)
+    case object Empty extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue {
+      type ValueType = _root_.scala.Nothing
+      override def isEmpty: _root_.scala.Boolean = true
+      override def isDefined: _root_.scala.Boolean = false
+      override def number: _root_.scala.Int = 0
+      override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value")
+    }
+  
+    @SerialVersionUID(0L)
+    final case class UnitConstant(value: dotty.tools.dotc.semanticdb.UnitConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.UnitConstant
+      override def isUnitConstant: _root_.scala.Boolean = true
+      override def unitConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.UnitConstant] = Some(value)
+      override def number: _root_.scala.Int = 1
+    }
+    @SerialVersionUID(0L)  // Oneof wrappers: one case class per Constant variant; number is the protobuf field number of that case.
+    final case class BooleanConstant(value: dotty.tools.dotc.semanticdb.BooleanConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.BooleanConstant
+      override def isBooleanConstant: _root_.scala.Boolean = true
+      override def booleanConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.BooleanConstant] = Some(value)
+      override def number: _root_.scala.Int = 2
+    }
+    @SerialVersionUID(0L)
+    final case class ByteConstant(value: dotty.tools.dotc.semanticdb.ByteConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.ByteConstant
+      override def isByteConstant: _root_.scala.Boolean = true
+      override def byteConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.ByteConstant] = Some(value)
+      override def number: _root_.scala.Int = 3
+    }
+    @SerialVersionUID(0L)
+    final case class ShortConstant(value: dotty.tools.dotc.semanticdb.ShortConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.ShortConstant
+      override def isShortConstant: _root_.scala.Boolean = true
+      override def shortConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.ShortConstant] = Some(value)
+      override def number: _root_.scala.Int = 4
+    }
+    @SerialVersionUID(0L)
+    final case class CharConstant(value: dotty.tools.dotc.semanticdb.CharConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.CharConstant
+      override def isCharConstant: _root_.scala.Boolean = true
+      override def charConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.CharConstant] = Some(value)
+      override def number: _root_.scala.Int = 5
+    }
+    @SerialVersionUID(0L)
+    final case class IntConstant(value: dotty.tools.dotc.semanticdb.IntConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.IntConstant
+      override def isIntConstant: _root_.scala.Boolean = true
+      override def intConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.IntConstant] = Some(value)
+      override def number: _root_.scala.Int = 6
+    }
+    @SerialVersionUID(0L)
+    final case class LongConstant(value: dotty.tools.dotc.semanticdb.LongConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.LongConstant
+      override def isLongConstant: _root_.scala.Boolean = true
+      override def longConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.LongConstant] = Some(value)
+      override def number: _root_.scala.Int = 7
+    }
+    @SerialVersionUID(0L)
+    final case class FloatConstant(value: dotty.tools.dotc.semanticdb.FloatConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.FloatConstant
+      override def isFloatConstant: _root_.scala.Boolean = true
+      override def floatConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.FloatConstant] = Some(value)
+      override def number: _root_.scala.Int = 8
+    }
+    @SerialVersionUID(0L)
+    final case class DoubleConstant(value: dotty.tools.dotc.semanticdb.DoubleConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.DoubleConstant
+      override def isDoubleConstant: _root_.scala.Boolean = true
+      override def doubleConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.DoubleConstant] = Some(value)
+      override def number: _root_.scala.Int = 9
+    }
+    @SerialVersionUID(0L)
+    final case class StringConstant(value: dotty.tools.dotc.semanticdb.StringConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.StringConstant
+      override def isStringConstant: _root_.scala.Boolean = true
+      override def stringConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.StringConstant] = Some(value)
+      override def number: _root_.scala.Int = 10
+    }
+    @SerialVersionUID(0L)
+    final case class NullConstant(value: dotty.tools.dotc.semanticdb.NullConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.NullConstant
+      override def isNullConstant: _root_.scala.Boolean = true
+      override def nullConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.NullConstant] = Some(value)
+      override def number: _root_.scala.Int = 11
+    }
+  }
+  final val UNIT_CONSTANT_FIELD_NUMBER = 1  // Field-number constants mirror SealedValue.number for each oneof case.
+  final val BOOLEAN_CONSTANT_FIELD_NUMBER = 2
+  final val BYTE_CONSTANT_FIELD_NUMBER = 3
+  final val SHORT_CONSTANT_FIELD_NUMBER = 4
+  final val CHAR_CONSTANT_FIELD_NUMBER = 5
+  final val INT_CONSTANT_FIELD_NUMBER = 6
+  final val LONG_CONSTANT_FIELD_NUMBER = 7
+  final val FLOAT_CONSTANT_FIELD_NUMBER = 8
+  final val DOUBLE_CONSTANT_FIELD_NUMBER = 9
+  final val STRING_CONSTANT_FIELD_NUMBER = 10
+  final val NULL_CONSTANT_FIELD_NUMBER = 11
+  def of(  // Factory: wrap an already-built oneof value into a ConstantMessage.
+    sealedValue: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue
+  ): _root_.dotty.tools.dotc.semanticdb.ConstantMessage = _root_.dotty.tools.dotc.semanticdb.ConstantMessage(
+    sealedValue
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Constant])
+}
+
+@SerialVersionUID(0L)
+final case class UnitConstant(
+    )  extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {  // Zero-field message standing for the scala.Unit literal.
+    final override def serializedSize: _root_.scala.Int = 0
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+    }
+    // No fields: nothing to write, so serializedSize is a constant 0 and writeTo is a no-op.
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.UnitConstant])
+}
+
+object UnitConstant  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UnitConstant] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UnitConstant] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.UnitConstant = {
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true  // tag 0 signals end of this message
+        case tag => _input__.skipField(tag)  // unknown fields are skipped for forward compatibility
+      }
+    }
+    dotty.tools.dotc.semanticdb.UnitConstant(
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.UnitConstant(
+  )
+  def of(
+  ): _root_.dotty.tools.dotc.semanticdb.UnitConstant = _root_.dotty.tools.dotc.semanticdb.UnitConstant(
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UnitConstant])
+}
+
+@SerialVersionUID(0L)
+final case class BooleanConstant(
+    value: _root_.scala.Boolean = false
+    )  extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {  // Wraps a Boolean literal; 'value' is protobuf field 1.
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = value
+        if (__value != false) {  // proto3: the default value is not serialized
+          __size += SemanticdbOutputStream.computeBoolSize(1, __value)
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = value
+        if (__v != false) {
+          _output__.writeBool(1, __v)
+        }
+      };
+    }
+    def withValue(__v: _root_.scala.Boolean): BooleanConstant = copy(value = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.BooleanConstant])
+}
+
+object BooleanConstant  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.BooleanConstant] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.BooleanConstant] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.BooleanConstant = {
+    var __value: _root_.scala.Boolean = false
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 8 =>  // field 1, wire type 0 (varint)
+          __value = _input__.readBool()
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.BooleanConstant(
+        value = __value
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.BooleanConstant(
+    value = false
+  )
+  final val VALUE_FIELD_NUMBER = 1
+  def of(
+    value: _root_.scala.Boolean
+  ): _root_.dotty.tools.dotc.semanticdb.BooleanConstant = _root_.dotty.tools.dotc.semanticdb.BooleanConstant(
+    value
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.BooleanConstant])
+}
+
+@SerialVersionUID(0L)
+final case class ByteConstant(
+    value: _root_.scala.Int = 0
+    )  extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {  // Wraps a Byte literal (stored as Int, matching the proto int32 field 1).
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = value
+        if (__value != 0) {  // proto3: the default value is not serialized
+          __size += SemanticdbOutputStream.computeInt32Size(1, __value)
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = value
+        if (__v != 0) {
+          _output__.writeInt32(1, __v)
+        }
+      };
+    }
+    def withValue(__v: _root_.scala.Int): ByteConstant = copy(value = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ByteConstant])
+}
+
+object ByteConstant  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ByteConstant] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ByteConstant] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ByteConstant = {
+    var __value: _root_.scala.Int = 0
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 8 =>  // field 1, wire type 0 (varint)
+          __value = _input__.readInt32()
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.ByteConstant(
+        value = __value
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ByteConstant(
+    value = 0
+  )
+  final val VALUE_FIELD_NUMBER = 1
+  def of(
+    value: _root_.scala.Int
+  ): _root_.dotty.tools.dotc.semanticdb.ByteConstant = _root_.dotty.tools.dotc.semanticdb.ByteConstant(
+    value
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ByteConstant])
+}
+
+@SerialVersionUID(0L)
+final case class ShortConstant(
+    value: _root_.scala.Int = 0
+    )  extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {  // Wraps a Short literal (stored as Int, matching the proto int32 field 1).
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = value
+        if (__value != 0) {  // proto3: the default value is not serialized
+          __size += SemanticdbOutputStream.computeInt32Size(1, __value)
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = value
+        if (__v != 0) {
+          _output__.writeInt32(1, __v)
+        }
+      };
+    }
+    def withValue(__v: _root_.scala.Int): ShortConstant = copy(value = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ShortConstant])
+}
+
+object ShortConstant  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ShortConstant] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ShortConstant] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ShortConstant = {
+    var __value: _root_.scala.Int = 0
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 8 =>  // field 1, wire type 0 (varint)
+          __value = _input__.readInt32()
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.ShortConstant(
+        value = __value
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ShortConstant(
+    value = 0
+  )
+  final val VALUE_FIELD_NUMBER = 1
+  def of(
+    value: _root_.scala.Int
+  ): _root_.dotty.tools.dotc.semanticdb.ShortConstant = _root_.dotty.tools.dotc.semanticdb.ShortConstant(
+    value
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ShortConstant])
+}
+
+@SerialVersionUID(0L)
+final case class CharConstant(
+    value: _root_.scala.Int = 0
+    )  extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {  // Wraps a Char literal (stored as Int, matching the proto int32 field 1).
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = value
+        if (__value != 0) {  // proto3: the default value is not serialized
+          __size += SemanticdbOutputStream.computeInt32Size(1, __value)
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = value
+        if (__v != 0) {
+          _output__.writeInt32(1, __v)
+        }
+      };
+    }
+    def withValue(__v: _root_.scala.Int): CharConstant = copy(value = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.CharConstant])
+}
+
+object CharConstant  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.CharConstant] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.CharConstant] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.CharConstant = {
+    var __value: _root_.scala.Int = 0
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 8 =>  // field 1, wire type 0 (varint)
+          __value = _input__.readInt32()
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.CharConstant(
+        value = __value
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.CharConstant(
+    value = 0
+  )
+  final val VALUE_FIELD_NUMBER = 1
+  def of(
+    value: _root_.scala.Int
+  ): _root_.dotty.tools.dotc.semanticdb.CharConstant = _root_.dotty.tools.dotc.semanticdb.CharConstant(
+    value
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.CharConstant])
+}
+
+@SerialVersionUID(0L)
+final case class IntConstant(
+    value: _root_.scala.Int = 0
+    )  extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {  // Wraps an Int literal; 'value' is protobuf int32 field 1.
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = value
+        if (__value != 0) {  // proto3: the default value is not serialized
+          __size += SemanticdbOutputStream.computeInt32Size(1, __value)
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = value
+        if (__v != 0) {
+          _output__.writeInt32(1, __v)
+        }
+      };
+    }
+    def withValue(__v: _root_.scala.Int): IntConstant = copy(value = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.IntConstant])
+}
+
+object IntConstant  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IntConstant] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IntConstant] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.IntConstant = {
+    var __value: _root_.scala.Int = 0
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 8 =>  // field 1, wire type 0 (varint)
+          __value = _input__.readInt32()
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.IntConstant(
+        value = __value
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.IntConstant(
+    value = 0
+  )
+  final val VALUE_FIELD_NUMBER = 1
+  def of(
+    value: _root_.scala.Int
+  ): _root_.dotty.tools.dotc.semanticdb.IntConstant = _root_.dotty.tools.dotc.semanticdb.IntConstant(
+    value
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IntConstant])
+}
+
+@SerialVersionUID(0L)
+final case class LongConstant(
+    value: _root_.scala.Long = 0L
+    )  extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {  // Wraps a Long literal; 'value' is protobuf int64 field 1.
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = value
+        if (__value != 0L) {  // proto3: the default value is not serialized
+          __size += SemanticdbOutputStream.computeInt64Size(1, __value)
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = value
+        if (__v != 0L) {
+          _output__.writeInt64(1, __v)
+        }
+      };
+    }
+    def withValue(__v: _root_.scala.Long): LongConstant = copy(value = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.LongConstant])
+}
+
+object LongConstant  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LongConstant] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LongConstant] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.LongConstant = {
+    var __value: _root_.scala.Long = 0L
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 8 =>  // field 1, wire type 0 (varint)
+          __value = _input__.readInt64()
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.LongConstant(
+        value = __value
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.LongConstant(
+    value = 0L
+  )
+  final val VALUE_FIELD_NUMBER = 1
+  def of(
+    value: _root_.scala.Long
+  ): _root_.dotty.tools.dotc.semanticdb.LongConstant = _root_.dotty.tools.dotc.semanticdb.LongConstant(
+    value
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LongConstant])
+}
+
+@SerialVersionUID(0L)
+final case class FloatConstant(
+    value: _root_.scala.Float = 0.0f
+    )  extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {  // Wraps a Float literal; 'value' is protobuf float field 1.
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = value
+        if (__value != 0.0f) {  // proto3: the default value is not serialized
+          __size += SemanticdbOutputStream.computeFloatSize(1, __value)
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = value
+        if (__v != 0.0f) {
+          _output__.writeFloat(1, __v)
+        }
+      };
+    }
+    def withValue(__v: _root_.scala.Float): FloatConstant = copy(value = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.FloatConstant])
+}
+
+object FloatConstant  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.FloatConstant] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.FloatConstant] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.FloatConstant = {
+    var __value: _root_.scala.Float = 0.0f
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 13 =>  // field 1, wire type 5 (fixed32)
+          __value = _input__.readFloat()
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.FloatConstant(
+        value = __value
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.FloatConstant(
+    value = 0.0f
+  )
+  final val VALUE_FIELD_NUMBER = 1
+  def of(
+    value: _root_.scala.Float
+  ): _root_.dotty.tools.dotc.semanticdb.FloatConstant = _root_.dotty.tools.dotc.semanticdb.FloatConstant(
+    value
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.FloatConstant])
+}
+
+@SerialVersionUID(0L)
+final case class DoubleConstant(
+    value: _root_.scala.Double = 0.0
+    )  extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {  // Wraps a Double literal; 'value' is protobuf double field 1.
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = value
+        if (__value != 0.0) {  // proto3: the default value is not serialized
+          __size += SemanticdbOutputStream.computeDoubleSize(1, __value)
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = value
+        if (__v != 0.0) {
+          _output__.writeDouble(1, __v)
+        }
+      };
+    }
+    def withValue(__v: _root_.scala.Double): DoubleConstant = copy(value = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.DoubleConstant])
+}
+
+object DoubleConstant  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.DoubleConstant] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.DoubleConstant] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.DoubleConstant = {
+    var __value: _root_.scala.Double = 0.0
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 9 =>  // field 1, wire type 1 (fixed64)
+          __value = _input__.readDouble()
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.DoubleConstant(
+        value = __value
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.DoubleConstant(
+    value = 0.0
+  )
+  final val VALUE_FIELD_NUMBER = 1
+  def of(
+    value: _root_.scala.Double
+  ): _root_.dotty.tools.dotc.semanticdb.DoubleConstant = _root_.dotty.tools.dotc.semanticdb.DoubleConstant(
+    value
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.DoubleConstant])
+}
+
+@SerialVersionUID(0L)
+final case class StringConstant(
+    value: _root_.scala.Predef.String = ""
+    )  extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {  // Wraps a String literal; 'value' is protobuf string field 1.
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = value
+        if (!__value.isEmpty) {  // proto3: the empty string (default) is not serialized
+          __size += SemanticdbOutputStream.computeStringSize(1, __value)
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = value
+        if (!__v.isEmpty) {
+          _output__.writeString(1, __v)
+        }
+      };
+    }
+    def withValue(__v: _root_.scala.Predef.String): StringConstant = copy(value = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.StringConstant])
+}
+
+object StringConstant  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.StringConstant] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.StringConstant] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.StringConstant = {
+    var __value: _root_.scala.Predef.String = ""
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>  // field 1, wire type 2 (length-delimited); UTF-8 is enforced on read
+          __value = _input__.readStringRequireUtf8()
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.StringConstant(
+        value = __value
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.StringConstant(
+    value = ""
+  )
+  final val VALUE_FIELD_NUMBER = 1
+  def of(
+    value: _root_.scala.Predef.String
+  ): _root_.dotty.tools.dotc.semanticdb.StringConstant = _root_.dotty.tools.dotc.semanticdb.StringConstant(
+    value
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.StringConstant])
+}
+
+@SerialVersionUID(0L)
+final case class NullConstant(
+    )  extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {  // Zero-field message standing for the null literal.
+    final override def serializedSize: _root_.scala.Int = 0
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+    }
+    // No fields: nothing to write, so serializedSize is a constant 0 and writeTo is a no-op.
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.NullConstant])
+}
+
+object NullConstant  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.NullConstant] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.NullConstant] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.NullConstant = {
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true  // tag 0 signals end of this message
+        case tag => _input__.skipField(tag)  // unknown fields are skipped for forward compatibility
+      }
+    }
+    dotty.tools.dotc.semanticdb.NullConstant(
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.NullConstant(
+  )
+  def of(
+  ): _root_.dotty.tools.dotc.semanticdb.NullConstant = _root_.dotty.tools.dotc.semanticdb.NullConstant(
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.NullConstant])
+}
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala b/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala
new file mode 100644
index 000000000000..975d5480fe9b
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala
@@ -0,0 +1,25 @@
+package dotty.tools
+package dotc
+package semanticdb
+
+import dotty.tools.dotc.{semanticdb => s}
+
+import core.Contexts.Context
+import core.Constants._
+
+object ConstantOps:  // Bridges compiler literal Constants to their SemanticDB protobuf counterparts.
+  extension (const: Constant)
+    def toSemanticConst(using Context): s.Constant = const.tag match {  // dispatch on the Constant's tag; one case per supported literal kind
+      case UnitTag => s.UnitConstant()
+      case BooleanTag => s.BooleanConstant(const.booleanValue)
+      case ByteTag => s.ByteConstant(const.byteValue)
+      case ShortTag => s.ShortConstant(const.shortValue)
+      case CharTag => s.CharConstant(const.charValue)
+      case IntTag => s.IntConstant(const.intValue)
+      case LongTag => s.LongConstant(const.longValue)
+      case FloatTag => s.FloatConstant(const.floatValue)
+      case DoubleTag => s.DoubleConstant(const.doubleValue)
+      case StringTag => s.StringConstant(const.stringValue)
+      case NullTag => s.NullConstant()
+      case _ => throw new Error(s"Constant ${const} can't be converted to Semanticdb Constant.")  // NOTE(review): any other tag lands here; java.lang.Error may be harsh for a translation gap — consider IllegalArgumentException, confirm intent
+    }
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Descriptor.scala b/compiler/src/dotty/tools/dotc/semanticdb/Descriptor.scala
new file mode 100644
index 000000000000..a93b99df602c
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Descriptor.scala
@@ -0,0 +1,120 @@
+package dotty.tools.dotc.semanticdb
+
+import java.lang.System.{lineSeparator => EOL}
+import dotty.tools.dotc.semanticdb.{Descriptor => d}
+
+class DescriptorParser(s: String) {  // Parses a SemanticDB symbol string right-to-left into its trailing Descriptor plus the owner prefix.
+  var i = s.length  // cursor; scans backwards from the end of the string
+  def fail() = {  // aborts with a caret pointing at the offending position
+    val message = "invalid symbol format"
+    val caret = " " * i + "^"
+    sys.error(s"$message$EOL$s$EOL$caret")
+  }
+
+  val BOF = '\u0000'  // sentinel returned once the start of the string has been consumed
+  val EOF = '\u001A'
+  var currChar = EOF
+  def readChar(): Char = {  // steps the cursor one char left; yields BOF exactly once at the start, fails past it
+    if (i <= 0) {
+      if (i == 0) {
+        i -= 1
+        currChar = BOF
+        currChar
+      } else {
+        fail()
+      }
+    } else {
+      i -= 1
+      currChar = s(i)
+      currChar
+    }
+  }
+
+  def parseValue(): String = {  // reads an identifier right-to-left; handles backtick-quoted names
+    if (currChar == '`') {
+      val end = i
+      while (readChar() != '`') {}
+      readChar()
+      s.substring(i + 2, end)  // +2 skips the cursor position and the opening backtick
+    } else {
+      val end = i + 1
+      if (!Character.isJavaIdentifierPart(currChar)) fail()
+      while (Character.isJavaIdentifierPart(readChar()) && currChar != BOF) {}
+      s.substring(i + 1, end)
+    }
+  }
+
+  def parseDisambiguator(): String = {  // reads a trailing "(...)" method disambiguator, parentheses included
+    val end = i + 1
+    if (currChar != ')') fail()
+    while (readChar() != '(') {}
+    readChar()
+    s.substring(i + 1, end)
+  }
+
+  def parseDescriptor(): Descriptor = {  // dispatches on the symbol's trailing sigil: '.', '#', '/', ')', ']'
+    if (currChar == '.') {
+      readChar()
+      if (currChar == ')') {  // "name(...)." is a method; plain "name." is a term
+        val disambiguator = parseDisambiguator()
+        val value = parseValue()
+        d.Method(value, disambiguator)
+      } else {
+        d.Term(parseValue())
+      }
+    } else if (currChar == '#') {
+      readChar()
+      d.Type(parseValue())
+    } else if (currChar == '/') {
+      readChar()
+      d.Package(parseValue())
+    } else if (currChar == ')') {  // "(name)" suffix: a parameter
+      readChar()
+      val value = parseValue()
+      if (currChar != '(') fail()
+      else readChar()
+      d.Parameter(value)
+    } else if (currChar == ']') {  // "[name]" suffix: a type parameter
+      readChar()
+      val value = parseValue()
+      if (currChar != '[') fail()
+      else readChar()
+      d.TypeParameter(value)
+    } else {
+      fail()
+    }
+  }
+
+  def entryPoint(): (Descriptor, String) = {  // returns (trailing descriptor, remaining owner-symbol prefix)
+    readChar()
+    val desc = parseDescriptor()
+    (desc, s.substring(0, i + 1))
+  }
+}
+
+object DescriptorParser {
+  def apply(symbol: String): (Descriptor, String) = {  // convenience entry: split a symbol into (descriptor, owner prefix)
+    val parser = new DescriptorParser(symbol)
+    parser.entryPoint()
+  }
+}
+
+sealed trait Descriptor {  // ADT of SemanticDB descriptor kinds; is* helpers test the concrete case.
+  def isNone: Boolean = this == d.None
+  def isTerm: Boolean = this.isInstanceOf[d.Term]
+  def isMethod: Boolean = this.isInstanceOf[d.Method]
+  def isType: Boolean = this.isInstanceOf[d.Type]
+  def isPackage: Boolean = this.isInstanceOf[d.Package]
+  def isParameter: Boolean = this.isInstanceOf[d.Parameter]
+  def isTypeParameter: Boolean = this.isInstanceOf[d.TypeParameter]
+  def value: String  // the descriptor's identifier (empty for None)
+}
+object Descriptor {
+  case object None extends Descriptor { def value: String = "" }
+  final case class Term(value: String) extends Descriptor
+  final case class Method(value: String, disambiguator: String) extends Descriptor  // disambiguator is the "(...)" overload suffix, parentheses included
+  final case class Type(value: String) extends Descriptor
+  final case class Package(value: String) extends Descriptor
+  final case class Parameter(value: String) extends Descriptor
+  final case class TypeParameter(value: String) extends Descriptor
+}
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Diagnostic.scala b/compiler/src/dotty/tools/dotc/semanticdb/Diagnostic.scala
new file mode 100644
index 000000000000..0478dfadb17b
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Diagnostic.scala
@@ -0,0 +1,195 @@
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
+
+package dotty.tools.dotc.semanticdb
+import dotty.tools.dotc.semanticdb.internal._
+import scala.annotation.internal.sharable
+
+@SerialVersionUID(0L)
+final case class Diagnostic(
+    range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None,
+    severity: dotty.tools.dotc.semanticdb.Diagnostic.Severity = dotty.tools.dotc.semanticdb.Diagnostic.Severity.UNKNOWN_SEVERITY,
+    message: _root_.scala.Predef.String = ""
+    )  extends SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      if (range.isDefined) {
+        val __value = range.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      
+      {
+        val __value = severity.value
+        if (__value != 0) {
+          __size += SemanticdbOutputStream.computeEnumSize(2, __value)
+        }
+      };
+      
+      {
+        val __value = message
+        if (!__value.isEmpty) {
+          __size += SemanticdbOutputStream.computeStringSize(3, __value)
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      range.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(1, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      {
+        val __v = severity.value
+        if (__v != 0) {
+          _output__.writeEnum(2, __v)
+        }
+      };
+      {
+        val __v = message
+        if (!__v.isEmpty) {
+          _output__.writeString(3, __v)
+        }
+      };
+    }
+    def getRange: dotty.tools.dotc.semanticdb.Range = range.getOrElse(dotty.tools.dotc.semanticdb.Range.defaultInstance)
+    def clearRange: Diagnostic = copy(range = _root_.scala.None)
+    def withRange(__v: dotty.tools.dotc.semanticdb.Range): Diagnostic = copy(range = Option(__v))
+    def withSeverity(__v: dotty.tools.dotc.semanticdb.Diagnostic.Severity): Diagnostic = copy(severity = __v)
+    def withMessage(__v: _root_.scala.Predef.String): Diagnostic = copy(message = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Diagnostic])
+}
+
+object Diagnostic  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Diagnostic] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Diagnostic] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Diagnostic = {
+    var __range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None
+    var __severity: dotty.tools.dotc.semanticdb.Diagnostic.Severity = dotty.tools.dotc.semanticdb.Diagnostic.Severity.UNKNOWN_SEVERITY
+    var __message: _root_.scala.Predef.String = ""
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __range = Option(__range.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Range](_input__))(LiteParser.readMessage(_input__, _)))
+        case 16 =>
+          __severity = dotty.tools.dotc.semanticdb.Diagnostic.Severity.fromValue(_input__.readEnum())
+        case 26 =>
+          __message = _input__.readStringRequireUtf8()
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.Diagnostic(
+        range = __range,
+        severity = __severity,
+        message = __message
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.Diagnostic(
+    range = _root_.scala.None,
+    severity = dotty.tools.dotc.semanticdb.Diagnostic.Severity.UNKNOWN_SEVERITY,
+    message = ""
+  )
+  sealed abstract class Severity(val value: _root_.scala.Int)  extends SemanticdbGeneratedEnum  derives CanEqual {
+    type EnumType = Severity
+    def isUnknownSeverity: _root_.scala.Boolean = false
+    def isError: _root_.scala.Boolean = false
+    def isWarning: _root_.scala.Boolean = false
+    def isInformation: _root_.scala.Boolean = false
+    def isHint: _root_.scala.Boolean = false
+    
+    final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Diagnostic.Severity.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Diagnostic.Severity.Recognized])
+  }
+  
+  object Severity  {
+    sealed trait Recognized extends Severity
+    
+    @SerialVersionUID(0L)
+    case object UNKNOWN_SEVERITY extends Severity(0) with Severity.Recognized {
+      val index = 0
+      val name = "UNKNOWN_SEVERITY"
+      override def isUnknownSeverity: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object ERROR extends Severity(1) with Severity.Recognized {
+      val index = 1
+      val name = "ERROR"
+      override def isError: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object WARNING extends Severity(2) with Severity.Recognized {
+      val index = 2
+      val name = "WARNING"
+      override def isWarning: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object INFORMATION extends Severity(3) with Severity.Recognized {
+      val index = 3
+      val name = "INFORMATION"
+      override def isInformation: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object HINT extends Severity(4) with Severity.Recognized {
+      val index = 4
+      val name = "HINT"
+      override def isHint: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    final case class Unrecognized(unrecognizedValue: _root_.scala.Int)  extends Severity(unrecognizedValue) with SemanticdbUnrecognizedEnum
+    
+    lazy val values = scala.collection.immutable.Seq(UNKNOWN_SEVERITY, ERROR, WARNING, INFORMATION, HINT)
+    def fromValue(__value: _root_.scala.Int): Severity = __value match {
+      case 0 => UNKNOWN_SEVERITY
+      case 1 => ERROR
+      case 2 => WARNING
+      case 3 => INFORMATION
+      case 4 => HINT
+      case __other => Unrecognized(__other)
+    }
+    
+    
+  }
+  final val RANGE_FIELD_NUMBER = 1
+  final val SEVERITY_FIELD_NUMBER = 2
+  final val MESSAGE_FIELD_NUMBER = 3
+  def of(
+    range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range],
+    severity: dotty.tools.dotc.semanticdb.Diagnostic.Severity,
+    message: _root_.scala.Predef.String
+  ): _root_.dotty.tools.dotc.semanticdb.Diagnostic = _root_.dotty.tools.dotc.semanticdb.Diagnostic(
+    range,
+    severity,
+    message
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Diagnostic])
+}
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Documentation.scala b/compiler/src/dotty/tools/dotc/semanticdb/Documentation.scala
new file mode 100644
index 000000000000..c2c2fc470cbb
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Documentation.scala
@@ -0,0 +1,173 @@
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
+
+package dotty.tools.dotc.semanticdb
+import dotty.tools.dotc.semanticdb.internal._
+import scala.annotation.internal.sharable
+
+@SerialVersionUID(0L)
+final case class Documentation(
+    message: _root_.scala.Predef.String = "",
+    format: dotty.tools.dotc.semanticdb.Documentation.Format = dotty.tools.dotc.semanticdb.Documentation.Format.HTML
+    )  extends SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = message
+        if (!__value.isEmpty) {
+          __size += SemanticdbOutputStream.computeStringSize(1, __value)
+        }
+      };
+      
+      {
+        val __value = format.value
+        if (__value != 0) {
+          __size += SemanticdbOutputStream.computeEnumSize(2, __value)
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = message
+        if (!__v.isEmpty) {
+          _output__.writeString(1, __v)
+        }
+      };
+      {
+        val __v = format.value
+        if (__v != 0) {
+          _output__.writeEnum(2, __v)
+        }
+      };
+    }
+    def withMessage(__v: _root_.scala.Predef.String): Documentation = copy(message = __v)
+    def withFormat(__v: dotty.tools.dotc.semanticdb.Documentation.Format): Documentation = copy(format = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Documentation])
+}
+
+object Documentation  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Documentation] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Documentation] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Documentation = {
+    var __message: _root_.scala.Predef.String = ""
+    var __format: dotty.tools.dotc.semanticdb.Documentation.Format = dotty.tools.dotc.semanticdb.Documentation.Format.HTML
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __message = _input__.readStringRequireUtf8()
+        case 16 =>
+          __format = dotty.tools.dotc.semanticdb.Documentation.Format.fromValue(_input__.readEnum())
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.Documentation(
+        message = __message,
+        format = __format
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.Documentation(
+    message = "",
+    format = dotty.tools.dotc.semanticdb.Documentation.Format.HTML
+  )
+  sealed abstract class Format(val value: _root_.scala.Int)  extends SemanticdbGeneratedEnum  derives CanEqual {
+    type EnumType = Format
+    def isHtml: _root_.scala.Boolean = false
+    def isMarkdown: _root_.scala.Boolean = false
+    def isJavadoc: _root_.scala.Boolean = false
+    def isScaladoc: _root_.scala.Boolean = false
+    def isKdoc: _root_.scala.Boolean = false
+    
+    final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Documentation.Format.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Documentation.Format.Recognized])
+  }
+  
+  object Format  {
+    sealed trait Recognized extends Format
+    
+    @SerialVersionUID(0L)
+    case object HTML extends Format(0) with Format.Recognized {
+      val index = 0
+      val name = "HTML"
+      override def isHtml: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object MARKDOWN extends Format(1) with Format.Recognized {
+      val index = 1
+      val name = "MARKDOWN"
+      override def isMarkdown: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object JAVADOC extends Format(2) with Format.Recognized {
+      val index = 2
+      val name = "JAVADOC"
+      override def isJavadoc: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object SCALADOC extends Format(3) with Format.Recognized {
+      val index = 3
+      val name = "SCALADOC"
+      override def isScaladoc: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object KDOC extends Format(4) with Format.Recognized {
+      val index = 4
+      val name = "KDOC"
+      override def isKdoc: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    final case class Unrecognized(unrecognizedValue: _root_.scala.Int)  extends Format(unrecognizedValue) with SemanticdbUnrecognizedEnum
+    
+    lazy val values = scala.collection.immutable.Seq(HTML, MARKDOWN, JAVADOC, SCALADOC, KDOC)
+    def fromValue(__value: _root_.scala.Int): Format = __value match {
+      case 0 => HTML
+      case 1 => MARKDOWN
+      case 2 => JAVADOC
+      case 3 => SCALADOC
+      case 4 => KDOC
+      case __other => Unrecognized(__other)
+    }
+    
+    
+  }
+  final val MESSAGE_FIELD_NUMBER = 1
+  final val FORMAT_FIELD_NUMBER = 2
+  def of(
+    message: _root_.scala.Predef.String,
+    format: dotty.tools.dotc.semanticdb.Documentation.Format
+  ): _root_.dotty.tools.dotc.semanticdb.Documentation = _root_.dotty.tools.dotc.semanticdb.Documentation(
+    message,
+    format
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Documentation])
+}
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala
index 89cf6f3df7d5..e40607ef0d5a 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala
@@ -6,13 +6,14 @@ import core._
 import Phases._
 import ast.tpd._
 import ast.untpd.given
-import ast.Trees.mods
+import ast.Trees.{mods, WithEndMarker}
 import Contexts._
 import Symbols._
 import Flags._
 import Names.Name
 import StdNames.nme
 import NameOps._
+import Denotations.StaleSymbol
 import util.Spans.Span
 import util.{SourceFile, SourcePosition}
 import transform.SymUtils._
@@ -22,6 +23,7 @@ import scala.collection.mutable
 import scala.annotation.{ threadUnsafe => tu, tailrec }
 import scala.PartialFunction.condOpt
 
+import dotty.tools.dotc.{semanticdb => s}
 
 /** Extract symbol references and uses to semanticdb files.
  *  See https://scalameta.org/docs/semanticdb/specification.html#symbol-1
@@ -34,6 +36,8 @@ class ExtractSemanticDB extends Phase:
 
   override val phaseName: String = ExtractSemanticDB.name
 
+  override val description: String = ExtractSemanticDB.description
+
   override def isRunnable(using Context) =
     super.isRunnable && ctx.settings.Xsemanticdb.value
 
@@ -42,43 +46,46 @@ class ExtractSemanticDB extends Phase:
 
   override def run(using Context): Unit =
     val unit = ctx.compilationUnit
-    val extract = Extractor()
-    extract.traverse(unit.tpdTree)
-    ExtractSemanticDB.write(unit.source, extract.occurrences.toList, extract.symbolInfos.toList)
+    val extractor = Extractor()
+    extractor.extract(unit.tpdTree)
+    ExtractSemanticDB.write(unit.source, extractor.occurrences.toList, extractor.symbolInfos.toList, extractor.synthetics.toList)
 
   /** Extractor of symbol occurrences from trees */
   class Extractor extends TreeTraverser:
-
-    private var nextLocalIdx: Int = 0
-
-    /** The index of a local symbol */
-    private val locals = mutable.HashMap[Symbol, Int]()
+    given s.SemanticSymbolBuilder = s.SemanticSymbolBuilder()
+    val synth = SyntheticsExtractor()
+    given converter: s.TypeOps = s.TypeOps()
 
     /** The bodies of synthetic locals */
     private val localBodies = mutable.HashMap[Symbol, Tree]()
 
-    /** The local symbol(s) starting at given offset */
-    private val symsAtOffset = new mutable.HashMap[Int, Set[Symbol]]():
-      override def default(key: Int) = Set[Symbol]()
-
     /** The extracted symbol occurrences */
     val occurrences = new mutable.ListBuffer[SymbolOccurrence]()
 
     /** The extracted symbol infos */
     val symbolInfos = new mutable.ListBuffer[SymbolInformation]()
 
+    val synthetics = new mutable.ListBuffer[s.Synthetic]()
+
     /** A cache of localN names */
     val localNames = new mutable.HashSet[String]()
 
     /** The symbol occurrences generated so far, as a set */
     private val generated = new mutable.HashSet[SymbolOccurrence]
 
+    def extract(tree: Tree)(using Context): Unit =
+      traverse(tree)
+      val fakeSyms = converter.fakeSymbols.map(_.symbolInfo(Set.empty)(using LinkMode.SymlinkChildren, converter))
+      symbolInfos.appendAll(fakeSyms)
+
     /** Definitions of this symbol should be excluded from semanticdb */
     private def excludeDef(sym: Symbol)(using Context): Boolean =
       !sym.exists
       || sym.isLocalDummy
       || sym.is(Synthetic)
       || sym.isSetter
+      || sym.isOldStyleImplicitConversion(forImplicitClassOnly = true)
+      || sym.owner.isGivenInstanceSummoner
       || excludeDefOrUse(sym)
 
     private def excludeDefOrUse(sym: Symbol)(using Context): Boolean =
@@ -102,6 +109,7 @@ class ExtractSemanticDB extends Phase:
     private def excludeChildren(sym: Symbol)(using Context): Boolean =
       !sym.exists
       || sym.is(Param) && sym.info.bounds.hi.isInstanceOf[Types.HKTypeLambda]
+      || sym.isOldStyleImplicitConversion(forImplicitClassOnly = true)
 
     /** Uses of this symbol where the reference has given span should be excluded from semanticdb */
     private def excludeUse(qualifier: Option[Symbol], sym: Symbol)(using Context): Boolean =
@@ -130,62 +138,60 @@ class ExtractSemanticDB extends Phase:
 
       tree match
         case tree: PackageDef =>
-          if !excludeDef(tree.pid.symbol)
-          && tree.pid.span.hasLength then
-            tree.pid match
-            case tree: Select =>
-              registerDefinition(tree.symbol, selectSpan(tree), Set.empty, tree.source)
-              traverse(tree.qualifier)
-            case tree => registerDefinition(tree.symbol, tree.span, Set.empty, tree.source)
           tree.stats.foreach(traverse)
+          if !excludeDef(tree.pid.symbol) && tree.pid.span.hasLength then
+            tree.pid match
+              case tree: Select =>
+                traverse(tree.qualifier)
+                registerDefinition(tree.symbol, selectSpan(tree), Set.empty, tree.source)
+              case tree => registerDefinition(tree.symbol, tree.span, Set.empty, tree.source)
         case tree: NamedDefTree =>
-          if tree.symbol.isAllOf(ModuleValCreationFlags) then
-            return
-          if !excludeDef(tree.symbol)
-          && tree.span.hasLength then
-            registerDefinition(tree.symbol, tree.nameSpan, symbolKinds(tree), tree.source)
-            val privateWithin = tree.symbol.privateWithin
-            if privateWithin.exists then
-              registerUseGuarded(None, privateWithin, spanOfSymbol(privateWithin, tree.span, tree.source), tree.source)
-          else if !excludeSymbol(tree.symbol) then
-            registerSymbol(tree.symbol, symbolName(tree.symbol), symbolKinds(tree))
-          tree match
-          case tree: ValDef
-          if tree.symbol.isAllOf(EnumValue) =>
-            tree.rhs match
-            case Block(TypeDef(_, template: Template) :: _, _) => // simple case with specialised extends clause
-              template.parents.filter(!_.span.isZeroExtent).foreach(traverse)
-            case _ => // calls $new
-          case tree: ValDef
-          if tree.symbol.isSelfSym =>
-            if tree.tpt.span.hasLength then
-              traverse(tree.tpt)
-          case tree: DefDef
-          if tree.symbol.isConstructor => // ignore typeparams for secondary ctors
-            tree.trailingParamss.foreach(_.foreach(traverse))
-            traverse(tree.rhs)
-          case tree: (DefDef | ValDef)
-          if tree.symbol.isSyntheticWithIdent =>
-            tree match
-              case tree: DefDef =>
-                tree.paramss.foreach(_.foreach(param => registerSymbolSimple(param.symbol)))
-              case tree: ValDef if tree.symbol.is(Given) => traverse(tree.tpt)
-              case _ =>
-            if !tree.symbol.isGlobal then
-              localBodies(tree.symbol) = tree.rhs
-            // ignore rhs
-          case PatternValDef(pat, rhs) =>
-            traverse(rhs)
-            PatternValDef.collectPats(pat).foreach(traverse)
-          case tree =>
-            if !excludeChildren(tree.symbol) then
-              traverseChildren(tree)
+          if !tree.symbol.isAllOf(ModuleValCreationFlags) then
+            tree match {
+              case tree: ValDef if tree.symbol.isAllOf(EnumValue) =>
+                tree.rhs match
+                case Block(TypeDef(_, template: Template) :: _, _) => // simple case with specialised extends clause
+                  template.parents.filter(!_.span.isZeroExtent).foreach(traverse)
+                case _ => // calls $new
+              case tree: ValDef if tree.symbol.isSelfSym =>
+                if tree.tpt.span.hasLength then
+                  traverse(tree.tpt)
+              case tree: DefDef if tree.symbol.isConstructor => // ignore typeparams for secondary ctors
+                tree.trailingParamss.foreach(_.foreach(traverse))
+                traverse(tree.rhs)
+              case tree: (DefDef | ValDef) if tree.symbol.isSyntheticWithIdent =>
+                tree match
+                  case tree: DefDef =>
+                    tree.paramss.foreach(_.foreach(param => registerSymbolSimple(param.symbol)))
+                  case tree: ValDef if tree.symbol.is(Given) =>
+                    traverse(tree.tpt)
+                  case _ =>
+                if !tree.symbol.isGlobal then
+                  localBodies(tree.symbol) = tree.rhs
+                // ignore rhs
+
+              case PatternValDef(pat, rhs) =>
+                traverse(rhs)
+                PatternValDef.collectPats(pat).foreach(traverse)
+              case tree: TypeDef =>
+                traverseChildren(tree)
+              case tree =>
+                if !excludeChildren(tree.symbol) then
+                  traverseChildren(tree)
+            }
+            if !excludeDef(tree.symbol) && tree.span.hasLength then
+              registerDefinition(tree.symbol, tree.nameSpan, symbolKinds(tree), tree.source)
+              val privateWithin = tree.symbol.privateWithin
+              if privateWithin.exists then
+                registerUseGuarded(None, privateWithin, spanOfSymbol(privateWithin, tree.span, tree.source), tree.source)
+            else if !excludeSymbol(tree.symbol) then
+              registerSymbol(tree.symbol, symbolKinds(tree))
+        case tree: Template if tree.symbol.owner.is(Invisible) =>
+          // do nothing
+          // exclude the symbols and synthetics generated by @main annotation
+          // (main class generated by @main has `Invisible` flag, see `MainProxies.scala`).
         case tree: Template =>
           val ctorSym = tree.constr.symbol
-          if !excludeDef(ctorSym) then
-            traverseAnnotsOfDefinition(ctorSym)
-            registerDefinition(ctorSym, tree.constr.nameSpan.startPos, Set.empty, tree.source)
-            ctorParams(tree.constr.termParamss, tree.body)
           for parent <- tree.parentsOrDerived if parent.span.hasLength do
             traverse(parent)
           val selfSpan = tree.self.span
@@ -195,14 +201,19 @@ class ExtractSemanticDB extends Phase:
             tree.body.foreachUntilImport(traverse).foreach(traverse) // the first import statement
           else
             tree.body.foreach(traverse)
+          if !excludeDef(ctorSym) then
+            traverseAnnotsOfDefinition(ctorSym)
+            ctorParams(tree.constr.termParamss, tree.constr.leadingTypeParams, tree.body)
+            registerDefinition(ctorSym, tree.constr.nameSpan.startPos, Set.empty, tree.source)
         case tree: Apply =>
-          @tu lazy val genParamSymbol: Name => String = funParamSymbol(tree.fun.symbol)
+          @tu lazy val genParamSymbol: Name => String = tree.fun.symbol.funParamSymbol
           traverse(tree.fun)
+          synth.tryFindSynthetic(tree).foreach(synthetics.addOne)
           for arg <- tree.args do
             arg match
               case tree @ NamedArg(name, arg) =>
-                registerUse(genParamSymbol(name), tree.span.startPos.withEnd(tree.span.start + name.toString.length), tree.source)
                 traverse(localBodies.get(arg.symbol).getOrElse(arg))
+                registerUse(genParamSymbol(name), tree.span.startPos.withEnd(tree.span.start + name.toString.length), tree.source)
               case _ => traverse(arg)
         case tree: Assign =>
           val qualSym = condOpt(tree.lhs) { case Select(qual, _) if qual.symbol.exists => qual.symbol }
@@ -222,33 +233,52 @@ class ExtractSemanticDB extends Phase:
           val qual = tree.qualifier
           val qualSpan = qual.span
           val sym = tree.symbol.adjustIfCtorTyparam
-          registerUseGuarded(qual.symbol.ifExists, sym, selectSpan(tree), tree.source)
           if qualSpan.exists && qualSpan.hasLength then
             traverse(qual)
+          registerUseGuarded(qual.symbol.ifExists, sym, selectSpan(tree), tree.source)
         case tree: Import =>
           if tree.span.exists && tree.span.hasLength then
+            traverseChildren(tree)
             for sel <- tree.selectors do
               val imported = sel.imported.name
               if imported != nme.WILDCARD then
                 for alt <- tree.expr.tpe.member(imported).alternatives do
                   registerUseGuarded(None, alt.symbol, sel.imported.span, tree.source)
-                  if (alt.symbol.companionClass.exists)
-                    registerUseGuarded(None, alt.symbol.companionClass, sel.imported.span, tree.source)
-            traverseChildren(tree)
+                  try
+                    if (alt.symbol.companionClass.exists)
+                      registerUseGuarded(None, alt.symbol.companionClass, sel.imported.span, tree.source)
+                  catch case ex: StaleSymbol =>
+                    // can happen for constructor proxies. Test case is pos-macros/i13532.
+                    ()
+
         case tree: Inlined =>
           traverse(tree.call)
+
+        case tree: TypeApply =>
+          synth.tryFindSynthetic(tree).foreach(synthetics.addOne)
+          traverseChildren(tree)
+
+        case tree: TypeTree =>
+          tree.typeOpt match
+            // Any types could be appear inside of `TypeTree`, but
+            // types that precent in source other than TypeRef are traversable and contain Ident tree nodes
+            // (e.g. TypeBoundsTree, AppliedTypeTree)
+            case Types.TypeRef(_, sym: Symbol) if namePresentInSource(sym, tree.span, tree.source) =>
+              registerUseGuarded(None, sym, tree.span, tree.source)
+            case _ => ()
+
+
         case _ =>
           traverseChildren(tree)
 
-    end traverse
+      tree match
+        case tree: WithEndMarker[t] =>
+          val endSpan = tree.endSpan
+          if endSpan.exists then
+            registerUseGuarded(None, tree.symbol, endSpan, tree.source)
+        case _ =>
 
-    private def funParamSymbol(funSym: Symbol)(using Context): Name => String =
-      if funSym.isGlobal then
-        val funSymbol = symbolName(funSym)
-        name => s"$funSymbol($name)"
-      else
-        name => locals.keys.find(local => local.isTerm && local.owner == funSym && local.name == name)
-                      .fold("")(Symbols.LocalPrefix + _)
+    end traverse
 
     private object PatternValDef:
 
@@ -283,217 +313,46 @@ class ExtractSemanticDB extends Phase:
 
     end PatternValDef
 
-    /** Add semanticdb name of the given symbol to string builder */
-    private def addSymName(b: StringBuilder, sym: Symbol)(using Context): Unit =
-
-      def addName(name: Name) =
-        val str = name.toString.unescapeUnicode
-        if str.isJavaIdent then b append str
-        else b append '`' append str append '`'
 
-      def addOwner(owner: Symbol): Unit =
-        if !owner.isRoot then addSymName(b, owner)
 
-      def addOverloadIdx(sym: Symbol): Unit =
-        val decls =
-          val decls0 = sym.owner.info.decls.lookupAll(sym.name)
-          if sym.owner.isAllOf(JavaModule) then
-            decls0 ++ sym.owner.companionClass.info.decls.lookupAll(sym.name)
-          else
-            decls0
-        end decls
-        val alts = decls.filter(_.isOneOf(Method | Mutable)).toList.reverse
-        def find(filter: Symbol => Boolean) = alts match
-          case notSym :: rest if !filter(notSym) =>
-            val idx = rest.indexWhere(filter).ensuring(_ >= 0)
-            b.append('+').append(idx + 1)
-          case _ =>
-        end find
-        val sig = sym.signature
-        find(_.signature == sig)
-
-      def addDescriptor(sym: Symbol): Unit =
-        if sym.is(ModuleClass) then
-          addDescriptor(sym.sourceModule)
-        else if sym.is(TypeParam) then
-          b.append('['); addName(sym.name); b.append(']')
-        else if sym.is(Param) then
-          b.append('('); addName(sym.name); b.append(')')
-        else if sym.isRoot then
-          b.append(Symbols.RootPackage)
-        else if sym.isEmptyPackage then
-          b.append(Symbols.EmptyPackage)
-        else if (sym.isScala2PackageObject) then
-          b.append(Symbols.PackageObjectDescriptor)
-        else
-          addName(sym.name)
-          if sym.is(Package) then b.append('/')
-          else if sym.isType || sym.isAllOf(JavaModule) then b.append('#')
-          else if sym.isOneOf(Method | Mutable)
-          && (!sym.is(StableRealizable) || sym.isConstructor) then
-            b.append('('); addOverloadIdx(sym); b.append(").")
-          else b.append('.')
-
-      /** The index of local symbol `sym`. Symbols with the same name and
-       *  the same starting position have the same index.
-       */
-      def localIdx(sym: Symbol)(using Context): Int =
-        val startPos =
-          assert(sym.span.exists, s"$sym should have a span")
-          sym.span.start
-        @tailrec
-        def computeLocalIdx(sym: Symbol): Int = locals get sym match
-          case Some(idx) => idx
-          case None      => symsAtOffset(startPos).find(_.name == sym.name) match
-            case Some(other) => computeLocalIdx(other)
-            case None =>
-              val idx = nextLocalIdx
-              nextLocalIdx += 1
-              locals(sym) = idx
-              symsAtOffset(startPos) += sym
-              idx
-        end computeLocalIdx
-        computeLocalIdx(sym)
-      end localIdx
-
-      if sym.exists then
-        if sym.isGlobal then
-          addOwner(sym.owner); addDescriptor(sym)
-        else
-          b.append(Symbols.LocalPrefix).append(localIdx(sym))
-
-    end addSymName
-
-    /** The semanticdb name of the given symbol */
-    private def symbolName(sym: Symbol)(using Context): String =
-      val b = StringBuilder(20)
-      addSymName(b, sym)
-      b.toString
-
-    private def range(span: Span, treeSource: SourceFile)(using Context): Option[Range] =
-      def lineCol(offset: Int) = (treeSource.offsetToLine(offset), treeSource.column(offset))
-      val (startLine, startCol) = lineCol(span.start)
-      val (endLine, endCol) = lineCol(span.end)
-      Some(Range(startLine, startCol, endLine, endCol))
-
-    private def symbolKind(sym: Symbol, symkinds: Set[SymbolKind])(using Context): SymbolInformation.Kind =
-      if sym.isTypeParam then
-        SymbolInformation.Kind.TYPE_PARAMETER
-      else if sym.is(TermParam) then
-        SymbolInformation.Kind.PARAMETER
-      else if sym.isTerm && sym.owner.isTerm then
-        SymbolInformation.Kind.LOCAL
-      else if sym.isInlineMethod || sym.is(Macro) then
-        SymbolInformation.Kind.MACRO
-      else if sym.isConstructor then
-        SymbolInformation.Kind.CONSTRUCTOR
-      else if sym.isSelfSym then
-        SymbolInformation.Kind.SELF_PARAMETER
-      else if sym.isOneOf(Method) || symkinds.exists(_.isVarOrVal) then
-        SymbolInformation.Kind.METHOD
-      else if sym.isPackageObject then
-        SymbolInformation.Kind.PACKAGE_OBJECT
-      else if sym.is(Module) then
-        SymbolInformation.Kind.OBJECT
-      else if sym.is(Package) then
-        SymbolInformation.Kind.PACKAGE
-      else if sym.isAllOf(JavaInterface) then
-        SymbolInformation.Kind.INTERFACE
-      else if sym.is(Trait) then
-        SymbolInformation.Kind.TRAIT
-      else if sym.isClass then
-        SymbolInformation.Kind.CLASS
-      else if sym.isType then
-        SymbolInformation.Kind.TYPE
-      else if sym.is(ParamAccessor) then
-        SymbolInformation.Kind.FIELD
-      else
-        SymbolInformation.Kind.UNKNOWN_KIND
-
-    private def symbolProps(sym: Symbol, symkinds: Set[SymbolKind])(using Context): Int =
-      if sym.is(ModuleClass) then
-        return symbolProps(sym.sourceModule, symkinds)
-      var props = 0
-      if sym.isPrimaryConstructor then
-        props |= SymbolInformation.Property.PRIMARY.value
-      if sym.is(Abstract) || symkinds.contains(SymbolKind.Abstract) then
-        props |= SymbolInformation.Property.ABSTRACT.value
-      if sym.is(Final) then
-        props |= SymbolInformation.Property.FINAL.value
-      if sym.is(Sealed) then
-        props |= SymbolInformation.Property.SEALED.value
-      if sym.isOneOf(GivenOrImplicit) then
-        props |= SymbolInformation.Property.IMPLICIT.value
-      if sym.is(Lazy, butNot=Module) then
-        props |= SymbolInformation.Property.LAZY.value
-      if sym.isAllOf(Case | Module) || sym.is(CaseClass) || sym.isAllOf(EnumCase) then
-        props |= SymbolInformation.Property.CASE.value
-      if sym.is(Covariant) then
-        props |= SymbolInformation.Property.COVARIANT.value
-      if sym.is(Contravariant) then
-        props |= SymbolInformation.Property.CONTRAVARIANT.value
-      if sym.isAllOf(DefaultMethod | JavaDefined) || sym.is(Accessor) && sym.name.is(NameKinds.DefaultGetterName) then
-        props |= SymbolInformation.Property.DEFAULT.value
-      if symkinds.exists(_.isVal) then
-        props |= SymbolInformation.Property.VAL.value
-      if symkinds.exists(_.isVar) then
-        props |= SymbolInformation.Property.VAR.value
-      if sym.is(JavaStatic) then
-        props |= SymbolInformation.Property.STATIC.value
-      if sym.is(Enum) then
-        props |= SymbolInformation.Property.ENUM.value
-      props
-
-    private def symbolInfo(sym: Symbol, symbolName: String, symkinds: Set[SymbolKind])(using Context): SymbolInformation =
-      SymbolInformation(
-        symbol = symbolName,
-        language = Language.SCALA,
-        kind = symbolKind(sym, symkinds),
-        properties = symbolProps(sym, symkinds),
-        displayName = Symbols.displaySymbol(sym)
-      )
-
-    private def registerSymbol(sym: Symbol, symbolName: String, symkinds: Set[SymbolKind])(using Context): Unit =
-      val isLocal = symbolName.isLocal
-      if !isLocal || !localNames.contains(symbolName) then
+    private def registerSymbol(sym: Symbol, symkinds: Set[SymbolKind])(using Context): Unit =
+      val sname = sym.symbolName
+      val isLocal = sname.isLocal
+      if !isLocal || !localNames.contains(sname) then
         if isLocal then
-          localNames += symbolName
-        symbolInfos += symbolInfo(sym, symbolName, symkinds)
+          localNames += sname
+        symbolInfos += sym.symbolInfo(symkinds)(using LinkMode.SymlinkChildren, converter)
 
     private def registerSymbolSimple(sym: Symbol)(using Context): Unit =
-      registerSymbol(sym, symbolName(sym), Set.empty)
+      registerSymbol(sym, Set.empty)
 
     private def registerOccurrence(symbol: String, span: Span, role: SymbolOccurrence.Role, treeSource: SourceFile)(using Context): Unit =
-      val occ = SymbolOccurrence(symbol, range(span, treeSource), role)
+      val occ = SymbolOccurrence(range(span, treeSource), symbol, role)
       if !generated.contains(occ) && occ.symbol.nonEmpty then
         occurrences += occ
         generated += occ
 
     private def registerUseGuarded(qualSym: Option[Symbol], sym: Symbol, span: Span, treeSource: SourceFile)(using Context) =
-      if !excludeUse(qualSym, sym) then
+      if !excludeUse(qualSym, sym) && !span.isZeroExtent then
         registerUse(sym, span, treeSource)
 
     private def registerUse(sym: Symbol, span: Span, treeSource: SourceFile)(using Context): Unit =
-      registerUse(symbolName(sym), span, treeSource)
+      registerUse(sym.symbolName, span, treeSource)
 
     private def registerUse(symbol: String, span: Span, treeSource: SourceFile)(using Context): Unit =
       registerOccurrence(symbol, span, SymbolOccurrence.Role.REFERENCE, treeSource)
 
     private def registerDefinition(sym: Symbol, span: Span, symkinds: Set[SymbolKind], treeSource: SourceFile)(using Context) =
-      val symbol = symbolName(sym)
+      val sname = sym.symbolName
       val finalSpan = if !span.hasLength || !sym.is(Given) || namePresentInSource(sym, span, treeSource) then
         span
       else
         Span(span.start)
 
-      registerOccurrence(symbol, finalSpan, SymbolOccurrence.Role.DEFINITION, treeSource)
+      if namePresentInSource(sym, span, treeSource) then
+        registerOccurrence(sname, finalSpan, SymbolOccurrence.Role.DEFINITION, treeSource)
       if !sym.is(Package) then
-        registerSymbol(sym, symbol, symkinds)
-
-    private def namePresentInSource(sym: Symbol, span: Span, source:SourceFile)(using Context): Boolean =
-      val content = source.content()
-      val (start, end) = if content(span.end - 1) == '`' then (span.start + 1, span.end - 1) else (span.start, span.end)
-      content.slice(start, end).mkString == sym.name.stripModuleClassSuffix.lastPart.toString
+        registerSymbol(sym, symkinds)
 
     private def spanOfSymbol(sym: Symbol, span: Span, treeSource: SourceFile)(using Context): Span =
       val contents = if treeSource.exists then treeSource.content() else Array.empty[Char]
@@ -580,19 +439,22 @@ class ExtractSemanticDB extends Phase:
         symkinds.toSet
 
     private def ctorParams(
-      vparamss: List[List[ValDef]], body: List[Tree])(using Context): Unit =
+      vparamss: List[List[ValDef]], tparams: List[TypeDef], body: List[Tree])(using Context): Unit =
       @tu lazy val getters = findGetters(vparamss.flatMap(_.map(_.name)).toSet, body)
       for
         vparams <- vparamss
         vparam  <- vparams
       do
+        traverse(vparam.tpt)
         if !excludeSymbol(vparam.symbol) then
           traverseAnnotsOfDefinition(vparam.symbol)
           val symkinds =
             getters.get(vparam.name).fold(SymbolKind.emptySet)(getter =>
               if getter.mods.is(Mutable) then SymbolKind.VarSet else SymbolKind.ValSet)
-          registerSymbol(vparam.symbol, symbolName(vparam.symbol), symkinds)
+          registerSymbol(vparam.symbol, symkinds)
         traverse(vparam.tpt)
+      tparams.foreach(tp => traverse(tp.rhs))
+
 
 object ExtractSemanticDB:
   import java.nio.file.Path
@@ -601,8 +463,14 @@ object ExtractSemanticDB:
   import java.nio.file.Paths
 
   val name: String = "extractSemanticDB"
-
-  def write(source: SourceFile, occurrences: List[SymbolOccurrence], symbolInfos: List[SymbolInformation])(using Context): Unit =
+  val description: String = "extract info into .semanticdb files"
+
+  def write(
+    source: SourceFile,
+    occurrences: List[SymbolOccurrence],
+    symbolInfos: List[SymbolInformation],
+    synthetics: List[Synthetic],
+  )(using Context): Unit =
     def absolutePath(path: Path): Path = path.toAbsolutePath.normalize
     val semanticdbTarget =
       val semanticdbTargetSetting = ctx.settings.semanticdbTarget.value
@@ -624,7 +492,8 @@ object ExtractSemanticDB:
       text = "",
       md5 = internal.MD5.compute(String(source.content)),
       symbols = symbolInfos,
-      occurrences = occurrences
+      occurrences = occurrences,
+      synthetics = synthetics,
     )
     val docs = TextDocuments(List(doc))
     val out = Files.newOutputStream(outpath)
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Language.scala b/compiler/src/dotty/tools/dotc/semanticdb/Language.scala
index cde313b6ec6e..7007f29816db 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/Language.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Language.scala
@@ -1,21 +1,56 @@
-package dotty.tools.dotc.semanticdb
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
 
+package dotty.tools.dotc.semanticdb
 import dotty.tools.dotc.semanticdb.internal._
+import scala.annotation.internal.sharable
 
-sealed trait Language(val value: Int) extends SemanticdbEnum derives CanEqual
-
-object Language {
-
-  case object UNKNOWN_LANGUAGE extends Language(0)
-  case object SCALA extends Language(1)
-  case object JAVA extends Language(2)
-  final case class Unrecognized(id: Int) extends Language(id)
+sealed abstract class Language(val value: _root_.scala.Int)  extends SemanticdbGeneratedEnum  derives CanEqual {
+  type EnumType = Language
+  def isUnknownLanguage: _root_.scala.Boolean = false
+  def isScala: _root_.scala.Boolean = false
+  def isJava: _root_.scala.Boolean = false
+  
+  final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Language.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Language.Recognized])
+}
 
-  def fromValue(value: Int): Language = value match {
+object Language  {
+  sealed trait Recognized extends Language
+  
+  @SerialVersionUID(0L)
+  case object UNKNOWN_LANGUAGE extends Language(0) with Language.Recognized {
+    val index = 0
+    val name = "UNKNOWN_LANGUAGE"
+    override def isUnknownLanguage: _root_.scala.Boolean = true
+  }
+  
+  @SerialVersionUID(0L)
+  case object SCALA extends Language(1) with Language.Recognized {
+    val index = 1
+    val name = "SCALA"
+    override def isScala: _root_.scala.Boolean = true
+  }
+  
+  @SerialVersionUID(0L)
+  case object JAVA extends Language(2) with Language.Recognized {
+    val index = 2
+    val name = "JAVA"
+    override def isJava: _root_.scala.Boolean = true
+  }
+  
+  @SerialVersionUID(0L)
+  final case class Unrecognized(unrecognizedValue: _root_.scala.Int)  extends Language(unrecognizedValue) with SemanticdbUnrecognizedEnum
+  
+  lazy val values = scala.collection.immutable.Seq(UNKNOWN_LANGUAGE, SCALA, JAVA)
+  def fromValue(__value: _root_.scala.Int): Language = __value match {
     case 0 => UNKNOWN_LANGUAGE
     case 1 => SCALA
     case 2 => JAVA
-    case id => Unrecognized(id)
+    case __other => Unrecognized(__other)
   }
-
-}
+  
+  
+}
\ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/LinkMode.scala b/compiler/src/dotty/tools/dotc/semanticdb/LinkMode.scala
new file mode 100644
index 000000000000..aac4c1254892
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/LinkMode.scala
@@ -0,0 +1,4 @@
+package dotty.tools.dotc.semanticdb
+
+enum LinkMode:
+  case SymlinkChildren, HardlinkChildren
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Location.scala b/compiler/src/dotty/tools/dotc/semanticdb/Location.scala
new file mode 100644
index 000000000000..b58fb8c8e395
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Location.scala
@@ -0,0 +1,108 @@
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
+
+package dotty.tools.dotc.semanticdb
+import dotty.tools.dotc.semanticdb.internal._
+import scala.annotation.internal.sharable
+
+@SerialVersionUID(0L)
+final case class Location(
+    uri: _root_.scala.Predef.String = "",
+    range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None
+    )  extends SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = uri
+        if (!__value.isEmpty) {
+          __size += SemanticdbOutputStream.computeStringSize(1, __value)
+        }
+      };
+      if (range.isDefined) {
+        val __value = range.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = uri
+        if (!__v.isEmpty) {
+          _output__.writeString(1, __v)
+        }
+      };
+      range.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(2, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def withUri(__v: _root_.scala.Predef.String): Location = copy(uri = __v)
+    def getRange: dotty.tools.dotc.semanticdb.Range = range.getOrElse(dotty.tools.dotc.semanticdb.Range.defaultInstance)
+    def clearRange: Location = copy(range = _root_.scala.None)
+    def withRange(__v: dotty.tools.dotc.semanticdb.Range): Location = copy(range = Option(__v))
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Location])
+}
+
+object Location  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Location] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Location] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Location = {
+    var __uri: _root_.scala.Predef.String = ""
+    var __range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __uri = _input__.readStringRequireUtf8()
+        case 18 =>
+          __range = Option(__range.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Range](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.Location(
+        uri = __uri,
+        range = __range
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.Location(
+    uri = "",
+    range = _root_.scala.None
+  )
+  final val URI_FIELD_NUMBER = 1
+  final val RANGE_FIELD_NUMBER = 2
+  def of(
+    uri: _root_.scala.Predef.String,
+    range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range]
+  ): _root_.dotty.tools.dotc.semanticdb.Location = _root_.dotty.tools.dotc.semanticdb.Location(
+    uri,
+    range
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Location])
+}
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala b/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala
new file mode 100644
index 000000000000..4361db3e50d6
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala
@@ -0,0 +1,392 @@
+package dotty.tools.dotc.semanticdb
+
+import dotty.tools.dotc.{semanticdb => s}
+
+import scala.collection.mutable
+import dotty.tools.dotc.semanticdb.Scala3.{_, given}
+import SymbolInformation.Kind._
+import dotty.tools.dotc.util.SourceFile
+class SymbolInformationPrinter (symtab: PrinterSymtab):
+  val notes = InfoNotes()
+  val infoPrinter = InfoPrinter(notes)
+
+  def pprintSymbolInformation(info: SymbolInformation): String =
+    val sb = new StringBuilder()
+    sb.append(info.symbol).append(" => ")
+    sb.append(infoPrinter.pprint(info))
+    sb.toString
+
+  class InfoNotes:
+    private val noteSymtab = mutable.Map[String, SymbolInformation]()
+    def enter(info: SymbolInformation) =
+      if (symtab.info(info.symbol).isEmpty && info.kind != UNKNOWN_KIND)
+        noteSymtab(info.symbol) = info
+
+    def visit(sym: String): SymbolInformation =
+      val symtabInfo = noteSymtab.get(sym).orElse(symtab.info(sym))
+      symtabInfo.getOrElse {
+        val displayName = if sym.isGlobal then sym.desc.value else sym
+        SymbolInformation(symbol = sym, displayName = displayName)
+      }
+  end InfoNotes
+
+  class InfoPrinter(notes: InfoNotes):
+    private enum SymbolStyle:
+      case Reference, Definition
+    def pprint(info: SymbolInformation): String =
+      val sb = new StringBuilder()
+      val annotStr = info.annotations.map(pprint).mkString(" ")
+      if annotStr.nonEmpty then
+        sb.append(annotStr + " ")
+      sb.append(accessString(info.access))
+      if info.isAbstract then sb.append("abstract ")
+      if info.isFinal then sb.append("final ")
+      if info.isSealed then sb.append("sealed ")
+      if info.isImplicit then sb.append("implicit ")
+      if info.isLazy then sb.append("lazy ")
+      if info.isCase then sb.append("case ")
+      if info.isCovariant then sb.append("covariant ")
+      if info.isContravariant then sb.append("contravariant ")
+      if info.isVal then sb.append("val ")
+      if info.isVar then sb.append("var ")
+      if info.isStatic then sb.append("static ")
+      if info.isPrimary then sb.append("primary ")
+      if info.isEnum then sb.append("enum ")
+      if info.isDefault then sb.append("default ")
+      if info.isGiven then sb.append("given ")
+      if info.isInline then sb.append("inline ")
+      if info.isOpen then sb.append("open ")
+      if info.isTransparent then sb.append("transparent ")
+      if info.isInfix then sb.append("infix ")
+      if info.isOpaque then sb.append("opaque ")
+      info.kind match
+        case LOCAL => sb.append("local ")
+        case FIELD => sb.append("field ")
+        case METHOD => sb.append("method ")
+        case CONSTRUCTOR => sb.append("ctor ")
+        case MACRO => sb.append("macro ")
+        case TYPE => sb.append("type ")
+        case PARAMETER => sb.append("param ")
+        case SELF_PARAMETER => sb.append("selfparam ")
+        case TYPE_PARAMETER => sb.append("typeparam ")
+        case OBJECT => sb.append("object ")
+        case PACKAGE => sb.append("package ")
+        case PACKAGE_OBJECT => sb.append("package object ")
+        case CLASS => sb.append("class ")
+        case TRAIT => sb.append("trait ")
+        case INTERFACE => sb.append("interface ")
+        case UNKNOWN_KIND | Unrecognized(_) => sb.append("unknown ")
+      sb.append(s"${info.displayName}${info.prefixBeforeTpe}${pprint(info.signature)}")
+      info.overriddenSymbols match
+        case Nil => ()
+        case all => sb.append(s" <: ${all.mkString(", ")}")
+      sb.toString
+
+    private def pprintDef(info: SymbolInformation) =
+      notes.enter(info)
+      pprint(info.symbol, SymbolStyle.Definition)
+    def pprintRef(sym: String): String = pprint(sym, SymbolStyle.Reference)
+    private def pprintDef(sym: String): String = pprint(sym, SymbolStyle.Definition)
+    private def pprint(sym: String, style: SymbolStyle): String =
+      val info = notes.visit(sym)
+      style match
+        case SymbolStyle.Reference =>
+          info.displayName
+        case SymbolStyle.Definition =>
+          pprint(info)
+
+
+    private def pprint(sig: Signature): String =
+      sig match
+        case ClassSignature(tparams, parents, self, decls) =>
+          val sb = new StringBuilder()
+          if (tparams.infos.nonEmpty)
+            sb.append(tparams.infos.map(pprintDef).mkString("[", ", ", "] "))
+          if (parents.nonEmpty)
+            sb.append(parents.map(pprint).mkString("extends ", " with ", " "))
+          if (self.isDefined || decls.infos.nonEmpty) {
+            val selfStr = if (self.isDefined) s"self: ${pprint(self)} =>" else ""
+            val declsStr = if (decls.infos.nonEmpty) s"+${decls.infos.length} decls" else ""
+            sb.append(s"{ ${selfStr} ${declsStr} }")
+          }
+          sb.toString
+        case MethodSignature(tparams, paramss, res) =>
+          val sb = new StringBuilder()
+          if (tparams.infos.nonEmpty)
+            sb.append(tparams.infos.map(pprintDef).mkString("[", ", ", "]"))
+          paramss.foreach { params =>
+            val paramsStr = params.infos.map(pprintDef).mkString("(", ", ", ")")
+            sb.append(paramsStr)
+          }
+          sb.append(s": ${pprint(res)}")
+          sb.toString
+        case TypeSignature(tparams, lo, hi) =>
+          val sb = new StringBuilder()
+          if (tparams.infos.nonEmpty)
+            sb.append(tparams.infos.map(pprintDef).mkString("[", ", ", "]"))
+          if (lo == hi) {
+            sb.append(s" = ${pprint(lo)}")
+          } else {
+            lo match
+              case TypeRef(Type.Empty, "scala/Nothing#", Nil) => ()
+              case lo => sb.append(s" >: ${pprint(lo)}")
+            hi match
+              case TypeRef(Type.Empty, "scala/Any#", Nil) => ()
+              case TypeRef(Type.Empty, "java/lang/Object#", Nil) => ()
+              case hi => sb.append(s" <: ${pprint(hi)}")
+          }
+          sb.toString
+        case ValueSignature(tpe) =>
+          pprint(tpe)
+        case _ =>
+          ""
+
+    protected def pprint(tpe: Type): String = {
+      def prefix(tpe: Type): String = tpe match
+        case TypeRef(pre, sym, args) =>
+          val preStr = pre match {
+            case _: SingleType | _: ThisType | _: SuperType =>
+              s"${prefix(pre)}."
+            case Type.Empty => ""
+            case _ =>
+              s"${prefix(pre)}#"
+          }
+          val argsStr = if (args.nonEmpty) args.map(normal).mkString("[", ", ", "]") else ""
+          s"${preStr}${pprintRef(sym)}${argsStr}"
+        case SingleType(pre, sym) =>
+          pre match {
+            case Type.Empty => pprintRef(sym)
+            case _ =>
+              s"${prefix(pre)}.${pprintRef(sym)}"
+          }
+        case ThisType(sym) =>
+          s"${pprintRef(sym)}.this"
+        case SuperType(pre, sym) =>
+          s"${prefix(pre)}.super[${pprintRef(sym)}]"
+        case ConstantType(const) =>
+          pprint(const)
+        case IntersectionType(types) =>
+          types.map(normal).mkString(" & ")
+        case UnionType(types) =>
+          types.map(normal).mkString(" | ")
+        case WithType(types) =>
+          types.map(normal).mkString(" with ")
+        case StructuralType(utpe, decls) =>
+          val declsStr =
+            if (decls.infos.nonEmpty)
+              s"{ ${decls.infos.map(pprintDef).mkString("; ")} }"
+            else "{}"
+          s"${normal(utpe)} ${declsStr}"
+        case AnnotatedType(anns, utpe) =>
+          s"${normal(utpe)} ${anns.map(pprint).mkString(" ")}"
+        case ExistentialType(utpe, decls) =>
+          val sdecls = decls.infos.map(pprintDef).mkString("; ")
+          val sutpe = normal(utpe)
+          s"${sutpe} forSome { ${sdecls} }"
+        case UniversalType(tparams, utpe) =>
+          val params = tparams.infos.map(_.displayName).mkString("[", ", ", "]")
+          val resType = normal(utpe)
+          s"${params} => ${resType}"
+        case ByNameType(utpe) =>
+          s"=> ${normal(utpe)}"
+        case RepeatedType(utpe) =>
+          s"${normal(utpe)}*"
+        case _ =>
+          ""
+
+      def normal(tpe: Type): String = tpe match
+        case _: SingleType | _: ThisType | _: SuperType =>
+          s"${prefix(tpe)}.type"
+        case _ =>
+          prefix(tpe)
+      normal(tpe)
+    }
+
+    private def pprint(ann: Annotation): String =
+      ann.tpe match {
+        case Type.Empty => s"@"
+        case tpe => s"@${pprint(tpe)}"
+      }
+
+    protected def pprint(const: Constant): String = const match {
+        case Constant.Empty =>
+          ""
+        case UnitConstant() =>
+          "()"
+        case BooleanConstant(true) =>
+          "true"
+        case BooleanConstant(false) =>
+          "false"
+        case ByteConstant(value) =>
+          value.toByte.toString
+        case ShortConstant(value) =>
+          value.toShort.toString
+        case CharConstant(value) =>
+          s"'${value.toChar.toString}'"
+        case IntConstant(value) =>
+          value.toString
+        case LongConstant(value) =>
+          s"${value.toString}L"
+        case FloatConstant(value) =>
+          s"${value.toString}f"
+        case DoubleConstant(value) =>
+          value.toString
+        case StringConstant(value) =>
+          "\"" + value + "\""
+        case NullConstant() =>
+          "null"
+      }
+
+    private def accessString(access: Access): String =
+      access match
+        case Access.Empty => ""
+        case _: PublicAccess => ""
+        case _: PrivateAccess => "private "
+        case _: ProtectedAccess => "protected "
+        case _: PrivateThisAccess => "private[this] "
+        case _: ProtectedThisAccess => "protected[this] "
+        case PrivateWithinAccess(ssym) =>
+          s"private[${ssym}] "
+        case ProtectedWithinAccess(ssym) =>
+          s"protected[${ssym}] "
+    extension (scope: Scope)
+      private def infos: List[SymbolInformation] =
+        if (scope.symlinks.nonEmpty)
+          scope.symlinks.map(symbol => SymbolInformation(symbol = symbol)).toList
+        else
+          scope.hardlinks.toList
+
+    extension (scope: Option[Scope])
+      private def infos: List[SymbolInformation] = scope match {
+        case Some(s) => s.infos
+        case None => Nil
+      }
+  end InfoPrinter
+end SymbolInformationPrinter
+
+extension (info: SymbolInformation)
+  def prefixBeforeTpe: String = {
+    info.kind match {
+      case LOCAL | FIELD | PARAMETER | SELF_PARAMETER | UNKNOWN_KIND | Unrecognized(_) =>
+        ": "
+      case METHOD | CONSTRUCTOR | MACRO | TYPE | TYPE_PARAMETER | OBJECT | PACKAGE |
+          PACKAGE_OBJECT | CLASS | TRAIT | INTERFACE =>
+        " "
+    }
+  }
+
+trait PrinterSymtab:
+  def info(symbol: String): Option[SymbolInformation]
+object PrinterSymtab:
+  def fromTextDocument(doc: TextDocument): PrinterSymtab =
+    val map = doc.symbols.map(info => (info.symbol, info)).toMap
+    new PrinterSymtab {
+      override def info(symbol: String): Option[SymbolInformation] = map.get(symbol)
+    }
+
+def processRange(sb: StringBuilder, range: Range): Unit =
+  sb.append('[')
+    .append(range.startLine).append(':').append(range.startCharacter)
+    .append("..")
+    .append(range.endLine).append(':').append(range.endCharacter)
+    .append("):")
+
+
+
+class SyntheticPrinter(symtab: PrinterSymtab, source: SourceFile) extends SymbolInformationPrinter(symtab):
+
+  def pprint(synth: Synthetic): String =
+    val sb = new StringBuilder()
+    val notes = InfoNotes()
+    val treePrinter = TreePrinter(source, synth.range, notes)
+
+    synth.range match
+      case Some(range) =>
+        processRange(sb, range)
+        sb.append(source.substring(range))
+      case None =>
+        sb.append("[):")
+    sb.append(" => ")
+    sb.append(treePrinter.pprint(synth.tree))
+    sb.toString
+
+  extension (source: SourceFile)
+    private def substring(range: Option[s.Range]): String =
+      range match
+        case Some(range) => source.substring(range)
+        case None => ""
+    private def substring(range: s.Range): String =
+      /** get the line length of a given line */
+      def lineLength(line: Int): Int =
+        val isLastLine = source.lineToOffsetOpt(line).nonEmpty && source.lineToOffsetOpt(line + 1).isEmpty
+        if isLastLine then source.content.length - source.lineToOffset(line) - 1
+        else source.lineToOffset(line + 1) - source.lineToOffset(line) - 1 // -1 for newline char
+
+      val start = source.lineToOffset(range.startLine) +
+        math.min(range.startCharacter, lineLength(range.startLine))
+      val end = source.lineToOffset(range.endLine) +
+        math.min(range.endCharacter, lineLength(range.endLine))
+      new String(source.content, start, end - start)
+
+
+  // def pprint(tree: s.Tree, range: Option[Range]): String =
+  class TreePrinter(source: SourceFile, originalRange: Option[Range], notes: InfoNotes) extends InfoPrinter(notes):
+    def pprint(tree: Tree): String =
+      val sb = new StringBuilder()
+      processTree(tree)(using sb)
+      sb.toString
+
+
+    private def rep[T](xs: Seq[T], seq: String)(f: T => Unit)(using sb: StringBuilder): Unit =
+      xs.zipWithIndex.foreach { (x, i) =>
+        if i != 0 then sb.append(seq)
+        f(x)
+      }
+
+    private def processTree(tree: Tree)(using sb: StringBuilder): Unit =
+      tree match {
+        case tree: ApplyTree =>
+          processTree(tree.function)
+          sb.append("(")
+          rep(tree.arguments, ", ")(processTree)
+          sb.append(")")
+        case tree: FunctionTree =>
+          sb.append("{")
+          sb.append("(")
+          rep(tree.parameters, ", ")(processTree)
+          sb.append(") =>")
+          processTree(tree.body)
+          sb.append("}")
+        case tree: IdTree =>
+          sb.append(pprintRef(tree.symbol))
+        case tree: LiteralTree =>
+          sb.append(pprint(tree.constant))
+        case tree: MacroExpansionTree =>
+          sb.append("(`macro-expandee` : `")
+          sb.append(pprint(tree.tpe))
+          sb.append(")")
+        case tree: OriginalTree =>
+          if (tree.range == originalRange && originalRange.nonEmpty) then
+            sb.append("*")
+          else
+            sb.append("orig(")
+            sb.append(source.substring(tree.range))
+            sb.append(")")
+        case tree: SelectTree =>
+          processTree(tree.qualifier)
+          sb.append(".")
+          tree.id match
+            case Some(tree) => processTree(tree)
+            case None => ()
+        case tree: TypeApplyTree =>
+          processTree(tree.function)
+          sb.append("[")
+          rep(tree.typeArguments, ", ")((t) => sb.append(pprint(t)))
+          sb.append("]")
+
+        case _ =>
+          sb.append("")
+      }
+
+
+end SyntheticPrinter
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Range.scala b/compiler/src/dotty/tools/dotc/semanticdb/Range.scala
index 61d9d7f27c74..f8b1675a37c4 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/Range.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Range.scala
@@ -1,97 +1,106 @@
-package dotty.tools.dotc.semanticdb
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
 
+package dotty.tools.dotc.semanticdb
 import dotty.tools.dotc.semanticdb.internal._
 import scala.annotation.internal.sharable
 
-object Range {
-  val defaultInstance: Range = Range(0, 0, 0, 0)
-}
-
+@SerialVersionUID(0L)
 final case class Range(
-  startLine: Int,
-  startCharacter: Int,
-  endLine: Int,
-  endCharacter: Int
-) extends SemanticdbMessage[Range] derives CanEqual {
-  @sharable
-  private var __serializedSizeCachedValue: Int = 0
-  private def __computeSerializedValue(): Int = {
-    var __size = 0
-
-    {
-      val __value = startLine
-      if (__value != 0) {
-        __size += SemanticdbOutputStream
-          .computeInt32Size(1, __value)
-      }
-    };
-
-    {
-      val __value = startCharacter
-      if (__value != 0) {
-        __size += SemanticdbOutputStream
-          .computeInt32Size(2, __value)
-      }
-    };
-
-    {
-      val __value = endLine
-      if (__value != 0) {
-        __size += SemanticdbOutputStream
-          .computeInt32Size(3, __value)
-      }
-    };
-
-    {
-      val __value = endCharacter
-      if (__value != 0) {
-        __size += SemanticdbOutputStream
-          .computeInt32Size(4, __value)
-      }
-    };
-    __size
-  }
-  final override def serializedSize: Int = {
-    var read = __serializedSizeCachedValue
-    if (read == 0) {
-      read = __computeSerializedValue()
-      __serializedSizeCachedValue = read
+    startLine: _root_.scala.Int = 0,
+    startCharacter: _root_.scala.Int = 0,
+    endLine: _root_.scala.Int = 0,
+    endCharacter: _root_.scala.Int = 0
+    )  extends SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = startLine
+        if (__value != 0) {
+          __size += SemanticdbOutputStream.computeInt32Size(1, __value)
+        }
+      };
+      
+      {
+        val __value = startCharacter
+        if (__value != 0) {
+          __size += SemanticdbOutputStream.computeInt32Size(2, __value)
+        }
+      };
+      
+      {
+        val __value = endLine
+        if (__value != 0) {
+          __size += SemanticdbOutputStream.computeInt32Size(3, __value)
+        }
+      };
+      
+      {
+        val __value = endCharacter
+        if (__value != 0) {
+          __size += SemanticdbOutputStream.computeInt32Size(4, __value)
+        }
+      };
+      __size
     }
-    read
-  }
-  def writeTo(
-      `_output__`: SemanticdbOutputStream
-  ): Unit = {
-    {
-      val __v = startLine
-      if (__v != 0) {
-        _output__.writeInt32(1, __v)
-      }
-    };
-    {
-      val __v = startCharacter
-      if (__v != 0) {
-        _output__.writeInt32(2, __v)
-      }
-    };
-    {
-      val __v = endLine
-      if (__v != 0) {
-        _output__.writeInt32(3, __v)
-      }
-    };
-    {
-      val __v = endCharacter
-      if (__v != 0) {
-        _output__.writeInt32(4, __v)
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
       }
-    };
-  }
-  def mergeFrom(`_input__`: SemanticdbInputStream): Range = {
-    var __startLine = this.startLine
-    var __startCharacter = this.startCharacter
-    var __endLine = this.endLine
-    var __endCharacter = this.endCharacter
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = startLine
+        if (__v != 0) {
+          _output__.writeInt32(1, __v)
+        }
+      };
+      {
+        val __v = startCharacter
+        if (__v != 0) {
+          _output__.writeInt32(2, __v)
+        }
+      };
+      {
+        val __v = endLine
+        if (__v != 0) {
+          _output__.writeInt32(3, __v)
+        }
+      };
+      {
+        val __v = endCharacter
+        if (__v != 0) {
+          _output__.writeInt32(4, __v)
+        }
+      };
+    }
+    def withStartLine(__v: _root_.scala.Int): Range = copy(startLine = __v)
+    def withStartCharacter(__v: _root_.scala.Int): Range = copy(startCharacter = __v)
+    def withEndLine(__v: _root_.scala.Int): Range = copy(endLine = __v)
+    def withEndCharacter(__v: _root_.scala.Int): Range = copy(endCharacter = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Range])
+}
+
+object Range  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Range] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Range] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Range = {
+    var __startLine: _root_.scala.Int = 0
+    var __startCharacter: _root_.scala.Int = 0
+    var __endLine: _root_.scala.Int = 0
+    var __endCharacter: _root_.scala.Int = 0
     var _done__ = false
     while (!_done__) {
       val _tag__ = _input__.readTag()
@@ -108,11 +117,39 @@ final case class Range(
         case tag => _input__.skipField(tag)
       }
     }
-    Range(
-      startLine = __startLine,
-      startCharacter = __startCharacter,
-      endLine = __endLine,
-      endCharacter = __endCharacter
+    dotty.tools.dotc.semanticdb.Range(
+        startLine = __startLine,
+        startCharacter = __startCharacter,
+        endLine = __endLine,
+        endCharacter = __endCharacter
     )
   }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.Range(
+    startLine = 0,
+    startCharacter = 0,
+    endLine = 0,
+    endCharacter = 0
+  )
+  final val START_LINE_FIELD_NUMBER = 1
+  final val START_CHARACTER_FIELD_NUMBER = 2
+  final val END_LINE_FIELD_NUMBER = 3
+  final val END_CHARACTER_FIELD_NUMBER = 4
+  def of(
+    startLine: _root_.scala.Int,
+    startCharacter: _root_.scala.Int,
+    endLine: _root_.scala.Int,
+    endCharacter: _root_.scala.Int
+  ): _root_.dotty.tools.dotc.semanticdb.Range = _root_.dotty.tools.dotc.semanticdb.Range(
+    startLine,
+    startCharacter,
+    endLine,
+    endCharacter
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Range])
 }
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala b/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala
index 211c1576254f..686dc20e481b 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala
@@ -1,14 +1,19 @@
 package dotty.tools.dotc.semanticdb
 
 import dotty.tools.dotc.core
-import core.Symbols.{ Symbol , defn }
+import core.Symbols.{ Symbol , defn, NoSymbol }
 import core.Contexts._
 import core.Names
 import core.Names.Name
-import core.Types.Type
+import core.Types.{Type, TypeBounds}
 import core.Flags._
 import core.NameKinds
 import core.StdNames.nme
+import SymbolInformation.{Kind => k}
+import dotty.tools.dotc.util.SourceFile
+import dotty.tools.dotc.util.Spans.Span
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.{semanticdb => s}
 
 import java.lang.Character.{isJavaIdentifierPart, isJavaIdentifierStart}
 
@@ -25,6 +30,128 @@ object Scala3:
 
   private val WILDCARDTypeName = nme.WILDCARD.toTypeName
 
+  def range(span: Span, treeSource: SourceFile)(using Context): Option[Range] =
+    def lineCol(offset: Int) = (treeSource.offsetToLine(offset), treeSource.column(offset))
+    val (startLine, startCol) = lineCol(span.start)
+    val (endLine, endCol) = lineCol(span.end)
+    Some(Range(startLine, startCol, endLine, endCol))
+
+  def namePresentInSource(sym: Symbol, span: Span, source:SourceFile)(using Context): Boolean =
+    if !span.exists then false
+    else
+      val content = source.content()
+      val (start, end) =
+        if content.lift(span.end - 1).exists(_ == '`') then
+          (span.start + 1, span.end - 1)
+        else (span.start, span.end)
+      val nameInSource = content.slice(start, end).mkString
+      // for secondary constructors `this`
+      if sym.isConstructor && nameInSource == nme.THISkw.toString then
+        true
+      else
+        val target =
+          if sym.isPackageObject then sym.owner
+          else sym
+        nameInSource == target.name.stripModuleClassSuffix.lastPart.toString
+
+  sealed trait FakeSymbol {
+    private[Scala3] var sname: Option[String] = None
+  }
+
+  /** Fake symbol that represents wildcard symbol which will be converted to
+    * semanticdb symbol with
+    * - name: local...
+    * - SymbolInformation with signature TypeSignature of given type bound.
+    */
+  case class WildcardTypeSymbol(owner: Symbol, bounds: TypeBounds) extends FakeSymbol
+
+  case class TermParamRefSymbol(owner: Symbol, name: Name, tp: Type) extends FakeSymbol
+  case class TypeParamRefSymbol(owner: Symbol, name: Name, tp: TypeBounds) extends FakeSymbol
+  case class RefinementSymbol(owner: Symbol, name: Name, tp: Type) extends FakeSymbol
+  type SemanticSymbol = Symbol | FakeSymbol
+
+  given SemanticSymbolOps : AnyRef with
+    extension (sym: SemanticSymbol)
+      def name(using Context): Name = sym match
+        case s: Symbol => s.name
+        case s: WildcardTypeSymbol => nme.WILDCARD
+        case s: TermParamRefSymbol => s.name
+        case s: TypeParamRefSymbol => s.name
+        case s: RefinementSymbol => s.name
+
+      def symbolName(using builder: SemanticSymbolBuilder)(using Context): String =
+        sym match
+          case s: Symbol => builder.symbolName(s)
+          case s: FakeSymbol =>
+            s.sname.getOrElse {
+              val sname = builder.symbolName(s)
+              s.sname = Some(sname)
+              sname
+            }
+
+      def symbolInfo(symkinds: Set[SymbolKind])(using LinkMode, TypeOps, SemanticSymbolBuilder, Context): SymbolInformation =
+        sym match
+          case s: Symbol =>
+            val kind = s.symbolKind(symkinds)
+            val sname = sym.symbolName
+            val signature = s.info.toSemanticSig(s)
+            val symbolAnnotations = s.annotations.collect{
+              case annot if annot.symbol != defn.BodyAnnot && annot.symbol != defn.ChildAnnot =>
+                Annotation(annot.tree.tpe.toSemanticType(annot.symbol))
+            }
+            SymbolInformation(
+              symbol = sname,
+              language = Language.SCALA,
+              kind = kind,
+              properties = s.symbolProps(symkinds),
+              displayName = Symbols.displaySymbol(s),
+              signature = signature,
+              access = s.symbolAccess(kind),
+              overriddenSymbols = s.overriddenSymbols,
+              annotations = symbolAnnotations
+            )
+          case s: WildcardTypeSymbol =>
+            SymbolInformation(
+              symbol = symbolName,
+              language = Language.SCALA,
+              kind = SymbolInformation.Kind.TYPE,
+              displayName = nme.WILDCARD.show,
+              signature = s.bounds.toSemanticSig(s.owner),
+            )
+          case s: TermParamRefSymbol =>
+            SymbolInformation(
+              symbol = symbolName,
+              language = Language.SCALA,
+              kind = SymbolInformation.Kind.PARAMETER,
+              displayName = s.name.show.unescapeUnicode,
+              signature = s.tp.toSemanticSig(s.owner),
+            )
+          case s: TypeParamRefSymbol =>
+            SymbolInformation(
+              symbol = symbolName,
+              language = Language.SCALA,
+              kind = SymbolInformation.Kind.TYPE_PARAMETER,
+              displayName = s.name.show.unescapeUnicode,
+              signature = s.tp.toSemanticSig(s.owner),
+            )
+          case s: RefinementSymbol =>
+            val signature = s.tp.toSemanticSig(s.owner)
+            val kind = signature match
+              case _: TypeSignature => SymbolInformation.Kind.TYPE
+              case _: MethodSignature => SymbolInformation.Kind.METHOD
+              case _: ValueSignature => SymbolInformation.Kind.FIELD
+              case _ => SymbolInformation.Kind.UNKNOWN_KIND
+            SymbolInformation(
+              symbol = symbolName,
+              language = Language.SCALA,
+              kind = kind,
+              displayName = s.name.show.unescapeUnicode,
+              properties =
+                SymbolInformation.Property.ABSTRACT.value,
+              signature = signature,
+            )
+  end SemanticSymbolOps
+
   enum SymbolKind derives CanEqual:
     kind =>
 
@@ -83,6 +210,7 @@ object Scala3:
 
       def isEmptyNumbered: Boolean =
         !name.is(NameKinds.WildcardParamName)
+        && !name.is(NameKinds.EvidenceParamName)
         && { name match
           case NameKinds.AnyNumberedName(nme.EMPTY, _) => true
           case _                                       => false
@@ -126,6 +254,113 @@ object Scala3:
       def isSyntheticWithIdent(using Context): Boolean =
         sym.is(Synthetic) && !sym.isAnonymous && !sym.name.isEmptyNumbered
 
+      /** The semanticdb name of the given symbol */
+      def symbolName(using builder: SemanticSymbolBuilder)(using Context): String =
+        builder.symbolName(sym)
+
+      def funParamSymbol(using builder: SemanticSymbolBuilder)(using Context): Name => String =
+        builder.funParamSymbol(sym)
+
+      def symbolKind(symkinds: Set[SymbolKind])(using Context): SymbolInformation.Kind =
+        if sym.isTypeParam then
+          SymbolInformation.Kind.TYPE_PARAMETER
+        else if sym.is(TermParam) then
+          SymbolInformation.Kind.PARAMETER
+        else if sym.isTerm && sym.owner.isTerm then
+          SymbolInformation.Kind.LOCAL
+        else if sym.isInlineMethod || sym.is(Macro) then
+          SymbolInformation.Kind.MACRO
+        else if sym.isConstructor then
+          SymbolInformation.Kind.CONSTRUCTOR
+        else if sym.isSelfSym then
+          SymbolInformation.Kind.SELF_PARAMETER
+        else if sym.isOneOf(Method) || symkinds.exists(_.isVarOrVal) then
+          SymbolInformation.Kind.METHOD
+        else if sym.isPackageObject then
+          SymbolInformation.Kind.PACKAGE_OBJECT
+        else if sym.is(Module) then
+          SymbolInformation.Kind.OBJECT
+        else if sym.is(Package) then
+          SymbolInformation.Kind.PACKAGE
+        else if sym.isAllOf(JavaInterface) then
+          SymbolInformation.Kind.INTERFACE
+        else if sym.is(Trait) then
+          SymbolInformation.Kind.TRAIT
+        else if sym.isClass then
+          SymbolInformation.Kind.CLASS
+        else if sym.isType then
+          SymbolInformation.Kind.TYPE
+        else if sym.is(ParamAccessor) then
+          SymbolInformation.Kind.FIELD
+        else
+          SymbolInformation.Kind.UNKNOWN_KIND
+
+      def symbolProps(symkinds: Set[SymbolKind])(using Context): Int =
+        if sym.is(ModuleClass) then
+          return sym.sourceModule.symbolProps(symkinds)
+        var props = 0
+        if sym.isPrimaryConstructor then
+          props |= SymbolInformation.Property.PRIMARY.value
+        if sym.is(Abstract) || symkinds.contains(SymbolKind.Abstract) then
+          props |= SymbolInformation.Property.ABSTRACT.value
+        if sym.is(Final) then
+          props |= SymbolInformation.Property.FINAL.value
+        if sym.is(Sealed) then
+          props |= SymbolInformation.Property.SEALED.value
+        if sym.isOneOf(GivenOrImplicit) then
+          props |= SymbolInformation.Property.IMPLICIT.value
+        if sym.is(Lazy, butNot=Module) then
+          props |= SymbolInformation.Property.LAZY.value
+        if sym.isAllOf(Case | Module) || sym.is(CaseClass) || sym.isAllOf(EnumCase) then
+          props |= SymbolInformation.Property.CASE.value
+        if sym.is(Covariant) then
+          props |= SymbolInformation.Property.COVARIANT.value
+        if sym.is(Contravariant) then
+          props |= SymbolInformation.Property.CONTRAVARIANT.value
+        if sym.isAllOf(DefaultMethod | JavaDefined) || sym.is(Accessor) && sym.name.is(NameKinds.DefaultGetterName) then
+          props |= SymbolInformation.Property.DEFAULT.value
+        if symkinds.exists(_.isVal) then
+          props |= SymbolInformation.Property.VAL.value
+        if symkinds.exists(_.isVar) then
+          props |= SymbolInformation.Property.VAR.value
+        if sym.is(JavaStatic) then
+          props |= SymbolInformation.Property.STATIC.value
+        if sym.is(Enum) then
+          props |= SymbolInformation.Property.ENUM.value
+        if sym.is(Given) then
+          props |= SymbolInformation.Property.GIVEN.value
+        if sym.is(Inline) then
+          props |= SymbolInformation.Property.INLINE.value
+        if sym.is(Open) then
+          props |= SymbolInformation.Property.OPEN.value
+        if sym.is(Open) then
+          props |= SymbolInformation.Property.OPEN.value
+        if sym.is(Transparent) then
+          props |= SymbolInformation.Property.TRANSPARENT.value
+        if sym.is(Infix) then
+          props |= SymbolInformation.Property.INFIX.value
+        if sym.is(Opaque) then
+          props |= SymbolInformation.Property.OPAQUE.value
+        props
+
+      def symbolAccess(kind: SymbolInformation.Kind)(using Context, SemanticSymbolBuilder): Access =
+        kind match
+          case k.LOCAL | k.PARAMETER | k.SELF_PARAMETER | k.TYPE_PARAMETER | k.PACKAGE | k.PACKAGE_OBJECT =>
+            Access.Empty
+          case _ =>
+            if (sym.privateWithin == NoSymbol)
+              if (sym.isAllOf(PrivateLocal)) PrivateThisAccess()
+              else if (sym.is(Private)) PrivateAccess()
+              else if (sym.isAllOf(ProtectedLocal)) ProtectedThisAccess()
+              else if (sym.is(Protected)) ProtectedAccess()
+              else PublicAccess()
+            else
+              val ssym = sym.privateWithin.symbolName
+              if (sym.is(Protected)) ProtectedWithinAccess(ssym)
+              else PrivateWithinAccess(ssym)
+
+      def overriddenSymbols(using Context, SemanticSymbolBuilder): List[String] =
+        sym.allOverriddenSymbols.map(_.symbolName).toList
   end SymbolOps
 
   object LocalSymbol:
@@ -164,6 +399,10 @@ object Scala3:
       def isTypeParameter: Boolean = !symbol.isEmpty && !symbol.isMulti && symbol.last == ']'
       def isParameter: Boolean = !symbol.isEmpty && !symbol.isMulti && symbol.last == ')'
 
+      def desc: Descriptor =
+        if isGlobal then DescriptorParser(symbol)._1
+        else Descriptor.None
+
       def unescapeUnicode =
         unicodeEscape.replaceAllIn(symbol, m => String.valueOf(Integer.parseInt(m.group(1), 16).toChar))
 
@@ -187,6 +426,12 @@ object Scala3:
       def isStatic: Boolean = (info.properties & SymbolInformation.Property.STATIC.value) != 0
       def isEnum: Boolean = (info.properties & SymbolInformation.Property.ENUM.value) != 0
       def isDefault: Boolean = (info.properties & SymbolInformation.Property.DEFAULT.value) != 0
+      def isGiven: Boolean = (info.properties & SymbolInformation.Property.GIVEN.value) != 0
+      def isInline: Boolean = (info.properties & SymbolInformation.Property.INLINE.value) != 0
+      def isOpen: Boolean = (info.properties & SymbolInformation.Property.OPEN.value) != 0
+      def isTransparent: Boolean = (info.properties & SymbolInformation.Property.TRANSPARENT.value) != 0
+      def isInfix: Boolean = (info.properties & SymbolInformation.Property.INFIX.value) != 0
+      def isOpaque: Boolean = (info.properties & SymbolInformation.Property.OPAQUE.value) != 0
 
       def isUnknownKind: Boolean = info.kind.isUnknownKind
       def isLocal: Boolean = info.kind.isLocal
@@ -211,9 +456,7 @@ object Scala3:
       def hasLength = range.endLine > range.startLine || range.endCharacter > range.startCharacter
   end RangeOps
 
-  /** Sort symbol occurrences by their start position. */
-  given OccurrenceOrdering: Ordering[SymbolOccurrence] = (x, y) =>
-    x.range -> y.range match
+  private def compareRange(x: Option[Range], y: Option[Range]): Int = x -> y match
     case None -> _ | _ -> None => 0
     case Some(a) -> Some(b) =>
       val byLine = Integer.compare(a.startLine, b.startLine)
@@ -221,10 +464,14 @@ object Scala3:
         byLine
       else // byCharacter
         Integer.compare(a.startCharacter, b.startCharacter)
-  end OccurrenceOrdering
+
+  /** Sort symbol occurrences by their start position. */
+  given Ordering[SymbolOccurrence] = (x, y) => compareRange(x.range, y.range)
 
   given Ordering[SymbolInformation] = Ordering.by[SymbolInformation, String](_.symbol)(IdentifierOrdering())
 
+  given Ordering[Synthetic] = (x, y) => compareRange(x.range, y.range)
+
   /**
     * A comparator for identifier like "Predef" or "Function10".
     *
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Schema.scala b/compiler/src/dotty/tools/dotc/semanticdb/Schema.scala
index 33b236d7ac5a..492e1647de51 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/Schema.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Schema.scala
@@ -1,21 +1,56 @@
-package dotty.tools.dotc.semanticdb
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
 
+package dotty.tools.dotc.semanticdb
 import dotty.tools.dotc.semanticdb.internal._
+import scala.annotation.internal.sharable
 
-sealed trait Schema(val value: Int) extends SemanticdbEnum derives CanEqual
-
-object Schema {
-
-  case object LEGACY extends Schema(0)
-  case object SEMANTICDB3 extends Schema(3)
-  case object SEMANTICDB4 extends Schema(4)
-  final case class Unrecognized(id: Int) extends Schema(id)
+sealed abstract class Schema(val value: _root_.scala.Int)  extends SemanticdbGeneratedEnum  derives CanEqual {
+  type EnumType = Schema
+  def isLegacy: _root_.scala.Boolean = false
+  def isSemanticdb3: _root_.scala.Boolean = false
+  def isSemanticdb4: _root_.scala.Boolean = false
+  
+  final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Schema.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Schema.Recognized])
+}
 
-  def fromValue(value: Int): Schema = value match {
+object Schema  {
+  sealed trait Recognized extends Schema
+  
+  @SerialVersionUID(0L)
+  case object LEGACY extends Schema(0) with Schema.Recognized {
+    val index = 0
+    val name = "LEGACY"
+    override def isLegacy: _root_.scala.Boolean = true
+  }
+  
+  @SerialVersionUID(0L)
+  case object SEMANTICDB3 extends Schema(3) with Schema.Recognized {
+    val index = 1
+    val name = "SEMANTICDB3"
+    override def isSemanticdb3: _root_.scala.Boolean = true
+  }
+  
+  @SerialVersionUID(0L)
+  case object SEMANTICDB4 extends Schema(4) with Schema.Recognized {
+    val index = 2
+    val name = "SEMANTICDB4"
+    override def isSemanticdb4: _root_.scala.Boolean = true
+  }
+  
+  @SerialVersionUID(0L)
+  final case class Unrecognized(unrecognizedValue: _root_.scala.Int)  extends Schema(unrecognizedValue) with SemanticdbUnrecognizedEnum
+  
+  lazy val values = scala.collection.immutable.Seq(LEGACY, SEMANTICDB3, SEMANTICDB4)
+  def fromValue(__value: _root_.scala.Int): Schema = __value match {
     case 0 => LEGACY
     case 3 => SEMANTICDB3
     case 4 => SEMANTICDB4
-    case id => Unrecognized(id)
+    case __other => Unrecognized(__other)
   }
-
-}
+  
+  
+}
\ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Scope.scala b/compiler/src/dotty/tools/dotc/semanticdb/Scope.scala
new file mode 100644
index 000000000000..f396929763ea
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Scope.scala
@@ -0,0 +1,107 @@
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
+
+package dotty.tools.dotc.semanticdb
+import dotty.tools.dotc.semanticdb.internal._
+import scala.annotation.internal.sharable
+
+@SerialVersionUID(0L)
+final case class Scope(
+    symlinks: _root_.scala.Seq[_root_.scala.Predef.String] = _root_.scala.Seq.empty,
+    hardlinks: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolInformation] = _root_.scala.Seq.empty
+    )  extends SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      symlinks.foreach { __item =>
+        val __value = __item
+        __size += SemanticdbOutputStream.computeStringSize(1, __value)
+      }
+      hardlinks.foreach { __item =>
+        val __value = __item
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      }
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      symlinks.foreach { __v =>
+        val __m = __v
+        _output__.writeString(1, __m)
+      };
+      hardlinks.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(2, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def clearSymlinks = copy(symlinks = _root_.scala.Seq.empty)
+    def addSymlinks(__vs: _root_.scala.Predef.String*): Scope = addAllSymlinks(__vs)
+    def addAllSymlinks(__vs: Iterable[_root_.scala.Predef.String]): Scope = copy(symlinks = symlinks ++ __vs)
+    def withSymlinks(__v: _root_.scala.Seq[_root_.scala.Predef.String]): Scope = copy(symlinks = __v)
+    def clearHardlinks = copy(hardlinks = _root_.scala.Seq.empty)
+    def addHardlinks(__vs: dotty.tools.dotc.semanticdb.SymbolInformation*): Scope = addAllHardlinks(__vs)
+    def addAllHardlinks(__vs: Iterable[dotty.tools.dotc.semanticdb.SymbolInformation]): Scope = copy(hardlinks = hardlinks ++ __vs)
+    def withHardlinks(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolInformation]): Scope = copy(hardlinks = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Scope])
+}
+
+object Scope  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Scope] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Scope] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Scope = {
+    val __symlinks: _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Predef.String] = new _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Predef.String]
+    val __hardlinks: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.SymbolInformation] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.SymbolInformation]
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __symlinks += _input__.readStringRequireUtf8()
+        case 18 =>
+          __hardlinks += LiteParser.readMessage[dotty.tools.dotc.semanticdb.SymbolInformation](_input__)
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.Scope(
+        symlinks = __symlinks.result(),
+        hardlinks = __hardlinks.result()
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.Scope(
+    symlinks = _root_.scala.Seq.empty,
+    hardlinks = _root_.scala.Seq.empty
+  )
+  final val SYMLINKS_FIELD_NUMBER = 1
+  final val HARDLINKS_FIELD_NUMBER = 2
+  def of(
+    symlinks: _root_.scala.Seq[_root_.scala.Predef.String],
+    hardlinks: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolInformation]
+  ): _root_.dotty.tools.dotc.semanticdb.Scope = _root_.dotty.tools.dotc.semanticdb.Scope(
+    symlinks,
+    hardlinks
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Scope])
+}
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala b/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala
new file mode 100644
index 000000000000..8743fd04d651
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/SemanticSymbolBuilder.scala
@@ -0,0 +1,151 @@
+package dotty.tools
+package dotc
+package semanticdb
+
+import core._
+import Contexts._
+import Symbols._
+import Flags._
+import Names.Name
+
+import scala.annotation.tailrec
+import scala.collection.mutable
+import dotty.tools.dotc.core.Types.TypeParamRef
+
+class SemanticSymbolBuilder:
+  import Scala3.{_, given}
+
+  private var nextLocalIdx: Int = 0
+
+  /** The index of a local symbol */
+  private val locals = mutable.HashMap[Symbol, Int]()
+
+  /** The local symbol(s) starting at given offset */
+  private val symsAtOffset = new mutable.HashMap[Int, Set[Symbol]]():
+    override def default(key: Int) = Set[Symbol]()
+
+
+  def symbolName(sym: Symbol)(using Context): String =
+    val b = StringBuilder(20)
+    addSymName(b, sym)
+    b.toString
+  def symbolName(sym: FakeSymbol)(using Context): String =
+    sym match
+      case sym: WildcardTypeSymbol =>
+        val b = StringBuilder(20)
+        addSymName(b, sym.owner)
+        b.append('['); addName(b, sym.name); b.append(']')
+        b.toString
+      case sym: TermParamRefSymbol =>
+        val b = StringBuilder(20)
+        addSymName(b, sym.owner)
+        b.append('('); addName(b, sym.name); b.append(')')
+        b.toString
+      case sym: TypeParamRefSymbol =>
+        val b = StringBuilder(20)
+        addSymName(b, sym.owner)
+        b.append('['); addName(b, sym.name); b.append(']')
+        b.toString
+      case sym: RefinementSymbol =>
+        val b = StringBuilder(20)
+        addLocalSymName(b)
+        b.toString
+
+  def funParamSymbol(sym: Symbol)(using Context): Name => String =
+    if sym.isGlobal then
+      val funSymbol = symbolName(sym)
+      name => s"$funSymbol($name)"
+    else
+      name => locals.keys.find(local => local.isTerm && local.owner == sym && local.name == name)
+                    .fold("")(Symbols.LocalPrefix + locals(_))
+
+  private def addName(b: StringBuilder, name: Name): Unit =
+    val str = name.toString.unescapeUnicode
+    if str.isJavaIdent then b append str
+    else b append '`' append str append '`'
+
+  private def addLocalSymName(b: StringBuilder): Unit =
+    val idx = nextLocalIdx
+    nextLocalIdx += 1
+    b.append(Symbols.LocalPrefix).append(idx)
+
+  /** Add semanticdb name of the given symbol to string builder */
+  private def addSymName(b: StringBuilder, sym: Symbol)(using Context): Unit =
+
+    // Recursively prefix `b` with the owner chain, stopping at the root package.
+    def addOwner(owner: Symbol): Unit =
+      if !owner.isRoot then addSymName(b, owner)
+
+    // Append the "+N" disambiguator for overloaded alternatives: the first
+    // matching declaration gets no suffix, later ones get +1, +2, ...
+    def addOverloadIdx(sym: Symbol): Unit =
+      val decls =
+        val decls0 = sym.owner.info.decls.lookupAll(sym.name)
+        // Static members of a Java class are modelled on the companion; merge
+        // both scopes so overload indices cover all alternatives.
+        if sym.owner.isAllOf(JavaModule) then
+          decls0 ++ sym.owner.companionClass.info.decls.lookupAll(sym.name)
+        else
+          decls0
+      end decls
+      val alts = decls.filter(_.isOneOf(Method | Mutable)).toList.reverse
+      def find(filter: Symbol => Boolean) = alts match
+        case notSym :: rest if !filter(notSym) =>
+          val idx = rest.indexWhere(filter).ensuring(_ >= 0)
+          b.append('+').append(idx + 1)
+        case _ =>
+      end find
+      val sig = sym.signature
+      find(_.signature == sig)
+
+    // Append the descriptor (trailing part) of the symbol name: [..] for type
+    // params, (..) for term params, '/' for packages, '#' for types,
+    // "(<overload>)." or '.' for terms.
+    def addDescriptor(sym: Symbol): Unit =
+      if sym.is(ModuleClass) then
+        addDescriptor(sym.sourceModule)
+      else if sym.is(TypeParam) then
+        b.append('['); addName(b, sym.name); b.append(']')
+      else if sym.is(Param) then
+        b.append('('); addName(b, sym.name); b.append(')')
+      else if sym.isRoot then
+        b.append(Symbols.RootPackage)
+      else if sym.isEmptyPackage then
+        b.append(Symbols.EmptyPackage)
+      else if (sym.isScala2PackageObject) then
+        b.append(Symbols.PackageObjectDescriptor)
+      else
+        addName(b, sym.name)
+        if sym.is(Package) then b.append('/')
+        else if sym.isType || sym.isAllOf(JavaModule) then b.append('#')
+        else if sym.isOneOf(Method | Mutable)
+        && (!sym.is(StableRealizable) || sym.isConstructor) then
+          b.append('('); addOverloadIdx(sym); b.append(").")
+        else b.append('.')
+
+    /** The index of local symbol `sym`. Symbols with the same name and
+     *  the same starting position have the same index.
+     */
+    def localIdx(sym: Symbol)(using Context): Int =
+      val startPos =
+        // assert(sym.span.exists, s"$sym should have a span")
+        if (sym.span.exists) Some(sym.span.start) else None
+      @tailrec
+      def computeLocalIdx(sym: Symbol): Int = locals get sym match
+        case Some(idx) => idx
+        case None =>
+          // Reuse the index of a previously-seen symbol with the same name at
+          // the same offset; otherwise allocate a fresh index and register it.
+          (for {
+            pos <- startPos
+            syms <- symsAtOffset.get(pos)
+            found <- syms.find(_.name == sym.name)
+          } yield found) match
+            case Some(other) => computeLocalIdx(other)
+            case None =>
+              val idx = nextLocalIdx
+              nextLocalIdx += 1
+              locals(sym) = idx
+              startPos.foreach(pos => symsAtOffset(pos) += sym)
+              idx
+      end computeLocalIdx
+      computeLocalIdx(sym)
+    end localIdx
+
+    // Global symbols render as owner prefix + descriptor; local symbols as
+    // local prefix + index. Non-existing symbols contribute nothing.
+    if sym.exists then
+      if sym.isGlobal then
+        addOwner(sym.owner); addDescriptor(sym)
+      else
+        b.append(Symbols.LocalPrefix).append(localIdx(sym))
+
+  end addSymName
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Signature.scala b/compiler/src/dotty/tools/dotc/semanticdb/Signature.scala
new file mode 100644
index 000000000000..9f1bff592b75
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Signature.scala
@@ -0,0 +1,701 @@
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
+
+package dotty.tools.dotc.semanticdb
+import dotty.tools.dotc.semanticdb.internal._
+import scala.annotation.internal.sharable
+
+/** ADT view of a SemanticDB `Signature`: either `Empty` or one of the
+ *  concrete signature kinds (class / method / type / value).
+ *  NOTE(review): machine-generated file — regenerate rather than hand-edit.
+ */
+sealed trait Signature  extends SemanticdbGeneratedSealedOneof  derives CanEqual {
+  type MessageType = dotty.tools.dotc.semanticdb.SignatureMessage
+  final def isEmpty = this.isInstanceOf[dotty.tools.dotc.semanticdb.Signature.Empty.type]
+  final def isDefined = !isEmpty
+  final def asMessage: dotty.tools.dotc.semanticdb.SignatureMessage = dotty.tools.dotc.semanticdb.Signature.SignatureTypeMapper.toBase(this)
+  final def asNonEmpty: Option[dotty.tools.dotc.semanticdb.Signature.NonEmpty] = if (isEmpty) None else Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Signature.NonEmpty])
+}
+
+object Signature {
+  case object Empty extends dotty.tools.dotc.semanticdb.Signature
+  
+  sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Signature
+  def defaultInstance: dotty.tools.dotc.semanticdb.Signature = Empty
+  
+  // Two-way mapping between the ADT and its wire-level `SignatureMessage`.
+  implicit val SignatureTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.SignatureMessage, dotty.tools.dotc.semanticdb.Signature] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.SignatureMessage, dotty.tools.dotc.semanticdb.Signature] {
+    override def toCustom(__base: dotty.tools.dotc.semanticdb.SignatureMessage): dotty.tools.dotc.semanticdb.Signature = __base.sealedValue match {
+      case __v: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ClassSignature => __v.value
+      case __v: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.MethodSignature => __v.value
+      case __v: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.TypeSignature => __v.value
+      case __v: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ValueSignature => __v.value
+      case dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty => Empty
+    }
+    override def toBase(__custom: dotty.tools.dotc.semanticdb.Signature): dotty.tools.dotc.semanticdb.SignatureMessage = dotty.tools.dotc.semanticdb.SignatureMessage(__custom match {
+      case __v: dotty.tools.dotc.semanticdb.ClassSignature => dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ClassSignature(__v)
+      case __v: dotty.tools.dotc.semanticdb.MethodSignature => dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.MethodSignature(__v)
+      case __v: dotty.tools.dotc.semanticdb.TypeSignature => dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.TypeSignature(__v)
+      case __v: dotty.tools.dotc.semanticdb.ValueSignature => dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ValueSignature(__v)
+      case Empty => dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty
+    })
+  }
+}
+/** Wire-level protobuf message carrying a oneof of the signature kinds.
+ *  NOTE(review): machine-generated — field tags and size computations must
+ *  match the generator's output exactly; do not hand-edit.
+ */
+@SerialVersionUID(0L)
+final case class SignatureMessage(
+    sealedValue: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty
+    )  extends SemanticdbGeneratedMessage  derives CanEqual {
+    // Cached serialized size; 0 means "not yet computed".
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      if (sealedValue.classSignature.isDefined) {
+        val __value = sealedValue.classSignature.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.methodSignature.isDefined) {
+        val __value = sealedValue.methodSignature.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.typeSignature.isDefined) {
+        val __value = sealedValue.typeSignature.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.valueSignature.isDefined) {
+        val __value = sealedValue.valueSignature.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    // Writes the set oneof branch (if any) as a length-delimited field.
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      sealedValue.classSignature.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(1, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.methodSignature.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(2, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.typeSignature.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(3, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.valueSignature.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(4, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def getClassSignature: dotty.tools.dotc.semanticdb.ClassSignature = sealedValue.classSignature.getOrElse(dotty.tools.dotc.semanticdb.ClassSignature.defaultInstance)
+    def withClassSignature(__v: dotty.tools.dotc.semanticdb.ClassSignature): SignatureMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ClassSignature(__v))
+    def getMethodSignature: dotty.tools.dotc.semanticdb.MethodSignature = sealedValue.methodSignature.getOrElse(dotty.tools.dotc.semanticdb.MethodSignature.defaultInstance)
+    def withMethodSignature(__v: dotty.tools.dotc.semanticdb.MethodSignature): SignatureMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.MethodSignature(__v))
+    def getTypeSignature: dotty.tools.dotc.semanticdb.TypeSignature = sealedValue.typeSignature.getOrElse(dotty.tools.dotc.semanticdb.TypeSignature.defaultInstance)
+    def withTypeSignature(__v: dotty.tools.dotc.semanticdb.TypeSignature): SignatureMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.TypeSignature(__v))
+    def getValueSignature: dotty.tools.dotc.semanticdb.ValueSignature = sealedValue.valueSignature.getOrElse(dotty.tools.dotc.semanticdb.ValueSignature.defaultInstance)
+    def withValueSignature(__v: dotty.tools.dotc.semanticdb.ValueSignature): SignatureMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ValueSignature(__v))
+    def clearSealedValue: SignatureMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty)
+    def withSealedValue(__v: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue): SignatureMessage = copy(sealedValue = __v)
+    
+    
+    
+    
+    def toSignature: dotty.tools.dotc.semanticdb.Signature = dotty.tools.dotc.semanticdb.Signature.SignatureTypeMapper.toCustom(this)
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Signature])
+}
+
+/** Companion: parser, default instance, and oneof case wrappers.
+ *  NOTE(review): machine-generated — do not hand-edit.
+ */
+object SignatureMessage  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SignatureMessage] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SignatureMessage] = this
+  // Decodes a SignatureMessage from the stream; unknown fields are skipped.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SignatureMessage = {
+    var __sealedValue: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ClassSignature(__sealedValue.classSignature.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ClassSignature](_input__))(LiteParser.readMessage(_input__, _)))
+        case 18 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.MethodSignature(__sealedValue.methodSignature.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.MethodSignature](_input__))(LiteParser.readMessage(_input__, _)))
+        case 26 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.TypeSignature(__sealedValue.typeSignature.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeSignature](_input__))(LiteParser.readMessage(_input__, _)))
+        case 34 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ValueSignature(__sealedValue.valueSignature.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ValueSignature](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.SignatureMessage(
+        sealedValue = __sealedValue
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.SignatureMessage(
+    sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty
+  )
+  // Oneof wrapper: exactly one of the signature kinds is set, or Empty.
+  sealed trait SealedValue  extends SemanticdbGeneratedOneof  derives CanEqual {
+    def isEmpty: _root_.scala.Boolean = false
+    def isDefined: _root_.scala.Boolean = true
+    def isClassSignature: _root_.scala.Boolean = false
+    def isMethodSignature: _root_.scala.Boolean = false
+    def isTypeSignature: _root_.scala.Boolean = false
+    def isValueSignature: _root_.scala.Boolean = false
+    def classSignature: _root_.scala.Option[dotty.tools.dotc.semanticdb.ClassSignature] = _root_.scala.None
+    def methodSignature: _root_.scala.Option[dotty.tools.dotc.semanticdb.MethodSignature] = _root_.scala.None
+    def typeSignature: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeSignature] = _root_.scala.None
+    def valueSignature: _root_.scala.Option[dotty.tools.dotc.semanticdb.ValueSignature] = _root_.scala.None
+  }
+  object SealedValue {
+    @SerialVersionUID(0L)
+    case object Empty extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue {
+      type ValueType = _root_.scala.Nothing
+      override def isEmpty: _root_.scala.Boolean = true
+      override def isDefined: _root_.scala.Boolean = false
+      override def number: _root_.scala.Int = 0
+      override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value")
+    }
+  
+    @SerialVersionUID(0L)
+    final case class ClassSignature(value: dotty.tools.dotc.semanticdb.ClassSignature) extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.ClassSignature
+      override def isClassSignature: _root_.scala.Boolean = true
+      override def classSignature: _root_.scala.Option[dotty.tools.dotc.semanticdb.ClassSignature] = Some(value)
+      override def number: _root_.scala.Int = 1
+    }
+    @SerialVersionUID(0L)
+    final case class MethodSignature(value: dotty.tools.dotc.semanticdb.MethodSignature) extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.MethodSignature
+      override def isMethodSignature: _root_.scala.Boolean = true
+      override def methodSignature: _root_.scala.Option[dotty.tools.dotc.semanticdb.MethodSignature] = Some(value)
+      override def number: _root_.scala.Int = 2
+    }
+    @SerialVersionUID(0L)
+    final case class TypeSignature(value: dotty.tools.dotc.semanticdb.TypeSignature) extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.TypeSignature
+      override def isTypeSignature: _root_.scala.Boolean = true
+      override def typeSignature: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeSignature] = Some(value)
+      override def number: _root_.scala.Int = 3
+    }
+    @SerialVersionUID(0L)
+    final case class ValueSignature(value: dotty.tools.dotc.semanticdb.ValueSignature) extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.ValueSignature
+      override def isValueSignature: _root_.scala.Boolean = true
+      override def valueSignature: _root_.scala.Option[dotty.tools.dotc.semanticdb.ValueSignature] = Some(value)
+      override def number: _root_.scala.Int = 4
+    }
+  }
+  final val CLASS_SIGNATURE_FIELD_NUMBER = 1
+  final val METHOD_SIGNATURE_FIELD_NUMBER = 2
+  final val TYPE_SIGNATURE_FIELD_NUMBER = 3
+  final val VALUE_SIGNATURE_FIELD_NUMBER = 4
+  def of(
+    sealedValue: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue
+  ): _root_.dotty.tools.dotc.semanticdb.SignatureMessage = _root_.dotty.tools.dotc.semanticdb.SignatureMessage(
+    sealedValue
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Signature])
+}
+
+/** Protobuf message for a class signature: type parameters, parent types,
+ *  self type, and declarations.
+ *  NOTE(review): machine-generated — do not hand-edit.
+ */
+@SerialVersionUID(0L)
+final case class ClassSignature(
+    typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None,
+    parents: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty,
+    self: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_self.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
+    declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None
+    )  extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    // Cached serialized size; 0 means "not yet computed".
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      if (typeParameters.isDefined) {
+        val __value = typeParameters.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      parents.foreach { __item =>
+        val __value = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_parents.toBase(__item)
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      }
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_self.toBase(self)
+        if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      if (declarations.isDefined) {
+        val __value = declarations.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    // Writes set/non-default fields as length-delimited protobuf fields 1-4.
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      typeParameters.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(1, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      parents.foreach { __v =>
+        val __m = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_parents.toBase(__v)
+        _output__.writeTag(2, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      {
+        val __v = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_self.toBase(self)
+        if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          _output__.writeTag(3, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+      declarations.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(4, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def getTypeParameters: dotty.tools.dotc.semanticdb.Scope = typeParameters.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance)
+    def clearTypeParameters: ClassSignature = copy(typeParameters = _root_.scala.None)
+    def withTypeParameters(__v: dotty.tools.dotc.semanticdb.Scope): ClassSignature = copy(typeParameters = Option(__v))
+    def clearParents = copy(parents = _root_.scala.Seq.empty)
+    def addParents(__vs: dotty.tools.dotc.semanticdb.Type*): ClassSignature = addAllParents(__vs)
+    def addAllParents(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): ClassSignature = copy(parents = parents ++ __vs)
+    def withParents(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): ClassSignature = copy(parents = __v)
+    def withSelf(__v: dotty.tools.dotc.semanticdb.Type): ClassSignature = copy(self = __v)
+    def getDeclarations: dotty.tools.dotc.semanticdb.Scope = declarations.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance)
+    def clearDeclarations: ClassSignature = copy(declarations = _root_.scala.None)
+    def withDeclarations(__v: dotty.tools.dotc.semanticdb.Scope): ClassSignature = copy(declarations = Option(__v))
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ClassSignature])
+}
+
+/** Companion: parser, default instance, field numbers, and type mappers.
+ *  NOTE(review): machine-generated — do not hand-edit.
+ */
+object ClassSignature  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ClassSignature] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ClassSignature] = this
+  // Decodes a ClassSignature from the stream; unknown fields are skipped.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ClassSignature = {
+    var __typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None
+    val __parents: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type]
+    var __self: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None
+    var __declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __typeParameters = Option(__typeParameters.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__))(LiteParser.readMessage(_input__, _)))
+        case 18 =>
+          __parents += dotty.tools.dotc.semanticdb.ClassSignature._typemapper_parents.toCustom(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))
+        case 26 =>
+          __self = _root_.scala.Some(__self.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case 34 =>
+          __declarations = Option(__declarations.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.ClassSignature(
+        typeParameters = __typeParameters,
+        parents = __parents.result(),
+        self = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_self.toCustom(__self.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)),
+        declarations = __declarations
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ClassSignature(
+    typeParameters = _root_.scala.None,
+    parents = _root_.scala.Seq.empty,
+    self = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_self.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
+    declarations = _root_.scala.None
+  )
+  final val TYPE_PARAMETERS_FIELD_NUMBER = 1
+  final val PARENTS_FIELD_NUMBER = 2
+  final val SELF_FIELD_NUMBER = 3
+  final val DECLARATIONS_FIELD_NUMBER = 4
+  @transient @sharable
+  private[semanticdb] val _typemapper_parents: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  @transient @sharable
+  private[semanticdb] val _typemapper_self: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope],
+    parents: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type],
+    self: dotty.tools.dotc.semanticdb.Type,
+    declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope]
+  ): _root_.dotty.tools.dotc.semanticdb.ClassSignature = _root_.dotty.tools.dotc.semanticdb.ClassSignature(
+    typeParameters,
+    parents,
+    self,
+    declarations
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ClassSignature])
+}
+
+/** Protobuf message for a method signature: type parameters, parameter
+ *  lists, and return type.
+ *  NOTE(review): machine-generated — do not hand-edit.
+ */
+@SerialVersionUID(0L)
+final case class MethodSignature(
+    typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None,
+    parameterLists: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.Seq.empty,
+    returnType: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+    )  extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    // Cached serialized size; 0 means "not yet computed".
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      if (typeParameters.isDefined) {
+        val __value = typeParameters.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      parameterLists.foreach { __item =>
+        val __value = __item
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      }
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toBase(returnType)
+        if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    // Writes set/non-default fields as length-delimited protobuf fields 1-3.
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      typeParameters.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(1, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      parameterLists.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(2, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      {
+        val __v = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toBase(returnType)
+        if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          _output__.writeTag(3, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+    }
+    def getTypeParameters: dotty.tools.dotc.semanticdb.Scope = typeParameters.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance)
+    def clearTypeParameters: MethodSignature = copy(typeParameters = _root_.scala.None)
+    def withTypeParameters(__v: dotty.tools.dotc.semanticdb.Scope): MethodSignature = copy(typeParameters = Option(__v))
+    def clearParameterLists = copy(parameterLists = _root_.scala.Seq.empty)
+    def addParameterLists(__vs: dotty.tools.dotc.semanticdb.Scope*): MethodSignature = addAllParameterLists(__vs)
+    def addAllParameterLists(__vs: Iterable[dotty.tools.dotc.semanticdb.Scope]): MethodSignature = copy(parameterLists = parameterLists ++ __vs)
+    def withParameterLists(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Scope]): MethodSignature = copy(parameterLists = __v)
+    def withReturnType(__v: dotty.tools.dotc.semanticdb.Type): MethodSignature = copy(returnType = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MethodSignature])
+}
+
+/** Companion: parser, default instance, field numbers, and type mapper.
+ *  NOTE(review): machine-generated — do not hand-edit.
+ */
+object MethodSignature  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MethodSignature] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MethodSignature] = this
+  // Decodes a MethodSignature from the stream; unknown fields are skipped.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.MethodSignature = {
+    var __typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None
+    val __parameterLists: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Scope] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Scope]
+    var __returnType: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __typeParameters = Option(__typeParameters.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__))(LiteParser.readMessage(_input__, _)))
+        case 18 =>
+          __parameterLists += LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__)
+        case 26 =>
+          __returnType = _root_.scala.Some(__returnType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.MethodSignature(
+        typeParameters = __typeParameters,
+        parameterLists = __parameterLists.result(),
+        returnType = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toCustom(__returnType.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.MethodSignature(
+    typeParameters = _root_.scala.None,
+    parameterLists = _root_.scala.Seq.empty,
+    returnType = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+  )
+  final val TYPE_PARAMETERS_FIELD_NUMBER = 1
+  final val PARAMETER_LISTS_FIELD_NUMBER = 2
+  final val RETURN_TYPE_FIELD_NUMBER = 3
+  @transient @sharable
+  private[semanticdb] val _typemapper_returnType: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope],
+    parameterLists: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Scope],
+    returnType: dotty.tools.dotc.semanticdb.Type
+  ): _root_.dotty.tools.dotc.semanticdb.MethodSignature = _root_.dotty.tools.dotc.semanticdb.MethodSignature(
+    typeParameters,
+    parameterLists,
+    returnType
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MethodSignature])
+}
+
+@SerialVersionUID(0L)
+final case class TypeSignature(
+    typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None,
+    lowerBound: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_lowerBound.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
+    upperBound: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+    )  extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      if (typeParameters.isDefined) {
+        val __value = typeParameters.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_lowerBound.toBase(lowerBound)
+        if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toBase(upperBound)
+        if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      typeParameters.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(1, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      {
+        val __v = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_lowerBound.toBase(lowerBound)
+        if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          _output__.writeTag(2, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+      {
+        val __v = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toBase(upperBound)
+        if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          _output__.writeTag(3, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+    }
+    def getTypeParameters: dotty.tools.dotc.semanticdb.Scope = typeParameters.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance)
+    def clearTypeParameters: TypeSignature = copy(typeParameters = _root_.scala.None)
+    def withTypeParameters(__v: dotty.tools.dotc.semanticdb.Scope): TypeSignature = copy(typeParameters = Option(__v))
+    def withLowerBound(__v: dotty.tools.dotc.semanticdb.Type): TypeSignature = copy(lowerBound = __v)
+    def withUpperBound(__v: dotty.tools.dotc.semanticdb.Type): TypeSignature = copy(upperBound = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TypeSignature])
+}
+
+object TypeSignature  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeSignature] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeSignature] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TypeSignature = {
+    var __typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None
+    var __lowerBound: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None
+    var __upperBound: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __typeParameters = Option(__typeParameters.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__))(LiteParser.readMessage(_input__, _)))
+        case 18 =>
+          __lowerBound = _root_.scala.Some(__lowerBound.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case 26 =>
+          __upperBound = _root_.scala.Some(__upperBound.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.TypeSignature(
+        typeParameters = __typeParameters,
+        lowerBound = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_lowerBound.toCustom(__lowerBound.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)),
+        upperBound = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toCustom(__upperBound.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.TypeSignature(
+    typeParameters = _root_.scala.None,
+    lowerBound = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_lowerBound.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
+    upperBound = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+  )
+  final val TYPE_PARAMETERS_FIELD_NUMBER = 1
+  final val LOWER_BOUND_FIELD_NUMBER = 2
+  final val UPPER_BOUND_FIELD_NUMBER = 3
+  @transient @sharable
+  private[semanticdb] val _typemapper_lowerBound: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  @transient @sharable
+  private[semanticdb] val _typemapper_upperBound: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope],
+    lowerBound: dotty.tools.dotc.semanticdb.Type,
+    upperBound: dotty.tools.dotc.semanticdb.Type
+  ): _root_.dotty.tools.dotc.semanticdb.TypeSignature = _root_.dotty.tools.dotc.semanticdb.TypeSignature(
+    typeParameters,
+    lowerBound,
+    upperBound
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeSignature])
+}
+
+@SerialVersionUID(0L)
+final case class ValueSignature(
+    tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+    )  extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toBase(tpe)
+        if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toBase(tpe)
+        if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          _output__.writeTag(1, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+    }
+    def withTpe(__v: dotty.tools.dotc.semanticdb.Type): ValueSignature = copy(tpe = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ValueSignature])
+}
+
+object ValueSignature  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ValueSignature] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ValueSignature] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ValueSignature = {
+    var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.ValueSignature(
+        tpe = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ValueSignature(
+    tpe = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+  )
+  final val TPE_FIELD_NUMBER = 1
+  @transient @sharable
+  private[semanticdb] val _typemapper_tpe: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    tpe: dotty.tools.dotc.semanticdb.Type
+  ): _root_.dotty.tools.dotc.semanticdb.ValueSignature = _root_.dotty.tools.dotc.semanticdb.ValueSignature(
+    tpe
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ValueSignature])
+}
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/SymbolInformation.scala b/compiler/src/dotty/tools/dotc/semanticdb/SymbolInformation.scala
index b2508732232a..f6d060563be4 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/SymbolInformation.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/SymbolInformation.scala
@@ -1,179 +1,94 @@
-package dotty.tools.dotc.semanticdb
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
 
+package dotty.tools.dotc.semanticdb
 import dotty.tools.dotc.semanticdb.internal._
 import scala.annotation.internal.sharable
 
-object SymbolInformation {
-
-  val defaultInstance = SymbolInformation("", Language.UNKNOWN_LANGUAGE, SymbolInformation.Kind.UNKNOWN_KIND, 0, "")
-
-  sealed trait Kind(val value: Int) extends SemanticdbEnum derives CanEqual {
-    def isUnknownKind: Boolean = this == Kind.UNKNOWN_KIND
-    def isLocal: Boolean = this == Kind.LOCAL
-    def isField: Boolean = this == Kind.FIELD
-    def isMethod: Boolean = this == Kind.METHOD
-    def isConstructor: Boolean = this == Kind.CONSTRUCTOR
-    def isMacro: Boolean = this == Kind.MACRO
-    def isType: Boolean = this == Kind.TYPE
-    def isParameter: Boolean = this == Kind.PARAMETER
-    def isSelfParameter: Boolean = this == Kind.SELF_PARAMETER
-    def isTypeParameter: Boolean = this == Kind.TYPE_PARAMETER
-    def isObject: Boolean = this == Kind.OBJECT
-    def isPackage: Boolean = this == Kind.PACKAGE
-    def isPackageObject: Boolean = this == Kind.PACKAGE_OBJECT
-    def isClass: Boolean = this == Kind.CLASS
-    def isTrait: Boolean = this == Kind.TRAIT
-    def isInterface: Boolean = this == Kind.INTERFACE
-  }
-
-  object Kind {
-
-    case object UNKNOWN_KIND extends Kind(0)
-    case object METHOD extends Kind(3)
-    case object MACRO extends Kind(6)
-    case object TYPE extends Kind(7)
-    case object PARAMETER extends Kind(8)
-    case object TYPE_PARAMETER extends Kind(9)
-    case object OBJECT extends Kind(10)
-    case object PACKAGE extends Kind(11)
-    case object PACKAGE_OBJECT extends Kind(12)
-    case object CLASS extends Kind(13)
-    case object TRAIT extends Kind(14)
-    case object SELF_PARAMETER extends Kind(17)
-    case object INTERFACE extends Kind(18)
-    case object LOCAL extends Kind(19)
-    case object FIELD extends Kind(20)
-    case object CONSTRUCTOR extends Kind(21)
-    final case class Unrecognized(id: Int) extends Kind(id)
-
-    def fromValue(value: Int): Kind = value match {
-      case 0 => UNKNOWN_KIND
-      case 3 => METHOD
-      case 6 => MACRO
-      case 7 => TYPE
-      case 8 => PARAMETER
-      case 9 => TYPE_PARAMETER
-      case 10 => OBJECT
-      case 11 => PACKAGE
-      case 12 => PACKAGE_OBJECT
-      case 13 => CLASS
-      case 14 => TRAIT
-      case 17 => SELF_PARAMETER
-      case 18 => INTERFACE
-      case 19 => LOCAL
-      case 20 => FIELD
-      case 21 => CONSTRUCTOR
-      case id => Unrecognized(id)
-    }
-  }
-
-  sealed trait Property(val value: Int) extends SemanticdbEnum derives CanEqual {
-    def isUnknownProperty: Boolean = this == Property.UNKNOWN_PROPERTY
-    def isAbstract: Boolean = this == Property.ABSTRACT
-    def isFinal: Boolean = this == Property.FINAL
-    def isSealed: Boolean = this == Property.SEALED
-    def isImplicit: Boolean = this == Property.IMPLICIT
-    def isLazy: Boolean = this == Property.LAZY
-    def isCase: Boolean = this == Property.CASE
-    def isCovariant: Boolean = this == Property.CONTRAVARIANT
-    def isContravariant: Boolean = this == Property.CONTRAVARIANT
-    def isVal: Boolean = this == Property.VAL
-    def isVar: Boolean = this == Property.VAR
-    def isStatic: Boolean = this == Property.STATIC
-    def isPrimary: Boolean = this == Property.PRIMARY
-    def isEnum: Boolean = this == Property.ENUM
-    def isDefault: Boolean = this == Property.DEFAULT
-  }
-
-  object Property {
-
-    case object UNKNOWN_PROPERTY extends Property(0)
-    case object ABSTRACT extends Property(4)
-    case object FINAL extends Property(8)
-    case object SEALED extends Property(16)
-    case object IMPLICIT extends Property(32)
-    case object LAZY extends Property(64)
-    case object CASE extends Property(128)
-    case object COVARIANT extends Property(256)
-    case object CONTRAVARIANT extends Property(512)
-    case object VAL extends Property(1024)
-    case object VAR extends Property(2048)
-    case object STATIC extends Property(4096)
-    case object PRIMARY extends Property(8192)
-    case object ENUM extends Property(16384)
-    case object DEFAULT extends Property(32768)
-    final case class Unrecognized(id: Int) extends Property(id)
-
-    def fromValue(value: Int): Property = value match {
-      case 0x0 => UNKNOWN_PROPERTY
-      case 0x4 => ABSTRACT
-      case 0x8 => FINAL
-      case 0x10 => SEALED
-      case 0x20 => IMPLICIT
-      case 0x40 => LAZY
-      case 0x80 => CASE
-      case 0x100 => COVARIANT
-      case 0x200 => CONTRAVARIANT
-      case 0x400 => VAL
-      case 0x800 => VAR
-      case 0x1000 => STATIC
-      case 0x2000 => PRIMARY
-      case 0x4000 => ENUM
-      case 0x8000 => DEFAULT
-      case id => Unrecognized(id)
-    }
-  }
-}
-
+@SerialVersionUID(0L)
 final case class SymbolInformation(
-  symbol: String,
-  language: Language,
-  kind: SymbolInformation.Kind,
-  properties: Int,
-  displayName: String
-) extends SemanticdbMessage[SymbolInformation] derives CanEqual {
-    @sharable
-    private var __serializedSizeCachedValue: Int = 0
-    private def __computeSerializedValue(): Int = {
+    symbol: _root_.scala.Predef.String = "",
+    language: dotty.tools.dotc.semanticdb.Language = dotty.tools.dotc.semanticdb.Language.UNKNOWN_LANGUAGE,
+    kind: dotty.tools.dotc.semanticdb.SymbolInformation.Kind = dotty.tools.dotc.semanticdb.SymbolInformation.Kind.UNKNOWN_KIND,
+    properties: _root_.scala.Int = 0,
+    displayName: _root_.scala.Predef.String = "",
+    signature: dotty.tools.dotc.semanticdb.Signature = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_signature.toCustom(dotty.tools.dotc.semanticdb.SignatureMessage.defaultInstance),
+    annotations: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Annotation] = _root_.scala.Seq.empty,
+    access: dotty.tools.dotc.semanticdb.Access = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_access.toCustom(dotty.tools.dotc.semanticdb.AccessMessage.defaultInstance),
+    overriddenSymbols: _root_.scala.Seq[_root_.scala.Predef.String] = _root_.scala.Seq.empty,
+    documentation: _root_.scala.Option[dotty.tools.dotc.semanticdb.Documentation] = _root_.scala.None
+    )  extends SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
       var __size = 0
-
+      
       {
         val __value = symbol
-        if (__value != "") {
+        if (!__value.isEmpty) {
           __size += SemanticdbOutputStream.computeStringSize(1, __value)
         }
       };
-
+      
       {
-        val __value = language
-        if (__value != Language.UNKNOWN_LANGUAGE) {
-          __size += SemanticdbOutputStream.computeEnumSize(16, __value.value)
+        val __value = language.value
+        if (__value != 0) {
+          __size += SemanticdbOutputStream.computeEnumSize(16, __value)
         }
       };
-
+      
       {
-        val __value = kind
-        if (__value != SymbolInformation.Kind.UNKNOWN_KIND) {
-          __size += SemanticdbOutputStream.computeEnumSize(3, __value.value)
+        val __value = kind.value
+        if (__value != 0) {
+          __size += SemanticdbOutputStream.computeEnumSize(3, __value)
         }
       };
-
+      
       {
         val __value = properties
         if (__value != 0) {
           __size += SemanticdbOutputStream.computeInt32Size(4, __value)
         }
       };
-
+      
       {
         val __value = displayName
-        if (__value != "") {
+        if (!__value.isEmpty) {
           __size += SemanticdbOutputStream.computeStringSize(5, __value)
         }
       };
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_signature.toBase(signature)
+        if (__value != dotty.tools.dotc.semanticdb.SignatureMessage.defaultInstance) {
+          __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      annotations.foreach { __item =>
+        val __value = __item
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      }
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_access.toBase(access)
+        if (__value != dotty.tools.dotc.semanticdb.AccessMessage.defaultInstance) {
+          __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      overriddenSymbols.foreach { __item =>
+        val __value = __item
+        __size += SemanticdbOutputStream.computeStringSize(19, __value)
+      }
+      if (documentation.isDefined) {
+        val __value = documentation.get
+        __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
       __size
     }
-    final override def serializedSize: Int = {
+    override def serializedSize: _root_.scala.Int = {
       var read = __serializedSizeCachedValue
       if (read == 0) {
         read = __computeSerializedValue()
@@ -181,17 +96,17 @@ final case class SymbolInformation(
       }
       read
     }
-    def writeTo(`_output__`: SemanticdbOutputStream): Unit = {
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
       {
         val __v = symbol
-        if (__v != "") {
+        if (!__v.isEmpty) {
           _output__.writeString(1, __v)
         }
       };
       {
-        val __v = kind
-        if (__v != SymbolInformation.Kind.UNKNOWN_KIND) {
-          _output__.writeEnum(3, __v.value)
+        val __v = kind.value
+        if (__v != 0) {
+          _output__.writeEnum(3, __v)
         }
       };
       {
@@ -202,45 +117,553 @@ final case class SymbolInformation(
       };
       {
         val __v = displayName
-        if (__v != "") {
+        if (!__v.isEmpty) {
           _output__.writeString(5, __v)
         }
       };
+      annotations.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(13, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
       {
-        val __v = language
-        if (__v != Language.UNKNOWN_LANGUAGE) {
-          _output__.writeEnum(16, __v.value)
+        val __v = language.value
+        if (__v != 0) {
+          _output__.writeEnum(16, __v)
         }
       };
-    }
-    def mergeFrom(`_input__`: SemanticdbInputStream): SymbolInformation = {
-      var __symbol = this.symbol
-      var __language = this.language
-      var __kind = this.kind
-      var __properties = this.properties
-      var __displayName = this.displayName
-      var _done__ = false
-      while (!_done__) {
-        val _tag__ = _input__.readTag()
-        _tag__ match {
-          case 0 => _done__ = true
-          case 10 =>
-            __symbol = _input__.readString()
-          case 24 =>
-            __kind = SymbolInformation.Kind.fromValue(_input__.readEnum())
-          case 32 =>
-            __properties = _input__.readInt32()
-          case 42 =>
-            __displayName = _input__.readString()
-          case tag => _input__.skipField(tag)
+      {
+        val __v = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_signature.toBase(signature)
+        if (__v != dotty.tools.dotc.semanticdb.SignatureMessage.defaultInstance) {
+          _output__.writeTag(17, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
         }
+      };
+      {
+        val __v = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_access.toBase(access)
+        if (__v != dotty.tools.dotc.semanticdb.AccessMessage.defaultInstance) {
+          _output__.writeTag(18, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+      overriddenSymbols.foreach { __v =>
+        val __m = __v
+        _output__.writeString(19, __m)
+      };
+      documentation.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(20, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def withSymbol(__v: _root_.scala.Predef.String): SymbolInformation = copy(symbol = __v)
+    def withLanguage(__v: dotty.tools.dotc.semanticdb.Language): SymbolInformation = copy(language = __v)
+    def withKind(__v: dotty.tools.dotc.semanticdb.SymbolInformation.Kind): SymbolInformation = copy(kind = __v)
+    def withProperties(__v: _root_.scala.Int): SymbolInformation = copy(properties = __v)
+    def withDisplayName(__v: _root_.scala.Predef.String): SymbolInformation = copy(displayName = __v)
+    def withSignature(__v: dotty.tools.dotc.semanticdb.Signature): SymbolInformation = copy(signature = __v)
+    def clearAnnotations = copy(annotations = _root_.scala.Seq.empty)
+    def addAnnotations(__vs: dotty.tools.dotc.semanticdb.Annotation*): SymbolInformation = addAllAnnotations(__vs)
+    def addAllAnnotations(__vs: Iterable[dotty.tools.dotc.semanticdb.Annotation]): SymbolInformation = copy(annotations = annotations ++ __vs)
+    def withAnnotations(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Annotation]): SymbolInformation = copy(annotations = __v)
+    def withAccess(__v: dotty.tools.dotc.semanticdb.Access): SymbolInformation = copy(access = __v)
+    def clearOverriddenSymbols = copy(overriddenSymbols = _root_.scala.Seq.empty)
+    def addOverriddenSymbols(__vs: _root_.scala.Predef.String*): SymbolInformation = addAllOverriddenSymbols(__vs)
+    def addAllOverriddenSymbols(__vs: Iterable[_root_.scala.Predef.String]): SymbolInformation = copy(overriddenSymbols = overriddenSymbols ++ __vs)
+    def withOverriddenSymbols(__v: _root_.scala.Seq[_root_.scala.Predef.String]): SymbolInformation = copy(overriddenSymbols = __v)
+    def getDocumentation: dotty.tools.dotc.semanticdb.Documentation = documentation.getOrElse(dotty.tools.dotc.semanticdb.Documentation.defaultInstance)
+    def clearDocumentation: SymbolInformation = copy(documentation = _root_.scala.None)
+    def withDocumentation(__v: dotty.tools.dotc.semanticdb.Documentation): SymbolInformation = copy(documentation = Option(__v))
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SymbolInformation])
+}
+
+object SymbolInformation  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SymbolInformation] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SymbolInformation] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SymbolInformation = {
+    var __symbol: _root_.scala.Predef.String = ""
+    var __language: dotty.tools.dotc.semanticdb.Language = dotty.tools.dotc.semanticdb.Language.UNKNOWN_LANGUAGE
+    var __kind: dotty.tools.dotc.semanticdb.SymbolInformation.Kind = dotty.tools.dotc.semanticdb.SymbolInformation.Kind.UNKNOWN_KIND
+    var __properties: _root_.scala.Int = 0
+    var __displayName: _root_.scala.Predef.String = ""
+    var __signature: _root_.scala.Option[dotty.tools.dotc.semanticdb.SignatureMessage] = _root_.scala.None
+    val __annotations: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Annotation] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Annotation]
+    var __access: _root_.scala.Option[dotty.tools.dotc.semanticdb.AccessMessage] = _root_.scala.None
+    val __overriddenSymbols: _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Predef.String] = new _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Predef.String]
+    var __documentation: _root_.scala.Option[dotty.tools.dotc.semanticdb.Documentation] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __symbol = _input__.readStringRequireUtf8()
+        case 128 =>
+          __language = dotty.tools.dotc.semanticdb.Language.fromValue(_input__.readEnum())
+        case 24 =>
+          __kind = dotty.tools.dotc.semanticdb.SymbolInformation.Kind.fromValue(_input__.readEnum())
+        case 32 =>
+          __properties = _input__.readInt32()
+        case 42 =>
+          __displayName = _input__.readStringRequireUtf8()
+        case 138 =>
+          __signature = _root_.scala.Some(__signature.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.SignatureMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case 106 =>
+          __annotations += LiteParser.readMessage[dotty.tools.dotc.semanticdb.Annotation](_input__)
+        case 146 =>
+          __access = _root_.scala.Some(__access.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.AccessMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case 154 =>
+          __overriddenSymbols += _input__.readStringRequireUtf8()
+        case 162 =>
+          __documentation = Option(__documentation.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Documentation](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
       }
-      SymbolInformation(
+    }
+    dotty.tools.dotc.semanticdb.SymbolInformation(
         symbol = __symbol,
         language = __language,
         kind = __kind,
         properties = __properties,
-        displayName = __displayName
-      )
+        displayName = __displayName,
+        signature = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_signature.toCustom(__signature.getOrElse(dotty.tools.dotc.semanticdb.SignatureMessage.defaultInstance)),
+        annotations = __annotations.result(),
+        access = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_access.toCustom(__access.getOrElse(dotty.tools.dotc.semanticdb.AccessMessage.defaultInstance)),
+        overriddenSymbols = __overriddenSymbols.result(),
+        documentation = __documentation
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.SymbolInformation(
+    symbol = "",
+    language = dotty.tools.dotc.semanticdb.Language.UNKNOWN_LANGUAGE,
+    kind = dotty.tools.dotc.semanticdb.SymbolInformation.Kind.UNKNOWN_KIND,
+    properties = 0,
+    displayName = "",
+    signature = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_signature.toCustom(dotty.tools.dotc.semanticdb.SignatureMessage.defaultInstance),
+    annotations = _root_.scala.Seq.empty,
+    access = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_access.toCustom(dotty.tools.dotc.semanticdb.AccessMessage.defaultInstance),
+    overriddenSymbols = _root_.scala.Seq.empty,
+    documentation = _root_.scala.None
+  )
+  sealed abstract class Kind(val value: _root_.scala.Int)  extends SemanticdbGeneratedEnum  derives CanEqual {
+    type EnumType = Kind
+    def isUnknownKind: _root_.scala.Boolean = false
+    def isLocal: _root_.scala.Boolean = false
+    def isField: _root_.scala.Boolean = false
+    def isMethod: _root_.scala.Boolean = false
+    def isConstructor: _root_.scala.Boolean = false
+    def isMacro: _root_.scala.Boolean = false
+    def isType: _root_.scala.Boolean = false
+    def isParameter: _root_.scala.Boolean = false
+    def isSelfParameter: _root_.scala.Boolean = false
+    def isTypeParameter: _root_.scala.Boolean = false
+    def isObject: _root_.scala.Boolean = false
+    def isPackage: _root_.scala.Boolean = false
+    def isPackageObject: _root_.scala.Boolean = false
+    def isClass: _root_.scala.Boolean = false
+    def isTrait: _root_.scala.Boolean = false
+    def isInterface: _root_.scala.Boolean = false
+    
+    final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.SymbolInformation.Kind.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.SymbolInformation.Kind.Recognized])
+  }
+  
+  object Kind  {
+    // Companion for the generated `SymbolInformation.Kind` protobuf enum.
+    // `Recognized` marks the cases known at code-generation time; any other
+    // wire value is preserved as `Unrecognized` rather than rejected.
+    sealed trait Recognized extends Kind
+    
+    @SerialVersionUID(0L)
+    case object UNKNOWN_KIND extends Kind(0) with Kind.Recognized {
+      val index = 0
+      val name = "UNKNOWN_KIND"
+      override def isUnknownKind: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object LOCAL extends Kind(19) with Kind.Recognized {
+      val index = 1
+      val name = "LOCAL"
+      override def isLocal: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object FIELD extends Kind(20) with Kind.Recognized {
+      val index = 2
+      val name = "FIELD"
+      override def isField: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object METHOD extends Kind(3) with Kind.Recognized {
+      val index = 3
+      val name = "METHOD"
+      override def isMethod: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object CONSTRUCTOR extends Kind(21) with Kind.Recognized {
+      val index = 4
+      val name = "CONSTRUCTOR"
+      override def isConstructor: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object MACRO extends Kind(6) with Kind.Recognized {
+      val index = 5
+      val name = "MACRO"
+      override def isMacro: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object TYPE extends Kind(7) with Kind.Recognized {
+      val index = 6
+      val name = "TYPE"
+      override def isType: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object PARAMETER extends Kind(8) with Kind.Recognized {
+      val index = 7
+      val name = "PARAMETER"
+      override def isParameter: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object SELF_PARAMETER extends Kind(17) with Kind.Recognized {
+      val index = 8
+      val name = "SELF_PARAMETER"
+      override def isSelfParameter: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object TYPE_PARAMETER extends Kind(9) with Kind.Recognized {
+      val index = 9
+      val name = "TYPE_PARAMETER"
+      override def isTypeParameter: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object OBJECT extends Kind(10) with Kind.Recognized {
+      val index = 10
+      val name = "OBJECT"
+      override def isObject: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object PACKAGE extends Kind(11) with Kind.Recognized {
+      val index = 11
+      val name = "PACKAGE"
+      override def isPackage: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object PACKAGE_OBJECT extends Kind(12) with Kind.Recognized {
+      val index = 12
+      val name = "PACKAGE_OBJECT"
+      override def isPackageObject: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object CLASS extends Kind(13) with Kind.Recognized {
+      val index = 13
+      val name = "CLASS"
+      override def isClass: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object TRAIT extends Kind(14) with Kind.Recognized {
+      val index = 14
+      val name = "TRAIT"
+      override def isTrait: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object INTERFACE extends Kind(18) with Kind.Recognized {
+      val index = 15
+      val name = "INTERFACE"
+      override def isInterface: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    final case class Unrecognized(unrecognizedValue: _root_.scala.Int)  extends Kind(unrecognizedValue) with SemanticdbUnrecognizedEnum
+    
+    // All recognized cases, in declaration (`index`) order. Note that the
+    // proto wire `value` can differ from `index` (e.g. LOCAL is value 19,
+    // index 1).
+    lazy val values = scala.collection.immutable.Seq(UNKNOWN_KIND, LOCAL, FIELD, METHOD, CONSTRUCTOR, MACRO, TYPE, PARAMETER, SELF_PARAMETER, TYPE_PARAMETER, OBJECT, PACKAGE, PACKAGE_OBJECT, CLASS, TRAIT, INTERFACE)
+    // Decodes a proto wire value into a Kind; unknown values are kept as
+    // `Unrecognized` instead of raising, matching proto3 enum semantics.
+    def fromValue(__value: _root_.scala.Int): Kind = __value match {
+      case 0 => UNKNOWN_KIND
+      case 3 => METHOD
+      case 6 => MACRO
+      case 7 => TYPE
+      case 8 => PARAMETER
+      case 9 => TYPE_PARAMETER
+      case 10 => OBJECT
+      case 11 => PACKAGE
+      case 12 => PACKAGE_OBJECT
+      case 13 => CLASS
+      case 14 => TRAIT
+      case 17 => SELF_PARAMETER
+      case 18 => INTERFACE
+      case 19 => LOCAL
+      case 20 => FIELD
+      case 21 => CONSTRUCTOR
+      case __other => Unrecognized(__other)
+    }
+    
+    
+  }
+  // Generated protobuf enum base: a symbol property (modifier) such as FINAL
+  // or IMPLICIT. Every `isX` predicate defaults to false here and is
+  // overridden to true by exactly one case object in the companion.
+  sealed abstract class Property(val value: _root_.scala.Int)  extends SemanticdbGeneratedEnum  derives CanEqual {
+    type EnumType = Property
+    def isUnknownProperty: _root_.scala.Boolean = false
+    def isAbstract: _root_.scala.Boolean = false
+    def isFinal: _root_.scala.Boolean = false
+    def isSealed: _root_.scala.Boolean = false
+    def isImplicit: _root_.scala.Boolean = false
+    def isLazy: _root_.scala.Boolean = false
+    def isCase: _root_.scala.Boolean = false
+    def isCovariant: _root_.scala.Boolean = false
+    def isContravariant: _root_.scala.Boolean = false
+    def isVal: _root_.scala.Boolean = false
+    def isVar: _root_.scala.Boolean = false
+    def isStatic: _root_.scala.Boolean = false
+    def isPrimary: _root_.scala.Boolean = false
+    def isEnum: _root_.scala.Boolean = false
+    def isDefault: _root_.scala.Boolean = false
+    def isGiven: _root_.scala.Boolean = false
+    def isInline: _root_.scala.Boolean = false
+    def isOpen: _root_.scala.Boolean = false
+    def isTransparent: _root_.scala.Boolean = false
+    def isInfix: _root_.scala.Boolean = false
+    def isOpaque: _root_.scala.Boolean = false
+    
+    // Narrows to a known case; None if this is an `Unrecognized` wire value.
+    final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.SymbolInformation.Property.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.SymbolInformation.Property.Recognized])
+  }
+  
+  object Property  {
+    // Companion for the generated `SymbolInformation.Property` protobuf
+    // enum. Every recognized value (except UNKNOWN_PROPERTY = 0) is a
+    // distinct power of two, so properties can be OR-ed together into the
+    // single `properties: Int` bitmask carried by SymbolInformation.
+    sealed trait Recognized extends Property
+    
+    @SerialVersionUID(0L)
+    case object UNKNOWN_PROPERTY extends Property(0) with Property.Recognized {
+      val index = 0
+      val name = "UNKNOWN_PROPERTY"
+      override def isUnknownProperty: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object ABSTRACT extends Property(4) with Property.Recognized {
+      val index = 1
+      val name = "ABSTRACT"
+      override def isAbstract: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object FINAL extends Property(8) with Property.Recognized {
+      val index = 2
+      val name = "FINAL"
+      override def isFinal: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object SEALED extends Property(16) with Property.Recognized {
+      val index = 3
+      val name = "SEALED"
+      override def isSealed: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object IMPLICIT extends Property(32) with Property.Recognized {
+      val index = 4
+      val name = "IMPLICIT"
+      override def isImplicit: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object LAZY extends Property(64) with Property.Recognized {
+      val index = 5
+      val name = "LAZY"
+      override def isLazy: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object CASE extends Property(128) with Property.Recognized {
+      val index = 6
+      val name = "CASE"
+      override def isCase: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object COVARIANT extends Property(256) with Property.Recognized {
+      val index = 7
+      val name = "COVARIANT"
+      override def isCovariant: _root_.scala.Boolean = true
     }
+    
+    @SerialVersionUID(0L)
+    case object CONTRAVARIANT extends Property(512) with Property.Recognized {
+      val index = 8
+      val name = "CONTRAVARIANT"
+      override def isContravariant: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object VAL extends Property(1024) with Property.Recognized {
+      val index = 9
+      val name = "VAL"
+      override def isVal: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object VAR extends Property(2048) with Property.Recognized {
+      val index = 10
+      val name = "VAR"
+      override def isVar: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object STATIC extends Property(4096) with Property.Recognized {
+      val index = 11
+      val name = "STATIC"
+      override def isStatic: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object PRIMARY extends Property(8192) with Property.Recognized {
+      val index = 12
+      val name = "PRIMARY"
+      override def isPrimary: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object ENUM extends Property(16384) with Property.Recognized {
+      val index = 13
+      val name = "ENUM"
+      override def isEnum: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object DEFAULT extends Property(32768) with Property.Recognized {
+      val index = 14
+      val name = "DEFAULT"
+      override def isDefault: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object GIVEN extends Property(65536) with Property.Recognized {
+      val index = 15
+      val name = "GIVEN"
+      override def isGiven: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object INLINE extends Property(131072) with Property.Recognized {
+      val index = 16
+      val name = "INLINE"
+      override def isInline: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object OPEN extends Property(262144) with Property.Recognized {
+      val index = 17
+      val name = "OPEN"
+      override def isOpen: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object TRANSPARENT extends Property(524288) with Property.Recognized {
+      val index = 18
+      val name = "TRANSPARENT"
+      override def isTransparent: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object INFIX extends Property(1048576) with Property.Recognized {
+      val index = 19
+      val name = "INFIX"
+      override def isInfix: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object OPAQUE extends Property(2097152) with Property.Recognized {
+      val index = 20
+      val name = "OPAQUE"
+      override def isOpaque: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    final case class Unrecognized(unrecognizedValue: _root_.scala.Int)  extends Property(unrecognizedValue) with SemanticdbUnrecognizedEnum
+    
+    // All recognized cases in declaration (`index`) order.
+    lazy val values = scala.collection.immutable.Seq(UNKNOWN_PROPERTY, ABSTRACT, FINAL, SEALED, IMPLICIT, LAZY, CASE, COVARIANT, CONTRAVARIANT, VAL, VAR, STATIC, PRIMARY, ENUM, DEFAULT, GIVEN, INLINE, OPEN, TRANSPARENT, INFIX, OPAQUE)
+    // Decodes a single wire value; unknown values become `Unrecognized`.
+    def fromValue(__value: _root_.scala.Int): Property = __value match {
+      case 0 => UNKNOWN_PROPERTY
+      case 4 => ABSTRACT
+      case 8 => FINAL
+      case 16 => SEALED
+      case 32 => IMPLICIT
+      case 64 => LAZY
+      case 128 => CASE
+      case 256 => COVARIANT
+      case 512 => CONTRAVARIANT
+      case 1024 => VAL
+      case 2048 => VAR
+      case 4096 => STATIC
+      case 8192 => PRIMARY
+      case 16384 => ENUM
+      case 32768 => DEFAULT
+      case 65536 => GIVEN
+      case 131072 => INLINE
+      case 262144 => OPEN
+      case 524288 => TRANSPARENT
+      case 1048576 => INFIX
+      case 2097152 => OPAQUE
+      case __other => Unrecognized(__other)
+    }
+    
+    
+  }
+  // Protobuf field numbers (wire tags) of SymbolInformation's fields.
+  final val SYMBOL_FIELD_NUMBER = 1
+  final val LANGUAGE_FIELD_NUMBER = 16
+  final val KIND_FIELD_NUMBER = 3
+  final val PROPERTIES_FIELD_NUMBER = 4
+  final val DISPLAY_NAME_FIELD_NUMBER = 5
+  final val SIGNATURE_FIELD_NUMBER = 17
+  final val ANNOTATIONS_FIELD_NUMBER = 13
+  final val ACCESS_FIELD_NUMBER = 18
+  final val OVERRIDDEN_SYMBOLS_FIELD_NUMBER = 19
+  final val DOCUMENTATION_FIELD_NUMBER = 20
+  // Bridges between the generated wire representations (SignatureMessage,
+  // AccessMessage) and the user-facing ADTs (Signature, Access).
+  @transient @sharable
+  private[semanticdb] val _typemapper_signature: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.SignatureMessage, dotty.tools.dotc.semanticdb.Signature] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.SignatureMessage, dotty.tools.dotc.semanticdb.Signature]]
+  @transient @sharable
+  private[semanticdb] val _typemapper_access: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.AccessMessage, dotty.tools.dotc.semanticdb.Access] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.AccessMessage, dotty.tools.dotc.semanticdb.Access]]
+  // Convenience constructor mirroring the case-class `apply`, with all
+  // fields passed positionally.
+  def of(
+    symbol: _root_.scala.Predef.String,
+    language: dotty.tools.dotc.semanticdb.Language,
+    kind: dotty.tools.dotc.semanticdb.SymbolInformation.Kind,
+    properties: _root_.scala.Int,
+    displayName: _root_.scala.Predef.String,
+    signature: dotty.tools.dotc.semanticdb.Signature,
+    annotations: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Annotation],
+    access: dotty.tools.dotc.semanticdb.Access,
+    overriddenSymbols: _root_.scala.Seq[_root_.scala.Predef.String],
+    documentation: _root_.scala.Option[dotty.tools.dotc.semanticdb.Documentation]
+  ): _root_.dotty.tools.dotc.semanticdb.SymbolInformation = _root_.dotty.tools.dotc.semanticdb.SymbolInformation(
+    symbol,
+    language,
+    kind,
+    properties,
+    displayName,
+    signature,
+    annotations,
+    access,
+    overriddenSymbols,
+    documentation
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SymbolInformation])
 }
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/SymbolOccurrence.scala b/compiler/src/dotty/tools/dotc/semanticdb/SymbolOccurrence.scala
index 3123a54edd56..ea5ecfc73147 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/SymbolOccurrence.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/SymbolOccurrence.scala
@@ -1,117 +1,177 @@
-package dotty.tools.dotc.semanticdb
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
 
+package dotty.tools.dotc.semanticdb
 import dotty.tools.dotc.semanticdb.internal._
 import scala.annotation.internal.sharable
 
-object SymbolOccurrence {
-
-  sealed trait Role(val value: Int) extends SemanticdbEnum derives CanEqual {
-    def isDefinition: Boolean = this == Role.DEFINITION
-    def isReference: Boolean = this == Role.REFERENCE
-  }
-
-  object Role {
-
-    case object UNKNOWN_ROLE extends Role(0)
-    case object REFERENCE extends Role(1)
-    case object DEFINITION extends Role(2)
-    final case class Unrecognized(id: Int) extends Role(id)
-
-    def fromValue(value: Int): Role = value match {
-      case 0 => UNKNOWN_ROLE
-      case 1 => REFERENCE
-      case 2 => DEFINITION
-      case id => Unrecognized(id)
-    }
-
-  }
-
-  val defaultInstance: SymbolOccurrence = SymbolOccurrence("", None, Role.UNKNOWN_ROLE)
-}
-
+@SerialVersionUID(0L)
 final case class SymbolOccurrence(
-  symbol: String,
-  range: Option[Range],
-  role: SymbolOccurrence.Role
-) extends SemanticdbMessage[SymbolOccurrence] derives CanEqual {
-  @sharable
-  private var __serializedSizeCachedValue: Int = 0
-  private def __computeSerializedValue(): Int = {
-    var __size = 0
-    if (range.isDefined) {
-      val __value = range.get
-      __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(
-        __value.serializedSize
-      ) + __value.serializedSize
-    };
-
-    {
-      val __value = symbol
-      if (__value != "") {
-        __size += SemanticdbOutputStream
-          .computeStringSize(2, __value)
-      }
-    };
-
-    {
-      val __value = role
-      if (__value != SymbolOccurrence.Role.UNKNOWN_ROLE) {
-        __size += SemanticdbOutputStream
-          .computeEnumSize(3, __value.value)
-      }
-    };
-    __size
-  }
-  final override def serializedSize: Int = {
-    var read = __serializedSizeCachedValue
-    if (read == 0) {
-      read = __computeSerializedValue()
-      __serializedSizeCachedValue = read
+    range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None,
+    symbol: _root_.scala.Predef.String = "",
+    role: dotty.tools.dotc.semanticdb.SymbolOccurrence.Role = dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.UNKNOWN_ROLE
+    )  extends SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      if (range.isDefined) {
+        val __value = range.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      
+      {
+        val __value = symbol
+        if (!__value.isEmpty) {
+          __size += SemanticdbOutputStream.computeStringSize(2, __value)
+        }
+      };
+      
+      {
+        val __value = role.value
+        if (__value != 0) {
+          __size += SemanticdbOutputStream.computeEnumSize(3, __value)
+        }
+      };
+      __size
     }
-    read
-  }
-  def writeTo(`_output__`: SemanticdbOutputStream): Unit = {
-    range.foreach { __v =>
-      val __m = __v
-      _output__.writeTag(1, 2)
-      _output__.writeUInt32NoTag(__m.serializedSize)
-      __m.writeTo(_output__)
-    };
-    {
-      val __v = symbol
-      if (__v != "") {
-        _output__.writeString(2, __v)
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
       }
-    };
-    {
-      val __v = role
-      if (__v != SymbolOccurrence.Role.UNKNOWN_ROLE) {
-        _output__.writeEnum(3, __v.value)
-      }
-    };
-  }
-  def mergeFrom(`_input__`: SemanticdbInputStream): SymbolOccurrence = {
-    var __range = this.range
-    var __symbol = this.symbol
-    var __role = this.role
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      range.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(1, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      {
+        val __v = symbol
+        if (!__v.isEmpty) {
+          _output__.writeString(2, __v)
+        }
+      };
+      {
+        val __v = role.value
+        if (__v != 0) {
+          _output__.writeEnum(3, __v)
+        }
+      };
+    }
+    def getRange: dotty.tools.dotc.semanticdb.Range = range.getOrElse(dotty.tools.dotc.semanticdb.Range.defaultInstance)
+    def clearRange: SymbolOccurrence = copy(range = _root_.scala.None)
+    def withRange(__v: dotty.tools.dotc.semanticdb.Range): SymbolOccurrence = copy(range = Option(__v))
+    def withSymbol(__v: _root_.scala.Predef.String): SymbolOccurrence = copy(symbol = __v)
+    def withRole(__v: dotty.tools.dotc.semanticdb.SymbolOccurrence.Role): SymbolOccurrence = copy(role = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SymbolOccurrence])
+}
+
+object SymbolOccurrence  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SymbolOccurrence] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SymbolOccurrence] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SymbolOccurrence = {
+    var __range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None
+    var __symbol: _root_.scala.Predef.String = ""
+    var __role: dotty.tools.dotc.semanticdb.SymbolOccurrence.Role = dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.UNKNOWN_ROLE
     var _done__ = false
     while (!_done__) {
       val _tag__ = _input__.readTag()
       _tag__ match {
         case 0 => _done__ = true
         case 10 =>
-          __range = Option(LiteParser.readMessage(_input__, __range.getOrElse(Range.defaultInstance)))
+          __range = Option(__range.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Range](_input__))(LiteParser.readMessage(_input__, _)))
         case 18 =>
-          __symbol = _input__.readString()
+          __symbol = _input__.readStringRequireUtf8()
         case 24 =>
-          __role = SymbolOccurrence.Role.fromValue(_input__.readEnum())
+          __role = dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.fromValue(_input__.readEnum())
         case tag => _input__.skipField(tag)
       }
     }
-    SymbolOccurrence(
-      range = __range,
-      symbol = __symbol,
-      role = __role
+    dotty.tools.dotc.semanticdb.SymbolOccurrence(
+        range = __range,
+        symbol = __symbol,
+        role = __role
     )
   }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.SymbolOccurrence(
+    range = _root_.scala.None,
+    symbol = "",
+    role = dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.UNKNOWN_ROLE
+  )
+  // Generated protobuf enum base: the role of a symbol occurrence (whether
+  // the occurrence defines or references the symbol). Each `isX` predicate
+  // defaults to false and is overridden by exactly one case object.
+  sealed abstract class Role(val value: _root_.scala.Int)  extends SemanticdbGeneratedEnum  derives CanEqual {
+    type EnumType = Role
+    def isUnknownRole: _root_.scala.Boolean = false
+    def isReference: _root_.scala.Boolean = false
+    def isDefinition: _root_.scala.Boolean = false
+    
+    // Narrows to a known case; None if this is an `Unrecognized` wire value.
+    final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.Recognized])
+  }
+  
+  object Role  {
+    // Companion for the generated `SymbolOccurrence.Role` protobuf enum;
+    // `Recognized` marks cases known at generation time.
+    sealed trait Recognized extends Role
+    
+    @SerialVersionUID(0L)
+    case object UNKNOWN_ROLE extends Role(0) with Role.Recognized {
+      val index = 0
+      val name = "UNKNOWN_ROLE"
+      override def isUnknownRole: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object REFERENCE extends Role(1) with Role.Recognized {
+      val index = 1
+      val name = "REFERENCE"
+      override def isReference: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    case object DEFINITION extends Role(2) with Role.Recognized {
+      val index = 2
+      val name = "DEFINITION"
+      override def isDefinition: _root_.scala.Boolean = true
+    }
+    
+    @SerialVersionUID(0L)
+    final case class Unrecognized(unrecognizedValue: _root_.scala.Int)  extends Role(unrecognizedValue) with SemanticdbUnrecognizedEnum
+    
+    // All recognized cases in declaration (`index`) order.
+    lazy val values = scala.collection.immutable.Seq(UNKNOWN_ROLE, REFERENCE, DEFINITION)
+    // Decodes a wire value; unknown values become `Unrecognized(value)`.
+    def fromValue(__value: _root_.scala.Int): Role = __value match {
+      case 0 => UNKNOWN_ROLE
+      case 1 => REFERENCE
+      case 2 => DEFINITION
+      case __other => Unrecognized(__other)
+    }
+    
+    
+  }
+  // Protobuf field numbers (wire tags) of SymbolOccurrence's fields.
+  final val RANGE_FIELD_NUMBER = 1
+  final val SYMBOL_FIELD_NUMBER = 2
+  final val ROLE_FIELD_NUMBER = 3
+  // Convenience constructor mirroring the case-class `apply`.
+  def of(
+    range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range],
+    symbol: _root_.scala.Predef.String,
+    role: dotty.tools.dotc.semanticdb.SymbolOccurrence.Role
+  ): _root_.dotty.tools.dotc.semanticdb.SymbolOccurrence = _root_.dotty.tools.dotc.semanticdb.SymbolOccurrence(
+    range,
+    symbol,
+    role
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SymbolOccurrence])
 }
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Synthetic.scala b/compiler/src/dotty/tools/dotc/semanticdb/Synthetic.scala
new file mode 100644
index 000000000000..ab3301f209cb
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Synthetic.scala
@@ -0,0 +1,112 @@
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
+
+package dotty.tools.dotc.semanticdb
+import dotty.tools.dotc.semanticdb.internal._
+import scala.annotation.internal.sharable
+
+// Generated SemanticDB message: a compiler-synthesized tree (`tree`)
+// attached to the source position `range` it was synthesized for.
+@SerialVersionUID(0L)
+final case class Synthetic(
+    range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None,
+    tree: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)
+    )  extends SemanticdbGeneratedMessage  derives CanEqual {
+    // Cached serialized size; 0 is the "not yet computed" sentinel, filled
+    // in lazily by `serializedSize`.
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    // Computes the proto wire size; fields equal to their default value
+    // contribute nothing (they are not written, per proto3).
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      if (range.isDefined) {
+        val __value = range.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toBase(tree)
+        if (__value != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    // Serializes this message in protobuf wire format; default-valued
+    // fields are skipped entirely.
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      range.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(1, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      {
+        val __v = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toBase(tree)
+        if (__v != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) {
+          _output__.writeTag(2, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+    }
+    // Accessors/updaters in the style generated for all messages: getX
+    // falls back to the default instance, withX/clearX return copies.
+    def getRange: dotty.tools.dotc.semanticdb.Range = range.getOrElse(dotty.tools.dotc.semanticdb.Range.defaultInstance)
+    def clearRange: Synthetic = copy(range = _root_.scala.None)
+    def withRange(__v: dotty.tools.dotc.semanticdb.Range): Synthetic = copy(range = Option(__v))
+    def withTree(__v: dotty.tools.dotc.semanticdb.Tree): Synthetic = copy(tree = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Synthetic])
+}
+
+object Synthetic  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Synthetic] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Synthetic] = this
+  // Decodes one Synthetic message from the stream. Tag dispatch: 0 marks
+  // end of input, 10 is field 1 (`range`, length-delimited), 18 is field 2
+  // (`tree`, length-delimited); any other field is skipped.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Synthetic = {
+    var __range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None
+    var __tree: _root_.scala.Option[dotty.tools.dotc.semanticdb.TreeMessage] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __range = Option(__range.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Range](_input__))(LiteParser.readMessage(_input__, _)))
+        case 18 =>
+          __tree = _root_.scala.Some(__tree.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TreeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.Synthetic(
+        range = __range,
+        tree = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toCustom(__tree.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance))
+    )
+  }
+  
+  
+  
+  
+  
+  
+  // Message with every field at its proto3 default value.
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.Synthetic(
+    range = _root_.scala.None,
+    tree = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)
+  )
+  final val RANGE_FIELD_NUMBER = 1
+  final val TREE_FIELD_NUMBER = 2
+  // Bridge between the generated wire representation (TreeMessage) and the
+  // user-facing `Tree` ADT.
+  @transient @sharable
+  private[semanticdb] val _typemapper_tree: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree]]
+  // Convenience constructor mirroring the case-class `apply`.
+  def of(
+    range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range],
+    tree: dotty.tools.dotc.semanticdb.Tree
+  ): _root_.dotty.tools.dotc.semanticdb.Synthetic = _root_.dotty.tools.dotc.semanticdb.Synthetic(
+    range,
+    tree
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Synthetic])
+}
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala b/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala
new file mode 100644
index 000000000000..0818b7cf1be1
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/SyntheticsExtractor.scala
@@ -0,0 +1,138 @@
+package dotty.tools.dotc.semanticdb
+
+import dotty.tools.dotc.ast.tpd._
+import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Flags._
+import dotty.tools.dotc.core.StdNames.nme
+import dotty.tools.dotc.{semanticdb => s}
+
+import scala.collection.mutable
+
+class SyntheticsExtractor:
+  import Scala3.{_, given}
+
+  // Trees for which a synthetic has already been produced; prevents
+  // emitting duplicates when the same node is offered again.
+  // NOTE(review): entries are never removed, so an extractor instance is
+  // presumably used per compilation unit -- confirm with the caller.
+  val visited = collection.mutable.HashSet[Tree]()
+
+  /** Returns the SemanticDB synthetic describing `tree`, if `tree` has one
+   *  of the compiler-generated shapes handled below:
+   *   - a `TypeApply` with a synthetic span (inferred type arguments),
+   *   - an `Apply` whose arguments are all synthetic given/implicit args,
+   *   - an `Apply` of an implicit method (an applied implicit conversion).
+   *  Returns None for already-visited trees and for for-comprehension
+   *  desugarings (see `isForSynthetic`), which are not yet supported.
+   */
+  def tryFindSynthetic(tree: Tree)(using Context, SemanticSymbolBuilder, TypeOps): Option[s.Synthetic] =
+    extension (synth: s.Synthetic)
+      def toOpt: Some[s.Synthetic] = Some(synth)
+
+    val forSynthetic = tree match // not yet supported (for synthetics)
+      case tree: Apply if isForSynthetic(tree) => true
+      case tree: TypeApply if isForSynthetic(tree) => true
+      case _ => false
+
+    if visited.contains(tree) || forSynthetic then None
+    else
+      tree match
+        // Inferred type application, e.g. `List(1,2,3)` elaborated to
+        // `List.apply[Int](1,2,3)`. Zero-extent spans and Scala 2 type
+        // arguments are excluded -- NOTE(review): the rationale for the
+        // Scala2x exclusion is not visible here; confirm before changing.
+        // This is the only branch recorded in `visited`.
+        case tree: TypeApply
+          if tree.span.isSynthetic &&
+            tree.args.forall(arg => !arg.symbol.is(Scala2x)) &&
+            !tree.span.isZeroExtent =>
+          visited.add(tree)
+          val fnTree = tree.fun match
+            // Something like `List.apply[Int](1,2,3)`
+            case select @ Select(qual, _) if isSyntheticName(select) =>
+              s.SelectTree(
+                s.OriginalTree(range(qual.span, tree.source)),
+                Some(select.toSemanticId)
+              )
+            case _ =>
+              s.OriginalTree(
+                range(tree.fun.span, tree.source)
+              )
+          val targs = tree.args.map(targ => targ.tpe.toSemanticType(targ.symbol)(using LinkMode.SymlinkChildren))
+          s.Synthetic(
+            range(tree.span, tree.source),
+            s.TypeApplyTree(
+              fnTree, targs
+            )
+          ).toOpt
+
+        // Inferred implicit/given arguments: every argument is a given or
+        // implicit whose span is synthetic (i.e. not written by the user).
+        case tree: Apply
+          if tree.args.nonEmpty &&
+            tree.args.forall(arg =>
+              arg.symbol.isOneOf(GivenOrImplicit) &&
+              arg.span.isSynthetic
+            ) =>
+          s.Synthetic(
+            range(tree.span, tree.source),
+            s.ApplyTree(
+              tree.fun.toSemanticOriginal,
+              tree.args.map(_.toSemanticTree)
+            )
+          ).toOpt
+
+        // Applied implicit conversion: the implicit function wraps the
+        // original tree, recorded here as the conversion's sole argument.
+        case tree: Apply if tree.fun.symbol.is(Implicit) =>
+          val pos = range(tree.span, tree.source)
+          s.Synthetic(
+            pos,
+            s.ApplyTree(
+              tree.fun.toSemanticTree,
+              arguments = List(
+                s.OriginalTree(pos)
+              )
+            )
+          ).toOpt
+
+        case _ => None
+
+  // Local conversions from compiler trees to SemanticDB `s.Tree` nodes.
+  private given TreeOps: AnyRef with
+    extension (tree: Tree)
+      // Structural translation of Apply/TypeApply/Ident/Select; anything
+      // else collapses to the default (empty) s.Tree instance.
+      def toSemanticTree(using Context, SemanticSymbolBuilder, TypeOps): s.Tree =
+        tree match
+          case tree: Apply =>
+            s.ApplyTree(
+              tree.fun.toSemanticQualifierTree,
+              tree.args.map(_.toSemanticTree)
+            )
+          case tree: TypeApply =>
+            s.TypeApplyTree(
+              tree.fun.toSemanticQualifierTree,
+              tree.args.map { targ =>
+                targ.tpe.toSemanticType(targ.symbol)(using LinkMode.SymlinkChildren)
+              }
+            )
+          case tree: Ident => tree.toSemanticId
+          case tree: Select => tree.toSemanticId
+          case _ => s.Tree.defaultInstance
+
+      // Keeps the qualifier as a SelectTree only when the selection crosses
+      // into a different owner; otherwise collapses to a bare IdTree.
+      def toSemanticQualifierTree(using Context, SemanticSymbolBuilder): s.Tree = tree match
+        case sel @ Select(qual, _) if sel.symbol.owner != qual.symbol =>
+          s.SelectTree(qual.toSemanticId, Some(sel.toSemanticId))
+        case fun => fun.toSemanticId
+
+      // IdTree naming the tree's symbol.
+      def toSemanticId(using Context, SemanticSymbolBuilder) =
+        s.IdTree(tree.symbol.symbolName)
+
+      // OriginalTree covering the tree's own source range.
+      def toSemanticOriginal(using Context) =
+        s.OriginalTree(range(tree.span, tree.source))
+  end TreeOps
+
+
+  // True for the map/flatMap/withFilter/foreach selections produced by
+  // for-comprehension desugaring: the selection shares its span with its
+  // qualifier, meaning the method name was never written in source.
+  private def isForSynthetic(tree: Tree): Boolean =
+    def isForComprehensionSyntheticName(select: Select): Boolean =
+      select.span.toSynthetic == select.qualifier.span.toSynthetic && (
+        select.name == nme.map ||
+        select.name == nme.flatMap ||
+        select.name == nme.withFilter ||
+        select.name == nme.foreach
+      )
+    tree match
+      case Apply(fun, _) => isForSynthetic(fun)
+      case TypeApply(fun, _) => isForSynthetic(fun)
+      case select: Select => isForComprehensionSyntheticName(select)
+      case _ => false
+
+  // True when `select` is a compiler-inserted method selection (apply,
+  // update, unapply, ...): its span coincides with the qualifier's, so the
+  // name does not appear in the written source.
+  private def isSyntheticName(select: Select): Boolean =
+    select.span.toSynthetic == select.qualifier.span.toSynthetic && (
+      select.name == nme.apply ||
+      select.name == nme.update ||
+      select.name == nme.foreach ||
+      select.name == nme.withFilter ||
+      select.name == nme.flatMap ||
+      select.name == nme.map ||
+      select.name == nme.unapplySeq ||
+      select.name == nme.unapply)
+
+end SyntheticsExtractor
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/TextDocument.scala b/compiler/src/dotty/tools/dotc/semanticdb/TextDocument.scala
index 05801340f955..5f377b2d0274 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/TextDocument.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/TextDocument.scala
@@ -1,147 +1,270 @@
-package dotty.tools.dotc.semanticdb
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
 
+package dotty.tools.dotc.semanticdb
 import dotty.tools.dotc.semanticdb.internal._
 import scala.annotation.internal.sharable
 
-object TextDocument {
-  val defaultInstance: TextDocument = TextDocument(Schema.LEGACY, Language.UNKNOWN_LANGUAGE, "", "", "", Nil, Nil)
-}
-
+@SerialVersionUID(0L)
 final case class TextDocument(
-  schema: Schema,
-  language: Language,
-  uri: String,
-  text: String,
-  md5: String,
-  symbols: Seq[SymbolInformation],
-  occurrences: Seq[SymbolOccurrence]
-) extends SemanticdbMessage[TextDocument] derives CanEqual {
-  @sharable
-  private var __serializedSizeCachedValue: Int = 0
-  private def __computeSerializedValue(): Int = {
-    var __size = 0
-
-    {
-      val __value = schema
-      if (__value != Schema.LEGACY) {
-        __size += SemanticdbOutputStream.computeEnumSize(1, __value.value)
+    schema: dotty.tools.dotc.semanticdb.Schema = dotty.tools.dotc.semanticdb.Schema.LEGACY,
+    uri: _root_.scala.Predef.String = "",
+    text: _root_.scala.Predef.String = "",
+    md5: _root_.scala.Predef.String = "",
+    language: dotty.tools.dotc.semanticdb.Language = dotty.tools.dotc.semanticdb.Language.UNKNOWN_LANGUAGE,
+    symbols: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolInformation] = _root_.scala.Seq.empty,
+    occurrences: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolOccurrence] = _root_.scala.Seq.empty,
+    diagnostics: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Diagnostic] = _root_.scala.Seq.empty,
+    synthetics: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Synthetic] = _root_.scala.Seq.empty
+    )  extends SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = schema.value
+        if (__value != 0) {
+          __size += SemanticdbOutputStream.computeEnumSize(1, __value)
+        }
+      };
+      
+      {
+        val __value = uri
+        if (!__value.isEmpty) {
+          __size += SemanticdbOutputStream.computeStringSize(2, __value)
+        }
+      };
+      
+      {
+        val __value = text
+        if (!__value.isEmpty) {
+          __size += SemanticdbOutputStream.computeStringSize(3, __value)
+        }
+      };
+      
+      {
+        val __value = md5
+        if (!__value.isEmpty) {
+          __size += SemanticdbOutputStream.computeStringSize(11, __value)
+        }
+      };
+      
+      {
+        val __value = language.value
+        if (__value != 0) {
+          __size += SemanticdbOutputStream.computeEnumSize(10, __value)
+        }
+      };
+      symbols.foreach { __item =>
+        val __value = __item
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
       }
-    };
-
-    {
-      val __value = uri
-      if (__value != "") {
-        __size += SemanticdbOutputStream
-          .computeStringSize(2, __value)
+      occurrences.foreach { __item =>
+        val __value = __item
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
       }
-    };
-
-    {
-      val __value = md5
-      if (__value != "") {
-        __size += SemanticdbOutputStream.computeStringSize(11, __value)
+      diagnostics.foreach { __item =>
+        val __value = __item
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
       }
-    };
-
-    {
-      val __value = language
-      if (__value != Language.UNKNOWN_LANGUAGE) {
-        __size += SemanticdbOutputStream.computeEnumSize(10, __value.value)
+      synthetics.foreach { __item =>
+        val __value = __item
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
       }
-    };
-    symbols.foreach { __item =>
-      val __value = __item
-      __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      __size
     }
-    occurrences.foreach { __item =>
-      val __value = __item
-      __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(
-        __value.serializedSize
-      ) + __value.serializedSize
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
     }
-    __size
-  }
-  final override def serializedSize: Int = {
-    var read = __serializedSizeCachedValue
-    if (read == 0) {
-      read = __computeSerializedValue()
-      __serializedSizeCachedValue = read
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = schema.value
+        if (__v != 0) {
+          _output__.writeEnum(1, __v)
+        }
+      };
+      {
+        val __v = uri
+        if (!__v.isEmpty) {
+          _output__.writeString(2, __v)
+        }
+      };
+      {
+        val __v = text
+        if (!__v.isEmpty) {
+          _output__.writeString(3, __v)
+        }
+      };
+      symbols.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(5, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      occurrences.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(6, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      diagnostics.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(7, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      {
+        val __v = language.value
+        if (__v != 0) {
+          _output__.writeEnum(10, __v)
+        }
+      };
+      {
+        val __v = md5
+        if (!__v.isEmpty) {
+          _output__.writeString(11, __v)
+        }
+      };
+      synthetics.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(12, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
     }
-    read
-  }
-  def writeTo(`_output__`: SemanticdbOutputStream): Unit = {
-    {
-      val __v = schema
-      if (__v != Schema.LEGACY) {
-        _output__.writeEnum(1, __v.value)
-      }
-    };
-    {
-      val __v = uri
-      if (__v != "") {
-        _output__.writeString(2, __v)
-      }
-    };
-    symbols.foreach { __v =>
-      val __m = __v
-      _output__.writeTag(5, 2)
-      _output__.writeUInt32NoTag(__m.serializedSize)
-      __m.writeTo(_output__)
-    };
-    occurrences.foreach { __v =>
-      val __m = __v
-      _output__.writeTag(6, 2)
-      _output__.writeUInt32NoTag(__m.serializedSize)
-      __m.writeTo(_output__)
-    };
-    {
-      val __v = language
-      if (__v != Language.UNKNOWN_LANGUAGE) {
-        _output__.writeEnum(10, __v.value)
-      }
-    };
-    {
-      val __v = md5
-      if (__v != "") {
-        _output__.writeString(11, __v)
-      }
-    };
-  }
-  def mergeFrom(`_input__`: SemanticdbInputStream): TextDocument = {
-    var __schema = this.schema
-    var __uri = this.uri
-    var __md5 = this.md5
-    var __language = this.language
-    val __symbols = (Vector.newBuilder[SymbolInformation] ++= this.symbols)
-    val __occurrences = (Vector.newBuilder[SymbolOccurrence] ++= this.occurrences)
+    def withSchema(__v: dotty.tools.dotc.semanticdb.Schema): TextDocument = copy(schema = __v)
+    def withUri(__v: _root_.scala.Predef.String): TextDocument = copy(uri = __v)
+    def withText(__v: _root_.scala.Predef.String): TextDocument = copy(text = __v)
+    def withMd5(__v: _root_.scala.Predef.String): TextDocument = copy(md5 = __v)
+    def withLanguage(__v: dotty.tools.dotc.semanticdb.Language): TextDocument = copy(language = __v)
+    def clearSymbols = copy(symbols = _root_.scala.Seq.empty)
+    def addSymbols(__vs: dotty.tools.dotc.semanticdb.SymbolInformation*): TextDocument = addAllSymbols(__vs)
+    def addAllSymbols(__vs: Iterable[dotty.tools.dotc.semanticdb.SymbolInformation]): TextDocument = copy(symbols = symbols ++ __vs)
+    def withSymbols(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolInformation]): TextDocument = copy(symbols = __v)
+    def clearOccurrences = copy(occurrences = _root_.scala.Seq.empty)
+    def addOccurrences(__vs: dotty.tools.dotc.semanticdb.SymbolOccurrence*): TextDocument = addAllOccurrences(__vs)
+    def addAllOccurrences(__vs: Iterable[dotty.tools.dotc.semanticdb.SymbolOccurrence]): TextDocument = copy(occurrences = occurrences ++ __vs)
+    def withOccurrences(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolOccurrence]): TextDocument = copy(occurrences = __v)
+    def clearDiagnostics = copy(diagnostics = _root_.scala.Seq.empty)
+    def addDiagnostics(__vs: dotty.tools.dotc.semanticdb.Diagnostic*): TextDocument = addAllDiagnostics(__vs)
+    def addAllDiagnostics(__vs: Iterable[dotty.tools.dotc.semanticdb.Diagnostic]): TextDocument = copy(diagnostics = diagnostics ++ __vs)
+    def withDiagnostics(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Diagnostic]): TextDocument = copy(diagnostics = __v)
+    def clearSynthetics = copy(synthetics = _root_.scala.Seq.empty)
+    def addSynthetics(__vs: dotty.tools.dotc.semanticdb.Synthetic*): TextDocument = addAllSynthetics(__vs)
+    def addAllSynthetics(__vs: Iterable[dotty.tools.dotc.semanticdb.Synthetic]): TextDocument = copy(synthetics = synthetics ++ __vs)
+    def withSynthetics(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Synthetic]): TextDocument = copy(synthetics = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TextDocument])
+}
+
+object TextDocument  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TextDocument] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TextDocument] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TextDocument = {
+    var __schema: dotty.tools.dotc.semanticdb.Schema = dotty.tools.dotc.semanticdb.Schema.LEGACY
+    var __uri: _root_.scala.Predef.String = ""
+    var __text: _root_.scala.Predef.String = ""
+    var __md5: _root_.scala.Predef.String = ""
+    var __language: dotty.tools.dotc.semanticdb.Language = dotty.tools.dotc.semanticdb.Language.UNKNOWN_LANGUAGE
+    val __symbols: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.SymbolInformation] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.SymbolInformation]
+    val __occurrences: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.SymbolOccurrence] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.SymbolOccurrence]
+    val __diagnostics: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Diagnostic] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Diagnostic]
+    val __synthetics: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Synthetic] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Synthetic]
     var _done__ = false
     while (!_done__) {
       val _tag__ = _input__.readTag()
       _tag__ match {
         case 0 => _done__ = true
         case 8 =>
-          __schema = Schema.fromValue(_input__.readEnum())
+          __schema = dotty.tools.dotc.semanticdb.Schema.fromValue(_input__.readEnum())
         case 18 =>
-          __uri = _input__.readString()
+          __uri = _input__.readStringRequireUtf8()
+        case 26 =>
+          __text = _input__.readStringRequireUtf8()
         case 90 =>
-          __md5 = _input__.readString()
+          __md5 = _input__.readStringRequireUtf8()
         case 80 =>
-          __language = Language.fromValue(_input__.readEnum())
+          __language = dotty.tools.dotc.semanticdb.Language.fromValue(_input__.readEnum())
         case 42 =>
-          __symbols += LiteParser.readMessage(_input__, SymbolInformation.defaultInstance)
+          __symbols += LiteParser.readMessage[dotty.tools.dotc.semanticdb.SymbolInformation](_input__)
         case 50 =>
-          __occurrences += LiteParser.readMessage(_input__, SymbolOccurrence.defaultInstance)
+          __occurrences += LiteParser.readMessage[dotty.tools.dotc.semanticdb.SymbolOccurrence](_input__)
+        case 58 =>
+          __diagnostics += LiteParser.readMessage[dotty.tools.dotc.semanticdb.Diagnostic](_input__)
+        case 98 =>
+          __synthetics += LiteParser.readMessage[dotty.tools.dotc.semanticdb.Synthetic](_input__)
         case tag => _input__.skipField(tag)
       }
     }
-    TextDocument(
-      schema = __schema,
-      uri = __uri,
-      text = "",
-      md5 = __md5,
-      language = __language,
-      symbols = __symbols.result(),
-      occurrences = __occurrences.result(),
+    dotty.tools.dotc.semanticdb.TextDocument(
+        schema = __schema,
+        uri = __uri,
+        text = __text,
+        md5 = __md5,
+        language = __language,
+        symbols = __symbols.result(),
+        occurrences = __occurrences.result(),
+        diagnostics = __diagnostics.result(),
+        synthetics = __synthetics.result()
     )
   }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.TextDocument(
+    schema = dotty.tools.dotc.semanticdb.Schema.LEGACY,
+    uri = "",
+    text = "",
+    md5 = "",
+    language = dotty.tools.dotc.semanticdb.Language.UNKNOWN_LANGUAGE,
+    symbols = _root_.scala.Seq.empty,
+    occurrences = _root_.scala.Seq.empty,
+    diagnostics = _root_.scala.Seq.empty,
+    synthetics = _root_.scala.Seq.empty
+  )
+  final val SCHEMA_FIELD_NUMBER = 1
+  final val URI_FIELD_NUMBER = 2
+  final val TEXT_FIELD_NUMBER = 3
+  final val MD5_FIELD_NUMBER = 11
+  final val LANGUAGE_FIELD_NUMBER = 10
+  final val SYMBOLS_FIELD_NUMBER = 5
+  final val OCCURRENCES_FIELD_NUMBER = 6
+  final val DIAGNOSTICS_FIELD_NUMBER = 7
+  final val SYNTHETICS_FIELD_NUMBER = 12
+  def of(
+    schema: dotty.tools.dotc.semanticdb.Schema,
+    uri: _root_.scala.Predef.String,
+    text: _root_.scala.Predef.String,
+    md5: _root_.scala.Predef.String,
+    language: dotty.tools.dotc.semanticdb.Language,
+    symbols: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolInformation],
+    occurrences: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolOccurrence],
+    diagnostics: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Diagnostic],
+    synthetics: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Synthetic]
+  ): _root_.dotty.tools.dotc.semanticdb.TextDocument = _root_.dotty.tools.dotc.semanticdb.TextDocument(
+    schema,
+    uri,
+    text,
+    md5,
+    language,
+    symbols,
+    occurrences,
+    diagnostics,
+    synthetics
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TextDocument])
 }
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/TextDocuments.scala b/compiler/src/dotty/tools/dotc/semanticdb/TextDocuments.scala
index 68d667f27f74..8bc46a1f703d 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/TextDocuments.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/TextDocuments.scala
@@ -1,58 +1,86 @@
-package dotty.tools.dotc.semanticdb
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
 
+package dotty.tools.dotc.semanticdb
 import dotty.tools.dotc.semanticdb.internal._
 import scala.annotation.internal.sharable
 
-object TextDocuments {
-  def parseFrom(in: Array[Byte]): TextDocuments = {
-    parseFrom(SemanticdbInputStream.newInstance(in))
-  }
-  def parseFrom(in: SemanticdbInputStream): TextDocuments = {
-    defaultInstance.mergeFrom(in)
-  }
-  val defaultInstance: TextDocuments = TextDocuments(Nil)
-}
-final case class TextDocuments(documents: Seq[TextDocument]) extends SemanticdbMessage[TextDocuments] derives CanEqual {
-  @sharable
-  private var __serializedSizeCachedValue: Int = 0
-  private def __computeSerializedValue(): Int = {
-    var __size = 0
-    documents.foreach { __item =>
-      val __value = __item
-      __size += 1 +
-        SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) +
-        __value.serializedSize
+@SerialVersionUID(0L)
+final case class TextDocuments(
+    documents: _root_.scala.Seq[dotty.tools.dotc.semanticdb.TextDocument] = _root_.scala.Seq.empty
+    )  extends SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      documents.foreach { __item =>
+        val __value = __item
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      }
+      __size
     }
-    __size
-  }
-  final override def serializedSize: Int = {
-    var read = __serializedSizeCachedValue
-    if (read == 0) {
-      read = __computeSerializedValue()
-      __serializedSizeCachedValue = read
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
     }
-    read
-  }
-  def writeTo(`_output__`: SemanticdbOutputStream): Unit = {
-    documents.foreach { __v =>
-      val __m = __v
-      _output__.writeTag(1, 2)
-      _output__.writeUInt32NoTag(__m.serializedSize)
-      __m.writeTo(_output__)
-    };
-  }
-  def mergeFrom(`_input__`: SemanticdbInputStream): TextDocuments = {
-    val __documents = (Vector.newBuilder[TextDocument] ++= this.documents)
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      documents.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(1, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def clearDocuments = copy(documents = _root_.scala.Seq.empty)
+    def addDocuments(__vs: dotty.tools.dotc.semanticdb.TextDocument*): TextDocuments = addAllDocuments(__vs)
+    def addAllDocuments(__vs: Iterable[dotty.tools.dotc.semanticdb.TextDocument]): TextDocuments = copy(documents = documents ++ __vs)
+    def withDocuments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.TextDocument]): TextDocuments = copy(documents = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TextDocuments])
+}
+
+object TextDocuments  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TextDocuments] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TextDocuments] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TextDocuments = {
+    val __documents: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.TextDocument] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.TextDocument]
     var _done__ = false
     while (!_done__) {
       val _tag__ = _input__.readTag()
       _tag__ match {
         case 0 => _done__ = true
         case 10 =>
-          __documents += LiteParser.readMessage(_input__, TextDocument.defaultInstance)
+          __documents += LiteParser.readMessage[dotty.tools.dotc.semanticdb.TextDocument](_input__)
         case tag => _input__.skipField(tag)
       }
     }
-    TextDocuments(documents = __documents.result())
+    dotty.tools.dotc.semanticdb.TextDocuments(
+        documents = __documents.result()
+    )
   }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.TextDocuments(
+    documents = _root_.scala.Seq.empty
+  )
+  final val DOCUMENTS_FIELD_NUMBER = 1
+  def of(
+    documents: _root_.scala.Seq[dotty.tools.dotc.semanticdb.TextDocument]
+  ): _root_.dotty.tools.dotc.semanticdb.TextDocuments = _root_.dotty.tools.dotc.semanticdb.TextDocuments(
+    documents
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TextDocuments])
 }
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Tools.scala b/compiler/src/dotty/tools/dotc/semanticdb/Tools.scala
index ce2e943ed7a1..54b57f7c6a2f 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/Tools.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Tools.scala
@@ -41,14 +41,22 @@ object Tools:
         document.copy(text = text)
   end loadTextDocument
 
+  def loadTextDocumentUnsafe(scalaAbsolutePath: Path, semanticdbAbsolutePath: Path): TextDocument =
+    val docs = parseTextDocuments(semanticdbAbsolutePath).documents
+    assert(docs.length == 1)
+    docs.head.copy(text = new String(Files.readAllBytes(scalaAbsolutePath), StandardCharsets.UTF_8))
+
   /** Parses SemanticDB text documents from an absolute path to a `*.semanticdb` file. */
   private def parseTextDocuments(path: Path): TextDocuments =
     val bytes = Files.readAllBytes(path) // NOTE: a semanticdb file is a TextDocuments message, not TextDocument
     TextDocuments.parseFrom(bytes)
 
   def metac(doc: TextDocument, realPath: Path)(using sb: StringBuilder): StringBuilder =
+    val symtab = PrinterSymtab.fromTextDocument(doc)
+    val symPrinter = SymbolInformationPrinter(symtab)
     val realURI = realPath.toString
-    given SourceFile = SourceFile.virtual(doc.uri, doc.text)
+    given sourceFile: SourceFile = SourceFile.virtual(doc.uri, doc.text)
+    val synthPrinter = SyntheticPrinter(symtab, sourceFile)
     sb.append(realURI).nl
     sb.append("-" * realURI.length).nl
     sb.nl
@@ -59,13 +67,20 @@ object Tools:
     sb.append("Language => ").append(languageString(doc.language)).nl
     sb.append("Symbols => ").append(doc.symbols.length).append(" entries").nl
     sb.append("Occurrences => ").append(doc.occurrences.length).append(" entries").nl
+    if doc.synthetics.nonEmpty then
+      sb.append("Synthetics => ").append(doc.synthetics.length).append(" entries").nl
     sb.nl
     sb.append("Symbols:").nl
-    doc.symbols.sorted.foreach(processSymbol)
+    doc.symbols.sorted.foreach(s => processSymbol(s, symPrinter))
     sb.nl
     sb.append("Occurrences:").nl
     doc.occurrences.sorted.foreach(processOccurrence)
     sb.nl
+    if doc.synthetics.nonEmpty then
+      sb.append("Synthetics:").nl
+      doc.synthetics.sorted.foreach(s => processSynth(s, synthPrinter))
+      sb.nl
+    sb
   end metac
 
   private def schemaString(schema: Schema) =
@@ -85,51 +100,16 @@ object Tools:
     case UNKNOWN_LANGUAGE | Unrecognized(_) => "unknown"
   end languageString
 
-  private def processSymbol(info: SymbolInformation)(using sb: StringBuilder): Unit =
-    import SymbolInformation.Kind._
-    sb.append(info.symbol).append(" => ")
-    if info.isAbstract then sb.append("abstract ")
-    if info.isFinal then sb.append("final ")
-    if info.isSealed then sb.append("sealed ")
-    if info.isImplicit then sb.append("implicit ")
-    if info.isLazy then sb.append("lazy ")
-    if info.isCase then sb.append("case ")
-    if info.isCovariant then sb.append("covariant ")
-    if info.isContravariant then sb.append("contravariant ")
-    if info.isVal then sb.append("val ")
-    if info.isVar then sb.append("var ")
-    if info.isStatic then sb.append("static ")
-    if info.isPrimary then sb.append("primary ")
-    if info.isEnum then sb.append("enum ")
-    if info.isDefault then sb.append("default ")
-    info.kind match
-      case LOCAL => sb.append("local ")
-      case FIELD => sb.append("field ")
-      case METHOD => sb.append("method ")
-      case CONSTRUCTOR => sb.append("ctor ")
-      case MACRO => sb.append("macro ")
-      case TYPE => sb.append("type ")
-      case PARAMETER => sb.append("param ")
-      case SELF_PARAMETER => sb.append("selfparam ")
-      case TYPE_PARAMETER => sb.append("typeparam ")
-      case OBJECT => sb.append("object ")
-      case PACKAGE => sb.append("package ")
-      case PACKAGE_OBJECT => sb.append("package object ")
-      case CLASS => sb.append("class ")
-      case TRAIT => sb.append("trait ")
-      case INTERFACE => sb.append("interface ")
-      case UNKNOWN_KIND | Unrecognized(_) => sb.append("unknown ")
-    sb.append(info.displayName).nl
-  end processSymbol
+  private def processSymbol(info: SymbolInformation, printer: SymbolInformationPrinter)(using sb: StringBuilder): Unit =
+    sb.append(printer.pprintSymbolInformation(info)).nl
+
+  private def processSynth(synth: Synthetic, printer: SyntheticPrinter)(using sb: StringBuilder): Unit =
+    sb.append(printer.pprint(synth)).nl
 
   private def processOccurrence(occ: SymbolOccurrence)(using sb: StringBuilder, sourceFile: SourceFile): Unit =
     occ.range match
     case Some(range) =>
-      sb.append('[')
-        .append(range.startLine).append(':').append(range.startCharacter)
-        .append("..")
-        .append(range.endLine).append(':').append(range.endCharacter)
-        .append("):")
+      processRange(sb, range)
       if range.endLine == range.startLine
       && range.startCharacter != range.endCharacter
       && !(occ.symbol.isConstructor && occ.role.isDefinition) then
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Tree.scala b/compiler/src/dotty/tools/dotc/semanticdb/Tree.scala
new file mode 100644
index 000000000000..c6b107852fb2
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Tree.scala
@@ -0,0 +1,1079 @@
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
+
+package dotty.tools.dotc.semanticdb
+import dotty.tools.dotc.semanticdb.internal._
+import scala.annotation.internal.sharable
+
+sealed trait Tree  extends SemanticdbGeneratedSealedOneof  derives CanEqual {
+  type MessageType = dotty.tools.dotc.semanticdb.TreeMessage
+  final def isEmpty = this.isInstanceOf[dotty.tools.dotc.semanticdb.Tree.Empty.type]
+  final def isDefined = !isEmpty
+  final def asMessage: dotty.tools.dotc.semanticdb.TreeMessage = dotty.tools.dotc.semanticdb.Tree.TreeTypeMapper.toBase(this)
+  final def asNonEmpty: Option[dotty.tools.dotc.semanticdb.Tree.NonEmpty] = if (isEmpty) None else Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Tree.NonEmpty])
+}
+
+object Tree {
+  case object Empty extends dotty.tools.dotc.semanticdb.Tree
+  
+  sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Tree
+  def defaultInstance: dotty.tools.dotc.semanticdb.Tree = Empty
+  
+  implicit val TreeTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] {
+    override def toCustom(__base: dotty.tools.dotc.semanticdb.TreeMessage): dotty.tools.dotc.semanticdb.Tree = __base.sealedValue match {
+      case __v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.ApplyTree => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.FunctionTree => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.IdTree => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.LiteralTree => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.MacroExpansionTree => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.OriginalTree => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.SelectTree => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.TypeApplyTree => __v.value
+      case dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty => Empty
+    }
+    override def toBase(__custom: dotty.tools.dotc.semanticdb.Tree): dotty.tools.dotc.semanticdb.TreeMessage = dotty.tools.dotc.semanticdb.TreeMessage(__custom match {
+      case __v: dotty.tools.dotc.semanticdb.ApplyTree => dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.ApplyTree(__v)
+      case __v: dotty.tools.dotc.semanticdb.FunctionTree => dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.FunctionTree(__v)
+      case __v: dotty.tools.dotc.semanticdb.IdTree => dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.IdTree(__v)
+      case __v: dotty.tools.dotc.semanticdb.LiteralTree => dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.LiteralTree(__v)
+      case __v: dotty.tools.dotc.semanticdb.MacroExpansionTree => dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.MacroExpansionTree(__v)
+      case __v: dotty.tools.dotc.semanticdb.OriginalTree => dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.OriginalTree(__v)
+      case __v: dotty.tools.dotc.semanticdb.SelectTree => dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.SelectTree(__v)
+      case __v: dotty.tools.dotc.semanticdb.TypeApplyTree => dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.TypeApplyTree(__v)
+      case Empty => dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty
+    })
+  }
+}
+@SerialVersionUID(0L)
+final case class TreeMessage(
+    sealedValue: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty
+    )  extends SemanticdbGeneratedMessage  derives CanEqual {
+    // Cache for `serializedSize`; 0 doubles as the "not yet computed"
+    // sentinel, so an Empty oneof recomputes (cheaply) on every call.
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    // Protobuf wire size of this message. At most one of the oneof cases
+    // below is defined; a set case is encoded as a length-delimited field:
+    // 1 tag byte + varint(payload size) + payload.
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      if (sealedValue.applyTree.isDefined) {
+        val __value = sealedValue.applyTree.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.functionTree.isDefined) {
+        val __value = sealedValue.functionTree.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.idTree.isDefined) {
+        val __value = sealedValue.idTree.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.literalTree.isDefined) {
+        val __value = sealedValue.literalTree.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.macroExpansionTree.isDefined) {
+        val __value = sealedValue.macroExpansionTree.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.originalTree.isDefined) {
+        val __value = sealedValue.originalTree.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.selectTree.isDefined) {
+        val __value = sealedValue.selectTree.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.typeApplyTree.isDefined) {
+        val __value = sealedValue.typeApplyTree.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      __size
+    }
+    // Returns the cached wire size, computing and memoizing it on first use.
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    // Serializes the set oneof case (if any) as a length-delimited field:
+    // writeTag(fieldNumber, wireType 2) + varint(size) + message body.
+    // At most one `foreach` below fires, since the cases are exclusive.
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      sealedValue.applyTree.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(1, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.functionTree.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(2, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.idTree.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(3, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.literalTree.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(4, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.macroExpansionTree.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(5, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.originalTree.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(6, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.selectTree.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(7, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.typeApplyTree.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(8, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    // Per-case convenience accessors: `getX` yields the case value, or that
+    // case's `defaultInstance` when a different case (or Empty) is set;
+    // `withX` returns a copy of this message holding case X.
+    def getApplyTree: dotty.tools.dotc.semanticdb.ApplyTree = sealedValue.applyTree.getOrElse(dotty.tools.dotc.semanticdb.ApplyTree.defaultInstance)
+    def withApplyTree(__v: dotty.tools.dotc.semanticdb.ApplyTree): TreeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.ApplyTree(__v))
+    def getFunctionTree: dotty.tools.dotc.semanticdb.FunctionTree = sealedValue.functionTree.getOrElse(dotty.tools.dotc.semanticdb.FunctionTree.defaultInstance)
+    def withFunctionTree(__v: dotty.tools.dotc.semanticdb.FunctionTree): TreeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.FunctionTree(__v))
+    def getIdTree: dotty.tools.dotc.semanticdb.IdTree = sealedValue.idTree.getOrElse(dotty.tools.dotc.semanticdb.IdTree.defaultInstance)
+    def withIdTree(__v: dotty.tools.dotc.semanticdb.IdTree): TreeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.IdTree(__v))
+    def getLiteralTree: dotty.tools.dotc.semanticdb.LiteralTree = sealedValue.literalTree.getOrElse(dotty.tools.dotc.semanticdb.LiteralTree.defaultInstance)
+    def withLiteralTree(__v: dotty.tools.dotc.semanticdb.LiteralTree): TreeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.LiteralTree(__v))
+    def getMacroExpansionTree: dotty.tools.dotc.semanticdb.MacroExpansionTree = sealedValue.macroExpansionTree.getOrElse(dotty.tools.dotc.semanticdb.MacroExpansionTree.defaultInstance)
+    def withMacroExpansionTree(__v: dotty.tools.dotc.semanticdb.MacroExpansionTree): TreeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.MacroExpansionTree(__v))
+    def getOriginalTree: dotty.tools.dotc.semanticdb.OriginalTree = sealedValue.originalTree.getOrElse(dotty.tools.dotc.semanticdb.OriginalTree.defaultInstance)
+    def withOriginalTree(__v: dotty.tools.dotc.semanticdb.OriginalTree): TreeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.OriginalTree(__v))
+    def getSelectTree: dotty.tools.dotc.semanticdb.SelectTree = sealedValue.selectTree.getOrElse(dotty.tools.dotc.semanticdb.SelectTree.defaultInstance)
+    def withSelectTree(__v: dotty.tools.dotc.semanticdb.SelectTree): TreeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.SelectTree(__v))
+    def getTypeApplyTree: dotty.tools.dotc.semanticdb.TypeApplyTree = sealedValue.typeApplyTree.getOrElse(dotty.tools.dotc.semanticdb.TypeApplyTree.defaultInstance)
+    def withTypeApplyTree(__v: dotty.tools.dotc.semanticdb.TypeApplyTree): TreeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.TypeApplyTree(__v))
+    def clearSealedValue: TreeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty)
+    def withSealedValue(__v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue): TreeMessage = copy(sealedValue = __v)
+    
+    
+    
+    
+    // Bridges this wrapper message back to the user-facing `Tree` ADT via
+    // the registered type mapper.
+    def toTree: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.Tree.TreeTypeMapper.toCustom(this)
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Tree])
+}
+
+// Companion of the low-level oneof wrapper around `Tree`: parsing, the
+// default (Empty) instance, and the sealed representation of the oneof.
+object TreeMessage  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TreeMessage] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TreeMessage] = this
+  // Decodes a TreeMessage. A tag is (fieldNumber << 3) | wireType; every
+  // case here uses wire type 2 (length-delimited), so field N has tag N*8+2
+  // (10, 18, 26, ...). Re-reading the same field merges the two messages
+  // (`LiteParser.readMessage(_input__, _)`); a different field replaces the
+  // previously read case — protobuf "last one wins" for oneofs.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TreeMessage = {
+    var __sealedValue: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.ApplyTree(__sealedValue.applyTree.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ApplyTree](_input__))(LiteParser.readMessage(_input__, _)))
+        case 18 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.FunctionTree(__sealedValue.functionTree.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.FunctionTree](_input__))(LiteParser.readMessage(_input__, _)))
+        case 26 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.IdTree(__sealedValue.idTree.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.IdTree](_input__))(LiteParser.readMessage(_input__, _)))
+        case 34 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.LiteralTree(__sealedValue.literalTree.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.LiteralTree](_input__))(LiteParser.readMessage(_input__, _)))
+        case 42 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.MacroExpansionTree(__sealedValue.macroExpansionTree.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.MacroExpansionTree](_input__))(LiteParser.readMessage(_input__, _)))
+        case 50 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.OriginalTree(__sealedValue.originalTree.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.OriginalTree](_input__))(LiteParser.readMessage(_input__, _)))
+        case 58 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.SelectTree(__sealedValue.selectTree.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.SelectTree](_input__))(LiteParser.readMessage(_input__, _)))
+        case 66 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.TypeApplyTree(__sealedValue.typeApplyTree.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeApplyTree](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.TreeMessage(
+        sealedValue = __sealedValue
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.TreeMessage(
+    sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty
+  )
+  // Oneof representation: exactly one concrete case (or Empty) at a time.
+  // Each concrete case overrides only its own `isX`/`x` accessor and its
+  // protobuf field `number`; everything else keeps the None/false defaults.
+  sealed trait SealedValue  extends SemanticdbGeneratedOneof  derives CanEqual {
+    def isEmpty: _root_.scala.Boolean = false
+    def isDefined: _root_.scala.Boolean = true
+    def isApplyTree: _root_.scala.Boolean = false
+    def isFunctionTree: _root_.scala.Boolean = false
+    def isIdTree: _root_.scala.Boolean = false
+    def isLiteralTree: _root_.scala.Boolean = false
+    def isMacroExpansionTree: _root_.scala.Boolean = false
+    def isOriginalTree: _root_.scala.Boolean = false
+    def isSelectTree: _root_.scala.Boolean = false
+    def isTypeApplyTree: _root_.scala.Boolean = false
+    def applyTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.ApplyTree] = _root_.scala.None
+    def functionTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.FunctionTree] = _root_.scala.None
+    def idTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.IdTree] = _root_.scala.None
+    def literalTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.LiteralTree] = _root_.scala.None
+    def macroExpansionTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.MacroExpansionTree] = _root_.scala.None
+    def originalTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.OriginalTree] = _root_.scala.None
+    def selectTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.SelectTree] = _root_.scala.None
+    def typeApplyTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeApplyTree] = _root_.scala.None
+  }
+  object SealedValue {
+    // The "no case set" variant; `value` deliberately has no element.
+    @SerialVersionUID(0L)
+    case object Empty extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue {
+      type ValueType = _root_.scala.Nothing
+      override def isEmpty: _root_.scala.Boolean = true
+      override def isDefined: _root_.scala.Boolean = false
+      override def number: _root_.scala.Int = 0
+      override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value")
+    }
+  
+    @SerialVersionUID(0L)
+    final case class ApplyTree(value: dotty.tools.dotc.semanticdb.ApplyTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.ApplyTree
+      override def isApplyTree: _root_.scala.Boolean = true
+      override def applyTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.ApplyTree] = Some(value)
+      override def number: _root_.scala.Int = 1
+    }
+    @SerialVersionUID(0L)
+    final case class FunctionTree(value: dotty.tools.dotc.semanticdb.FunctionTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.FunctionTree
+      override def isFunctionTree: _root_.scala.Boolean = true
+      override def functionTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.FunctionTree] = Some(value)
+      override def number: _root_.scala.Int = 2
+    }
+    @SerialVersionUID(0L)
+    final case class IdTree(value: dotty.tools.dotc.semanticdb.IdTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.IdTree
+      override def isIdTree: _root_.scala.Boolean = true
+      override def idTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.IdTree] = Some(value)
+      override def number: _root_.scala.Int = 3
+    }
+    @SerialVersionUID(0L)
+    final case class LiteralTree(value: dotty.tools.dotc.semanticdb.LiteralTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.LiteralTree
+      override def isLiteralTree: _root_.scala.Boolean = true
+      override def literalTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.LiteralTree] = Some(value)
+      override def number: _root_.scala.Int = 4
+    }
+    @SerialVersionUID(0L)
+    final case class MacroExpansionTree(value: dotty.tools.dotc.semanticdb.MacroExpansionTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.MacroExpansionTree
+      override def isMacroExpansionTree: _root_.scala.Boolean = true
+      override def macroExpansionTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.MacroExpansionTree] = Some(value)
+      override def number: _root_.scala.Int = 5
+    }
+    @SerialVersionUID(0L)
+    final case class OriginalTree(value: dotty.tools.dotc.semanticdb.OriginalTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.OriginalTree
+      override def isOriginalTree: _root_.scala.Boolean = true
+      override def originalTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.OriginalTree] = Some(value)
+      override def number: _root_.scala.Int = 6
+    }
+    @SerialVersionUID(0L)
+    final case class SelectTree(value: dotty.tools.dotc.semanticdb.SelectTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.SelectTree
+      override def isSelectTree: _root_.scala.Boolean = true
+      override def selectTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.SelectTree] = Some(value)
+      override def number: _root_.scala.Int = 7
+    }
+    @SerialVersionUID(0L)
+    final case class TypeApplyTree(value: dotty.tools.dotc.semanticdb.TypeApplyTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.TypeApplyTree
+      override def isTypeApplyTree: _root_.scala.Boolean = true
+      override def typeApplyTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeApplyTree] = Some(value)
+      override def number: _root_.scala.Int = 8
+    }
+  }
+  // Protobuf field numbers of the oneof cases (see tags in parseFrom).
+  final val APPLY_TREE_FIELD_NUMBER = 1
+  final val FUNCTION_TREE_FIELD_NUMBER = 2
+  final val ID_TREE_FIELD_NUMBER = 3
+  final val LITERAL_TREE_FIELD_NUMBER = 4
+  final val MACRO_EXPANSION_TREE_FIELD_NUMBER = 5
+  final val ORIGINAL_TREE_FIELD_NUMBER = 6
+  final val SELECT_TREE_FIELD_NUMBER = 7
+  final val TYPE_APPLY_TREE_FIELD_NUMBER = 8
+  def of(
+    sealedValue: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue
+  ): _root_.dotty.tools.dotc.semanticdb.TreeMessage = _root_.dotty.tools.dotc.semanticdb.TreeMessage(
+    sealedValue
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Tree])
+}
+
+@SerialVersionUID(0L)
+final case class ApplyTree(
+    function: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_function.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
+    arguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Tree] = _root_.scala.Seq.empty
+    )  extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    // Cache for `serializedSize`; 0 means "not yet computed".
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    // Wire size: field 1 (function) is counted only when it differs from
+    // the default instance; field 2 (arguments) is repeated, one
+    // length-delimited entry per element.
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_function.toBase(function)
+        if (__value != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      arguments.foreach { __item =>
+        val __value = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_arguments.toBase(__item)
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      }
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    // Encodes `function` (field 1, skipped when default) and each argument
+    // (field 2) as length-delimited fields, mirroring the size computation.
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_function.toBase(function)
+        if (__v != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) {
+          _output__.writeTag(1, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+      arguments.foreach { __v =>
+        val __m = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_arguments.toBase(__v)
+        _output__.writeTag(2, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    // Immutable field updaters (return a modified copy).
+    def withFunction(__v: dotty.tools.dotc.semanticdb.Tree): ApplyTree = copy(function = __v)
+    def clearArguments = copy(arguments = _root_.scala.Seq.empty)
+    def addArguments(__vs: dotty.tools.dotc.semanticdb.Tree*): ApplyTree = addAllArguments(__vs)
+    def addAllArguments(__vs: Iterable[dotty.tools.dotc.semanticdb.Tree]): ApplyTree = copy(arguments = arguments ++ __vs)
+    def withArguments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Tree]): ApplyTree = copy(arguments = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ApplyTree])
+}
+
+// Companion: decoding, default instance and type mappers for `ApplyTree`.
+object ApplyTree  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ApplyTree] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ApplyTree] = this
+  // Tag 10 = field 1 (function), tag 18 = field 2 (arguments), both wire
+  // type 2. A repeated occurrence of `function` is merged into the previous
+  // one; unknown fields are skipped.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ApplyTree = {
+    var __function: _root_.scala.Option[dotty.tools.dotc.semanticdb.TreeMessage] = _root_.scala.None
+    val __arguments: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Tree] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Tree]
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __function = _root_.scala.Some(__function.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TreeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case 18 =>
+          __arguments += dotty.tools.dotc.semanticdb.ApplyTree._typemapper_arguments.toCustom(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TreeMessage](_input__))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.ApplyTree(
+        function = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_function.toCustom(__function.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)),
+        arguments = __arguments.result()
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ApplyTree(
+    function = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_function.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
+    arguments = _root_.scala.Seq.empty
+  )
+  final val FUNCTION_FIELD_NUMBER = 1
+  final val ARGUMENTS_FIELD_NUMBER = 2
+  // Mappers between the generated wrapper (`TreeMessage`) and the
+  // user-facing `Tree` ADT, resolved from the implicit scope.
+  @transient @sharable
+  private[semanticdb] val _typemapper_function: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree]]
+  @transient @sharable
+  private[semanticdb] val _typemapper_arguments: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree]]
+  def of(
+    function: dotty.tools.dotc.semanticdb.Tree,
+    arguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Tree]
+  ): _root_.dotty.tools.dotc.semanticdb.ApplyTree = _root_.dotty.tools.dotc.semanticdb.ApplyTree(
+    function,
+    arguments
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ApplyTree])
+}
+
+@SerialVersionUID(0L)
+final case class FunctionTree(
+    parameters: _root_.scala.Seq[dotty.tools.dotc.semanticdb.IdTree] = _root_.scala.Seq.empty,
+    body: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)
+    )  extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    // Cache for `serializedSize`; 0 means "not yet computed".
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    // Wire size: field 1 (parameters) is repeated; field 2 (body) is
+    // counted only when it differs from the default instance.
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      parameters.foreach { __item =>
+        val __value = __item
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      }
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toBase(body)
+        if (__value != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    // Encodes each parameter (field 1) and the body (field 2, skipped when
+    // default) as length-delimited fields, mirroring the size computation.
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      parameters.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(1, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      {
+        val __v = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toBase(body)
+        if (__v != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) {
+          _output__.writeTag(2, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+    }
+    // Immutable field updaters (return a modified copy).
+    def clearParameters = copy(parameters = _root_.scala.Seq.empty)
+    def addParameters(__vs: dotty.tools.dotc.semanticdb.IdTree*): FunctionTree = addAllParameters(__vs)
+    def addAllParameters(__vs: Iterable[dotty.tools.dotc.semanticdb.IdTree]): FunctionTree = copy(parameters = parameters ++ __vs)
+    def withParameters(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.IdTree]): FunctionTree = copy(parameters = __v)
+    def withBody(__v: dotty.tools.dotc.semanticdb.Tree): FunctionTree = copy(body = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.FunctionTree])
+}
+
+// Companion: decoding, default instance and type mapper for `FunctionTree`.
+object FunctionTree  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.FunctionTree] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.FunctionTree] = this
+  // Tag 10 = field 1 (parameters, repeated), tag 18 = field 2 (body); a
+  // repeated `body` is merged into the previous one; unknown fields skipped.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.FunctionTree = {
+    val __parameters: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.IdTree] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.IdTree]
+    var __body: _root_.scala.Option[dotty.tools.dotc.semanticdb.TreeMessage] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __parameters += LiteParser.readMessage[dotty.tools.dotc.semanticdb.IdTree](_input__)
+        case 18 =>
+          __body = _root_.scala.Some(__body.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TreeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.FunctionTree(
+        parameters = __parameters.result(),
+        body = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toCustom(__body.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance))
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.FunctionTree(
+    parameters = _root_.scala.Seq.empty,
+    body = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)
+  )
+  final val PARAMETERS_FIELD_NUMBER = 1
+  final val BODY_FIELD_NUMBER = 2
+  // Mapper between the generated wrapper and the user-facing `Tree` ADT.
+  @transient @sharable
+  private[semanticdb] val _typemapper_body: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree]]
+  def of(
+    parameters: _root_.scala.Seq[dotty.tools.dotc.semanticdb.IdTree],
+    body: dotty.tools.dotc.semanticdb.Tree
+  ): _root_.dotty.tools.dotc.semanticdb.FunctionTree = _root_.dotty.tools.dotc.semanticdb.FunctionTree(
+    parameters,
+    body
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.FunctionTree])
+}
+
+@SerialVersionUID(0L)
+final case class IdTree(
+    symbol: _root_.scala.Predef.String = ""
+    )  extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    // Memoized wire size; 0 doubles as the "not yet computed" sentinel.
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    // Field 1 (symbol) contributes to the wire size only when non-empty,
+    // i.e. only when it differs from the protobuf string default "".
+    private[this] def __computeSerializedValue(): _root_.scala.Int =
+      if (symbol.isEmpty) 0
+      else SemanticdbOutputStream.computeStringSize(1, symbol)
+    override def serializedSize: _root_.scala.Int = {
+      var sz = __serializedSizeCachedValue
+      if (sz == 0) {
+        sz = __computeSerializedValue()
+        __serializedSizeCachedValue = sz
+      }
+      sz
+    }
+    // Emits field 1 only when `symbol` is non-default, matching the
+    // size computation above.
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit =
+      if (!symbol.isEmpty) _output__.writeString(1, symbol)
+    // Returns a copy with `symbol` replaced.
+    def withSymbol(__v: _root_.scala.Predef.String): IdTree = copy(symbol = __v)
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.IdTree])
+}
+
+// Companion: decoding and default instance for `IdTree`.
+object IdTree  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IdTree] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IdTree] = this
+  // Reads fields until tag 0 (end of input). Tag 10 = field 1, wire type 2
+  // (length-delimited UTF-8 string); unknown fields are skipped.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.IdTree = {
+    var __symbol: _root_.scala.Predef.String = ""
+    var __stop = false
+    while (!__stop) {
+      _input__.readTag() match {
+        case 0 => __stop = true
+        case 10 => __symbol = _input__.readStringRequireUtf8()
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.IdTree(symbol = __symbol)
+  }
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.IdTree(symbol = "")
+  final val SYMBOL_FIELD_NUMBER = 1
+  def of(
+    symbol: _root_.scala.Predef.String
+  ): _root_.dotty.tools.dotc.semanticdb.IdTree = _root_.dotty.tools.dotc.semanticdb.IdTree(
+    symbol
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IdTree])
+}
+
+@SerialVersionUID(0L)
+final case class LiteralTree(
+    constant: dotty.tools.dotc.semanticdb.Constant = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance)
+    )  extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    // Cache for `serializedSize`; 0 means "not yet computed".
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    // Wire size: field 1 (constant) is counted only when it differs from
+    // the default `ConstantMessage` instance.
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toBase(constant)
+        if (__value != dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    // Encodes `constant` (field 1, skipped when default) as a
+    // length-delimited field.
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toBase(constant)
+        if (__v != dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance) {
+          _output__.writeTag(1, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+    }
+    def withConstant(__v: dotty.tools.dotc.semanticdb.Constant): LiteralTree = copy(constant = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.LiteralTree])
+}
+
+// Companion: decoding, default instance and type mapper for `LiteralTree`.
+object LiteralTree  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LiteralTree] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LiteralTree] = this
+  // Tag 10 = field 1 (constant), wire type 2; a repeated occurrence is
+  // merged into the previous one; unknown fields are skipped.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.LiteralTree = {
+    var __constant: _root_.scala.Option[dotty.tools.dotc.semanticdb.ConstantMessage] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __constant = _root_.scala.Some(__constant.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ConstantMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.LiteralTree(
+        constant = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toCustom(__constant.getOrElse(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance))
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.LiteralTree(
+    constant = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance)
+  )
+  final val CONSTANT_FIELD_NUMBER = 1
+  // Mapper between the generated `ConstantMessage` wrapper and the
+  // user-facing `Constant` ADT, resolved from the implicit scope.
+  @transient @sharable
+  private[semanticdb] val _typemapper_constant: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.ConstantMessage, dotty.tools.dotc.semanticdb.Constant] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.ConstantMessage, dotty.tools.dotc.semanticdb.Constant]]
+  def of(
+    constant: dotty.tools.dotc.semanticdb.Constant
+  ): _root_.dotty.tools.dotc.semanticdb.LiteralTree = _root_.dotty.tools.dotc.semanticdb.LiteralTree(
+    constant
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LiteralTree])
+}
+
+@SerialVersionUID(0L)
+final case class MacroExpansionTree(
+    beforeExpansion: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_beforeExpansion.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
+    tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+    )  extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    // Cache for `serializedSize`; 0 means "not yet computed".
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    // Wire size: field 1 (beforeExpansion) and field 2 (tpe) are each
+    // counted only when they differ from their default instances.
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_beforeExpansion.toBase(beforeExpansion)
+        if (__value != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toBase(tpe)
+        if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    // Encodes fields 1 and 2 (each skipped when default) as
+    // length-delimited fields, mirroring the size computation.
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_beforeExpansion.toBase(beforeExpansion)
+        if (__v != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) {
+          _output__.writeTag(1, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+      {
+        val __v = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toBase(tpe)
+        if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          _output__.writeTag(2, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+    }
+    def withBeforeExpansion(__v: dotty.tools.dotc.semanticdb.Tree): MacroExpansionTree = copy(beforeExpansion = __v)
+    def withTpe(__v: dotty.tools.dotc.semanticdb.Type): MacroExpansionTree = copy(tpe = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MacroExpansionTree])
+}
+
+object MacroExpansionTree  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MacroExpansionTree] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MacroExpansionTree] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.MacroExpansionTree = {
+    var __beforeExpansion: _root_.scala.Option[dotty.tools.dotc.semanticdb.TreeMessage] = _root_.scala.None
+    var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __beforeExpansion = _root_.scala.Some(__beforeExpansion.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TreeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case 18 =>
+          __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.MacroExpansionTree(
+        beforeExpansion = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_beforeExpansion.toCustom(__beforeExpansion.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)),
+        tpe = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.MacroExpansionTree(
+    beforeExpansion = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_beforeExpansion.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
+    tpe = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+  )
+  final val BEFORE_EXPANSION_FIELD_NUMBER = 1
+  final val TPE_FIELD_NUMBER = 2
+  @transient @sharable
+  private[semanticdb] val _typemapper_beforeExpansion: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree]]
+  @transient @sharable
+  private[semanticdb] val _typemapper_tpe: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    beforeExpansion: dotty.tools.dotc.semanticdb.Tree,
+    tpe: dotty.tools.dotc.semanticdb.Type
+  ): _root_.dotty.tools.dotc.semanticdb.MacroExpansionTree = _root_.dotty.tools.dotc.semanticdb.MacroExpansionTree(
+    beforeExpansion,
+    tpe
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MacroExpansionTree])
+}
+
+@SerialVersionUID(0L)
+final case class OriginalTree(
+    range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None
+    )  extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      if (range.isDefined) {
+        val __value = range.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      range.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(1, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def getRange: dotty.tools.dotc.semanticdb.Range = range.getOrElse(dotty.tools.dotc.semanticdb.Range.defaultInstance)
+    def clearRange: OriginalTree = copy(range = _root_.scala.None)
+    def withRange(__v: dotty.tools.dotc.semanticdb.Range): OriginalTree = copy(range = Option(__v))
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.OriginalTree])
+}
+
+object OriginalTree  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.OriginalTree] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.OriginalTree] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.OriginalTree = {
+    var __range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __range = Option(__range.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Range](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.OriginalTree(
+        range = __range
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.OriginalTree(
+    range = _root_.scala.None
+  )
+  final val RANGE_FIELD_NUMBER = 1
+  def of(
+    range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range]
+  ): _root_.dotty.tools.dotc.semanticdb.OriginalTree = _root_.dotty.tools.dotc.semanticdb.OriginalTree(
+    range
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.OriginalTree])
+}
+
+@SerialVersionUID(0L)
+final case class SelectTree(
+    qualifier: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.SelectTree._typemapper_qualifier.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
+    id: _root_.scala.Option[dotty.tools.dotc.semanticdb.IdTree] = _root_.scala.None
+    )  extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.SelectTree._typemapper_qualifier.toBase(qualifier)
+        if (__value != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      if (id.isDefined) {
+        val __value = id.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = dotty.tools.dotc.semanticdb.SelectTree._typemapper_qualifier.toBase(qualifier)
+        if (__v != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) {
+          _output__.writeTag(1, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+      id.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(2, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def withQualifier(__v: dotty.tools.dotc.semanticdb.Tree): SelectTree = copy(qualifier = __v)
+    def getId: dotty.tools.dotc.semanticdb.IdTree = id.getOrElse(dotty.tools.dotc.semanticdb.IdTree.defaultInstance)
+    def clearId: SelectTree = copy(id = _root_.scala.None)
+    def withId(__v: dotty.tools.dotc.semanticdb.IdTree): SelectTree = copy(id = Option(__v))
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SelectTree])
+}
+
+object SelectTree  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SelectTree] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SelectTree] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SelectTree = {
+    var __qualifier: _root_.scala.Option[dotty.tools.dotc.semanticdb.TreeMessage] = _root_.scala.None
+    var __id: _root_.scala.Option[dotty.tools.dotc.semanticdb.IdTree] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __qualifier = _root_.scala.Some(__qualifier.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TreeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case 18 =>
+          __id = Option(__id.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.IdTree](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.SelectTree(
+        qualifier = dotty.tools.dotc.semanticdb.SelectTree._typemapper_qualifier.toCustom(__qualifier.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)),
+        id = __id
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.SelectTree(
+    qualifier = dotty.tools.dotc.semanticdb.SelectTree._typemapper_qualifier.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
+    id = _root_.scala.None
+  )
+  final val QUALIFIER_FIELD_NUMBER = 1
+  final val ID_FIELD_NUMBER = 2
+  @transient @sharable
+  private[semanticdb] val _typemapper_qualifier: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree]]
+  def of(
+    qualifier: dotty.tools.dotc.semanticdb.Tree,
+    id: _root_.scala.Option[dotty.tools.dotc.semanticdb.IdTree]
+  ): _root_.dotty.tools.dotc.semanticdb.SelectTree = _root_.dotty.tools.dotc.semanticdb.SelectTree(
+    qualifier,
+    id
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SelectTree])
+}
+
+@SerialVersionUID(0L)
+final case class TypeApplyTree(
+    function: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_function.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
+    typeArguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty
+    )  extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_function.toBase(function)
+        if (__value != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      typeArguments.foreach { __item =>
+        val __value = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_typeArguments.toBase(__item)
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      }
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_function.toBase(function)
+        if (__v != dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) {
+          _output__.writeTag(1, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+      typeArguments.foreach { __v =>
+        val __m = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_typeArguments.toBase(__v)
+        _output__.writeTag(2, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def withFunction(__v: dotty.tools.dotc.semanticdb.Tree): TypeApplyTree = copy(function = __v)
+    def clearTypeArguments = copy(typeArguments = _root_.scala.Seq.empty)
+    def addTypeArguments(__vs: dotty.tools.dotc.semanticdb.Type*): TypeApplyTree = addAllTypeArguments(__vs)
+    def addAllTypeArguments(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): TypeApplyTree = copy(typeArguments = typeArguments ++ __vs)
+    def withTypeArguments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): TypeApplyTree = copy(typeArguments = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TypeApplyTree])
+}
+
+object TypeApplyTree  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeApplyTree] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeApplyTree] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TypeApplyTree = {
+    var __function: _root_.scala.Option[dotty.tools.dotc.semanticdb.TreeMessage] = _root_.scala.None
+    val __typeArguments: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type]
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __function = _root_.scala.Some(__function.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TreeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case 18 =>
+          __typeArguments += dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_typeArguments.toCustom(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.TypeApplyTree(
+        function = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_function.toCustom(__function.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)),
+        typeArguments = __typeArguments.result()
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.TypeApplyTree(
+    function = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_function.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance),
+    typeArguments = _root_.scala.Seq.empty
+  )
+  final val FUNCTION_FIELD_NUMBER = 1
+  final val TYPE_ARGUMENTS_FIELD_NUMBER = 2
+  @transient @sharable
+  private[semanticdb] val _typemapper_function: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree]]
+  @transient @sharable
+  private[semanticdb] val _typemapper_typeArguments: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    function: dotty.tools.dotc.semanticdb.Tree,
+    typeArguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]
+  ): _root_.dotty.tools.dotc.semanticdb.TypeApplyTree = _root_.dotty.tools.dotc.semanticdb.TypeApplyTree(
+    function,
+    typeArguments
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeApplyTree])
+}
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Type.scala b/compiler/src/dotty/tools/dotc/semanticdb/Type.scala
new file mode 100644
index 000000000000..17d2d28bcf36
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/Type.scala
@@ -0,0 +1,1781 @@
+// Generated by https://github.com/tanishiking/semanticdb-for-scala3
+// Generated by the Scala Plugin for the Protocol Buffer Compiler.
+// Do not edit!
+//
+// Protofile syntax: PROTO3
+
+package dotty.tools.dotc.semanticdb
+import dotty.tools.dotc.semanticdb.internal._
+import scala.annotation.internal.sharable
+
+sealed trait Type  extends SemanticdbGeneratedSealedOneof  derives CanEqual {
+  type MessageType = dotty.tools.dotc.semanticdb.TypeMessage
+  final def isEmpty = this.isInstanceOf[dotty.tools.dotc.semanticdb.Type.Empty.type]
+  final def isDefined = !isEmpty
+  final def asMessage: dotty.tools.dotc.semanticdb.TypeMessage = dotty.tools.dotc.semanticdb.Type.TypeTypeMapper.toBase(this)
+  final def asNonEmpty: Option[dotty.tools.dotc.semanticdb.Type.NonEmpty] = if (isEmpty) None else Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Type.NonEmpty])
+}
+
+object Type {
+  case object Empty extends dotty.tools.dotc.semanticdb.Type
+  
+  sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Type
+  def defaultInstance: dotty.tools.dotc.semanticdb.Type = Empty
+  
+  implicit val TypeTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] {
+    override def toCustom(__base: dotty.tools.dotc.semanticdb.TypeMessage): dotty.tools.dotc.semanticdb.Type = __base.sealedValue match {
+      case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.TypeRef => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.SingleType => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ThisType => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.SuperType => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ConstantType => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.IntersectionType => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.UnionType => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.WithType => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.StructuralType => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.AnnotatedType => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ExistentialType => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.UniversalType => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ByNameType => __v.value
+      case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.RepeatedType => __v.value
+      case dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty => Empty
+    }
+    override def toBase(__custom: dotty.tools.dotc.semanticdb.Type): dotty.tools.dotc.semanticdb.TypeMessage = dotty.tools.dotc.semanticdb.TypeMessage(__custom match {
+      case __v: dotty.tools.dotc.semanticdb.TypeRef => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.TypeRef(__v)
+      case __v: dotty.tools.dotc.semanticdb.SingleType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.SingleType(__v)
+      case __v: dotty.tools.dotc.semanticdb.ThisType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ThisType(__v)
+      case __v: dotty.tools.dotc.semanticdb.SuperType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.SuperType(__v)
+      case __v: dotty.tools.dotc.semanticdb.ConstantType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ConstantType(__v)
+      case __v: dotty.tools.dotc.semanticdb.IntersectionType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.IntersectionType(__v)
+      case __v: dotty.tools.dotc.semanticdb.UnionType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.UnionType(__v)
+      case __v: dotty.tools.dotc.semanticdb.WithType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.WithType(__v)
+      case __v: dotty.tools.dotc.semanticdb.StructuralType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.StructuralType(__v)
+      case __v: dotty.tools.dotc.semanticdb.AnnotatedType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.AnnotatedType(__v)
+      case __v: dotty.tools.dotc.semanticdb.ExistentialType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ExistentialType(__v)
+      case __v: dotty.tools.dotc.semanticdb.UniversalType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.UniversalType(__v)
+      case __v: dotty.tools.dotc.semanticdb.ByNameType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ByNameType(__v)
+      case __v: dotty.tools.dotc.semanticdb.RepeatedType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.RepeatedType(__v)
+      case Empty => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty
+    })
+  }
+}
+@SerialVersionUID(0L)
+final case class TypeMessage(
+    sealedValue: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty
+    )  extends SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      if (sealedValue.typeRef.isDefined) {
+        val __value = sealedValue.typeRef.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.singleType.isDefined) {
+        val __value = sealedValue.singleType.get
+        __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.thisType.isDefined) {
+        val __value = sealedValue.thisType.get
+        __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.superType.isDefined) {
+        val __value = sealedValue.superType.get
+        __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.constantType.isDefined) {
+        val __value = sealedValue.constantType.get
+        __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.intersectionType.isDefined) {
+        val __value = sealedValue.intersectionType.get
+        __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.unionType.isDefined) {
+        val __value = sealedValue.unionType.get
+        __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.withType.isDefined) {
+        val __value = sealedValue.withType.get
+        __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.structuralType.isDefined) {
+        val __value = sealedValue.structuralType.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.annotatedType.isDefined) {
+        val __value = sealedValue.annotatedType.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.existentialType.isDefined) {
+        val __value = sealedValue.existentialType.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.universalType.isDefined) {
+        val __value = sealedValue.universalType.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.byNameType.isDefined) {
+        val __value = sealedValue.byNameType.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      if (sealedValue.repeatedType.isDefined) {
+        val __value = sealedValue.repeatedType.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      sealedValue.typeRef.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(2, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.structuralType.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(7, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.annotatedType.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(8, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.existentialType.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(9, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.universalType.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(10, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.byNameType.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(13, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.repeatedType.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(14, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.intersectionType.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(17, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.unionType.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(18, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.withType.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(19, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.singleType.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(20, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.thisType.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(21, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.superType.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(22, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+      sealedValue.constantType.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(23, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def getTypeRef: dotty.tools.dotc.semanticdb.TypeRef = sealedValue.typeRef.getOrElse(dotty.tools.dotc.semanticdb.TypeRef.defaultInstance)
+    def withTypeRef(__v: dotty.tools.dotc.semanticdb.TypeRef): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.TypeRef(__v))
+    def getSingleType: dotty.tools.dotc.semanticdb.SingleType = sealedValue.singleType.getOrElse(dotty.tools.dotc.semanticdb.SingleType.defaultInstance)
+    def withSingleType(__v: dotty.tools.dotc.semanticdb.SingleType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.SingleType(__v))
+    def getThisType: dotty.tools.dotc.semanticdb.ThisType = sealedValue.thisType.getOrElse(dotty.tools.dotc.semanticdb.ThisType.defaultInstance)
+    def withThisType(__v: dotty.tools.dotc.semanticdb.ThisType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ThisType(__v))
+    def getSuperType: dotty.tools.dotc.semanticdb.SuperType = sealedValue.superType.getOrElse(dotty.tools.dotc.semanticdb.SuperType.defaultInstance)
+    def withSuperType(__v: dotty.tools.dotc.semanticdb.SuperType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.SuperType(__v))
+    def getConstantType: dotty.tools.dotc.semanticdb.ConstantType = sealedValue.constantType.getOrElse(dotty.tools.dotc.semanticdb.ConstantType.defaultInstance)
+    def withConstantType(__v: dotty.tools.dotc.semanticdb.ConstantType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ConstantType(__v))
+    def getIntersectionType: dotty.tools.dotc.semanticdb.IntersectionType = sealedValue.intersectionType.getOrElse(dotty.tools.dotc.semanticdb.IntersectionType.defaultInstance)
+    def withIntersectionType(__v: dotty.tools.dotc.semanticdb.IntersectionType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.IntersectionType(__v))
+    def getUnionType: dotty.tools.dotc.semanticdb.UnionType = sealedValue.unionType.getOrElse(dotty.tools.dotc.semanticdb.UnionType.defaultInstance)
+    def withUnionType(__v: dotty.tools.dotc.semanticdb.UnionType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.UnionType(__v))
+    def getWithType: dotty.tools.dotc.semanticdb.WithType = sealedValue.withType.getOrElse(dotty.tools.dotc.semanticdb.WithType.defaultInstance)
+    def withWithType(__v: dotty.tools.dotc.semanticdb.WithType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.WithType(__v))
+    def getStructuralType: dotty.tools.dotc.semanticdb.StructuralType = sealedValue.structuralType.getOrElse(dotty.tools.dotc.semanticdb.StructuralType.defaultInstance)
+    def withStructuralType(__v: dotty.tools.dotc.semanticdb.StructuralType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.StructuralType(__v))
+    def getAnnotatedType: dotty.tools.dotc.semanticdb.AnnotatedType = sealedValue.annotatedType.getOrElse(dotty.tools.dotc.semanticdb.AnnotatedType.defaultInstance)
+    def withAnnotatedType(__v: dotty.tools.dotc.semanticdb.AnnotatedType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.AnnotatedType(__v))
+    def getExistentialType: dotty.tools.dotc.semanticdb.ExistentialType = sealedValue.existentialType.getOrElse(dotty.tools.dotc.semanticdb.ExistentialType.defaultInstance)
+    def withExistentialType(__v: dotty.tools.dotc.semanticdb.ExistentialType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ExistentialType(__v))
+    def getUniversalType: dotty.tools.dotc.semanticdb.UniversalType = sealedValue.universalType.getOrElse(dotty.tools.dotc.semanticdb.UniversalType.defaultInstance)
+    def withUniversalType(__v: dotty.tools.dotc.semanticdb.UniversalType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.UniversalType(__v))
+    def getByNameType: dotty.tools.dotc.semanticdb.ByNameType = sealedValue.byNameType.getOrElse(dotty.tools.dotc.semanticdb.ByNameType.defaultInstance)
+    def withByNameType(__v: dotty.tools.dotc.semanticdb.ByNameType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ByNameType(__v))
+    def getRepeatedType: dotty.tools.dotc.semanticdb.RepeatedType = sealedValue.repeatedType.getOrElse(dotty.tools.dotc.semanticdb.RepeatedType.defaultInstance)
+    def withRepeatedType(__v: dotty.tools.dotc.semanticdb.RepeatedType): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.RepeatedType(__v))
+    def clearSealedValue: TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty)
+    def withSealedValue(__v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue): TypeMessage = copy(sealedValue = __v)
+    
+    
+    
+    
+    def toType: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.Type.TypeTypeMapper.toCustom(this)
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Type])
+}
+
+// Companion of the generated `TypeMessage` wrapper for the SemanticDB `Type` oneof.
+// NOTE(generated): protoc-generated code (see the @@protoc_insertion_point marker
+// at the end of this object) — change the generator/protocol, not this file.
+object TypeMessage  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeMessage] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeMessage] = this
+  // Reads one TypeMessage from `_input__` until tag 0 is seen. If the same oneof
+  // field occurs more than once, the later bytes are read into the previously
+  // parsed value via `LiteParser.readMessage(_input__, _)` (the `fold` branch);
+  // unknown fields are skipped.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TypeMessage = {
+    var __sealedValue: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      // Tag layout: (fieldNumber << 3) | wireType. All cases below use wire type 2
+      // (length-delimited), e.g. 18 == (2 << 3) | 2 for field 2 (type_ref) and
+      // 162 == (20 << 3) | 2 for field 20 (single_type) — see *_FIELD_NUMBER below.
+      _tag__ match {
+        case 0 => _done__ = true
+        case 18 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.TypeRef(__sealedValue.typeRef.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeRef](_input__))(LiteParser.readMessage(_input__, _)))
+        case 162 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.SingleType(__sealedValue.singleType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.SingleType](_input__))(LiteParser.readMessage(_input__, _)))
+        case 170 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ThisType(__sealedValue.thisType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ThisType](_input__))(LiteParser.readMessage(_input__, _)))
+        case 178 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.SuperType(__sealedValue.superType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.SuperType](_input__))(LiteParser.readMessage(_input__, _)))
+        case 186 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ConstantType(__sealedValue.constantType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ConstantType](_input__))(LiteParser.readMessage(_input__, _)))
+        case 138 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.IntersectionType(__sealedValue.intersectionType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.IntersectionType](_input__))(LiteParser.readMessage(_input__, _)))
+        case 146 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.UnionType(__sealedValue.unionType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.UnionType](_input__))(LiteParser.readMessage(_input__, _)))
+        case 154 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.WithType(__sealedValue.withType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.WithType](_input__))(LiteParser.readMessage(_input__, _)))
+        case 58 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.StructuralType(__sealedValue.structuralType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.StructuralType](_input__))(LiteParser.readMessage(_input__, _)))
+        case 66 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.AnnotatedType(__sealedValue.annotatedType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.AnnotatedType](_input__))(LiteParser.readMessage(_input__, _)))
+        case 74 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ExistentialType(__sealedValue.existentialType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ExistentialType](_input__))(LiteParser.readMessage(_input__, _)))
+        case 82 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.UniversalType(__sealedValue.universalType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.UniversalType](_input__))(LiteParser.readMessage(_input__, _)))
+        case 106 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ByNameType(__sealedValue.byNameType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ByNameType](_input__))(LiteParser.readMessage(_input__, _)))
+        case 114 =>
+          __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.RepeatedType(__sealedValue.repeatedType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.RepeatedType](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.TypeMessage(
+        sealedValue = __sealedValue
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.TypeMessage(
+    sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty
+  )
+  // Oneof ADT: exactly one concrete case (or Empty) is active at a time.
+  // `number` is the protobuf field number of the active case; the `isXxx`/`xxx`
+  // accessors default to false/None here and are overridden by each case below.
+  sealed trait SealedValue  extends SemanticdbGeneratedOneof  derives CanEqual {
+    def isEmpty: _root_.scala.Boolean = false
+    def isDefined: _root_.scala.Boolean = true
+    def isTypeRef: _root_.scala.Boolean = false
+    def isSingleType: _root_.scala.Boolean = false
+    def isThisType: _root_.scala.Boolean = false
+    def isSuperType: _root_.scala.Boolean = false
+    def isConstantType: _root_.scala.Boolean = false
+    def isIntersectionType: _root_.scala.Boolean = false
+    def isUnionType: _root_.scala.Boolean = false
+    def isWithType: _root_.scala.Boolean = false
+    def isStructuralType: _root_.scala.Boolean = false
+    def isAnnotatedType: _root_.scala.Boolean = false
+    def isExistentialType: _root_.scala.Boolean = false
+    def isUniversalType: _root_.scala.Boolean = false
+    def isByNameType: _root_.scala.Boolean = false
+    def isRepeatedType: _root_.scala.Boolean = false
+    def typeRef: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeRef] = _root_.scala.None
+    def singleType: _root_.scala.Option[dotty.tools.dotc.semanticdb.SingleType] = _root_.scala.None
+    def thisType: _root_.scala.Option[dotty.tools.dotc.semanticdb.ThisType] = _root_.scala.None
+    def superType: _root_.scala.Option[dotty.tools.dotc.semanticdb.SuperType] = _root_.scala.None
+    def constantType: _root_.scala.Option[dotty.tools.dotc.semanticdb.ConstantType] = _root_.scala.None
+    def intersectionType: _root_.scala.Option[dotty.tools.dotc.semanticdb.IntersectionType] = _root_.scala.None
+    def unionType: _root_.scala.Option[dotty.tools.dotc.semanticdb.UnionType] = _root_.scala.None
+    def withType: _root_.scala.Option[dotty.tools.dotc.semanticdb.WithType] = _root_.scala.None
+    def structuralType: _root_.scala.Option[dotty.tools.dotc.semanticdb.StructuralType] = _root_.scala.None
+    def annotatedType: _root_.scala.Option[dotty.tools.dotc.semanticdb.AnnotatedType] = _root_.scala.None
+    def existentialType: _root_.scala.Option[dotty.tools.dotc.semanticdb.ExistentialType] = _root_.scala.None
+    def universalType: _root_.scala.Option[dotty.tools.dotc.semanticdb.UniversalType] = _root_.scala.None
+    def byNameType: _root_.scala.Option[dotty.tools.dotc.semanticdb.ByNameType] = _root_.scala.None
+    def repeatedType: _root_.scala.Option[dotty.tools.dotc.semanticdb.RepeatedType] = _root_.scala.None
+  }
+  object SealedValue {
+    // Sentinel for "no variant set"; accessing `value` is a programmer error.
+    @SerialVersionUID(0L)
+    case object Empty extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue {
+      type ValueType = _root_.scala.Nothing
+      override def isEmpty: _root_.scala.Boolean = true
+      override def isDefined: _root_.scala.Boolean = false
+      override def number: _root_.scala.Int = 0
+      override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value")
+    }
+  
+    @SerialVersionUID(0L)
+    final case class TypeRef(value: dotty.tools.dotc.semanticdb.TypeRef) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.TypeRef
+      override def isTypeRef: _root_.scala.Boolean = true
+      override def typeRef: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeRef] = Some(value)
+      override def number: _root_.scala.Int = 2
+    }
+    @SerialVersionUID(0L)
+    final case class SingleType(value: dotty.tools.dotc.semanticdb.SingleType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.SingleType
+      override def isSingleType: _root_.scala.Boolean = true
+      override def singleType: _root_.scala.Option[dotty.tools.dotc.semanticdb.SingleType] = Some(value)
+      override def number: _root_.scala.Int = 20
+    }
+    @SerialVersionUID(0L)
+    final case class ThisType(value: dotty.tools.dotc.semanticdb.ThisType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.ThisType
+      override def isThisType: _root_.scala.Boolean = true
+      override def thisType: _root_.scala.Option[dotty.tools.dotc.semanticdb.ThisType] = Some(value)
+      override def number: _root_.scala.Int = 21
+    }
+    @SerialVersionUID(0L)
+    final case class SuperType(value: dotty.tools.dotc.semanticdb.SuperType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.SuperType
+      override def isSuperType: _root_.scala.Boolean = true
+      override def superType: _root_.scala.Option[dotty.tools.dotc.semanticdb.SuperType] = Some(value)
+      override def number: _root_.scala.Int = 22
+    }
+    @SerialVersionUID(0L)
+    final case class ConstantType(value: dotty.tools.dotc.semanticdb.ConstantType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.ConstantType
+      override def isConstantType: _root_.scala.Boolean = true
+      override def constantType: _root_.scala.Option[dotty.tools.dotc.semanticdb.ConstantType] = Some(value)
+      override def number: _root_.scala.Int = 23
+    }
+    @SerialVersionUID(0L)
+    final case class IntersectionType(value: dotty.tools.dotc.semanticdb.IntersectionType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.IntersectionType
+      override def isIntersectionType: _root_.scala.Boolean = true
+      override def intersectionType: _root_.scala.Option[dotty.tools.dotc.semanticdb.IntersectionType] = Some(value)
+      override def number: _root_.scala.Int = 17
+    }
+    @SerialVersionUID(0L)
+    final case class UnionType(value: dotty.tools.dotc.semanticdb.UnionType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.UnionType
+      override def isUnionType: _root_.scala.Boolean = true
+      override def unionType: _root_.scala.Option[dotty.tools.dotc.semanticdb.UnionType] = Some(value)
+      override def number: _root_.scala.Int = 18
+    }
+    @SerialVersionUID(0L)
+    final case class WithType(value: dotty.tools.dotc.semanticdb.WithType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.WithType
+      override def isWithType: _root_.scala.Boolean = true
+      override def withType: _root_.scala.Option[dotty.tools.dotc.semanticdb.WithType] = Some(value)
+      override def number: _root_.scala.Int = 19
+    }
+    @SerialVersionUID(0L)
+    final case class StructuralType(value: dotty.tools.dotc.semanticdb.StructuralType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.StructuralType
+      override def isStructuralType: _root_.scala.Boolean = true
+      override def structuralType: _root_.scala.Option[dotty.tools.dotc.semanticdb.StructuralType] = Some(value)
+      override def number: _root_.scala.Int = 7
+    }
+    @SerialVersionUID(0L)
+    final case class AnnotatedType(value: dotty.tools.dotc.semanticdb.AnnotatedType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.AnnotatedType
+      override def isAnnotatedType: _root_.scala.Boolean = true
+      override def annotatedType: _root_.scala.Option[dotty.tools.dotc.semanticdb.AnnotatedType] = Some(value)
+      override def number: _root_.scala.Int = 8
+    }
+    @SerialVersionUID(0L)
+    final case class ExistentialType(value: dotty.tools.dotc.semanticdb.ExistentialType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.ExistentialType
+      override def isExistentialType: _root_.scala.Boolean = true
+      override def existentialType: _root_.scala.Option[dotty.tools.dotc.semanticdb.ExistentialType] = Some(value)
+      override def number: _root_.scala.Int = 9
+    }
+    @SerialVersionUID(0L)
+    final case class UniversalType(value: dotty.tools.dotc.semanticdb.UniversalType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.UniversalType
+      override def isUniversalType: _root_.scala.Boolean = true
+      override def universalType: _root_.scala.Option[dotty.tools.dotc.semanticdb.UniversalType] = Some(value)
+      override def number: _root_.scala.Int = 10
+    }
+    @SerialVersionUID(0L)
+    final case class ByNameType(value: dotty.tools.dotc.semanticdb.ByNameType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.ByNameType
+      override def isByNameType: _root_.scala.Boolean = true
+      override def byNameType: _root_.scala.Option[dotty.tools.dotc.semanticdb.ByNameType] = Some(value)
+      override def number: _root_.scala.Int = 13
+    }
+    @SerialVersionUID(0L)
+    final case class RepeatedType(value: dotty.tools.dotc.semanticdb.RepeatedType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue  derives CanEqual {
+      type ValueType = dotty.tools.dotc.semanticdb.RepeatedType
+      override def isRepeatedType: _root_.scala.Boolean = true
+      override def repeatedType: _root_.scala.Option[dotty.tools.dotc.semanticdb.RepeatedType] = Some(value)
+      override def number: _root_.scala.Int = 14
+    }
+  }
+  // Protobuf field numbers of each oneof case; these match the `number`
+  // overrides above and the tag arithmetic in `parseFrom`.
+  final val TYPE_REF_FIELD_NUMBER = 2
+  final val SINGLE_TYPE_FIELD_NUMBER = 20
+  final val THIS_TYPE_FIELD_NUMBER = 21
+  final val SUPER_TYPE_FIELD_NUMBER = 22
+  final val CONSTANT_TYPE_FIELD_NUMBER = 23
+  final val INTERSECTION_TYPE_FIELD_NUMBER = 17
+  final val UNION_TYPE_FIELD_NUMBER = 18
+  final val WITH_TYPE_FIELD_NUMBER = 19
+  final val STRUCTURAL_TYPE_FIELD_NUMBER = 7
+  final val ANNOTATED_TYPE_FIELD_NUMBER = 8
+  final val EXISTENTIAL_TYPE_FIELD_NUMBER = 9
+  final val UNIVERSAL_TYPE_FIELD_NUMBER = 10
+  final val BY_NAME_TYPE_FIELD_NUMBER = 13
+  final val REPEATED_TYPE_FIELD_NUMBER = 14
+  def of(
+    sealedValue: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue
+  ): _root_.dotty.tools.dotc.semanticdb.TypeMessage = _root_.dotty.tools.dotc.semanticdb.TypeMessage(
+    sealedValue
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Type])
+}
+
+// Generated SemanticDB message `TypeRef` (a `Type.NonEmpty` variant) with fields
+// prefix (field 1, stored via a type mapper), symbol (field 2, string) and
+// typeArguments (field 3, repeated). NOTE(generated): protoc-generated code —
+// do not edit by hand.
+@SerialVersionUID(0L)
+final case class TypeRef(
+    prefix: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.TypeRef._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
+    symbol: _root_.scala.Predef.String = "",
+    typeArguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty
+    )  extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    // Cached wire size; 0 doubles as "not yet computed", so a genuinely
+    // zero-byte message is recomputed on every `serializedSize` call.
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.TypeRef._typemapper_prefix.toBase(prefix)
+        if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      
+      {
+        val __value = symbol
+        if (!__value.isEmpty) {
+          __size += SemanticdbOutputStream.computeStringSize(2, __value)
+        }
+      };
+      typeArguments.foreach { __item =>
+        val __value = dotty.tools.dotc.semanticdb.TypeRef._typemapper_typeArguments.toBase(__item)
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      }
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    // Serializes in field order; fields equal to their default (default prefix,
+    // empty symbol) are skipped, mirroring the size computation above.
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = dotty.tools.dotc.semanticdb.TypeRef._typemapper_prefix.toBase(prefix)
+        if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          _output__.writeTag(1, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+      {
+        val __v = symbol
+        if (!__v.isEmpty) {
+          _output__.writeString(2, __v)
+        }
+      };
+      typeArguments.foreach { __v =>
+        val __m = dotty.tools.dotc.semanticdb.TypeRef._typemapper_typeArguments.toBase(__v)
+        _output__.writeTag(3, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    // Builder-style copies (immutable; each returns a new TypeRef).
+    def withPrefix(__v: dotty.tools.dotc.semanticdb.Type): TypeRef = copy(prefix = __v)
+    def withSymbol(__v: _root_.scala.Predef.String): TypeRef = copy(symbol = __v)
+    def clearTypeArguments = copy(typeArguments = _root_.scala.Seq.empty)
+    def addTypeArguments(__vs: dotty.tools.dotc.semanticdb.Type*): TypeRef = addAllTypeArguments(__vs)
+    def addAllTypeArguments(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): TypeRef = copy(typeArguments = typeArguments ++ __vs)
+    def withTypeArguments(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): TypeRef = copy(typeArguments = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TypeRef])
+}
+
+// Companion/parser for the generated `TypeRef` message.
+// NOTE(generated): protoc-generated code — do not edit by hand.
+object TypeRef  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeRef] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeRef] = this
+  // Tags: 10 == (1 << 3) | 2 (prefix), 18 == (2 << 3) | 2 (symbol),
+  // 26 == (3 << 3) | 2 (repeated type_arguments); unknown fields are skipped.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TypeRef = {
+    var __prefix: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None
+    var __symbol: _root_.scala.Predef.String = ""
+    val __typeArguments: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type]
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __prefix = _root_.scala.Some(__prefix.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case 18 =>
+          __symbol = _input__.readStringRequireUtf8()
+        case 26 =>
+          __typeArguments += dotty.tools.dotc.semanticdb.TypeRef._typemapper_typeArguments.toCustom(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.TypeRef(
+        prefix = dotty.tools.dotc.semanticdb.TypeRef._typemapper_prefix.toCustom(__prefix.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)),
+        symbol = __symbol,
+        typeArguments = __typeArguments.result()
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.TypeRef(
+    prefix = dotty.tools.dotc.semanticdb.TypeRef._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
+    symbol = "",
+    typeArguments = _root_.scala.Seq.empty
+  )
+  final val PREFIX_FIELD_NUMBER = 1
+  final val SYMBOL_FIELD_NUMBER = 2
+  final val TYPE_ARGUMENTS_FIELD_NUMBER = 3
+  // Mappers bridging the wire representation (TypeMessage) and the custom `Type` ADT.
+  @transient @sharable
+  private[semanticdb] val _typemapper_prefix: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  @transient @sharable
+  private[semanticdb] val _typemapper_typeArguments: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    prefix: dotty.tools.dotc.semanticdb.Type,
+    symbol: _root_.scala.Predef.String,
+    typeArguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]
+  ): _root_.dotty.tools.dotc.semanticdb.TypeRef = _root_.dotty.tools.dotc.semanticdb.TypeRef(
+    prefix,
+    symbol,
+    typeArguments
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeRef])
+}
+
+// Generated SemanticDB message `SingleType` (a `Type.NonEmpty` variant) with
+// fields prefix (field 1, via type mapper) and symbol (field 2, string).
+// NOTE(generated): protoc-generated code — do not edit by hand.
+@SerialVersionUID(0L)
+final case class SingleType(
+    prefix: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.SingleType._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
+    symbol: _root_.scala.Predef.String = ""
+    )  extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    // Cached wire size; 0 doubles as "not yet computed".
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.SingleType._typemapper_prefix.toBase(prefix)
+        if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      
+      {
+        val __value = symbol
+        if (!__value.isEmpty) {
+          __size += SemanticdbOutputStream.computeStringSize(2, __value)
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    // Serializes in field order; default-valued fields are skipped.
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = dotty.tools.dotc.semanticdb.SingleType._typemapper_prefix.toBase(prefix)
+        if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          _output__.writeTag(1, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+      {
+        val __v = symbol
+        if (!__v.isEmpty) {
+          _output__.writeString(2, __v)
+        }
+      };
+    }
+    def withPrefix(__v: dotty.tools.dotc.semanticdb.Type): SingleType = copy(prefix = __v)
+    def withSymbol(__v: _root_.scala.Predef.String): SingleType = copy(symbol = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SingleType])
+}
+
+// Companion/parser for the generated `SingleType` message.
+// NOTE(generated): protoc-generated code — do not edit by hand.
+object SingleType  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SingleType] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SingleType] = this
+  // Tags: 10 == (1 << 3) | 2 (prefix), 18 == (2 << 3) | 2 (symbol);
+  // unknown fields are skipped.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SingleType = {
+    var __prefix: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None
+    var __symbol: _root_.scala.Predef.String = ""
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __prefix = _root_.scala.Some(__prefix.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case 18 =>
+          __symbol = _input__.readStringRequireUtf8()
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.SingleType(
+        prefix = dotty.tools.dotc.semanticdb.SingleType._typemapper_prefix.toCustom(__prefix.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)),
+        symbol = __symbol
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.SingleType(
+    prefix = dotty.tools.dotc.semanticdb.SingleType._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
+    symbol = ""
+  )
+  final val PREFIX_FIELD_NUMBER = 1
+  final val SYMBOL_FIELD_NUMBER = 2
+  // Mapper bridging the wire representation (TypeMessage) and the custom `Type` ADT.
+  @transient @sharable
+  private[semanticdb] val _typemapper_prefix: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    prefix: dotty.tools.dotc.semanticdb.Type,
+    symbol: _root_.scala.Predef.String
+  ): _root_.dotty.tools.dotc.semanticdb.SingleType = _root_.dotty.tools.dotc.semanticdb.SingleType(
+    prefix,
+    symbol
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SingleType])
+}
+
+// Generated SemanticDB message `ThisType` (a `Type.NonEmpty` variant) with a
+// single field: symbol (field 1, string).
+// NOTE(generated): protoc-generated code — do not edit by hand.
+@SerialVersionUID(0L)
+final case class ThisType(
+    symbol: _root_.scala.Predef.String = ""
+    )  extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    // Cached wire size; 0 doubles as "not yet computed".
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = symbol
+        if (!__value.isEmpty) {
+          __size += SemanticdbOutputStream.computeStringSize(1, __value)
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    // Serializes `symbol`; the empty-string default is skipped.
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = symbol
+        if (!__v.isEmpty) {
+          _output__.writeString(1, __v)
+        }
+      };
+    }
+    def withSymbol(__v: _root_.scala.Predef.String): ThisType = copy(symbol = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ThisType])
+}
+
+// Companion/parser for the generated `ThisType` message.
+// NOTE(generated): protoc-generated code — do not edit by hand.
+object ThisType  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ThisType] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ThisType] = this
+  // Tag 10 == (1 << 3) | 2 (symbol); unknown fields are skipped.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ThisType = {
+    var __symbol: _root_.scala.Predef.String = ""
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __symbol = _input__.readStringRequireUtf8()
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.ThisType(
+        symbol = __symbol
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ThisType(
+    symbol = ""
+  )
+  final val SYMBOL_FIELD_NUMBER = 1
+  def of(
+    symbol: _root_.scala.Predef.String
+  ): _root_.dotty.tools.dotc.semanticdb.ThisType = _root_.dotty.tools.dotc.semanticdb.ThisType(
+    symbol
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ThisType])
+}
+
+// Generated SemanticDB message `SuperType` (a `Type.NonEmpty` variant) with
+// fields prefix (field 1, via type mapper) and symbol (field 2, string).
+// NOTE(generated): protoc-generated code — do not edit by hand.
+@SerialVersionUID(0L)
+final case class SuperType(
+    prefix: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.SuperType._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
+    symbol: _root_.scala.Predef.String = ""
+    )  extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    // Cached wire size; 0 doubles as "not yet computed".
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.SuperType._typemapper_prefix.toBase(prefix)
+        if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      
+      {
+        val __value = symbol
+        if (!__value.isEmpty) {
+          __size += SemanticdbOutputStream.computeStringSize(2, __value)
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    // Serializes in field order; default-valued fields are skipped.
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = dotty.tools.dotc.semanticdb.SuperType._typemapper_prefix.toBase(prefix)
+        if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          _output__.writeTag(1, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+      {
+        val __v = symbol
+        if (!__v.isEmpty) {
+          _output__.writeString(2, __v)
+        }
+      };
+    }
+    def withPrefix(__v: dotty.tools.dotc.semanticdb.Type): SuperType = copy(prefix = __v)
+    def withSymbol(__v: _root_.scala.Predef.String): SuperType = copy(symbol = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SuperType])
+}
+
+// Companion for `SuperType` (generated code). Holds the decoder, default instance,
+// field-number constants and the Type<->TypeMessage mapper used by `prefix`.
+object SuperType  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SuperType] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SuperType] = this
+  // Tag 10 = field 1 (prefix, message); tag 18 = field 2 (symbol, string);
+  // repeated occurrences of `prefix` are merged via the fold.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SuperType = {
+    var __prefix: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None
+    var __symbol: _root_.scala.Predef.String = ""
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __prefix = _root_.scala.Some(__prefix.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case 18 =>
+          __symbol = _input__.readStringRequireUtf8()
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.SuperType(
+        prefix = dotty.tools.dotc.semanticdb.SuperType._typemapper_prefix.toCustom(__prefix.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)),
+        symbol = __symbol
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.SuperType(
+    prefix = dotty.tools.dotc.semanticdb.SuperType._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
+    symbol = ""
+  )
+  final val PREFIX_FIELD_NUMBER = 1
+  final val SYMBOL_FIELD_NUMBER = 2
+  @transient @sharable
+  private[semanticdb] val _typemapper_prefix: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    prefix: dotty.tools.dotc.semanticdb.Type,
+    symbol: _root_.scala.Predef.String
+  ): _root_.dotty.tools.dotc.semanticdb.SuperType = _root_.dotty.tools.dotc.semanticdb.SuperType(
+    prefix,
+    symbol
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SuperType])
+}
+
+// Generated SemanticDB message with a single field: `constant` (field 1,
+// length-delimited sub-message). Omitted from the wire when equal to the default.
+@SerialVersionUID(0L)
+final case class ConstantType(
+    constant: dotty.tools.dotc.semanticdb.Constant = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance)
+    )  extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    // Cached serialized size; 0 acts as the "not yet computed" sentinel (see serializedSize).
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toBase(constant)
+        if (__value != dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toBase(constant)
+        if (__v != dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance) {
+          _output__.writeTag(1, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+    }
+    def withConstant(__v: dotty.tools.dotc.semanticdb.Constant): ConstantType = copy(constant = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ConstantType])
+}
+
+// Companion for `ConstantType` (generated code): decoder, default instance,
+// field number and the Constant<->ConstantMessage mapper.
+object ConstantType  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ConstantType] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ConstantType] = this
+  // Tag 10 = field 1 (constant, message); repeats are merged via the fold.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ConstantType = {
+    var __constant: _root_.scala.Option[dotty.tools.dotc.semanticdb.ConstantMessage] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __constant = _root_.scala.Some(__constant.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ConstantMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.ConstantType(
+        constant = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toCustom(__constant.getOrElse(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance))
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ConstantType(
+    constant = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance)
+  )
+  final val CONSTANT_FIELD_NUMBER = 1
+  @transient @sharable
+  private[semanticdb] val _typemapper_constant: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.ConstantMessage, dotty.tools.dotc.semanticdb.Constant] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.ConstantMessage, dotty.tools.dotc.semanticdb.Constant]]
+  def of(
+    constant: dotty.tools.dotc.semanticdb.Constant
+  ): _root_.dotty.tools.dotc.semanticdb.ConstantType = _root_.dotty.tools.dotc.semanticdb.ConstantType(
+    constant
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ConstantType])
+}
+
+// Generated SemanticDB message holding a repeated field: `types` (field 1, each
+// element a length-delimited sub-message).
+@SerialVersionUID(0L)
+final case class IntersectionType(
+    types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty
+    )  extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    // Cached serialized size; 0 acts as the "not yet computed" sentinel (see serializedSize).
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      types.foreach { __item =>
+        val __value = dotty.tools.dotc.semanticdb.IntersectionType._typemapper_types.toBase(__item)
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      }
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      types.foreach { __v =>
+        val __m = dotty.tools.dotc.semanticdb.IntersectionType._typemapper_types.toBase(__v)
+        _output__.writeTag(1, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def clearTypes = copy(types = _root_.scala.Seq.empty)
+    def addTypes(__vs: dotty.tools.dotc.semanticdb.Type*): IntersectionType = addAllTypes(__vs)
+    def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): IntersectionType = copy(types = types ++ __vs)
+    def withTypes(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): IntersectionType = copy(types = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.IntersectionType])
+}
+
+// Companion for `IntersectionType` (generated code): decoder, default instance,
+// field number and the Type<->TypeMessage mapper for the repeated `types` field.
+object IntersectionType  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IntersectionType] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IntersectionType] = this
+  // Tag 10 = field 1 (types, repeated message); each occurrence appends one element.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.IntersectionType = {
+    val __types: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type]
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __types += dotty.tools.dotc.semanticdb.IntersectionType._typemapper_types.toCustom(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.IntersectionType(
+        types = __types.result()
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.IntersectionType(
+    types = _root_.scala.Seq.empty
+  )
+  final val TYPES_FIELD_NUMBER = 1
+  @transient @sharable
+  private[semanticdb] val _typemapper_types: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]
+  ): _root_.dotty.tools.dotc.semanticdb.IntersectionType = _root_.dotty.tools.dotc.semanticdb.IntersectionType(
+    types
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IntersectionType])
+}
+
+// Generated SemanticDB message holding a repeated field: `types` (field 1, each
+// element a length-delimited sub-message). Structurally identical to IntersectionType.
+@SerialVersionUID(0L)
+final case class UnionType(
+    types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty
+    )  extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    // Cached serialized size; 0 acts as the "not yet computed" sentinel (see serializedSize).
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      types.foreach { __item =>
+        val __value = dotty.tools.dotc.semanticdb.UnionType._typemapper_types.toBase(__item)
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      }
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      types.foreach { __v =>
+        val __m = dotty.tools.dotc.semanticdb.UnionType._typemapper_types.toBase(__v)
+        _output__.writeTag(1, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def clearTypes = copy(types = _root_.scala.Seq.empty)
+    def addTypes(__vs: dotty.tools.dotc.semanticdb.Type*): UnionType = addAllTypes(__vs)
+    def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): UnionType = copy(types = types ++ __vs)
+    def withTypes(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): UnionType = copy(types = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.UnionType])
+}
+
+// Companion for `UnionType` (generated code): decoder, default instance,
+// field number and the Type<->TypeMessage mapper for the repeated `types` field.
+object UnionType  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UnionType] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UnionType] = this
+  // Tag 10 = field 1 (types, repeated message); each occurrence appends one element.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.UnionType = {
+    val __types: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type]
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __types += dotty.tools.dotc.semanticdb.UnionType._typemapper_types.toCustom(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.UnionType(
+        types = __types.result()
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.UnionType(
+    types = _root_.scala.Seq.empty
+  )
+  final val TYPES_FIELD_NUMBER = 1
+  @transient @sharable
+  private[semanticdb] val _typemapper_types: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]
+  ): _root_.dotty.tools.dotc.semanticdb.UnionType = _root_.dotty.tools.dotc.semanticdb.UnionType(
+    types
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UnionType])
+}
+
+// Generated SemanticDB message holding a repeated field: `types` (field 1, each
+// element a length-delimited sub-message). Structurally identical to UnionType.
+@SerialVersionUID(0L)
+final case class WithType(
+    types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty
+    )  extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    // Cached serialized size; 0 acts as the "not yet computed" sentinel (see serializedSize).
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      types.foreach { __item =>
+        val __value = dotty.tools.dotc.semanticdb.WithType._typemapper_types.toBase(__item)
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      }
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      types.foreach { __v =>
+        val __m = dotty.tools.dotc.semanticdb.WithType._typemapper_types.toBase(__v)
+        _output__.writeTag(1, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def clearTypes = copy(types = _root_.scala.Seq.empty)
+    def addTypes(__vs: dotty.tools.dotc.semanticdb.Type*): WithType = addAllTypes(__vs)
+    def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): WithType = copy(types = types ++ __vs)
+    def withTypes(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]): WithType = copy(types = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.WithType])
+}
+
+// Companion for `WithType` (generated code): decoder, default instance,
+// field number and the Type<->TypeMessage mapper for the repeated `types` field.
+object WithType  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.WithType] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.WithType] = this
+  // Tag 10 = field 1 (types, repeated message); each occurrence appends one element.
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.WithType = {
+    val __types: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type]
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __types += dotty.tools.dotc.semanticdb.WithType._typemapper_types.toCustom(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.WithType(
+        types = __types.result()
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.WithType(
+    types = _root_.scala.Seq.empty
+  )
+  final val TYPES_FIELD_NUMBER = 1
+  @transient @sharable
+  private[semanticdb] val _typemapper_types: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type]
+  ): _root_.dotty.tools.dotc.semanticdb.WithType = _root_.dotty.tools.dotc.semanticdb.WithType(
+    types
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.WithType])
+}
+
+// Generated SemanticDB message with `tpe` (field 4, message) and an optional
+// `declarations` scope (field 5, message). Note the non-contiguous field numbers —
+// they come from the .proto schema and must not be "fixed".
+@SerialVersionUID(0L)
+final case class StructuralType(
+    tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.StructuralType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
+    declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None
+    )  extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    // Cached serialized size; 0 acts as the "not yet computed" sentinel (see serializedSize).
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.StructuralType._typemapper_tpe.toBase(tpe)
+        if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      if (declarations.isDefined) {
+        val __value = declarations.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = dotty.tools.dotc.semanticdb.StructuralType._typemapper_tpe.toBase(tpe)
+        if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          _output__.writeTag(4, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+      declarations.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(5, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def withTpe(__v: dotty.tools.dotc.semanticdb.Type): StructuralType = copy(tpe = __v)
+    def getDeclarations: dotty.tools.dotc.semanticdb.Scope = declarations.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance)
+    def clearDeclarations: StructuralType = copy(declarations = _root_.scala.None)
+    def withDeclarations(__v: dotty.tools.dotc.semanticdb.Scope): StructuralType = copy(declarations = Option(__v))
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.StructuralType])
+}
+
+// Companion for `StructuralType` (generated code): decoder, default instance,
+// field-number constants and the Type<->TypeMessage mapper for `tpe`.
+object StructuralType  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.StructuralType] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.StructuralType] = this
+  // Tag 34 = field 4 (tpe, message); tag 42 = field 5 (declarations, message).
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.StructuralType = {
+    var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None
+    var __declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 34 =>
+          __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case 42 =>
+          __declarations = Option(__declarations.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.StructuralType(
+        tpe = dotty.tools.dotc.semanticdb.StructuralType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)),
+        declarations = __declarations
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.StructuralType(
+    tpe = dotty.tools.dotc.semanticdb.StructuralType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
+    declarations = _root_.scala.None
+  )
+  final val TPE_FIELD_NUMBER = 4
+  final val DECLARATIONS_FIELD_NUMBER = 5
+  @transient @sharable
+  private[semanticdb] val _typemapper_tpe: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    tpe: dotty.tools.dotc.semanticdb.Type,
+    declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope]
+  ): _root_.dotty.tools.dotc.semanticdb.StructuralType = _root_.dotty.tools.dotc.semanticdb.StructuralType(
+    tpe,
+    declarations
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.StructuralType])
+}
+
+// Generated SemanticDB message with a repeated `annotations` field (field 3) and a
+// `tpe` field (field 1). writeTo emits `tpe` (field 1) before `annotations`
+// (field 3), keeping field numbers ascending on the wire.
+@SerialVersionUID(0L)
+final case class AnnotatedType(
+    annotations: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Annotation] = _root_.scala.Seq.empty,
+    tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+    )  extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    // Cached serialized size; 0 acts as the "not yet computed" sentinel (see serializedSize).
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      annotations.foreach { __item =>
+        val __value = __item
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      }
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toBase(tpe)
+        if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toBase(tpe)
+        if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          _output__.writeTag(1, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+      annotations.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(3, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def clearAnnotations = copy(annotations = _root_.scala.Seq.empty)
+    def addAnnotations(__vs: dotty.tools.dotc.semanticdb.Annotation*): AnnotatedType = addAllAnnotations(__vs)
+    def addAllAnnotations(__vs: Iterable[dotty.tools.dotc.semanticdb.Annotation]): AnnotatedType = copy(annotations = annotations ++ __vs)
+    def withAnnotations(__v: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Annotation]): AnnotatedType = copy(annotations = __v)
+    def withTpe(__v: dotty.tools.dotc.semanticdb.Type): AnnotatedType = copy(tpe = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.AnnotatedType])
+}
+
+// Companion for `AnnotatedType` (generated code): decoder, default instance,
+// field-number constants and the Type<->TypeMessage mapper for `tpe`.
+object AnnotatedType  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.AnnotatedType] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.AnnotatedType] = this
+  // Tag 26 = field 3 (annotations, repeated message); tag 10 = field 1 (tpe, message).
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.AnnotatedType = {
+    val __annotations: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Annotation] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Annotation]
+    var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 26 =>
+          __annotations += LiteParser.readMessage[dotty.tools.dotc.semanticdb.Annotation](_input__)
+        case 10 =>
+          __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.AnnotatedType(
+        annotations = __annotations.result(),
+        tpe = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.AnnotatedType(
+    annotations = _root_.scala.Seq.empty,
+    tpe = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+  )
+  final val ANNOTATIONS_FIELD_NUMBER = 3
+  final val TPE_FIELD_NUMBER = 1
+  @transient @sharable
+  private[semanticdb] val _typemapper_tpe: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    annotations: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Annotation],
+    tpe: dotty.tools.dotc.semanticdb.Type
+  ): _root_.dotty.tools.dotc.semanticdb.AnnotatedType = _root_.dotty.tools.dotc.semanticdb.AnnotatedType(
+    annotations,
+    tpe
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.AnnotatedType])
+}
+
+// Generated SemanticDB message with `tpe` (field 1, message) and an optional
+// `declarations` scope (field 3, message).
+@SerialVersionUID(0L)
+final case class ExistentialType(
+    tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.ExistentialType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
+    declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None
+    )  extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    // Cached serialized size; 0 acts as the "not yet computed" sentinel (see serializedSize).
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.ExistentialType._typemapper_tpe.toBase(tpe)
+        if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      if (declarations.isDefined) {
+        val __value = declarations.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = dotty.tools.dotc.semanticdb.ExistentialType._typemapper_tpe.toBase(tpe)
+        if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          _output__.writeTag(1, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+      declarations.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(3, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def withTpe(__v: dotty.tools.dotc.semanticdb.Type): ExistentialType = copy(tpe = __v)
+    def getDeclarations: dotty.tools.dotc.semanticdb.Scope = declarations.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance)
+    def clearDeclarations: ExistentialType = copy(declarations = _root_.scala.None)
+    def withDeclarations(__v: dotty.tools.dotc.semanticdb.Scope): ExistentialType = copy(declarations = Option(__v))
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ExistentialType])
+}
+
+// Companion for `ExistentialType` (generated code): decoder, default instance,
+// field-number constants and the Type<->TypeMessage mapper for `tpe`.
+object ExistentialType  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ExistentialType] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ExistentialType] = this
+  // Tag 10 = field 1 (tpe, message); tag 26 = field 3 (declarations, message).
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ExistentialType = {
+    var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None
+    var __declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case 26 =>
+          __declarations = Option(__declarations.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.ExistentialType(
+        tpe = dotty.tools.dotc.semanticdb.ExistentialType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)),
+        declarations = __declarations
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ExistentialType(
+    tpe = dotty.tools.dotc.semanticdb.ExistentialType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance),
+    declarations = _root_.scala.None
+  )
+  final val TPE_FIELD_NUMBER = 1
+  final val DECLARATIONS_FIELD_NUMBER = 3
+  @transient @sharable
+  private[semanticdb] val _typemapper_tpe: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    tpe: dotty.tools.dotc.semanticdb.Type,
+    declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope]
+  ): _root_.dotty.tools.dotc.semanticdb.ExistentialType = _root_.dotty.tools.dotc.semanticdb.ExistentialType(
+    tpe,
+    declarations
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ExistentialType])
+}
+
+@SerialVersionUID(0L)
+final case class UniversalType(
+    typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None,
+    tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+    )  extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      if (typeParameters.isDefined) {
+        val __value = typeParameters.get
+        __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+      };
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toBase(tpe)
+        if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toBase(tpe)
+        if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          _output__.writeTag(2, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+      typeParameters.foreach { __v =>
+        val __m = __v
+        _output__.writeTag(3, 2)
+        _output__.writeUInt32NoTag(__m.serializedSize)
+        __m.writeTo(_output__)
+      };
+    }
+    def getTypeParameters: dotty.tools.dotc.semanticdb.Scope = typeParameters.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance)
+    def clearTypeParameters: UniversalType = copy(typeParameters = _root_.scala.None)
+    def withTypeParameters(__v: dotty.tools.dotc.semanticdb.Scope): UniversalType = copy(typeParameters = Option(__v))
+    def withTpe(__v: dotty.tools.dotc.semanticdb.Type): UniversalType = copy(tpe = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.UniversalType])
+}
+
+object UniversalType  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UniversalType] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UniversalType] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.UniversalType = {
+    var __typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None
+    var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 26 =>
+          __typeParameters = Option(__typeParameters.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__))(LiteParser.readMessage(_input__, _)))
+        case 18 =>
+          __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.UniversalType(
+        typeParameters = __typeParameters,
+        tpe = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.UniversalType(
+    typeParameters = _root_.scala.None,
+    tpe = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+  )
+  final val TYPE_PARAMETERS_FIELD_NUMBER = 3
+  final val TPE_FIELD_NUMBER = 2
+  @transient @sharable
+  private[semanticdb] val _typemapper_tpe: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope],
+    tpe: dotty.tools.dotc.semanticdb.Type
+  ): _root_.dotty.tools.dotc.semanticdb.UniversalType = _root_.dotty.tools.dotc.semanticdb.UniversalType(
+    typeParameters,
+    tpe
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UniversalType])
+}
+
+@SerialVersionUID(0L)
+final case class ByNameType(
+    tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+    )  extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toBase(tpe)
+        if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toBase(tpe)
+        if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          _output__.writeTag(1, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+    }
+    def withTpe(__v: dotty.tools.dotc.semanticdb.Type): ByNameType = copy(tpe = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ByNameType])
+}
+
+object ByNameType  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ByNameType] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ByNameType] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ByNameType = {
+    var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.ByNameType(
+        tpe = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.ByNameType(
+    tpe = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+  )
+  final val TPE_FIELD_NUMBER = 1
+  @transient @sharable
+  private[semanticdb] val _typemapper_tpe: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    tpe: dotty.tools.dotc.semanticdb.Type
+  ): _root_.dotty.tools.dotc.semanticdb.ByNameType = _root_.dotty.tools.dotc.semanticdb.ByNameType(
+    tpe
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ByNameType])
+}
+
+@SerialVersionUID(0L)
+final case class RepeatedType(
+    tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+    )  extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage  derives CanEqual {
+    @transient @sharable
+    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
+    private[this] def __computeSerializedValue(): _root_.scala.Int = {
+      var __size = 0
+      
+      {
+        val __value = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toBase(tpe)
+        if (__value != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
+        }
+      };
+      __size
+    }
+    override def serializedSize: _root_.scala.Int = {
+      var read = __serializedSizeCachedValue
+      if (read == 0) {
+        read = __computeSerializedValue()
+        __serializedSizeCachedValue = read
+      }
+      read
+    }
+    def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = {
+      {
+        val __v = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toBase(tpe)
+        if (__v != dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) {
+          _output__.writeTag(1, 2)
+          _output__.writeUInt32NoTag(__v.serializedSize)
+          __v.writeTo(_output__)
+        }
+      };
+    }
+    def withTpe(__v: dotty.tools.dotc.semanticdb.Type): RepeatedType = copy(tpe = __v)
+    
+    
+    
+    
+    // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.RepeatedType])
+}
+
+object RepeatedType  extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.RepeatedType] {
+  implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.RepeatedType] = this
+  def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.RepeatedType = {
+    var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None
+    var _done__ = false
+    while (!_done__) {
+      val _tag__ = _input__.readTag()
+      _tag__ match {
+        case 0 => _done__ = true
+        case 10 =>
+          __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _)))
+        case tag => _input__.skipField(tag)
+      }
+    }
+    dotty.tools.dotc.semanticdb.RepeatedType(
+        tpe = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance))
+    )
+  }
+  
+  
+  
+  
+  
+  
+  lazy val defaultInstance = dotty.tools.dotc.semanticdb.RepeatedType(
+    tpe = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)
+  )
+  final val TPE_FIELD_NUMBER = 1
+  @transient @sharable
+  private[semanticdb] val _typemapper_tpe: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = implicitly[SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]]
+  def of(
+    tpe: dotty.tools.dotc.semanticdb.Type
+  ): _root_.dotty.tools.dotc.semanticdb.RepeatedType = _root_.dotty.tools.dotc.semanticdb.RepeatedType(
+    tpe
+  )
+  // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.RepeatedType])
+}
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala b/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala
new file mode 100644
index 000000000000..963a153388a3
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala
@@ -0,0 +1,444 @@
+package dotty.tools
+package dotc
+package semanticdb
+
+import core.Symbols._
+import core.Contexts.Context
+import core.Types._
+import core.Annotations.Annotation
+import core.Flags
+import core.Names.Name
+import core.StdNames.tpnme
+import ast.tpd._
+import scala.util.chaining.scalaUtilChainingOps
+
+import collection.mutable
+
+import dotty.tools.dotc.{semanticdb => s}
+import Scala3.{FakeSymbol, SemanticSymbol, WildcardTypeSymbol, TypeParamRefSymbol, TermParamRefSymbol, RefinementSymbol}
+
+class TypeOps:
+  import SymbolScopeOps._
+  import Scala3.given
+  private val paramRefSymtab = mutable.Map[(LambdaType, Name), Symbol]()
+  private val refinementSymtab = mutable.Map[(RefinedType, Name), Symbol]()
+
+  // save generated fake symbols so we can insert them into symbols section of SemanticDB
+  val fakeSymbols = mutable.Set[FakeSymbol]()
+  given typeOps: TypeOps = this
+
+  extension [T <: LambdaType | RefinedType](symtab: mutable.Map[(T, Name), Symbol])
+    private def lookup(
+      binder: T,
+      name: Name,
+    )(using Context): Option[Symbol] =
+      symtab.get((binder, name))
+
+  extension [T <: LambdaType](symtab: mutable.Map[(T, Name), Symbol])
+    private def lookupOrErr(
+      binder: T,
+      name: Name,
+      parent: Symbol,
+    )(using Context): Option[Symbol] =
+      // In case refinement or type param cannot be accessed from traverser and
+      // no symbols are registered to the symbol table, fall back to Type.member
+      symtab.lookup(binder, name) match
+        case found @ Some(_) => found
+        case None =>
+          val member = binder.member(name).symbol
+          if !member.exists then
+            symbolNotFound(binder, name, parent)
+            None
+          else
+            Some(member)
+
+  private def symbolNotFound(binder: Type, name: Name, parent: Symbol)(using ctx: Context): Unit =
+    warn(s"Ignoring ${name} of symbol ${parent}, type ${binder}")
+
+  private def warn(msg: String)(using ctx: Context): Unit =
+    report.warning(
+      s"Internal error in extracting SemanticDB while compiling ${ctx.compilationUnit.source}: ${msg}"
+    )
+
+  private def registerFakeSymbol(sym: FakeSymbol)(using Context, SemanticSymbolBuilder): Unit =
+    fakeSymbols.add(sym)
+
+  extension (tpe: Type)
+    def toSemanticSig(using LinkMode, Context, SemanticSymbolBuilder)(sym: Symbol): s.Signature =
+      def enterParamRef(tpe: Type): Unit =
+        tpe match {
+          case lam: LambdaType =>
+            // Find the "actual" binder type for nested LambdaType
+            // For example, for `def foo(x: T)(y: T): T` and `<y>`, `sym.owner.info` would be like
+            // `MethodType(..., resType = MethodType(..., resType = <T>))`.
+            // (Let's say the outer `MethodType` "outer", and the `MethodType` which is
+            // `resType` of outer "inner")
+            //
+            // We try to find the "actual" binder of `<y>`: `inner`,
+            // and register them to the symbol table with `(<y>, inner) -> <symbol of y>`
+            // instead of `(<y>, outer) -> <symbol of y>`
+            if lam.paramNames.contains(sym.name) then
+              paramRefSymtab((lam, sym.name)) = sym
+            else
+              enterParamRef(lam.resType)
+
+          // for class constructor
+          // class C[T] { ... }
+          case cls: ClassInfo if sym.info.isInstanceOf[LambdaType] =>
+            val lam = sym.info.asInstanceOf[LambdaType]
+            cls.cls.typeParams.foreach { param =>
+              paramRefSymtab((lam, param.name)) = param
+            }
+
+          // type X[T] = ...
+          case tb: TypeBounds =>
+            enterParamRef(tb.lo)
+            enterParamRef(tb.hi)
+
+          case _ => ()
+        }
+
+      def enterRefined(tpe: Type): Unit =
+        tpe match {
+          case refined: RefinedType =>
+            val key = (refined, sym.name)
+            refinementSymtab(key) = sym
+
+          case rec: RecType =>
+            enterRefined(rec.parent)
+
+          // Register symbol for opaque type,
+          // opaque type alias will be stored into the refinement of
+          // the self type of the enclosing class.
+          // Key: the tuple of
+          //   - self-type of enclosing class
+          //   - name of the opaque type
+          // Value: the symbol of the opaque type
+          // See: SymDenotation.opaqueToBounds
+          case cls: ClassInfo if sym.is(Flags.Opaque) =>
+            cls.classSymbol.asClass.givenSelfType match
+              case rt: RefinedType =>
+                refinementSymtab((rt, sym.name)) = sym
+              case _ => ()
+
+          case cls: ClassInfo if (cls.cls.name == tpnme.REFINE_CLASS) =>
+            enterRefined(sym.owner.owner.info)
+
+          // type x = Person { refinement }
+          case tb: TypeBounds =>
+            // tb = TypeBounds(
+            //   lo = RefinedType(...)
+            //   hi = RefinedType(...)
+            // )
+            enterRefined(tb.lo)
+            enterRefined(tb.hi)
+
+          // def s(x: Int): { refinement } = ...
+          case expr: ExprType =>
+            enterRefined(expr.resType)
+          case m: LambdaType =>
+            enterRefined(m.resType)
+          case _ => ()
+        }
+      if sym.exists && sym.owner.exists then
+        enterParamRef(sym.owner.info)
+        enterRefined(sym.owner.info)
+
+      def loop(tpe: Type): s.Signature = tpe match {
+        case mp: MethodOrPoly =>
+          def flatten(
+            t: Type,
+            paramss: List[List[SemanticSymbol]],
+            tparams: List[SemanticSymbol]
+          ): (Type, List[List[SemanticSymbol]], List[SemanticSymbol]) = t match {
+            case mt: MethodType =>
+              val syms: List[SemanticSymbol] = mt.paramNames.zip(mt.paramInfos).map { (name, info) =>
+                paramRefSymtab.lookup(mt, name).getOrElse {
+                  TermParamRefSymbol(sym, name, info).tap(registerFakeSymbol)
+                }
+              }
+              flatten(mt.resType, paramss :+ syms, tparams)
+            case pt: PolyType =>
+              val syms: List[SemanticSymbol] = pt.paramNames.zip(pt.paramInfos).map { (name, info) =>
+                paramRefSymtab.lookup(pt, name).getOrElse {
+                  TypeParamRefSymbol(sym, name, info).tap(registerFakeSymbol)
+                }
+              }
+              flatten(pt.resType, paramss, tparams ++ syms)
+            case other =>
+              (other, paramss, tparams)
+          }
+          val (resType, paramss, tparams) = flatten(mp, Nil, Nil)
+
+          val sparamss = paramss.map(_.sscope)
+          val stparams = tparams.sscopeOpt
+          s.MethodSignature(
+            stparams,
+            sparamss,
+            resType.toSemanticType(sym)
+          )
+
+        case cls: ClassInfo =>
+          val stparams = cls.cls.typeParams.sscopeOpt
+          val sparents = cls.parents.map(_.toSemanticType(sym))
+          val sself = cls.selfType.toSemanticType(sym)
+          val decls = cls.decls.toList.sscopeOpt
+          s.ClassSignature(stparams, sparents, sself, decls)
+
+        case TypeBounds(lo, hi) =>
+          // for example, `type X[T] = T` is equivalent to `[T] =>> T`
+          def tparams(tpe: Type): (Type, List[SemanticSymbol]) = tpe match {
+            case lambda: HKTypeLambda =>
+              val paramSyms: List[SemanticSymbol] = lambda.paramNames.zip(lambda.paramInfos).map { (paramName, bounds) =>
+                // def x[T[_]] = ???
+                if paramName.isWildcard then
+                  WildcardTypeSymbol(sym, bounds).tap(registerFakeSymbol)
+                else
+                  paramRefSymtab.lookup(lambda, paramName).getOrElse {
+                    TypeParamRefSymbol(sym, paramName, bounds).tap(registerFakeSymbol)
+                  }
+              }
+              (lambda.resType, paramSyms)
+            case _ => (tpe, Nil)
+          }
+          val (loRes, loParams) = tparams(lo)
+          val (hiRes, hiParams) = tparams(hi)
+          val stparams = (loParams ++ hiParams).distinctBy(_.name).sscopeOpt
+          val slo = loRes.toSemanticType(sym)
+          val shi = hiRes.toSemanticType(sym)
+          s.TypeSignature(stparams, slo, shi)
+
+        case other =>
+          s.ValueSignature(
+            other.toSemanticType(sym)
+          )
+      }
+      loop(tpe)
+
+    def toSemanticType(sym: Symbol)(using LinkMode, SemanticSymbolBuilder, Context): s.Type =
+      import ConstantOps._
+      def loop(tpe: Type): s.Type = tpe match {
+        case t if t.isFromJavaObject =>
+          loop(defn.AnyType)
+        case ExprType(tpe) =>
+          val stpe = loop(tpe)
+          s.ByNameType(stpe)
+
+        case TypeRef(pre, sym: Symbol) =>
+          val spre = if tpe.hasTrivialPrefix then s.Type.Empty else loop(pre)
+          val ssym = sym.symbolName
+          s.TypeRef(spre, ssym, Seq.empty)
+
+        case TermRef(pre, sym: Symbol) =>
+          val spre = if(tpe.hasTrivialPrefix) s.Type.Empty else loop(pre)
+          val ssym = sym.symbolName
+          s.SingleType(spre, ssym)
+
+        case ThisType(TypeRef(_, sym: Symbol)) =>
+          s.ThisType(sym.symbolName)
+
+        case tref: TermParamRef =>
+          paramRefSymtab.lookupOrErr(
+            tref.binder, tref.paramName, sym
+          ) match
+            case Some(ref) =>
+              val ssym = ref.symbolName
+              s.SingleType(s.Type.Empty, ssym)
+            case None =>
+              s.Type.Empty
+
+        case tref: TypeParamRef =>
+          val tsym = paramRefSymtab.lookup(tref.binder, tref.paramName) match
+            case found @ Some(sym) => found
+            case None =>
+              tref.binder.typeParams.find(param => param.paramName == tref.paramName) match
+                case Some(param) =>
+                  val info = param.paramInfo
+                  Some(TypeParamRefSymbol(sym, tref.paramName, info).tap(registerFakeSymbol))
+                case None =>
+                  symbolNotFound(tref.binder, tref.paramName, sym)
+                  None
+          tsym match
+            case Some(sym) =>
+              val ssym = sym.symbolName
+              s.TypeRef(s.Type.Empty, ssym, Seq.empty)
+            case None =>
+              s.Type.Empty
+
+        case SuperType(thistpe, supertpe) =>
+          val spre = loop(thistpe.typeSymbol.info)
+          val ssym = supertpe.typeSymbol.symbolName
+          s.SuperType(spre, ssym)
+
+        // val clazzOf = classOf[...]
+        case ConstantType(const) if const.tag == core.Constants.ClazzTag =>
+          loop(const.typeValue)
+
+        case ConstantType(const) =>
+          s.ConstantType(const.toSemanticConst)
+
+        case rt @ RefinedType(parent, name, info) =>
+          // `X { def x: Int; def y: Int }`
+          // RefinedType(
+          //   parent = RefinedType(
+          //     parent = TypeRef(..., X)
+          //     ...
+          //   )
+          //   refinedName = x
+          //   refinedInfo = TypeRef(..., Int)
+          // )
+          type RefinedInfo = (core.Names.Name, Type)
+          def flatten(tpe: Type, acc: List[RefinedInfo]): (Type, List[RefinedInfo]) = tpe match {
+            case RefinedType(parent, name, info) =>
+              flatten(parent, acc :+ (name, info))
+            case _ =>
+              (tpe, acc)
+          }
+
+          // flatten parent types to list
+          // e.g. `X with Y with Z { refined }`
+          // RefinedType(parent = AndType(X, AndType(Y, Z)), ...)
+          // => List(X, Y, Z)
+          def flattenParent(parent: Type): List[s.Type] = parent match {
+            case AndType(tp1, tp2) =>
+              flattenParent(tp1) ++ flattenParent(tp2)
+            case _ => List(loop(parent))
+          }
+
+          val (parent, refinedInfos) = flatten(rt, List.empty)
+          val stpe = s.IntersectionType(flattenParent(parent))
+
+          val decls: List[SemanticSymbol] = refinedInfos.map { (name, info) =>
+            refinementSymtab.lookup(rt, name).getOrElse {
+              RefinementSymbol(sym, name, info).tap(registerFakeSymbol)
+            }
+          }
+          val sdecls = decls.sscopeOpt(using LinkMode.HardlinkChildren)
+          s.StructuralType(stpe, sdecls)
+
+        case rec: RecType =>
+          loop(rec.parent) // should be handled as RefinedType
+
+        // repeated params: e.g. `Int*`, which is the syntax sugar of
+        // `Seq[Int] @Repeated` (or `Array[Int] @Repeated`)
+        // See: Desugar.scala and TypeApplications.scala
+        case AnnotatedType(AppliedType(_, targs), annot)
+          if (annot matches defn.RepeatedAnnot) && (targs.length == 1) =>
+          val stpe = loop(targs(0))
+          s.RepeatedType(stpe)
+
+        case ann: AnnotatedType if ann.annot.symbol.info.isInstanceOf[ClassInfo] =>
+          def flatten(tpe: Type, annots: List[Annotation]): (Type, List[Annotation]) = tpe match
+            case AnnotatedType(parent, annot) if annot.symbol.info.isInstanceOf[ClassInfo] =>
+              flatten(parent, annot +: annots)
+            case other => (other, annots)
+
+          val (parent, annots) = flatten(ann, List.empty)
+          val sparent = loop(parent)
+          val sannots = annots.map(a =>
+            s.Annotation(loop(a.symbol.info.asInstanceOf[ClassInfo].selfType))
+          )
+          s.AnnotatedType(sannots, sparent)
+
+        case AppliedType(tycon, args) if tycon == defn.RepeatedParamType && args.length == 1 =>
+          val stpe = loop(args(0))
+          s.RepeatedType(stpe)
+
+        case app @ AppliedType(tycon, args) =>
+          val targs = args.map { arg =>
+            arg match
+              // For wildcard type C[_ <: T], its internal type representation will be
+              // `AppliedType(TypeBounds(lo = <Nothing>, hi = <T>))`.
+              //
+              // As scalameta for Scala2 does, we'll convert the wildcard type to
+              // `ExistentialType(TypeRef(NoPrefix, C, <local0>), Scope(hardlinks = List(<local0>)))`
+              // where `<local0>` has
+              // display_name: "_" and,
+              // signature: type_signature(..., lo = <Nothing>, hi = <T>)
+              case bounds: TypeBounds =>
+                val wildcardSym = WildcardTypeSymbol(sym, bounds)
+                // DO NOT register the wildcard symbol to symbol section here
+                // since it's not a symbol definition
+                // registerFakeSymbol(wildcardSym)
+                val ssym = wildcardSym.symbolName
+                (Some(wildcardSym), s.TypeRef(s.Type.Empty, ssym, Seq.empty))
+              case other =>
+                val sarg = loop(other)
+                (None, sarg)
+          }
+          val wildcardSyms = targs.flatMap(_._1)
+          val sargs = targs.map(_._2)
+
+          val applied = loop(tycon) match
+            case ref @ s.TypeRef(_, _, targs) =>
+              // For curried applied type `F[T][U]` and tycon is also an `AppliedType`
+              // Convert it to TypeRef(..., targs = List(T, U))
+              ref.copy(typeArguments = targs ++ sargs)
+            case _ =>
+              s.Type.Empty
+
+          if (wildcardSyms.isEmpty) applied
+          else s.ExistentialType(
+            applied,
+            wildcardSyms.sscopeOpt(using LinkMode.HardlinkChildren)
+          )
+
+        case and: AndType =>
+          def flatten(child: Type): List[Type] = child match
+            case AndType(ct1, ct2) => flatten(ct1) ++ flatten(ct2)
+            case other => List(other)
+          val stpes = flatten(and).map(loop)
+          s.IntersectionType(stpes)
+
+        case or: OrType =>
+          def flatten(child: Type): List[Type] = child match
+            case OrType(ct1, ct2) => flatten(ct1) ++ flatten(ct2)
+            case other => List(other)
+          val stpes = flatten(or).map(loop)
+          s.UnionType(stpes)
+
+        case l: LazyRef =>
+          loop(l.ref)
+
+        case NoPrefix =>
+          s.Type.Empty
+
+        // Not yet supported
+        case _: HKTypeLambda =>
+          s.Type.Empty
+        case _: MatchType =>
+          s.Type.Empty
+
+        case tvar: TypeVar =>
+          loop(tvar.stripped)
+
+        case _ =>
+          s.Type.Empty
+      }
+      loop(tpe)
+
+    /** Return true if the prefix is like `_root_.this` */
+    private def hasTrivialPrefix(using Context): Boolean =
+      def checkTrivialPrefix(pre: Type, sym: Symbol)(using Context): Boolean =
+        pre =:= sym.owner.thisType
+      tpe match {
+        case TypeRef(pre, sym: Symbol) =>
+          checkTrivialPrefix(pre, sym)
+        case TermRef(pre, sym: Symbol) =>
+          checkTrivialPrefix(pre, sym)
+        case _ => false
+      }
+
+
+object SymbolScopeOps:
+  import Scala3.{_, given}
+  extension (syms: List[SemanticSymbol])
+    def sscope(using linkMode: LinkMode)(using SemanticSymbolBuilder, TypeOps, Context): s.Scope =
+      linkMode match
+        case LinkMode.SymlinkChildren =>
+          s.Scope(symlinks = syms.map(_.symbolName))
+        case LinkMode.HardlinkChildren =>
+          s.Scope(hardlinks = syms.map(_.symbolInfo(Set.empty)))
+
+    def sscopeOpt(using LinkMode, SemanticSymbolBuilder, TypeOps, Context): Option[s.Scope] =
+      if syms.nonEmpty then Some(syms.sscope) else None
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/LiteParser.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/LiteParser.scala
index 48412fd027b8..dea43c0ede19 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/internal/LiteParser.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/LiteParser.scala
@@ -4,10 +4,28 @@ import java.io.InputStream
 
 object LiteParser {
 
-  def readMessage[A](input: SemanticdbInputStream, message: SemanticdbMessage[A]): A = {
-    val length    = input.readRawVarint32()
-    val oldLimit  = input.pushLimit(length)
-    val result: A = message.mergeFrom(input)
+  def readMessage[A <: SemanticdbGeneratedMessage](
+      input: SemanticdbInputStream,
+      message: A
+  )(implicit
+      cmp: SemanticdbGeneratedMessageCompanion[A]
+  ): A = {
+    val length = input.readRawVarint32()
+    val oldLimit = input.pushLimit(length)
+    val result: A = cmp.merge(message, input)
+    input.checkLastTagWas(0)
+    input.popLimit(oldLimit)
+    result
+  }
+
+  def readMessage[A <: SemanticdbGeneratedMessage](
+      input: SemanticdbInputStream
+  )(implicit
+      cmp: SemanticdbGeneratedMessageCompanion[A]
+  ): A = {
+    val length = input.readRawVarint32()
+    val oldLimit = input.pushLimit(length)
+    val result: A = cmp.parseFrom(input)
     input.checkLastTagWas(0)
     input.popLimit(oldLimit)
     result
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbGeneratedMessageCompanion.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbGeneratedMessageCompanion.scala
new file mode 100644
index 000000000000..4974e3ea256c
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbGeneratedMessageCompanion.scala
@@ -0,0 +1,80 @@
+package dotty.tools.dotc.semanticdb.internal
+
+import java.io.InputStream
+
+// https://github.com/scalapb/ScalaPB/blob/194463272125b872b99d4902b7712355a53e96c4/scalapb-runtime/src/main/scala/scalapb/GeneratedMessageCompanion.scala#L61-L68
+trait SemanticdbGeneratedOneof extends Any with Product with Serializable {
+  type ValueType
+  def number: Int
+  def isDefined: Boolean
+  def isEmpty: Boolean
+  def value: ValueType
+  def valueOption: Option[ValueType] = if (isDefined) Some(value) else None
+}
+
+// https://github.com/scalapb/ScalaPB/blob/194463272125b872b99d4902b7712355a53e96c4/scalapb-runtime/src/main/scala/scalapb/GeneratedMessageCompanion.scala#L72-L138
+trait SemanticdbGeneratedMessage extends Any with Product with Serializable {
+  def serializedSize: Int
+
+  def writeTo(output: SemanticdbOutputStream): Unit
+
+  /** Serializes the message and returns a byte array containing its raw bytes */
+  final def toByteArray: Array[Byte] = {
+    val a = new Array[Byte](serializedSize)
+    val outputStream = SemanticdbOutputStream.newInstance(a)
+    writeTo(outputStream)
+    outputStream.checkNoSpaceLeft()
+    a
+  }
+}
+
+trait SemanticdbGeneratedSealedOneof
+    extends Any
+    with Product
+    with Serializable {
+  type MessageType <: SemanticdbGeneratedMessage
+  def isEmpty: Boolean
+  def isDefined: Boolean
+  def asMessage: MessageType
+}
+
+trait SemanticdbGeneratedEnum extends Any with Product with Serializable {
+  type EnumType <: SemanticdbGeneratedEnum
+
+  def value: Int
+
+  def index: Int
+
+  def name: String
+
+  override def toString = name
+
+  def isUnrecognized: Boolean = false
+
+}
+
+trait SemanticdbUnrecognizedEnum extends SemanticdbGeneratedEnum {
+  def name = "UNRECOGNIZED"
+
+  def index = -1
+
+  override def isUnrecognized: Boolean = true
+}
+
+trait SemanticdbGeneratedMessageCompanion[A <: SemanticdbGeneratedMessage]
+    extends Serializable {
+  self =>
+  type ValueType = A
+
+  /** Parses a message from a CodedInputStream. */
+  def parseFrom(input: SemanticdbInputStream): A
+
+  def parseFrom(input: Array[Byte]): A = parseFrom(
+    SemanticdbInputStream.newInstance(input)
+  )
+
+  /** Merges the given message with the additional fields in the stream. */
+  def merge(a: A, input: SemanticdbInputStream): A = {
+    parseFrom(a.toByteArray ++ parseFrom(input).toByteArray)
+  }
+}
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala
index 2d228ffeed64..3a2b616477c3 100644
--- a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala
+++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala
@@ -448,6 +448,29 @@ class SemanticdbInputStream private (buffer: Array[Byte], input: InputStream) {
     }
   }
 
+  def readStringRequireUtf8(): String = {
+    val size: Int = readRawVarint32()
+    var bytes: Array[Byte] = Array()
+    var pos = bufferPos;
+    if (size <= (bufferSize - pos) && size > 0) {
+      // Fast path:  We already have the bytes in a contiguous buffer, so
+      //   just copy directly from it.
+      bytes = buffer;
+      bufferPos = pos + size;
+    } else if (size == 0) {
+      return "";
+    } else {
+      // Slow path:  Build a byte array first then copy it.
+      bytes = readRawBytesSlowPath(size);
+      pos = 0;
+    }
+    // TODO(martinrb): We could save a pass by validating while decoding.
+    // if (!Utf8.isValidUtf8(bytes, pos, pos + size)) {
+    //   throw InvalidProtocolBufferException.invalidUtf8();
+    // }
+    return new String(bytes, pos, size, "UTF-8");
+  }
+
   def checkLastTagWas(value: Int): Unit = {
     if (lastTag != value) {
       throw InvalidProtocolBufferException.invalidEndTag();
diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbTypeMapper.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbTypeMapper.scala
new file mode 100644
index 000000000000..2310bcdbc97c
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbTypeMapper.scala
@@ -0,0 +1,17 @@
+package dotty.tools.dotc.semanticdb.internal
+
+abstract class SemanticdbTypeMapper[BaseType, CustomType] {
+  def toCustom(base: BaseType): CustomType
+  def toBase(custom: CustomType): BaseType
+}
+
+object SemanticdbTypeMapper {
+  def apply[BaseType, CustomType](baseToCustom: BaseType => CustomType)(
+      customToBase: CustomType => BaseType
+  ): SemanticdbTypeMapper[BaseType, CustomType] =
+    new SemanticdbTypeMapper[BaseType, CustomType] {
+      def toCustom(base: BaseType): CustomType = baseToCustom(base)
+      def toBase(custom: CustomType): BaseType = customToBase(custom)
+    }
+}
+
diff --git a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala
index 0399386a9e89..de25cb140da0 100644
--- a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala
+++ b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala
@@ -31,12 +31,9 @@ abstract class AccessProxies {
    */
   protected def passReceiverAsArg(accessorName: Name)(using Context): Boolean = false
 
-  /** The accessor definitions that need to be added to class `cls`
-   *  As a side-effect, this method removes entries from the `accessedBy` map.
-   *  So a second call of the same method will yield the empty list.
-   */
+  /** The accessor definitions that need to be added to class `cls` */
   private def accessorDefs(cls: Symbol)(using Context): Iterator[DefDef] =
-    for (accessor <- cls.info.decls.iterator; accessed <- accessedBy.remove(accessor).toOption) yield
+    for accessor <- cls.info.decls.iterator; accessed <- accessedBy.get(accessor) yield
       DefDef(accessor.asTerm, prefss => {
         def numTypeParams = accessed.info match {
           case info: PolyType => info.paramNames.length
@@ -54,7 +51,10 @@ abstract class AccessProxies {
               forwardedArgss.nonEmpty && forwardedArgss.head.nonEmpty) // defensive conditions
             accessRef.becomes(forwardedArgss.head.head)
           else
-            accessRef.appliedToTypeTrees(forwardedTpts).appliedToArgss(forwardedArgss)
+            accessRef
+              .appliedToTypeTrees(forwardedTpts)
+              .appliedToArgss(forwardedArgss)
+              .etaExpandCFT(using ctx.withOwner(accessor))
         rhs.withSpan(accessed.span)
       })
 
diff --git a/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala b/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala
index 348bc735bd9c..cc39c5a3517b 100644
--- a/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala
@@ -21,7 +21,9 @@ import scala.reflect.ClassTag
 class ArrayApply extends MiniPhase {
   import tpd._
 
-  override def phaseName: String = "arrayApply"
+  override def phaseName: String = ArrayApply.name
+
+  override def description: String = ArrayApply.description
 
   override def transformApply(tree: tpd.Apply)(using Context): tpd.Tree =
     if isArrayModuleApply(tree.symbol) then
@@ -71,3 +73,7 @@ class ArrayApply extends MiniPhase {
     }
   }
 }
+
+object ArrayApply:
+  val name: String = "arrayApply"
+  val description: String = "optimize `scala.Array.apply`"
diff --git a/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala b/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala
index 301026548444..1bcaa2626ee3 100644
--- a/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala
@@ -22,7 +22,9 @@ import scala.collection.immutable.::
 class ArrayConstructors extends MiniPhase {
   import ast.tpd._
 
-  override def phaseName: String = "arrayConstructors"
+  override def phaseName: String = ArrayConstructors.name
+
+  override def description: String = ArrayConstructors.description
 
   override def transformApply(tree: tpd.Apply)(using Context): tpd.Tree = {
     def expand(elemType: Type, dims: List[Tree]) =
@@ -49,3 +51,7 @@ class ArrayConstructors extends MiniPhase {
     else tree
   }
 }
+
+object ArrayConstructors:
+  val name: String = "arrayConstructors"
+  val description: String = "intercept creation of (non-generic) arrays and intrinsify"
diff --git a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala
index 17ce6020e2a9..79b68b331930 100644
--- a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala
+++ b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala
@@ -33,7 +33,9 @@ import ast.TreeTypeMap
 class BetaReduce extends MiniPhase:
   import ast.tpd._
 
-  def phaseName: String = "betaReduce"
+  override def phaseName: String = BetaReduce.name
+
+  override def description: String = BetaReduce.description
 
   override def transformApply(app: Apply)(using Context): Tree = app.fun match
     case Select(fn, nme.apply) if defn.isFunctionType(fn.tpe) =>
@@ -47,6 +49,9 @@ class BetaReduce extends MiniPhase:
 object BetaReduce:
   import ast.tpd._
 
+  val name: String = "betaReduce"
+  val description: String = "reduce closure applications"
+
   /** Beta-reduces a call to `fn` with arguments `argSyms` or returns `tree` */
   def apply(original: Tree, fn: Tree, args: List[Tree])(using Context): Tree =
     fn match
diff --git a/compiler/src/dotty/tools/dotc/transform/Bridges.scala b/compiler/src/dotty/tools/dotc/transform/Bridges.scala
index 8cb78c258704..f7c00ef74b94 100644
--- a/compiler/src/dotty/tools/dotc/transform/Bridges.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Bridges.scala
@@ -9,6 +9,11 @@ import ast.untpd
 import collection.{mutable, immutable}
 import util.Spans.Span
 import util.SrcPos
+import ContextFunctionResults.{contextResultCount, contextFunctionResultTypeAfter}
+import StdNames.nme
+import Constants.Constant
+import TypeErasure.transformInfo
+import Erasure.Boxing.adaptClosure
 
 /** A helper class for generating bridge methods in class `root`. */
 class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) {
@@ -34,9 +39,12 @@ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) {
 
     override def exclude(sym: Symbol) =
       !sym.isOneOf(MethodOrModule) || super.exclude(sym)
+
+    override def canBeHandledByParent(sym1: Symbol, sym2: Symbol, parent: Symbol): Boolean =
+      OverridingPairs.isOverridingPair(sym1, sym2, parent.thisType)
   }
 
-  //val site = root.thisType
+  val site = root.thisType
 
   private var toBeRemoved = immutable.Set[Symbol]()
   private val bridges = mutable.ListBuffer[Tree]()
@@ -77,7 +85,13 @@ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) {
                       |clashes with definition of the member itself; both have erased type ${info(member)(using elimErasedCtx)}."""",
                   bridgePosFor(member))
     }
-    else if (!bridgeExists)
+    else if !inContext(preErasureCtx)(site.memberInfo(member).matches(site.memberInfo(other))) then
+      // Neither symbol signatures nor pre-erasure types seen from root match; this means
+      // according to Scala 2 semantics there is no override.
+      // A bridge might introduce a `ClassCastException`.
+      // Example where this was observed: run/i12828a.scala and MapView in stdlib213
+      report.log(i"suppress bridge in $root for ${member} in ${member.owner} and ${other.showLocated} since member infos ${site.memberInfo(member)} and ${site.memberInfo(other)} do not match")
+    else if !bridgeExists then
       addBridge(member, other)
   }
 
@@ -103,12 +117,52 @@ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) {
       toBeRemoved += other
     }
 
-    def bridgeRhs(argss: List[List[Tree]]) = {
+    val memberCount = contextResultCount(member)
+
+    /** Eta expand application `ref(args)` as needed.
+     *  To do this correctly, we have to look at the member's original pre-erasure
+     *  type and figure out which context function types in its result are
+     *  not yet instantiated.
+     */
+    def etaExpand(ref: Tree, args: List[Tree])(using Context): Tree =
+      def expand(args: List[Tree], tp: Type, n: Int)(using Context): Tree =
+        if n <= 0 then
+          assert(ctx.typer.isInstanceOf[Erasure.Typer])
+          ctx.typer.typed(untpd.cpy.Apply(ref)(ref, args), member.info.finalResultType)
+        else
+          val defn.ContextFunctionType(argTypes, resType, isErased) = tp: @unchecked
+          val anonFun = newAnonFun(ctx.owner,
+            MethodType(if isErased then Nil else argTypes, resType),
+            coord = ctx.owner.coord)
+          anonFun.info = transformInfo(anonFun, anonFun.info)
+
+          def lambdaBody(refss: List[List[Tree]]) =
+            val refs :: Nil = refss: @unchecked
+            val expandedRefs = refs.map(_.withSpan(ctx.owner.span.endPos)) match
+              case (bunchedParam @ Ident(nme.ALLARGS)) :: Nil =>
+                argTypes.indices.toList.map(n =>
+                  bunchedParam
+                    .select(nme.primitive.arrayApply)
+                    .appliedTo(Literal(Constant(n))))
+              case refs1 => refs1
+            expand(args ::: expandedRefs, resType, n - 1)(using ctx.withOwner(anonFun))
+
+          val unadapted = Closure(anonFun, lambdaBody)
+          cpy.Block(unadapted)(unadapted.stats,
+            adaptClosure(unadapted.expr.asInstanceOf[Closure]))
+      end expand
+
+      val otherCount = contextResultCount(other)
+      val start = contextFunctionResultTypeAfter(member, otherCount)(using preErasureCtx)
+      expand(args, start, memberCount - otherCount)(using ctx.withOwner(bridge))
+    end etaExpand
+
+    def bridgeRhs(argss: List[List[Tree]]) =
       assert(argss.tail.isEmpty)
       val ref = This(root).select(member)
-      if (member.info.isParameterless) ref // can happen if `member` is a module
-      else Erasure.partialApply(ref, argss.head)
-    }
+      if member.info.isParameterless then ref // can happen if `member` is a module
+      else if memberCount == 0 then ref.appliedToTermArgs(argss.head)
+      else etaExpand(ref, argss.head)
 
     bridges += DefDef(bridge, bridgeRhs(_).withSpan(bridge.span))
   }
diff --git a/compiler/src/dotty/tools/dotc/transform/ByNameClosures.scala b/compiler/src/dotty/tools/dotc/transform/ByNameClosures.scala
deleted file mode 100644
index 00bcd1e5076a..000000000000
--- a/compiler/src/dotty/tools/dotc/transform/ByNameClosures.scala
+++ /dev/null
@@ -1,42 +0,0 @@
-package dotty.tools.dotc
-package transform
-
-import core._
-import Contexts._
-import Symbols._
-import Types._
-import Flags._
-import DenotTransformers.IdentityDenotTransformer
-import core.StdNames.nme
-
-/** This phase translates arguments to call-by-name parameters, using the rules
- *
- *      x           ==>    x                  if x is a => parameter
- *      e.apply()   ==>    (e)       if e is pure
- *      e           ==>    (() => e) for all other arguments
- *
- *  where
- *
- *     : [T](() => T): T
- *
- *  is a synthetic method defined in Definitions. Erasure will later strip the  wrappers.
- */
-class ByNameClosures extends TransformByNameApply with IdentityDenotTransformer { thisPhase =>
-  import ast.tpd._
-
-  override def phaseName: String = ByNameClosures.name
-
-  override def runsAfterGroupsOf: Set[String] = Set(ExpandSAMs.name)
-    // ExpanSAMs applied to partial functions creates methods that need
-    // to be fully defined before converting. Test case is pos/i9391.scala.
-
-  override def mkByNameClosure(arg: Tree, argType: Type)(using Context): Tree = {
-    val meth = newSymbol(
-      ctx.owner, nme.ANON_FUN, Synthetic | Method, MethodType(Nil, Nil, argType))
-    Closure(meth, _ => arg.changeOwnerAfter(ctx.owner, meth, thisPhase)).withSpan(arg.span)
-  }
-}
-
-object ByNameClosures {
-  val name: String = "byNameClosures"
-}
\ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala
index 7b6d7ee64cd2..2969b00c7dd2 100644
--- a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala
@@ -10,6 +10,7 @@ import core.Decorators._
 import core.StdNames.nme
 import core.Names._
 import core.NameKinds.TempResultName
+import core.Constants._
 import ast.Trees._
 import util.Store
 import collection.mutable
@@ -17,11 +18,13 @@ import collection.mutable
 /** This phase translates variables that are captured in closures to
  *  heap-allocated refs.
  */
-class CapturedVars extends MiniPhase with IdentityDenotTransformer { thisPhase =>
+class CapturedVars extends MiniPhase with IdentityDenotTransformer:
+  thisPhase =>
   import ast.tpd._
 
-  /** the following two members override abstract members in Transform */
-  val phaseName: String = "capturedVars"
+  override def phaseName: String = CapturedVars.name
+
+  override def description: String = CapturedVars.description
 
   override def runsAfterGroupsOf: Set[String] = Set(LiftTry.name)
     // lifting tries changes what variables are considered to be captured
@@ -45,6 +48,9 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer { thisPhase =
 
     val boxedRefClasses: collection.Set[Symbol] =
       refClassKeys.flatMap(k => Set(refClass(k), volatileRefClass(k)))
+
+    val objectRefClasses: collection.Set[Symbol] =
+      Set(refClass(defn.ObjectClass), volatileRefClass(defn.ObjectClass))
   }
 
   private var myRefInfo: RefInfo = null
@@ -123,32 +129,46 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer { thisPhase =
   }
 
   /** If assignment is to a boxed ref type, e.g.
-    *
-    *      intRef.elem = expr
-    *
-    *  rewrite using a temporary var to
-    *
-    *      val ev$n = expr
-    *      intRef.elem = ev$n
-    *
-    *  That way, we avoid the problem that `expr` might contain a `try` that would
-    *  run on a non-empty stack (which is illegal under JVM rules). Note that LiftTry
-    *  has already run before, so such `try`s would not be eliminated.
-    *
-    *  Also: If the ref type lhs is followed by a cast (can be an artifact of nested translation),
-    *  drop the cast.
-    */
-  override def transformAssign(tree: Assign)(using Context): Tree = {
-    def recur(lhs: Tree): Tree = lhs match {
-      case TypeApply(Select(qual, nme.asInstanceOf_), _) =>
-        val Select(_, nme.elem) = qual
+   *
+   *      intRef.elem = expr
+   *
+   *  rewrite using a temporary var to
+   *
+   *      val ev$n = expr
+   *      intRef.elem = ev$n
+   *
+   *  That way, we avoid the problem that `expr` might contain a `try` that would
+   *  run on a non-empty stack (which is illegal under JVM rules). Note that LiftTry
+   *  has already run before, so such `try`s would not be eliminated.
+   *
+   *  If the ref type lhs is followed by a cast (can be an artifact of nested translation),
+   *  drop the cast.
+   *
+   *  If the ref type is `ObjectRef` or `VolatileObjectRef`, immediately assign `null`
+   *  to the temporary to make the underlying target of the reference available for
+   *  garbage collection. Nullification is omitted if the `expr` is already `null`.
+   *
+   *      var ev$n: RHS = expr
+   *      objRef.elem = ev$n
+   *      ev$n = null.asInstanceOf[RHS]
+   */
+  override def transformAssign(tree: Assign)(using Context): Tree =
+    def absolved: Boolean = tree.rhs match
+      case Literal(Constant(null)) | Typed(Literal(Constant(null)), _) => true
+      case _ => false
+    def recur(lhs: Tree): Tree = lhs match
+      case TypeApply(Select(qual@Select(_, nme.elem), nme.asInstanceOf_), _) =>
         recur(qual)
       case Select(_, nme.elem) if refInfo.boxedRefClasses.contains(lhs.symbol.maybeOwner) =>
-        val tempDef = transformFollowing(SyntheticValDef(TempResultName.fresh(), tree.rhs))
-        transformFollowing(Block(tempDef :: Nil, cpy.Assign(tree)(lhs, ref(tempDef.symbol))))
+        val tempDef = transformFollowing(SyntheticValDef(TempResultName.fresh(), tree.rhs, flags = Mutable))
+        val update  = cpy.Assign(tree)(lhs, ref(tempDef.symbol))
+        def reset   = cpy.Assign(tree)(ref(tempDef.symbol), nullLiteral.cast(tempDef.symbol.info))
+        val res     = if refInfo.objectRefClasses(lhs.symbol.maybeOwner) && !absolved then reset else unitLiteral
+        transformFollowing(Block(tempDef :: update :: Nil, res))
       case _ =>
         tree
-    }
     recur(tree.lhs)
-  }
-}
+
+object CapturedVars:
+  val name: String = "capturedVars"
+  val description: String = "represent vars captured by closures as heap objects"
diff --git a/compiler/src/dotty/tools/dotc/transform/CheckLoopingImplicits.scala b/compiler/src/dotty/tools/dotc/transform/CheckLoopingImplicits.scala
new file mode 100644
index 000000000000..074f5a634821
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/CheckLoopingImplicits.scala
@@ -0,0 +1,93 @@
+package dotty.tools.dotc
+package transform
+
+import core.*
+import MegaPhase.MiniPhase
+import Contexts.*, Types.*, Symbols.*, SymDenotations.*, Flags.*
+import ast.*
+import Trees.*
+import Decorators.*
+
+import annotation.threadUnsafe
+
+object CheckLoopingImplicits:
+  val name: String = "checkLoopingImplicits"
+  val description: String = "check that implicit defs do not call themselves in an infinite loop"
+
+/** Checks that implicit defs do not call themselves in an infinite loop */
+class CheckLoopingImplicits extends MiniPhase:
+  thisPhase =>
+  import tpd._
+
+  override def phaseName: String = CheckLoopingImplicits.name
+
+  override def description: String = CheckLoopingImplicits.description
+
+  override def transformValDef(mdef: ValDef)(using Context): Tree = 
+    transform(mdef)
+
+  override def transformDefDef(mdef: DefDef)(using Context): Tree =
+    transform(mdef)
+
+  def transform(mdef: ValOrDefDef)(using Context): Tree =
+    val sym = mdef.symbol
+
+    def checkNotSelfRef(t: RefTree) =
+      if t.symbol eq sym then
+          report.warning(
+              em"""Infinite loop in function body
+                  |${mdef.rhs}""",
+              mdef.rhs.srcPos
+            )
+
+    def checkNotLooping(t: Tree): Unit = t match
+      case t: Ident =>
+        checkNotSelfRef(t)
+      case t @ Select(qual, _) =>
+        checkNotSelfRef(t)
+        checkNotLooping(qual)
+      case Apply(fn, args) =>
+        checkNotLooping(fn)
+        fn.tpe.widen match
+          case mt: MethodType
+               // Boolean && and || aren't defined with by-name parameters
+               // and therefore their type isn't an ExprType, so we exempt them by symbol name
+               if t.symbol != defn.Boolean_&& && t.symbol != defn.Boolean_|| =>
+            args.lazyZip(mt.paramInfos).foreach { (arg, pinfo) =>
+              if !pinfo.isInstanceOf[ExprType] then checkNotLooping(arg)
+            }
+          case _ =>
+      case TypeApply(fn, _) =>
+        checkNotLooping(fn)
+      case Block(stats, expr) =>
+        stats.foreach(checkNotLooping)
+        checkNotLooping(expr)
+      case Typed(expr, _) =>
+        checkNotLooping(expr)
+      case Assign(lhs, rhs) =>
+        checkNotLooping(lhs)
+        checkNotLooping(rhs)
+      case If(cond, _, _) =>
+        checkNotLooping(cond)
+      case Match(selector, _) =>
+        checkNotLooping(selector)
+      case Labeled(_, expr) =>
+        checkNotLooping(expr)
+      case Return(expr, _) =>
+        checkNotLooping(expr)
+      case WhileDo(cond, _) =>
+        checkNotLooping(cond)
+      case Try(block, _, finalizer) =>
+        checkNotLooping(block)
+        checkNotLooping(finalizer)
+      case SeqLiteral(elems, _) =>
+        elems.foreach(checkNotLooping)
+      case t: ValDef =>  
+        checkNotLooping(t.rhs)
+      case _ =>
+
+    if sym.isOneOf(GivenOrImplicit | Lazy | ExtensionMethod) then
+      checkNotLooping(mdef.rhs)
+    mdef
+  end transform
+end CheckLoopingImplicits
diff --git a/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala b/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala
new file mode 100644
index 000000000000..5d11cbdf7bb5
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala
@@ -0,0 +1,53 @@
+package dotty.tools.dotc
+package transform
+
+import core.*
+import MegaPhase.MiniPhase
+import Contexts.*, Types.*, Symbols.*, SymDenotations.*, Flags.*
+import ast.*
+import Trees.*
+import Decorators.*
+
+import annotation.threadUnsafe
+
+object CheckNoSuperThis:
+  val name: String = "checkNoSuperThis"
+  val description: String = "check that supercalls don't contain references to This"
+
+/** Checks that super and this calls do not pass `this` as (part of) an argument. */
+class CheckNoSuperThis extends MiniPhase:
+  thisPhase =>
+  import tpd._
+
+  override def phaseName: String = CheckNoSuperThis.name
+
+  override def description: String = CheckNoSuperThis.description
+
+  override def runsAfterGroupsOf: Set[String] = Set(Constructors.name)
+
+  override def transformDefDef(mdef: DefDef)(using Context): DefDef =
+    if mdef.symbol.isClassConstructor then
+      mdef.rhs match
+        case Block(stats, _) => splitAtSuper(stats) match
+          case (Apply(_, superArgs) :: _, _) =>
+            val cls = mdef.symbol.owner
+            def fail(t: Tree) =
+              report.error(em"super constructor cannot be passed a self reference $t unless parameter is declared by-name", t.srcPos)
+            for arg <- superArgs do
+              arg.foreachSubTree {
+                case t: This if t.symbol == cls =>
+                  fail(t)
+                case t: RefTree => t.tpe match
+                  case tpe @ TermRef(prefix, _)
+                  if (prefix == cls.thisType
+                      || cls.is(Module)
+                         && (prefix.termSymbol == cls.sourceModule || tpe.symbol == cls.sourceModule)
+                    ) && !tpe.symbol.is(JavaStatic) => fail(t)
+                  case _ =>
+                case _ =>
+              }
+          case _ =>
+        case _ =>
+    mdef
+
+end CheckNoSuperThis
diff --git a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala
index 56c7c8b9b3d9..6b0a4c3e9737 100644
--- a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala
@@ -29,7 +29,9 @@ import Decorators._
 class CheckReentrant extends MiniPhase {
   import ast.tpd._
 
-  override def phaseName: String = "checkReentrant"
+  override def phaseName: String = CheckReentrant.name
+
+  override def description: String = CheckReentrant.description
 
   private var shared: Set[Symbol] = Set()
   private var seen: Set[ClassSymbol] = Set()
@@ -84,3 +86,7 @@ class CheckReentrant extends MiniPhase {
     tree
   }
 }
+
+object CheckReentrant:
+  val name: String = "checkReentrant"
+  val description: String = "check no data races involving global vars"
diff --git a/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala
index 3edddadfb2ae..ddd42d615ad4 100644
--- a/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala
@@ -29,6 +29,8 @@ class CheckStatic extends MiniPhase {
 
   override def phaseName: String = CheckStatic.name
 
+  override def description: String = CheckStatic.description
+
   override def transformTemplate(tree: tpd.Template)(using Context): tpd.Tree = {
     val defns = tree.body.collect{case t: ValOrDefDef => t}
     var hadNonStaticField = false
@@ -63,4 +65,5 @@ class CheckStatic extends MiniPhase {
 
 object CheckStatic {
   val name: String = "checkStatic"
+  val description: String = "check restrictions that apply to @static members"
 }
diff --git a/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala b/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
new file mode 100644
index 000000000000..de304f5d3077
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
@@ -0,0 +1,61 @@
+package dotty.tools.dotc.transform
+
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.Types
+import dotty.tools.dotc.transform.MegaPhase._
+import java.io.{File => _}
+
+import dotty.tools.dotc.core._
+import SymDenotations._
+import Contexts._
+import Types._
+import Symbols._
+import Phases._
+import dotty.tools.dotc.util.SourcePosition
+import Decorators._
+import StdNames.nme
+import dotty.tools.io.JarArchive
+import dotty.tools.backend.jvm.GenBCode
+
+/**
+ * Small phase to be run to collect main classes and store them in the context.
+ * The general rule to run this phase is:
+ * - The output of compilation is JarArchive
+ * - There is no `-Xmain-class` defined
+ *
+ * The following flags affect this phase:
+ *   -d path.jar
+ *   -Xmain-class
+ */
+class CollectEntryPoints extends MiniPhase:
+
+  override def phaseName: String = CollectEntryPoints.name
+
+  override def description: String = CollectEntryPoints.description
+
+  override def isRunnable(using Context): Boolean =
+    def forceRun = ctx.settings.XmainClass.isDefault && ctx.settings.outputDir.value.isInstanceOf[JarArchive]
+    super.isRunnable && forceRun
+
+  override def transformTypeDef(tree: tpd.TypeDef)(using Context): tpd.Tree =
+    getEntryPoint(tree).map(registerEntryPoint)
+    tree
+
+  private def getEntryPoint(tree: tpd.TypeDef)(using Context): Option[String] =
+    val sym = tree.symbol
+    import dotty.tools.dotc.core.NameOps.stripModuleClassSuffix
+    val name = sym.fullName.stripModuleClassSuffix.toString
+    Option.when(sym.isStatic && !sym.is(Flags.Trait) && ctx.platform.hasMainMethod(sym))(name)
+
+  private def registerEntryPoint(s: String)(using Context) = {
+    genBCodePhase match {
+      case genBCodePhase: GenBCode =>
+        genBCodePhase.registerEntryPoint(s)
+      case _ =>
+    }
+  }
+
+object CollectEntryPoints:
+  val name: String = "Collect entry points"
+  val description: String = "collect all entry points and save them in the context"
diff --git a/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala b/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala
index 71bae0ec5a6d..cf575e84975d 100644
--- a/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala
@@ -13,6 +13,7 @@ import java.util.IdentityHashMap
 
 object CollectNullableFields {
   val name: String = "collectNullableFields"
+  val description: String = "collect fields that can be nulled out after use in lazy initialization"
 }
 
 /** Collect fields that can be nulled out after use in lazy initialization.
@@ -43,6 +44,8 @@ class CollectNullableFields extends MiniPhase {
 
   override def phaseName: String = CollectNullableFields.name
 
+  override def description: String = CollectNullableFields.description
+
   /** Running after `ElimByName` to see by names as nullable types. */
   override def runsAfter: Set[String] = Set(ElimByName.name)
 
diff --git a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala
index 4aa352f60f1b..9f90ae43c751 100644
--- a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala
@@ -20,6 +20,7 @@ import annotation.threadUnsafe
 
 object CompleteJavaEnums {
   val name: String = "completeJavaEnums"
+  val description: String = "fill in constructors for Java enums"
 
   private val nameParamName: TermName = "_$name".toTermName
   private val ordinalParamName: TermName = "_$ordinal".toTermName
@@ -35,6 +36,8 @@ class CompleteJavaEnums extends MiniPhase with InfoTransformer { thisPhase =>
 
   override def phaseName: String = CompleteJavaEnums.name
 
+  override def description: String = CompleteJavaEnums.description
+
   override def relaxedTypingInGroup: Boolean = true
     // Because it adds additional parameters to some constructors
 
diff --git a/compiler/src/dotty/tools/dotc/transform/Constructors.scala b/compiler/src/dotty/tools/dotc/transform/Constructors.scala
index 024261746265..be77103e2395 100644
--- a/compiler/src/dotty/tools/dotc/transform/Constructors.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Constructors.scala
@@ -9,7 +9,9 @@ import dotty.tools.dotc.core.StdNames._
 import ast._
 import Trees._
 import Flags._
+import Names.Name
 import NameOps._
+import NameKinds.{FieldName, ExplicitFieldName}
 import SymUtils._
 import Symbols._
 import Decorators._
@@ -19,6 +21,7 @@ import collection.mutable
 
 object Constructors {
   val name: String = "constructors"
+  val description: String = "collect initialization code in primary constructors"
 }
 
 /** This transform
@@ -31,6 +34,9 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase =
   import tpd._
 
   override def phaseName: String = Constructors.name
+
+  override def description: String = Constructors.description
+
   override def runsAfter: Set[String] = Set(HoistSuperArgs.name)
   override def runsAfterGroupsOf: Set[String] = Set(Memoize.name)
     // Memoized needs to be finished because we depend on the ownerchain after Memoize
@@ -146,10 +152,16 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase =
     //  (2) If the parameter accessor reference was to an alias getter,
     //      drop the () when replacing by the parameter.
     object intoConstr extends TreeMap {
+      private var isSuperCall = false
       override def transform(tree: Tree)(using Context): Tree = tree match {
         case Ident(_) | Select(This(_), _) =>
           var sym = tree.symbol
-          if (sym.is(ParamAccessor, butNot = Mutable)) sym = sym.subst(accessors, paramSyms)
+          if sym.is(ParamAccessor) && (!sym.is(Mutable) || isSuperCall)
+            // Variables need to go through the getter since they might have been updated,
+            // except if we are in a super call, since then the virtual getter call would
+            // be illegal.
+          then
+            sym = sym.subst(accessors, paramSyms)
           if (sym.maybeOwner.isConstructor) ref(sym).withSpan(tree.span) else tree
         case Apply(fn, Nil) =>
           val fn1 = transform(fn)
@@ -161,6 +173,7 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase =
       }
 
       def apply(tree: Tree, prevOwner: Symbol)(using Context): Tree =
+        isSuperCall = isSuperConstrCall(tree)
         transform(tree).changeOwnerAfter(prevOwner, constr.symbol, thisPhase)
     }
 
@@ -242,7 +255,28 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase =
         splitStats(stats1)
       case Nil =>
     }
+
+    /** Check that we do not have both a private field with name `x` and a private field
+     *  with name `FieldName(x)`. These will map to the same JVM name and therefore cause
+     *  a duplicate field error. If that case arises (as in i13862.scala), use an explicit
+     *  name `x$field` instead of `FieldName(x)`.
+     */
+    def checkNoFieldClashes() =
+      val fieldNames = mutable.HashSet[Name]()
+      for case field: ValDef <- clsStats do
+        field.symbol.name match
+          case FieldName(_) =>
+          case name => fieldNames += name
+      for case field: ValDef <- clsStats do
+        field.symbol.name match
+          case fldName @ FieldName(name) if fieldNames.contains(name) =>
+            val newName = ExplicitFieldName(name)
+            report.log(i"avoid field/field conflict by renaming $fldName to $newName")
+            field.symbol.copySymDenotation(name = newName).installAfter(thisPhase)
+          case _ =>
+
     splitStats(tree.body)
+    checkNoFieldClashes()
 
     // The initializers for the retained accessors */
     val copyParams = accessors flatMap { acc =>
diff --git a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala
index ee56767054fd..14134b4bb1fa 100644
--- a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala
@@ -8,12 +8,16 @@ import StdNames.nme
 import ast.untpd
 import ast.tpd._
 import config.Config
+import Decorators.*
 
 object ContextFunctionResults:
 
   /** Annotate methods that have context function result types directly matched by context
    *  closures on their right-hand side. Parameters to such closures will be integrated
    *  as additional method parameters in erasure.
+   *
+   *  A @ContextResultCount(n) annotation means that the method's result type
+   *  consists of a string of `n` nested context closures.
    */
   def annotateContextResults(mdef: DefDef)(using Context): Unit =
     def contextResultCount(rhs: Tree, tp: Type): Int = tp match
@@ -50,6 +54,15 @@ object ContextFunctionResults:
         crCount
       case none => 0
 
+  /** True iff `ContextResultCount` is not zero and all context functions in the result
+   *  type are erased.
+   */
+  def contextResultsAreErased(sym: Symbol)(using Context): Boolean =
+    def allErased(tp: Type): Boolean = tp.dealias match
+      case defn.ContextFunctionType(_, resTpe, isErased) => isErased && allErased(resTpe)
+      case _ => true
+    contextResultCount(sym) > 0 && allErased(sym.info.finalResultType)
+
   /** Turn the first `crCount` context function types in the result type of `tp`
    *  into the curried method types.
    */
@@ -86,33 +99,13 @@ object ContextFunctionResults:
     normalParamCount(sym.info)
   end totalParamCount
 
-  /** The rightmost context function type in the result type of `meth`
-   *  that represents `paramCount` curried, non-erased parameters that
-   *  are included in the `contextResultCount` of `meth`.
-   *  Example:
-   *
-   *  Say we have `def m(x: A): B ?=> (C1, C2, C3) ?=> D ?=> E ?=> F`,
-   *  paramCount == 4, and the contextResultCount of `m` is 3.
-   *  Then we return the type `(C1, C2, C3) ?=> D ?=> E ?=> F`, since this
-   *  type covers the 4 rightmost parameters C1, C2, C3 and D before the
-   *  contextResultCount runs out at E ?=> F.
-   *  Erased parameters are ignored; they contribute nothing to the
-   *  parameter count.
-   */
-  def contextFunctionResultTypeCovering(meth: Symbol, paramCount: Int)(using Context) =
-    atPhase(erasurePhase) {
-      // Recursive instances return pairs of context types and the
-      // # of parameters they represent.
-      def missingCR(tp: Type, crCount: Int): (Type, Int) =
-        if crCount == 0 then (tp, 0)
-        else
-          val defn.ContextFunctionType(formals, resTpe, isErased) = tp: @unchecked
-          val result @ (rt, nparams) = missingCR(resTpe, crCount - 1)
-          assert(nparams <= paramCount)
-          if nparams == paramCount || isErased then result
-          else (tp, nparams + formals.length)
-      missingCR(meth.info.finalResultType, contextResultCount(meth))._1
-    }
+  /** The `depth` levels nested context function type in the result type of `meth` */
+  def contextFunctionResultTypeAfter(meth: Symbol, depth: Int)(using Context) =
+    def recur(tp: Type, n: Int): Type =
+      if n == 0 then tp
+      else tp match
+        case defn.ContextFunctionType(_, resTpe, _) => recur(resTpe, n - 1)
+    recur(meth.info.finalResultType, depth)
 
   /** Should selection `tree` be eliminated since it refers to an `apply`
    *  node of a context function type whose parameters will end up being
diff --git a/compiler/src/dotty/tools/dotc/transform/CookComments.scala b/compiler/src/dotty/tools/dotc/transform/CookComments.scala
index 6ef0bdc4c247..27f34891fc2c 100644
--- a/compiler/src/dotty/tools/dotc/transform/CookComments.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CookComments.scala
@@ -6,7 +6,10 @@ import dotty.tools.dotc.core.ContextOps._
 import dotty.tools.dotc.typer.Docstrings
 
 class CookComments extends MegaPhase.MiniPhase {
-  override def phaseName: String = "cookComments"
+
+  override def phaseName: String = CookComments.name
+
+  override def description: String = CookComments.description
 
   override def transformTypeDef(tree: tpd.TypeDef)(using Context): tpd.Tree = {
     if (ctx.settings.YcookComments.value && tree.isClassDef) {
@@ -26,4 +29,6 @@ class CookComments extends MegaPhase.MiniPhase {
   }
 }
 
-
+object CookComments:
+  val name = "cookComments"
+  val description: String = "cook the comments: expand variables, doc, etc."
diff --git a/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala b/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala
index 27a7907b266e..54ac7478c332 100644
--- a/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala
+++ b/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala
@@ -15,6 +15,7 @@ import collection.mutable
 
 object CountOuterAccesses:
   val name: String = "countOuterAccesses"
+  val description: String = "identify outer accessors that can be dropped"
 
   /** Characterizes outer accessors and outer fields that can be dropped
    *  if there are no references to them from within the toplevel class
@@ -38,6 +39,8 @@ class CountOuterAccesses extends MiniPhase:
 
   override def phaseName: String = CountOuterAccesses.name
 
+  override def description: String = CountOuterAccesses.description
+
   override def runsAfter: Set[String] = Set(LambdaLift.name)
     // LambdaLift can create outer paths. These need to be known in this phase.
 
diff --git a/compiler/src/dotty/tools/dotc/transform/Dependencies.scala b/compiler/src/dotty/tools/dotc/transform/Dependencies.scala
new file mode 100644
index 000000000000..c5c6c5baaa7b
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/Dependencies.scala
@@ -0,0 +1,266 @@
+package dotty.tools.dotc
+package transform
+
+import core.*
+import Symbols.*, Contexts.*, Types.*, Flags.*, Decorators.*
+import ast.Trees.*
+import SymUtils.*
+import collection.mutable.{LinkedHashMap, TreeSet}
+import annotation.constructorOnly
+
+/** Exposes the dependencies of the `root` tree in three functions or maps:
+ *  `freeVars`, `tracked`, and `logicalOwner`.
+ */
+abstract class Dependencies(root: ast.tpd.Tree, @constructorOnly rootContext: Context):
+  import ast.tpd._
+
+  /** The symbol is a method or a lazy val that will be mapped to a method */
+  protected def isExpr(sym: Symbol)(using Context): Boolean
+
+  /** The closest enclosing symbol in the current context for which `isExpr` is true */
+  protected def enclosure(using Context): Symbol
+
+  /** The set of free variables of a function, including free variables of its callees */
+  def freeVars(sym: Symbol): collection.Set[Symbol] = free.getOrElse(sym, Set.empty)
+
+  /** The set of functions that have free variables, i.e for which `freeVars` is non-empty */
+  def tracked: Iterable[Symbol] = free.keys
+
+  /** The outermost class that captures all free variables of a function
+   *  that are captured by enclosing classes (this means that the function could
+   *  be placed in that class without having to add more environment parameters)
+   */
+  def logicalOwner: collection.Map[Symbol, Symbol] = logicOwner
+
+  private type SymSet = TreeSet[Symbol]
+
+  /** A map storing free variables of functions and classes */
+  private val free: LinkedHashMap[Symbol, SymSet] = new LinkedHashMap
+
+  /** A hashtable storing calls between functions */
+  private val called = new LinkedHashMap[Symbol, SymSet]
+
+  /** A map from local methods and classes to the owners to which they will be lifted as members.
+   *  For methods and classes that do not have any dependencies this will be the enclosing package.
+ *  symbols with packages as lifted owners will subsequently be represented as static
+   *  members of their toplevel class, unless their enclosing class was already static.
+   *  Note: During tree transform (which runs at phase LambdaLift + 1), liftedOwner
+   *  is also used to decide whether a method had a term owner before.
+   */
+  private val logicOwner = new LinkedHashMap[Symbol, Symbol]
+
+  /** A flag to indicate whether new free variables have been found */
+  private var changedFreeVars: Boolean = _
+
+  /** A flag to indicate whether lifted owners have changed */
+  private var changedLogicOwner: Boolean = _
+
+  private val ord: Ordering[Symbol] = Ordering.by(_.id)
+  private def newSymSet = TreeSet.empty[Symbol](ord)
+
+  private def symSet(f: LinkedHashMap[Symbol, SymSet], sym: Symbol): SymSet =
+    f.getOrElseUpdate(sym, newSymSet)
+
+  /** A symbol is local if it is owned by a term or a local trait,
+   *  or if it is a constructor of a local symbol.
+   *  Note: we count members of local traits as local since their free variables
+   *  have to be passed on from their callers. By contrast, class members get their
+   *  free variable proxies from their enclosing class.
+   */
+  private def isLocal(sym: Symbol)(using Context): Boolean =
+    val owner = sym.maybeOwner
+    owner.isTerm
+    || owner.is(Trait) && isLocal(owner)
+    || sym.isConstructor && isLocal(owner)
+
+  /** Set `liftedOwner(sym)` to `owner` if `owner` is more deeply nested
+   *  than the previous value of `liftedOwner(sym)`.
+   */
+  private def narrowLogicOwner(sym: Symbol, owner: Symbol)(using Context): Unit =
+    if sym.maybeOwner.isTerm
+        && owner.isProperlyContainedIn(logicOwner(sym))
+        && owner != sym
+    then
+      report.log(i"narrow lifted $sym to $owner")
+      changedLogicOwner = true
+      logicOwner(sym) = owner
+
+  /** Mark symbol `sym` as being free in `enclosure`, unless `sym` is defined
+   *  in `enclosure` or there is an intermediate class properly containing `enclosure`
+   *  in which `sym` is also free. Also, update `liftedOwner` of `enclosure` so
+   *  that `enclosure` can access `sym`, or its proxy in an intermediate class.
+   *  This means:
+   *
+   *    1. If there is an intermediate class in which `sym` is free, `enclosure`
+   *       must be contained in that class (in order to access the `sym` proxy stored
+   *       in the class).
+   *
+   *    2. If there is no intermediate class, `enclosure` must be contained
+   *       in the class enclosing `sym`.
+   *
+   *  @return  If there is a non-trait class between `enclosure` and
+   *           the owner of `sym`, the largest such class.
+   *           Otherwise, if there is a trait between `enclosure` and
+   *           the owner of `sym`, the largest such trait.
+   *           Otherwise, NoSymbol.
+   *
+   *  @pre sym.owner.isTerm, (enclosure.isMethod || enclosure.isClass)
+   *
+   *  The idea of `markFree` is illustrated with an example:
+   *
+   *  def f(x: int) = {
+   *    class C {
+   *      class D {
+   *        val y = x
+   *      }
+   *    }
+   *  }
+   *
+   *  In this case `x` is free in the primary constructor of class `C`.
+   *  but it is not free in `D`, because after lambda lift the code would be transformed
+   *  as follows:
+   *
+   *  def f(x$0: int) {
+   *    class C(x$0: int) {
+   *      val x$1 = x$0
+   *      class D {
+   *        val y = outer.x$1
+   *      }
+   *    }
+   *  }
+   */
+  private def markFree(sym: Symbol, enclosure: Symbol)(using Context): Symbol =
+    import Dependencies.NoPath
+    try
+      if !enclosure.exists then throw NoPath()
+      if enclosure == sym.enclosure then NoSymbol
+      else
+        def nestedInConstructor(sym: Symbol): Boolean =
+          sym.isConstructor
+          || sym.isTerm && nestedInConstructor(sym.enclosure)
+        report.debuglog(i"mark free: ${sym.showLocated} with owner ${sym.maybeOwner} marked free in $enclosure")
+        val intermediate =
+          if enclosure.is(PackageClass) then enclosure
+          else if enclosure.isConstructor then markFree(sym, enclosure.owner.enclosure)
+          else markFree(sym, enclosure.enclosure)
+        if intermediate.exists then
+          narrowLogicOwner(enclosure, intermediate)
+        if !intermediate.isRealClass || nestedInConstructor(enclosure) then
+          // Constructors and methods nested inside traits get the free variables
+          // of the enclosing trait or class.
+          // Conversely, local traits do not get free variables.
+          // Methods inside constructors also don't have intermediates,
+          // need to get all their free variables passed directly.
+          if !enclosure.is(Trait) then
+            if symSet(free, enclosure).add(sym) then
+              changedFreeVars = true
+              report.log(i"$sym is free in $enclosure")
+        if intermediate.isRealClass then intermediate
+        else if enclosure.isRealClass then enclosure
+        else if intermediate.isClass then intermediate
+        else if enclosure.isClass then enclosure
+        else NoSymbol
+    catch case ex: NoPath =>
+      println(i"error lambda lifting ${ctx.compilationUnit}: $sym is not visible from $enclosure")
+      throw ex
+
+  private def markCalled(callee: Symbol, caller: Symbol)(using Context): Unit = {
+    report.debuglog(i"mark called: $callee of ${callee.owner} is called by $caller in ${caller.owner}")
+    assert(isLocal(callee))
+    symSet(called, caller) += callee
+  }
+
+  protected def process(tree: Tree)(using Context) =
+    val sym = tree.symbol
+
+    def narrowTo(thisClass: ClassSymbol) =
+      val enclMethod = enclosure
+      val enclClass = enclMethod.enclosingClass
+      narrowLogicOwner(enclMethod,
+        if enclClass.isContainedIn(thisClass) then thisClass
+        else enclClass) // unknown this reference, play it safe and assume the narrowest possible owner
+
+    tree match
+      case tree: Ident =>
+        if isLocal(sym) then
+          if isExpr(sym) then markCalled(sym, enclosure)
+          else if sym.isTerm then markFree(sym, enclosure)
+        def captureImplicitThis(x: Type): Unit = x match
+          case tr@TermRef(x, _) if !tr.termSymbol.isStatic => captureImplicitThis(x)
+          case x: ThisType if !x.tref.typeSymbol.isStaticOwner => narrowTo(x.tref.typeSymbol.asClass)
+          case _ =>
+        captureImplicitThis(tree.tpe)
+      case tree: Select =>
+        if isExpr(sym) && isLocal(sym) then markCalled(sym, enclosure)
+      case tree: This =>
+        narrowTo(tree.symbol.asClass)
+      case tree: MemberDef if isExpr(sym) && sym.owner.isTerm =>
+        logicOwner(sym) = sym.enclosingPackageClass
+          // this will make methods in supercall constructors of top-level classes owned
+          // by the enclosing package, which means they will be static.
+          // On the other hand, all other methods will be indirectly owned by their
+          // top-level class. This avoids possible deadlocks when a static method
+          // has to access its enclosing object from the outside.
+      case tree: DefDef if sym.isPrimaryConstructor && isLocal(sym.owner) && !sym.owner.is(Trait) =>
+        // add a call edge from the constructor of a local non-trait class to
+        // the class itself. This is done so that the constructor inherits
+        // the free variables of the class.
+        symSet(called, sym) += sym.owner
+      case tree: TypeDef =>
+        if sym.owner.isTerm then logicOwner(sym) = sym.topLevelClass.owner
+      case _ =>
+  end process
+
+  private class CollectDependencies extends TreeTraverser:
+    def traverse(tree: Tree)(using Context) =
+      try
+        process(tree)
+        traverseChildren(tree)
+      catch case ex: Exception =>
+        println(i"$ex while traversing $tree")
+        throw ex
+
+  /** Compute final free variables map `fvs` by closing over caller dependencies. */
+  private def computeFreeVars()(using Context): Unit =
+    while
+      changedFreeVars = false
+      for
+        caller <- called.keys
+        callee <- called(caller)
+        fvs <- free get callee
+        fv <- fvs
+      do
+        markFree(fv, caller)
+      changedFreeVars
+    do ()
+
+  /** Compute final liftedOwner map by closing over caller dependencies */
+  private def computeLogicOwners()(using Context): Unit =
+    while
+      changedLogicOwner = false
+      for
+        caller <- called.keys
+        callee <- called(caller)
+      do
+        val normalizedCallee = callee.skipConstructor
+        val calleeOwner = normalizedCallee.owner
+        if calleeOwner.isTerm then narrowLogicOwner(caller, logicOwner(normalizedCallee))
+        else
+          assert(calleeOwner.is(Trait))
+          // methods nested inside local trait methods cannot be lifted out
+          // beyond the trait. Note that we can also call a trait method through
+          // a qualifier; in that case no restriction to lifted owner arises.
+          if caller.isContainedIn(calleeOwner) then
+            narrowLogicOwner(caller, calleeOwner)
+      changedLogicOwner
+    do ()
+
+  // initialization
+  inContext(rootContext) {
+    CollectDependencies().traverse(root)
+    computeFreeVars()
+    computeLogicOwners()
+  }
+object Dependencies:
+  private class NoPath extends Exception
+end Dependencies
diff --git a/compiler/src/dotty/tools/dotc/transform/DropOuterAccessors.scala b/compiler/src/dotty/tools/dotc/transform/DropOuterAccessors.scala
index c6b3785e99ed..e9322a450cb9 100644
--- a/compiler/src/dotty/tools/dotc/transform/DropOuterAccessors.scala
+++ b/compiler/src/dotty/tools/dotc/transform/DropOuterAccessors.scala
@@ -18,6 +18,7 @@ import annotation.threadUnsafe
 
 object DropOuterAccessors:
   val name: String = "dropOuterAccessors"
+  val description: String = "drop unused outer accessors"
 
 /** Drops unused outer accessors of inner classes that are visible only in one
  *  toplevel class. For other classes, we can't tell whether an outer accessor
@@ -29,6 +30,8 @@ class DropOuterAccessors extends MiniPhase with IdentityDenotTransformer:
 
   override def phaseName: String = DropOuterAccessors.name
 
+  override def description: String = DropOuterAccessors.description
+
   override def runsAfterGroupsOf: Set[String] = Set(CountOuterAccesses.name)
 
   override def changesMembers: Boolean = true // the phase drops outer accessors
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala
index aeed5ac76d39..3a91f6623243 100644
--- a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala
@@ -1,80 +1,163 @@
-package dotty.tools.dotc
+package dotty.tools
+package dotc
 package transform
 
 import core._
-import DenotTransformers.InfoTransformer
-import Symbols._
 import Contexts._
+import Symbols._
 import Types._
+import Flags._
+import SymDenotations.*
+import DenotTransformers.InfoTransformer
+import NameKinds.SuperArgName
 import core.StdNames.nme
-import ast.Trees._
+import MegaPhase.*
+import Decorators.*
+import typer.RefChecks
+import reporting.trace
 
-/** This phase eliminates ExprTypes `=> T` as types of method parameter references, and replaces them b
- *  nullary function types.  More precisely:
+/** This phase implements the following transformations:
  *
- *  For the types of parameter symbols:
+ *  1. For types of method and class parameters:
  *
- *         => T       ==>    () => T
+ *     => T     becomes    () ?=> T
  *
- *  For cbn parameter values
+ *  2. For references to cbn-parameters:
  *
- *         x          ==>    x()
+ *     x        becomes    x.apply()
  *
- *  Note: This scheme to have inconsistent types between method types (whose formal types are still
- *  ExprTypes and parameter valdefs (which are now FunctionTypes) is not pretty. There are two
- *  other options which have been abandoned or not yet pursued.
+ *  3. For arguments to cbn parameters
  *
- *  Option 1: Transform => T to () => T also in method and function types. The problem with this is
- *  that is that it requires to look at every type, and this forces too much, causing
- *  Cyclic Reference errors. Abandoned for this reason.
+ *     e        becomes    () ?=> e
  *
- *  Option 2: Merge ElimByName with erasure, or have it run immediately before. This has not been
- *  tried yet.
+ *  An optimization is applied: If the argument `e` to a cbn parameter is already
+ *  of type `() ?=> T` and is a pure expression, we avoid (2) and (3), i.e. we
+ *  pass `e` directly instead of `() ?=> e.apply()`.
+ *
+ *  Note that `() ?=> T`  cannot be written in source since user-defined context functions
+ *  must have at least one parameter. We use the type here as a convenient marker
+ *  of something that will erase to Function0, and where we know that it came from
+ *  a by-name parameter.
+ *
+ *  Note also that the transformation applies only to types of parameters, not to other
+ *  occurrences of ExprTypes. In particular, embedded occurrences in function types
+ *  such as `(=> T) => U` are left as-is here (they are eliminated in erasure).
+ *  Trying to convert these as well would mean traversing all the types, and that
+ *  leads to cyclic reference errors in many cases.  This can cause problems in that
+ *  we might have sometimes a `() ?=> T` where a `=> T` is expected. To compensate,
+ *  there is a new clause in TypeComparer#subArg that declares `() ?=> T` to be a
+ *  subtype of `=> T` for arguments of type applications at any point after this phase
+ *  and up to erasure.
  */
-class ElimByName extends TransformByNameApply with InfoTransformer {
+class ElimByName extends MiniPhase, InfoTransformer:
+  thisPhase =>
+
   import ast.tpd._
 
   override def phaseName: String = ElimByName.name
 
-  override def changesParents: Boolean = true // Only true for by-names
+  override def description: String = ElimByName.description
+
+  override def runsAfterGroupsOf: Set[String] = Set(ExpandSAMs.name, ElimRepeated.name, RefChecks.name)
+    // - ExpandSAMs applied to partial functions creates methods that need
+    //   to be fully defined before converting. Test case is pos/i9391.scala.
+    // - ElimByName needs to run in a group after ElimRepeated since ElimRepeated
+    //   works on simple arguments but not converted closures, and it sees the arguments
+    //   after transformations by subsequent miniphases in the same group.
+    // - ElimByName should run in a group after RefChecks, since RefChecks does heavy
+    //   comparisons of signatures, and ElimByName distorts these signatures by not
+    //   replacing `=>` with `() ?=> T` everywhere.
+
+  override def changesParents: Boolean = true
+    // Expr types in parent type arguments are changed to function types.
+
+  /** If denotation had an ExprType before, it now gets a function type */
+  private def exprBecomesFunction(symd: SymDenotation)(using Context): Boolean =
+    symd.is(Param) || symd.is(ParamAccessor, butNot = Method)
+
+  def transformInfo(tp: Type, sym: Symbol)(using Context): Type = tp match {
+    case ExprType(rt) if exprBecomesFunction(sym) =>
+      defn.ByNameFunction(rt)
+    case tp: MethodType =>
+      def exprToFun(tp: Type) = tp match
+        case ExprType(rt) => defn.ByNameFunction(rt)
+        case tp => tp
+      tp.derivedLambdaType(
+        paramInfos = tp.paramInfos.mapConserve(exprToFun),
+        resType = transformInfo(tp.resType, sym))
+    case tp: PolyType =>
+      tp.derivedLambdaType(resType = transformInfo(tp.resType, sym))
+    case _ => tp
+  }
 
-  /** Map `tree` to `tree.apply()` is `ftree` was of ExprType and becomes now a function */
-  private def applyIfFunction(tree: Tree, ftree: Tree)(using Context) =
-    if (isByNameRef(ftree)) {
+  override def infoMayChange(sym: Symbol)(using Context): Boolean =
+    sym.is(Method) || exprBecomesFunction(sym)
+
+  def byNameClosure(arg: Tree, argType: Type)(using Context): Tree =
+    val meth = newAnonFun(ctx.owner, MethodType(Nil, argType), coord = arg.span)
+    Closure(meth,
+        _ => arg.changeOwnerAfter(ctx.owner, meth, thisPhase),
+        targetType = defn.ByNameFunction(argType)
+      ).withSpan(arg.span)
+
+  private def isByNameRef(tree: Tree)(using Context): Boolean =
+    defn.isByNameFunction(tree.tpe.widen)
+
+  /** Map `tree` to `tree.apply()` if `tree` is of type `() ?=> T` */
+  private def applyIfFunction(tree: Tree)(using Context) =
+    if isByNameRef(tree) then
       val tree0 = transformFollowing(tree)
-      atPhase(next) { tree0.select(defn.Function0_apply).appliedToNone }
-    }
+      atPhase(next) { tree0.select(defn.ContextFunction0_apply).appliedToNone }
     else tree
 
   override def transformIdent(tree: Ident)(using Context): Tree =
-    applyIfFunction(tree, tree)
+    applyIfFunction(tree)
 
   override def transformSelect(tree: Select)(using Context): Tree =
-    applyIfFunction(tree, tree)
+    applyIfFunction(tree)
 
   override def transformTypeApply(tree: TypeApply)(using Context): Tree = tree match {
     case TypeApply(Select(_, nme.asInstanceOf_), arg :: Nil) =>
       // tree might be of form e.asInstanceOf[x.type] where x becomes a function.
       // See pos/t296.scala
-      applyIfFunction(tree, arg)
+      applyIfFunction(tree)
     case _ => tree
   }
 
+  override def transformApply(tree: Apply)(using Context): Tree =
+    trace(s"transforming ${tree.show} at phase ${ctx.phase}", show = true) {
+
+      def transformArg(arg: Tree, formal: Type): Tree = formal match
+        case defn.ByNameFunction(formalResult) =>
+          def stripTyped(t: Tree): Tree = t match
+            case Typed(expr, _) => stripTyped(expr)
+            case _ => t
+          stripTyped(arg) match
+            case Apply(Select(qual, nme.apply), Nil)
+            if isByNameRef(qual) && (isPureExpr(qual) || qual.symbol.isAllOf(InlineParam)) =>
+              qual
+            case _ =>
+              if isByNameRef(arg) || arg.symbol.name.is(SuperArgName)
+              then arg
+              else
+                var argType = arg.tpe.widenIfUnstable
+                if argType.isBottomType then argType = formalResult
+                byNameClosure(arg, argType)
+        case _ =>
+          arg
+
+      val mt @ MethodType(_) = tree.fun.tpe.widen
+      val args1 = tree.args.zipWithConserve(mt.paramInfos)(transformArg)
+      cpy.Apply(tree)(tree.fun, args1)
+    }
+
   override def transformValDef(tree: ValDef)(using Context): Tree =
     atPhase(next) {
-      if (exprBecomesFunction(tree.symbol))
+      if exprBecomesFunction(tree.symbol) then
         cpy.ValDef(tree)(tpt = tree.tpt.withType(tree.symbol.info))
       else tree
     }
 
-  def transformInfo(tp: Type, sym: Symbol)(using Context): Type = tp match {
-    case ExprType(rt) => defn.FunctionOf(Nil, rt)
-    case _ => tp
-  }
-
-  override def infoMayChange(sym: Symbol)(using Context): Boolean = sym.isTerm && exprBecomesFunction(sym)
-}
-
-object ElimByName {
+object ElimByName:
   val name: String = "elimByName"
-}
+  val description: String = "map by-name parameters to functions"
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala b/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala
index 6545d7653c6b..503561915040 100644
--- a/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala
@@ -13,6 +13,18 @@ import NameKinds.SuperAccessorName
 
 object ElimErasedValueType {
   val name: String = "elimErasedValueType"
+  val description: String = "expand erased value types to their underlying implementation types"
+
+  def elimEVT(tp: Type)(using Context): Type = tp match {
+    case ErasedValueType(_, underlying) =>
+      elimEVT(underlying)
+    case tp: MethodType =>
+      val paramTypes = tp.paramInfos.mapConserve(elimEVT)
+      val retType = elimEVT(tp.resultType)
+      tp.derivedLambdaType(tp.paramNames, paramTypes, retType)
+    case _ =>
+      tp
+  }
 }
 
 /** This phase erases ErasedValueType to their underlying type.
@@ -25,9 +37,12 @@ object ElimErasedValueType {
 class ElimErasedValueType extends MiniPhase with InfoTransformer { thisPhase =>
 
   import tpd._
+  import ElimErasedValueType.elimEVT
 
   override def phaseName: String = ElimErasedValueType.name
 
+  override def description: String = ElimErasedValueType.description
+
   override def runsAfter: Set[String] = Set(Erasure.name)
 
   def transformInfo(tp: Type, sym: Symbol)(using Context): Type = sym match {
@@ -48,17 +63,6 @@ class ElimErasedValueType extends MiniPhase with InfoTransformer { thisPhase =>
       elimEVT(tp)
   }
 
-  def elimEVT(tp: Type)(using Context): Type = tp match {
-    case ErasedValueType(_, underlying) =>
-      elimEVT(underlying)
-    case tp: MethodType =>
-      val paramTypes = tp.paramInfos.mapConserve(elimEVT)
-      val retType = elimEVT(tp.resultType)
-      tp.derivedLambdaType(tp.paramNames, paramTypes, retType)
-    case _ =>
-      tp
-  }
-
   def transformTypeOfTree(tree: Tree)(using Context): Tree =
     tree.withType(elimEVT(tree.tpe))
 
@@ -116,7 +120,7 @@ class ElimErasedValueType extends MiniPhase with InfoTransformer { thisPhase =>
       // Do the test at the earliest phase where both symbols existed.
       val phaseId =
         sym1.originDenotation.validFor.firstPhaseId max sym2.originDenotation.validFor.firstPhaseId
-      atPhase(elimRepeatedPhase.next)(checkNoConflict(sym1, sym2, sym1.info))
+      atPhase(elimByNamePhase.next)(checkNoConflict(sym1, sym2, sym1.info))
       opc.next()
     }
   }
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala b/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala
index 3eca6ea6b28c..a6f9cbf7dbe9 100644
--- a/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala
@@ -18,6 +18,7 @@ import ast.Trees._
 
 object ElimOpaque {
   val name: String = "elimOpaque"
+  val description: String = "turn opaque into normal aliases"
 }
 
 /** Rewrites opaque type aliases to normal alias types */
@@ -27,6 +28,8 @@ class ElimOpaque extends MiniPhase with DenotTransformer {
 
   override def phaseName: String = ElimOpaque.name
 
+  override def description: String = ElimOpaque.description
+
   // Override checks need to take place before treating opaque types as aliases
   override def runsAfterGroupsOf: Set[String] = Set(typer.RefChecks.name)
 
@@ -72,4 +75,4 @@ class ElimOpaque extends MiniPhase with DenotTransformer {
           tree
     else
       tree
-}
\ No newline at end of file
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala b/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala
index 20a3c3223ea8..f161cd4f8cb7 100644
--- a/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala
@@ -13,7 +13,9 @@ import NameKinds.OuterSelectName
 class ElimOuterSelect extends MiniPhase {
   import ast.tpd._
 
-  override def phaseName: String = "elimOuterSelect"
+  override def phaseName: String = ElimOuterSelect.name
+
+  override def description: String = ElimOuterSelect.description
 
   override def runsAfterGroupsOf: Set[String] = Set(ExplicitOuter.name)
     // ExplicitOuter needs to have run to completion before so that all classes
@@ -30,3 +32,7 @@ class ElimOuterSelect extends MiniPhase {
       case _ => tree
     }
 }
+
+object ElimOuterSelect:
+  val name: String = "elimOuterSelect"
+  val description: String = "expand outer selections"
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala b/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala
index 9179fb6c2d2c..83349f1f6199 100644
--- a/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala
@@ -12,7 +12,9 @@ import MegaPhase.MiniPhase
  */
 class ElimPackagePrefixes extends MiniPhase {
 
-  override def phaseName: String = "elimPackagePrefixes"
+  override def phaseName: String = ElimPackagePrefixes.name
+
+  override def description: String = ElimPackagePrefixes.description
 
   override def transformSelect(tree: Select)(using Context): Tree =
     if (isPackageClassRef(tree)) Ident(tree.tpe.asInstanceOf[TypeRef]) else tree
@@ -29,3 +31,7 @@ class ElimPackagePrefixes extends MiniPhase {
     case _ => false
   }
 }
+
+object ElimPackagePrefixes:
+  val name: String = "elimPackagePrefixes"
+  val description: String = "eliminate references to package prefixes in Select nodes"
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala b/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala
index cb99e8ca9c8a..756ddd9bf0eb 100644
--- a/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala
@@ -24,6 +24,8 @@ class ElimPolyFunction extends MiniPhase with DenotTransformer {
 
   override def phaseName: String = ElimPolyFunction.name
 
+  override def description: String = ElimPolyFunction.description
+
   override def runsAfter = Set(Erasure.name)
 
   override def changesParents: Boolean = true // Replaces PolyFunction by FunctionN
@@ -63,6 +65,7 @@ class ElimPolyFunction extends MiniPhase with DenotTransformer {
 }
 
 object ElimPolyFunction {
-  val name = "elimPolyFunction"
+  val name: String = "elimPolyFunction"
+  val description: String = "rewrite PolyFunction subclasses to FunctionN subclasses"
 }
 
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala
index f4d11f5a51a3..1ca258420aa2 100644
--- a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala
@@ -1,4 +1,5 @@
-package dotty.tools.dotc
+package dotty.tools
+package dotc
 package transform
 
 import core._
@@ -14,9 +15,11 @@ import Decorators._
 import Denotations._, SymDenotations._
 import TypeErasure.erasure
 import DenotTransformers._
+import NullOpsDecorator._
 
 object ElimRepeated {
   val name: String = "elimRepeated"
+  val description: String = "rewrite vararg parameters and arguments"
 }
 
 /** A transformer that eliminates repeated parameters (T*) from all types, replacing
@@ -28,6 +31,8 @@ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase =>
 
   override def phaseName: String = ElimRepeated.name
 
+  override def description: String = ElimRepeated.description
+
   override def changesMembers: Boolean = true // the phase adds vararg forwarders
 
   def transformInfo(tp: Type, sym: Symbol)(using Context): Type =
@@ -76,16 +81,32 @@ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase =>
 
   override def infoMayChange(sym: Symbol)(using Context): Boolean = sym.is(Method)
 
-  private def overridesJava(sym: Symbol)(using Context) = sym.allOverriddenSymbols.exists(_.is(JavaDefined))
+  /** Does `sym` override a symbol defined in a Java class? One might think that
+   *  this can be expressed as
+   *
+   *      sym.allOverriddenSymbols.exists(_.is(JavaDefined))
+   *
+   *  but that does not work, since `allOverriddenSymbols` gets confused because the
+   *  signatures of a Java varargs method and a Scala varargs override are not the same.
+   */
+  private def overridesJava(sym: Symbol)(using Context) =
+    sym.owner.info.baseClasses.drop(1).exists { bc =>
+      bc.is(JavaDefined) && {
+        val other = bc.info.nonPrivateDecl(sym.name)
+        other.hasAltWith { alt =>
+          sym.owner.thisType.memberInfo(alt.symbol).matchesLoosely(sym.info)
+        }
+      }
+    }
 
   private def hasVarargsAnnotation(sym: Symbol)(using Context) = sym.hasAnnotation(defn.VarargsAnnot)
 
   private def parentHasVarargsAnnotation(sym: Symbol)(using Context) = sym.allOverriddenSymbols.exists(hasVarargsAnnotation)
 
   private def isVarargsMethod(sym: Symbol)(using Context) =
-    hasVarargsAnnotation(sym) ||
-      hasRepeatedParams(sym) &&
-      (sym.allOverriddenSymbols.exists(s => s.is(JavaDefined) || hasVarargsAnnotation(s)))
+    hasVarargsAnnotation(sym)
+    || hasRepeatedParams(sym)
+        && (overridesJava(sym) || sym.allOverriddenSymbols.exists(hasVarargsAnnotation))
 
   /** Eliminate repeated parameters from method types. */
   private def elimRepeated(tp: Type, isJava: Boolean)(using Context): Type = tp.stripTypeVar match
@@ -96,22 +117,7 @@ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase =>
         if lastIdx >= 0 then
           val last = paramTypes(lastIdx)
           if last.isRepeatedParam then
-            // We need to be careful when handling Java repeated parameters
-            // of the form `Object...` or `T...` where `T` is unbounded:
-            // in both cases, `Object` will have been translated to `FromJavaObject`
-            // to allow passing primitives as repeated arguments, but we can't
-            // pass a primitive array as argument to such a method since the
-            // parameter will be erased to `Object[]`. To handle this correctly we
-            // drop usage of `FromJavaObject` as an element type here, the
-            // tree transformer of this phase is then responsible for handling
-            // mismatches by emitting the correct adaptation (cf `adaptToArray`).
-            // See also the documentation of `FromJavaObjectSymbol`.
-            val last1 =
-              if isJava && last.elemType.isFromJavaObject then
-                defn.ArrayOf(TypeBounds.upper(defn.ObjectType))
-              else
-                last.translateFromRepeated(toArray = isJava)
-            paramTypes.updated(lastIdx, last1)
+            paramTypes.updated(lastIdx, last.translateFromRepeated(toArray = isJava))
           else paramTypes
         else paramTypes
       tp.derivedLambdaType(paramNames, paramTypes1, resultType1)
@@ -126,8 +132,7 @@ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase =>
         val isJavaDefined = tree.fun.symbol.is(JavaDefined)
         val tpe = arg.expr.tpe
         if isJavaDefined then
-          val pt = tree.fun.tpe.widen.firstParamTypes.last
-          adaptToArray(arg.expr, pt.elemType.bounds.hi)
+          adaptToArray(arg.expr)
         else if tpe.derivesFrom(defn.ArrayClass) then
           arrayToSeq(arg.expr)
         else
@@ -136,58 +141,25 @@ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase =>
     }
     cpy.Apply(tree)(tree.fun, args)
 
-  /** Convert sequence argument to Java array */
-  private def seqToArray(tree: Tree)(using Context): Tree = tree match
+  private def adaptToArray(tree: Tree)(implicit ctx: Context): Tree = tree match
     case SeqLiteral(elems, elemtpt) =>
-      JavaSeqLiteral(elems, elemtpt)
+      JavaSeqLiteral(elems, elemtpt).withSpan(tree.span)
     case _ =>
-      val elemType = tree.tpe.elemType
-      var elemClass = erasure(elemType).classSymbol
-      if defn.NotRuntimeClasses.contains(elemClass) then
-        elemClass = defn.ObjectClass
-      end if
-      ref(defn.DottyArraysModule)
-        .select(nme.seqToArray)
-        .appliedToType(elemType)
-        .appliedTo(tree, clsOf(elemClass.typeRef))
-
-  /** Adapt a Seq or Array tree to be a subtype of `Array[_ <: $elemPt]`.
-   *
-   *  @pre `elemPt` must either be a super type of the argument element type or `Object`.
-   *        The special handling of `Object` is required to deal with the translation
-   *        of generic Java varargs in `elimRepeated`.
-   */
-  private def adaptToArray(tree: Tree, elemPt: Type)(implicit ctx: Context): Tree =
-    val elemTp = tree.tpe.elemType
-    val elemTpMatches = elemTp <:< elemPt
-    val treeIsArray = tree.tpe.derivesFrom(defn.ArrayClass)
-    if elemTpMatches && treeIsArray then
-      tree // No adaptation necessary
-    else tree match
-      case SeqLiteral(elems, elemtpt) =>
-        // By the precondition, we only have mismatches if elemPt is Object, in
-        // that case we use `FromJavaObject` as the element type to allow the
-        // sequence literal to typecheck no matter the types of the elements,
-        // Erasure will take care of any necessary boxing (see documentation
-        // of `FromJavaObjectSymbol` for more information).
-        val adaptedElemTpt = if elemTpMatches then elemtpt else TypeTree(defn.FromJavaObjectType)
-        JavaSeqLiteral(elems, adaptedElemTpt).withSpan(tree.span)
-      case _ =>
-        if treeIsArray then
-          // Convert an Array[T] to an Array[Object]
-          ref(defn.ScalaRuntime_toObjectArray)
-            .appliedTo(tree)
-        else if elemTpMatches then
-          // Convert a Seq[T] to an Array[$elemPt]
-          ref(defn.DottyArraysModule)
-            .select(nme.seqToArray)
-            .appliedToType(elemPt)
-            .appliedTo(tree, clsOf(elemPt))
+      val elemTp = tree.tpe.elemType
+      val adapted =
+        if tree.tpe.derivesFrom(defn.ArrayClass) then
+          tree
         else
-          // Convert a Seq[T] to an Array[Object]
-          ref(defn.ScalaRuntime_toArray)
-            .appliedToType(elemTp)
-            .appliedTo(tree)
+          ref(defn.DottyArraysModule)
+          .select(nme.seqToArray)
+          .appliedToType(elemTp)
+          .appliedTo(tree, clsOf(elemTp))
+      // This seemingly redundant type ascription is needed because the result
+      // type of `adapted` might be erased to `Object`, but we need to keep
+      // the precise result type at erasure for `Erasure.Boxing.cast` to adapt
+      // a primitive array into a reference array if needed.
+      // Test case in tests/run/t1360.scala.
+      Typed(adapted, TypeTree(defn.ArrayOf(elemTp)))
 
   /** Convert an Array into a scala.Seq */
   private def arrayToSeq(tree: Tree)(using Context): Tree =
@@ -288,10 +260,11 @@ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase =>
         val src =
           if hasAnnotation then "@varargs"
           else if isBridge then "overriding a java varargs method"
-          else "@varargs (on overriden method)"
+          else "@varargs (on overridden method)"
         report.error(s"$src produces a forwarder method that conflicts with ${conflict.showDcl}", original.srcPos)
       case Nil =>
         forwarder.enteredAfter(thisPhase)
+  end addVarArgsForwarder
 
   /** Convert type from Scala to Java varargs method */
   private def toJavaVarArgs(tp: Type)(using Context): Type = tp match
@@ -317,6 +290,9 @@ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase =>
     val array = tp.translateFromRepeated(toArray = true) // Array[? <: T]
     val element = array.elemType.hiBound // T
 
-    if element <:< defn.AnyRefType || element.typeSymbol.isPrimitiveValueClass then array
+
+    if element <:< defn.AnyRefType
+      || ctx.mode.is(Mode.SafeNulls) && element.stripNull <:< defn.AnyRefType
+      || element.typeSymbol.isPrimitiveValueClass then array
     else defn.ArrayOf(TypeBounds.upper(AndType(element, defn.AnyRefType))) // Array[? <: T & AnyRef]
 }
diff --git a/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala b/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala
index 8bf188a6a2de..02612253c735 100644
--- a/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala
@@ -13,7 +13,10 @@ import dotty.tools.dotc.core.Types.{ThisType, TermRef}
  */
 class ElimStaticThis extends MiniPhase {
   import ast.tpd._
-  def phaseName: String = "elimStaticThis"
+
+  override def phaseName: String = ElimStaticThis.name
+
+  override def description: String = ElimStaticThis.description
 
   override def transformThis(tree: This)(using Context): Tree =
     if (!tree.symbol.is(Package) && ctx.owner.enclosingMethod.is(JavaStatic)) {
@@ -34,3 +37,7 @@ class ElimStaticThis extends MiniPhase {
       }
     else tree
 }
+
+object ElimStaticThis:
+  val name: String = "elimStaticThis"
+  val description: String = "replace This references to static objects by global identifiers"
diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala
index 915aaf371219..d16cc2f66f51 100644
--- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala
@@ -12,7 +12,7 @@ import core.Types._
 import core.Names._
 import core.StdNames._
 import core.NameOps._
-import core.NameKinds.{AdaptedClosureName, BodyRetainerName}
+import core.NameKinds.{AdaptedClosureName, BodyRetainerName, DirectMethName}
 import core.Scopes.newScopeWith
 import core.Decorators._
 import core.Constants._
@@ -42,6 +42,8 @@ class Erasure extends Phase with DenotTransformer {
 
   override def phaseName: String = Erasure.name
 
+  override def description: String = Erasure.description
+
   /** List of names of phases that should precede this phase */
   override def runsAfter: Set[String] = Set(InterceptedMethods.name, ElimRepeated.name)
 
@@ -58,6 +60,18 @@ class Erasure extends Phase with DenotTransformer {
           }
         }
 
+      def erasedName =
+        if ref.is(Flags.Method)
+            && contextResultsAreErased(ref.symbol)
+            && (ref.owner.is(Flags.Trait) || ref.symbol.allOverriddenSymbols.hasNext)
+        then
+          // Add a `$direct` to prevent this method from having the same signature
+          // as a method it overrides. We need a bridge between the
+          // two methods, so they are not allowed to already override after erasure.
+          DirectMethName(ref.targetName.asTermName)
+        else
+          ref.targetName
+
       assert(ctx.phase == this, s"transforming $ref at ${ctx.phase}")
       if (ref.symbol eq defn.ObjectClass) {
         // After erasure, all former Any members are now Object members
@@ -80,7 +94,7 @@ class Erasure extends Phase with DenotTransformer {
         val oldOwner = ref.owner
         val newOwner = if oldOwner == defn.AnyClass then defn.ObjectClass else oldOwner
         val oldName = ref.name
-        val newName = ref.targetName
+        val newName = erasedName
         val oldInfo = ref.info
         var newInfo = transformInfo(oldSymbol, oldInfo)
         val oldFlags = ref.flags
@@ -191,6 +205,7 @@ object Erasure {
   import TypeTestsCasts._
 
   val name: String = "erasure"
+  val description: String = "rewrite types to JVM model"
 
   /** An attachment on Apply nodes indicating that multiple arguments
    *  are passed in a single array. This occurs only if the function
@@ -325,6 +340,13 @@ object Erasure {
       assert(!pt.isInstanceOf[SingletonType], pt)
       if (pt isRef defn.UnitClass) unbox(tree, pt)
       else (tree.tpe.widen, pt) match {
+        // Convert primitive arrays into reference arrays, this path is only
+        // needed to handle repeated arguments, see
+        // `Definitions#FromJavaObjectSymbol` and `ElimRepeated#adaptToArray`.
+        case (JavaArrayType(treeElem), JavaArrayType(ptElem))
+        if treeElem.widen.isPrimitiveValueType && !ptElem.isPrimitiveValueType =>
+          cast(ref(defn.ScalaRuntime_toObjectArray).appliedTo(tree), pt)
+
         // When casting between two EVTs, we need to check which one underlies the other to determine
         // whether u2evt or evt2u should be used.
         case (tp1 @ ErasedValueType(tycon1, underlying1), tp2 @ ErasedValueType(tycon2, underlying2)) =>
@@ -365,8 +387,8 @@ object Erasure {
       case _: FunProto | AnyFunctionProto => tree
       case _ => tree.tpe.widen match
         case mt: MethodType if tree.isTerm =>
-          if mt.paramInfos.isEmpty then adaptToType(tree.appliedToNone, pt)
-          else etaExpand(tree, mt, pt)
+          assert(mt.paramInfos.isEmpty)
+          adaptToType(tree.appliedToNone, pt)
         case tpw =>
           if (pt.isInstanceOf[ProtoType] || tree.tpe <:< pt)
             tree
@@ -385,7 +407,6 @@ object Erasure {
             cast(tree, pt)
     end adaptToType
 
-
     /** The following code:
      *
      *      val f: Function1[Int, Any] = x => ...
@@ -516,61 +537,6 @@ object Erasure {
       else
         tree
     end adaptClosure
-
-    /** Eta expand given `tree` that has the given method type `mt`, so that
-     *  it conforms to erased result type `pt`.
-     *  To do this correctly, we have to look at the tree's original pre-erasure
-     *  type and figure out which context function types in its result are
-     *  not yet instantiated.
-     */
-    def etaExpand(tree: Tree, mt: MethodType, pt: Type)(using Context): Tree =
-      report.log(i"eta expanding $tree")
-      val defs = new mutable.ListBuffer[Tree]
-      val tree1 = LiftErased.liftApp(defs, tree)
-      val xmt = if tree.isInstanceOf[Apply] then mt else expandedMethodType(mt, tree)
-      val targetLength = xmt.paramInfos.length
-      val origOwner = ctx.owner
-
-      // The original type from which closures should be constructed
-      val origType = contextFunctionResultTypeCovering(tree.symbol, targetLength)
-
-      def abstracted(args: List[Tree], tp: Type, pt: Type)(using Context): Tree =
-        if args.length < targetLength then
-          try
-            val defn.ContextFunctionType(argTpes, resTpe, isErased) = tp: @unchecked
-            if isErased then abstracted(args, resTpe, pt)
-            else
-              val anonFun = newSymbol(
-                ctx.owner, nme.ANON_FUN, Flags.Synthetic | Flags.Method,
-                MethodType(argTpes, resTpe), coord = tree.span.endPos)
-              anonFun.info = transformInfo(anonFun, anonFun.info)
-              def lambdaBody(refss: List[List[Tree]]) =
-                val refs :: Nil = refss: @unchecked
-                val expandedRefs = refs.map(_.withSpan(tree.span.endPos)) match
-                  case (bunchedParam @ Ident(nme.ALLARGS)) :: Nil =>
-                    argTpes.indices.toList.map(n =>
-                      bunchedParam
-                        .select(nme.primitive.arrayApply)
-                        .appliedTo(Literal(Constant(n))))
-                  case refs1 => refs1
-                abstracted(args ::: expandedRefs, resTpe, anonFun.info.finalResultType)(
-                  using ctx.withOwner(anonFun))
-
-              val unadapted = Closure(anonFun, lambdaBody)
-              cpy.Block(unadapted)(unadapted.stats, adaptClosure(unadapted.expr.asInstanceOf[Closure]))
-          catch case ex: MatchError =>
-            println(i"error while abstracting tree = $tree | mt = $mt | args = $args%, % | tp = $tp | pt = $pt")
-            throw ex
-        else
-          assert(args.length == targetLength, i"wrong # args tree = $tree | args = $args%, % | mt = $mt | tree type = ${tree.tpe}")
-          val app = untpd.cpy.Apply(tree1)(tree1, args)
-          assert(ctx.typer.isInstanceOf[Erasure.Typer])
-          ctx.typer.typed(app, pt)
-            .changeOwnerAfter(origOwner, ctx.owner, erasurePhase.asInstanceOf[Erasure])
-
-      seq(defs.toList, abstracted(Nil, origType, pt))
-    end etaExpand
-
   end Boxing
 
   class Typer(erasurePhase: DenotTransformer) extends typer.ReTyper with NoChecking {
@@ -586,8 +552,14 @@ object Erasure {
       */
     private def checkNotErased(tree: Tree)(using Context): tree.type = {
       if (!ctx.mode.is(Mode.Type)) {
-        if (isErased(tree))
-          report.error(em"${tree.symbol} is declared as erased, but is in fact used", tree.srcPos)
+        if isErased(tree) then
+          val msg =
+            if tree.symbol.is(Flags.Inline) then
+              em"""${tree.symbol} is declared as `inline`, but was not inlined
+                  |
+                  |Try increasing `-Xmax-inlines` above ${ctx.settings.XmaxInlines.value}""".stripMargin
+            else em"${tree.symbol} is declared as `erased`, but is in fact used"
+          report.error(msg, tree.srcPos)
         tree.symbol.getAnnotation(defn.CompileTimeOnlyAnnot) match {
           case Some(annot) =>
             def defaultMsg =
@@ -698,16 +670,24 @@ object Erasure {
       def mapOwner(sym: Symbol): Symbol =
         if !sym.exists && tree.name == nme.apply then
           // PolyFunction apply Selects will not have a symbol, so deduce the owner
-          // from the typed qual.
-          val owner = qual1.tpe.typeSymbol
-          if defn.isFunctionClass(owner) then owner else NoSymbol
+          // from the typed tree of the erasure of the original qualifier's PolyFunction type.
+          // We cannot simply call `erasure` on the qualifier because its erasure might be
+          // `Object` due to how we erase intersections (see pos/i13950.scala).
+          // Instead, we manually lookup the type of `apply` in the qualifier.
+          inContext(preErasureCtx) {
+            val qualTp = tree.qualifier.typeOpt.widen
+            if qualTp.derivesFrom(defn.PolyFunctionClass) then
+              erasePolyFunctionApply(qualTp.select(nme.apply).widen).classSymbol
+            else
+              NoSymbol
+          }
         else
           val owner = sym.maybeOwner
           if defn.specialErasure.contains(owner) then
             assert(sym.isConstructor, s"${sym.showLocated}")
             defn.specialErasure(owner)
           else if defn.isSyntheticFunctionClass(owner) then
-            defn.erasedFunctionClass(owner)
+            defn.functionTypeErasure(owner).typeSymbol
           else
             owner
 
@@ -718,7 +698,7 @@ object Erasure {
 
       val owner = mapOwner(origSym)
       val sym = if (owner eq origSym.maybeOwner) origSym else owner.info.decl(tree.name).symbol
-      assert(sym.exists, origSym.showLocated)
+      assert(sym.exists, i"no owner from $owner/${origSym.showLocated} in $tree")
 
       if owner == defn.ObjectClass then checkValue(qual1)
 
@@ -828,49 +808,46 @@ object Erasure {
      */
     override def typedApply(tree: untpd.Apply, pt: Type)(using Context): Tree =
       val Apply(fun, args) = tree
-      if fun.symbol == defn.cbnArg then
-        typedUnadapted(args.head, pt)
-      else
-        val origFun = fun.asInstanceOf[tpd.Tree]
-        val origFunType = origFun.tpe.widen(using preErasureCtx)
-        val ownArgs = if origFunType.isErasedMethod then Nil else args
-        val fun1 = typedExpr(fun, AnyFunctionProto)
-        fun1.tpe.widen match
-          case mt: MethodType =>
-            val (xmt,        // A method type like `mt` but with bunched arguments expanded to individual ones
-                 bunchArgs,  // whether arguments are bunched
-                 outers) =   // the outer reference parameter(s)
-              if fun1.isInstanceOf[Apply] then
-                (mt, fun1.removeAttachment(BunchedArgs).isDefined, Nil)
-              else
-                val xmt = expandedMethodType(mt, origFun)
-                (xmt, xmt ne mt, outer.args(origFun))
-
-            val args0 = outers ::: ownArgs
-            val args1 = args0.zipWithConserve(xmt.paramInfos)(typedExpr)
-              .asInstanceOf[List[Tree]]
-
-            def mkApply(finalFun: Tree, finalArgs: List[Tree]) =
-              val app = untpd.cpy.Apply(tree)(finalFun, finalArgs)
-                .withType(applyResultType(xmt, args1))
-              if bunchArgs then app.withAttachment(BunchedArgs, ()) else app
-
-            def app(fun1: Tree): Tree = fun1 match
-              case Block(stats, expr) =>
-                cpy.Block(fun1)(stats, app(expr))
-              case Apply(fun2, SeqLiteral(prevArgs, argTpt) :: _) if bunchArgs =>
-                mkApply(fun2, JavaSeqLiteral(prevArgs ++ args1, argTpt) :: Nil)
-              case Apply(fun2, prevArgs) =>
-                mkApply(fun2, prevArgs ++ args1)
-              case _ if bunchArgs =>
-                mkApply(fun1, JavaSeqLiteral(args1, TypeTree(defn.ObjectType)) :: Nil)
-              case _ =>
-                mkApply(fun1, args1)
-
-            app(fun1)
-          case t =>
-            if ownArgs.isEmpty then fun1
-            else throw new MatchError(i"tree $tree has unexpected type of function $fun/$fun1: $t, was $origFunType, args = $ownArgs")
+      val origFun = fun.asInstanceOf[tpd.Tree]
+      val origFunType = origFun.tpe.widen(using preErasureCtx)
+      val ownArgs = if origFunType.isErasedMethod then Nil else args
+      val fun1 = typedExpr(fun, AnyFunctionProto)
+      fun1.tpe.widen match
+        case mt: MethodType =>
+          val (xmt,        // A method type like `mt` but with bunched arguments expanded to individual ones
+                bunchArgs,  // whether arguments are bunched
+                outers) =   // the outer reference parameter(s)
+            if fun1.isInstanceOf[Apply] then
+              (mt, fun1.removeAttachment(BunchedArgs).isDefined, Nil)
+            else
+              val xmt = expandedMethodType(mt, origFun)
+              (xmt, xmt ne mt, outer.args(origFun))
+
+          val args0 = outers ::: ownArgs
+          val args1 = args0.zipWithConserve(xmt.paramInfos)(typedExpr)
+            .asInstanceOf[List[Tree]]
+
+          def mkApply(finalFun: Tree, finalArgs: List[Tree]) =
+            val app = untpd.cpy.Apply(tree)(finalFun, finalArgs)
+              .withType(applyResultType(xmt, args1))
+            if bunchArgs then app.withAttachment(BunchedArgs, ()) else app
+
+          def app(fun1: Tree): Tree = fun1 match
+            case Block(stats, expr) =>
+              cpy.Block(fun1)(stats, app(expr))
+            case Apply(fun2, SeqLiteral(prevArgs, argTpt) :: _) if bunchArgs =>
+              mkApply(fun2, JavaSeqLiteral(prevArgs ++ args1, argTpt) :: Nil)
+            case Apply(fun2, prevArgs) =>
+              mkApply(fun2, prevArgs ++ args1)
+            case _ if bunchArgs =>
+              mkApply(fun1, JavaSeqLiteral(args1, TypeTree(defn.ObjectType)) :: Nil)
+            case _ =>
+              mkApply(fun1, args1)
+
+          app(fun1)
+        case t =>
+          if ownArgs.isEmpty then fun1
+          else throw new MatchError(i"tree $tree has unexpected type of function $fun/$fun1: $t, was $origFunType, args = $ownArgs")
     end typedApply
 
     // The following four methods take as the proto-type the erasure of the pre-existing type,
@@ -960,7 +937,7 @@ object Erasure {
       if constr.isConstructor && needsOuterParam(constr.owner.asClass) then
         constr.info match
           case MethodTpe(outerName :: _, outerType :: _, _) =>
-            val outerSym = newSymbol(constr, outerName, Flags.Param, outerType)
+            val outerSym = newSymbol(constr, outerName, Flags.Param | Flags.SyntheticArtifact, outerType)
             ValDef(outerSym) :: Nil
           case _ =>
             // There's a possible race condition that a constructor was looked at
@@ -1038,24 +1015,9 @@ object Erasure {
     override def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree =
       EmptyTree
 
-    /** Drop all constructor proxies of members of class `cls`.
-     *  If `cls` is itself a constructor proxy, mark it as absent after erasure.
-     */
-    private def dropConstructorProxies(cls: ClassSymbol)(using Context) =
-      import Flags._
-      if cls.linkedClass.is(ConstructorProxy) then
-        if cls.owner.is(PackageClass) && cls.isDefinedInCurrentRun then
-          cls.linkedClass.copySymDenotation(initFlags = EmptyFlags, info = NoType)
-            .installAfter(erasurePhase)
-        cls.registeredCompanion = NoSymbol
-      for mbr <- cls.info.decls do
-        if mbr.is(ConstructorProxy) then mbr.dropAfter(erasurePhase)
-
     override def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(using Context): Tree =
       if cls.is(Flags.Erased) then erasedDef(cls)
-      else
-        try super.typedClassDef(cdef, cls)
-        finally dropConstructorProxies(cls)
+      else super.typedClassDef(cdef, cls)
 
     override def typedAnnotated(tree: untpd.Annotated, pt: Type)(using Context): Tree =
       typed(tree.arg, pt)
diff --git a/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala b/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala
new file mode 100644
index 000000000000..ab5190daf0e8
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala
@@ -0,0 +1,50 @@
+package dotty.tools
+package dotc
+package transform
+
+import MegaPhase.MiniPhase
+import core.*
+import Symbols.*, Contexts.*, Types.*, Decorators.*
+import StdNames.nme
+import ast.Trees.*
+
+/** Rewrite `(x1, ... xN) => f(x1, ... xN)` for N >= 0 to `f`,
+ *  provided `f` is a pure path of function type.
+ *
+ *  This optimization is crucial for context functions. The compiler
+ *  produces a contextual closure around values passed as arguments
+ *  where a context function is expected, unless that value has the
+ *  syntactic form of a context function literal.
+ *
+ *  Without this phase, when a contextual function is passed as an argument to a
+ *  recursive function, that would have the unfortunate effect of a linear growth
+ *  in transient thunks of identical type wrapped around each other, leading
+ *  to performance degradation, and in some cases, stack overflows.
+ */
+class EtaReduce extends MiniPhase:
+  import ast.tpd._
+
+  override def phaseName: String = EtaReduce.name
+
+  override def description: String = EtaReduce.description
+
+  override def transformBlock(tree: Block)(using Context): Tree = tree match
+    case Block((meth : DefDef) :: Nil, closure: Closure)
+    if meth.symbol == closure.meth.symbol =>
+      meth.rhs match
+        case Apply(Select(fn, nme.apply), args)
+        if meth.paramss.head.corresponds(args)((param, arg) =>
+              arg.isInstanceOf[Ident] && arg.symbol == param.symbol)
+            && isPurePath(fn)
+            && fn.tpe <:< tree.tpe
+            && defn.isFunctionClass(fn.tpe.widen.typeSymbol) =>
+          report.log(i"eta reducing $tree --> $fn")
+          fn
+        case _ => tree
+    case _ => tree
+
+end EtaReduce
+
+object EtaReduce:
+  val name: String = "etaReduce"
+  val description: String = "reduce eta expansions of pure paths"
diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala b/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala
index a9f2aaec2052..f02f6b61d588 100644
--- a/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala
@@ -32,7 +32,9 @@ import ValueClasses._
 class ExpandPrivate extends MiniPhase with IdentityDenotTransformer { thisPhase =>
   import ast.tpd._
 
-  override def phaseName: String = "expandPrivate"
+  override def phaseName: String = ExpandPrivate.name
+
+  override def description: String = ExpandPrivate.description
 
   // This phase moves methods around (in infotransform) so it may need to make other methods public
   override def runsAfter: Set[String] = Set(MoveStatics.name)
@@ -111,3 +113,7 @@ class ExpandPrivate extends MiniPhase with IdentityDenotTransformer { thisPhase
     tree
   }
 }
+
+object ExpandPrivate:
+  val name: String = "expandPrivate"
+  val description: String = "widen private definitions accessed from nested classes"
diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala
index a152ec3ed981..0a2e49501278 100644
--- a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala
@@ -1,12 +1,15 @@
-package dotty.tools.dotc
+package dotty.tools
+package dotc
 package transform
 
 import core._
+import Scopes.newScope
 import Contexts._, Symbols._, Types._, Flags._, Decorators._, StdNames._, Constants._
 import MegaPhase._
 import SymUtils._
 import NullOpsDecorator._
 import ast.Trees._
+import ast.untpd
 import reporting._
 import dotty.tools.dotc.util.Spans.Span
 
@@ -25,6 +28,7 @@ import dotty.tools.dotc.util.Spans.Span
  */
 object ExpandSAMs:
   val name: String = "expandSAMs"
+  val description: String = "expand SAM closures to anonymous classes"
 
   /** Is the SAMType `cls` also a SAM under the rules of the platform? */
   def isPlatformSam(cls: ClassSymbol)(using Context): Boolean =
@@ -40,6 +44,8 @@ class ExpandSAMs extends MiniPhase:
 
   override def phaseName: String = ExpandSAMs.name
 
+  override def description: String = ExpandSAMs.description
+
   override def transformBlock(tree: Block)(using Context): Tree = tree match {
     case Block(stats @ (fn: DefDef) :: Nil, Closure(_, fnRef, tpt)) if fnRef.symbol == fn.symbol =>
       tpt.tpe match {
@@ -103,78 +109,73 @@ class ExpandSAMs extends MiniPhase:
    *  ```
    */
   private def toPartialFunction(tree: Block, tpe: Type)(using Context): Tree = {
-    /** An extractor for match, either contained in a block or standalone. */
-    object PartialFunctionRHS {
-      def unapply(tree: Tree): Option[Match] = tree match {
-        case Block(Nil, expr) => unapply(expr)
-        case m: Match => Some(m)
-        case _ => None
-      }
-    }
-
     val closureDef(anon @ DefDef(_, List(List(param)), _, _)) = tree
-    anon.rhs match {
-      case PartialFunctionRHS(pf) =>
-        val anonSym = anon.symbol
-        val anonTpe = anon.tpe.widen
-        val parents = List(
-          defn.AbstractPartialFunctionClass.typeRef.appliedTo(anonTpe.firstParamTypes.head, anonTpe.resultType),
-          defn.SerializableType)
-        val pfSym = newNormalizedClassSymbol(anonSym.owner, tpnme.ANON_CLASS, Synthetic | Final, parents, coord = tree.span)
-
-        def overrideSym(sym: Symbol) = sym.copy(
-          owner = pfSym,
-          flags = Synthetic | Method | Final | Override,
-          info = tpe.memberInfo(sym),
-          coord = tree.span).asTerm.entered
-        val isDefinedAtFn = overrideSym(defn.PartialFunction_isDefinedAt)
-        val applyOrElseFn = overrideSym(defn.PartialFunction_applyOrElse)
-
-        def translateMatch(tree: Match, pfParam: Symbol, cases: List[CaseDef], defaultValue: Tree)(using Context) = {
-          val selector = tree.selector
-          val selectorTpe = selector.tpe.widen
-          val defaultSym = newSymbol(pfParam.owner, nme.WILDCARD, Synthetic | Case, selectorTpe)
-          val defaultCase =
-            CaseDef(
-              Bind(defaultSym, Underscore(selectorTpe)),
-              EmptyTree,
-              defaultValue)
-          val unchecked = selector.annotated(New(ref(defn.UncheckedAnnot.typeRef)))
-          cpy.Match(tree)(unchecked, cases :+ defaultCase)
-            .subst(param.symbol :: Nil, pfParam :: Nil)
-              // Needed because  a partial function can be written as:
-              // param => param match { case "foo" if foo(param) => param }
-              // And we need to update all references to 'param'
-        }
-
-        def isDefinedAtRhs(paramRefss: List[List[Tree]])(using Context) = {
-          val tru = Literal(Constant(true))
-          def translateCase(cdef: CaseDef) =
-            cpy.CaseDef(cdef)(body = tru).changeOwner(anonSym, isDefinedAtFn)
-          val paramRef = paramRefss.head.head
-          val defaultValue = Literal(Constant(false))
-          translateMatch(pf, paramRef.symbol, pf.cases.map(translateCase), defaultValue)
-        }
-
-        def applyOrElseRhs(paramRefss: List[List[Tree]])(using Context) = {
-          val List(paramRef, defaultRef) = paramRefss(1)
-          def translateCase(cdef: CaseDef) =
-            cdef.changeOwner(anonSym, applyOrElseFn)
-          val defaultValue = defaultRef.select(nme.apply).appliedTo(paramRef)
-          translateMatch(pf, paramRef.symbol, pf.cases.map(translateCase), defaultValue)
-        }
-
-        val constr = newConstructor(pfSym, Synthetic, Nil, Nil).entered
-        val isDefinedAtDef = transformFollowingDeep(DefDef(isDefinedAtFn, isDefinedAtRhs(_)(using ctx.withOwner(isDefinedAtFn))))
-        val applyOrElseDef = transformFollowingDeep(DefDef(applyOrElseFn, applyOrElseRhs(_)(using ctx.withOwner(applyOrElseFn))))
-        val pfDef = ClassDef(pfSym, DefDef(constr), List(isDefinedAtDef, applyOrElseDef))
-        cpy.Block(tree)(pfDef :: Nil, New(pfSym.typeRef, Nil))
 
+    // The right hand side from which to construct the partial function. This is always a Match.
+    // If the original rhs is already a Match (possibly in braces), return that.
+    // Otherwise construct a match `x match case _ => rhs` where `x` is the parameter of the closure.
+    def partialFunRHS(tree: Tree): Match = tree match
+      case m: Match => m
+      case Block(Nil, expr) => partialFunRHS(expr)
       case _ =>
-        val found = tpe.baseType(defn.Function1)
-        report.error(TypeMismatch(found, tpe), tree.srcPos)
-        tree
+        Match(ref(param.symbol),
+          CaseDef(untpd.Ident(nme.WILDCARD).withType(param.symbol.info), EmptyTree, tree) :: Nil)
+
+    val pfRHS = partialFunRHS(anon.rhs)
+    val anonSym = anon.symbol
+    val anonTpe = anon.tpe.widen
+    val parents = List(
+      defn.AbstractPartialFunctionClass.typeRef.appliedTo(anonTpe.firstParamTypes.head, anonTpe.resultType),
+      defn.SerializableType)
+    val pfSym = newNormalizedClassSymbol(anonSym.owner, tpnme.ANON_CLASS, Synthetic | Final, parents, newScope, coord = tree.span)
+
+    def overrideSym(sym: Symbol) = sym.copy(
+      owner = pfSym,
+      flags = Synthetic | Method | Final | Override,
+      info = tpe.memberInfo(sym),
+      coord = tree.span).asTerm.entered
+    val isDefinedAtFn = overrideSym(defn.PartialFunction_isDefinedAt)
+    val applyOrElseFn = overrideSym(defn.PartialFunction_applyOrElse)
+
+    def translateMatch(tree: Match, pfParam: Symbol, cases: List[CaseDef], defaultValue: Tree)(using Context) = {
+      val selector = tree.selector
+      val selectorTpe = selector.tpe.widen
+      val defaultSym = newSymbol(pfParam.owner, nme.WILDCARD, Synthetic | Case, selectorTpe)
+      val defaultCase =
+        CaseDef(
+          Bind(defaultSym, Underscore(selectorTpe)),
+          EmptyTree,
+          defaultValue)
+      val unchecked = selector.annotated(New(ref(defn.UncheckedAnnot.typeRef)))
+      cpy.Match(tree)(unchecked, cases :+ defaultCase)
+        .subst(param.symbol :: Nil, pfParam :: Nil)
+          // Needed because a partial function can be written as:
+          // param => param match { case "foo" if foo(param) => param }
+          // And we need to update all references to 'param'
     }
+
+    def isDefinedAtRhs(paramRefss: List[List[Tree]])(using Context) = {
+      val tru = Literal(Constant(true))
+      def translateCase(cdef: CaseDef) =
+        cpy.CaseDef(cdef)(body = tru).changeOwner(anonSym, isDefinedAtFn)
+      val paramRef = paramRefss.head.head
+      val defaultValue = Literal(Constant(false))
+      translateMatch(pfRHS, paramRef.symbol, pfRHS.cases.map(translateCase), defaultValue)
+    }
+
+    def applyOrElseRhs(paramRefss: List[List[Tree]])(using Context) = {
+      val List(paramRef, defaultRef) = paramRefss(1)
+      def translateCase(cdef: CaseDef) =
+        cdef.changeOwner(anonSym, applyOrElseFn)
+      val defaultValue = defaultRef.select(nme.apply).appliedTo(paramRef)
+      translateMatch(pfRHS, paramRef.symbol, pfRHS.cases.map(translateCase), defaultValue)
+    }
+
+    val constr = newConstructor(pfSym, Synthetic, Nil, Nil).entered
+    val isDefinedAtDef = transformFollowingDeep(DefDef(isDefinedAtFn, isDefinedAtRhs(_)(using ctx.withOwner(isDefinedAtFn))))
+    val applyOrElseDef = transformFollowingDeep(DefDef(applyOrElseFn, applyOrElseRhs(_)(using ctx.withOwner(applyOrElseFn))))
+    val pfDef = ClassDef(pfSym, DefDef(constr), List(isDefinedAtDef, applyOrElseDef))
+    cpy.Block(tree)(pfDef :: Nil, New(pfSym.typeRef, Nil))
   }
 
   private def checkRefinements(tpe: Type, tree: Tree)(using Context): Type = tpe.dealias match {
diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala
index 8b13633e6125..1ae915a1fc10 100644
--- a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala
@@ -40,10 +40,10 @@ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase =>
 
   override def phaseName: String = ExplicitOuter.name
 
-  /** List of names of phases that should have finished their processing of all compilation units
-    * before this phase starts
-    */
-  override def runsAfter: Set[String] = Set(PatternMatcher.name, HoistSuperArgs.name)
+  override def description: String = ExplicitOuter.description
+
+  override def runsAfter:         Set[String] = Set(HoistSuperArgs.name)
+  override def runsAfterGroupsOf: Set[String] = Set(PatternMatcher.name)
 
   override def changesMembers: Boolean = true // the phase adds outer accessors
 
@@ -127,6 +127,7 @@ object ExplicitOuter {
   import ast.tpd._
 
   val name: String = "explicitOuter"
+  val description: String = "add accessors to outer classes from nested ones"
 
   /** Ensure that class `cls` has outer accessors */
   def ensureOuterAccessors(cls: ClassSymbol)(using Context): Unit =
@@ -179,7 +180,7 @@ object ExplicitOuter {
           else prefix.widen)
     val info = if (flags.is(Method)) ExprType(target) else target
     atPhaseNoEarlier(explicitOuterPhase.next) { // outer accessors are entered at explicitOuter + 1, should not be defined before.
-      newSymbol(owner, name, Synthetic | flags, info, coord = cls.coord)
+      newSymbol(owner, name, SyntheticArtifact | flags, info, coord = cls.coord)
     }
   }
 
@@ -282,6 +283,10 @@ object ExplicitOuter {
       case TypeRef(prefix, _) => isOuterRef(prefix)
       case _ => false
     }
+    def containsOuterRefs(tp: Type): Boolean = tp match
+      case tp: SingletonType => isOuterRef(tp)
+      case tp: AndOrType => containsOuterRefs(tp.tp1) || containsOuterRefs(tp.tp2)
+      case _ => false
     tree match {
       case _: This | _: Ident => isOuterRef(tree.tpe)
       case nw: New =>
@@ -292,6 +297,9 @@ object ExplicitOuter {
           // newCls might get proxies for free variables. If current class is
           // properly contained in newCls, it needs an outer path to newCls access the
           // proxies and forward them to the new instance.
+      case app: TypeApply if app.symbol.isTypeTest =>
+        // Type tests of singletons translate to `eq` tests with references, which might require outer pointers
+        containsOuterRefs(app.args.head.tpe)
       case _ =>
         false
     }
diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala
index 3a2bda437d80..0398ac7f8d19 100644
--- a/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala
@@ -21,7 +21,9 @@ import Contexts._, Types._, MegaPhase._, ast.Trees._, Symbols._, Decorators._, F
 class ExplicitSelf extends MiniPhase {
   import ast.tpd._
 
-  override def phaseName: String = "explicitSelf"
+  override def phaseName: String = ExplicitSelf.name
+
+  override def description: String = ExplicitSelf.description
 
   private def needsCast(tree: RefTree, cls: ClassSymbol)(using Context) =
     !cls.is(Package) && cls.givenSelfType.exists && !cls.derivesFrom(tree.symbol.owner)
@@ -49,3 +51,7 @@ class ExplicitSelf extends MiniPhase {
     case _ => tree
   }
 }
+
+object ExplicitSelf:
+  val name: String = "explicitSelf"
+  val description: String = "make references to non-trivial self types explicit as casts"
diff --git a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala
index 546c4e18c633..27aee2d445d9 100644
--- a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala
@@ -41,9 +41,10 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete
   import tpd._
   import ExtensionMethods._
 
-  /** the following two members override abstract members in Transform */
   override def phaseName: String = ExtensionMethods.name
 
+  override def description: String = ExtensionMethods.description
+
   override def runsAfter: Set[String] = Set(
     ElimRepeated.name,
     ProtectedAccessors.name,  // protected accessors cannot handle code that is moved from class to companion object
@@ -174,6 +175,7 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete
 
 object ExtensionMethods {
   val name: String = "extmethods"
+  val description: String = "expand methods of value classes with extension methods"
 
   /** Name of the extension method that corresponds to given instance method `meth`. */
   def extensionName(imeth: Symbol)(using Context): TermName =
diff --git a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala
index 1ff9edda16d8..b01fc0a992df 100644
--- a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala
+++ b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala
@@ -22,10 +22,12 @@ import TypeUtils.isErasedValueType
 
 object FirstTransform {
   val name: String = "firstTransform"
+  val description: String = "some transformations to put trees into a canonical form"
 }
 
 /** The first tree transform
- *   - eliminates some kinds of trees: Imports, NamedArgs
+ *   - eliminates some kinds of trees: Imports other than language imports,
+ *     Exports, NamedArgs, type trees other than TypeTree
  *   - stubs out native methods
  *   - eliminates self tree in Template and self symbol in ClassInfo
  *   - collapses all type trees to trees of class TypeTree
@@ -39,6 +41,8 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase =>
 
   override def phaseName: String = FirstTransform.name
 
+  override def description: String = FirstTransform.description
+
   /** eliminate self symbol in ClassInfo */
   override def transformInfo(tp: Type, sym: Symbol)(using Context): Type = tp match {
     case tp @ ClassInfo(_, _, _, _, self: Symbol) =>
@@ -58,7 +62,7 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase =>
             tree.symbol.is(JavaStatic) && qualTpe.derivesFrom(tree.symbol.enclosingClass),
           i"non member selection of ${tree.symbol.showLocated} from ${qualTpe} in $tree")
       case _: TypeTree =>
-      case _: Import | _: NamedArg | _: TypTree =>
+      case _: Export | _: NamedArg | _: TypTree =>
         assert(false, i"illegal tree: $tree")
       case _ =>
     }
@@ -136,7 +140,8 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase =>
   }
 
   override def transformOther(tree: Tree)(using Context): Tree = tree match {
-    case tree: ImportOrExport => EmptyTree
+    case tree: Import if untpd.languageImport(tree.expr).isEmpty => EmptyTree
+    case tree: Export => EmptyTree
     case tree: NamedArg => transformAllDeep(tree.arg)
     case tree => if (tree.isType) toTypeTree(tree) else tree
   }
diff --git a/compiler/src/dotty/tools/dotc/transform/Flatten.scala b/compiler/src/dotty/tools/dotc/transform/Flatten.scala
index 2009076e5846..678a202709e0 100644
--- a/compiler/src/dotty/tools/dotc/transform/Flatten.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Flatten.scala
@@ -14,7 +14,9 @@ import util.Store
 class Flatten extends MiniPhase with SymTransformer {
   import ast.tpd._
 
-  override def phaseName: String = "flatten"
+  override def phaseName: String = Flatten.name
+
+  override def description: String = Flatten.description
 
   // private[this] and protected[this] modifiers must be dropped
   // before classes are lifted. Getters drop these modifiers.
@@ -56,3 +58,7 @@ class Flatten extends MiniPhase with SymTransformer {
   override def transformTypeDef(tree: TypeDef)(using Context): Tree =
     liftIfNested(tree)
 }
+
+object Flatten:
+  val name: String = "flatten"
+  val description: String = "lift all inner classes to package scope"
diff --git a/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala b/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala
new file mode 100644
index 000000000000..e01c975d0f0d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala
@@ -0,0 +1,114 @@
+package dotty.tools
+package dotc
+package transform
+
+import core.*
+import Symbols.*, Types.*, Contexts.*, Flags.*, Decorators.*, reporting.*
+import util.Spans.Span
+import util.Store
+import collection.immutable
+import ast.tpd
+import MegaPhase.MiniPhase
+
+object ForwardDepChecks:
+
+  import tpd.*
+
+  val name: String = "forwardDepChecks"
+  val description: String = "ensure no forward references to local vals"
+
+  type LevelAndIndex = immutable.Map[Symbol, (LevelInfo, Int)]
+
+  class OptLevelInfo {
+    def levelAndIndex: LevelAndIndex = Map()
+    def enterReference(sym: Symbol, span: Span): Unit = ()
+  }
+
+  /** A class to help in forward reference checking */
+  class LevelInfo(outerLevelAndIndex: LevelAndIndex, stats: List[Tree])(using Context)
+  extends OptLevelInfo {
+    override val levelAndIndex: LevelAndIndex =
+      stats.foldLeft(outerLevelAndIndex, 0) {(mi, stat) =>
+        val (m, idx) = mi
+        val m1 = stat match {
+          case stat: MemberDef => m.updated(stat.symbol, (this, idx))
+          case _ => m
+        }
+        (m1, idx + 1)
+      }._1
+    var maxIndex: Int = Int.MinValue
+    var refSpan: Span = _
+    var refSym: Symbol = _
+
+    override def enterReference(sym: Symbol, span: Span): Unit =
+      if (sym.exists && sym.owner.isTerm)
+        levelAndIndex.get(sym) match {
+          case Some((level, idx)) if (level.maxIndex < idx) =>
+            level.maxIndex = idx
+            level.refSpan = span
+            level.refSym = sym
+          case _ =>
+        }
+  }
+
+  val NoLevelInfo: OptLevelInfo = new OptLevelInfo()
+
+class ForwardDepChecks extends MiniPhase:
+  import ForwardDepChecks.*
+  import tpd.*
+
+  override def phaseName: String = ForwardDepChecks.name
+
+  override def description: String = ForwardDepChecks.description
+
+  override def runsAfter: Set[String] = Set(ElimByName.name)
+
+  private var LevelInfo: Store.Location[OptLevelInfo] = _
+  private def currentLevel(using Context): OptLevelInfo = ctx.store(LevelInfo)
+
+  override def initContext(ctx: FreshContext): Unit =
+    LevelInfo = ctx.addLocation(NoLevelInfo)
+
+  override def prepareForStats(trees: List[Tree])(using Context): Context =
+    if (ctx.owner.isTerm)
+      ctx.fresh.updateStore(LevelInfo, new LevelInfo(currentLevel.levelAndIndex, trees))
+    else ctx
+
+  override def transformValDef(tree: ValDef)(using Context): ValDef =
+    val sym = tree.symbol
+    if sym.exists && sym.owner.isTerm && !sym.is(Lazy) then
+      currentLevel.levelAndIndex.get(sym) match
+        case Some((level, symIdx)) if symIdx <= level.maxIndex =>
+          report.error(ForwardReferenceExtendsOverDefinition(sym, level.refSym),
+            ctx.source.atSpan(level.refSpan))
+        case _ =>
+    tree
+
+  override def transformIdent(tree: Ident)(using Context): Ident = {
+    currentLevel.enterReference(tree.symbol, tree.span)
+    tree
+  }
+
+  override def transformApply(tree: Apply)(using Context): Apply = {
+    if (isSelfConstrCall(tree)) {
+      assert(currentLevel.isInstanceOf[LevelInfo], s"${ctx.owner}/" + i"$tree")
+      val level = currentLevel.asInstanceOf[LevelInfo]
+      if (level.maxIndex > 0) {
+        // An implementation restriction to avoid VerifyErrors and lazyvals mishaps; see SI-4717
+        report.debuglog("refsym = " + level.refSym)
+        report.error("forward reference not allowed from self constructor invocation",
+          ctx.source.atSpan(level.refSpan))
+      }
+    }
+    tree
+  }
+
+  override def transformNew(tree: New)(using Context): New = {
+    currentLevel.enterReference(tree.tpe.typeSymbol, tree.span)
+    tree.tpe.dealias.foreachPart {
+      case TermRef(_, s: Symbol) => currentLevel.enterReference(s, tree.span)
+      case _ =>
+    }
+    tree
+  }
+end ForwardDepChecks
diff --git a/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala b/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala
index 4fd075aa0dba..a7fa5c0fe909 100644
--- a/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala
+++ b/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala
@@ -25,7 +25,9 @@ import Types._
 class FunctionXXLForwarders extends MiniPhase with IdentityDenotTransformer {
   import ast.tpd._
 
-  override def phaseName: String = "functionXXLForwarders"
+  override def phaseName: String = FunctionXXLForwarders.name
+
+  override def description: String = FunctionXXLForwarders.description
 
   override def transformTemplate(impl: Template)(using Context): Template = {
 
@@ -59,3 +61,6 @@ class FunctionXXLForwarders extends MiniPhase with IdentityDenotTransformer {
   }
 }
 
+object FunctionXXLForwarders:
+  val name: String = "functionXXLForwarders"
+  val description: String = "add forwarders for FunctionXXL apply methods"
diff --git a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala
index e2977a03e804..c18009784ba9 100644
--- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala
+++ b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala
@@ -81,21 +81,14 @@ object GenericSignatures {
       val (repr :: _, others) = splitIntersection(bounds)
       builder.append(':')
 
-      // According to the Java spec
-      // (https://docs.oracle.com/javase/specs/jls/se8/html/jls-4.html#jls-4.4),
-      // intersections erase to their first member and must start with a class.
-      // So, if our intersection erases to a trait, in theory we should emit
-      // just that trait in the generic signature even if the intersection type
-      // is composed of multiple traits. But in practice Scala 2 has always
-      // ignored this restriction as intersections of traits seem to be handled
-      // correctly by javac, we do the same here since type soundness seems
-      // more important than adhering to the spec.
+      // In Java, intersections always erase to their first member, so put
+      // whatever parent erases to the Scala intersection erasure first in the
+      // signature.
       if repr.classSymbol.is(Trait) then
+        // An initial ':' is needed if the intersection starts with an interface
+        // (cf https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-TypeParameter)
         builder.append(':')
-        boxedSig(repr)
-        // If we wanted to be compliant with the spec, we would `return` here.
-      else
-        boxedSig(repr)
+      boxedSig(repr)
       others.filter(_.classSymbol.is(Trait)).foreach { tp =>
         builder.append(':')
         boxedSig(tp)
@@ -131,10 +124,10 @@ object GenericSignatures {
      */
     def splitIntersection(parents: List[Type])(using Context): (List[Type], List[Type]) =
       val erasedParents = parents.map(erasure)
-      val erasedCls = erasedGlb(erasedParents).classSymbol
+      val erasedTp = erasedGlb(erasedParents)
       parents.zip(erasedParents)
         .partitionMap((parent, erasedParent) =>
-          if erasedParent.classSymbol eq erasedCls then
+          if erasedParent =:= erasedTp then
             Left(parent)
           else
             Right(parent))
@@ -281,10 +274,10 @@ object GenericSignatures {
               jsig(erasedUnderlying, toplevel, primitiveOK)
           }
           else if (defn.isSyntheticFunctionClass(sym)) {
-            val erasedSym = defn.erasedFunctionClass(sym)
+            val erasedSym = defn.functionTypeErasure(sym).typeSymbol
             classSig(erasedSym, pre, if (erasedSym.typeParams.isEmpty) Nil else args)
           }
-          else if (sym.isClass)
+          else if sym.isClass then
             classSig(sym, pre, args)
           else
             jsig(erasure(tp), toplevel, primitiveOK)
@@ -298,7 +291,7 @@ object GenericSignatures {
 
         case PolyType(tparams, mtpe: MethodType) =>
           assert(tparams.nonEmpty)
-          if (toplevel) polyParamSig(tparams)
+          if (toplevel && !sym0.isConstructor) polyParamSig(tparams)
           jsig(mtpe)
 
         // Nullary polymorphic method
@@ -462,7 +455,7 @@ object GenericSignatures {
   private class NeedsSigCollector(using Context) extends TypeAccumulator[Boolean] {
     override def apply(x: Boolean, tp: Type): Boolean =
       if (!x)
-        tp match {
+        tp.dealias match {
           case RefinedType(parent, refinedName, refinedInfo) =>
             val sym = parent.typeSymbol
             if (sym == defn.ArrayClass) foldOver(x, refinedInfo)
@@ -478,9 +471,9 @@ object GenericSignatures {
             foldOver(tp.typeParams.nonEmpty, parents)
           case AnnotatedType(tpe, _) =>
             foldOver(x, tpe)
-          case proxy: TypeProxy =>
-            foldOver(x, proxy)
-          case _ =>
+          case ExprType(tpe) =>
+            true
+          case tp =>
             foldOver(x, tp)
         }
       else x
diff --git a/compiler/src/dotty/tools/dotc/transform/Getters.scala b/compiler/src/dotty/tools/dotc/transform/Getters.scala
index 7e5ea9c81b2a..2a46185a0512 100644
--- a/compiler/src/dotty/tools/dotc/transform/Getters.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Getters.scala
@@ -62,6 +62,8 @@ class Getters extends MiniPhase with SymTransformer { thisPhase =>
 
   override def phaseName: String = Getters.name
 
+  override def description: String = Getters.description
+
   override def transformSym(d: SymDenotation)(using Context): SymDenotation = {
     def noGetterNeeded =
       d.isOneOf(NoGetterNeededFlags) ||
@@ -120,4 +122,5 @@ class Getters extends MiniPhase with SymTransformer { thisPhase =>
 
 object Getters {
   val name: String = "getters"
+  val description: String = "replace non-private vals and vars with getter defs"
 }
diff --git a/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala b/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala
index ca584561b711..3233601310ae 100644
--- a/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala
+++ b/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala
@@ -16,6 +16,7 @@ import SymUtils._
 
 object HoistSuperArgs {
   val name: String = "hoistSuperArgs"
+  val description: String = "hoist complex arguments of supercalls to enclosing scope"
 }
 
 /** This phase hoists complex arguments of supercalls and this-calls out of the enclosing class.
@@ -43,12 +44,12 @@ object HoistSuperArgs {
 class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase =>
   import ast.tpd._
 
-  def phaseName: String = HoistSuperArgs.name
+  override def phaseName: String = HoistSuperArgs.name
 
-  override def runsAfter: Set[String] = Set(ByNameClosures.name)
+  override def description: String = HoistSuperArgs.description
+
+  override def runsAfter: Set[String] = Set(ElimByName.name)
     // By name closures need to be introduced first in order to be hoisted out here.
-    // There's an interaction with by name closures in that the  marker
-    // application should not be hoisted, but be left at the point of call.
 
   /** Defines methods for hoisting complex supercall arguments out of
    *  parent super calls and constructor definitions.
@@ -88,7 +89,7 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase
       def newSuperArgMethod(argType: Type) = {
         val (staticFlag, methOwner) =
           if (cls.owner.is(Package)) (JavaStatic, cls) else (EmptyFlags, cls.owner)
-        val argTypeWrtConstr = argType.subst(origParams, allParamRefs(constr.info))
+        val argTypeWrtConstr = argType.widenTermRefExpr.subst(origParams, allParamRefs(constr.info))
         // argType with references to paramRefs of the primary constructor instead of
         // local parameter accessors
         newSymbol(
@@ -126,8 +127,6 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase
 
       // begin hoistSuperArg
       arg match {
-        case Apply(fn, arg1 :: Nil) if fn.symbol == defn.cbnArg =>
-          cpy.Apply(arg)(fn, hoistSuperArg(arg1, cdef) :: Nil)
         case _ if arg.existsSubTree(needsHoist) =>
           val superMeth = newSuperArgMethod(arg.tpe)
           val superArgDef = DefDef(superMeth, prefss => {
diff --git a/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala b/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala
index fdef553a835d..80f7402dfc53 100644
--- a/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala
+++ b/compiler/src/dotty/tools/dotc/transform/InlinePatterns.scala
@@ -29,7 +29,9 @@ import ast.TreeTypeMap
 class InlinePatterns extends MiniPhase:
   import ast.tpd._
 
-  def phaseName: String = "inlinePatterns"
+  override def phaseName: String = InlinePatterns.name
+
+  override def description: String = InlinePatterns.description
 
   // This phase needs to run after because it need to transform trees that are generated
   // by the pattern matcher but are still not visible in that group of phases.
@@ -59,3 +61,8 @@ class InlinePatterns extends MiniPhase:
           case List(ddef @ DefDef(`name`, _, _, _)) => BetaReduce(ddef, args)
           case _ => tree
       case _ => tree
+
+object InlinePatterns:
+  val name: String = "inlinePatterns"
+  val description: String = "remove placeholders of inlined patterns"
+
diff --git a/compiler/src/dotty/tools/dotc/transform/InlineVals.scala b/compiler/src/dotty/tools/dotc/transform/InlineVals.scala
index 9c3dc295d207..25fb573ccf10 100644
--- a/compiler/src/dotty/tools/dotc/transform/InlineVals.scala
+++ b/compiler/src/dotty/tools/dotc/transform/InlineVals.scala
@@ -14,7 +14,9 @@ import dotty.tools.dotc.typer.Inliner
 class InlineVals extends MiniPhase:
   import ast.tpd._
 
-  def phaseName: String = "inlineVals"
+  override def phaseName: String = InlineVals.name
+
+  override def description: String = InlineVals.description
 
   override def checkPostCondition(tree: Tree)(using Context): Unit =
     if !ctx.erasedTypes then
@@ -33,15 +35,25 @@ class InlineVals extends MiniPhase:
     then
       val rhs = tree.rhs
       val tpt = tree.tpt
-      tpt.tpe.widenTermRefExpr.dealias.normalized match
+      tpt.tpe.widenTermRefExpr.dealiasKeepOpaques.normalized match
         case tp: ConstantType =>
           if !isPureExpr(rhs) then
             val details = if enclosingInlineds.isEmpty then "" else em"but was: $rhs"
             report.error(s"inline value must be pure$details", rhs.srcPos)
         case tp =>
-          if tp.derivesFrom(defn.StringClass) || defn.ScalaValueClasses().exists(tp.derivesFrom(_)) then
+          if tp.typeSymbol.is(Opaque) then
+            report.error(em"The type of an `inline val` cannot be an opaque type.\n\nTo inline, consider using `inline def` instead", rhs)
+          else if tp.derivesFrom(defn.UnitClass) then
+            report.error(em"`inline val` of type `Unit` is not supported.\n\nTo inline a `Unit` consider using `inline def`", rhs)
+          else if tp.derivesFrom(defn.StringClass) || defn.ScalaValueClasses().exists(tp.derivesFrom(_)) then
             val pos = if tpt.span.isZeroExtent then rhs.srcPos else tpt.srcPos
             report.error(em"inline value must have a literal constant type", pos)
+          else if tp.derivesFrom(defn.NullClass) then
+            report.error(em"`inline val` with `null` is not supported.\n\nTo inline a `null` consider using `inline def`", rhs)
           else
             report.error(em"inline value must contain a literal constant value.\n\nTo inline more complex types consider using `inline def`", rhs)
-  }
\ No newline at end of file
+  }
+
+object InlineVals:
+  val name: String = "inlineVals"
+  val description: String = "check right-hand sides of `inline val`s"
diff --git a/compiler/src/dotty/tools/dotc/transform/Inlining.scala b/compiler/src/dotty/tools/dotc/transform/Inlining.scala
index 7b6a45ce3cda..32805ed0d596 100644
--- a/compiler/src/dotty/tools/dotc/transform/Inlining.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Inlining.scala
@@ -11,11 +11,11 @@ import Constants._
 import ast.Trees._
 import ast.{TreeTypeMap, untpd}
 import util.Spans._
-import tasty.TreePickler.Hole
 import SymUtils._
 import NameKinds._
 import dotty.tools.dotc.ast.tpd
 import typer.Implicits.SearchFailureType
+import typer.PrepareInlineable
 
 import scala.collection.mutable
 import dotty.tools.dotc.core.Annotations._
@@ -37,6 +37,8 @@ class Inlining extends MacroTransform {
 
   override def phaseName: String = Inlining.name
 
+  override def description: String = Inlining.description
+
   override def allowsImplicitSearch: Boolean = true
 
   override def run(using Context): Unit =
@@ -98,3 +100,4 @@ class Inlining extends MacroTransform {
 
 object Inlining:
   val name: String = "inlining"
+  val description: String = "inline and execute macros"
diff --git a/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala b/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala
index 676cf00e2f46..12d845ca9a68 100644
--- a/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala
@@ -23,7 +23,9 @@ import Constants.Constant
 class Instrumentation extends MiniPhase { thisPhase =>
   import ast.tpd._
 
-  override def phaseName: String = "instrumentation"
+  override def phaseName: String = Instrumentation.name
+
+  override def description: String = Instrumentation.description
 
   override def isEnabled(using Context) =
     ctx.settings.Yinstrument.value
@@ -105,3 +107,7 @@ class Instrumentation extends MiniPhase { thisPhase =>
       tree
   }
 }
+
+object Instrumentation:
+  val name: String = "instrumentation"
+  val description: String = "count calls and allocations under -Yinstrument"
diff --git a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala
index 0b42162643f8..a2ec4da70c1c 100644
--- a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala
+++ b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala
@@ -13,6 +13,7 @@ import dotty.tools.dotc.transform.MegaPhase.MiniPhase
 
 object InterceptedMethods {
   val name: String = "intercepted"
+  val description: String = "handling of `==`, `##`, `getClass` methods"
 }
 
 /** Replace member references as follows:
@@ -27,6 +28,8 @@ class InterceptedMethods extends MiniPhase {
 
   override def phaseName: String = InterceptedMethods.name
 
+  override def description: String = InterceptedMethods.description
+
   // this should be removed if we have guarantee that ## will get Apply node
   override def transformSelect(tree: tpd.Select)(using Context): Tree =
     transformRefTree(tree)
diff --git a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala
index 58d4d7083d32..ce392c636a76 100644
--- a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala
+++ b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala
@@ -17,22 +17,38 @@ import ast.Trees._
 import SymUtils._
 import ExplicitOuter.outer
 import util.Store
-import collection.mutable
-import collection.mutable.{ HashMap, HashSet, LinkedHashMap, TreeSet }
+import collection.mutable.{HashMap, LinkedHashMap, ListBuffer}
 
-object LambdaLift {
+object LambdaLift:
   import ast.tpd._
-  private class NoPath extends Exception
 
   val name: String = "lambdaLift"
+  val description: String = "lift nested functions out to class scope"
 
   /** The core lambda lift functionality. */
-  class Lifter(thisPhase: MiniPhase with DenotTransformer)(using Context) {
+  class Lifter(thisPhase: MiniPhase & DenotTransformer)(using Context):
 
-    private type SymSet = TreeSet[Symbol]
+    /** The outer parameter of a constructor */
+    private val outerParam = new HashMap[Symbol, Symbol]
 
-    /** A map storing free variables of functions and classes */
-    val free: mutable.LinkedHashMap[Symbol, SymSet] = new LinkedHashMap
+    /** Buffers for lifted out classes and methods, indexed by owner */
+    val liftedDefs: HashMap[Symbol, ListBuffer[Tree]] = new HashMap
+
+    val deps = new Dependencies(ctx.compilationUnit.tpdTree, ctx.withPhase(thisPhase)):
+      def isExpr(sym: Symbol)(using Context): Boolean = sym.is(Method)
+      def enclosure(using Context) = ctx.owner.enclosingMethod
+
+      override def process(tree: Tree)(using Context): Unit =
+        super.process(tree)
+        tree match
+          case tree: DefDef if tree.symbol.isConstructor =>
+            tree.termParamss.head.find(_.name == nme.OUTER) match
+              case Some(vdef) => outerParam(tree.symbol) = vdef.symbol
+              case _ =>
+          case tree: Template =>
+            liftedDefs(tree.symbol.owner) = new ListBuffer
+          case _ =>
+    end deps
 
     /** A map storing the free variable proxies of functions and classes.
      *  For every function and class, this is a map from the free variables
@@ -40,262 +56,10 @@ object LambdaLift {
      */
     private val proxyMap = new LinkedHashMap[Symbol, Map[Symbol, Symbol]]
 
-    /** A hashtable storing calls between functions */
-    private val called = new LinkedHashMap[Symbol, SymSet]
-
-    /** Symbols that are called from an inner class. */
-    private val calledFromInner = new HashSet[Symbol]
-
-    /** A map from local methods and classes to the owners to which they will be lifted as members.
-     *  For methods and classes that do not have any dependencies this will be the enclosing package.
-     *  symbols with packages as lifted owners will subsequently represented as static
-     *  members of their toplevel class, unless their enclosing class was already static.
-     *  Note: During tree transform (which runs at phase LambdaLift + 1), liftedOwner
-     *  is also used to decide whether a method had a term owner before.
-     */
-    private val liftedOwner = new LinkedHashMap[Symbol, Symbol]
-
-    /** The outer parameter of a constructor */
-    private val outerParam = new HashMap[Symbol, Symbol]
-
-    /** Buffers for lifted out classes and methods, indexed by owner */
-    val liftedDefs: mutable.HashMap[Symbol, mutable.ListBuffer[Tree]] = new HashMap
-
-    /** A flag to indicate whether new free variables have been found */
-    private var changedFreeVars: Boolean = _
-
-    /** A flag to indicate whether lifted owners have changed */
-    private var changedLiftedOwner: Boolean = _
-
-    private val ord: Ordering[Symbol] = Ordering.by(_.id)
-    private def newSymSet = TreeSet.empty[Symbol](ord)
-
-    private def symSet(f: LinkedHashMap[Symbol, SymSet], sym: Symbol): SymSet =
-      f.getOrElseUpdate(sym, newSymSet)
-
-    def freeVars(sym: Symbol): List[Symbol] = free get sym match {
-      case Some(set) => set.toList
-      case None => Nil
-    }
-
     def proxyOf(sym: Symbol, fv: Symbol): Symbol = proxyMap.getOrElse(sym, Map.empty)(fv)
 
-    def proxies(sym: Symbol): List[Symbol] =  freeVars(sym).map(proxyOf(sym, _))
-
-    /** A symbol is local if it is owned by a term or a local trait,
-     *  or if it is a constructor of a local symbol.
-     */
-    def isLocal(sym: Symbol)(using Context): Boolean = {
-      val owner = sym.maybeOwner
-      owner.isTerm ||
-      owner.is(Trait) && isLocal(owner) ||
-      sym.isConstructor && isLocal(owner)
-    }
-
-    /** Set `liftedOwner(sym)` to `owner` if `owner` is more deeply nested
-     *  than the previous value of `liftedowner(sym)`.
-     */
-    def narrowLiftedOwner(sym: Symbol, owner: Symbol)(using Context): Unit =
-      if (sym.maybeOwner.isTerm &&
-        owner.isProperlyContainedIn(liftedOwner(sym)) &&
-        owner != sym) {
-          report.log(i"narrow lifted $sym to $owner")
-          changedLiftedOwner = true
-          liftedOwner(sym) = owner
-      }
-
-    /** Mark symbol `sym` as being free in `enclosure`, unless `sym` is defined
-     *  in `enclosure` or there is an intermediate class properly containing `enclosure`
-     *  in which `sym` is also free. Also, update `liftedOwner` of `enclosure` so
-     *  that `enclosure` can access `sym`, or its proxy in an intermediate class.
-     *  This means:
-     *
-     *    1. If there is an intermediate class in which `sym` is free, `enclosure`
-     *       must be contained in that class (in order to access the `sym proxy stored
-     *       in the class).
-     *
-     *    2. If there is no intermediate class, `enclosure` must be contained
-     *       in the class enclosing `sym`.
-     *
-     *  @return  If there is a non-trait class between `enclosure` and
-     *           the owner of `sym`, the largest such class.
-     *           Otherwise, if there is a trait between `enclosure` and
-     *           the owner of `sym`, the largest such trait.
-     *           Otherwise, NoSymbol.
-     *
-     *  @pre sym.owner.isTerm, (enclosure.isMethod || enclosure.isClass)
-     *
-     *  The idea of `markFree` is illustrated with an example:
-     *
-     *  def f(x: int) = {
-     *    class C {
-     *      class D {
-     *        val y = x
-     *      }
-     *    }
-     *  }
-     *
-     *  In this case `x` is free in the primary constructor of class `C`.
-     *  but it is not free in `D`, because after lambda lift the code would be transformed
-     *  as follows:
-     *
-     *  def f(x$0: int) {
-     *    class C(x$0: int) {
-     *      val x$1 = x$0
-     *      class D {
-     *        val y = outer.x$1
-     *      }
-     *    }
-     *  }
-     */
-    private def markFree(sym: Symbol, enclosure: Symbol)(using Context): Symbol = try {
-      if (!enclosure.exists) throw new NoPath
-      if (enclosure == sym.enclosure) NoSymbol
-      else {
-        def nestedInConstructor(sym: Symbol): Boolean =
-          sym.isConstructor ||
-          sym.isTerm && nestedInConstructor(sym.enclosure)
-        report.debuglog(i"mark free: ${sym.showLocated} with owner ${sym.maybeOwner} marked free in $enclosure")
-        val intermediate =
-          if (enclosure.is(PackageClass)) enclosure
-          else if (enclosure.isConstructor) markFree(sym, enclosure.owner.enclosure)
-          else markFree(sym, enclosure.enclosure)
-        if (intermediate.exists) narrowLiftedOwner(enclosure, intermediate)
-        if !intermediate.isRealClass || nestedInConstructor(enclosure) then
-          // Constructors and methods nested inside traits get the free variables
-          // of the enclosing trait or class.
-          // Conversely, local traits do not get free variables.
-          // Methods inside constructors also don't have intermediates,
-          // need to get all their free variables passed directly.
-          if (!enclosure.is(Trait))
-            if (symSet(free, enclosure).add(sym)) {
-              changedFreeVars = true
-              report.log(i"$sym is free in $enclosure")
-            }
-        if (intermediate.isRealClass) intermediate
-        else if (enclosure.isRealClass) enclosure
-        else if (intermediate.isClass) intermediate
-        else if (enclosure.isClass) enclosure
-        else NoSymbol
-      }
-    }
-    catch {
-      case ex: NoPath =>
-        println(i"error lambda lifting ${ctx.compilationUnit}: $sym is not visible from $enclosure")
-        throw ex
-    }
-
-    private def markCalled(callee: Symbol, caller: Symbol)(using Context): Unit = {
-      report.debuglog(i"mark called: $callee of ${callee.owner} is called by $caller in ${caller.owner}")
-      assert(isLocal(callee))
-      symSet(called, caller) += callee
-      if (callee.enclosingClass != caller.enclosingClass) calledFromInner += callee
-    }
-
-    private class CollectDependencies extends TreeTraverser {
-      def traverse(tree: Tree)(using Context) = try { //debug
-        val sym = tree.symbol
-
-        def enclosure = ctx.owner.enclosingMethod
-
-        def narrowTo(thisClass: ClassSymbol) = {
-          val enclMethod = enclosure
-          val enclClass = enclMethod.enclosingClass
-          narrowLiftedOwner(enclMethod,
-            if (enclClass.isContainedIn(thisClass)) thisClass
-            else enclClass) // unknown this reference, play it safe and assume the narrowest possible owner
-        }
-
-        tree match {
-          case tree: Ident =>
-            if (isLocal(sym))
-              if (sym is Method) markCalled(sym, enclosure)
-              else if (sym.isTerm) markFree(sym, enclosure)
-            def captureImplicitThis(x: Type): Unit =
-              x match {
-                case tr@TermRef(x, _) if (!tr.termSymbol.isStatic) => captureImplicitThis(x)
-                case x: ThisType if (!x.tref.typeSymbol.isStaticOwner) => narrowTo(x.tref.typeSymbol.asClass)
-                case _ =>
-              }
-            captureImplicitThis(tree.tpe)
-          case tree: Select =>
-            if (sym.is(Method) && isLocal(sym)) markCalled(sym, enclosure)
-          case tree: This =>
-            narrowTo(tree.symbol.asClass)
-          case tree: DefDef =>
-            if (sym.owner.isTerm)
-              liftedOwner(sym) = sym.enclosingPackageClass
-                // this will make methods in supercall constructors of top-level classes owned
-                // by the enclosing package, which means they will be static.
-                // On the other hand, all other methods will be indirectly owned by their
-                // top-level class. This avoids possible deadlocks when a static method
-                // has to access its enclosing object from the outside.
-            else if (sym.isConstructor) {
-              if (sym.isPrimaryConstructor && isLocal(sym.owner) && !sym.owner.is(Trait))
-                // add a call edge from the constructor of a local non-trait class to
-                // the class itself. This is done so that the constructor inherits
-                // the free variables of the class.
-                symSet(called, sym) += sym.owner
-
-              tree.termParamss.head.find(_.name == nme.OUTER) match {
-                case Some(vdef) => outerParam(sym) = vdef.symbol
-                case _ =>
-              }
-            }
-          case tree: TypeDef =>
-            if (sym.owner.isTerm) liftedOwner(sym) = sym.topLevelClass.owner
-          case tree: Template =>
-            liftedDefs(tree.symbol.owner) = new mutable.ListBuffer
-          case _ =>
-        }
-        traverseChildren(tree)
-      }
-      catch { //debug
-        case ex: Exception =>
-          println(i"$ex while traversing $tree")
-          throw ex
-      }
-    }
-
-    /** Compute final free variables map `fvs by closing over caller dependencies. */
-    private def computeFreeVars()(using Context): Unit =
-      while ({
-        changedFreeVars = false
-        for {
-          caller <- called.keys
-          callee <- called(caller)
-          fvs <- free get callee
-          fv <- fvs
-        }
-        markFree(fv, caller)
-        changedFreeVars
-      })
-      ()
-
-    /** Compute final liftedOwner map by closing over caller dependencies */
-    private def computeLiftedOwners()(using Context): Unit =
-      while ({
-        changedLiftedOwner = false
-        for {
-          caller <- called.keys
-          callee <- called(caller)
-        }
-        {
-          val normalizedCallee = callee.skipConstructor
-          val calleeOwner = normalizedCallee.owner
-          if (calleeOwner.isTerm) narrowLiftedOwner(caller, liftedOwner(normalizedCallee))
-          else {
-            assert(calleeOwner.is(Trait))
-            // methods nested inside local trait methods cannot be lifted out
-            // beyond the trait. Note that we can also call a trait method through
-            // a qualifier; in that case no restriction to lifted owner arises.
-            if (caller.isContainedIn(calleeOwner))
-              narrowLiftedOwner(caller, calleeOwner)
-          }
-        }
-        changedLiftedOwner
-      })
-      ()
+    def proxies(sym: Symbol): List[Symbol] =
+      deps.freeVars(sym).toList.map(proxyOf(sym, _))
 
     private def newName(sym: Symbol)(using Context): Name =
       if (sym.isAnonymousFunction && sym.owner.is(Method))
@@ -305,19 +69,18 @@ object LambdaLift {
       else sym.name.freshened
 
     private def generateProxies()(using Context): Unit =
-      for ((owner, freeValues) <- free.iterator) {
+      for owner <- deps.tracked do
+        val fvs = deps.freeVars(owner).toList
         val newFlags = Synthetic | (if (owner.isClass) ParamAccessor | Private else Param)
-        report.debuglog(i"free var proxy of ${owner.showLocated}: ${freeValues.toList}%, %")
-        proxyMap(owner) = {
-          for (fv <- freeValues.toList) yield {
+        report.debuglog(i"free var proxy of ${owner.showLocated}: $fvs%, %")
+        val freeProxyPairs =
+          for fv <- fvs yield
             val proxyName = newName(fv)
             val proxy =
               newSymbol(owner, proxyName.asTermName, newFlags, fv.info, coord = fv.coord)
                 .enteredAfter(thisPhase)
             (fv, proxy)
-          }
-        }.toMap
-      }
+        proxyMap(owner) = freeProxyPairs.toMap
 
     private def liftedInfo(local: Symbol)(using Context): Type = local.info match {
       case MethodTpe(pnames, ptypes, restpe) =>
@@ -330,7 +93,7 @@ object LambdaLift {
     }
 
     private def liftLocals()(using Context): Unit = {
-      for ((local, lOwner) <- liftedOwner) {
+      for ((local, lOwner) <- deps.logicalOwner) {
         val (newOwner, maybeStatic) =
           if (lOwner is Package) {
             val encClass = local.enclosingClass
@@ -365,22 +128,11 @@ object LambdaLift {
           initFlags = initFlags,
           info = liftedInfo(local)).installAfter(thisPhase)
       }
-      for (local <- free.keys)
-        if (!liftedOwner.contains(local))
+      for (local <- deps.tracked)
+        if (!deps.logicalOwner.contains(local))
           local.copySymDenotation(info = liftedInfo(local)).installAfter(thisPhase)
     }
 
-    // initialization
-    atPhase(thisPhase) {
-      (new CollectDependencies).traverse(ctx.compilationUnit.tpdTree)
-      computeFreeVars()
-      computeLiftedOwners()
-    }
-    atPhase(thisPhase.next) {
-      generateProxies()
-      liftLocals()
-    }
-
     def currentEnclosure(using Context): Symbol =
       ctx.owner.enclosingMethodOrClass
 
@@ -388,7 +140,8 @@ object LambdaLift {
       sym.enclosure == currentEnclosure
 
     private def proxy(sym: Symbol)(using Context): Symbol = {
-      def liftedEnclosure(sym: Symbol) = liftedOwner.getOrElse(sym, sym.enclosure)
+      def liftedEnclosure(sym: Symbol) =
+        deps.logicalOwner.getOrElse(sym, sym.enclosure)
       def searchIn(enclosure: Symbol): Symbol = {
         if (!enclosure.exists) {
           def enclosures(encl: Symbol): List[Symbol] =
@@ -429,10 +182,8 @@ object LambdaLift {
     }
 
     def addFreeArgs(sym: Symbol, args: List[Tree])(using Context): List[Tree] =
-      free get sym match {
-        case Some(fvs) => fvs.toList.map(proxyRef(_)) ++ args
-        case _ => args
-      }
+      val fvs = deps.freeVars(sym)
+      if fvs.nonEmpty then fvs.toList.map(proxyRef(_)) ++ args else args
 
     def addFreeParams(tree: Tree, proxies: List[Symbol])(using Context): Tree = proxies match {
       case Nil => tree
@@ -445,7 +196,7 @@ object LambdaLift {
 
         /** Initialize proxy fields from proxy parameters and map `rhs` from fields to parameters */
         def copyParams(rhs: Tree) = {
-          val fvs = freeVars(sym.owner)
+          val fvs = deps.freeVars(sym.owner).toList
           val classProxies = fvs.map(proxyOf(sym.owner, _))
           val constrProxies = fvs.map(proxyOf(sym, _))
           report.debuglog(i"copy params ${constrProxies.map(_.showLocated)}%, % to ${classProxies.map(_.showLocated)}%, %}")
@@ -470,9 +221,15 @@ object LambdaLift {
       EmptyTree
     }
 
-    def needsLifting(sym: Symbol): Boolean = liftedOwner contains sym
-  }
-}
+    def needsLifting(sym: Symbol): Boolean = deps.logicalOwner.contains(sym)
+
+    // initialization
+    atPhase(thisPhase.next) {
+      generateProxies()
+      liftLocals()
+    }
+  end Lifter
+end LambdaLift
 
 /** This phase performs the necessary rewritings to eliminate classes and methods
  *  nested in other methods. In detail:
@@ -509,8 +266,9 @@ class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisPhase =>
   import LambdaLift._
   import ast.tpd._
 
-  /** the following two members override abstract members in Transform */
-  val phaseName: String = LambdaLift.name
+  override def phaseName: String = LambdaLift.name
+
+  override def description: String = LambdaLift.description
 
   override def relaxedTypingInGroup: Boolean = true
     // Because it adds free vars as additional proxy parameters
@@ -558,7 +316,7 @@ class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisPhase =>
     // reload them manually here.
     // Note: If you tweak this code, make sure to test your changes with
     // `Config.reuseSymDenotations` set to false to exercise this path more.
-    if denot.isInstanceOf[NonSymSingleDenotation] && lifter.free.contains(sym) then
+    if denot.isInstanceOf[NonSymSingleDenotation] && lifter.deps.freeVars(sym).nonEmpty then
       tree.qualifier.select(sym).withSpan(tree.span)
     else tree
 
@@ -572,7 +330,7 @@ class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisPhase =>
     val sym = tree.symbol
     val lft = lifter
     val paramsAdded =
-      if (lft.free.contains(sym)) lft.addFreeParams(tree, lft.proxies(sym)).asInstanceOf[DefDef]
+      if lft.deps.freeVars(sym).nonEmpty then lft.addFreeParams(tree, lft.proxies(sym)).asInstanceOf[DefDef]
       else tree
     if (lft.needsLifting(sym)) lft.liftDef(paramsAdded)
     else paramsAdded
diff --git a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala
index 121a2fd01a31..aabf8cf9a680 100644
--- a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala
+++ b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala
@@ -30,6 +30,8 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer {
 
   override def phaseName: String = LazyVals.name
 
+  override def description: String = LazyVals.description
+
   /** List of names of phases that should have finished processing of tree
     * before this phase starts processing same tree */
   override def runsAfter: Set[String] = Set(Mixin.name, CollectNullableFields.name)
@@ -439,6 +441,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer {
 
 object LazyVals {
   val name: String = "lazyVals"
+  val description: String = "expand lazy vals"
 
   object lazyNme {
     import Names.TermName
diff --git a/compiler/src/dotty/tools/dotc/transform/LetOverApply.scala b/compiler/src/dotty/tools/dotc/transform/LetOverApply.scala
index cd61c0f5ac82..c7ad2381c5f7 100644
--- a/compiler/src/dotty/tools/dotc/transform/LetOverApply.scala
+++ b/compiler/src/dotty/tools/dotc/transform/LetOverApply.scala
@@ -15,7 +15,9 @@ import ast.Trees._
 class LetOverApply extends MiniPhase:
   import ast.tpd._
 
-  override def phaseName: String = "letOverApply"
+  override def phaseName: String = LetOverApply.name
+
+  override def description: String = LetOverApply.description
 
   override def transformApply(tree: Apply)(using Context): Tree =
     tree.fun match
@@ -30,3 +32,7 @@ class LetOverApply extends MiniPhase:
         tree
 
 end LetOverApply
+
+object LetOverApply:
+  val name: String = "letOverApply"
+  val description: String = "lift blocks from receivers of applications"
diff --git a/compiler/src/dotty/tools/dotc/transform/LiftTry.scala b/compiler/src/dotty/tools/dotc/transform/LiftTry.scala
index e5e234b8fe8e..f70e6a38fcf5 100644
--- a/compiler/src/dotty/tools/dotc/transform/LiftTry.scala
+++ b/compiler/src/dotty/tools/dotc/transform/LiftTry.scala
@@ -30,7 +30,9 @@ import util.Store
 class LiftTry extends MiniPhase with IdentityDenotTransformer { thisPhase =>
   import ast.tpd._
 
-  val phaseName: String = LiftTry.name
+  override def phaseName: String = LiftTry.name
+
+  override def description: String = LiftTry.description
 
   private var NeedLift: Store.Location[Boolean] = _
   private def needLift(using Context): Boolean = ctx.store(NeedLift)
@@ -44,6 +46,9 @@ class LiftTry extends MiniPhase with IdentityDenotTransformer { thisPhase =>
   override def prepareForApply(tree: Apply)(using Context): Context =
     liftingCtx(true)
 
+  override def prepareForDefDef(tree: DefDef)(using Context): Context =
+    liftingCtx(false)
+
   override def prepareForValDef(tree: ValDef)(using Context): Context =
     if !tree.symbol.exists
        || tree.symbol.isSelfSym
@@ -80,3 +85,4 @@ class LiftTry extends MiniPhase with IdentityDenotTransformer { thisPhase =>
 }
 object LiftTry:
   val name = "liftTry"
+  val description: String = "lift any try that might be executed on a non-empty expression stack"
diff --git a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala
index 6a246e509e61..0baf85c4e21d 100644
--- a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala
+++ b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala
@@ -29,19 +29,11 @@ abstract class MacroTransform extends Phase {
    */
   protected def transformPhase(using Context): Phase = this
 
-  class Transformer extends TreeMap(cpy = cpyBetweenPhases) {
+  class Transformer extends TreeMapWithPreciseStatContexts(cpy = cpyBetweenPhases):
 
-    protected def localCtx(tree: Tree)(using Context): FreshContext = 
+    protected def localCtx(tree: Tree)(using Context): FreshContext =
       ctx.fresh.setTree(tree).setOwner(localOwner(tree))
 
-    override def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] = {
-      def transformStat(stat: Tree): Tree = stat match {
-        case _: Import | _: DefTree => transform(stat)
-        case _ => transform(stat)(using ctx.exprContext(stat, exprOwner))
-      }
-      flatten(trees.mapconserve(transformStat(_)))
-    }
-
     override def transform(tree: Tree)(using Context): Tree =
       try
         tree match {
@@ -67,5 +59,5 @@ abstract class MacroTransform extends Phase {
 
     def transformSelf(vd: ValDef)(using Context): ValDef =
       cpy.ValDef(vd)(tpt = transform(vd.tpt))
-  }
+  end Transformer
 }
diff --git a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala
index 77671fc6498c..56342322824c 100644
--- a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala
+++ b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala
@@ -432,16 +432,10 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase {
   def transformSpecificTree[T <: Tree](tree: T, start: Int)(using Context): T =
     transformTree(tree, start).asInstanceOf[T]
 
-  def transformStats(trees: List[Tree], exprOwner: Symbol, start: Int)(using Context): List[Tree] = {
-    def transformStat(stat: Tree)(using Context): Tree = stat match {
-      case _: Import | _: DefTree => transformTree(stat, start)
-      case Thicket(stats) => cpy.Thicket(stat)(stats.mapConserve(transformStat))
-      case _ => transformTree(stat, start)(using ctx.exprContext(stat, exprOwner))
-    }
+  def transformStats(trees: List[Tree], exprOwner: Symbol, start: Int)(using Context): List[Tree] =
     val nestedCtx = prepStats(trees, start)
-    val trees1 = trees.mapInline(transformStat(_)(using nestedCtx))
+    val trees1 = trees.mapStatements(exprOwner, transformTree(_, start))(using nestedCtx)
     goStats(trees1, start)(using nestedCtx)
-  }
 
   def transformUnit(tree: Tree)(using Context): Tree = {
     val nestedCtx = prepUnit(tree, 0)
@@ -461,11 +455,6 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase {
 
   // Initialization code
 
-  for ((phase, idx) <- miniPhases.zipWithIndex) {
-    phase.superPhase = this
-    phase.idxInGroup = idx
-  }
-
   /** Class#getDeclaredMethods is slow, so we cache its output */
   private val clsMethodsCache = new java.util.IdentityHashMap[Class[?], Array[java.lang.reflect.Method]]
 
@@ -570,6 +559,11 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase {
   private val nxOtherPrepPhase = init("prepareForOther")
   private val nxOtherTransPhase = init("transformOther")
 
+  for ((phase, idx) <- miniPhases.zipWithIndex) {
+    phase.superPhase = this
+    phase.idxInGroup = idx
+  }
+
   // Boilerplate snippets
 
   def prepIdent(tree: Ident, start: Int)(using Context): Context = {
diff --git a/compiler/src/dotty/tools/dotc/transform/Memoize.scala b/compiler/src/dotty/tools/dotc/transform/Memoize.scala
index 2f73ed8bd9eb..8e9bea8384c2 100644
--- a/compiler/src/dotty/tools/dotc/transform/Memoize.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Memoize.scala
@@ -22,6 +22,7 @@ import util.Store
 
 object Memoize {
   val name: String = "memoize"
+  val description: String = "add private fields to getters and setters"
 
   private final class MyState {
     val classesThatNeedReleaseFence = new util.HashSet[Symbol]
@@ -49,6 +50,8 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase =>
 
   override def phaseName: String = Memoize.name
 
+  override def description: String = Memoize.description
+
   private var MyState: Store.Location[MyState] = _
   private def myState(using Context): MyState = ctx.store(MyState)
 
diff --git a/compiler/src/dotty/tools/dotc/transform/Mixin.scala b/compiler/src/dotty/tools/dotc/transform/Mixin.scala
index d223566a208c..c3828327385c 100644
--- a/compiler/src/dotty/tools/dotc/transform/Mixin.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Mixin.scala
@@ -10,6 +10,7 @@ import SymUtils._
 import Symbols._
 import SymDenotations._
 import Types._
+import Periods._
 import Decorators._
 import DenotTransformers._
 import StdNames._
@@ -21,6 +22,7 @@ import collection.mutable
 
 object Mixin {
   val name: String = "mixin"
+  val description: String = "expand trait fields and trait initializers"
 
   def traitSetterName(getter: TermSymbol)(using Context): TermName =
     getter.ensureNotPrivate.name
@@ -111,6 +113,8 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase =>
 
   override def phaseName: String = Mixin.name
 
+  override def description: String = Mixin.description
+
   override def relaxedTypingInGroup: Boolean = true
     // Because it changes number of parameters in trait initializers
 
@@ -146,6 +150,7 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase =>
         // !decl.isClass avoids forcing nested traits, preventing cycles
         if !decl.isClass && needsTraitSetter(decl) then
           val setter = makeTraitSetter(decl.asTerm)
+          setter.validFor = thisPhase.validFor // validity of setter = next phase up to next transformer afterwards
           decls1.enter(setter)
           modified = true
       if modified then
diff --git a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala
index ce71734e16d9..bf25bfe71569 100644
--- a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala
+++ b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala
@@ -26,6 +26,8 @@ class MoveStatics extends MiniPhase with SymTransformer {
 
   override def phaseName: String = MoveStatics.name
 
+  override def description: String = MoveStatics.description
+
   def transformSym(sym: SymDenotation)(using Context): SymDenotation =
     if (sym.hasAnnotation(defn.ScalaStaticAnnot) && sym.owner.is(Flags.Module) && sym.owner.companionClass.exists &&
         (sym.is(Flags.Method) || !(sym.is(Flags.Mutable) && sym.owner.companionClass.is(Flags.Trait)))) {
@@ -88,4 +90,5 @@ class MoveStatics extends MiniPhase with SymTransformer {
 
 object MoveStatics {
   val name: String = "moveStatic"
+  val description: String = "move static methods from companion to the class itself"
 }
diff --git a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala
index e65809fd97ba..4ab9663f6529 100644
--- a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala
+++ b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala
@@ -10,6 +10,10 @@ import config.SourceVersion._
 
 object NonLocalReturns {
   import ast.tpd._
+
+  val name: String = "nonLocalReturns"
+  val description: String = "expand non-local returns"
+
   def isNonLocalReturn(ret: Return)(using Context): Boolean =
     !ret.from.symbol.is(Label) && (ret.from.symbol != ctx.owner.enclosingMethod || ctx.owner.is(Lazy))
 }
@@ -17,7 +21,10 @@ object NonLocalReturns {
 /** Implement non-local returns using NonLocalReturnControl exceptions.
  */
 class NonLocalReturns extends MiniPhase {
-  override def phaseName: String = "nonLocalReturns"
+
+  override def phaseName: String = NonLocalReturns.name
+
+  override def description: String = NonLocalReturns.description
 
   import NonLocalReturns._
   import ast.tpd._
diff --git a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala
index b2851fd92619..437dfea9f156 100644
--- a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala
+++ b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala
@@ -1,8 +1,10 @@
-package dotty.tools.dotc
+package dotty.tools
+package dotc
 package transform
 
 import core._
-import Flags._, Symbols._, Contexts._, Scopes._, Decorators._
+import Flags._, Symbols._, Contexts._, Scopes._, Decorators._, Types.Type
+import NameKinds.DefaultGetterName
 import collection.mutable
 import collection.immutable.BitSet
 import scala.annotation.tailrec
@@ -15,12 +17,12 @@ import scala.annotation.tailrec
  *  Adapted from the 2.9 version of OverridingPairs. The 2.10 version is IMO
  *  way too unwieldy to be maintained.
  */
-object OverridingPairs {
+object OverridingPairs:
 
   /** The cursor class
    *  @param base   the base class that contains the overriding pairs
    */
-  class Cursor(base: Symbol)(using Context) {
+  class Cursor(base: Symbol)(using Context):
 
     private val self = base.thisType
 
@@ -33,11 +35,11 @@ object OverridingPairs {
      *  pair has already been treated in a parent class.
      *  This may be refined in subclasses. @see Bridges for a use case.
      */
-    protected def parents: Array[Symbol] = base.info.parents.toArray.map(_.typeSymbol)
+    protected def parents: Array[Symbol] = base.info.parents.toArray.map(_.classSymbol)
 
-    /** Does `sym1` match `sym2` so that it qualifies as overriding.
-     *  Types always match. Term symbols match if their membertypes
-     *  relative to .this do
+    /** Does `sym1` match `sym2` so that it qualifies as overriding when both symbols are
+     *  seen as members of `self`? Types always match. Term symbols match if their member types
+     *  relative to `self` do.
      */
     protected def matches(sym1: Symbol, sym2: Symbol): Boolean =
       sym1.isType || sym1.asSeenFrom(self).matches(sym2.asSeenFrom(self))
@@ -85,11 +87,22 @@ object OverridingPairs {
         then bits += i
       subParents(bc) = bits
 
-    private def hasCommonParentAsSubclass(cls1: Symbol, cls2: Symbol): Boolean =
-      (subParents(cls1) intersect subParents(cls2)).nonEmpty
+    /** Is the override of `sym1` and `sym2` already handled when checking
+     *  a parent of `self`?
+     */
+    private def isHandledByParent(sym1: Symbol, sym2: Symbol): Boolean =
+      val commonParents = subParents(sym1.owner).intersect(subParents(sym2.owner))
+      commonParents.nonEmpty
+      && commonParents.exists(i => canBeHandledByParent(sym1, sym2, parents(i)))
+
+    /** Can pair `sym1`/`sym2` be handled by parent `parent`, which is a common subtype
+     *  of both symbols' owners? Assumed to be true by default, but overridden in RefChecks.
+     */
+    protected def canBeHandledByParent(sym1: Symbol, sym2: Symbol, parent: Symbol): Boolean =
+      true
 
     /** The scope entries that have already been visited as overridden
-     *  (maybe excluded because of hasCommonParentAsSubclass).
+     *  (maybe excluded because of already handled by a parent).
      *  These will not appear as overriding
      */
     private val visited = util.HashSet[Symbol]()
@@ -134,30 +147,83 @@ object OverridingPairs {
      *    overridden = overridden member of the pair, provided hasNext is true
      */
     @tailrec final def next(): Unit =
-      if (nextEntry ne null) {
+      if nextEntry != null then
         nextEntry = decls.lookupNextEntry(nextEntry)
-        if (nextEntry ne null)
-          try {
+        if nextEntry != null then
+          try
             overridden = nextEntry.sym
-            if (overriding.owner != overridden.owner && matches(overriding, overridden)) {
+            if overriding.owner != overridden.owner && matches(overriding, overridden) then
               visited += overridden
-              if (!hasCommonParentAsSubclass(overriding.owner, overridden.owner)) return
-            }
-          }
-          catch {
-            case ex: TypeError =>
-              // See neg/i1750a for an example where a cyclic error can arise.
-              // The root cause in this example is an illegal "override" of an inner trait
-              report.error(ex, base.srcPos)
-          }
-        else {
+              if !isHandledByParent(overriding, overridden) then return
+          catch case ex: TypeError =>
+            // See neg/i1750a for an example where a cyclic error can arise.
+            // The root cause in this example is an illegal "override" of an inner trait
+            report.error(ex, base.srcPos)
+        else
           curEntry = curEntry.prev
           nextOverriding()
-        }
         next()
-      }
 
     nextOverriding()
     next()
-  }
-}
+  end Cursor
+
+  /** Is this `sym1` considered an override of `sym2` (or vice versa) if both are
+   *  seen as members of `site`?
+   *  We declare a match if either we have a full match including matching names
+   *  or we have a loose match with different target name but the types are the same.
+   *  We leave out pairs of methods in Java classes under the assumption that these
+   *  have already been checked and handled by javac.
+   *  This leaves two possible sorts of discrepancies to be reported as errors
+   *  in `RefChecks`:
+   *
+   *    - matching names, target names, and signatures but different types
+   *    - matching names and types, but different target names
+   *
+   *  This method is used as a replacement of `matches` in some subclasses of
+   *  OverridingPairs.
+   */
+  def isOverridingPair(sym1: Symbol, sym2: Symbol, self: Type)(using Context): Boolean =
+    if     sym1.owner.is(JavaDefined, butNot = Trait)
+        && sym2.owner.is(JavaDefined, butNot = Trait)
+    then false // javac already handles these checks and inserts bridges
+    else if sym1.isType then true
+    else
+      val sd1 = sym1.asSeenFrom(self)
+      val sd2 = sym2.asSeenFrom(self)
+      sd1.matchesLoosely(sd2)
+        && (sym1.hasTargetName(sym2.targetName)
+            || isOverridingPair(sym1, sd1.info, sym2, sd2.info))
+
+  /** Let `member` and `other` be members of some common class C with types
+   *  `memberTp` and `otherTp` in C. Are the two symbols considered an overriding
+   *  pair in C? We assume that names already match so we test only the types here.
+   *  @param fallBack  A function called if the initial test is false and
+   *                   `member` and `other` are term symbols.
+   */
+  def isOverridingPair(member: Symbol, memberTp: Type, other: Symbol, otherTp: Type, fallBack: => Boolean = false)(using Context): Boolean =
+    if member.isType then // intersection of bounds to refined types must be nonempty
+      memberTp.bounds.hi.hasSameKindAs(otherTp.bounds.hi)
+      && (
+        (memberTp frozen_<:< otherTp)
+        || !member.owner.derivesFrom(other.owner)
+            && {
+              // if member and other come from independent classes or traits, their
+              // bounds must have non-empty-intersection
+              val jointBounds = (memberTp.bounds & otherTp.bounds).bounds
+              jointBounds.lo frozen_<:< jointBounds.hi
+            }
+      )
+    else
+      // relaxed override check for explicit nulls if one of the symbols is Java defined,
+      // force `Null` being a subtype of reference types during override checking
+      val relaxedCtxForNulls =
+        if ctx.explicitNulls && (member.is(JavaDefined) || other.is(JavaDefined)) then
+          ctx.retractMode(Mode.SafeNulls)
+        else ctx
+      member.name.is(DefaultGetterName) // default getters are not checked for compatibility
+      || memberTp.overrides(otherTp,
+            member.matchNullaryLoosely || other.matchNullaryLoosely || fallBack
+          )(using relaxedCtxForNulls)
+
+end OverridingPairs
diff --git a/compiler/src/dotty/tools/dotc/transform/PCPCheckAndHeal.scala b/compiler/src/dotty/tools/dotc/transform/PCPCheckAndHeal.scala
index 626455da63d1..47e5ae96f5da 100644
--- a/compiler/src/dotty/tools/dotc/transform/PCPCheckAndHeal.scala
+++ b/compiler/src/dotty/tools/dotc/transform/PCPCheckAndHeal.scala
@@ -124,6 +124,7 @@ class PCPCheckAndHeal(@constructorOnly ictx: Context) extends TreeMapWithStages(
       val targs2 = targs.map(targ => TypeTree(healTypeOfTerm(quote.fun.srcPos)(targ.tpe)))
       cpy.Apply(quote)(cpy.TypeApply(quote.fun)(fun, targs2), body2 :: Nil)
     else
+      val quotes = quote.args.mapConserve(transform)
       body.tpe match
         case tp @ TypeRef(x: TermRef, _) if tp.symbol == defn.QuotedType_splice =>
           // Optimization: `quoted.Type.of[x.Underlying](quotes)`  -->  `x`
@@ -131,7 +132,7 @@ class PCPCheckAndHeal(@constructorOnly ictx: Context) extends TreeMapWithStages(
         case _ =>
           // `quoted.Type.of[](quotes)`  --> `quoted.Type.of[](quotes)`
           val TypeApply(fun, _) = quote.fun
-          cpy.Apply(quote)(cpy.TypeApply(quote.fun)(fun, body2 :: Nil), quote.args)
+          cpy.Apply(quote)(cpy.TypeApply(quote.fun)(fun, body2 :: Nil), quotes)
   }
 
   /** Transform splice
diff --git a/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala b/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala
index 028516f1ca07..6eb29f26edea 100644
--- a/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ParamForwarding.scala
@@ -36,7 +36,9 @@ class ParamForwarding extends MiniPhase with IdentityDenotTransformer:
 
   private def thisPhase: ParamForwarding = this
 
-  val phaseName: String = "paramForwarding"
+  override def phaseName: String = ParamForwarding.name
+
+  override def description: String = ParamForwarding.description
 
   def transformIfParamAlias(mdef: ValOrDefDef)(using Context): Tree =
 
@@ -80,3 +82,7 @@ class ParamForwarding extends MiniPhase with IdentityDenotTransformer:
 
   override def transformDefDef(mdef: DefDef)(using Context): Tree =
     transformIfParamAlias(mdef)
+
+object ParamForwarding:
+  val name: String = "paramForwarding"
+  val description: String = "add forwarders for aliases of superclass parameters"
diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala
index 047b2587299a..21aa331367ca 100644
--- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala
+++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala
@@ -8,7 +8,7 @@ import collection.mutable
 import Symbols._, Contexts._, Types._, StdNames._, NameOps._
 import ast.Trees._
 import util.Spans._
-import typer.Applications.{isProductMatch, isGetMatch, isProductSeqMatch, productSelectors, productArity, unapplySeqTypeElemTp}
+import typer.Applications.*
 import SymUtils._
 import Flags._, Constants._
 import Decorators._
@@ -20,13 +20,16 @@ import util.Property._
 
 /** The pattern matching transform.
  *  After this phase, the only Match nodes remaining in the code are simple switches
- *  where every pattern is an integer constant
+ *  where every pattern is an integer or string constant
  */
 class PatternMatcher extends MiniPhase {
   import ast.tpd._
   import PatternMatcher._
 
   override def phaseName: String = PatternMatcher.name
+
+  override def description: String = PatternMatcher.description
+
   override def runsAfter: Set[String] = Set(ElimRepeated.name)
 
   override def transformMatch(tree: Match)(using Context): Tree =
@@ -53,11 +56,12 @@ object PatternMatcher {
   import ast.tpd._
 
   val name: String = "patternMatcher"
+  val description: String = "compile pattern matches"
 
-  final val selfCheck = false // debug option, if on we check that no case gets generated twice
+  inline val selfCheck = false // debug option, if on we check that no case gets generated twice
 
   /** Minimal number of cases to emit a switch */
-  final val MinSwitchCases = 4
+  inline val MinSwitchCases = 4
 
   val TrustedTypeTestKey: Key[Unit] = new StickyKey[Unit]
 
@@ -231,8 +235,8 @@ object PatternMatcher {
         case _ =>
           tree.tpe
 
-      /** Plan for matching `selectors` against argument patterns `args` */
-      def matchArgsPlan(selectors: List[Tree], args: List[Tree], onSuccess: Plan): Plan = {
+      /** Plan for matching `components` against argument patterns `args` */
+      def matchArgsPlan(components: List[Tree], args: List[Tree], onSuccess: Plan): Plan = {
         /* For a case with arguments that have some test on them such as
          * ```
          * case Foo(1, 2) => someCode
@@ -249,9 +253,9 @@ object PatternMatcher {
          * } else ()
          * ```
          */
-        def matchArgsSelectorsPlan(selectors: List[Tree], syms: List[Symbol]): Plan =
-          selectors match {
-            case selector :: selectors1 => letAbstract(selector, selector.avoidPatBoundType())(sym => matchArgsSelectorsPlan(selectors1, sym :: syms))
+        def matchArgsComponentsPlan(components: List[Tree], syms: List[Symbol]): Plan =
+          components match {
+            case component :: components1 => letAbstract(component, component.avoidPatBoundType())(sym => matchArgsComponentsPlan(components1, sym :: syms))
             case Nil => matchArgsPatternPlan(args, syms.reverse)
           }
         def matchArgsPatternPlan(args: List[Tree], syms: List[Symbol]): Plan =
@@ -263,7 +267,7 @@ object PatternMatcher {
               assert(syms.isEmpty)
               onSuccess
           }
-        matchArgsSelectorsPlan(selectors, Nil)
+        matchArgsComponentsPlan(components, Nil)
       }
 
       /** Plan for matching the sequence in `seqSym` against sequence elements `args`.
@@ -325,8 +329,17 @@ object PatternMatcher {
         def isSyntheticScala2Unapply(sym: Symbol) =
           sym.isAllOf(SyntheticCase) && sym.owner.is(Scala2x)
 
+        def tupleApp(i: Int, receiver: Tree) = // manually inlining the call to NonEmptyTuple#apply, because it's an inline method
+          ref(defn.RuntimeTuplesModule)
+            .select(defn.RuntimeTuples_apply)
+            .appliedTo(receiver, Literal(Constant(i)))
+            .cast(args(i).tpe.widen)
+
         if (isSyntheticScala2Unapply(unapp.symbol) && caseAccessors.length == args.length)
-          matchArgsPlan(caseAccessors.map(ref(scrutinee).select(_)), args, onSuccess)
+          def tupleSel(sym: Symbol) = ref(scrutinee).select(sym)
+          val isGenericTuple = defn.isTupleClass(caseClass) && !defn.isTupleNType(tree.tpe)
+          val components = if isGenericTuple then caseAccessors.indices.toList.map(tupleApp(_, ref(scrutinee))) else caseAccessors.map(tupleSel)
+          matchArgsPlan(components, args, onSuccess)
         else if (unapp.tpe <:< (defn.BooleanType))
           TestPlan(GuardTest, unapp, unapp.span, onSuccess)
         else
@@ -344,6 +357,9 @@ object PatternMatcher {
             else if (isUnapplySeq && unapplySeqTypeElemTp(unapp.tpe.widen.finalResultType).exists) {
               unapplySeqPlan(unappResult, args)
             }
+            else if unappResult.info <:< defn.NonEmptyTupleTypeRef then
+              val components = (0 until foldApplyTupleType(unappResult.denot.info).length).toList.map(tupleApp(_, ref(unappResult)))
+              matchArgsPlan(components, args, onSuccess)
             else {
               assert(isGetMatch(unapp.tpe))
               val argsPlan = {
@@ -719,16 +735,19 @@ object PatternMatcher {
           val expectedTp = tpt.tpe
 
           // An outer test is needed in a situation like  `case x: y.Inner => ...`
-          def outerTestNeeded: Boolean =
-            // See the test for SI-7214 for motivation for dealias. Later `treeCondStrategy#outerTest`
-            // generates an outer test based on `patType.prefix` with automatically dealises.
-            expectedTp.dealias match {
+          def outerTestNeeded: Boolean = {
+            def go(expected: Type): Boolean = expected match {
               case tref @ TypeRef(pre: SingletonType, _) =>
                 tref.symbol.isClass &&
                 ExplicitOuter.needsOuterIfReferenced(tref.symbol.asClass)
+              case AppliedType(tpe, _) => go(tpe)
               case _ =>
                 false
             }
+            // See the test for SI-7214 for motivation for dealias. Later `treeCondStrategy#outerTest`
+            // generates an outer test based on `patType.prefix` which automatically dealiases.
+            go(expectedTp.dealias)
+          }
 
           def outerTest: Tree = thisPhase.transformFollowingDeep {
             val expectedOuter = singleton(expectedTp.normalizedPrefix)
@@ -768,13 +787,15 @@ object PatternMatcher {
         (tpe isRef defn.IntClass) ||
         (tpe isRef defn.ByteClass) ||
         (tpe isRef defn.ShortClass) ||
-        (tpe isRef defn.CharClass)
+        (tpe isRef defn.CharClass) ||
+        (tpe isRef defn.StringClass)
 
-      val seen = mutable.Set[Int]()
+      val seen = mutable.Set[Any]()
 
-      def isNewIntConst(tree: Tree) = tree match {
-        case Literal(const) if const.isIntRange && !seen.contains(const.intValue) =>
-          seen += const.intValue
+      def isNewSwitchableConst(tree: Tree) = tree match {
+        case Literal(const)
+        if (const.isIntRange || const.tag == Constants.StringTag) && !seen.contains(const.value) =>
+          seen += const.value
           true
         case _ =>
           false
@@ -789,7 +810,7 @@ object PatternMatcher {
               val alts = List.newBuilder[Tree]
               def rec(innerPlan: Plan): Boolean = innerPlan match {
                 case SeqPlan(TestPlan(EqualTest(tree), scrut, _, ReturnPlan(`innerLabel`)), tail)
-                if scrut === scrutinee && isNewIntConst(tree) =>
+                if scrut === scrutinee && isNewSwitchableConst(tree) =>
                   alts += tree
                   rec(tail)
                 case ReturnPlan(`outerLabel`) =>
@@ -809,7 +830,7 @@ object PatternMatcher {
 
       def recur(plan: Plan): List[(List[Tree], Plan)] = plan match {
         case SeqPlan(testPlan @ TestPlan(EqualTest(tree), scrut, _, ons), tail)
-        if scrut === scrutinee && !canFallThrough(ons) && isNewIntConst(tree) =>
+        if scrut === scrutinee && !canFallThrough(ons) && isNewSwitchableConst(tree) =>
           (tree :: Nil, ons) :: recur(tail)
         case SeqPlan(AlternativesPlan(alts, ons), tail) =>
           (alts, ons) :: recur(tail)
@@ -832,29 +853,32 @@ object PatternMatcher {
 
     /** Emit a switch-match */
     private def emitSwitchMatch(scrutinee: Tree, cases: List[(List[Tree], Plan)]): Match = {
-      /* Make sure to adapt the scrutinee to Int, as well as all the alternatives
-       * of all cases, so that only Matches on pritimive Ints survive this phase.
+      /* Make sure to adapt the scrutinee to Int or String, as well as all the
+       * alternatives, so that only Matches on primitive Ints or Strings survive
+       * this phase.
        */
 
-      val intScrutinee =
-        if (scrutinee.tpe.widen.isRef(defn.IntClass)) scrutinee
-        else scrutinee.select(nme.toInt)
+      val (primScrutinee, scrutineeTpe) =
+        if (scrutinee.tpe.widen.isRef(defn.IntClass)) (scrutinee, defn.IntType)
+        else if (scrutinee.tpe.widen.isRef(defn.StringClass)) (scrutinee, defn.StringType)
+        else (scrutinee.select(nme.toInt), defn.IntType)
 
-      def intLiteral(lit: Tree): Tree =
+      def primLiteral(lit: Tree): Tree =
         val Literal(constant) = lit
         if (constant.tag == Constants.IntTag) lit
+        else if (constant.tag == Constants.StringTag) lit
         else cpy.Literal(lit)(Constant(constant.intValue))
 
       val caseDefs = cases.map { (alts, ons) =>
         val pat = alts match {
-          case alt :: Nil => intLiteral(alt)
-          case Nil => Underscore(defn.IntType) // default case
-          case _ => Alternative(alts.map(intLiteral))
+          case alt :: Nil => primLiteral(alt)
+          case Nil => Underscore(scrutineeTpe) // default case
+          case _ => Alternative(alts.map(primLiteral))
         }
         CaseDef(pat, EmptyTree, emit(ons))
       }
 
-      Match(intScrutinee, caseDefs)
+      Match(primScrutinee, caseDefs)
     }
 
     /** If selfCheck is `true`, used to check whether a tree gets generated twice */
@@ -928,7 +952,7 @@ object PatternMatcher {
               default
           }
         case ResultPlan(tree) =>
-          if (tree.tpe <:< defn.NothingType) tree // For example MatchError
+          if (tree.symbol == defn.throwMethod) tree // For example MatchError
           else Return(tree, ref(resultLabel))
       }
     }
diff --git a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala
index a0c2385e7866..96506d3c2b05 100644
--- a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala
+++ b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala
@@ -11,7 +11,6 @@ import Constants._
 import ast.Trees._
 import ast.{TreeTypeMap, untpd}
 import util.Spans._
-import tasty.TreePickler.Hole
 import SymUtils._
 import NameKinds._
 import dotty.tools.dotc.ast.tpd
@@ -74,6 +73,8 @@ class PickleQuotes extends MacroTransform {
 
   override def phaseName: String = PickleQuotes.name
 
+  override def description: String = PickleQuotes.description
+
   override def allowsImplicitSearch: Boolean = true
 
   override def checkPostCondition(tree: Tree)(using Context): Unit =
@@ -481,10 +482,16 @@ class PickleQuotes extends MacroTransform {
         transform(tree)(using ctx.withSource(tree.source))
       else reporting.trace(i"Reifier.transform $tree at $level", show = true) {
         tree match {
-          case Apply(TypeApply(fn, (body: RefTree) :: Nil), _) if fn.symbol == defn.QuotedTypeModule_of && isCaptured(body.symbol, level + 1) =>
+          case Apply(TypeApply(fn, (body: RefTree) :: Nil), _)
+          if fn.symbol == defn.QuotedTypeModule_of && isCaptured(body.symbol, level + 1) =>
             // Optimization: avoid the full conversion when capturing `X` with `x$1: Type[X$1]`
             // in `Type.of[X]` to `Type.of[x$1.Underlying]` and go directly to `X$1`
             capturers(body.symbol)(body)
+          case Apply(Select(Apply(TypeApply(fn,_), List(ref: RefTree)),nme.apply),List(quotes))
+          if fn.symbol == defn.QuotedRuntime_exprQuote && isCaptured(ref.symbol, level + 1) =>
+            // Optimization: avoid the full conversion when capturing `x` with `x$1: Expr[X]`
+            // in `'{x}` to `'{ ${x$1} }'` and go directly to `x$1`
+            capturers(ref.symbol)(ref).select(nme.apply).appliedTo(quotes)
           case tree: RefTree if isCaptured(tree.symbol, level) =>
             val body = capturers(tree.symbol).apply(tree)
             if (tree.isType)
@@ -523,6 +530,7 @@ object PickleQuotes {
   import tpd._
 
   val name: String = "pickleQuotes"
+  val description: String = "turn quoted trees into explicit run-time data structures"
 
   def getLiteral(tree: tpd.Tree): Option[Literal] = tree match {
     case tree: Literal => Some(tree)
diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala
index b05d388bba98..98b61d8b6b60 100644
--- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala
@@ -18,6 +18,7 @@ import scala.concurrent.duration.Duration
 
 object Pickler {
   val name: String = "pickler"
+  val description: String = "generates TASTy info"
 
   /** If set, perform jump target compacting, position and comment pickling,
    *  as well as final assembly in parallel with downstream phases; force
@@ -32,6 +33,8 @@ class Pickler extends Phase {
 
   override def phaseName: String = Pickler.name
 
+  override def description: String = Pickler.description
+
   // No need to repickle trees coming from TASTY
   override def isRunnable(using Context): Boolean =
     super.isRunnable && !ctx.settings.fromTasty.value
@@ -91,7 +94,7 @@ class Pickler extends Phase {
           if pickling ne noPrinter then
             pickling.synchronized {
               println(i"**** pickled info of $cls")
-              println(TastyPrinter.show(pickled))
+              println(TastyPrinter.showContents(pickled, ctx.settings.color.value == "never"))
             }
           pickled
         }(using ExecutionContext.global)
@@ -110,12 +113,12 @@ class Pickler extends Phase {
   override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = {
     val result = super.runOn(units)
     if ctx.settings.YtestPickler.value then
+      val ctx2 = ctx.fresh.setSetting(ctx.settings.YreadComments, true)
       testUnpickler(
-        using ctx.fresh
-            .setPeriod(Period(ctx.runId + 1, FirstPhaseId))
+        using ctx2
+            .setPeriod(Period(ctx.runId + 1, ctx.base.typerPhase.id))
             .setReporter(new ThrowingReporter(ctx.reporter))
             .addMode(Mode.ReadPositions)
-            .addMode(Mode.ReadComments)
             .addMode(Mode.PrintShowExceptions))
     result
   }
@@ -125,7 +128,7 @@ class Pickler extends Phase {
     ctx.initialize()
     val unpicklers =
       for ((cls, pickler) <- picklers) yield {
-        val unpickler = new DottyUnpickler(pickler.assembleParts())
+        val unpickler = new DottyUnpickler(pickler.assembleParts(), ctx.tastyVersion)
         unpickler.enter(roots = Set.empty)
         cls -> unpickler
       }
diff --git a/compiler/src/dotty/tools/dotc/transform/PostInlining.scala b/compiler/src/dotty/tools/dotc/transform/PostInlining.scala
index 54e654781aed..e50a8b77ff0e 100644
--- a/compiler/src/dotty/tools/dotc/transform/PostInlining.scala
+++ b/compiler/src/dotty/tools/dotc/transform/PostInlining.scala
@@ -13,6 +13,9 @@ class PostInlining extends MacroTransform, IdentityDenotTransformer:
   thisPhase =>
 
   override def phaseName: String = PostInlining.name
+
+  override def description: String = PostInlining.description
+
   override def changesMembers = true
 
   override def run(using Context): Unit =
@@ -31,4 +34,5 @@ class PostInlining extends MacroTransform, IdentityDenotTransformer:
         case tree1 => tree1
 
 object PostInlining:
-  val name: String = "postInlining"
\ No newline at end of file
+  val name: String = "postInlining"
+  val description: String = "add mirror support for inlined code"
diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala
index 8e9b95a0c572..bfcf6cc6e5bf 100644
--- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala
+++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala
@@ -17,6 +17,7 @@ import reporting._
 
 object PostTyper {
   val name: String = "posttyper"
+  val description: String = "additional checks and cleanups after type checking"
 }
 
 /** A macro transform that runs immediately after typer and that performs the following functions:
@@ -57,9 +58,10 @@ object PostTyper {
 class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase =>
   import tpd._
 
-  /** the following two members override abstract members in Transform */
   override def phaseName: String = PostTyper.name
 
+  override def description: String = PostTyper.description
+
   override def checkPostCondition(tree: tpd.Tree)(using Context): Unit = tree match {
     case tree: ValOrDefDef =>
       assert(!tree.symbol.signature.isUnderDefined)
@@ -243,10 +245,8 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase
     private object dropInlines extends TreeMap {
       override def transform(tree: Tree)(using Context): Tree = tree match {
         case Inlined(call, _, expansion) =>
-          val newExpansion = tree.tpe match
-            case ConstantType(c) => Literal(c)
-            case _ => Typed(ref(defn.Predef_undefined), TypeTree(tree.tpe))
-          cpy.Inlined(tree)(call, Nil, newExpansion.withSpan(tree.span))
+          val newExpansion = PruneErasedDefs.trivialErasedTree(tree)
+          cpy.Inlined(tree)(call, Nil, newExpansion)
         case _ => super.transform(tree)
       }
     }
@@ -257,6 +257,14 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase
 
     override def transform(tree: Tree)(using Context): Tree =
       try tree match {
+        // TODO move CaseDef case lower: keep most probable trees first for performance
+        case CaseDef(pat, _, _) =>
+          val gadtCtx =
+           pat.removeAttachment(typer.Typer.InferredGadtConstraints) match
+             case Some(gadt) => ctx.fresh.setGadt(gadt)
+             case None =>
+               ctx
+          super.transform(tree)(using gadtCtx)
         case tree: Ident if !tree.isType =>
           if tree.symbol.is(Inline) && !Inliner.inInlineMethod then
             ctx.compilationUnit.needsInlining = true
@@ -282,7 +290,12 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase
               tpd.cpy.Apply(tree)(
                 tree.fun,
                 tree.args.mapConserve(arg =>
-                  if (methType.isImplicitMethod && arg.span.isSynthetic) ref(defn.Predef_undefined)
+                  if (methType.isImplicitMethod && arg.span.isSynthetic)
+                    arg match
+                      case _: RefTree | _: Apply | _: TypeApply if arg.symbol.is(Erased) =>
+                        dropInlines.transform(arg)
+                      case _ =>
+                        PruneErasedDefs.trivialErasedTree(arg)
                   else dropInlines.transform(arg)))
             else
               tree
@@ -296,6 +309,10 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase
               // need to check instantiability here, because the type of the New itself
               // might be a type constructor.
               ctx.typer.checkClassType(tree.tpe, tree.srcPos, traitReq = false, stablePrefixReq = true)
+              if !nu.tpe.isLambdaSub then
+                // Check the constructor type as well; it could be an illegal singleton type
+                // which would not be reflected as `tree.tpe`
+                ctx.typer.checkClassType(nu.tpe, tree.srcPos, traitReq = false, stablePrefixReq = false)
               Checking.checkInstantiable(tree.tpe, nu.srcPos)
               withNoCheckNews(nu :: Nil)(app1)
             case _ =>
@@ -337,9 +354,11 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase
             )
           }
         case tree: ValDef =>
+          checkErasedDef(tree)
           val tree1 = cpy.ValDef(tree)(rhs = normalizeErasedRhs(tree.rhs, tree.symbol))
           processValOrDefDef(super.transform(tree1))
         case tree: DefDef =>
+          checkErasedDef(tree)
           annotateContextResults(tree)
           val tree1 = cpy.DefDef(tree)(rhs = normalizeErasedRhs(tree.rhs, tree.symbol))
           processValOrDefDef(superAcc.wrapDefDef(tree1)(super.transform(tree1).asInstanceOf[DefDef]))
@@ -347,6 +366,11 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase
           val sym = tree.symbol
           if (sym.isClass)
             VarianceChecker.check(tree)
+            annotateExperimental(sym)
+            tree.rhs match
+              case impl: Template =>
+                for parent <- impl.parents do
+                  Checking.checkTraitInheritance(parent.tpe.classSymbol, sym.asClass, parent.srcPos)
             // Add SourceFile annotation to top-level classes
             if sym.owner.is(Package)
                && ctx.compilationUnit.source.exists
@@ -358,6 +382,8 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase
           else (tree.rhs, sym.info) match
             case (rhs: LambdaTypeTree, bounds: TypeBounds) =>
               VarianceChecker.checkLambda(rhs, bounds)
+              if sym.isOpaqueAlias then
+                VarianceChecker.checkLambda(rhs, TypeBounds.upper(sym.opaqueAlias))
             case _ =>
           processMemberDef(super.transform(tree))
         case tree: New if isCheckable(tree) =>
@@ -414,12 +440,12 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase
             //     case x: (_: Tree[?])
         case m @ MatchTypeTree(bounds, selector, cases) =>
           // Analog to the case above for match types
-          def tranformIgnoringBoundsCheck(x: CaseDef): CaseDef =
+          def transformIgnoringBoundsCheck(x: CaseDef): CaseDef =
             withMode(Mode.Pattern)(super.transform(x)).asInstanceOf[CaseDef]
           cpy.MatchTypeTree(tree)(
             super.transform(bounds),
             super.transform(selector),
-            cases.mapConserve(tranformIgnoringBoundsCheck)
+            cases.mapConserve(transformIgnoringBoundsCheck)
           )
         case Block(_, Closure(_, _, tpt)) if ExpandSAMs.needsWrapperClass(tpt.tpe) =>
           superAcc.withInvalidCurrentClass(super.transform(tree))
@@ -432,10 +458,28 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase
           throw ex
       }
 
+    override def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] =
+      try super.transformStats(trees, exprOwner)
+      finally Checking.checkExperimentalImports(trees)
+
     /** Transforms the rhs tree into a its default tree if it is in an `erased` val/def.
      *  Performed to shrink the tree that is known to be erased later.
      */
     private def normalizeErasedRhs(rhs: Tree, sym: Symbol)(using Context) =
       if (sym.isEffectivelyErased) dropInlines.transform(rhs) else rhs
+
+    private def checkErasedDef(tree: ValOrDefDef)(using Context): Unit =
+      if tree.symbol.is(Erased, butNot = Macro) then
+        val tpe = tree.rhs.tpe
+        if tpe.derivesFrom(defn.NothingClass) then
+          report.error("`erased` definition cannot be implemented with an expression of type Nothing", tree.srcPos)
+        else if tpe.derivesFrom(defn.NullClass) then
+          report.error("`erased` definition cannot be implemented with an expression of type Null", tree.srcPos)
+
+    private def annotateExperimental(sym: Symbol)(using Context): Unit =
+      if sym.is(Module) && sym.companionClass.hasAnnotation(defn.ExperimentalAnnot) then
+        sym.addAnnotation(defn.ExperimentalAnnot)
+        sym.companionModule.addAnnotation(defn.ExperimentalAnnot)
+
   }
-}
\ No newline at end of file
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala
index 8c59a192e09f..98e835293303 100644
--- a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala
@@ -9,6 +9,7 @@ import core.Decorators._
 import core.Names.TermName
 import MegaPhase.MiniPhase
 import config.Printers.transforms
+import dotty.tools.dotc.util.Property
 
 /** Add accessors for all protected accesses. An accessor is needed if
  *  according to the rules of the JVM a protected class member is not accessible
@@ -17,6 +18,7 @@ import config.Printers.transforms
  */
 object ProtectedAccessors {
   val name: String = "protectedAccessors"
+  val description: String = "add accessors for protected members"
 
   /** Is the current context's owner inside the access boundary established by `sym`? */
   def insideBoundaryOf(sym: Symbol)(using Context): Boolean =
@@ -50,7 +52,17 @@ class ProtectedAccessors extends MiniPhase {
 
   override def phaseName: String = ProtectedAccessors.name
 
-  object Accessors extends AccessProxies {
+  override def description: String = ProtectedAccessors.description
+
+  private val AccessorsKey = new Property.Key[Accessors]
+
+  private def accessors(using Context): Accessors =
+   ctx.property(AccessorsKey).get
+
+  override def prepareForUnit(tree: Tree)(using Context): Context =
+    ctx.fresh.setProperty(AccessorsKey, new Accessors)
+
+  private class Accessors extends AccessProxies {
     val insert: Insert = new Insert {
       def accessorNameOf(name: TermName, site: Symbol)(using Context): TermName = ProtectedAccessorName(name)
       def needsAccessor(sym: Symbol)(using Context) = ProtectedAccessors.needsAccessor(sym)
@@ -66,19 +78,20 @@ class ProtectedAccessors extends MiniPhase {
   }
 
   override def transformIdent(tree: Ident)(using Context): Tree =
-    Accessors.insert.accessorIfNeeded(tree)
+    accessors.insert.accessorIfNeeded(tree)
 
   override def transformSelect(tree: Select)(using Context): Tree =
-    Accessors.insert.accessorIfNeeded(tree)
+    accessors.insert.accessorIfNeeded(tree)
 
   override def transformAssign(tree: Assign)(using Context): Tree =
     tree.lhs match {
       case lhs: RefTree if lhs.name.is(ProtectedAccessorName) =>
-        cpy.Apply(tree)(Accessors.insert.useSetter(lhs), tree.rhs :: Nil)
+        cpy.Apply(tree)(accessors.insert.useSetter(lhs), tree.rhs :: Nil)
       case _ =>
         tree
     }
 
   override def transformTemplate(tree: Template)(using Context): Tree =
-    cpy.Template(tree)(body = Accessors.addAccessorDefs(tree.symbol.owner, tree.body))
+    cpy.Template(tree)(body = accessors.addAccessorDefs(tree.symbol.owner, tree.body))
+
 }
diff --git a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala
index e18de6e60876..fca0b6b34928 100644
--- a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala
+++ b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala
@@ -12,6 +12,9 @@ import typer.RefChecks
 import MegaPhase.MiniPhase
 import StdNames.nme
 import ast.tpd
+import SymUtils._
+import config.Feature
+import Decorators.*
 
 /** This phase makes all erased term members of classes private so that they cannot
  *  conflict with non-erased members. This is needed so that subsequent phases like
@@ -19,62 +22,62 @@ import ast.tpd
  *  The phase also replaces all expressions that appear in an erased context by
  *  default values. This is necessary so that subsequent checking phases such
  *  as IsInstanceOfChecker don't give false negatives.
- *  Finally, the phase replaces `compiletime.uninitialized` on the right hand side
- *  of a mutable field definition by `_`. This avoids a "is declared erased, but is
- *  in fact used" error in Erasure and communicates to Constructors that the
- *  variable does not have an initializer.
+ *  Finally, the phase drops (language-) imports.
  */
 class PruneErasedDefs extends MiniPhase with SymTransformer { thisTransform =>
   import tpd._
+  import PruneErasedDefs._
 
   override def phaseName: String = PruneErasedDefs.name
 
+  override def description: String = PruneErasedDefs.description
+
   override def changesMembers: Boolean = true   // makes erased members private
 
   override def runsAfterGroupsOf: Set[String] = Set(RefChecks.name, ExplicitOuter.name)
 
   override def transformSym(sym: SymDenotation)(using Context): SymDenotation =
-    if (sym.isEffectivelyErased && sym.isTerm && !sym.is(Private) && sym.owner.isClass)
-      sym.copySymDenotation(initFlags = sym.flags | Private)
-    else sym
+    if !sym.isEffectivelyErased || !sym.isTerm || sym.is(Private) || !sym.owner.isClass then sym
+    else sym.copySymDenotation(initFlags = sym.flags | Private)
 
   override def transformApply(tree: Apply)(using Context): Tree =
-    if (tree.fun.tpe.widen.isErasedMethod)
-      cpy.Apply(tree)(tree.fun, tree.args.map(trivialErasedTree))
-    else tree
-
-  private def hasUninitializedRHS(tree: ValOrDefDef)(using Context): Boolean =
-    def recur(rhs: Tree): Boolean = rhs match
-      case rhs: RefTree =>
-        rhs.symbol == defn.Compiletime_uninitialized
-        && tree.symbol.is(Mutable) && tree.symbol.owner.isClass
-      case closureDef(ddef) if defn.isContextFunctionType(tree.tpt.tpe.dealias) =>
-        recur(ddef.rhs)
-      case _ =>
-        false
-    recur(tree.rhs)
+    if !tree.fun.tpe.widen.isErasedMethod then tree
+    else cpy.Apply(tree)(tree.fun, tree.args.map(trivialErasedTree))
 
   override def transformValDef(tree: ValDef)(using Context): Tree =
-    val sym = tree.symbol
-    if tree.symbol.isEffectivelyErased && !tree.rhs.isEmpty then
-      cpy.ValDef(tree)(rhs = trivialErasedTree(tree))
-    else if hasUninitializedRHS(tree) then
-      cpy.ValDef(tree)(rhs = cpy.Ident(tree.rhs)(nme.WILDCARD).withType(tree.tpt.tpe))
-    else
-      tree
+    checkErasedInExperimental(tree.symbol)
+    if !tree.symbol.isEffectivelyErased || tree.rhs.isEmpty then tree
+    else cpy.ValDef(tree)(rhs = trivialErasedTree(tree.rhs))
 
   override def transformDefDef(tree: DefDef)(using Context): Tree =
-    if (tree.symbol.isEffectivelyErased && !tree.rhs.isEmpty)
-      cpy.DefDef(tree)(rhs = trivialErasedTree(tree))
-    else tree
+    checkErasedInExperimental(tree.symbol)
+    if !tree.symbol.isEffectivelyErased || tree.rhs.isEmpty then tree
+    else cpy.DefDef(tree)(rhs = trivialErasedTree(tree.rhs))
+
+  override def transformTypeDef(tree: TypeDef)(using Context): Tree =
+    checkErasedInExperimental(tree.symbol)
+    tree
+
+  override def transformOther(tree: Tree)(using Context): Tree = tree match
+    case tree: Import => EmptyTree
+    case _ => tree
 
-  private def trivialErasedTree(tree: Tree)(using Context): Tree =
-    tree.tpe.widenTermRefExpr.dealias.normalized match
-      case ConstantType(c) => Literal(c)
-      case _ => ref(defn.Predef_undefined)
+  def checkErasedInExperimental(sym: Symbol)(using Context): Unit =
+    // Make an exception for Scala 2 experimental macros to allow dual Scala 2/3 macros under non-experimental mode
+    if sym.is(Erased, butNot = Macro) && sym != defn.Compiletime_erasedValue && !sym.isInExperimentalScope then
+      Feature.checkExperimentalFeature("erased", sym.sourcePos)
 
+  override def checkPostCondition(tree: Tree)(using Context): Unit = tree match
+    case _: tpd.Import => assert(false, i"illegal tree: $tree")
+    case _ =>
 }
 
 object PruneErasedDefs {
+  import tpd._
+
   val name: String = "pruneErasedDefs"
+  val description: String = "drop erased definitions and simplify erased expressions"
+
+  def trivialErasedTree(tree: Tree)(using Context): Tree =
+    ref(defn.Compiletime_erasedValue).appliedToType(tree.tpe).withSpan(tree.span)
 }
diff --git a/compiler/src/dotty/tools/dotc/transform/PureStats.scala b/compiler/src/dotty/tools/dotc/transform/PureStats.scala
index fc9b3bd6543e..b747d7d6b9e4 100644
--- a/compiler/src/dotty/tools/dotc/transform/PureStats.scala
+++ b/compiler/src/dotty/tools/dotc/transform/PureStats.scala
@@ -9,6 +9,7 @@ import Symbols._, StdNames._, Trees._
 
 object PureStats {
   val name: String = "pureStats"
+  val description: String = "remove pure statements in blocks"
 }
 
 /** Remove pure statements in blocks */
@@ -18,6 +19,8 @@ class PureStats extends MiniPhase {
 
   override def phaseName: String = PureStats.name
 
+  override def description: String = PureStats.description
+
   override def runsAfter: Set[String] = Set(Erasure.name)
 
   override def transformBlock(tree: Block)(using Context): Tree =
diff --git a/compiler/src/dotty/tools/dotc/transform/RenameLifted.scala b/compiler/src/dotty/tools/dotc/transform/RenameLifted.scala
deleted file mode 100644
index 60327d59be88..000000000000
--- a/compiler/src/dotty/tools/dotc/transform/RenameLifted.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-package dotty.tools.dotc.transform
-
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.DenotTransformers.SymTransformer
-import dotty.tools.dotc.core.Flags._
-import dotty.tools.dotc.core.NameKinds._
-import dotty.tools.dotc.core.Names._
-import dotty.tools.dotc.core.SymDenotations.SymDenotation
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.transform.MegaPhase.MiniPhase
-
-/** Renames lifted classes to local numbering scheme */
-class RenameLifted extends MiniPhase with SymTransformer {
-
-  override def phaseName: String = "renameLifted"
-
-  // Not clear why this should run after restoreScopes
-  // override def runsAfterGroupsOf = Set(RestoreScopes.name)
-
-  def transformSym(ref: SymDenotation)(using Context): SymDenotation =
-    if (needsRefresh(ref.symbol)) ref.copySymDenotation(name = refreshedName(ref.symbol))
-    else ref
-
-  /** If the name of the symbol with a unique name needs to be refreshed
-   *    - if it is a lifted class
-   *    - if it is a lifted method
-   */
-  private def needsRefresh(sym: Symbol)(using Context): Boolean =
-    (sym.isClass || sym.isOneOf(Private | Method | JavaStatic)) && sym.name.is(UniqueName)
-
-  /** Refreshes the number of the name based on the full name of the symbol */
-  private def refreshedName(sym: Symbol)(using Context): Name = {
-    def rewriteUnique: PartialFunction[Name, Name] = {
-      case name: DerivedName if name.info.kind == UniqueName =>
-        val fullName = (sym.owner.fullName.toString + name.underlying).toTermName
-        val freshName = UniqueName.fresh(fullName)
-        val info = freshName.asInstanceOf[DerivedName].info
-        DerivedName(name.underlying.replace(rewriteUnique), info)
-      case DerivedName(underlying, info: QualifiedInfo) =>
-        underlying.replace(rewriteUnique).derived(info)
-    }
-
-    sym.name.replace(rewriteUnique)
-  }
-}
diff --git a/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala
index ecf2d3e2b96f..24c46cd98e4c 100644
--- a/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala
+++ b/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala
@@ -12,7 +12,10 @@ import Types._
 import Decorators._
 
 class RepeatableAnnotations extends MiniPhase:
-  override def phaseName = "repeatableAnnotations"
+
+  override def phaseName: String = RepeatableAnnotations.name
+
+  override def description: String = RepeatableAnnotations.description
 
   override def transformTypeDef(tree: TypeDef)(using Context): Tree = transformDef(tree)
   override def transformValDef(tree: ValDef)(using Context): Tree = transformDef(tree)
@@ -46,3 +49,7 @@ class RepeatableAnnotations extends MiniPhase:
             Nil
       case (_, anns) => anns
     }.toList
+
+object RepeatableAnnotations:
+  val name: String = "repeatableAnnotations"
+  val description: String = "aggregate repeatable annotations"
diff --git a/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala
index 30ccc69e37ca..2a4a775b834f 100644
--- a/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala
+++ b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala
@@ -35,6 +35,8 @@ class ResolveSuper extends MiniPhase with IdentityDenotTransformer { thisPhase =
 
   override def phaseName: String = ResolveSuper.name
 
+  override def description: String = ResolveSuper.description
+
   override def runsAfter: Set[String] = Set(ElimByName.name, // verified empirically, need to figure out what the reason is.
                                PruneErasedDefs.name) // Erased decls make `isCurrent` work incorrectly
 
@@ -72,6 +74,7 @@ class ResolveSuper extends MiniPhase with IdentityDenotTransformer { thisPhase =
 
 object ResolveSuper {
   val name: String = "resolveSuper"
+  val description: String = "implement super accessors"
 
   /** Returns the symbol that is accessed by a super-accessor in a mixin composition.
    *
@@ -107,7 +110,14 @@ object ResolveSuper {
         // of the superaccessor's type, see i5433.scala for an example where this matters
         val otherTp = other.asSeenFrom(base.typeRef).info
         val accTp = acc.asSeenFrom(base.typeRef).info
-        if (!(otherTp.overrides(accTp, matchLoosely = true)))
+        // Since the super class can be Java defined,
+        // we use a relaxed overriding check for explicit nulls if one of the symbols is Java defined.
+        // This forces `Null` being a subtype of reference types during override checking.
+        val relaxedCtxForNulls =
+          if ctx.explicitNulls && (sym.is(JavaDefined) || acc.is(JavaDefined)) then
+            ctx.retractMode(Mode.SafeNulls)
+          else ctx
+        if (!(otherTp.overrides(accTp, matchLoosely = true)(using relaxedCtxForNulls)))
           report.error(IllegalSuperAccessor(base, memberName, targetName, acc, accTp, other.symbol, otherTp), base.srcPos)
 
       bcs = bcs.tail
diff --git a/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala b/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala
index 6fe75ce8a901..4937b3ec80b1 100644
--- a/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala
+++ b/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala
@@ -16,7 +16,10 @@ import StdNames._
  */
 class RestoreScopes extends MiniPhase with IdentityDenotTransformer { thisPhase =>
   import ast.tpd._
-  override def phaseName: String = "restoreScopes"
+
+  override def phaseName: String = RestoreScopes.name
+
+  override def description: String = RestoreScopes.description
 
   override def changesMembers: Boolean = true // the phase affects scopes, applying tree transformations of previous phases
 
@@ -50,3 +53,6 @@ class RestoreScopes extends MiniPhase with IdentityDenotTransformer { thisPhase
   }
 }
 
+object RestoreScopes:
+  val name: String = "restoreScopes"
+  val description: String = "repair rendered invalid scopes"
diff --git a/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala
index 6e9fd6f48568..1db210634ab6 100644
--- a/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala
+++ b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala
@@ -46,7 +46,9 @@ import dotty.tools.dotc.transform.SymUtils._
 class SelectStatic extends MiniPhase with IdentityDenotTransformer {
   import ast.tpd._
 
-  override def phaseName: String = "selectStatic"
+  override def phaseName: String = SelectStatic.name
+
+  override def description: String = SelectStatic.description
 
   override def transformSelect(tree: tpd.Select)(using Context): tpd.Tree = {
     val sym = tree.symbol
@@ -94,3 +96,7 @@ class SelectStatic extends MiniPhase with IdentityDenotTransformer {
   override def transformClosure(tree: tpd.Closure)(using Context): tpd.Tree =
     normalize(tree)
 }
+
+object SelectStatic:
+  val name: String = "selectStatic"
+  val description: String = "get rid of selects that would be compiled into GetStatic"
diff --git a/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala b/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala
index aacf3373dbe0..2f586104c4e3 100644
--- a/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala
+++ b/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala
@@ -17,7 +17,10 @@ import Contexts._
 class SeqLiterals extends MiniPhase {
   import ast.tpd._
 
-  override def phaseName: String = "seqLiterals"
+  override def phaseName: String = SeqLiterals.name
+
+  override def description: String = SeqLiterals.description
+
   override def runsAfter: Set[String] = Set(PatternMatcher.name)
 
   override def checkPostCondition(tree: Tree)(using Context): Unit = tree match {
@@ -34,3 +37,8 @@ class SeqLiterals extends MiniPhase {
       wrapArray(arr, elemtp).withSpan(tree.span).ensureConforms(tree.tpe)
   }
 }
+
+object SeqLiterals:
+  val name: String = "seqLiterals"
+  val description: String = "express vararg arguments as arrays"
+
diff --git a/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala b/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala
index 06c3d3ffa0eb..a2bd55a1035d 100644
--- a/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala
+++ b/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala
@@ -9,6 +9,9 @@ import dotty.tools.dotc.core.Phases.Phase
 class SetRootTree extends Phase {
 
   override val phaseName: String = SetRootTree.name
+
+  override val description: String = SetRootTree.description
+
   override def isRunnable(using Context) =
     super.isRunnable && ctx.settings.YretainTrees.value
 
@@ -43,4 +46,5 @@ class SetRootTree extends Phase {
 
 object SetRootTree {
   val name: String = "SetRootTree"
+  val description: String = "set the rootTreeOrProvider on class symbols"
 }
diff --git a/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala b/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala
index ef825832c1cd..6ffa05075201 100644
--- a/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala
+++ b/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala
@@ -20,7 +20,9 @@ import scala.collection.mutable
 class SpecializeApplyMethods extends MiniPhase with InfoTransformer {
   import ast.tpd._
 
-  val phaseName = "specializeApplyMethods"
+  override def phaseName: String = SpecializeApplyMethods.name
+
+  override def description: String = SpecializeApplyMethods.description
 
   override def isEnabled(using Context): Boolean =
     !ctx.settings.scalajs.value
@@ -116,3 +118,7 @@ class SpecializeApplyMethods extends MiniPhase with InfoTransformer {
       tree
   }
 }
+
+object SpecializeApplyMethods:
+  val name: String = "specializeApplyMethods"
+  val description: String = "adds specialized methods to FunctionN"
diff --git a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala
index 91555dc2b995..97e8e37ae444 100644
--- a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala
+++ b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala
@@ -13,7 +13,11 @@ import scala.collection.mutable
  */
 class SpecializeFunctions extends MiniPhase {
   import ast.tpd._
-  val phaseName = "specializeFunctions"
+
+  override def phaseName: String = SpecializeFunctions.name
+
+  override def description: String = SpecializeFunctions.description
+
   override def runsAfter = Set(ElimByName.name)
 
   override def isEnabled(using Context): Boolean =
@@ -76,24 +80,27 @@ class SpecializeFunctions extends MiniPhase {
             argTypes,
             retType
           )
-
-        if (!isSpecializable || argTypes.exists(_.isInstanceOf[ExprType])) return tree
-
-        val specializedApply = nme.apply.specializedFunction(retType, argTypes)
-        val newSel = fun match {
-          case Select(qual, _) =>
-            qual.select(specializedApply)
-          case _ =>
-            (fun.tpe: @unchecked) match {
-              case TermRef(prefix: ThisType, name) =>
-                tpd.This(prefix.cls).select(specializedApply)
-              case TermRef(prefix: NamedType, name) =>
-                tpd.ref(prefix).select(specializedApply)
-            }
-        }
-
-        newSel.appliedToTermArgs(args)
-
+        if isSpecializable then
+          val specializedApply = nme.apply.specializedFunction(retType, argTypes)
+          val newSel = fun match
+            case Select(qual, _) =>
+              val qual1 = qual.tpe.widen match
+                case defn.ByNameFunction(res) =>
+                  // Need to cast to a regular function, since specialized apply methods
+                  // are not members of ContextFunction0. The cast will be eliminated in
+                  // erasure.
+                  qual.cast(defn.FunctionOf(Nil, res))
+                case _ =>
+                  qual
+              qual1.select(specializedApply)
+            case _ =>
+              (fun.tpe: @unchecked) match
+                case TermRef(prefix: ThisType, name) =>
+                  tpd.This(prefix.cls).select(specializedApply)
+                case TermRef(prefix: NamedType, name) =>
+                  tpd.ref(prefix).select(specializedApply)
+          newSel.appliedToTermArgs(args)
+        else tree
       case _ => tree
     }
 
@@ -102,3 +109,7 @@ class SpecializeFunctions extends MiniPhase {
       p == defn.Function0 || p == defn.Function1 || p == defn.Function2
     }
 }
+
+object SpecializeFunctions:
+  val name: String = "specializeFunctions"
+  val description: String = "specialize Function{0,1,2} by replacing super with specialized super"
diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala b/compiler/src/dotty/tools/dotc/transform/Splicer.scala
index 6fb008c88889..dcd5b8d8c4b5 100644
--- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala
@@ -49,7 +49,7 @@ object Splicer {
           val oldContextClassLoader = Thread.currentThread().getContextClassLoader
           Thread.currentThread().setContextClassLoader(classLoader)
           try {
-            val interpreter = new Interpreter(spliceExpansionPos, classLoader)
+            val interpreter = new Interpreter(splicePos, classLoader)
 
             // Some parts of the macro are evaluated during the unpickling performed in quotedExprToTree
             val interpretedExpr = interpreter.interpret[Quotes => scala.quoted.Expr[Any]](tree)
@@ -63,8 +63,10 @@ object Splicer {
       catch {
         case ex: CompilationUnit.SuspendException =>
           throw ex
-        case ex: scala.quoted.runtime.StopMacroExpansion if ctx.reporter.hasErrors =>
-           // errors have been emitted
+        case ex: scala.quoted.runtime.StopMacroExpansion =>
+          if !ctx.reporter.hasErrors then
+            report.error("Macro expansion was aborted by the macro without any errors reported. Macros should issue errors to end-users to facilitate debugging when aborting a macro expansion.", splicePos)
+          // errors have been emitted
           EmptyTree
         case ex: StopInterpretation =>
           report.error(ex.msg, ex.pos)
@@ -147,7 +149,14 @@ object Splicer {
         case Typed(expr, _) => checkIfValidArgument(expr)
 
         case Apply(Select(Apply(fn, quoted :: Nil), nme.apply), _) if fn.symbol == defn.QuotedRuntime_exprQuote =>
-          // OK
+          val noSpliceChecker = new TreeTraverser {
+            def traverse(tree: Tree)(using Context): Unit = tree match
+              case Spliced(_) =>
+                report.error("Quoted argument of macros may not have splices", tree.srcPos)
+              case _ =>
+                traverseChildren(tree)
+          }
+          noSpliceChecker.traverse(quoted)
 
         case Apply(TypeApply(fn, List(quoted)), _)if fn.symbol == defn.QuotedTypeModule_of =>
           // OK
@@ -161,7 +170,7 @@ object Splicer {
         case SeqLiteral(elems, _) =>
           elems.foreach(checkIfValidArgument)
 
-        case tree: Ident if summon[Env].contains(tree.symbol) =>
+        case tree: Ident if summon[Env].contains(tree.symbol) || tree.symbol.is(Inline, butNot = Method) =>
           // OK
 
         case _ =>
@@ -172,6 +181,7 @@ object Splicer {
               |Parameters may only be:
               | * Quoted parameters or fields
               | * Literal values of primitive types
+              | * References to `inline val`s
               |""".stripMargin, tree.srcPos)
       }
 
@@ -242,6 +252,11 @@ object Splicer {
       case Literal(Constant(value)) =>
         interpretLiteral(value)
 
+      case tree: Ident if tree.symbol.is(Inline, butNot = Method) =>
+        tree.tpe.widenTermRefExpr match
+          case ConstantType(c) => c.value.asInstanceOf[Object]
+          case _ => throw new StopInterpretation(em"${tree.symbol} could not be inlined", tree.srcPos)
+
       // TODO disallow interpreted method calls as arguments
       case Call(fn, args) =>
         if (fn.symbol.isConstructor && fn.symbol.owner.owner.is(Package))
@@ -342,12 +357,18 @@ object Splicer {
 
     private def interpretedStaticMethodCall(moduleClass: Symbol, fn: Symbol)(implicit env: Env): List[Object] => Object = {
       val (inst, clazz) =
-        if (moduleClass.name.startsWith(str.REPL_SESSION_LINE))
-          (null, loadReplLineClass(moduleClass))
-        else {
-          val inst = loadModule(moduleClass)
-          (inst, inst.getClass)
-        }
+        try
+          if (moduleClass.name.startsWith(str.REPL_SESSION_LINE))
+            (null, loadReplLineClass(moduleClass))
+          else {
+            val inst = loadModule(moduleClass)
+            (inst, inst.getClass)
+          }
+        catch
+          case MissingClassDefinedInCurrentRun(sym)  if ctx.compilationUnit.isSuspendable =>
+            if (ctx.settings.XprintSuspension.value)
+              report.echo(i"suspension triggered by a dependency on $sym", pos)
+            ctx.compilationUnit.suspend() // this throws a SuspendException
 
       val name = fn.name.asTermName
       val method = getMethod(clazz, name, paramsSig(fn))
@@ -408,6 +429,10 @@ object Splicer {
         case _: NoSuchMethodException =>
           val msg = em"Could not find method ${clazz.getCanonicalName}.$name with parameters ($paramClasses%, %)"
           throw new StopInterpretation(msg, pos)
+        case MissingClassDefinedInCurrentRun(sym) if ctx.compilationUnit.isSuspendable =>
+            if (ctx.settings.XprintSuspension.value)
+              report.echo(i"suspension triggered by a dependency on $sym", pos)
+            ctx.compilationUnit.suspend() // this throws a SuspendException
       }
 
     private def stopIfRuntimeException[T](thunk: => T, method: JLRMethod): T =
@@ -540,4 +565,3 @@ object Splicer {
     }
   }
 }
-
diff --git a/compiler/src/dotty/tools/dotc/transform/Staging.scala b/compiler/src/dotty/tools/dotc/transform/Staging.scala
index 936fc22c5d71..dbfeaf4728b1 100644
--- a/compiler/src/dotty/tools/dotc/transform/Staging.scala
+++ b/compiler/src/dotty/tools/dotc/transform/Staging.scala
@@ -12,7 +12,6 @@ import dotty.tools.dotc.core.NameKinds._
 import dotty.tools.dotc.core.StagingContext._
 import dotty.tools.dotc.core.StdNames._
 import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.tasty.TreePickler.Hole
 import dotty.tools.dotc.core.Types._
 import dotty.tools.dotc.quoted._
 import dotty.tools.dotc.util.{SourceFile, SrcPos}
@@ -35,6 +34,8 @@ class Staging extends MacroTransform {
 
   override def phaseName: String = Staging.name
 
+  override def description: String = Staging.description
+
   override def runsAfter: Set[String] = Set(Inlining.name)
 
   override def allowsImplicitSearch: Boolean = true
@@ -86,4 +87,5 @@ class Staging extends MacroTransform {
 
 object Staging {
   val name: String = "staging"
+  val description: String = "check staging levels and heal staged types"
 }
diff --git a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala
index 3bbcdef68932..93268b71a477 100644
--- a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala
+++ b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala
@@ -87,7 +87,44 @@ object SymUtils:
 
     def isGenericProduct(using Context): Boolean = whyNotGenericProduct.isEmpty
 
-    def useCompanionAsMirror(using Context): Boolean = self.linkedClass.exists && !self.is(Scala2x)
+    /** Is this an old style implicit conversion?
+     *  @param directOnly            only consider explicitly written methods
+     *  @param forImplicitClassOnly  only consider methods generated from implicit classes
+     */
+    def isOldStyleImplicitConversion(directOnly: Boolean = false, forImplicitClassOnly: Boolean = false)(using Context): Boolean =
+      self.is(Implicit) && self.info.stripPoly.match
+        case mt @ MethodType(_ :: Nil) if !mt.isImplicitMethod =>
+          if self.isCoDefinedGiven(mt.finalResultType.typeSymbol)
+          then !directOnly
+          else !forImplicitClassOnly
+        case _ =>
+          false
+
+    /** Is this the method that summons a structural given instance? */
+    def isGivenInstanceSummoner(using Context): Boolean =
+      def isCodefined(info: Type): Boolean = info.stripPoly match
+        case mt: MethodType =>
+          // given summoner can only have contextual params
+          mt.isImplicitMethod && isCodefined(mt.resultType)
+        case mt: ExprType =>
+          isCodefined(mt.resultType)
+        case res =>
+          self.isCoDefinedGiven(res.typeSymbol)
+      self.isAllOf(Given | Method) && isCodefined(self.info)
+
+    def useCompanionAsSumMirror(using Context): Boolean =
+      self.linkedClass.exists
+      && !self.is(Scala2x)
+      && (
+        // If the sum type is compiled from source, and `self` is a "generic sum"
+        // then its companion object will become a sum mirror in `posttyper`. (This method
+        // can be called from `typer` when summoning a Mirror.)
+        // However if `self` is from a prior run then we should check that its companion subclasses `Mirror.Sum`.
+        // e.g. before Scala 3.1, hierarchical sum types were not considered "generic sums", so their
+        // companion would not cache the mirror. Companions from TASTy will already be typed as `Mirror.Sum`.
+        self.isDefinedInCurrentRun
+        || self.linkedClass.isSubClass(defn.Mirror_SumClass)
+      )
 
     /** Is this a sealed class or trait for which a sum mirror is generated?
     *  It must satisfy the following conditions:
@@ -95,14 +132,16 @@ object SymUtils:
     *   - none of its children are anonymous classes
     *   - all of its children are addressable through a path from the parent class
     *     and also the location of the generated mirror.
-    *   - all of its children are generic products or singletons
+    *   - all of its children are generic products, singletons, or generic sums themselves.
     */
     def whyNotGenericSum(declScope: Symbol)(using Context): String =
       if (!self.is(Sealed))
         s"it is not a sealed ${self.kindString}"
+      else if (!self.isOneOf(AbstractOrTrait))
+        s"it is not an abstract class"
       else {
         val children = self.children
-        val companionMirror = self.useCompanionAsMirror
+        val companionMirror = self.useCompanionAsSumMirror
         assert(!(companionMirror && (declScope ne self.linkedClass)))
         def problem(child: Symbol) = {
 
@@ -116,7 +155,11 @@ object SymUtils:
           else {
             val s = child.whyNotGenericProduct
             if (s.isEmpty) s
-            else i"its child $child is not a generic product because $s"
+            else if (child.is(Sealed)) {
+              val s = child.whyNotGenericSum(if child.useCompanionAsSumMirror then child.linkedClass else ctx.owner)
+              if (s.isEmpty) s
+              else i"its child $child is not a generic sum because $s"
+            } else i"its child $child is not a generic product because $s"
           }
         }
         if (children.isEmpty) "it does not have subclasses"
@@ -259,6 +302,25 @@ object SymUtils:
         && self.owner.linkedClass.is(Case)
         && self.owner.linkedClass.isDeclaredInfix
 
+    /** Is symbol declared or inherits @experimental? */
+    def isExperimental(using Context): Boolean =
+      self.hasAnnotation(defn.ExperimentalAnnot)
+      || (self.maybeOwner.isClass && self.owner.hasAnnotation(defn.ExperimentalAnnot))
+
+    def isInExperimentalScope(using Context): Boolean =
+      def isDefaultArgumentOfExperimentalMethod =
+        self.name.is(DefaultGetterName)
+        && self.owner.isClass
+        && {
+          val overloads = self.owner.asClass.membersNamed(self.name.firstPart)
+          overloads.filterWithFlags(HasDefaultParams, EmptyFlags) match
+            case denot: SymDenotation => denot.symbol.isExperimental
+            case _ => false
+        }
+      self.hasAnnotation(defn.ExperimentalAnnot)
+      || isDefaultArgumentOfExperimentalMethod
+      || (!self.is(Package) && self.owner.isInExperimentalScope)
+
     /** The declared self type of this class, as seen from `site`, stripping
     *  all refinements for opaque types.
     */
@@ -278,5 +340,50 @@ object SymUtils:
         self.addAnnotation(
           Annotation(defn.TargetNameAnnot,
             Literal(Constant(nameFn(original.targetName).toString)).withSpan(original.span)))
+
+    /** The return type as seen from the body of this definition. It is
+     *  computed from the symbol's type by replacing param refs by param symbols.
+     */
+    def localReturnType(using Context): Type =
+      if self.isConstructor then defn.UnitType
+      else
+        def instantiateRT(info: Type, psymss: List[List[Symbol]]): Type = info match
+          case info: PolyType =>
+            instantiateRT(info.instantiate(psymss.head.map(_.typeRef)), psymss.tail)
+          case info: MethodType =>
+            instantiateRT(info.instantiate(psymss.head.map(_.termRef)), psymss.tail)
+          case info =>
+            info.widenExpr
+        instantiateRT(self.info, self.paramSymss)
+
+    /** The expected type of a return to `self` at the place indicated by the context.
+     *  This is the local return type instantiated by the symbols of any context function
+     *  closures that enclose the site of the return
+     */
+    def returnProto(using Context): Type =
+
+      /** If `pt` is a context function type, its return type. If the CFT
+       * is dependent, instantiate with the parameters of the associated
+       * anonymous function.
+       * @param  paramss  the parameters of the anonymous functions
+       *                  enclosing the return expression
+       */
+      def instantiateCFT(pt: Type, paramss: => List[List[Symbol]]): Type =
+        val ift = defn.asContextFunctionType(pt)
+        if ift.exists then
+          ift.nonPrivateMember(nme.apply).info match
+            case appType: MethodType =>
+              instantiateCFT(appType.instantiate(paramss.head.map(_.termRef)), paramss.tail)
+        else pt
+
+      def iftParamss = ctx.owner.ownersIterator
+          .filter(_.is(Method, butNot = Accessor))
+          .takeWhile(_.isAnonymousFunction)
+          .toList
+          .reverse
+          .map(_.paramSymss.head)
+
+      instantiateCFT(self.localReturnType, iftParamss)
+    end returnProto
   end extension
 end SymUtils
diff --git a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala
index 817be9cba633..4ce4619aa3b9 100644
--- a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala
+++ b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala
@@ -63,12 +63,10 @@ class SyntheticMembers(thisPhase: DenotTransformer) {
   private def initSymbols(using Context) =
     if (myValueSymbols.isEmpty) {
       myValueSymbols = List(defn.Any_hashCode, defn.Any_equals)
-      myCaseSymbols = myValueSymbols ++ List(defn.Any_toString, defn.Product_canEqual,
-        defn.Product_productArity, defn.Product_productPrefix, defn.Product_productElement,
-        defn.Product_productElementName)
+      myCaseSymbols = defn.caseClassSynthesized
       myCaseModuleSymbols = myCaseSymbols.filter(_ ne defn.Any_equals)
       myEnumValueSymbols = List(defn.Product_productPrefix)
-      myNonJavaEnumValueSymbols = myEnumValueSymbols :+ defn.Any_toString
+      myNonJavaEnumValueSymbols = myEnumValueSymbols :+ defn.Any_toString :+ defn.Enum_ordinal
     }
 
   def valueSymbols(using Context): List[Symbol] = { initSymbols; myValueSymbols }
@@ -134,6 +132,17 @@ class SyntheticMembers(thisPhase: DenotTransformer) {
         else // assume owner is `val Foo = new MyEnum { def ordinal = 0 }`
           Literal(Constant(clazz.owner.name.toString))
 
+      def ordinalRef: Tree =
+        if isSimpleEnumValue then // owner is `def $new(_$ordinal: Int, $name: String) = new MyEnum { ... }`
+          ref(clazz.owner.paramSymss.head.find(_.name == nme.ordinalDollar_).get)
+        else // val CaseN = new MyEnum { ... def ordinal: Int = n }
+          val vdef = clazz.owner
+          val parentEnum = vdef.owner.companionClass
+          val children = parentEnum.children.zipWithIndex
+          val candidate: Option[Int] = children.collectFirst { case (child, idx) if child == vdef => idx }
+          assert(candidate.isDefined, i"could not find child for $vdef")
+          Literal(Constant(candidate.get))
+
       def toStringBody(vrefss: List[List[Tree]]): Tree =
         if (clazz.is(ModuleClass)) ownName
         else if (isNonJavaEnumValue) identifierRef
@@ -145,6 +154,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) {
         case nme.toString_ => toStringBody(vrefss)
         case nme.equals_ => equalsBody(vrefss.head.head)
         case nme.canEqual_ => canEqualBody(vrefss.head.head)
+        case nme.ordinal => ordinalRef
         case nme.productArity => Literal(Constant(accessors.length))
         case nme.productPrefix if isEnumValue => nameRef
         case nme.productPrefix => ownName
@@ -525,7 +535,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) {
           val pat = Typed(untpd.Ident(nme.WILDCARD).withType(patType), TypeTree(patType))
           CaseDef(pat, EmptyTree, Literal(Constant(idx)))
         }
-      Match(param, cases)
+      Match(param.annotated(New(defn.UncheckedAnnot.typeRef, Nil)), cases)
     }
 
   /** - If `impl` is the companion of a generic sum, add `deriving.Mirror.Sum` parent
@@ -600,7 +610,16 @@ class SyntheticMembers(thisPhase: DenotTransformer) {
 
   def addSyntheticMembers(impl: Template)(using Context): Template = {
     val clazz = ctx.owner.asClass
+    val syntheticMembers = serializableObjectMethod(clazz) ::: serializableEnumValueMethod(clazz) ::: caseAndValueMethods(clazz)
+    checkInlining(syntheticMembers)
     addMirrorSupport(
-      cpy.Template(impl)(body = serializableObjectMethod(clazz) ::: serializableEnumValueMethod(clazz) ::: caseAndValueMethods(clazz) ::: impl.body))
+      cpy.Template(impl)(body = syntheticMembers ::: impl.body))
   }
+
+  private def checkInlining(syntheticMembers: List[Tree])(using Context): Unit =
+    if syntheticMembers.exists(_.existsSubTree {
+      case tree: GenericApply => tree.symbol.isAllOf(InlineMethod)
+      case tree: Select => tree.symbol.isAllOf(InlineMethod)
+      case _ => false
+    }) then ctx.compilationUnit.needsInlining = true
 }
diff --git a/compiler/src/dotty/tools/dotc/transform/TailRec.scala b/compiler/src/dotty/tools/dotc/transform/TailRec.scala
index 9330016f3292..66dcd4a9dfbd 100644
--- a/compiler/src/dotty/tools/dotc/transform/TailRec.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TailRec.scala
@@ -113,6 +113,8 @@ class TailRec extends MiniPhase {
 
   override def phaseName: String = TailRec.name
 
+  override def description: String = TailRec.description
+
   override def runsAfter: Set[String] = Set(Erasure.name) // tailrec assumes erased types
 
   override def transformDefDef(tree: DefDef)(using Context): Tree = {
@@ -277,23 +279,11 @@ class TailRec extends MiniPhase {
     def yesTailTransform(tree: Tree)(using Context): Tree =
       transform(tree, tailPosition = true)
 
-    /** If not in tail position a tree traversal may not be needed.
-     *
-     *  A recursive  call may still be in tail position if within the return
-     *  expression of a labeled block.
-     *  A tree traversal may also be needed to report a failure to transform
-     *  a recursive call of a @tailrec annotated method (i.e. `isMandatory`).
-     */
-    private def isTraversalNeeded =
-      isMandatory || tailPositionLabeledSyms.size > 0
-
     def noTailTransform(tree: Tree)(using Context): Tree =
-      if (isTraversalNeeded) transform(tree, tailPosition = false)
-      else tree
+      transform(tree, tailPosition = false)
 
     def noTailTransforms[Tr <: Tree](trees: List[Tr])(using Context): List[Tr] =
-      if (isTraversalNeeded) trees.mapConserve(noTailTransform).asInstanceOf[List[Tr]]
-      else trees
+      trees.mapConserve(noTailTransform).asInstanceOf[List[Tr]]
 
     override def transform(tree: Tree)(using Context): Tree = {
       /* Rewrite an Apply to be considered for tail call transformation. */
@@ -444,7 +434,7 @@ class TailRec extends MiniPhase {
 
         case Return(expr, from) =>
           val fromSym = from.symbol
-          val inTailPosition = fromSym.is(Label) && tailPositionLabeledSyms.contains(fromSym)
+          val inTailPosition = !fromSym.is(Label) || tailPositionLabeledSyms.contains(fromSym)
           cpy.Return(tree)(transform(expr, inTailPosition), from)
 
         case _ =>
@@ -456,4 +446,5 @@ class TailRec extends MiniPhase {
 
 object TailRec {
   val name: String = "tailrec"
+  val description: String = "rewrite tail recursion to loops"
 }
diff --git a/compiler/src/dotty/tools/dotc/transform/TransformByNameApply.scala b/compiler/src/dotty/tools/dotc/transform/TransformByNameApply.scala
deleted file mode 100644
index dab1d8f25ca2..000000000000
--- a/compiler/src/dotty/tools/dotc/transform/TransformByNameApply.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-package dotty.tools
-package dotc
-package transform
-
-import MegaPhase._
-import core._
-import Symbols._
-import SymDenotations._
-import Contexts._
-import Types._
-import Flags._
-import Decorators._
-import DenotTransformers._
-import core.StdNames.nme
-import ast.Trees._
-import reporting.trace
-
-/** Abstract base class of ByNameClosures and ElimByName, factoring out the
- *  common functionality to transform arguments of by-name parameters.
- */
-abstract class TransformByNameApply extends MiniPhase { thisPhase: DenotTransformer =>
-  import ast.tpd._
-
-  /** The info of the tree's symbol before it is potentially transformed in this phase */
-  private def originalDenotation(tree: Tree)(using Context) =
-    atPhase(thisPhase)(tree.symbol.denot)
-
-  /** If denotation had an ExprType before, it now gets a function type */
-  protected def exprBecomesFunction(symd: SymDenotation)(using Context): Boolean =
-    symd.is(Param) || symd.is(ParamAccessor, butNot = Method)
-
-  protected def isByNameRef(tree: Tree)(using Context): Boolean = {
-    val origDenot = originalDenotation(tree)
-    origDenot.info.isInstanceOf[ExprType] && exprBecomesFunction(origDenot)
-  }
-
-  def mkByNameClosure(arg: Tree, argType: Type)(using Context): Tree = unsupported(i"mkClosure($arg)")
-
-  override def transformApply(tree: Apply)(using Context): Tree =
-    trace(s"transforming ${tree.show} at phase ${ctx.phase}", show = true) {
-
-      def transformArg(arg: Tree, formal: Type): Tree = formal.dealias match {
-        case formalExpr: ExprType =>
-          var argType = arg.tpe.widenIfUnstable
-          if (argType.isBottomType) argType = formal.widenExpr
-          def wrap(arg: Tree) =
-            ref(defn.cbnArg).appliedToType(argType).appliedTo(arg).withSpan(arg.span)
-          arg match {
-            case Apply(Select(qual, nme.apply), Nil)
-            if qual.tpe.derivesFrom(defn.Function0) && (isPureExpr(qual) || qual.symbol.isAllOf(Inline | Param)) =>
-              wrap(qual)
-            case _ =>
-              if (isByNameRef(arg) || arg.symbol == defn.cbnArg) arg
-              else wrap(mkByNameClosure(arg, argType))
-          }
-        case _ =>
-          arg
-      }
-
-      val mt @ MethodType(_) = tree.fun.tpe.widen
-      val args1 = tree.args.zipWithConserve(mt.paramInfos)(transformArg)
-      cpy.Apply(tree)(tree.fun, args1)
-    }
-}
diff --git a/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala b/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala
index eac7ef11b7de..ffed65f7676e 100644
--- a/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala
@@ -14,7 +14,9 @@ import ast.tpd
 class TransformWildcards extends MiniPhase with IdentityDenotTransformer {
   import tpd._
 
-  override def phaseName: String = "transformWildcards"
+  override def phaseName: String = TransformWildcards.name
+
+  override def description: String = TransformWildcards.description
 
   override def checkPostCondition(tree: Tree)(using Context): Unit =
     tree match {
@@ -26,3 +28,7 @@ class TransformWildcards extends MiniPhase with IdentityDenotTransformer {
     if (ctx.owner.isClass) tree
     else cpy.ValDef(tree)(rhs = tree.rhs.wildcardToDefault)
 }
+
+object TransformWildcards:
+  val name: String = "transformWildcards"
+  val description: String = "replace wildcards with default values"
diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala
index e3a0489cf0fa..29fd1adb6688 100644
--- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala
@@ -91,7 +91,7 @@ class TreeChecker extends Phase with SymTransformer {
     // until erasure, see the comment above `Compiler#phases`.
     if (ctx.phaseId <= erasurePhase.id) {
       val initial = symd.initial
-      assert(symd.signature == initial.signature,
+      assert(symd == initial || symd.signature == initial.signature,
         i"""Signature of ${sym.showLocated} changed at phase ${ctx.base.fusedContaining(ctx.phase.prev)}
            |Initial info: ${initial.info}
            |Initial sig : ${initial.signature}
@@ -128,6 +128,8 @@ class TreeChecker extends Phase with SymTransformer {
     report.echo(s"checking ${ctx.compilationUnit} after phase ${fusedPhase}")(using ctx)
 
     inContext(ctx) {
+      assert(ctx.typerState.constraint.domainLambdas.isEmpty,
+        i"non-empty constraint at end of $fusedPhase: ${ctx.typerState.constraint}, ownedVars = ${ctx.typerState.ownedVars.toList}%, %")
       assertSelectWrapsNew(ctx.compilationUnit.tpdTree)
     }
 
@@ -420,6 +422,30 @@ class TreeChecker extends Phase with SymTransformer {
       assert(tree.qual.tpe.isInstanceOf[ThisType], i"expect prefix of Super to be This, actual = ${tree.qual}")
       super.typedSuper(tree, pt)
 
+    override def typedTyped(tree: untpd.Typed, pt: Type)(using Context): Tree =
+      val tpt1 = checkSimpleKinded(typedType(tree.tpt))
+      val expr1 = tree.expr match
+        case id: untpd.Ident if (ctx.mode is Mode.Pattern) && untpd.isVarPattern(id) && (id.name == nme.WILDCARD || id.name == nme.WILDCARD_STAR) =>
+          tree.expr.withType(tpt1.tpe)
+        case _ =>
+          var pt1 = tpt1.tpe
+          if pt1.isRepeatedParam then
+            pt1 = pt1.translateFromRepeated(toArray = tree.expr.typeOpt.derivesFrom(defn.ArrayClass))
+          val isAfterInlining =
+            val inliningPhase = ctx.base.inliningPhase
+            inliningPhase.exists && ctx.phase.id > inliningPhase.id
+          if isAfterInlining then
+            // The staging phase destroys in PCPCheckAndHeal the property that
+            // tree.expr.tpe <:< pt1. A test case where this arises is run-macros/enum-nat-macro.
+            // We should follow up why this happens. If the problem is fixed, we can
+            // drop the isAfterInlining special case. To reproduce the problem, just
+            // change the condition from `isAfterInlining` to `false`.
+            typed(tree.expr)
+          else
+            //println(i"typing $tree, ${tree.expr.typeOpt}, $pt1, ${ctx.mode is Mode.Pattern}")
+            typed(tree.expr, pt1)
+      untpd.cpy.Typed(tree)(expr1, tpt1).withType(tree.typeOpt)
+
     private def checkOwner(tree: untpd.Tree)(using Context): Unit = {
       def ownerMatches(symOwner: Symbol, ctxOwner: Symbol): Boolean =
         symOwner == ctxOwner ||
@@ -446,8 +472,7 @@ class TreeChecker extends Phase with SymTransformer {
       val decls   = cls.classInfo.decls.toList.toSet.filter(isNonMagicalMember)
       val defined = impl.body.map(_.symbol)
 
-      def isAllowed(sym: Symbol): Boolean =
-        sym.is(ConstructorProxy) && !ctx.phase.erasedTypes
+      def isAllowed(sym: Symbol): Boolean = sym.is(ConstructorProxy)
 
       val symbolsNotDefined = (decls -- defined - constr.symbol).filterNot(isAllowed)
 
@@ -567,7 +592,7 @@ class TreeChecker extends Phase with SymTransformer {
           !isPrimaryConstructorReturn &&
           !pt.isInstanceOf[FunOrPolyProto])
         assert(tree.tpe <:< pt, {
-          val mismatch = TypeMismatch(tree.tpe, pt)
+          val mismatch = TypeMismatch(tree.tpe, pt, Some(tree))
           i"""|${mismatch.msg}
               |found: ${infoStr(tree.tpe)}
               |expected: ${infoStr(pt)}
diff --git a/compiler/src/dotty/tools/dotc/transform/TreeMapWithStages.scala b/compiler/src/dotty/tools/dotc/transform/TreeMapWithStages.scala
index c8019bf5deb8..c4a3ead114b0 100644
--- a/compiler/src/dotty/tools/dotc/transform/TreeMapWithStages.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TreeMapWithStages.scala
@@ -13,7 +13,6 @@ import dotty.tools.dotc.core.Contexts._
 import dotty.tools.dotc.core.StagingContext._
 import dotty.tools.dotc.core.StdNames._
 import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.tasty.TreePickler.Hole
 import dotty.tools.dotc.quoted._
 import dotty.tools.dotc.util.Spans._
 import dotty.tools.dotc.util.Property
diff --git a/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala b/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala
index 6be58352e6dc..34971911bc7d 100644
--- a/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala
@@ -42,7 +42,9 @@ import dotty.tools.dotc.util.Spans.Span
 class TryCatchPatterns extends MiniPhase {
   import dotty.tools.dotc.ast.tpd._
 
-  def phaseName: String = "tryCatchPatterns"
+  override def phaseName: String = TryCatchPatterns.name
+
+  override def description: String = TryCatchPatterns.description
 
   override def runsAfter: Set[String] = Set(ElimRepeated.name)
 
@@ -98,3 +100,6 @@ class TryCatchPatterns extends MiniPhase {
     }
 }
 
+object TryCatchPatterns:
+  val name: String = "tryCatchPatterns"
+  val description: String = "compile cases in try/catch"
diff --git a/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala b/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala
index 73e75597c2b3..d7705111444f 100644
--- a/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala
@@ -21,7 +21,9 @@ import scala.annotation.tailrec
 class TupleOptimizations extends MiniPhase with IdentityDenotTransformer {
   import tpd._
 
-  def phaseName: String = "genericTuples"
+  override def phaseName: String = TupleOptimizations.name
+
+  override def description: String = TupleOptimizations.description
 
   override def transformApply(tree: tpd.Apply)(using Context): tpd.Tree =
     if (!tree.symbol.exists || tree.symbol.owner != defn.RuntimeTuplesModuleClass) tree
@@ -218,3 +220,6 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer {
     (0 until size).map(i => tup.select(nme.selectorName(i))).toList
 }
 
+object TupleOptimizations:
+  val name: String = "genericTuples"
+  val description: String = "optimize generic operations on tuples"
diff --git a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala
index 5209f71e5dd1..8ffe2198c4d9 100644
--- a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala
@@ -28,7 +28,7 @@ import config.Printers.{ transforms => debug }
 object TypeTestsCasts {
   import ast.tpd._
   import typer.Inferencing.maximizeType
-  import typer.ProtoTypes.{ constrained, newTypeVar }
+  import typer.ProtoTypes.constrained
 
   /** Whether `(x:X).isInstanceOf[P]` can be checked at runtime?
    *
@@ -98,8 +98,10 @@ object TypeTestsCasts {
         //
         // If we perform widening, we will get X = Nothing, and we don't have
         // Ident[X] <:< Ident[Int] any more.
-        TypeComparer.constrainPatternType(P1, X, widenParams = false)
-        debug.println(TypeComparer.explained(_.constrainPatternType(P1, X, widenParams = false)))
+        TypeComparer.constrainPatternType(P1, X, forceInvariantRefinement = true)
+        debug.println(
+          TypeComparer.explained(_.constrainPatternType(P1, X, forceInvariantRefinement = true))
+        )
       }
 
       // Maximization of the type means we try to cover all possible values
@@ -295,7 +297,7 @@ object TypeTestsCasts {
             derivedTree(expr, defn.Any_asInstanceOf, testType)
         }
 
-        /** Transform isInstanceOf OrType
+        /** Transform isInstanceOf
          *
          *    expr.isInstanceOf[A | B]  ~~>  expr.isInstanceOf[A] | expr.isInstanceOf[B]
          *    expr.isInstanceOf[A & B]  ~~>  expr.isInstanceOf[A] & expr.isInstanceOf[B]
@@ -337,15 +339,21 @@ object TypeTestsCasts {
           case AppliedType(tref: TypeRef, _) if tref.symbol == defn.PairClass =>
             ref(defn.RuntimeTuples_isInstanceOfNonEmptyTuple).appliedTo(expr)
           case _ =>
-            val erasedTestType = erasure(testType)
-            transformIsInstanceOf(expr, erasedTestType, erasedTestType, flagUnrelated)
+            val testWidened = testType.widen
+            defn.untestableClasses.find(testWidened.isRef(_)) match
+              case Some(untestable) =>
+                report.error(i"$untestable cannot be used in runtime type tests", tree.srcPos)
+                constant(expr, Literal(Constant(false)))
+              case _ =>
+                val erasedTestType = erasure(testType)
+                transformIsInstanceOf(expr, erasedTestType, erasedTestType, flagUnrelated)
         }
 
         if (sym.isTypeTest) {
           val argType = tree.args.head.tpe
           val isTrusted = tree.hasAttachment(PatternMatcher.TrustedTypeTestKey)
           if (!isTrusted && !checkable(expr.tpe, argType, tree.span))
-            report.warning(i"the type test for $argType cannot be checked at runtime", tree.srcPos)
+            report.uncheckedWarning(i"the type test for $argType cannot be checked at runtime", expr.srcPos)
           transformTypeTest(expr, tree.args.head.tpe, flagUnrelated = true)
         }
         else if (sym.isTypeCast)
diff --git a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala
index ecbfbeb6d6e5..7a3da6ad4bde 100644
--- a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala
+++ b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala
@@ -24,6 +24,16 @@ object TypeUtils {
     def isErasedClass(using Context): Boolean =
       self.underlyingClassRef(refinementOK = true).typeSymbol.is(Flags.Erased)
 
+    /** Is this type a checked exception? This is the case if the type
+     *  derives from Exception but not from RuntimeException. According to
+     *  that definition Throwable is unchecked. That makes sense since you should
+     *  neither throw nor catch `Throwable` anyway, so we should not define
+     *  a capability to do so.
+     */
+    def isCheckedException(using Context): Boolean =
+      self.derivesFrom(defn.ExceptionClass)
+      && !self.derivesFrom(defn.RuntimeExceptionClass)
+
     def isByName: Boolean =
       self.isInstanceOf[ExprType]
 
diff --git a/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala b/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala
index 958fce04da77..29f9e68aa7fa 100644
--- a/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala
+++ b/compiler/src/dotty/tools/dotc/transform/UncacheGivenAliases.scala
@@ -15,6 +15,7 @@ import ast.tpd
 
 object UncacheGivenAliases:
   val name: String = "uncacheGivenAliases"
+  val description: String = "avoid caching RHS of simple parameterless given aliases"
 
 /** This phase optimizes alias givens represented as lazy vals to be uncached
  *  if that does not change runtime behavior. A definition does not need to be
@@ -30,6 +31,8 @@ class UncacheGivenAliases extends MiniPhase with IdentityDenotTransformer:
 
   override def phaseName: String = UncacheGivenAliases.name
 
+  override def description: String = UncacheGivenAliases.description
+
   private def needsCache(sym: Symbol, rhs: Tree)(using Context): Boolean = rhs.tpe match
     case rhsTpe @ TermRef(NoPrefix, _)
     if rhsTpe.isStable => false
diff --git a/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala b/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala
new file mode 100644
index 000000000000..d63e2d453b44
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/UninitializedDefs.scala
@@ -0,0 +1,52 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import Contexts._
+import DenotTransformers.SymTransformer
+import Flags._
+import SymDenotations._
+import Symbols._
+import Types._
+import typer.RefChecks
+import MegaPhase.MiniPhase
+import StdNames.nme
+import ast.tpd
+
+/** This phase replaces `compiletime.uninitialized` on the right hand side of a mutable field definition by `_`.
+ *  This avoids a
+ *  ```scala
+ *  @compileTimeOnly("`uninitialized` can only be used as the right hand side of a mutable field definition")
+ *  ```
+ *  error in Erasure and communicates to Constructors that the variable does not have an initializer.
+ *
+ *  @syntax markdown
+ */
+class UninitializedDefs extends MiniPhase:
+  import tpd._
+
+  override def phaseName: String = UninitializedDefs.name
+
+  override def description: String = UninitializedDefs.description
+
+  override def transformValDef(tree: ValDef)(using Context): Tree =
+    if !hasUninitializedRHS(tree) then tree
+    else cpy.ValDef(tree)(rhs = cpy.Ident(tree.rhs)(nme.WILDCARD).withType(tree.tpt.tpe))
+
+  private def hasUninitializedRHS(tree: ValOrDefDef)(using Context): Boolean =
+    def recur(rhs: Tree): Boolean = rhs match
+      case rhs: RefTree =>
+        rhs.symbol == defn.Compiletime_uninitialized
+        && tree.symbol.is(Mutable) && tree.symbol.owner.isClass
+      case closureDef(ddef) if defn.isContextFunctionType(tree.tpt.tpe.dealias) =>
+        recur(ddef.rhs)
+      case _ =>
+        false
+    recur(tree.rhs)
+
+end UninitializedDefs
+
+object UninitializedDefs:
+  val name: String = "uninitializedDefs"
+  val description: String = "replaces `compiletime.uninitialized` by `_`"
+end UninitializedDefs
diff --git a/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala b/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala
index d9fb4567e169..879a885d626e 100644
--- a/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala
+++ b/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala
@@ -18,7 +18,9 @@ import TreeExtractors._, ValueClasses._
 class VCElideAllocations extends MiniPhase with IdentityDenotTransformer {
   import tpd._
 
-  override def phaseName: String = "vcElideAllocations"
+  override def phaseName: String = VCElideAllocations.name
+
+  override def description: String = VCElideAllocations.description
 
   override def runsAfter: Set[String] = Set(ElimErasedValueType.name)
 
@@ -47,3 +49,7 @@ class VCElideAllocations extends MiniPhase with IdentityDenotTransformer {
         tree
     }
 }
+
+object VCElideAllocations:
+  val name: String = "vcElideAllocations"
+  val description: String = "peep-hole optimization to eliminate unnecessary value class allocations"
diff --git a/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala b/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala
index a777a570da27..219945d4ebb1 100644
--- a/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala
+++ b/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala
@@ -42,7 +42,9 @@ import ExtensionMethods._, ValueClasses._
 class VCInlineMethods extends MiniPhase with IdentityDenotTransformer {
   import tpd._
 
-  override def phaseName: String = "vcInlineMethods"
+  override def phaseName: String = VCInlineMethods.name
+
+  override def description: String = VCInlineMethods.description
 
   override def runsAfter: Set[String] =
     Set(ExtensionMethods.name, PatternMatcher.name)
@@ -105,3 +107,7 @@ class VCInlineMethods extends MiniPhase with IdentityDenotTransformer {
   override def transformApply(tree: Apply)(using Context): Tree =
     rewireIfNeeded(tree)
 }
+
+object VCInlineMethods:
+  val name: String = "vcInlineMethods"
+  val description: String = "inlines calls to value class methods"
diff --git a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala
index 44655e3bead3..fb0731198a98 100644
--- a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala
+++ b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala
@@ -14,7 +14,9 @@ import dotty.tools.dotc.util.SourceFile
 class YCheckPositions extends Phase {
   import tpd._
 
-  def phaseName: String = "inlinedPositions"
+  override def phaseName: String = YCheckPositions.name
+
+  override def description: String = YCheckPositions.description
 
   override def run(using Context): Unit = () // YCheck only
 
@@ -63,3 +65,6 @@ class YCheckPositions extends Phase {
 
 }
 
+object YCheckPositions:
+  val name: String = "inlinedPositions"
+  val description: String = "check inlined positions"
diff --git a/compiler/src/dotty/tools/dotc/transform/init/Cache.scala b/compiler/src/dotty/tools/dotc/transform/init/Cache.scala
deleted file mode 100644
index 763c7b70b52e..000000000000
--- a/compiler/src/dotty/tools/dotc/transform/init/Cache.scala
+++ /dev/null
@@ -1,43 +0,0 @@
-package dotty.tools.dotc
-package transform
-package init
-
-import core._
-import Contexts._
-import Types._
-import Symbols._
-import Decorators._
-
-import ast.Trees._
-import ast.tpd
-
-import reporting.trace
-import config.Printers.init
-
-import scala.collection.mutable
-
-import Effects._, Potentials._, Summary._
-
-class Cache {
-  /** Summary of a class */
-  private val summaryCache = mutable.Map.empty[ClassSymbol, ClassSummary]
-  def summaryOf(cls: ClassSymbol)(using Env): ClassSummary =
-    if (summaryCache.contains(cls)) summaryCache(cls)
-    else trace("summary for " + cls.show, init, s => s.asInstanceOf[ClassSummary].show) {
-      val summary = Summarization.classSummary(cls)
-      summaryCache(cls) = summary
-      summary
-    }
-
-  /** Cache for outer this */
-  private case class OuterKey(warm: Warm, cls: ClassSymbol)
-  private val outerCache: mutable.Map[OuterKey, Potentials] = mutable.Map.empty
-  def resolveOuter(warm: Warm, cls: ClassSymbol)(using Env): Potentials =
-    val key = OuterKey(warm, cls)
-    if (outerCache.contains(key)) outerCache(key)
-    else {
-      val pots = Potentials.resolveOuter(warm.classSymbol, warm.outer.toPots, cls)
-      outerCache(key) = pots
-      pots
-    }
-}
diff --git a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala
index ec15d35096f6..9d95951cb36c 100644
--- a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala
+++ b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala
@@ -5,36 +5,67 @@ package init
 
 import dotty.tools.dotc._
 import ast.tpd
+import tpd._
 
 import dotty.tools.dotc.core._
 import Contexts._
 import Types._
 import Symbols._
+import StdNames._
 
 import dotty.tools.dotc.transform._
-import MegaPhase._
-
+import Phases._
 
 import scala.collection.mutable
 
+import Semantic._
 
-class Checker extends MiniPhase {
-  import tpd._
+class Checker extends Phase {
 
-  val phaseName = "initChecker"
+  override def phaseName: String = Checker.name
 
-  // cache of class summary
-  private val cache = new Cache
+  override def description: String = Checker.description
 
   override val runsAfter = Set(Pickler.name)
 
   override def isEnabled(using Context): Boolean =
     super.isEnabled && ctx.settings.YcheckInit.value
 
-  override def transformTypeDef(tree: TypeDef)(using Context): tpd.Tree = {
-    if (!tree.isClassDef) return tree
+  override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] =
+    val checkCtx = ctx.fresh.setPhase(this.start)
+    Semantic.withInitialState {
+      val traverser = new InitTreeTraverser()
+      units.foreach { unit => traverser.traverse(unit.tpdTree) }
+      given Context = checkCtx
+      Semantic.check()
+      super.runOn(units)
+    }
+
+  def run(using Context): Unit = {
+    // ignore, we already called `Semantic.check()` in `runOn`
+  }
+
+  class InitTreeTraverser(using WorkList) extends TreeTraverser {
+    override def traverse(tree: Tree)(using Context): Unit =
+      traverseChildren(tree)
+      tree match {
+        case mdef: MemberDef =>
+          // self-type annotation ValDef has no symbol
+          if mdef.name != nme.WILDCARD then
+            mdef.symbol.defTree = tree
+
+          mdef match
+          case tdef: TypeDef if tdef.isClassDef =>
+            val cls = tdef.symbol.asClass
+            val thisRef = ThisRef(cls)
+            if shouldCheckClass(cls) then Semantic.addTask(thisRef)
+          case _ =>
+
+        case _ =>
+      }
+  }
 
-    val cls = tree.symbol.asClass
+  private def shouldCheckClass(cls: ClassSymbol)(using Context) = {
     val instantiable: Boolean =
       cls.is(Flags.Module) ||
       !cls.isOneOf(Flags.AbstractOrTrait) && {
@@ -46,20 +77,10 @@ class Checker extends MiniPhase {
       }
 
     // A concrete class may not be instantiated if the self type is not satisfied
-    if (instantiable && cls.enclosingPackageClass != defn.StdLibPatchesPackage.moduleClass) {
-      implicit val state: Checking.State = Checking.State(
-        visited = Set.empty,
-        path = Vector.empty,
-        thisClass = cls,
-        fieldsInited = mutable.Set.empty,
-        parentsInited = mutable.Set.empty,
-        safePromoted = mutable.Set.empty,
-        env = Env(ctx.withOwner(cls), cache)
-      )
-
-      Checking.checkClassBody(tree)
-    }
-
-    tree
+    instantiable && cls.enclosingPackageClass != defn.StdLibPatchesPackage.moduleClass
   }
 }
+
+object Checker:
+  val name: String = "initChecker"
+  val description: String = "check initialization of objects"
diff --git a/compiler/src/dotty/tools/dotc/transform/init/Checking.scala b/compiler/src/dotty/tools/dotc/transform/init/Checking.scala
deleted file mode 100644
index cc8f95b20e8c..000000000000
--- a/compiler/src/dotty/tools/dotc/transform/init/Checking.scala
+++ /dev/null
@@ -1,420 +0,0 @@
-package dotty.tools.dotc
-package transform
-package init
-
-import scala.collection.mutable
-
-import core._
-import Contexts._
-import ast.tpd._
-import Decorators._
-import Symbols._
-import Constants.Constant
-import Types._
-import util.NoSourcePosition
-import reporting.trace
-import config.Printers.init
-
-import Effects._, Potentials._, Summary._, Util._, Errors._
-
-object Checking {
-  /** The checking state
-   *
-   *  Why `visited` is a set of effects instead of `Symbol`? Think the following program:
-   *
-   *      class C(x: Int, a: A @cold) {
-   *        val n = if (x > 0) new C(x - 1, a).m() else 0
-   *        val b: Int = this.m()
-   *        def m(): Int = b
-   *      }
-   *
-   */
-
-  case class State(
-    var visited: Set[Effect],                  // effects that have been checked or are being checked
-    path: Vector[Tree],                        // the path that leads to the current effect
-    thisClass: ClassSymbol,                    // the concrete class of `this`
-    fieldsInited: mutable.Set[Symbol],
-    parentsInited: mutable.Set[ClassSymbol],
-    safePromoted: mutable.Set[Potential],      // Potentials that can be safely promoted
-    env: Env
-  ) {
-    def withOwner[T](sym: Symbol)(op: State ?=> T): T =
-      val state = this.copy(env = env.withOwner(sym))
-      val res = op(using state)
-      this.visited = state.visited
-      res
-
-
-    def visit[T](eff: Effect)(op: State ?=> T): T =
-      val state: State = this.copy(path = path :+ eff.source, visited = this.visited + eff)
-      val res = op(using state)
-      this.visited = state.visited
-      res
-
-    def test(op: State ?=> Errors): Errors = {
-      val savedVisited = visited
-      val errors = op(using this)
-      visited = savedVisited
-      errors
-    }
-  }
-
-  given theEnv(using State): Env = summon[State].env
-  given theCtx(using State): Context = summon[State].env.ctx
-
-  private def check(eff: Effect)(using state: State): Errors = {
-    trace("checking effect " + eff.show, init, errs => Errors.show(errs.asInstanceOf[Errors])) {
-      if (state.visited.contains(eff)) {
-        traceIndented("Already checked " + eff.show, init)
-        Errors.empty
-      }
-      else
-        state.visit(eff) {
-          eff match {
-            case eff: Promote      => Checking.checkPromote(eff)
-            case eff: FieldAccess  => Checking.checkFieldAccess(eff)
-            case eff: MethodCall   => Checking.checkMethodCall(eff)
-          }
-        }
-    }
-  }
-
-  private def checkEffects(effs: Effects)(using state: State): Unit = traceOp("checking effects " + Effects.show(effs), init) {
-    for {
-      eff <- effs
-      error <- check(eff)
-    } error.issue
-  }
-
-  /** Check that the given concrete class may be initialized safely
-   *
-   *  It assumes that all definitions are properly summarized before-hand.
-   *  However, summarization can be done lazily on-demand to improve
-   *  performance.
-   */
-  def checkClassBody(cdef: TypeDef)(using state: State): Unit = {
-    traceIndented("\n\n>>>> checking " + cdef.symbol.show, init)
-
-    val cls = cdef.symbol.asClass
-    val tpl = cdef.rhs.asInstanceOf[Template]
-
-    if state.parentsInited.contains(cls) then return
-
-    // mark current class as initialized, required for linearization
-    state.parentsInited += cls
-
-    def checkClassBodyStat(tree: Tree)(using state: State): Unit = traceOp("checking " + tree.show, init) {
-      tree match {
-        case vdef : ValDef =>
-          val summary = Summarization.analyze(vdef.rhs)
-          theEnv.summaryOf(cls).cacheFor(vdef.symbol, summary)
-          if (!vdef.symbol.isOneOf(Flags.Lazy | Flags.Deferred)) {
-            checkEffects(summary.effs)
-            traceIndented(vdef.symbol.show + " initialized", init)
-            state.fieldsInited += vdef.symbol
-          }
-
-        case tree =>
-          val summary = Summarization.analyze(tree)
-          checkEffects(summary.effs)
-      }
-    }
-
-    // check parent calls : follows linearization ordering
-    // see spec 5.1 about "Template Evaluation".
-    // https://www.scala-lang.org/files/archive/spec/2.13/05-classes-and-objects.html
-
-    def checkConstructor(ctor: Symbol, tp: Type, source: Tree)(using state: State): Unit = traceOp("checking " + ctor.show, init) {
-      val cls = ctor.owner
-      val classDef = cls.defTree
-      if (!classDef.isEmpty)
-        state.withOwner(cls) {
-          if (ctor.isPrimaryConstructor) checkClassBody(classDef.asInstanceOf[TypeDef])
-          else checkSecondaryConstructor(ctor)
-        }
-    }
-
-    def checkSecondaryConstructor(ctor: Symbol)(using state: State): Unit = traceOp("checking " + ctor.show, init) {
-      val Block(ctorCall :: stats, expr) = ctor.defTree.asInstanceOf[DefDef].rhs
-      val cls = ctor.owner.asClass
-
-      traceOp("check ctor: " + ctorCall.show, init) {
-        val ctor = ctorCall.symbol
-        if (ctor.isPrimaryConstructor)
-          checkClassBody(cls.defTree.asInstanceOf[TypeDef])
-        else
-          checkSecondaryConstructor(ctor)
-      }
-
-      checkStats(stats :+ expr, ctor)
-    }
-
-    def checkStats(stats: List[Tree], owner: Symbol)(using state: State): Unit =
-      stats.foreach { stat =>
-        val summary = Summarization.analyze(stat)(theEnv.withOwner(owner))
-        checkEffects(summary.effs)
-      }
-
-    cls.paramAccessors.foreach { acc =>
-      if (!acc.is(Flags.Method)) {
-        traceIndented(acc.show + " initialized", init)
-        state.fieldsInited += acc
-      }
-    }
-
-    tpl.parents.foreach {
-      case tree @ Block(_, parent) =>
-        checkConstructor(funPart(parent).symbol, parent.tpe, tree)
-
-      case tree @ Apply(Block(_, parent), _) =>
-        checkConstructor(funPart(parent).symbol, tree.tpe, tree)
-
-      case parent : Apply =>
-        checkConstructor(funPart(parent).symbol, parent.tpe, parent)
-
-      case ref =>
-        val cls = ref.tpe.classSymbol.asClass
-        if (cls.primaryConstructor.exists)
-          checkConstructor(cls.primaryConstructor, ref.tpe, ref)
-    }
-
-    // check class body
-    tpl.body.foreach { checkClassBodyStat(_) }
-  }
-
-  private def checkMethodCall(eff: MethodCall)(using state: State): Errors =
-    val MethodCall(pot, sym) = eff
-    pot match {
-      case thisRef: ThisRef =>
-        val target = resolve(state.thisClass, sym)
-        if (!target.isOneOf(Flags.Method | Flags.Lazy))
-          check(FieldAccess(pot, target)(eff.source))
-        else if (target.hasSource) {
-          val effs = thisRef.effectsOf(target).toList
-          effs.flatMap { check(_) }
-        }
-        else CallUnknown(target, eff.source, state.path).toErrors
-
-      case SuperRef(thisRef: ThisRef, supercls) =>
-        val target = resolveSuper(state.thisClass, supercls, sym)
-        if (!target.is(Flags.Method))
-          check(FieldAccess(pot, target)(eff.source))
-        else if (target.hasSource) {
-          val effs = thisRef.effectsOf(target).toList
-          effs.flatMap { check(_) }
-        }
-        else CallUnknown(target, eff.source, state.path).toErrors
-
-      case warm @ Warm(cls, outer) =>
-        val target = resolve(cls, sym)
-
-        if (target.hasSource) {
-          val effs = warm.effectsOf(target).toList
-          effs.flatMap { check(_) }
-        }
-        else if (!sym.isConstructor)
-          CallUnknown(target, eff.source, state.path).toErrors
-        else
-          Errors.empty
-
-      case _: Cold =>
-        CallCold(sym, eff.source, state.path).toErrors
-
-      case Fun(pots, effs) =>
-        // TODO: assertion might be false, due to SAM
-        if (sym.name.toString == "apply") effs.toList.flatMap { check(_) }
-        else Errors.empty
-        // curried, tupled, toString are harmless
-
-      case pot =>
-        val Summary(pots, effs) = expand(pot)
-        val effs2 = pots.map(MethodCall(_, sym)(eff.source))
-        (effs2 ++ effs).toList.flatMap(check(_))
-    }
-
-  private def checkFieldAccess(eff: FieldAccess)(using state: State): Errors =
-    val FieldAccess(pot, field) = eff
-    pot match {
-      case _: ThisRef =>
-        val target = resolve(state.thisClass, field)
-        if (target.is(Flags.Lazy)) check(MethodCall(pot, target)(eff.source))
-        else if (!state.fieldsInited.contains(target)) AccessNonInit(target, state.path).toErrors
-        else Errors.empty
-
-      case SuperRef(_: ThisRef, supercls) =>
-        val target = resolveSuper(state.thisClass, supercls, field)
-        if (target.is(Flags.Lazy)) check(MethodCall(pot, target)(eff.source))
-        else if (!state.fieldsInited.contains(target)) AccessNonInit(target, state.path).toErrors
-        else Errors.empty
-
-      case Warm(cls, outer) =>
-        // all fields of warm values are initialized
-        val target = resolve(cls, field)
-        if (target.is(Flags.Lazy)) check(MethodCall(pot, target)(eff.source))
-        else Errors.empty
-
-      case _: Cold =>
-        AccessCold(field, eff.source, state.path).toErrors
-
-      case Fun(pots, effs) =>
-        throw new Exception("Unexpected effect " + eff.show)
-
-      case pot =>
-        val Summary(pots, effs) = expand(pot)
-        val effs2 = pots.map(FieldAccess(_, field)(eff.source))
-        (effs2 ++ effs).toList.flatMap(check(_))
-
-    }
-
-
-  private def checkPromote(eff: Promote)(using state: State): Errors =
-    if (state.safePromoted.contains(eff.potential)) Errors.empty
-    else {
-      val pot = eff.potential
-      val errs = pot match {
-        case pot: ThisRef =>
-          // If we have all fields initialized, then we can promote This to hot.
-          val classRef = state.thisClass.info.asInstanceOf[ClassInfo].appliedRef
-          val allFieldsInited = classRef.fields.forall { denot =>
-            val sym = denot.symbol
-            sym.isOneOf(Flags.Lazy | Flags.Deferred) || state.fieldsInited.contains(sym)
-          }
-          if (allFieldsInited)
-            Errors.empty
-          else
-            PromoteThis(pot, eff.source, state.path).toErrors
-
-        case _: Cold =>
-          PromoteCold(eff.source, state.path).toErrors
-
-        case pot @ Warm(cls, outer) =>
-          val errors = state.test { checkPromote(Promote(outer)(eff.source)) }
-          if (errors.isEmpty) Errors.empty
-          else PromoteWarm(pot, eff.source, state.path).toErrors
-
-        case Fun(pots, effs) =>
-          val errs1 = state.test {
-            effs.toList.flatMap(check(_))
-          }
-          val errs2 = state.test {
-            pots.toList.flatMap { pot =>
-              checkPromote(Promote(pot)(eff.source))
-            }
-          }
-
-          if (errs1.nonEmpty || errs2.nonEmpty)
-            UnsafePromotion(pot, eff.source, state.path, errs1 ++ errs2).toErrors
-          else
-            Errors.empty
-
-        case pot =>
-          val Summary(pots, effs) = expand(pot)
-          val effs2 = pots.map(Promote(_)(eff.source))
-          (effs2 ++ effs).toList.flatMap(check(_))
-      }
-      // If we can safely promote, then we don't need to check again
-      if (errs.isEmpty)
-        state.safePromoted += pot
-      errs
-    }
-
-  private def expand(pot: Potential)(using state: State): Summary = trace("expand " + pot.show, init, _.asInstanceOf[Summary].show) {
-    pot match {
-      case MethodReturn(pot1, sym) =>
-        pot1 match {
-          case thisRef: ThisRef =>
-            val target = resolve(state.thisClass, sym)
-            if (target.hasSource) Summary(thisRef.potentialsOf(target), Effects.empty)
-            else Summary.empty // warning already issued in call effect
-
-          case SuperRef(thisRef: ThisRef, supercls) =>
-            val target = resolveSuper(state.thisClass, supercls, sym)
-            if (target.hasSource) Summary(thisRef.potentialsOf(target), Effects.empty)
-            else Summary.empty // warning already issued in call effect
-
-
-          case Fun(pots, effs) =>
-            val name = sym.name.toString
-            if (name == "apply") Summary(pots)
-            else if (name == "tupled") Summary(pot1)
-            else if (name == "curried") {
-              val arity = defn.functionArity(sym.info.finalResultType)
-              val pots = (1 until arity).foldLeft(Vector(pot1)) { (acc, i) =>
-                Vector(Fun(acc, Effects.empty)(pot1.source))
-              }
-              Summary(pots)
-            }
-            else Summary.empty
-
-          case warm : Warm =>
-            val target = resolve(warm.classSymbol, sym)
-            if (target.hasSource) Summary(warm.potentialsOf(target), Effects.empty)
-            else Summary.empty // warning already issued in call effect
-
-          case _: Cold =>
-            Summary.empty // error already reported, ignore
-
-          case _ =>
-            val Summary(pots, effs) = expand(pot1)
-            val Summary(pots2, effs2) = pots.select(sym, pot.source, ignoreSelectEffect = false)
-            Summary(pots2, effs ++ effs2)
-        }
-
-      case FieldReturn(pot1, sym) =>
-        pot1 match {
-          case thisRef: ThisRef =>
-            val target = resolve(state.thisClass, sym)
-            if (sym.hasSource) Summary(thisRef.potentialsOf(target), Effects.empty)
-            else Summary(Cold()(pot.source))
-
-          case SuperRef(thisRef: ThisRef, supercls) =>
-            val target = resolveSuper(state.thisClass, supercls, sym)
-            if (target.hasSource) Summary(thisRef.potentialsOf(target), Effects.empty)
-            else Summary(Cold()(pot.source))
-
-          case _: Fun =>
-            throw new Exception("Unexpected code reached")
-
-          case warm: Warm =>
-            val target = resolve(warm.classSymbol, sym)
-            if (target.hasSource) Summary(warm.potentialsOf(target), Effects.empty)
-            else Summary(Cold()(pot.source))
-
-          case _: Cold =>
-            Summary.empty // error already reported, ignore
-
-          case _ =>
-            val Summary(pots, effs) = expand(pot1)
-            val Summary(pots2, effs2) = pots.select(sym, pot.source, ignoreSelectEffect = false)
-            Summary(pots2, effs ++ effs2)
-        }
-
-      case Outer(pot1, cls) =>
-        pot1 match {
-          case _: ThisRef =>
-            // all outers for `this` are assumed to be hot
-            Summary.empty
-
-          case _: Fun =>
-            throw new Exception("Unexpected code reached")
-
-          case warm: Warm =>
-            Summary(warm.resolveOuter(cls))
-
-          case _ =>
-            val Summary(pots, effs) = expand(pot1)
-            val pots2 = pots.map { Outer(_, cls)(pot.source): Potential }
-            Summary(pots2, effs)
-        }
-
-      case _: ThisRef | _: Fun | _: Warm | _: Cold =>
-        Summary(pot)
-
-      case SuperRef(pot1, supercls) =>
-        val Summary(pots, effs) = expand(pot1)
-        val pots2 = pots.map { SuperRef(_, supercls)(pot.source): Potential }
-        Summary(pots2, effs)
-    }
-  }
-}
diff --git a/compiler/src/dotty/tools/dotc/transform/init/Effects.scala b/compiler/src/dotty/tools/dotc/transform/init/Effects.scala
deleted file mode 100644
index ce9d8d8aa497..000000000000
--- a/compiler/src/dotty/tools/dotc/transform/init/Effects.scala
+++ /dev/null
@@ -1,80 +0,0 @@
-package dotty.tools.dotc
-package transform
-package init
-
-import ast.tpd._
-import reporting.trace
-import config.Printers.init
-import core.Types._
-import core.Symbols._
-import core.Contexts._
-
-import Potentials._
-
-object Effects {
-  type Effects = Vector[Effect]
-  val empty: Effects = Vector.empty
-
-  def show(effs: Effects)(using Context): String =
-    effs.map(_.show).mkString(", ")
-
-  /** Effects that are related to safe initialization performed on potentials */
-  sealed trait Effect {
-    def potential: Potential
-
-    def show(using Context): String
-
-    def source: Tree
-
-    def toEffs: Effects = Vector(this)
-  }
-
-  /** A promotion effect means that a value that's possibly under initialization
-   *  is promoted from the initializing world to the fully-initialized world.
-   *
-   *  Essentially, this effect enforces that the object pointed to by
-   *  `potential` is transitively initialized.
-   *
-   *  This effect is trigger in several scenarios:
-   *  - a potential is used as arguments to method calls or new-expressions
-   *  - a potential is assigned (not initialize) to a field
-   *  - the selection chain on a potential is too long
-   */
-  case class Promote(potential: Potential)(val source: Tree) extends Effect {
-    def show(using Context): String = potential.show + "↑"
-  }
-
-  /** Field access, `a.f` */
-  case class FieldAccess(potential: Potential, field: Symbol)(val source: Tree) extends Effect {
-    assert(field != NoSymbol)
-
-    def show(using Context): String = potential.show + "." + field.name.show + "!"
-  }
-
-  /** Method call, `a.m()` */
-  case class MethodCall(potential: Potential, method: Symbol)(val source: Tree) extends Effect {
-    assert(method != NoSymbol)
-
-    def show(using Context): String = potential.show + "." + method.name.show + "!"
-  }
-
-  // ------------------ operations on effects ------------------
-
-  def asSeenFrom(eff: Effect, thisValue: Potential)(implicit env: Env): Effect =
-    trace(eff.show + " asSeenFrom " + thisValue.show + ", current = " + currentClass.show, init, _.asInstanceOf[Effect].show) { eff match {
-      case Promote(pot) =>
-        val pot1 = Potentials.asSeenFrom(pot, thisValue)
-        Promote(pot1)(eff.source)
-
-      case FieldAccess(pot, field) =>
-        val pot1 = Potentials.asSeenFrom(pot, thisValue)
-        FieldAccess(pot1, field)(eff.source)
-
-      case MethodCall(pot, sym) =>
-        val pot1 = Potentials.asSeenFrom(pot, thisValue)
-        MethodCall(pot1, sym)(eff.source)
-    } }
-
-  def asSeenFrom(effs: Effects, thisValue: Potential)(implicit env: Env): Effects =
-    effs.map(asSeenFrom(_, thisValue))
-}
\ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/transform/init/Env.scala b/compiler/src/dotty/tools/dotc/transform/init/Env.scala
deleted file mode 100644
index 02ff4aeef2cc..000000000000
--- a/compiler/src/dotty/tools/dotc/transform/init/Env.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-package dotty.tools.dotc
-package transform
-package init
-
-import core._
-import Contexts._
-import Types._
-import Symbols._
-import Decorators._
-
-import Effects._, Potentials._, Summary._
-
-given theCtx(using Env): Context = summon[Env].ctx
-
-case class Env(ctx: Context, cache: Cache) {
-  private implicit def self: Env = this
-
-  /** Can the method call be ignored? */
-  def canIgnoreMethod(symbol: Symbol): Boolean =
-    !symbol.exists || // possible with outer selection, tests/init/crash/i1990b.scala
-    canIgnoreClass(symbol.owner)
-
-  def canIgnoreClass(cls: Symbol): Boolean =
-    cls == defn.AnyClass ||
-    cls == defn.AnyValClass ||
-    cls == defn.ObjectClass
-
-  def withCtx(newCtx: Context): Env = this.copy(ctx = newCtx)
-
-  def withOwner(owner: Symbol) = this.copy(ctx = this.ctx.withOwner(owner))
-
-  /** Whether values of a given type is always fully initialized?
-   *
-   *  It's true for primitive values
-   */
-  def isAlwaysInitialized(tp: Type)(implicit env: Env): Boolean = {
-    val sym = tp.widen.finalResultType.typeSymbol
-    sym.isPrimitiveValueClass || sym == defn.StringClass
-  }
-
-  def summaryOf(cls: ClassSymbol): ClassSummary = cache.summaryOf(cls)
-
-  def resolveOuter(warm: Warm, cls: ClassSymbol): Potentials = cache.resolveOuter(warm, cls)
-}
diff --git a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala
index 73b8cd123033..9f3e0d1fcc47 100644
--- a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala
+++ b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala
@@ -9,10 +9,8 @@ import core._
 import Decorators._, printing.SyntaxHighlighting
 import Types._, Symbols._, Contexts._
 
-import Effects._, Potentials._
-
 object Errors {
-  type Errors = List[Error]
+  type Errors = Seq[Error]
   val empty: Errors = Nil
 
   def show(errs: Errors)(using Context): String =
@@ -20,7 +18,7 @@ object Errors {
 
   sealed trait Error {
     def source: Tree
-    def trace: Vector[Tree]
+    def trace: Seq[Tree]
     def show(using Context): String
 
     def issue(using Context): Unit =
@@ -57,10 +55,12 @@ object Errors {
       case unsafe: UnsafePromotion => unsafe.errors.flatMap(_.flatten)
       case _ => this :: Nil
     }
+
+    override def toString() = this.getClass.getName
   }
 
   /** Access non-initialized field */
-  case class AccessNonInit(field: Symbol, trace: Vector[Tree]) extends Error {
+  case class AccessNonInit(field: Symbol, trace: Seq[Tree]) extends Error {
     def source: Tree = trace.last
     def show(using Context): String =
       "Access non-initialized " + field.show + "."
@@ -70,52 +70,37 @@ object Errors {
   }
 
   /** Promote `this` under initialization to fully-initialized */
-  case class PromoteThis(pot: ThisRef, source: Tree, trace: Vector[Tree]) extends Error {
-    def show(using Context): String = "Promote the value under initialization to fully-initialized."
-  }
-
-  /** Promote `this` under initialization to fully-initialized */
-  case class PromoteWarm(pot: Warm, source: Tree, trace: Vector[Tree]) extends Error {
-    def show(using Context): String =
-      "Promoting the value under initialization to fully-initialized."
+  case class PromoteError(msg: String, source: Tree, trace: Seq[Tree]) extends Error {
+    def show(using Context): String = "Cannot prove that the value is fully initialized. " + msg + "."
   }
 
-  /** Promote a cold value under initialization to fully-initialized */
-  case class PromoteCold(source: Tree, trace: Vector[Tree]) extends Error {
+  case class AccessCold(field: Symbol, source: Tree, trace: Seq[Tree]) extends Error {
     def show(using Context): String =
-      "Promoting the value " + source.show + " to fully-initialized while it is under initialization" + "."
+      "Access field " + source.show + " on a value with an unknown initialization status."
   }
 
-  case class AccessCold(field: Symbol, source: Tree, trace: Vector[Tree]) extends Error {
-    def show(using Context): String =
-      "Access field " + source.show + " on a value with an unknown initialization status" + "."
-  }
-
-  case class CallCold(meth: Symbol, source: Tree, trace: Vector[Tree]) extends Error {
+  case class CallCold(meth: Symbol, source: Tree, trace: Seq[Tree]) extends Error {
     def show(using Context): String =
       "Call method " + source.show + " on a value with an unknown initialization" + "."
   }
 
-  case class CallUnknown(meth: Symbol, source: Tree, trace: Vector[Tree]) extends Error {
+  case class CallUnknown(meth: Symbol, source: Tree, trace: Seq[Tree]) extends Error {
     def show(using Context): String =
-      "Calling the external method " + meth.show + " may cause initialization errors" + "."
+      val prefix = if meth.is(Flags.Method) then "Calling the external method " else "Accessing the external field "
+      prefix + meth.show + " may cause initialization errors" + "."
   }
 
   /** Promote a value under initialization to fully-initialized */
-  case class UnsafePromotion(pot: Potential, source: Tree, trace: Vector[Tree], errors: Errors) extends Error {
+  case class UnsafePromotion(msg: String, source: Tree, trace: Seq[Tree], errors: Errors) extends Error {
     assert(errors.nonEmpty)
-
     override def issue(using Context): Unit =
       report.warning(show, source.srcPos)
 
     def show(using Context): String = {
       var index = 0
-      "Promoting the value to fully-initialized is unsafe.\n" + stacktrace +
-        "\nThe unsafe promotion may cause the following problem(s):\n" +
-        (errors.flatMap(_.flatten).map { error =>
-          index += 1
-          s"\n$index. " + error.show + error.stacktrace
-        }.mkString)
+      "Cannot prove that the value is fully initialized. " + msg + ".\n" + stacktrace +
+        "\nThe unsafe promotion may cause the following problem:\n" +
+        errors.head.show + errors.head.stacktrace
     }
   }
 }
\ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/transform/init/Potentials.scala b/compiler/src/dotty/tools/dotc/transform/init/Potentials.scala
deleted file mode 100644
index 39eac29a2741..000000000000
--- a/compiler/src/dotty/tools/dotc/transform/init/Potentials.scala
+++ /dev/null
@@ -1,232 +0,0 @@
-package dotty.tools
-package dotc
-package transform
-package init
-
-import scala.collection.mutable
-
-import ast.tpd._
-import reporting.trace
-import config.Printers.init
-
-import core._
-import Types._, Symbols._, Contexts._
-
-import Effects._, Summary._
-
-object Potentials {
-  type Potentials = Vector[Potential]
-  val empty: Potentials = Vector.empty
-
-  def show(pots: Potentials)(using Context): String =
-    pots.map(_.show).mkString(", ")
-
-  /** A potential represents an aliasing of a value that is possibly under initialization */
-  sealed trait Potential {
-    /** Length of the potential. Used for widening */
-    def size: Int = 1
-
-    /** Nested levels of the potential. Used for widening */
-    def level: Int = 1
-
-    def show(using Context): String
-    def source: Tree
-
-    def toPots: Potentials = Vector(this)
-  }
-
-  sealed trait Refinable extends Potential {
-    /** Effects of a method call or a lazy val access
-     *
-     *  The method performs prefix substitution
-     */
-    def effectsOf(sym: Symbol)(implicit env: Env): Effects = trace("effects of " + sym.show, init, r => Effects.show(r.asInstanceOf)) {
-      val cls = sym.owner.asClass
-      val effs = env.summaryOf(cls).effectsOf(sym)
-      this match
-      case _: ThisRef => effs
-      case _ =>  Effects.asSeenFrom(effs, this)
-    }
-
-    /** Potentials of a field, a method call or a lazy val access
-     *
-     *  The method performs prefix substitution
-     */
-    def potentialsOf(sym: Symbol)(implicit env: Env): Potentials = trace("potentials of " + sym.show, init, r => Potentials.show(r.asInstanceOf)) {
-      val cls = sym.owner.asClass
-      val pots = env.summaryOf(cls).potentialsOf(sym)
-      this match
-      case _: ThisRef => pots
-      case _ => Potentials.asSeenFrom(pots, this)
-    }
-  }
-
-  /** The object pointed by `this` */
-  case class ThisRef()(val source: Tree) extends Refinable {
-    def show(using Context): String = "this"
-  }
-
-  /** The object pointed by `C.super.this`, mainly used for override resolution */
-  case class SuperRef(pot: Potential, supercls: ClassSymbol)(val source: Tree) extends Potential {
-    override def size: Int = pot.size
-    override def level: Int = pot.level
-    def show(using Context): String = pot.show + ".super[" + supercls.name.show + "]"
-  }
-
-  /** A warm potential represents an object of which all fields are initialized, but it may contain
-   *  reference to objects under initialization.
-   *
-   *  @param classSymbol  The concrete class of the object
-   *  @param outer        The potential for `this` of the enclosing class
-   */
-  case class Warm(classSymbol: ClassSymbol, outer: Potential)(val source: Tree) extends Refinable {
-    override def level: Int = 1 + outer.level
-    def show(using Context): String = "Warm[" + classSymbol.show + ", outer = " + outer.show + "]"
-
-    def resolveOuter(cls: ClassSymbol)(implicit env: Env): Potentials =
-      env.resolveOuter(this, cls)
-  }
-
-  def resolveOuter(cur: ClassSymbol, outerPots: Potentials, cls: ClassSymbol)(implicit env: Env): Potentials =
-  trace("resolveOuter for " + cls.show + ", outer = " + show(outerPots) + ", cur = " + cur.show, init, s => Potentials.show(s.asInstanceOf[Potentials])) {
-    if (cur == cls) outerPots
-    else {
-      val bottomClsSummary = env.summaryOf(cur)
-      bottomClsSummary.parentOuter.find((k, v) => k.derivesFrom(cls)) match {
-        case Some((parentCls, pots)) =>
-          val rebased: Potentials = outerPots.flatMap { Potentials.asSeenFrom(pots, _) }
-          resolveOuter(parentCls, rebased, cls)
-        case None => unreachable()
-      }
-    }
-  }
-
-  /** The Outer potential for `classSymbol` of the object `pot`
-   *
-   *  It's only used internally for expansion of potentials.
-   *
-   *  Note: Usage of `Type.baseType(cls)` may simplify the code.
-   *        Current implementation avoids using complex type machinary,
-   *        and may be potentially faster.
-   */
-  case class Outer(pot: Potential, classSymbol: ClassSymbol)(val source: Tree) extends Potential {
-    // be lenient with size of outer selection, no worry for non-termination
-    override def size: Int = pot.size
-    override def level: Int = pot.level
-    def show(using Context): String = pot.show + ".outer[" + classSymbol.show + "]"
-  }
-
-  /** The object pointed by `this.f` */
-  case class FieldReturn(potential: Potential, field: Symbol)(val source: Tree) extends Potential {
-    assert(field != NoSymbol)
-
-    override def size: Int = potential.size + 1
-    override def level: Int = potential.level
-    def show(using Context): String = potential.show + "." + field.name.show
-  }
-
-  /** The object returned by `this.m()` */
-  case class MethodReturn(potential: Potential, method: Symbol)(val source: Tree) extends Potential {
-    assert(method != NoSymbol)
-
-    override def size: Int = potential.size + 1
-    override def level: Int = potential.level
-    def show(using Context): String = potential.show + "." + method.name.show
-  }
-
-  /** The object whose initialization status is unknown */
-  case class Cold()(val source: Tree) extends Potential {
-    def show(using Context): String = "Cold"
-  }
-
-  /** A function when called will produce the `effects` and return the `potentials` */
-  case class Fun(potentials: Potentials, effects: Effects)(val source: Tree) extends Potential {
-    override def size: Int = 1
-
-    override def level: Int = {
-      val max1 = potentials.map(_.level).max
-      val max2 = effects.map(_.potential.level).max
-      if max1 > max2 then max1 else max2
-    }
-
-    def show(using Context): String =
-      "Fun[pots = " + potentials.map(_.show).mkString(";") + ", effs = " + effects.map(_.show).mkString(";") + "]"
-  }
-
-  // ------------------ operations on potentials ------------------
-
-  /** Selection on a set of potentials
-   *
-   *  @param ignoreSelectEffect Where selection effects should be ignored
-   *
-   *  During expansion of potentials, we ignore select effects and only care
-   *  about promotion effects. This is because the selection effects have
-   *  already been checked.
-   */
-  extension (ps: Potentials) def select (symbol: Symbol, source: Tree, ignoreSelectEffect: Boolean = true)(using Context): Summary =
-    ps.foldLeft(Summary.empty) { case (summary, pot) =>
-      // max potential length
-      // TODO: it can be specified on a project basis via compiler options
-      if (pot.size > 2)
-        summary + Promote(pot)(pot.source)
-      else if (symbol.isConstructor)
-        val res = summary + pot
-        if ignoreSelectEffect then res + MethodCall(pot, symbol)(source)
-        else res
-      else if (symbol.isOneOf(Flags.Method | Flags.Lazy))
-        val res = summary + MethodReturn(pot, symbol)(source)
-        if ignoreSelectEffect then res + MethodCall(pot, symbol)(source)
-        else res
-      else
-        val res = summary + FieldReturn(pot, symbol)(source)
-        if ignoreSelectEffect then res + FieldAccess(pot, symbol)(source)
-        else res
-    }
-
-  extension (ps: Potentials) def promote(source: Tree): Effects = ps.map(Promote(_)(source))
-
-  def asSeenFrom(pot: Potential, thisValue: Potential)(implicit env: Env): Potential = trace(pot.show + " asSeenFrom " + thisValue.show, init, _.asInstanceOf[Potential].show) {
-    pot match {
-      case MethodReturn(pot1, sym) =>
-        val pot = asSeenFrom(pot1, thisValue)
-        MethodReturn(pot, sym)(pot.source)
-
-      case FieldReturn(pot1, sym) =>
-        val pot = asSeenFrom(pot1, thisValue)
-        FieldReturn(pot, sym)(pot.source)
-
-      case Outer(pot1, cls) =>
-        val pot = asSeenFrom(pot1, thisValue)
-        Outer(pot, cls)(pot.source)
-
-      case _: ThisRef =>
-        thisValue
-
-      case Fun(pots, effs) =>
-        val pots1 = Potentials.asSeenFrom(pots, thisValue)
-        val effs1 = Effects.asSeenFrom(effs, thisValue)
-        Fun(pots1, effs1)(pot.source)
-
-      case Warm(cls, outer2) =>
-        // widening to terminate
-        val thisValue2 =
-          if thisValue.level + outer2.level > 4 then
-            Cold()(outer2.source)
-          else
-            thisValue
-
-        val outer3 = asSeenFrom(outer2, thisValue2)
-        Warm(cls, outer3)(pot.source)
-
-      case _: Cold =>
-        pot
-
-      case SuperRef(potThis, supercls) =>
-        val pot1 = asSeenFrom(potThis, thisValue)
-        SuperRef(pot1, supercls)(pot.source)
-    }
-  }
-
-  def asSeenFrom(pots: Potentials, thisValue: Potential)(implicit env: Env): Potentials =
-    pots.map(asSeenFrom(_, thisValue))
-}
\ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala
new file mode 100644
index 000000000000..c1e7d6ab9f2a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala
@@ -0,0 +1,1594 @@
+package dotty.tools.dotc
+package transform
+package init
+
+import core._
+import Contexts._
+import Symbols._
+import Types._
+import StdNames._
+import NameKinds.OuterSelectName
+
+import ast.tpd._
+import util.EqHashMap
+import config.Printers.init as printer
+import reporting.trace as log
+
+import Errors._
+
+import scala.collection.mutable
+import scala.annotation.tailrec
+import scala.annotation.constructorOnly
+
+object Semantic {
+
+// ----- Domain definitions --------------------------------
+
+  /** Abstract values
+   *
+   *  Value = Hot | Cold | Warm | ThisRef | Fun | RefSet
+   *
+   *                 Cold
+   *        ┌──────►  ▲  ◄────┐  ◄────┐
+   *        │         │       │       │
+   *        │         │       │       │
+   *        |         │       │       │
+   *        |         │       │       │
+   *     ThisRef     Warm   Fun    RefSet
+   *        │         ▲       ▲       ▲
+   *        │         │       │       │
+   *        |         │       │       │
+   *        ▲         │       │       │
+   *        │         │       │       │
+   *        └─────────┴───────┴───────┘
+   *                  Hot
+   *
+   *   The diagram above does not reflect relationship between `RefSet`
+   *   and other values. `RefSet` represents a set of values which could
+   *   be `ThisRef`, `Warm` or `Fun`. The following ordering applies for
+   *   RefSet:
+   *
+   *         R_a ⊑ R_b if R_a ⊆ R_b
+   *
+   *         V ⊑ R if V ∈ R
+   *
+   */
+  sealed abstract class Value {
+    def show: String = this.toString()
+
+    def isHot = this == Hot
+    def isCold = this == Cold
+    def isWarm = this.isInstanceOf[Warm]
+    def isThisRef = this.isInstanceOf[ThisRef]
+  }
+
+  /** A transitively initialized object */
+  case object Hot extends Value
+
+  /** An object with unknown initialization status */
+  case object Cold extends Value
+
+  sealed abstract class Ref extends Value {
+    def klass: ClassSymbol
+    def outer: Value
+  }
+
+  /** A reference to the object under initialization pointed by `this` */
+  case class ThisRef(klass: ClassSymbol) extends Ref {
+    val outer = Hot
+  }
+
+  /** An object with all fields initialized but reaches objects under initialization
+   *
+   *  We need to restrict nesting levels of `outer` to finitize the domain.
+   */
+  case class Warm(klass: ClassSymbol, outer: Value, ctor: Symbol, args: List[Value]) extends Ref {
+
+    /** If a warm value is in the process of populating parameters, class bodies are not executed. */
+    private var populatingParams: Boolean = false
+
+    def isPopulatingParams = populatingParams
+
+    /** Ensure that outers and class parameters are initialized.
+     *
+     *  Fields in class body are not initialized.
+     *
+     *  We need to populate class parameters and outers for warm values for the
+     *  following cases:
+     *
+     *  - Widen an already checked warm value to another warm value without
+     *    corresponding object
+     *
+     *  - Using a warm value from the cache, whose corresponding object from
+     *    the last iteration has been removed due to heap reversion
+     *    {@see Cache.prepareForNextIteration}
+     *
+     *  After populating class parameters and outers, it is possible to lazily
+     *  compute the field values in class bodies when they are accessed.
+     */
+    private def populateParams(): Contextual[this.type] = log("populating parameters", printer, (_: Warm).objekt.toString) {
+      assert(!populatingParams, "the object is already populating parameters")
+      populatingParams = true
+      val tpl = klass.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template]
+      this.callConstructor(ctor, args.map(arg => ArgInfo(arg, EmptyTree)), tpl)
+      populatingParams = false
+      this
+    }
+
+    def ensureObjectExistsAndPopulated(): Contextual[this.type] =
+      if cache.containsObject(this) then this
+      else this.ensureFresh().populateParams()
+  }
+
+  /** A function value */
+  case class Fun(expr: Tree, thisV: Ref, klass: ClassSymbol, env: Env) extends Value
+
+  /** A value which represents a set of addresses
+   *
+   * It comes from `if` expressions.
+   */
+  case class RefSet(refs: List[Fun | Ref]) extends Value
+
+  // end of value definition
+
+  /** The abstract object which stores value about its fields and immediate outers.
+   *
+   *  Semantically it suffices to store the outer for `klass`. We cache other outers
+   *  for performance reasons.
+   *
+   *  Note: Object is NOT a value.
+   */
+  case class Objekt(val klass: ClassSymbol, val fields: Map[Symbol, Value], val outers: Map[ClassSymbol, Value]) {
+    def field(f: Symbol): Value = fields(f)
+
+    def outer(klass: ClassSymbol) = outers(klass)
+
+    def hasOuter(klass: ClassSymbol) = outers.contains(klass)
+
+    def hasField(f: Symbol) = fields.contains(f)
+  }
+
+  /** The environment for method parameters
+   *
+   *  For performance and usability, we restrict parameters to be either `Cold`
+   *  or `Hot`.
+   *
+   *  Despite that we have environment for evaluating expressions in secondary
+   *  constructors, we don't need to put environment as the cache key. The
+   *  reason is that constructor parameters are determined by the value of
+   *  `this` --- it suffices to make the value of `this` as part of the cache
+   *  key.
+   *
+   *  This crucially depends on the fact that in the initialization process
+   *  there can be exactly one call to a specific constructor for a given
+   *  receiver. However, once we relax the design to allow non-hot values to
+   *  methods and functions, we have to put the environment as part of the cache
+   *  key. The reason is that given the same receiver, a method or function may
+   *  be called with different arguments -- they are not decided by the receiver
+   *  anymore.
+   */
+  object Env {
+    opaque type Env = Map[Symbol, Value]
+
+    val empty: Env = Map.empty
+
+    def apply(bindings: Map[Symbol, Value]): Env = bindings
+
+    def apply(ddef: DefDef, args: List[Value])(using Context): Env =
+      val params = ddef.termParamss.flatten.map(_.symbol)
+      assert(args.size == params.size, "arguments = " + args.size + ", params = " + params.size)
+      params.zip(args).toMap
+
+    extension (env: Env)
+      def lookup(sym: Symbol)(using Context): Value = env(sym)
+
+      def getOrElse(sym: Symbol, default: Value)(using Context): Value = env.getOrElse(sym, default)
+
+      def union(other: Env): Env = env ++ other
+
+      def isHot: Boolean = env.values.forall(_ == Hot)
+  }
+
+  type Env = Env.Env
+  inline def env(using env: Env) = env
+  inline def withEnv[T](env: Env)(op: Env ?=> T): T = op(using env)
+
+  import Env._
+
+  object Promoted {
+    class PromotionInfo {
+      var isCurrentObjectPromoted: Boolean = false
+      val values = mutable.Set.empty[Value]
+      override def toString(): String = values.toString()
+    }
+    /** Values that have been safely promoted */
+    opaque type Promoted = PromotionInfo
+
+    /** Note: don't use `val` to avoid incorrect sharing */
+    def empty: Promoted = new PromotionInfo
+
+    extension (promoted: Promoted)
+      def isCurrentObjectPromoted: Boolean = promoted.isCurrentObjectPromoted
+      def promoteCurrent(thisRef: ThisRef): Unit = promoted.isCurrentObjectPromoted = true
+      def contains(value: Value): Boolean = promoted.values.contains(value)
+      def add(value: Value): Unit = promoted.values += value
+      def remove(value: Value): Unit = promoted.values -= value
+    end extension
+  }
+  type Promoted = Promoted.Promoted
+
+  import Promoted._
+  inline def promoted(using p: Promoted): Promoted = p
+
+  /** Interpreter configuration
+   *
+   * The (abstract) interpreter can be seen as a push-down automaton
+   * that transits between the configurations where the stack is the
+   * implicit call stack of the meta-language.
+   *
+   * It's important that the configuration is finite for the analysis
+   * to terminate.
+   *
+   * For soundness, we need to compute fixed point of the cache, which
+   * maps configuration to evaluation result.
+   *
+   * Thanks to heap monotonicity, heap is not part of the configuration.
+   *
+   * This class is only used for the purpose of documentation.
+   */
+  case class Config(thisV: Value, expr: Tree)
+
+  /** Cache used to terminate the analysis
+   *
+   * A finitary configuration is not enough for the analysis to
+   * terminate.  We need to use cache to let the interpreter "know"
+   * that it can terminate.
+   *
+   * For performance reasons we use curried key.
+   *
+   * Note: It's tempting to use location of trees as key. That should
+   * be avoided as a template may have the same location as its single
+   * statement body. Macros may also create incorrect locations.
+   *
+   */
+
+  object Cache {
+    opaque type CacheStore = mutable.Map[Value, EqHashMap[Tree, Value]]
+    private type Heap = Map[Ref, Objekt]
+
+    class Cache {
+      private var last: CacheStore =  mutable.Map.empty
+      private var current: CacheStore = mutable.Map.empty
+      private val stable: CacheStore = mutable.Map.empty
+      private var changed: Boolean = false
+
+      /** Abstract heap stores abstract objects
+       *
+       *  The heap serves as cache of summaries for warm objects and is shared for checking all classes.
+       *
+       *  The fact that objects of `ThisRef` are stored in heap is just an engineering convenience.
+       *  Technically, we can also store the object directly in `ThisRef`.
+       *
+       *  The heap contains objects of two conceptually distinct kinds.
+       *
+       *  - Objects that are also in `heapStable` are flow-insensitive views of already initialized objects that are
+       *    cached for reuse in analysis of later classes. These objects and their fields should never change; this is
+       *    enforced using assertions.
+       *
+       *  - Objects that are not (yet) in `heapStable` are the flow-sensitive abstract state of objects being analyzed
+       *    in the current iteration of the analysis of the current class. Their fields do change flow-sensitively: more
+       *    fields are added as fields become initialized. These objects are valid only within the current iteration and
+       *    are removed when moving to a new iteration of analyzing the current class. When the analysis of a class
+       *    reaches a fixed point, these now stable flow-sensitive views of the object at the end of the constructor
+       *    of the analyzed class now become the flow-insensitive views of already initialized objects and can therefore
+       *    be added to `heapStable`.
+       */
+      private var heap: Heap = Map.empty
+
+      /** Used to revert heap to last stable heap. */
+      private var heapStable: Heap = Map.empty
+
+      def hasChanged = changed
+
+      def contains(value: Value, expr: Tree) =
+        current.contains(value, expr) || stable.contains(value, expr)
+
+      def apply(value: Value, expr: Tree) =
+        if current.contains(value, expr) then current(value)(expr)
+        else stable(value)(expr)
+
+      /** Copy the value of `(value, expr)` from the last cache to the current cache
+       * (assuming it's `Hot` if it doesn't exist in the cache).
+       *
+       * Then, runs `fun` and updates the caches if the values change.
+       */
+      def assume(value: Value, expr: Tree, cacheResult: Boolean)(fun: => Result): Contextual[Result] =
+        val assumeValue: Value =
+          if last.contains(value, expr) then
+            last.get(value, expr)
+          else
+            last.put(value, expr, Hot)
+            Hot
+          end if
+        current.put(value, expr, assumeValue)
+
+        val actual = fun
+        if actual.value != assumeValue then
+          this.changed = true
+          last.put(value, expr, actual.value)
+          current.put(value, expr, actual.value)
+        else
+          // It's tempting to cache the value in stable, but it's unsound.
+          // The reason is that the current value may depend on other values
+          // which might change.
+          //
+          // stable.put(value, expr, actual)
+          ()
+        end if
+
+        actual
+      end assume
+
+      /** Commit current cache to stable cache. */
+      private def commitToStableCache() =
+        current.foreach { (v, m) =>
+          // It's useless to cache value for ThisRef.
+          if v.isWarm then m.iterator.foreach { (e, res) =>
+            stable.put(v, e, res)
+          }
+        }
+
+      /** Prepare cache for the next iteration
+       *
+       *  1. Reset changed flag.
+       *
+       *  2. Reset current cache (last cache already synced in `assume`).
+       *
+       *  3. Revert heap if unstable.
+       *
+       */
+      def prepareForNextIteration()(using Context) =
+        this.changed = false
+        this.current = mutable.Map.empty
+        this.heap = this.heapStable
+
+      /** Prepare for checking next class
+       *
+       *  1. Reset changed flag.
+       *
+       *  2. Commit current cache to stable cache if not changed.
+       *
+       *  3. Update stable heap if not changed.
+       *
+       *  4. Reset last cache.
+       */
+      def prepareForNextClass()(using Context) =
+        if this.changed then
+          this.changed = false
+          this.heap = this.heapStable
+        else
+          this.commitToStableCache()
+          this.heapStable = this.heap
+
+        this.last = mutable.Map.empty
+        this.current = mutable.Map.empty
+
+      def updateObject(ref: Ref, obj: Objekt) =
+        assert(!this.heapStable.contains(ref))
+        this.heap = this.heap.updated(ref, obj)
+
+      def containsObject(ref: Ref) = heap.contains(ref)
+
+      def getObject(ref: Ref) = heap(ref)
+    }
+
+    extension (cache: CacheStore)
+      def contains(value: Value, expr: Tree) = cache.contains(value) && cache(value).contains(expr)
+      def get(value: Value, expr: Tree): Value = cache(value)(expr)
+      def remove(value: Value, expr: Tree) = cache(value).remove(expr)
+      def put(value: Value, expr: Tree, result: Value): Unit = {
+        val innerMap = cache.getOrElseUpdate(value, new EqHashMap[Tree, Value])
+        innerMap(expr) = result
+      }
+    end extension
+  }
+
+  import Cache._
+
+  inline def cache(using c: Cache): Cache = c
+
+  /** Result of abstract interpretation */
+  case class Result(value: Value, errors: Seq[Error]) {
+    def show(using Context) = value.show + ", errors = " + errors.map(_.toString)
+
+    def ++(errors: Seq[Error]): Result = this.copy(errors = this.errors ++ errors)
+
+    def +(error: Error): Result = this.copy(errors = this.errors :+ error)
+
+    def ensureHot(msg: String, source: Tree): Contextual[Result] =
+      this ++ value.promote(msg, source)
+
+    def select(f: Symbol, source: Tree): Contextual[Result] =
+      value.select(f, source) ++ errors
+
+    def call(meth: Symbol, args: List[ArgInfo], superType: Type, source: Tree): Contextual[Result] =
+      value.call(meth, args, superType, source) ++ errors
+
+    def callConstructor(ctor: Symbol, args: List[ArgInfo], source: Tree): Contextual[Result] =
+      value.callConstructor(ctor, args, source) ++ errors
+
+    def instantiate(klass: ClassSymbol, ctor: Symbol, args: List[ArgInfo], source: Tree): Contextual[Result] =
+      value.instantiate(klass, ctor, args, source) ++ errors
+  }
+
+// ----- Checker State -----------------------------------
+
+  /** The state that threads through the interpreter */
+  type Contextual[T] = (Env, Context, Trace, Promoted, Cache) ?=> T
+
+// ----- Error Handling -----------------------------------
+
+  object Trace {
+    opaque type Trace = Vector[Tree]
+
+    val empty: Trace = Vector.empty
+
+    extension (trace: Trace)
+      def add(node: Tree): Trace = trace :+ node
+      def toVector: Vector[Tree] = trace
+  }
+
+  type Trace = Trace.Trace
+
+  import Trace._
+  def trace(using t: Trace): Trace = t
+  inline def withTrace[T](t: Trace)(op: Trace ?=> T): T = op(using t)
+
+// ----- Operations on domains -----------------------------
+  extension (a: Value)
+    /** Least upper bound of two abstract values.
+     *
+     *  `Hot` is the bottom of the lattice (identity of join), `Cold` the top
+     *  (absorbing); distinct references are collected into a `RefSet`.
+     */
+    def join(b: Value): Value =
+      (a, b) match
+      case (Hot, _)  => b
+      case (_, Hot)  => a
+
+      case (Cold, _) => Cold
+      case (_, Cold) => Cold
+
+      case (a: (Fun | Warm | ThisRef), b: (Fun | Warm | ThisRef)) =>
+        if a == b then a else RefSet(a :: b :: Nil)
+
+      case (a: (Fun | Warm | ThisRef), RefSet(refs)) =>
+        if refs.exists(_ == a) then b: Value // fix pickling test
+        else RefSet(a :: refs)
+
+      case (RefSet(refs), b: (Fun | Warm | ThisRef)) =>
+        if refs.exists(_ == b) then a: Value // fix pickling test
+        else RefSet(b :: refs)
+
+      case (RefSet(refs1), RefSet(refs2)) =>
+        // set union, keeping refs1's order and appending the new members of refs2
+        val diff = refs2.filter(ref => refs1.forall(_ != ref))
+        RefSet(refs1 ++ diff)
+
+    /** Conservatively approximate the value with `Cold` or `Hot` */
+    def widenArg: Value =
+      a match
+      case _: Ref | _: Fun => Cold
+      case RefSet(refs) => refs.map(_.widenArg).join
+      case _ => a
+
+
+  extension (values: Seq[Value])
+    /** Least upper bound of a sequence of values; the empty sequence joins to `Hot`. */
+    def join: Value =
+      values.reduceOption((v1, v2) => v1.join(v2)).getOrElse(Hot)
+
+    /** Widen every value in the sequence (see `widenArg`) and return them as a list. */
+    def widenArgs: List[Value] = values.iterator.map(_.widenArg).toList
+
+
+  extension (ref: Ref)
+    /** Retrieve the abstract object for `ref` from the cache.
+     *
+     *  Warm references are populated on demand before the lookup.
+     */
+    def objekt: Contextual[Objekt] =
+      // TODO: improve performance
+      ref match
+        case warm: Warm => warm.ensureObjectExistsAndPopulated()
+        case _ =>
+      cache.getObject(ref)
+
+    /** Make sure an abstract object exists in the cache for `ref`, creating a fresh one if needed. */
+    def ensureObjectExists()(using Cache): ref.type =
+      if cache.containsObject(ref) then
+        printer.println("object " + ref + " already exists")
+        ref
+      else
+        ensureFresh()
+
+    /** Store a fresh, empty abstract object for `ref` (only the immediate outer is set). */
+    def ensureFresh()(using Cache): ref.type =
+      val obj = Objekt(ref.klass, fields = Map.empty, outers = Map(ref.klass -> ref.outer))
+      printer.println("reset object " + ref)
+      cache.updateObject(ref, obj)
+      ref
+
+    /** Update field value of the abstract object
+     *
+     *  Invariant: fields are immutable and only set once
+     */
+    def updateField(field: Symbol, value: Value): Contextual[Unit] = log("set field " + field + " of " + ref + " to " + value) {
+      val obj = objekt
+      // We may reset the outers or params of a populated warm object.
+      // This is the case if we need access the field of a warm object, which
+      // requires population of parameters and outers; and later create an
+      // instance of the exact warm object, which requires initialization check.
+      //
+      // See tests/init/neg/unsound1.scala
+      assert(!obj.hasField(field) || field.is(Flags.ParamAccessor) && obj.field(field) == value, field.show + " already init, new = " + value + ", old = " + obj.field(field) + ", ref = " + ref)
+      val obj2 = obj.copy(fields = obj.fields.updated(field, value))
+      cache.updateObject(ref, obj2)
+    }
+
+    /** Update the immediate outer of the given `klass` of the abstract object
+     *
+     *  Invariant: outers are immutable and only set once
+     */
+    def updateOuter(klass: ClassSymbol, value: Value): Contextual[Unit] = log("set outer " + klass + " of " + ref + " to " + value) {
+      val obj = objekt
+      // See the comment in `updateField` for setting the value twice.
+      assert(!obj.hasOuter(klass) || obj.outer(klass) == value, klass.show + " already has outer, new = " + value + ", old = " + obj.outer(klass) + ", ref = " + ref)
+      val obj2 = obj.copy(outers = obj.outers.updated(klass, value))
+      cache.updateObject(ref, obj2)
+    }
+  end extension
+
+  extension (value: Value)
+    /** Select `field` on the abstract value.
+     *
+     *  For references, the field is resolved against the run-time class unless
+     *  `needResolve` is false; lazy vals and not-yet-evaluated fields of warm
+     *  objects are evaluated on demand from their right-hand sides.
+     */
+    def select(field: Symbol, source: Tree, needResolve: Boolean = true): Contextual[Result] = log("select " + field.show + ", this = " + value, printer, (_: Result).show) {
+      if promoted.isCurrentObjectPromoted then Result(Hot, Nil)
+      else value match {
+        case Hot  =>
+          Result(Hot, Errors.empty)
+
+        case Cold =>
+          val error = AccessCold(field, source, trace.toVector)
+          Result(Hot, error :: Nil)
+
+        case ref: Ref =>
+          val target = if needResolve then resolve(ref.klass, field) else field
+          val trace1 = trace.add(source)
+          if target.is(Flags.Lazy) then
+            given Trace = trace1
+            val rhs = target.defTree.asInstanceOf[ValDef].rhs
+            eval(rhs, ref, target.owner.asClass, cacheResult = true)
+          else
+            val obj = ref.objekt
+            if obj.hasField(target) then
+              Result(obj.field(target), Nil)
+            else if ref.isInstanceOf[Warm] then
+              assert(obj.klass.isSubClass(target.owner))
+              if target.is(Flags.ParamAccessor) then
+                // possible for trait parameters
+                // see tests/init/neg/trait2.scala
+                //
+                // return `Hot` here, errors are reported in checking `ThisRef`
+                Result(Hot, Nil)
+              else if target.hasSource then
+                val rhs = target.defTree.asInstanceOf[ValOrDefDef].rhs
+                eval(rhs, ref, target.owner.asClass, cacheResult = true)
+              else
+                val error = CallUnknown(field, source, trace.toVector)
+                Result(Hot, error :: Nil)
+            else
+              // field of `this` not initialized yet
+              val error = AccessNonInit(target, trace.add(source).toVector)
+              Result(Hot, error :: Nil)
+
+        case fun: Fun =>
+          report.error("unexpected tree in selecting a function, fun = " + fun.expr.show, source)
+          Result(Hot, Nil)
+
+        case RefSet(refs) =>
+          // select on each member of the set and join values / concatenate errors
+          val resList = refs.map(_.select(field, source))
+          val value2 = resList.map(_.value).join
+          val errors = resList.flatMap(_.errors)
+          Result(value2, errors)
+      }
+    }
+
+    /** Call `meth` on the abstract value with the given arguments.
+     *
+     *  `superType` is the super type for super calls (`NoType` otherwise);
+     *  `needResolve` disables overriding resolution (used for local methods).
+     */
+    def call(meth: Symbol, args: List[ArgInfo], superType: Type, source: Tree, needResolve: Boolean = true): Contextual[Result] = log("call " + meth.show + ", args = " + args, printer, (_: Result).show) {
+      // promote all arguments to hot; def (not val) so it is only forced where needed
+      def checkArgs = args.flatMap(_.promote)
+
+      // synthetic apply of a case-class companion, i.e. `C(...)` sugar for `new C(...)`
+      def isSyntheticApply(meth: Symbol) =
+        meth.is(Flags.Synthetic)
+        && meth.owner.is(Flags.Module)
+        && meth.owner.companionClass.is(Flags.Case)
+
+      def isAlwaysSafe(meth: Symbol) =
+        (meth eq defn.Object_eq)
+        || (meth eq defn.Object_ne)
+        || (meth eq defn.Any_isInstanceOf)
+
+      // fast track if the current object is already initialized
+      if promoted.isCurrentObjectPromoted then Result(Hot, Nil)
+      else if isAlwaysSafe(meth) then Result(Hot, Nil)
+      else if meth eq defn.Any_asInstanceOf then Result(value, Nil)
+      else value match {
+        case Hot  =>
+          if isSyntheticApply(meth) then
+            // treat `C.apply(...)` as `new C(...)`
+            val klass = meth.owner.companionClass.asClass
+            instantiate(klass, klass.primaryConstructor, args, source)
+          else
+            Result(Hot, checkArgs)
+
+        case Cold =>
+          val error = CallCold(meth, source, trace.toVector)
+          Result(Hot, error :: checkArgs)
+
+        case ref: Ref =>
+          val isLocal = !meth.owner.isClass
+          val target =
+            if !needResolve then
+              meth
+            else if superType.exists then
+              resolveSuper(ref.klass, superType, meth)
+            else
+              resolve(ref.klass, meth)
+
+          if target.isOneOf(Flags.Method) then
+            val trace1 = trace.add(source)
+            if target.hasSource then
+              given Trace = trace1
+              val cls = target.owner.enclosingClass.asClass
+              val ddef = target.defTree.asInstanceOf[DefDef]
+              val argErrors = checkArgs
+              // normal method call
+              if argErrors.nonEmpty && isSyntheticApply(meth) then
+                val klass = meth.owner.companionClass.asClass
+                val outerCls = klass.owner.lexicallyEnclosingClass.asClass
+                val outer = resolveOuterSelect(outerCls, ref, 1, source)
+                outer.instantiate(klass, klass.primaryConstructor, args, source)
+              else
+                // local methods keep their captured environment; members get a fresh one
+                withEnv(if isLocal then env else Env.empty) {
+                  eval(ddef.rhs, ref, cls, cacheResult = true) ++ argErrors
+                }
+            else if ref.canIgnoreMethodCall(target) then
+              Result(Hot, Nil)
+            else
+              // no source code available
+              val error = CallUnknown(target, source, trace.toVector)
+              Result(Hot, error :: checkArgs)
+          else
+            // method call resolves to a field
+            val obj = ref.objekt
+            if obj.hasField(target) then
+              Result(obj.field(target), Nil)
+            else
+              value.select(target, source, needResolve = false)
+
+        case Fun(body, thisV, klass, env) =>
+          // meth == NoSymbol for poly functions
+          if meth.name.toString == "tupled" then Result(value, Nil) // a call like `fun.tupled`
+          else
+            withEnv(env) {
+              eval(body, thisV, klass, cacheResult = true) ++ checkArgs
+            }
+
+        case RefSet(refs) =>
+          // call on each member of the set and join values / concatenate errors
+          val resList = refs.map(_.call(meth, args, superType, source))
+          val value2 = resList.map(_.value).join
+          val errors = resList.flatMap(_.errors)
+          Result(value2, errors)
+      }
+    }
+
+    /** Call constructor `ctor` on the abstract value (which must be a `Ref`).
+     *
+     *  A warm reference still populating its parameters only runs the primary
+     *  constructor template / delegated constructor call; a fully tracked
+     *  reference evaluates the whole constructor body.
+     */
+    def callConstructor(ctor: Symbol, args: List[ArgInfo], source: Tree): Contextual[Result] = log("call " + ctor.show + ", args = " + args, printer, (_: Result).show) {
+      // init "fake" param fields for the secondary constructor
+      def addParamsAsFields(env: Env, ref: Ref, ctorDef: DefDef) = {
+        val paramSyms = ctorDef.termParamss.flatten.map(_.symbol)
+        paramSyms.map { acc =>
+          val value = env.lookup(acc)
+          ref.updateField(acc, value)
+          printer.println(acc.show + " initialized with " + value)
+        }
+      }
+      value match {
+        case Hot | Cold | _: RefSet | _: Fun =>
+          report.error("unexpected constructor call, meth = " + ctor + ", value = " + value, source)
+          Result(Hot, Nil)
+
+        case ref: Warm if ref.isPopulatingParams =>
+          val trace1 = trace.add(source)
+          if ctor.hasSource then
+            given Trace = trace1
+            val cls = ctor.owner.enclosingClass.asClass
+            val ddef = ctor.defTree.asInstanceOf[DefDef]
+            given Env = Env(ddef, args.map(_.value).widenArgs)
+            if ctor.isPrimaryConstructor then
+              val tpl = cls.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template]
+              init(tpl, ref, cls)
+            else
+              addParamsAsFields(env, ref, ddef)
+              // only follow the delegated `this(...)`/`super(...)` call, not the full body
+              val initCall = ddef.rhs match
+                case Block(call :: _, _) => call
+                case call => call
+              eval(initCall, ref, cls)
+            end if
+          else
+            Result(Hot, Nil)
+
+        case ref: Ref =>
+          val trace1 = trace.add(source)
+          if ctor.hasSource then
+            given Trace = trace1
+            val cls = ctor.owner.enclosingClass.asClass
+            val ddef = ctor.defTree.asInstanceOf[DefDef]
+            given Env = Env(ddef, args.map(_.value).widenArgs)
+            if ctor.isPrimaryConstructor then
+              val tpl = cls.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template]
+              val res = withTrace(trace.add(cls.defTree)) { eval(tpl, ref, cls, cacheResult = true) }
+              Result(ref, res.errors)
+            else
+              addParamsAsFields(env, ref, ddef)
+              eval(ddef.rhs, ref, cls, cacheResult = true)
+          else if ref.canIgnoreMethodCall(ctor) then
+            Result(Hot, Nil)
+          else
+            // no source code available
+            val error = CallUnknown(ctor, source, trace.toVector)
+            Result(Hot, error :: Nil)
+      }
+
+    }
+
+    /** Handle a new expression `new p.C` where `p` is abstracted by `value` */
+    def instantiate(klass: ClassSymbol, ctor: Symbol, args: List[ArgInfo], source: Tree): Contextual[Result] = log("instantiating " + klass.show + ", value = " + value + ", args = " + args, printer, (_: Result).show) {
+      val trace1 = trace.add(source)
+      if promoted.isCurrentObjectPromoted then Result(Hot, Nil)
+      else value match {
+        case Hot  =>
+          // try to keep the instance Hot: promote each argument, falling back
+          // to a Warm object when any argument fails to promote
+          val buffer = new mutable.ArrayBuffer[Error]
+          val args2 = args.map { arg =>
+            val errors = arg.promote
+            buffer ++= errors
+            if errors.isEmpty then Hot
+            else arg.value.widenArg
+          }
+
+          if buffer.isEmpty then
+            Result(Hot, Errors.empty)
+          else
+            val outer = Hot
+            val warm = Warm(klass, outer, ctor, args2).ensureObjectExists()
+            val argInfos2 = args.zip(args2).map { (argInfo, v) => argInfo.copy(value = v) }
+            val res = warm.callConstructor(ctor, argInfos2, source)
+            Result(warm, res.errors)
+
+        case Cold =>
+          val error = CallCold(ctor, source, trace1.toVector)
+          Result(Hot, error :: Nil)
+
+        case ref: Ref =>
+          given Trace = trace1
+          // widen the outer to finitize the domain
+          val outer = ref match
+            case warm @ Warm(_, _: Warm, _, _) =>
+              // the widened warm object might not exist in the heap
+              warm.copy(outer = Cold).ensureObjectExistsAndPopulated()
+            case _ => ref
+
+          val argsWidened = args.map(_.value).widenArgs
+          val argInfos2 = args.zip(argsWidened).map { (argInfo, v) => argInfo.copy(value = v) }
+          val warm = Warm(klass, outer, ctor, argsWidened).ensureObjectExists()
+          val res = warm.callConstructor(ctor, argInfos2, source)
+          Result(warm, res.errors)
+
+        case Fun(body, thisV, klass, env) =>
+          report.error("unexpected tree in instantiating a function, fun = " + body.show, source)
+          Result(Hot, Nil)
+
+        case RefSet(refs) =>
+          // instantiate from each member of the set and join values / concatenate errors
+          val resList = refs.map(_.instantiate(klass, ctor, args, source))
+          val value2 = resList.map(_.value).join
+          val errors = resList.flatMap(_.errors)
+          Result(value2, errors)
+      }
+    }
+  end extension
+
+  extension (ref: Ref)
+    /** Access a local variable or parameter denoted by `tmref` from within `klass`. */
+    def accessLocal(tmref: TermRef, klass: ClassSymbol, source: Tree): Contextual[Result] =
+      val sym = tmref.symbol
+
+      def default() = Result(Hot, Nil)
+
+      if sym.is(Flags.Param) && sym.owner.isConstructor then
+        // if we can get the field from the Ref (which can only possibly be
+        // a secondary constructor parameter), then use it.
+        if (ref.objekt.hasField(sym))
+          Result(ref.objekt.field(sym), Errors.empty)
+        // instances of local classes inside secondary constructors cannot
+        // reach here, as those values are abstracted by Cold instead of Warm.
+        // This enables us to simplify the domain without sacrificing
+        // expressiveness nor soundess, as local classes inside secondary
+        // constructors are uncommon.
+        else if sym.isContainedIn(klass) then
+          Result(env.lookup(sym), Nil)
+        else
+          // We don't know much about secondary constructor parameters in outer scope.
+          // It's always safe to approximate them with `Cold`.
+          Result(Cold, Nil)
+      else if sym.is(Flags.Param) then
+        default()
+      else
+        sym.defTree match {
+          case vdef: ValDef =>
+            // resolve this for local variable
+            val enclosingClass = sym.owner.enclosingClass.asClass
+            val thisValue2 = resolveThis(enclosingClass, ref, klass, source)
+            thisValue2 match {
+              case Hot => Result(Hot, Errors.empty)
+
+              case Cold => Result(Cold, Nil)
+
+              case ref: Ref => eval(vdef.rhs, ref, enclosingClass)
+
+              case _ =>
+                 report.error("unexpected defTree when accessing local variable, sym = " + sym.show + ", defTree = " + sym.defTree.show, source)
+                 default()
+            }
+
+          case _ => default()
+        }
+  end extension
+
+// ----- Promotion ----------------------------------------------------
+  extension (ref: Ref)
+    /** Whether the object is fully assigned
+     *
+     *  It means all fields and outers are set. For performance, we don't check
+     *  outers here, because Scala semantics ensure that they are always set
+     *  before any user code in the constructor.
+     *
+     *  Note that `isFullyFilled = true` does not mean we can use the
+     *  object freely, as its fields or outers may still reach uninitialized
+     *  objects.
+     */
+    def isFullyFilled: Contextual[Boolean] = log("isFullyFilled " + ref, printer) {
+      val obj = ref.objekt
+      ref.klass.baseClasses.forall { klass =>
+        !klass.hasSource || {
+          // declared term members that still have no value in the abstract object
+          val nonInits = klass.info.decls.filter { member =>
+            !member.isOneOf(Flags.Method | Flags.Lazy | Flags.Deferred)
+            && !member.isType
+            && !obj.hasField(member)
+          }
+          printer.println("nonInits = " + nonInits)
+          nonInits.isEmpty
+        }
+      }
+    }
+
+  end extension
+
+  extension (thisRef: ThisRef)
+    /** Promote the current object to hot if all its fields are assigned. */
+    def tryPromoteCurrentObject: Contextual[Boolean] = log("tryPromoteCurrentObject ", printer) {
+      if promoted.isCurrentObjectPromoted then
+        true
+      else if thisRef.isFullyFilled then
+        // If we have all fields initialized, then we can promote This to hot.
+        promoted.promoteCurrent(thisRef)
+        true
+      else
+        false
+    }
+
+  extension (value: Value)
+    /** Promotion of values to hot */
+    def promote(msg: String, source: Tree): Contextual[List[Error]] = log("promoting " + value + ", promoted = " + promoted, printer) {
+      if promoted.isCurrentObjectPromoted then Nil else
+
+      value.match
+      case Hot   =>  Nil
+
+      case Cold  =>  PromoteError(msg, source, trace.toVector) :: Nil
+
+      case thisRef: ThisRef =>
+        if thisRef.tryPromoteCurrentObject then Nil
+        else PromoteError(msg, source, trace.toVector) :: Nil
+
+      case warm: Warm =>
+        if promoted.contains(warm) then Nil
+        else {
+          // optimistically add, roll back if promotion fails
+          promoted.add(warm)
+          val errors = warm.tryPromote(msg, source)
+          if errors.nonEmpty then promoted.remove(warm)
+          errors
+        }
+
+      case fun @ Fun(body, thisV, klass, env) =>
+        if promoted.contains(fun) then Nil
+        else
+          // a function is hot if evaluating its body yields a promotable value
+          val res = withEnv(env) { eval(body, thisV, klass) }
+          val errors2 = res.value.promote(msg, source)
+          if (res.errors.nonEmpty || errors2.nonEmpty)
+            UnsafePromotion(msg, source, trace.toVector, res.errors ++ errors2) :: Nil
+          else
+            promoted.add(fun)
+            Nil
+
+      case RefSet(refs) =>
+        refs.flatMap(_.promote(msg, source))
+    }
+  end extension
+
+  extension (warm: Warm)
+    /** Try early promotion of warm objects
+     *
+     *  Promotion is expensive and should only be performed for small classes.
+     *
+     *  1. for each concrete method `m` of the warm object:
+     *     call the method and promote the result
+     *
+     *  2. for each concrete field `f` of the warm object:
+     *     promote the field value
+     *
+     *  If the object contains nested classes as members, the checker simply
+     *  reports a warning to avoid expensive checks.
+     *
+     *  TODO: we need to revisit whether this is needed once we make the
+     *  system more flexible in other dimentions: e.g. leak to
+     *  methods or constructors, or use ownership for creating cold data structures.
+     */
+    def tryPromote(msg: String, source: Tree): Contextual[List[Error]] = log("promote " + warm.show + ", promoted = " + promoted, printer) {
+      val classRef = warm.klass.appliedRef
+      // bail out early for classes with member classes or unassigned fields
+      if classRef.memberClasses.nonEmpty || !warm.isFullyFilled then
+        return PromoteError(msg, source, trace.toVector) :: Nil
+
+      val buffer  = new mutable.ArrayBuffer[Error]
+
+      // `exists` short-circuits the scan as soon as the first error is found
+      warm.klass.baseClasses.exists { klass =>
+        klass.hasSource && klass.info.decls.exists { member =>
+          if !member.isType && !member.isConstructor && member.hasSource  && !member.is(Flags.Deferred) then
+            if member.is(Flags.Method) then
+              val trace2 = trace.add(source)
+              locally {
+                given Trace = trace2
+                val args = member.info.paramInfoss.flatten.map(_ => ArgInfo(Hot, EmptyTree))
+                val res = warm.call(member, args, superType = NoType, source = member.defTree)
+                buffer ++= res.ensureHot(msg, source).errors
+              }
+            else
+              val res = warm.select(member, source)
+              buffer ++= res.ensureHot(msg, source).errors
+          buffer.nonEmpty
+        }
+      }
+
+      if buffer.isEmpty then Nil
+      else UnsafePromotion(msg, source, trace.toVector, buffer.toList) :: Nil
+    }
+
+  end extension
+
+// ----- Policies ------------------------------------------------------
+  extension (value: Ref)
+    /** Can the method call on `value` be ignored?
+     *
+     *  Methods declared on `Any`, `AnyVal` and `Object` never touch
+     *  user-defined state, so calling them on an under-initialized object
+     *  is always safe.
+     *
+     *  Note: assume overriding resolution has been performed.
+     */
+    def canIgnoreMethodCall(meth: Symbol)(using Context): Boolean =
+      val owner = meth.owner
+      owner == defn.AnyClass
+      || owner == defn.AnyValClass
+      || owner == defn.ObjectClass
+
+// ----- Work list ---------------------------------------------------
+  /** A unit of work: check the class behind `value` for initialization safety. */
+  case class Task(value: ThisRef)
+
+  /** Queue of classes waiting to be checked; processed once by `work()`. */
+  class WorkList private[Semantic]() {
+    private var pendingTasks: List[Task] = Nil
+
+    /** Enqueue `task` unless it is already pending. */
+    def addTask(task: Task): Unit =
+      if !pendingTasks.contains(task) then pendingTasks = task :: pendingTasks
+
+    /** Process the worklist until done */
+    final def work()(using Cache, Context): Unit =
+      for task <- pendingTasks
+      do doTask(task)
+
+    /** Check an individual class
+     *
+     *  This method should only be called from the work list scheduler.
+     */
+    private def doTask(task: Task)(using Cache, Context): Unit = {
+      val thisRef = task.value
+      val tpl = thisRef.klass.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template]
+
+      // constructor parameters are assumed hot for the class under check
+      val paramValues = tpl.constr.termParamss.flatten.map(param => param.symbol -> Hot).toMap
+
+      @tailrec
+      def iterate(): Unit = {
+        given Promoted = Promoted.empty
+        given Trace = Trace.empty
+        given Env = Env(paramValues)
+
+        thisRef.ensureFresh()
+        val res = log("checking " + task) { eval(tpl, thisRef, thisRef.klass) }
+        res.errors.foreach(_.issue)
+
+        // fixed-point iteration: repeat while the cache still changes and no error surfaced
+        if cache.hasChanged && res.errors.isEmpty then
+          // code to prepare cache and heap for next iteration
+          cache.prepareForNextIteration()
+          iterate()
+        else
+          cache.prepareForNextClass()
+      }
+
+      iterate()
+    }
+  }
+  /** Summon the current work list from the context. */
+  inline def workList(using wl: WorkList): WorkList = wl
+
+// ----- API --------------------------------
+
+  /** Add a checking task to the work list */
+  def addTask(thisRef: ThisRef)(using WorkList) = workList.addTask(Task(thisRef))
+
+  /** Perform check on the work list until it becomes empty
+   *
+   *  Should only be called once from the checker.
+   */
+  def check()(using Cache, WorkList, Context) = workList.work()
+
+  /** Perform actions with initial checking state.
+   *
+   *      Semantic.withInitialState {
+   *         Semantic.addTask(...)
+   *         ...
+   *         Semantic.check()
+   *      }
+   */
+  def withInitialState[T](work: (Cache, WorkList) ?=> T): T = {
+    // fresh cache and work list per checking session
+    work(using new Cache, new WorkList)
+  }
+
+// ----- Semantic definition --------------------------------
+
+  /** Utility definition used for better error-reporting of argument errors */
+  case class ArgInfo(value: Value, source: Tree) {
+    /** Promote the argument value to hot, reporting at the argument's own source position. */
+    def promote: Contextual[List[Error]] = value.promote("Only initialized values may be used as arguments", source)
+  }
+
+  /** Evaluate an expression with the given value for `this` in a given class `klass`
+   *
+   *  Note that `klass` might be a super class of the object referred by `thisV`.
+   *  The parameter `klass` is needed for `this` resolution. Consider the following code:
+   *
+   *  class A {
+   *    A.this
+   *    class B extends A { A.this }
+   *  }
+   *
+   *  As can be seen above, the meaning of the expression `A.this` depends on where
+   *  it is located.
+   *
+   *  This method only handles cache logic and delegates the work to `cases`.
+   */
+  def eval(expr: Tree, thisV: Ref, klass: ClassSymbol, cacheResult: Boolean = false): Contextual[Result] = log("evaluating " + expr.show + ", this = " + thisV.show + " in " + klass.show, printer, (_: Result).show) {
+    if (cache.contains(thisV, expr)) Result(cache(thisV, expr), Errors.empty)
+    else cache.assume(thisV, expr, cacheResult) { cases(expr, thisV, klass) }
+  }
+
+  /** Evaluate a list of expressions left to right, one `Result` per expression. */
+  def eval(exprs: List[Tree], thisV: Ref, klass: ClassSymbol): Contextual[List[Result]] =
+    for expr <- exprs yield eval(expr, thisV, klass)
+
+  /** Evaluate arguments of methods, returning accumulated errors and per-argument info.
+   *
+   *  By-name arguments are not evaluated here; they are wrapped as `Fun`
+   *  values capturing the current environment.
+   */
+  def evalArgs(args: List[Arg], thisV: Ref, klass: ClassSymbol): Contextual[(List[Error], List[ArgInfo])] =
+    val evaluated = args.map { arg =>
+      val res =
+        if arg.isByName then
+          Result(Fun(arg.tree, thisV, klass, env), Nil)
+        else
+          eval(arg.tree, thisV, klass)
+      (res.errors, ArgInfo(res.value, arg.tree))
+    }
+    val (errorss, argInfos) = evaluated.unzip
+    (errorss.flatten, argInfos)
+
+  /** Handles the evaluation of different expressions
+   *
+   *  Note: Recursive call should go to `eval` instead of `cases`.
+   */
+  def cases(expr: Tree, thisV: Ref, klass: ClassSymbol): Contextual[Result] =
+    expr match {
+      case Ident(nme.WILDCARD) =>
+        // TODO:  disallow `var x: T = _`
+        Result(Hot, Errors.empty)
+
+      case id @ Ident(name) if !id.symbol.is(Flags.Method)  =>
+        assert(name.isTermName, "type trees should not reach here")
+        cases(expr.tpe, thisV, klass, expr)
+
+      case NewExpr(tref, New(tpt), ctor, argss) =>
+        // check args
+        val (errors, args) = evalArgs(argss.flatten, thisV, klass)
+
+        val cls = tref.classSymbol.asClass
+        val res = outerValue(tref, thisV, klass, tpt)
+        val trace2 = trace.add(expr)
+        locally {
+          given Trace = trace2
+          (res ++ errors).instantiate(cls, ctor, args, source = expr)
+        }
+
+      case Call(ref, argss) =>
+        // check args
+        val (errors, args) = evalArgs(argss.flatten, thisV, klass)
+
+        ref match
+        case Select(supert: Super, _) =>
+          // super call: resolve `this` against the super type
+          val SuperType(thisTp, superTp) = supert.tpe
+          val thisValue2 = resolveThis(thisTp.classSymbol.asClass, thisV, klass, ref)
+          Result(thisValue2, errors).call(ref.symbol, args, superTp, expr)
+
+        case Select(qual, _) =>
+          val res = eval(qual, thisV, klass) ++ errors
+          if ref.symbol.isConstructor then
+            res.callConstructor(ref.symbol, args, source = expr)
+          else
+            res.call(ref.symbol, args, superType = NoType, source = expr)
+
+        case id: Ident =>
+          id.tpe match
+          case TermRef(NoPrefix, _) =>
+            // resolve this for the local method
+            val enclosingClass = id.symbol.owner.enclosingClass.asClass
+            val thisValue2 = resolveThis(enclosingClass, thisV, klass, id)
+            // local methods are not a member, but we can reuse the method `call`
+            thisValue2.call(id.symbol, args, superType = NoType, expr, needResolve = false)
+          case TermRef(prefix, _) =>
+            val res = cases(prefix, thisV, klass, id) ++ errors
+            if id.symbol.isConstructor then
+              res.callConstructor(id.symbol, args, source = expr)
+            else
+              res.call(id.symbol, args, superType = NoType, source = expr)
+
+      case Select(qualifier, name) =>
+        val qualRes = eval(qualifier, thisV, klass)
+
+        name match
+          case OuterSelectName(_, hops) =>
+            // compiler-synthesized outer selection, `hops` levels up
+            val SkolemType(tp) = expr.tpe
+            val outer = resolveOuterSelect(tp.classSymbol.asClass, qualRes.value, hops, source = expr)
+            Result(outer, qualRes.errors)
+          case _ =>
+            qualRes.select(expr.symbol, expr)
+
+      case _: This =>
+        cases(expr.tpe, thisV, klass, expr)
+
+      case Literal(_) =>
+        Result(Hot, Errors.empty)
+
+      case Typed(expr, tpt) =>
+        if (tpt.tpe.hasAnnotation(defn.UncheckedAnnot)) Result(Hot, Errors.empty)
+        else eval(expr, thisV, klass) ++ checkTermUsage(tpt, thisV, klass)
+
+      case NamedArg(name, arg) =>
+        eval(arg, thisV, klass)
+
+      case Assign(lhs, rhs) =>
+        lhs match
+        case Select(qual, _) =>
+          val res = eval(qual, thisV, klass)
+          eval(rhs, thisV, klass).ensureHot("May only assign fully initialized value", rhs) ++ res.errors
+        case id: Ident =>
+          eval(rhs, thisV, klass).ensureHot("May only assign fully initialized value", rhs)
+
+      case closureDef(ddef) =>
+        // closures are not evaluated here; capture the environment in a Fun
+        val value = Fun(ddef.rhs, thisV, klass, env)
+        Result(value, Nil)
+
+      case PolyFun(body) =>
+        val value = Fun(body, thisV, klass, env)
+        Result(value, Nil)
+
+      case Block(stats, expr) =>
+        val ress = eval(stats, thisV, klass)
+        eval(expr, thisV, klass) ++ ress.flatMap(_.errors)
+
+      case If(cond, thenp, elsep) =>
+        // join both branches, since either may be taken at run time
+        val ress = eval(cond :: thenp :: elsep :: Nil, thisV, klass)
+        val value = ress.map(_.value).join
+        val errors = ress.flatMap(_.errors)
+        Result(value, errors)
+
+      case Annotated(arg, annot) =>
+        if (expr.tpe.hasAnnotation(defn.UncheckedAnnot)) Result(Hot, Errors.empty)
+        else eval(arg, thisV, klass)
+
+      case Match(selector, cases) =>
+        val res1 = eval(selector, thisV, klass).ensureHot("The value to be matched needs to be fully initialized", selector)
+        val ress = eval(cases.map(_.body), thisV, klass)
+        val value = ress.map(_.value).join
+        val errors = res1.errors ++ ress.flatMap(_.errors)
+        Result(value, errors)
+
+      case Return(expr, from) =>
+        eval(expr, thisV, klass).ensureHot("return expression may only be initialized value", expr)
+
+      case WhileDo(cond, body) =>
+        val ress = eval(cond :: body :: Nil, thisV, klass)
+        Result(Hot, ress.flatMap(_.errors))
+
+      case Labeled(_, expr) =>
+        eval(expr, thisV, klass)
+
+      case Try(block, cases, finalizer) =>
+        val res1 = eval(block, thisV, klass)
+        val ress = eval(cases.map(_.body), thisV, klass)
+        val errors = ress.flatMap(_.errors)
+        val resValue = ress.map(_.value).join
+        if finalizer.isEmpty then
+          Result(resValue, res1.errors ++ errors)
+        else
+          val res2 = eval(finalizer, thisV, klass)
+          Result(resValue, res1.errors ++ errors ++ res2.errors)
+
+      case SeqLiteral(elems, elemtpt) =>
+        val ress = elems.map { elem =>
+          eval(elem, thisV, klass).ensureHot("May only use initialized value as method arguments", elem)
+        }
+        Result(Hot, ress.flatMap(_.errors))
+
+      case Inlined(call, bindings, expansion) =>
+        val ress = eval(bindings, thisV, klass)
+        eval(expansion, thisV, klass) ++ ress.flatMap(_.errors)
+
+      case Thicket(List()) =>
+        // possible in try/catch/finally, see tests/crash/i6914.scala
+        Result(Hot, Errors.empty)
+
+      case vdef : ValDef =>
+        // local val definition
+        // TODO: support explicit @cold annotation for local definitions
+        eval(vdef.rhs, thisV, klass)
+
+      case ddef : DefDef =>
+        // local method
+        Result(Hot, Errors.empty)
+
+      case tdef: TypeDef =>
+        // local type definition
+        if tdef.isClassDef then Result(Hot, Errors.empty)
+        else Result(Hot, checkTermUsage(tdef.rhs, thisV, klass))
+
+      case tpl: Template =>
+        init(tpl, thisV, klass)
+
+      case _: Import | _: Export =>
+        Result(Hot, Errors.empty)
+
+      case _ =>
+        throw new Exception("unexpected tree: " + expr.show)
+    }
+
+  /** Handle semantics of leaf nodes */
+  def cases(tp: Type, thisV: Ref, klass: ClassSymbol, source: Tree): Contextual[Result] = log("evaluating " + tp.show, printer, (_: Result).show) {
+    tp match {
+      case _: ConstantType =>
+        Result(Hot, Errors.empty)
+
+      case tmref: TermRef if tmref.prefix == NoPrefix =>
+        thisV.accessLocal(tmref, klass, source)
+
+      case tmref: TermRef =>
+        cases(tmref.prefix, thisV, klass, source).select(tmref.symbol, source)
+
+      case tp @ ThisType(tref) =>
+        val cls = tref.classSymbol.asClass
+        if cls.isStaticOwner && !klass.isContainedIn(cls) then
+          // O.this outside the body of the object O
+          Result(Hot, Nil)
+        else
+          val value = resolveThis(cls, thisV, klass, source)
+          Result(value, Errors.empty)
+
+      case _: TermParamRef | _: RecThis  =>
+        // possible from checking effects of types
+        Result(Hot, Errors.empty)
+
+      case _ =>
+        throw new Exception("unexpected type: " + tp)
+    }
+  }
+
+  /** Resolve C.this that appear in `klass` */
+  def resolveThis(target: ClassSymbol, thisV: Value, klass: ClassSymbol, source: Tree): Contextual[Value] = log("resolving " + target.show + ", this = " + thisV.show + " in " + klass.show, printer, (_: Value).show) {
+    if target == klass then thisV
+    else if target.is(Flags.Package) then Hot
+    else
+      thisV match
+        case Hot => Hot
+        case ref: Ref =>
+          val obj = ref.objekt
+          val outerCls = klass.owner.lexicallyEnclosingClass.asClass
+          if !obj.hasOuter(klass) then
+            val error = PromoteError("outer not yet initialized, target = " + target + ", klass = " + klass + ", object = " + obj, source, trace.toVector)
+            report.error(error.show + error.stacktrace, source)
+            Hot
+          else
+            resolveThis(target, obj.outer(klass), outerCls, source)
+        case RefSet(refs) =>
+          refs.map(ref => resolveThis(target, ref, klass, source)).join
+        case fun: Fun =>
+          report.warning("unexpected thisV = " + thisV + ", target = " + target.show + ", klass = " + klass.show, source.srcPos)
+          Cold
+        case Cold => Cold
+
+  }
+
+  /** Resolve outer select introduced during inlining.
+   *
+   *  See `tpd.outerSelect` and `ElimOuterSelect`.
+   */
+  def resolveOuterSelect(target: ClassSymbol, thisV: Value, hops: Int, source: Tree): Contextual[Value] = log("resolving outer " + target.show + ", this = " + thisV.show + ", hops = " + hops, printer, (_: Value).show) {
+    // Is `target` reachable from `cls` with the given `hops`?
+    def reachable(cls: ClassSymbol, hops: Int): Boolean = log("reachable from " + cls + " -> " + target + " in " + hops, printer) {
+      if hops == 0 then cls == target
+      else reachable(cls.owner.lexicallyEnclosingClass.asClass, hops - 1)
+    }
+
+    thisV match
+      case Hot => Hot
+
+      case ref: Ref =>
+        val obj = ref.objekt
+        val curOpt = obj.klass.baseClasses.find(cls => reachable(cls, hops))
+        curOpt match
+          case Some(cur) =>
+            resolveThis(target, thisV, cur, source)
+
+          case None =>
+            report.warning("unexpected outerSelect, thisV = " + thisV + ", target = " + target.show + ", hops = " + hops, source.srcPos)
+            Cold
+
+      case RefSet(refs) =>
+        refs.map(ref => resolveOuterSelect(target, ref, hops, source)).join
+
+      case fun: Fun =>
+        report.warning("unexpected thisV = " + thisV + ", target = " + target.show + ", hops = " + hops, source.srcPos)
+        Cold
+
+      case Cold => Cold
+  }
+
+  /** Compute the outer value that corresponds to `tref.prefix` */
+  def outerValue(tref: TypeRef, thisV: Ref, klass: ClassSymbol, source: Tree): Contextual[Result] =
+    val cls = tref.classSymbol.asClass
+    if tref.prefix == NoPrefix then
+      val enclosing = cls.owner.lexicallyEnclosingClass.asClass
+      val outerV = resolveThis(enclosing, thisV, klass, source)
+      Result(outerV, Errors.empty)
+    else
+      if cls.isAllOf(Flags.JavaInterface) then Result(Hot, Nil)
+      else cases(tref.prefix, thisV, klass, source)
+
+  /** Initialize part of an abstract object in `klass` of the inheritance chain */
+  def init(tpl: Template, thisV: Ref, klass: ClassSymbol): Contextual[Result] = log("init " + klass.show, printer, (_: Result).show) {
+    val errorBuffer = new mutable.ArrayBuffer[Error]
+
+    val paramsMap = tpl.constr.termParamss.flatten.map { vdef =>
+      vdef.name -> env.lookup(vdef.symbol)
+    }.toMap
+
+    // init param fields
+    klass.paramGetters.foreach { acc =>
+      val value = paramsMap(acc.name.toTermName)
+      thisV.updateField(acc, value)
+      printer.println(acc.show + " initialized with " + value)
+    }
+
+    // Tasks is used to schedule super constructor calls.
+    // Super constructor calls are delayed until all outers are set.
+    type Tasks = mutable.ArrayBuffer[() => Unit]
+    def superCall(tref: TypeRef, ctor: Symbol, args: List[ArgInfo], source: Tree, tasks: Tasks)(using Env): Unit =
+      val cls = tref.classSymbol.asClass
+      // update outer for super class
+      val res = outerValue(tref, thisV, klass, source)
+      errorBuffer ++= res.errors
+      thisV.updateOuter(cls, res.value)
+
+      // follow constructor
+      if cls.hasSource then
+        tasks.append { () =>
+          printer.println("init super class " + cls.show)
+          val res2 = thisV.callConstructor(ctor, args, source)
+          errorBuffer ++= res2.errors
+          ()
+        }
+
+    // parents
+    def initParent(parent: Tree, tasks: Tasks)(using Env) = parent match {
+      case tree @ Block(stats, NewExpr(tref, New(tpt), ctor, argss)) =>  // can happen
+        eval(stats, thisV, klass).foreach { res => errorBuffer ++= res.errors }
+        val (errors, args) = evalArgs(argss.flatten, thisV, klass)
+        errorBuffer ++= errors
+        superCall(tref, ctor, args, tree, tasks)
+
+      case tree @ NewExpr(tref, New(tpt), ctor, argss) =>       // extends A(args)
+      val (errors, args) = evalArgs(argss.flatten, thisV, klass)
+      errorBuffer ++= errors
+      superCall(tref, ctor, args, tree, tasks)
+
+      case _ =>   // extends A or extends A[T]
+        val tref = typeRefOf(parent.tpe)
+        superCall(tref, tref.classSymbol.primaryConstructor, Nil, parent, tasks)
+    }
+
+    // see spec 5.1 about "Template Evaluation".
+    // https://www.scala-lang.org/files/archive/spec/2.13/05-classes-and-objects.html
+    if !klass.is(Flags.Trait) then
+      given Env = Env.empty
+
+      // outers are set first
+      val tasks = new mutable.ArrayBuffer[() => Unit]
+
+      // 1. first init parent class recursively
+      // 2. initialize traits according to linearization order
+      val superParent = tpl.parents.head
+      val superCls = superParent.tpe.classSymbol.asClass
+      initParent(superParent, tasks)
+
+      val parents = tpl.parents.tail
+      val mixins = klass.baseClasses.tail.takeWhile(_ != superCls)
+
+      // The interesting case is the outers for traits.  The compiler
+      // synthesizes proxy accessors for the outers in the class that extends
+      // the trait. As those outers must be stable values, they are initialized
+      // immediately following class parameters and before super constructor
+      // calls and user code in the class body.
+      mixins.reverse.foreach { mixin =>
+        parents.find(_.tpe.classSymbol == mixin) match
+        case Some(parent) => initParent(parent, tasks)
+        case None =>
+          // According to the language spec, if the mixin trait requires
+          // arguments, then the class must provide arguments to it explicitly
+          // in the parent list. That means we will encounter it in the Some
+          // branch.
+          //
+          // When a trait A extends a parameterized trait B, it cannot provide
+          // term arguments to B. That can only be done in a concrete class.
+          val tref = typeRefOf(klass.typeRef.baseType(mixin).typeConstructor)
+          val ctor = tref.classSymbol.primaryConstructor
+          if ctor.exists then superCall(tref, ctor, Nil, superParent, tasks)
+      }
+
+      // initialize super classes after outers are set
+      tasks.foreach(task => task())
+    end if
+
+    var fieldsChanged = true
+
+    // class body
+    if thisV.isThisRef || !thisV.asInstanceOf[Warm].isPopulatingParams then tpl.body.foreach {
+      case vdef : ValDef if !vdef.symbol.is(Flags.Lazy) && !vdef.rhs.isEmpty =>
+        given Env = Env.empty
+        val res = eval(vdef.rhs, thisV, klass)
+        errorBuffer ++= res.errors
+        thisV.updateField(vdef.symbol, res.value)
+        fieldsChanged = true
+
+      case _: MemberDef =>
+
+      case tree =>
+        if fieldsChanged && thisV.isThisRef then thisV.asInstanceOf[ThisRef].tryPromoteCurrentObject
+        fieldsChanged = false
+
+        given Env = Env.empty
+        errorBuffer ++= eval(tree, thisV, klass).errors
+    }
+
+    // The result value is ignored, use Hot to avoid futile fixed point computation
+    Result(Hot, errorBuffer.toList)
+  }
+
+  /** Check that paths in path-dependent types are initialized
+   *
+   *  This is intended to avoid type soundness issues in Dotty.
+   */
+  def checkTermUsage(tpt: Tree, thisV: Ref, klass: ClassSymbol): Contextual[List[Error]] =
+    val buf = new mutable.ArrayBuffer[Error]
+    val traverser = new TypeTraverser {
+      def traverse(tp: Type): Unit = tp match {
+        case TermRef(_: SingletonType, _) =>
+          buf ++= cases(tp, thisV, klass, tpt).errors
+        case _ =>
+          traverseChildren(tp)
+      }
+    }
+    traverser.traverse(tpt.tpe)
+    buf.toList
+
+// ----- Utility methods and extractors --------------------------------
+
+  def typeRefOf(tp: Type)(using Context): TypeRef = tp.dealias.typeConstructor match {
+    case tref: TypeRef => tref
+    case hklambda: HKTypeLambda => typeRefOf(hklambda.resType)
+  }
+
+  opaque type Arg  = Tree | ByNameArg
+  case class ByNameArg(tree: Tree)
+
+  extension (arg: Arg)
+    def isByName = arg.isInstanceOf[ByNameArg]
+    def tree: Tree = arg match
+      case t: Tree      => t
+      case ByNameArg(t) => t
+
+  object Call {
+
+    def unapply(tree: Tree)(using Context): Option[(Tree, List[List[Arg]])] =
+      tree match
+      case Apply(fn, args) =>
+        val argTps = fn.tpe.widen match
+          case mt: MethodType => mt.paramInfos
+        val normArgs: List[Arg] = args.zip(argTps).map {
+          case (arg, _: ExprType) => ByNameArg(arg)
+          case (arg, _)           => arg
+        }
+        unapply(fn) match
+        case Some((ref, args0)) => Some((ref, args0 :+ normArgs))
+        case None => None
+
+      case TypeApply(fn, targs) =>
+        unapply(fn)
+
+      case ref: RefTree if ref.tpe.widenSingleton.isInstanceOf[MethodicType] =>
+        Some((ref, Nil))
+
+      case _ => None
+  }
+
+  object NewExpr {
+    def unapply(tree: Tree)(using Context): Option[(TypeRef, New, Symbol, List[List[Arg]])] =
+      tree match
+      case Call(fn @ Select(newTree: New, init), argss) if init == nme.CONSTRUCTOR =>
+        val tref = typeRefOf(newTree.tpe)
+        Some((tref, newTree, fn.symbol, argss))
+      case _ => None
+  }
+
+  object PolyFun {
+    def unapply(tree: Tree)(using Context): Option[Tree] =
+      tree match
+      case Block((cdef: TypeDef) :: Nil, Typed(NewExpr(tref, _, _, _), _))
+      if tref.symbol.isAnonymousClass && tref <:< defn.PolyFunctionType
+      =>
+        val body = cdef.rhs.asInstanceOf[Template].body
+        val apply = body.head.asInstanceOf[DefDef]
+        Some(apply.rhs)
+      case _ =>
+        None
+  }
+
+  extension (symbol: Symbol) def hasSource(using Context): Boolean =
+    !symbol.defTree.isEmpty
+
+  def resolve(cls: ClassSymbol, sym: Symbol)(using Context): Symbol = log("resove " + cls + ", " + sym, printer, (_: Symbol).show) {
+    if (sym.isEffectivelyFinal || sym.isConstructor) sym
+    else sym.matchingMember(cls.appliedRef)
+  }
+
+  def resolveSuper(cls: ClassSymbol, superType: Type, sym: Symbol)(using Context): Symbol = {
+    import annotation.tailrec
+    @tailrec def loop(bcs: List[ClassSymbol]): Symbol = bcs match {
+      case bc :: bcs1 =>
+        val cand = sym.matchingDecl(bcs.head, cls.thisType)
+          .suchThat(alt => !alt.is(Flags.Deferred)).symbol
+        if (cand.exists) cand else loop(bcs.tail)
+      case _ =>
+        NoSymbol
+    }
+    loop(cls.info.baseClasses.dropWhile(sym.owner != _))
+  }
+
+}
diff --git a/compiler/src/dotty/tools/dotc/transform/init/Summarization.scala b/compiler/src/dotty/tools/dotc/transform/init/Summarization.scala
deleted file mode 100644
index 8da415bf82bb..000000000000
--- a/compiler/src/dotty/tools/dotc/transform/init/Summarization.scala
+++ /dev/null
@@ -1,382 +0,0 @@
-package dotty.tools.dotc
-package transform
-package init
-
-import core._
-import Contexts._
-import Decorators._
-import StdNames._
-import Symbols._
-import Constants.Constant
-import Types._
-
-import ast.tpd._
-import config.Printers.init
-import reporting.trace
-
-import Effects._, Potentials._, Summary._, Util._
-
-object Summarization {
-
-  /** Summarization of potentials and effects for an expression
-   *
-   *  Optimization:
-   *
-   *   1. potentials for expression of primitive value types can be
-   *      safely abandoned, as they are always fully initialized.
-   */
-  def analyze(expr: Tree)(implicit env: Env): Summary =
-  trace("summarizing " + expr.show, init, s => s.asInstanceOf[Summary].show) {
-    val summary: Summary = expr match {
-      case Ident(nme.WILDCARD) =>
-        // TODO:  disallow `var x: T = _`
-        Summary.empty
-
-      case Ident(name) =>
-        assert(name.isTermName, "type trees should not reach here")
-        analyze(expr.tpe, expr)
-
-      case supert: Super =>
-        analyze(supert.tpe, supert)
-
-      case Select(qualifier, name) =>
-        val Summary(pots, effs) = analyze(qualifier)
-        if (env.canIgnoreMethod(expr.symbol)) Summary(effs)
-        else if (!expr.symbol.exists) { // polymorphic function apply and structural types
-          Summary(pots.promote(expr) ++ effs)
-        }
-        else {
-          val Summary(pots2, effs2) = pots.select(expr.symbol, expr)
-          Summary(pots2, effs ++ effs2)
-        }
-
-      case _: This =>
-        analyze(expr.tpe, expr)
-
-      case Apply(fun, args) =>
-        val summary = analyze(fun)
-        val ignoredCall = env.canIgnoreMethod(expr.symbol)
-
-        val argTps = fun.tpe.widen match
-          case mt: MethodType => mt.paramInfos
-
-        val res = args.zip(argTps).foldLeft(summary) { case (sum, (arg, argTp)) =>
-          val Summary(pots1, effs1) = analyze(arg)
-          if (ignoredCall) sum ++ effs1
-          else if (argTp.isInstanceOf[ExprType]) sum + Promote(Fun(pots1, effs1)(arg))(arg)
-          else sum ++ pots1.promote(arg) ++ effs1
-        }
-
-        if (ignoredCall) Summary(res.effs)
-        else res
-
-      case TypeApply(fun, args) =>
-        analyze(fun)
-
-      case Literal(const) =>
-        Summary.empty
-
-      case New(tpt) =>
-        def typeRefOf(tp: Type): TypeRef = tp.dealias.typeConstructor match {
-          case tref: TypeRef => tref
-          case hklambda: HKTypeLambda => typeRefOf(hklambda.resType)
-        }
-
-        val tref = typeRefOf(tpt.tpe)
-        val cls = tref.classSymbol.asClass
-        // local class may capture, thus we need to track it
-        if (tref.prefix == NoPrefix) {
-          val cur = theCtx.owner.lexicallyEnclosingClass.asClass
-          val thisRef = ThisRef()(expr)
-          val enclosing = cls.owner.lexicallyEnclosingClass.asClass
-          val summary = resolveThis(enclosing, thisRef, cur, expr)
-          if summary.pots.isEmpty then summary
-          else {
-            assert(summary.pots.size == 1)
-            summary.dropPotentials + Warm(cls, summary.pots.head)(expr)
-          }
-        }
-        else {
-          val summary = analyze(tref.prefix, expr)
-          if summary.pots.isEmpty then summary
-          else {
-            assert(summary.pots.size == 1)
-            summary.dropPotentials + Warm(cls, summary.pots.head)(expr)
-          }
-        }
-
-      case Typed(expr, tpt) =>
-        if (tpt.tpe.hasAnnotation(defn.UncheckedAnnot)) Summary.empty
-        else analyze(expr) ++ effectsOfType(tpt.tpe, tpt)
-
-      case NamedArg(name, arg) =>
-        analyze(arg)
-
-      case Assign(lhs, rhs) =>
-        val Summary(pots, effs) = analyze(rhs)
-        Summary(pots.promote(expr) ++ effs)
-
-      case closureDef(ddef) =>     // must be before `Block`
-        val Summary(pots, effs) = analyze(ddef.rhs)
-        Summary(Fun(pots, effs)(expr))
-
-      case Block(stats, expr) =>
-        val effs = stats.foldLeft(Effects.empty) { (acc, stat) => acc ++ analyze(stat).effs }
-        val Summary(pots2, effs2) = analyze(expr)
-        Summary(pots2, effs ++ effs2)
-
-      case If(cond, thenp, elsep) =>
-        val Summary(_, effs0) = analyze(cond)
-        val Summary(pots1, effs1) = analyze(thenp)
-        val Summary(pots2, effs2) = analyze(elsep)
-        Summary(pots1 ++ pots2, effs0 ++ effs1 ++ effs2)
-
-      case Annotated(arg, annot) =>
-        if (expr.tpe.hasAnnotation(defn.UncheckedAnnot)) Summary.empty
-        else analyze(arg)
-
-      case Match(selector, cases) =>
-        // possible for switches
-        val Summary(pots, effs) = analyze(selector)
-        val init = Summary(Potentials.empty, pots.promote(selector) ++ effs)
-        cases.foldLeft(init) { (acc, cas) =>
-          acc + analyze(cas.body)
-        }
-
-      // case CaseDef(pat, guard, body) =>
-      //   Summary.empty
-
-      case Return(expr, from) =>
-        val Summary(pots, effs) = analyze(expr)
-        Summary(effs ++ pots.promote(expr))
-
-      case WhileDo(cond, body) =>
-        // for lazy fields, the translation may result in `while ()`
-        val Summary(_, effs1) = if (cond.isEmpty) Summary.empty else analyze(cond)
-        val Summary(_, effs2) = analyze(body)
-        Summary(effs1 ++ effs2)
-
-      case Labeled(_, expr) =>
-        val summary = analyze(expr)
-        summary.dropPotentials
-
-      case Try(block, cases, finalizer) =>
-        val Summary(pots, effs) =  cases.foldLeft(analyze(block)) { (acc, cas) =>
-          acc + analyze(cas.body)
-        }
-        val Summary(_, eff2) = if (finalizer.isEmpty) Summary.empty else analyze(finalizer)
-        Summary(pots, effs ++ eff2)
-
-      case SeqLiteral(elems, elemtpt) =>
-        val effsAll: Effects = elems.foldLeft(Effects.empty) { (effs, elem) =>
-          val Summary(pots1, effs1) = analyze(elem)
-          pots1.promote(expr) ++ effs1 ++ effs
-        }
-        Summary(effsAll)
-
-      case Inlined(call, bindings, expansion) =>
-        val effs = bindings.foldLeft(Effects.empty) { (acc, mdef) =>
-          acc ++ analyze(mdef).effs
-        }
-        analyze(expansion) ++ effs
-
-      case vdef : ValDef =>
-        val Summary(pots, effs) = analyze(vdef.rhs)
-
-        if (vdef.symbol.owner.isClass)
-          if (vdef.symbol.is(Flags.Lazy)) Summary.empty else Summary(effs)
-        else
-          Summary(pots.promote(vdef) ++ effs)
-
-      case Thicket(List()) =>
-        // possible in try/catch/finally, see tests/crash/i6914.scala
-        Summary.empty
-
-      case ddef : DefDef =>
-        if (ddef.symbol.owner.isClass) Summary.empty
-        else {
-          val Summary(pots, effs) = analyze(ddef.rhs)
-          Summary(pots.promote(ddef) ++ effs)
-        }
-
-      case tdef: TypeDef =>
-        if tdef.isClassDef then Summary.empty
-        else Summary(effectsOfType(tdef.symbol.info, tdef.rhs))
-
-      case _: Import | _: Export =>
-        Summary.empty
-
-      case _ =>
-        throw new Exception("unexpected tree: " + expr.show)
-    }
-
-    if (env.isAlwaysInitialized(expr.tpe)) Summary(Potentials.empty, summary.effs)
-    else summary
-  }
-
-  private def effectsOfType(tp: Type, source: Tree)(implicit env: Env): Effects =
-    var summary = Summary.empty
-    val traverser = new TypeTraverser {
-      def traverse(tp: Type): Unit = tp match {
-        case TermRef(_: SingletonType, _) =>
-          summary = summary + analyze(tp, source)
-        case _ =>
-          traverseChildren(tp)
-      }
-    }
-    traverser.traverse(tp)
-    summary.effs
-
-  def analyze(tp: Type, source: Tree)(implicit env: Env): Summary =
-  trace("summarizing " + tp.show, init, s => s.asInstanceOf[Summary].show) {
-    val summary: Summary = tp match {
-      case _: ConstantType =>
-        Summary.empty
-
-      case tmref: TermRef if tmref.prefix == NoPrefix =>
-        Summary.empty
-
-      case tmref: TermRef =>
-        val Summary(pots, effs) = analyze(tmref.prefix, source)
-        if (env.canIgnoreMethod(tmref.symbol)) Summary(effs)
-        else {
-          val summary = pots.select(tmref.symbol, source)
-          summary ++ effs
-        }
-
-      case ThisType(tref) =>
-        val enclosing = env.ctx.owner.lexicallyEnclosingClass.asClass
-        val cls = tref.symbol.asClass
-        resolveThis(cls, ThisRef()(source), enclosing, source)
-
-      case SuperType(thisTp, superTp) =>
-        val Summary(pots, effs) = analyze(thisTp, source)
-        val pots2 = pots.map {
-          // TODO: properly handle super of the form A & B
-          SuperRef(_, superTp.classSymbols.head.asClass)(source): Potential
-        }
-        Summary(pots2, effs)
-
-      case _: TermParamRef | _: RecThis  =>
-        // possible from checking effects of types
-        Summary.empty
-
-      case _ =>
-        throw new Exception("unexpected type: " + tp)
-    }
-
-    if (env.isAlwaysInitialized(tp)) Summary(Potentials.empty, summary.effs)
-    else summary
-  }
-
-  def analyzeMethod(sym: Symbol)(implicit env: Env): Summary = {
-    val ddef = sym.defTree.asInstanceOf[DefDef]
-    traceIndented(sym.show + " = " + ddef.show, init)
-    analyze(ddef.rhs)(env.withOwner(sym))
-  }
-
-  def analyzeField(sym: Symbol)(implicit env: Env): Summary = {
-    val vdef = sym.defTree.asInstanceOf[ValDef]
-    analyze(vdef.rhs)(env.withOwner(sym))
-  }
-
-  def resolveThis(cls: ClassSymbol, pot: Potential, cur: ClassSymbol, source: Tree)(implicit env: Env): Summary =
-  trace("resolve " + cls.show + ", pot = " + pot.show + ", cur = " + cur.show, init, s => s.asInstanceOf[Summary].show) {
-    if (cls.is(Flags.Package)) Summary.empty
-    else if (cls == cur) Summary(pot)
-    else if (pot.size > 2) Summary(Promote(pot)(source))
-    else {
-      val enclosing = cur.owner.lexicallyEnclosingClass.asClass
-      // Dotty uses O$.this outside of the object O
-      if (enclosing.is(Flags.Package) && cls.is(Flags.Module))
-        return Summary.empty
-
-      assert(!enclosing.is(Flags.Package), "enclosing = " + enclosing.show + ", cls = " + cls.show + ", pot = " + pot.show + ", cur = " + cur.show)
-      val pot2 = Outer(pot, cur)(pot.source)
-      resolveThis(cls, pot2, enclosing, source)
-    }
-  }
-
-  /** Summarize secondary constructors or class body */
-  def analyzeConstructor(ctor: Symbol)(implicit env: Env): Summary =
-  trace("summarizing constructor " + ctor.owner.show, init, s => s.asInstanceOf[Summary].show) {
-    if (ctor.isPrimaryConstructor) {
-      val cls = ctor.owner.asClass
-      val tpl = ctor.owner.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template]
-      val effs = analyze(Block(tpl.body, unitLiteral)).effs
-
-      def parentArgEffsWithInit(stats: List[Tree], ctor: Symbol, source: Tree): Effects =
-        val init =
-          if env.canIgnoreMethod(ctor) then Effects.empty
-          else Effects.empty :+ MethodCall(ThisRef()(source), ctor)(source)
-        stats.foldLeft(init) { (acc, stat) =>
-          val summary = Summarization.analyze(stat)
-          acc ++ summary.effs
-        }
-
-      val effsAll = tpl.parents.foldLeft(effs) { (effs, parent) =>
-        effs ++ (parent match {
-          case tree @ Block(stats, parent) =>
-            val ctor @ Select(qual, _) = funPart(parent)
-            parentArgEffsWithInit(qual :: stats ++ termArgss(parent).flatten, ctor.symbol, tree)
-
-          case tree @ Apply(Block(stats, parent), args) =>
-            val ctor @ Select(qual, _) = funPart(parent)
-            parentArgEffsWithInit(qual :: stats ++ args ++ termArgss(parent).flatten, ctor.symbol, tree)
-
-          case parent : Apply =>
-            val ctor @ Select(qual, _) = funPart(parent)
-            parentArgEffsWithInit(qual :: termArgss(parent).flatten, ctor.symbol, parent)
-
-          case ref =>
-            val tref: TypeRef = ref.tpe.typeConstructor.asInstanceOf
-            val cls = tref.classSymbol.asClass
-            if env.canIgnoreClass(cls) then Effects.empty
-            else {
-              val ctor = cls.primaryConstructor
-              val prefixEff =
-                if tref.prefix == NoPrefix then Effects.empty
-                else Summarization.analyze(tref.prefix, ref).effs
-
-              prefixEff :+ MethodCall(ThisRef()(ref), ctor)(ref)
-            }
-        })
-      }
-
-      Summary(effsAll)
-    }
-    else {
-      val ddef = ctor.defTree.asInstanceOf[DefDef]
-      analyze(ddef.rhs)(env.withOwner(ctor))
-    }
-  }
-
-  def classSummary(cls: ClassSymbol)(implicit env: Env): ClassSummary = trace("summarizing " + cls.show, init) {
-    def extractParentOuters(parent: Type, source: Tree): (ClassSymbol, Potentials) = {
-      val tref = parent.typeConstructor.stripAnnots.asInstanceOf[TypeRef]
-      val parentCls = tref.classSymbol.asClass
-      val env2: Env = env.withOwner(cls.owner.lexicallyEnclosingClass)
-      if (tref.prefix != NoPrefix)
-        parentCls -> analyze(tref.prefix, source)(env2).pots
-      else
-        parentCls -> analyze(cls.owner.lexicallyEnclosingClass.thisType, source)(env2).pots
-    }
-
-    if (cls.defTree.isEmpty)
-        val source = {
-          implicit val ctx2: Context = theCtx.withSource(cls.source(using theCtx))
-          TypeTree(cls.typeRef).withSpan(cls.span)
-        }
-
-        val parentOuter = cls.info.parents.map { extractParentOuters(_, source) }.toMap
-        ClassSummary(cls, parentOuter)
-    else {
-      val tpl = cls.defTree.asInstanceOf[TypeDef]
-      val parents = tpl.rhs.asInstanceOf[Template].parents
-
-      val parentOuter = parents.map { parent => extractParentOuters(parent.tpe, parent) }
-      ClassSummary(cls, parentOuter.toMap)
-    }
-  }
-
-}
diff --git a/compiler/src/dotty/tools/dotc/transform/init/Summary.scala b/compiler/src/dotty/tools/dotc/transform/init/Summary.scala
deleted file mode 100644
index 1c7a852d4847..000000000000
--- a/compiler/src/dotty/tools/dotc/transform/init/Summary.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-package dotty.tools.dotc
-package transform
-package init
-
-import scala.collection.mutable
-import scala.annotation.targetName
-
-import core._
-import Contexts._
-import Symbols._
-import reporting.trace
-import config.Printers.init
-
-import Potentials._, Effects._, Util._
-
-case class Summary(pots: Potentials, effs: Effects) {
-  def +(summary2: Summary): Summary =
-    Summary(pots ++ summary2.pots, this.effs ++ summary2.effs)
-
-  def +(pot: Potential): Summary =
-    Summary(pots :+ pot, effs)
-
-  def +(eff: Effect): Summary =
-    Summary(pots, effs :+ eff)
-
-  def dropPotentials: Summary =
-    Summary(Potentials.empty, effs)
-
-  @targetName("withPotentials")
-  def ++(pots: Potentials): Summary =
-    Summary(this.pots ++ pots, effs)
-
-  @targetName("withEffects")
-  def ++(effs: Effects): Summary =
-    Summary(pots, this.effs ++ effs)
-
-  def show(using Context): String = {
-    val pots = Potentials.show(this.pots)
-    val effs = Effects.show(this.effs)
-    s"([$pots], [$effs])"
-  }
-}
-
-object Summary {
-  val empty: Summary = Summary(Potentials.empty, Effects.empty)
-
-  def apply(pots: Potentials): Summary = empty ++ pots
-
-  @targetName("withEffects")
-  def apply(effs: Effects): Summary = empty ++ effs
-
-  def apply(pot: Potential): Summary = empty + pot
-
-  def apply(eff: Effect): Summary = empty + eff
-}
-
-/** Summary of class.
-  *
-  *  It makes ObjectPart construction easier with already established raw outer for parents.
-  */
-case class ClassSummary(currentClass: ClassSymbol, parentOuter: Map[ClassSymbol, Potentials]) {
-  private val summaryCache: mutable.Map[Symbol, Summary] = mutable.Map.empty
-
-  def cacheFor(member: Symbol, summary: Summary)(using Context): Unit = {
-    traceIndented("cache for " + member.show + ", summary = " + summary.show, init)
-    assert(member.owner == currentClass, "owner = " + member.owner.show + ", current = " + currentClass.show)
-    summaryCache(member) = summary
-  }
-
-  def summaryOf(member: Symbol)(implicit env: Env): Summary =
-    if (summaryCache.contains(member)) summaryCache(member)
-    else trace("summary for " + member.show, init, s => s.asInstanceOf[Summary].show) {
-      implicit val env2 = env.withOwner(member)
-      val summary =
-        if (member.isConstructor)
-          Summarization.analyzeConstructor(member)
-        else if (member.is(Flags.Method))
-          Summarization.analyzeMethod(member)
-        else // field
-          Summarization.analyzeField(member)
-
-      summaryCache(member) = summary
-      summary
-    }
-
-  def effectsOf(member: Symbol)(implicit env: Env): Effects = summaryOf(member).effs
-  def potentialsOf(member: Symbol)(implicit env: Env): Potentials = summaryOf(member).pots
-
-  def show(using Context): String =
-    "ClassSummary(" + currentClass.name.show +
-      ", parents = " + parentOuter.map { case (k, v) => k.show + "->" + "[" + Potentials.show(v) + "]" }
-}
diff --git a/compiler/src/dotty/tools/dotc/transform/init/Util.scala b/compiler/src/dotty/tools/dotc/transform/init/Util.scala
deleted file mode 100644
index 41d77a8fe608..000000000000
--- a/compiler/src/dotty/tools/dotc/transform/init/Util.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-package dotty.tools.dotc
-package transform
-package init
-
-import core._
-import Contexts._
-import Symbols._
-import config.Printers.Printer
-
-import annotation.tailrec
-
-object Util {
-  def traceIndented(msg: String, printer: Printer)(using Context): Unit =
-    printer.println(s"${ctx.base.indentTab * ctx.base.indent} $msg")
-
-  def traceOp(msg: String, printer: Printer)(op: => Unit)(using Context): Unit = {
-    traceIndented(s"==> ${msg}", printer)
-    op
-    traceIndented(s"<== ${msg}", printer)
-  }
-
-  extension (symbol: Symbol) def hasSource(using Context): Boolean =
-    !symbol.defTree.isEmpty
-
-  def resolve(cls: ClassSymbol, sym: Symbol)(using Context): Symbol =
-    if (sym.isEffectivelyFinal || sym.isConstructor) sym
-    else sym.matchingMember(cls.appliedRef)
-
-  def resolveSuper(cls: ClassSymbol, superCls: ClassSymbol, sym: Symbol)(using Context): Symbol = {
-    // println(s"bases of $cls: " + cls.info.baseClasses)
-    @tailrec def loop(bcs: List[ClassSymbol]): Symbol = bcs match {
-      case bc :: bcs1 =>
-        val cand = sym.matchingDecl(bcs.head, cls.thisType)
-          .suchThat(alt => !alt.is(Flags.Deferred)).symbol
-        if (cand.exists) cand else loop(bcs.tail)
-      case _ =>
-        NoSymbol
-    }
-    loop(cls.info.baseClasses.dropWhile(sym.owner != _))
-  }
-}
\ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala b/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala
new file mode 100644
index 000000000000..0ba7bd14a9b6
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala
@@ -0,0 +1,287 @@
+package dotty.tools.dotc
+package transform.localopt
+
+import scala.annotation.tailrec
+import scala.collection.mutable.ListBuffer
+import scala.util.chaining.*
+import scala.util.matching.Regex.Match
+
+import java.util.{Calendar, Date, Formattable}
+
+import PartialFunction.cond
+
+import dotty.tools.dotc.ast.tpd.{Match => _, *}
+import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Phases.typerPhase
+import dotty.tools.dotc.util.Spans.Span
+
+/** Formatter string checker. */
+class TypedFormatChecker(partsElems: List[Tree], parts: List[String], args: List[Tree])(using Context):
+
+  val argTypes = args.map(_.tpe)
+  val actuals = ListBuffer.empty[Tree]
+
+  // count of args, for checking indexes
+  val argc = argTypes.length
+
+  // Pick the first runtime type which the i'th arg can satisfy.
+  // If conversion is required, implementation must emit it.
+  def argType(argi: Int, types: Type*): Type =
+    require(argi < argc, s"$argi out of range picking from $types")
+    val tpe = argTypes(argi)
+    types.find(t => argConformsTo(argi, tpe, t))
+      .orElse(types.find(t => argConvertsTo(argi, tpe, t)))
+      .getOrElse {
+        report.argError(s"Found: ${tpe.show}, Required: ${types.map(_.show).mkString(", ")}", argi)
+        actuals += args(argi)
+        types.head
+      }
+
+  object formattableTypes:
+    val FormattableType = requiredClassRef("java.util.Formattable")
+    val BigIntType      = requiredClassRef("scala.math.BigInt")
+    val BigDecimalType  = requiredClassRef("scala.math.BigDecimal")
+    val CalendarType    = requiredClassRef("java.util.Calendar")
+    val DateType        = requiredClassRef("java.util.Date")
+  import formattableTypes.*
+  def argConformsTo(argi: Int, arg: Type, target: Type): Boolean = (arg <:< target).tap(if _ then actuals += args(argi))
+  def argConvertsTo(argi: Int, arg: Type, target: Type): Boolean =
+    import typer.Implicits.SearchSuccess
+    atPhase(typerPhase) {
+      ctx.typer.inferView(args(argi), target) match
+        case SearchSuccess(view, ref, _, _) => actuals += view ; true
+        case _ => false
+    }
+
+  // match a conversion specifier
+  val formatPattern = """%(?:(\d+)\$)?([-#+ 0,(<]+)?(\d+)?(\.\d+)?([tT]?[%a-zA-Z])?""".r
+
+  // ordinal is the regex group index in the format pattern
+  enum SpecGroup:
+    case Spec, Index, Flags, Width, Precision, CC
+  import SpecGroup.*
+
+  /** For N part strings and N-1 args to interpolate, normalize parts and check arg types.
+   *
+   *  Returns normalized part strings and args, where args correspond to conversions in tail of parts.
+   */
+  def checked: (List[String], List[Tree]) =
+    val amended = ListBuffer.empty[String]
+    val convert = ListBuffer.empty[Conversion]
+
+    @tailrec
+    def loop(remaining: List[String], n: Int): Unit =
+      remaining match
+        case part0 :: more =>
+          def badPart(t: Throwable): String = "".tap(_ => report.partError(t.getMessage, index = n, offset = 0))
+          val part = try StringContext.processEscapes(part0) catch badPart
+          val matches = formatPattern.findAllMatchIn(part)
+
+          def insertStringConversion(): Unit =
+            amended += "%s" + part
+            convert += Conversion(formatPattern.findAllMatchIn("%s").next(), n)  // improve
+            argType(n-1, defn.AnyType)
+          def errorLeading(op: Conversion) = op.errorAt(Spec)(s"conversions must follow a splice; ${Conversion.literalHelp}")
+          def accept(op: Conversion): Unit =
+            if !op.isLeading then errorLeading(op)
+            op.accepts(argType(n-1, op.acceptableVariants*))
+            amended += part
+            convert += op
+
+          // after the first part, a leading specifier is required for the interpolated arg; %s is supplied if needed
+          if n == 0 then amended += part
+          else if !matches.hasNext then insertStringConversion()
+          else
+            val cv = Conversion(matches.next(), n)
+            if cv.isLiteral then insertStringConversion()
+            else if cv.isIndexed then
+              if cv.index.getOrElse(-1) == n then accept(cv) else insertStringConversion()
+            else if !cv.isError then accept(cv)
+
+          // any remaining conversions in this part must be either literals or indexed
+          while matches.hasNext do
+            val cv = Conversion(matches.next(), n)
+            if n == 0 && cv.hasFlag('<') then cv.badFlag('<', "No last arg")
+            else if !cv.isLiteral && !cv.isIndexed then errorLeading(cv)
+
+          loop(more, n + 1)
+        case Nil => ()
+    end loop
+
+    loop(parts, n = 0)
+    if reported then (Nil, Nil)
+    else
+      assert(argc == actuals.size, s"Expected ${argc} args but got ${actuals.size} for [${parts.mkString(", ")}]")
+      (amended.toList, actuals.toList)
+  end checked
+
+  extension (descriptor: Match)
+    def at(g: SpecGroup): Int = descriptor.start(g.ordinal)
+    def end(g: SpecGroup): Int = descriptor.end(g.ordinal)
+    def offset(g: SpecGroup, i: Int = 0): Int = at(g) + i
+    def group(g: SpecGroup): Option[String] = Option(descriptor.group(g.ordinal))
+    def stringOf(g: SpecGroup): String = group(g).getOrElse("")
+    def intOf(g: SpecGroup): Option[Int] = group(g).map(_.toInt)
+
+  extension (inline value: Boolean)
+    inline def or(inline body: => Unit): Boolean     = value || { body ; false }
+    inline def orElse(inline body: => Unit): Boolean = value || { body ; true }
+    inline def and(inline body: => Unit): Boolean    = value && { body ; true }
+    inline def but(inline body: => Unit): Boolean    = value && { body ; false }
+
+  enum Kind:
+    case StringXn, HashXn, BooleanXn, CharacterXn, IntegralXn, FloatingPointXn, DateTimeXn, LiteralXn, ErrorXn
+  import Kind.*
+
+  /** A conversion specifier matched in the argi'th string part, with `argc` arguments to interpolate.
+   */
+  final class Conversion(val descriptor: Match, val argi: Int, val kind: Kind):
+    // the descriptor fields
+    val index: Option[Int]     = descriptor.intOf(Index)
+    val flags: String          = descriptor.stringOf(Flags)
+    val width: Option[Int]     = descriptor.intOf(Width)
+    val precision: Option[Int] = descriptor.group(Precision).map(_.drop(1).toInt)
+    val op: String             = descriptor.stringOf(CC)
+
+    // the conversion char is the head of the op string (but see DateTimeXn)
+    val cc: Char =
+      kind match
+        case ErrorXn => if op.isEmpty then '?' else op(0)
+        case DateTimeXn => if op.length > 1 then op(1) else '?'
+        case _ => op(0)
+
+    def isIndexed: Boolean = index.nonEmpty || hasFlag('<')
+    def isError: Boolean   = kind == ErrorXn
+    def isLiteral: Boolean = kind == LiteralXn
+
+    // descriptor is at index 0 of the part string
+    def isLeading: Boolean = descriptor.at(Spec) == 0
+
+    // true if passes.
+    def verify: Boolean =
+      // various assertions
+      def goodies = goodFlags && goodIndex
+      def noFlags = flags.isEmpty or errorAt(Flags)("flags not allowed")
+      def noWidth = width.isEmpty or errorAt(Width)("width not allowed")
+      def noPrecision = precision.isEmpty or errorAt(Precision)("precision not allowed")
+      def only_-(msg: String) =
+        val badFlags = flags.filterNot { case '-' | '<' => true case _ => false }
+        badFlags.isEmpty or badFlag(badFlags(0), s"Only '-' allowed for $msg")
+      def goodFlags =
+        val badFlags = flags.filterNot(okFlags.contains)
+        for f <- badFlags do badFlag(f, s"Illegal flag '$f'")
+        badFlags.isEmpty
+      def goodIndex =
+        if index.nonEmpty && hasFlag('<') then warningAt(Index)("Argument index ignored if '<' flag is present")
+        val okRange = index.map(i => i > 0 && i <= argc).getOrElse(true)
+        okRange || hasFlag('<') or errorAt(Index)("Argument index out of range")
+    // begin verify
+      kind match
+        case StringXn        => goodies
+        case BooleanXn       => goodies
+        case HashXn          => goodies
+        case CharacterXn     => goodies && noPrecision && only_-("c conversion")
+        case IntegralXn      =>
+          def d_# = cc == 'd' && hasFlag('#') and badFlag('#', "# not allowed for d conversion")
+          def x_comma = cc != 'd' && hasFlag(',') and badFlag(',', "',' only allowed for d conversion of integral types")
+          goodies && noPrecision && !d_# && !x_comma
+        case FloatingPointXn =>
+          goodies && (cc match
+            case 'a' | 'A' =>
+              val badFlags = ",(".filter(hasFlag)
+              noPrecision && badFlags.isEmpty or badFlags.foreach(badf => badFlag(badf, s"'$badf' not allowed for a, A"))
+            case _ => true
+          )
+        case DateTimeXn      =>
+          def hasCC = op.length == 2 or errorAt(CC)("Date/time conversion must have two characters")
+          def goodCC = "HIklMSLNpzZsQBbhAaCYyjmdeRTrDFc".contains(cc) or errorAt(CC, 1)(s"'$cc' doesn't seem to be a date or time conversion")
+          goodies && hasCC && goodCC && noPrecision && only_-("date/time conversions")
+        case LiteralXn       =>
+          op match
+            case "%" => goodies && noPrecision and width.foreach(_ => warningAt(Width)("width ignored on literal"))
+            case "n" => noFlags && noWidth && noPrecision
+        case ErrorXn         =>
+          errorAt(CC)(s"illegal conversion character '$cc'")
+          false
+    end verify
+
+    // is the specifier OK with the given arg
+    def accepts(arg: Type): Boolean =
+      kind match
+        case BooleanXn  => arg == defn.BooleanType orElse warningAt(CC)("Boolean format is null test for non-Boolean")
+        case IntegralXn =>
+          arg == BigIntType || !cond(cc) {
+            case 'o' | 'x' | 'X' if hasAnyFlag("+ (") => "+ (".filter(hasFlag).foreach(bad => badFlag(bad, s"only use '$bad' for BigInt conversions to o, x, X")) ; true
+          }
+        case _ => true
+
+    // what arg type if any does the conversion accept
+    def acceptableVariants: List[Type] =
+      kind match
+        case StringXn        => if hasFlag('#') then FormattableType :: Nil else defn.AnyType :: Nil
+        case BooleanXn       => defn.BooleanType :: defn.NullType :: Nil
+        case HashXn          => defn.AnyType :: Nil
+        case CharacterXn     => defn.CharType :: defn.ByteType :: defn.ShortType :: defn.IntType :: Nil
+        case IntegralXn      => defn.IntType :: defn.LongType :: defn.ByteType :: defn.ShortType :: BigIntType :: Nil
+        case FloatingPointXn => defn.DoubleType :: defn.FloatType :: BigDecimalType :: Nil
+        case DateTimeXn      => defn.LongType :: CalendarType :: DateType :: Nil
+        case LiteralXn       => Nil
+        case ErrorXn         => Nil
+
+    // what flags does the conversion accept?
+    private def okFlags: String =
+      kind match
+        case StringXn => "-#<"
+        case BooleanXn | HashXn => "-<"
+        case LiteralXn => "-"
+        case _ => "-#+ 0,(<"
+
+    def hasFlag(f: Char) = flags.contains(f)
+    def hasAnyFlag(fs: String) = fs.exists(hasFlag)
+
+    def badFlag(f: Char, msg: String) =
+      val i = flags.indexOf(f) match { case -1 => 0 case j => j }
+      errorAt(Flags, i)(msg)
+
+    def errorAt(g: SpecGroup, i: Int = 0)(msg: String)   = report.partError(msg, argi, descriptor.offset(g, i), descriptor.end(g))
+    def warningAt(g: SpecGroup, i: Int = 0)(msg: String) = report.partWarning(msg, argi, descriptor.offset(g, i), descriptor.end(g))
+
+  object Conversion:
+    def apply(m: Match, i: Int): Conversion =
+      def kindOf(cc: Char) = cc match
+        case 's' | 'S' => StringXn
+        case 'h' | 'H' => HashXn
+        case 'b' | 'B' => BooleanXn
+        case 'c' | 'C' => CharacterXn
+        case 'd' | 'o' |
+             'x' | 'X' => IntegralXn
+        case 'e' | 'E' |
+             'f' |
+             'g' | 'G' |
+             'a' | 'A' => FloatingPointXn
+        case 't' | 'T' => DateTimeXn
+        case '%' | 'n' => LiteralXn
+        case _         => ErrorXn
+      end kindOf
+      m.group(CC) match
+        case Some(cc) => new Conversion(m, i, kindOf(cc(0))).tap(_.verify)
+        case None     => new Conversion(m, i, ErrorXn).tap(_.errorAt(Spec)(s"Missing conversion operator in '${m.matched}'; $literalHelp"))
+    end apply
+    val literalHelp = "use %% for literal %, %n for newline"
+  end Conversion
+
+  var reported = false
+
+  private def partPosAt(index: Int, offset: Int, end: Int) =
+    val pos = partsElems(index).sourcePos
+    val bgn = pos.span.start + offset
+    val fin = if end < 0 then pos.span.end else pos.span.start + end
+    pos.withSpan(Span(bgn, fin, bgn))
+
+  extension (r: report.type)
+    def argError(message: String, index: Int): Unit = r.error(message, args(index).srcPos).tap(_ => reported = true)
+    def partError(message: String, index: Int, offset: Int, end: Int = -1): Unit = r.error(message, partPosAt(index, offset, end)).tap(_ => reported = true)
+    def partWarning(message: String, index: Int, offset: Int, end: Int = -1): Unit = r.warning(message, partPosAt(index, offset, end)).tap(_ => reported = true)
+end TypedFormatChecker
diff --git a/compiler/src/dotty/tools/dotc/transform/localopt/FormatInterpolatorTransform.scala b/compiler/src/dotty/tools/dotc/transform/localopt/FormatInterpolatorTransform.scala
new file mode 100644
index 000000000000..79d94c26c692
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/transform/localopt/FormatInterpolatorTransform.scala
@@ -0,0 +1,39 @@
+package dotty.tools.dotc
+package transform.localopt
+
+import dotty.tools.dotc.ast.tpd.*
+import dotty.tools.dotc.core.Constants.Constant
+import dotty.tools.dotc.core.Contexts.*
+
+object FormatInterpolatorTransform:
+
+  /** For f"${arg}%xpart", check format conversions and return (format, args)
+   *  suitable for String.format(format, args).
+   */
+  def checked(fun: Tree, args0: Tree)(using Context): (Tree, Tree) =
+    val (partsExpr, parts) = fun match
+      case TypeApply(Select(Apply(_, (parts: SeqLiteral) :: Nil), _), _) =>
+        (parts.elems, parts.elems.map { case Literal(Constant(s: String)) => s })
+      case _ =>
+        report.error("Expected statically known StringContext", fun.srcPos)
+        (Nil, Nil)
+    val (args, elemtpt) = args0 match
+      case seqlit: SeqLiteral => (seqlit.elems, seqlit.elemtpt)
+      case _ =>
+        report.error("Expected statically known argument list", args0.srcPos)
+        (Nil, EmptyTree)
+
+    def literally(s: String) = Literal(Constant(s))
+    if parts.lengthIs != args.length + 1 then
+      val badParts =
+        if parts.isEmpty then "there are no parts"
+        else s"too ${if parts.lengthIs > args.length + 1 then "few" else "many"} arguments for interpolated string"
+      report.error(badParts, fun.srcPos)
+      (literally(""), args0)
+    else
+      val checker = TypedFormatChecker(partsExpr, parts, args)
+      val (format, formatArgs) = checker.checked
+      if format.isEmpty then (literally(parts.mkString), args0)
+      else (literally(format.mkString), SeqLiteral(formatArgs.toList, elemtpt))
+  end checked
+end FormatInterpolatorTransform
diff --git a/compiler/src/dotty/tools/dotc/transform/localopt/StringContextChecker.scala b/compiler/src/dotty/tools/dotc/transform/localopt/StringContextChecker.scala
deleted file mode 100644
index fbd09f43b853..000000000000
--- a/compiler/src/dotty/tools/dotc/transform/localopt/StringContextChecker.scala
+++ /dev/null
@@ -1,714 +0,0 @@
-package dotty.tools.dotc
-package transform.localopt
-
-import dotty.tools.dotc.ast.Trees._
-import dotty.tools.dotc.ast.tpd
-import dotty.tools.dotc.core.Decorators._
-import dotty.tools.dotc.core.Constants.Constant
-import dotty.tools.dotc.core.Contexts._
-import dotty.tools.dotc.core.StdNames._
-import dotty.tools.dotc.core.NameKinds._
-import dotty.tools.dotc.core.Symbols._
-import dotty.tools.dotc.core.Types._
-
-// Ported from old dotty.internal.StringContextMacro
-// TODO: port Scala 2 logic? (see https://github.com/scala/scala/blob/2.13.x/src/compiler/scala/tools/reflect/FormatInterpolator.scala#L74)
-object StringContextChecker {
-  import tpd._
-
-  /** This trait defines a tool to report errors/warnings that do not depend on Position. */
-  trait InterpolationReporter {
-
-    /** Reports error/warning of size 1 linked with a part of the StringContext.
-      *
-      * @param message the message to report as error/warning
-      * @param index the index of the part inside the list of parts of the StringContext
-      * @param offset the index in the part String where the error is
-      * @return an error/warning depending on the function
-      */
-    def partError(message : String, index : Int, offset : Int) : Unit
-    def partWarning(message : String, index : Int, offset : Int) : Unit
-
-    /** Reports error linked with an argument to format.
-      *
-      * @param message the message to report as error/warning
-      * @param index the index of the argument inside the list of arguments of the format function
-      * @return an error depending on the function
-      */
-    def argError(message : String, index : Int) : Unit
-
-    /** Reports error linked with the list of arguments or the StringContext.
-      *
-      * @param message the message to report in the error
-      * @return an error
-      */
-    def strCtxError(message : String) : Unit
-    def argsError(message : String) : Unit
-
-    /** Claims whether an error or a warning has been reported
-      *
-      * @return true if an error/warning has been reported, false
-      */
-    def hasReported() : Boolean
-
-    /** Stores the old value of the reported and reset it to false */
-    def resetReported() : Unit
-
-    /** Restores the value of the reported boolean that has been reset */
-    def restoreReported() : Unit
-  }
-
-  /** Check the format of the parts of the f".." arguments and returns the string parts of the StringContext */
-  def checkedParts(strContext_f: Tree, args0: Tree)(using Context): String = {
-
-    val (partsExpr, parts) = strContext_f match {
-      case TypeApply(Select(Apply(_, (parts: SeqLiteral) :: Nil), _), _) =>
-        (parts.elems, parts.elems.map { case Literal(Constant(str: String)) => str } )
-      case _ =>
-        report.error("Expected statically known String Context", strContext_f.srcPos)
-        return ""
-    }
-
-    val args = args0 match {
-      case args: SeqLiteral => args.elems
-      case _ =>
-        report.error("Expected statically known argument list", args0.srcPos)
-        return ""
-    }
-
-    val reporter = new InterpolationReporter{
-      private[this] var reported = false
-      private[this] var oldReported = false
-      def partError(message : String, index : Int, offset : Int) : Unit = {
-        reported = true
-        val pos = partsExpr(index).sourcePos
-        val posOffset = pos.withSpan(pos.span.shift(offset))
-        report.error(message, posOffset)
-      }
-      def partWarning(message : String, index : Int, offset : Int) : Unit = {
-        reported = true
-        val pos = partsExpr(index).sourcePos
-        val posOffset = pos.withSpan(pos.span.shift(offset))
-        report.warning(message, posOffset)
-      }
-
-      def argError(message : String, index : Int) : Unit = {
-        reported = true
-        report.error(message, args(index).srcPos)
-      }
-
-      def strCtxError(message : String) : Unit = {
-        reported = true
-        report.error(message, strContext_f.srcPos)
-      }
-      def argsError(message : String) : Unit = {
-        reported = true
-        report.error(message, args0.srcPos)
-      }
-
-      def hasReported() : Boolean = {
-        reported
-      }
-
-      def resetReported() : Unit = {
-        oldReported = reported
-        reported = false
-      }
-
-      def restoreReported() : Unit = {
-        reported = oldReported
-      }
-    }
-
-    checked(parts, args, reporter)
-  }
-
-  def checked(parts0: List[String], args: List[Tree], reporter: InterpolationReporter)(using Context): String = {
-
-
-    /** Checks if the number of arguments are the same as the number of formatting strings
-     *
-     *  @param format the number of formatting parts in the StringContext
-     *  @param argument the number of arguments to interpolate in the string
-     *  @return reports an error if the number of arguments does not match with the number of formatting strings,
-     *  nothing otherwise
-     */
-    def checkSizes(format : Int, argument : Int) : Unit = {
-      if (format > argument && !(format == -1 && argument == 0))
-        if (argument == 0)
-          reporter.argsError("too few arguments for interpolated string")
-        else
-          reporter.argError("too few arguments for interpolated string", argument - 1)
-      if (format < argument && !(format == -1 && argument == 0))
-        if (argument == 0)
-          reporter.argsError("too many arguments for interpolated string")
-        else
-          reporter.argError("too many arguments for interpolated string", format)
-      if (format == -1)
-        reporter.strCtxError("there are no parts")
-    }
-
-    /** Adds the default "%s" to the Strings that do not have any given format
-     *
-     *  @param parts the list of parts contained in the StringContext
-     *  @return a new list of string with all a defined formatting or reports an error if the '%' and
-     *  formatting parameter are too far away from the argument that they refer to
-     *  For example : f2"${d}random-leading-junk%d" will lead to an error
-     */
-    def addDefaultFormat(parts : List[String]) : List[String] = parts match {
-      case Nil => Nil
-      case p :: parts1 => p :: parts1.map((part : String) => {
-        if (!part.startsWith("%")) {
-          val index = part.indexOf('%')
-          if (!reporter.hasReported() && index != -1) {
-            reporter.partError("conversions must follow a splice; use %% for literal %, %n for newline", parts.indexOf(part), index)
-            "%s" + part
-          } else "%s" + part
-        } else part
-      })
-    }
-
-    /** Checks whether a part contains a formatting substring
-     *
-     *  @param part the part to check
-     *  @param l the length of the given part
-     *  @param index the index where to start to look for a potential new formatting string
-     *  @return an Option containing the index in the part where a new formatting String starts, None otherwise
-     */
-    def getFormattingSubstring(part : String, l : Int, index : Int) : Option[Int] = {
-      var i = index
-      var result : Option[Int] = None
-      while (i < l){
-        if (part.charAt(i) == '%' && result.isEmpty)
-          result = Some(i)
-        i += 1
-      }
-      result
-    }
-
-    /** Finds all the flags that are inside a formatting String from a given index
-     *
-     *  @param i the index in the String s where to start to check
-     *  @param l the length of s
-     *  @param s the String to check
-     *  @return a list containing all the flags that are inside the formatting String,
-     *  and their index in the String
-     */
-    def getFlags(i : Int, l : Int, s : String) : List[(Char, Int)] = {
-      def isFlag(c : Char) : Boolean = c match {
-        case '-' | '#' | '+' | ' ' | '0' | ',' | '(' => true
-        case _ => false
-      }
-      if (i < l && isFlag(s.charAt(i))) (s.charAt(i), i) :: getFlags(i + 1, l, s)
-      else Nil
-    }
-
-    /** Skips the Characters that are width or argumentIndex parameters
-     *
-     *  @param i the index where to start checking in the given String
-     *  @param s the String to check
-     *  @param l the length of s
-     *  @return a tuple containing the index in the String after skipping
-     *  the parameters, true if it has a width parameter and its value, false otherwise
-     */
-    def skipWidth(i : Int, s : String, l : Int) = {
-      var j = i
-      var width = (false, 0)
-      while (j < l && Character.isDigit(s.charAt(j))){
-        width = (true, j)
-        j += 1
-      }
-      (j, width._1, width._2)
-    }
-
-    /** Retrieves all the formatting parameters from a part and their index in it
-     *
-     *  @param part the String containing the formatting parameters
-     *  @param argIndex the index of the current argument inside the list of arguments to interpolate
-     *  @param partIndex the index of the current part inside the list of parts in the StringContext
-     *  @param noArg true if there is no arg, i.e. "%%" or "%n"
-     *  @param pos the initial index where to start checking the part
-     *  @return reports an error if any of the size of the arguments and the parts do not match or if a conversion
-     *  parameter is missing. Otherwise,
-     *  the index where the format specifier substring is,
-     *  hasArgumentIndex (true and the index of its corresponding argumentIndex if there is an argument index, false and 0 otherwise) and
-     *  flags that contains the list of flags (empty if there is none),
-     *  hasWidth (true and the index of the width parameter if there is a width, false and 0 otherwise),
-     *  hasPrecision (true and the index of the precision if there is a precision, false and 0 otherwise),
-     *  hasRelative (true if the specifiers use relative indexing, false otherwise) and
-     *  conversion character index
-     */
-    def getFormatSpecifiers(part : String, argIndex : Int, partIndex : Int, noArg : Boolean, pos : Int) : (Boolean, Int, List[(Char, Int)], Boolean, Int, Boolean, Int, Boolean, Int, Int) = {
-      var conversion = pos
-      var hasArgumentIndex = false
-      var argumentIndex = pos
-      var hasPrecision = false
-      var precision = pos
-      val l = part.length
-
-      if (l >= 1 && part.charAt(conversion) == '%')
-        conversion += 1
-      else if (!noArg)
-        reporter.argError("too many arguments for interpolated string", argIndex)
-
-      //argument index or width
-      val (i, hasWidth1, width1) = skipWidth(conversion, part, l)
-      conversion = i
-
-      //argument index
-      if (conversion < l && part.charAt(conversion) == '$'){
-        if (hasWidth1){
-          hasArgumentIndex = true
-          argumentIndex = width1
-          conversion += 1
-        } else {
-          reporter.partError("Missing conversion operator in '" + part.substring(0, conversion) + "'; use %% for literal %, %n for newline", partIndex, 0)
-        }
-      }
-
-      //relative indexing
-      val hasRelative = conversion < l && part.charAt(conversion) == '<'
-      val relativeIndex = conversion
-      if (hasRelative)
-        conversion += 1
-
-      //flags
-      val flags = getFlags(conversion, l, part)
-      conversion += flags.size
-
-      //width
-      val (j, hasWidth2, width2) = skipWidth(conversion, part, l)
-      conversion = j
-
-      //precision
-      if (conversion < l && part.charAt(conversion) == '.') {
-        precision = conversion
-        conversion += 1
-        hasPrecision = true
-        val oldConversion = conversion
-        while (conversion < l && Character.isDigit(part.charAt(conversion))) {
-          conversion += 1
-        }
-        if (oldConversion == conversion) {
-          reporter.partError("Missing conversion operator in '" + part.substring(pos, oldConversion - 1) + "'; use %% for literal %, %n for newline", partIndex, pos)
-          hasPrecision = false
-        }
-      }
-
-      //conversion
-      if((conversion >= l || (!part.charAt(conversion).isLetter && part.charAt(conversion) != '%')) && !reporter.hasReported())
-        reporter.partError("Missing conversion operator in '" + part.substring(pos, conversion) + "'; use %% for literal %, %n for newline", partIndex, pos)
-
-      val hasWidth = (hasWidth1 && !hasArgumentIndex) || hasWidth2
-      val width = if (hasWidth1 && !hasArgumentIndex) width1 else width2
-      (hasArgumentIndex, argumentIndex, flags, hasWidth, width, hasPrecision, precision, hasRelative, relativeIndex, conversion)
-    }
-
-    /** Checks if a given type is a subtype of any of the possibilities
-     *
-     *  @param actualType the given type
-     *  @param expectedType the type we are expecting
-     *  @param argIndex the index of the argument that should type check
-     *  @param possibilities all the types within which we want to find a super type of the actualType
-     *  @return reports a type mismatch error if the actual type is not a subtype of any of the possibilities,
-     *  nothing otherwise
-     */
-    def checkSubtype(actualType: Type, expectedType: String, argIndex: Int, possibilities: List[Type]) = {
-      if !possibilities.exists(actualType <:< _) then
-        reporter.argError("type mismatch;\n found   : " + actualType.widen.show.stripPrefix("scala.Predef.").stripPrefix("java.lang.").stripPrefix("scala.") + "\n required: " + expectedType, argIndex)
-    }
-
-    /** Checks whether a given argument index, relative or not, is in the correct bounds
-     *
-     *  @param partIndex the index of the part we are checking
-     *  @param offset the index in the part where there might be an error
-     *  @param relative true if relative indexing is used, false otherwise
-     *  @param argumentIndex the argument index parameter in the formatting String
-     *  @param expected true if we have an expectedArgumentIndex, false otherwise
-     *  @param expectedArgumentIndex the expected argument index parameter
-     *  @param maxArgumentIndex the maximum argument index parameter that can be used
-     *  @return reports a warning if relative indexing is used but an argument is still given,
-     *  an error is the argument index is not in the bounds [1, number of arguments]
-     */
-    def checkArgumentIndex(partIndex : Int, offset : Int, relative : Boolean, argumentIndex : Int, expected : Boolean, expectedArgumentIndex : Int, maxArgumentIndex : Int) = {
-      if (relative)
-        reporter.partWarning("Argument index ignored if '<' flag is present", partIndex, offset)
-
-      if (argumentIndex > maxArgumentIndex || argumentIndex <= 0)
-        reporter.partError("Argument index out of range", partIndex, offset)
-
-      if (expected && expectedArgumentIndex != argumentIndex && !reporter.hasReported())
-        reporter.partWarning("Index is not this arg", partIndex, offset)
-    }
-
-    /** Checks if a parameter is specified whereas it is not allowed
-     *
-     *  @param hasParameter true if parameter is specified, false otherwise
-     *  @param partIndex the index of the part inside the parts
-     *  @param offset the index in the part where to report an error
-     *  @param parameter the parameter that is not allowed
-     *  @return reports an error if hasParameter is true, nothing otherwise
-     */
-    def checkNotAllowedParameter(hasParameter : Boolean, partIndex : Int, offset : Int, parameter : String) = {
-      if (hasParameter)
-        reporter.partError(parameter + " not allowed", partIndex, offset)
-    }
-
-    /** Checks if the flags are allowed for the conversion
-     *
-     *  @param partIndex the index of the part in the String Context
-     *  @param flags the specified flags to check
-     *  @param notAllowedFlagsOnCondition a list that maps which flags are allowed depending on the conversion Char
-     *  @return reports an error if the flag is not allowed, nothing otherwise
-     */
-    def checkFlags(partIndex : Int, flags : List[(Char, Int)], notAllowedFlagOnCondition: List[(Char, Boolean, String)]) = {
-      for {flag <- flags ; (nonAllowedFlag, condition, message) <- notAllowedFlagOnCondition ; if (flag._1 == nonAllowedFlag && condition)}
-        reporter.partError(message, partIndex, flag._2)
-    }
-
-    /** Checks if the flags are allowed for the conversion
-     *
-     *  @param partIndex the index of the part in the String Context
-     *  @param flags the specified flags to check
-     *  @param notAllowedFlagsOnCondition a list that maps which flags are allowed depending on the conversion Char
-     *  @return reports an error only once if at least one of the flags is not allowed, nothing otherwise
-     */
-    def checkUniqueFlags(partIndex : Int, flags : List[(Char, Int)], notAllowedFlagOnCondition : List[(Char, Boolean, String)]) = {
-      reporter.resetReported()
-      for {flag <- flags ; (nonAllowedFlag, condition, message) <- notAllowedFlagOnCondition ; if (flag._1 == nonAllowedFlag && condition)} {
-        if (!reporter.hasReported())
-          reporter.partError(message, partIndex, flag._2)
-      }
-      if (!reporter.hasReported())
-        reporter.restoreReported()
-    }
-
-    /** Checks all the formatting parameters for a Character conversion
-     *
-     *  @param partIndex the index of the part, that we are checking, inside the parts
-     *  @param flags the flags parameters inside the formatting part
-     *  @param hasPrecision true if precision parameter is specified, false otherwise
-     *  @param precision the index of the precision parameter inside the part
-     *  @return reports an error
-     *  if precision is specified or if the used flags are different from '-'
-     */
-    def checkCharacterConversion(partIndex : Int, flags : List[(Char, Int)], hasPrecision : Boolean, precisionIndex : Int) = {
-      val notAllowedFlagOnCondition = for (flag <- List('#', '+', ' ', '0', ',', '(')) yield (flag, true, "Only '-' allowed for c conversion")
-      checkUniqueFlags(partIndex, flags, notAllowedFlagOnCondition)
-      checkNotAllowedParameter(hasPrecision, partIndex, precisionIndex, "precision")
-    }
-
-    /** Checks all the formatting parameters for an Integral conversion
-     *
-     *  @param partIndex the index of the part, that we are checking, inside the parts
-     *  @param argType the type of the argument matching with the given part
-     *  @param conversionChar the Char used for the formatting conversion
-     *  @param flags the flags parameters inside the formatting part
-     *  @param hasPrecision true if precision parameter is specified, false otherwise
-     *  @param precision the index of the precision parameter inside the part
-     *  @return reports an error
-     *  if precision is specified or if the used flags are not allowed :
-     *  ’d’: only ’#’ is allowed,
-     *  ’o’, ’x’, ’X’: ’-’, ’#’, ’0’ are always allowed, depending on the type, this will be checked in the type check step
-     */
-    def checkIntegralConversion(partIndex : Int, argType : Option[Type], conversionChar : Char, flags : List[(Char, Int)], hasPrecision : Boolean, precision : Int) = {
-      if (conversionChar == 'd')
-        checkFlags(partIndex, flags, List(('#', true,  "# not allowed for d conversion")))
-
-      checkNotAllowedParameter(hasPrecision, partIndex, precision, "precision")
-    }
-
-    /** Checks all the formatting parameters for a Floating Point conversion
-     *
-     *  @param partIndex the index of the part, that we are checking, inside the parts
-     *  @param conversionChar the Char used for the formatting conversion
-     *  @param flags the flags parameters inside the formatting part
-     *  @param hasPrecision true if precision parameter is specified, false otherwise
-     *  @param precision the index of the precision parameter inside the part
-     *  @return reports an error
-     *  if precision is specified for 'a', 'A' conversion or if the used flags are '(' and ',' for 'a', 'A'
-     */
-    def checkFloatingPointConversion(partIndex: Int, conversionChar : Char, flags : List[(Char, Int)], hasPrecision : Boolean, precision : Int) = {
-      if(conversionChar == 'a' || conversionChar == 'A'){
-        for {flag <- flags ; if (flag._1 == ',' || flag._1 == '(')}
-          reporter.partError("'" + flag._1 + "' not allowed for a, A", partIndex, flag._2)
-        checkNotAllowedParameter(hasPrecision, partIndex, precision, "precision")
-      }
-    }
-
-    /** Checks all the formatting parameters for a Time conversion
-     *
-     *  @param partIndex the index of the part, that we are checking, inside the parts
-     *  @param part the part that we are checking
-     *  @param conversionIndex the index of the conversion Char used in the part
-     *  @param flags the flags parameters inside the formatting part
-     *  @param hasPrecision true if precision parameter is specified, false otherwise
-     *  @param precision the index of the precision parameter inside the part
-     *  @return reports an error
-     *  if precision is specified, if the time suffix is not given/incorrect or if the used flags are
-     *  different from '-'
-     */
-    def checkTimeConversion(partIndex : Int, part : String, conversionIndex : Int, flags : List[(Char, Int)], hasPrecision : Boolean, precision : Int) = {
-      /** Checks whether a time suffix is given and whether it is allowed
-       *
-       *  @param part the part that we are checking
-       *  @param partIndex the index of the part inside of the parts of the StringContext
-       *  @param conversionIndex the index of the conversion Char inside the part
-       *  @param return reports an error if no suffix is specified or if the given suffix is not
-       *  part of the allowed ones
-       */
-      def checkTime(part : String, partIndex : Int, conversionIndex : Int) : Unit = {
-        if (conversionIndex + 1 >= part.size)
-          reporter.partError("Date/time conversion must have two characters", partIndex, conversionIndex)
-        else {
-          part.charAt(conversionIndex + 1) match {
-            case 'H' | 'I' | 'k' | 'l' | 'M' | 'S' | 'L' | 'N' | 'p' | 'z' | 'Z' | 's' | 'Q' => //times
-            case 'B' | 'b' | 'h' | 'A' | 'a' | 'C' | 'Y' | 'y' | 'j' | 'm' | 'd' | 'e' => //dates
-            case 'R' | 'T' | 'r' | 'D' | 'F' | 'c' => //dates and times
-            case c => reporter.partError("'" + c + "' doesn't seem to be a date or time conversion", partIndex, conversionIndex + 1)
-          }
-        }
-      }
-
-      val notAllowedFlagOnCondition = for (flag <- List('#', '+', ' ', '0', ',', '(')) yield (flag, true, "Only '-' allowed for date/time conversions")
-      checkUniqueFlags(partIndex, flags, notAllowedFlagOnCondition)
-      checkNotAllowedParameter(hasPrecision, partIndex, precision, "precision")
-      checkTime(part, partIndex, conversionIndex)
-    }
-
-    /** Checks all the formatting parameters for a General conversion
-     *
-     *  @param partIndex the index of the part, that we are checking, inside the parts
-     *  @param argType the type of the argument matching with the given part
-     *  @param conversionChar the Char used for the formatting conversion
-     *  @param flags the flags parameters inside the formatting part
-     *  @return reports an error
-     *  if '#' flag is used or if any other flag is used
-     */
-    def checkGeneralConversion(partIndex : Int, argType : Option[Type], conversionChar : Char, flags : List[(Char, Int)]) = {
-      for {flag <- flags ; if (flag._1 != '-' && flag._1 != '#')}
-        reporter.partError("Illegal flag '" + flag._1 + "'", partIndex, flag._2)
-    }
-
-    /** Checks all the formatting parameters for a special Char such as '%' and end of line
-     *
-     *  @param partIndex the index of the part, that we are checking, inside the parts
-     *  @param conversionChar the Char used for the formatting conversion
-     *  @param hasPrecision true if precision parameter is specified, false otherwise
-     *  @param precision the index of the precision parameter inside the part
-     *  @param hasWidth true if width parameter is specified, false otherwise
-     *  @param width the index of the width parameter inside the part
-     *  @return reports an error if precision or width is specified for '%' or
-     *  if precision is specified for end of line
-     */
-    def checkSpecials(partIndex : Int, conversionChar : Char, hasPrecision : Boolean, precision : Int, hasWidth : Boolean, width : Int, flags : List[(Char, Int)]) = conversionChar match {
-      case 'n' => {
-        checkNotAllowedParameter(hasPrecision, partIndex, precision, "precision")
-        checkNotAllowedParameter(hasWidth, partIndex, width, "width")
-        val notAllowedFlagOnCondition = for (flag <- List('-', '#', '+', ' ', '0', ',', '(')) yield (flag, true, "flags not allowed")
-        checkUniqueFlags(partIndex, flags, notAllowedFlagOnCondition)
-      }
-      case '%' => {
-        checkNotAllowedParameter(hasPrecision, partIndex, precision, "precision")
-        val notAllowedFlagOnCondition = for (flag <- List('#', '+', ' ', '0', ',', '(')) yield (flag, true, "Illegal flag '" + flag + "'")
-        checkFlags(partIndex, flags, notAllowedFlagOnCondition)
-      }
-      case _ => // OK
-    }
-
-    /** Checks whether the format specifiers are correct depending on the conversion parameter
-     *
-     *  @param partIndex the index of the part, that we are checking, inside the parts
-     *  @param part the part to check
-     *  The rest of the inputs correspond to the output of the function getFormatSpecifiers
-     *  @param hasArgumentIndex
-     *  @param actualArgumentIndex
-     *  @param expectedArgumentIndex
-     *  @param firstFormattingSubstring true if it is the first in the list, i.e. not an indexed argument
-     *  @param maxArgumentIndex
-     *  @param hasRelative
-     *  @param hasWidth
-     *  @param hasPrecision
-     *  @param precision
-     *  @param flags
-     *  @param conversion
-     *  @param argType
-     *  @return the argument index and its type if there is an argument, the flags and the conversion parameter
-     *  reports an error/warning if the formatting parameters are not allowed/wrong, nothing otherwise
-     */
-    def checkFormatSpecifiers(partIndex : Int, hasArgumentIndex : Boolean, actualArgumentIndex : Int, expectedArgumentIndex : Option[Int], firstFormattingSubstring : Boolean, maxArgumentIndex : Option[Int],
-      hasRelative : Boolean, hasWidth : Boolean, width : Int, hasPrecision : Boolean, precision : Int, flags : List[(Char, Int)], conversion : Int, argType : Option[Type], part : String) : (Option[(Type, Int)], Char, List[(Char, Int)])= {
-      val conversionChar = part.charAt(conversion)
-
-      if (hasArgumentIndex && expectedArgumentIndex.nonEmpty && maxArgumentIndex.nonEmpty && firstFormattingSubstring)
-        checkArgumentIndex(partIndex, actualArgumentIndex, hasRelative, part.charAt(actualArgumentIndex).asDigit, true, expectedArgumentIndex.get, maxArgumentIndex.get)
-      else if(hasArgumentIndex && maxArgumentIndex.nonEmpty && !firstFormattingSubstring)
-        checkArgumentIndex(partIndex, actualArgumentIndex, hasRelative, part.charAt(actualArgumentIndex).asDigit, false, 0, maxArgumentIndex.get)
-
-      conversionChar match {
-        case 'c' | 'C' => checkCharacterConversion(partIndex, flags, hasPrecision, precision)
-        case 'd' | 'o' | 'x' | 'X' => checkIntegralConversion(partIndex, argType, conversionChar, flags, hasPrecision, precision)
-        case 'e' | 'E' |'f' | 'g' | 'G' | 'a' | 'A' => checkFloatingPointConversion(partIndex, conversionChar, flags, hasPrecision, precision)
-        case 't' | 'T' => checkTimeConversion(partIndex, part, conversion, flags, hasPrecision, precision)
-        case 'b' | 'B' | 'h' | 'H' | 'S' | 's' => checkGeneralConversion(partIndex, argType, conversionChar, flags)
-        case 'n' | '%' => checkSpecials(partIndex, conversionChar, hasPrecision, precision, hasWidth, width, flags)
-        case illegal => reporter.partError("illegal conversion character '" + illegal + "'", partIndex, conversion)
-      }
-
-      (if (argType.isEmpty) None else Some(argType.get, (partIndex - 1)), conversionChar, flags)
-    }
-
-    /** Checks whether the argument type, if there is one, type checks with the formatting parameters
-     *
-     *  @param partIndex the index of the part, that we are checking, inside the parts
-     *  @param conversionChar the character used for the conversion
-     *  @param argument an option containing the type and index of the argument, None if there is no argument
-     *  @param flags the flags used for the formatting
-     *  @param formattingStart the index in the part where the formatting substring starts, i.e. where the '%' is
-     *  @return reports an error/warning if the formatting parameters are not allowed/wrong depending on the type, nothing otherwise
-     */
-    def checkArgTypeWithConversion(partIndex : Int, conversionChar : Char, argument : Option[(Type, Int)], flags : List[(Char, Int)], formattingStart : Int) = {
-      if (argument.nonEmpty)
-        checkTypeWithArgs(argument.get, conversionChar, partIndex, flags)
-      else
-        checkTypeWithoutArgs(conversionChar, partIndex, flags, formattingStart)
-    }
-
-    /** Checks whether the argument type checks with the formatting parameters
-     *
-     *  @param argument the given argument to check
-     *  @param conversionChar the conversion parameter inside the formatting String
-     *  @param partIndex index of the part inside the String Context
-     *  @param flags the list of flags, and their index, used inside the formatting String
-     *  @return reports an error if the argument type does not correspond with the conversion character,
-     *  nothing otherwise
-     */
-    def checkTypeWithArgs(argument : (Type, Int), conversionChar : Char, partIndex : Int, flags : List[(Char, Int)]) = {
-      def booleans = List(defn.BooleanType, defn.NullType)
-      def dates = List(defn.LongType, defn.JavaCalendarClass.typeRef, defn.JavaDateClass.typeRef)
-      def floatingPoints = List(defn.DoubleType, defn.FloatType, defn.JavaBigDecimalClass.typeRef)
-      def integral = List(defn.IntType, defn.LongType, defn.ShortType, defn.ByteType, defn.JavaBigIntegerClass.typeRef)
-      def character = List(defn.CharType, defn.ByteType, defn.ShortType, defn.IntType)
-
-      val (argType, argIndex) = argument
-      conversionChar match {
-        case 'c' | 'C' => checkSubtype(argType, "Char", argIndex, character)
-        case 'd' | 'o' | 'x' | 'X' => {
-          checkSubtype(argType, "Int", argIndex, integral)
-          if (conversionChar != 'd') {
-            val notAllowedFlagOnCondition = List(('+', !(argType <:< defn.JavaBigIntegerClass.typeRef), "only use '+' for BigInt conversions to o, x, X"),
-            (' ', !(argType <:< defn.JavaBigIntegerClass.typeRef), "only use ' ' for BigInt conversions to o, x, X"),
-            ('(', !(argType <:< defn.JavaBigIntegerClass.typeRef), "only use '(' for BigInt conversions to o, x, X"),
-            (',', true, "',' only allowed for d conversion of integral types"))
-            checkFlags(partIndex, flags, notAllowedFlagOnCondition)
-          }
-        }
-        case 'e' | 'E' |'f' | 'g' | 'G' | 'a' | 'A' => checkSubtype(argType, "Double", argIndex, floatingPoints)
-        case 't' | 'T' => checkSubtype(argType, "Date", argIndex, dates)
-        case 'b' | 'B' => checkSubtype(argType, "Boolean", argIndex, booleans)
-        case 'h' | 'H' | 'S' | 's' =>
-          if !(argType <:< defn.JavaFormattableClass.typeRef) then
-            for flag <- flags; if flag._1 == '#' do
-              reporter.argError("type mismatch;\n found   : " + argType.widen.show.stripPrefix("scala.Predef.").stripPrefix("java.lang.").stripPrefix("scala.") + "\n required: java.util.Formattable", argIndex)
-        case 'n' | '%' =>
-        case illegal =>
-      }
-    }
-
-    /** Reports error when the formatting parameter require a specific type but no argument is given
-     *
-     *  @param conversionChar the conversion parameter inside the formatting String
-     *  @param partIndex index of the part inside the String Context
-     *  @param flags the list of flags, and their index, used inside the formatting String
-     *  @param formattingStart the index in the part where the formatting substring starts, i.e. where the '%' is
-     *  @return reports an error if the formatting parameter refer to the type of the parameter but no parameter is given
-     *  nothing otherwise
-     */
-    def checkTypeWithoutArgs(conversionChar : Char, partIndex : Int, flags : List[(Char, Int)], formattingStart : Int) = {
-      conversionChar match {
-          case 'o' | 'x' | 'X' => {
-            val notAllowedFlagOnCondition = List(('+', true, "only use '+' for BigInt conversions to o, x, X"),
-            (' ', true, "only use ' ' for BigInt conversions to o, x, X"),
-            ('(', true, "only use '(' for BigInt conversions to o, x, X"),
-            (',', true, "',' only allowed for d conversion of integral types"))
-            checkFlags(partIndex, flags, notAllowedFlagOnCondition)
-          }
-          case _ => //OK
-        }
-    }
-
-    /** Checks that a given part of the String Context respects every formatting constraint per parameter
-     *
-     *  @param part a particular part of the String Context
-     *  @param start the index from which we start checking the part
-     *  @param argument an Option containing the argument corresponding to the part and its index in the list of args,
-     *  None if no args are specified.
-     *  @param maxArgumentIndex an Option containing the maximum argument index possible, None if no args are specified
-     *  @return a list with all the elements of the conversion per formatting string
-     */
-    def checkPart(part : String, start : Int, argument : Option[(Int, Tree)], maxArgumentIndex : Option[Int]) : List[(Option[(Type, Int)], Char, List[(Char, Int)])] = {
-      reporter.resetReported()
-      val hasFormattingSubstring = getFormattingSubstring(part, part.size, start)
-      if (hasFormattingSubstring.nonEmpty) {
-        val formattingStart = hasFormattingSubstring.get
-        var nextStart = formattingStart
-
-        argument match {
-          case Some(argIndex, arg) => {
-            val (hasArgumentIndex, argumentIndex, flags, hasWidth, width, hasPrecision, precision, hasRelative, relativeIndex, conversion) = getFormatSpecifiers(part, argIndex, argIndex + 1, false, formattingStart)
-            if (!reporter.hasReported()){
-              val conversionWithType = checkFormatSpecifiers(argIndex + 1, hasArgumentIndex, argumentIndex, Some(argIndex + 1), start == 0, maxArgumentIndex, hasRelative, hasWidth, width, hasPrecision, precision, flags, conversion, Some(arg.tpe), part)
-              nextStart = conversion + 1
-              conversionWithType :: checkPart(part, nextStart, argument, maxArgumentIndex)
-            } else checkPart(part, conversion + 1, argument, maxArgumentIndex)
-          }
-          case None => {
-            val (hasArgumentIndex, argumentIndex, flags, hasWidth, width, hasPrecision, precision, hasRelative, relativeIndex, conversion) = getFormatSpecifiers(part, 0, 0, true, formattingStart)
-            if (hasArgumentIndex && !(part.charAt(argumentIndex).asDigit == 1 && (part.charAt(conversion) == 'n' || part.charAt(conversion) == '%')))
-              reporter.partError("Argument index out of range", 0, argumentIndex)
-            if (hasRelative)
-              reporter.partError("No last arg", 0, relativeIndex)
-            if (!reporter.hasReported()){
-              val conversionWithType = checkFormatSpecifiers(0, hasArgumentIndex, argumentIndex, None, start == 0, maxArgumentIndex, hasRelative, hasWidth, width, hasPrecision, precision, flags, conversion, None, part)
-              nextStart = conversion + 1
-              if (!reporter.hasReported() && part.charAt(conversion) != '%' && part.charAt(conversion) != 'n' && !hasArgumentIndex && !hasRelative)
-                reporter.partError("conversions must follow a splice; use %% for literal %, %n for newline", 0, part.indexOf('%'))
-              conversionWithType :: checkPart(part, nextStart, argument, maxArgumentIndex)
-            } else checkPart(part, conversion + 1, argument, maxArgumentIndex)
-          }
-        }
-      } else {
-        reporter.restoreReported()
-        Nil
-      }
-    }
-
-    val argument = args.size
-
-    // check validity of formatting
-    checkSizes(parts0.size - 1, argument)
-
-    // add default format
-    val parts = addDefaultFormat(parts0)
-
-    if (!parts.isEmpty && !reporter.hasReported()) {
-      if (parts.size == 1 && args.size == 0 && parts.head.size != 0){
-        val argTypeWithConversion = checkPart(parts.head, 0, None, None)
-        if (!reporter.hasReported())
-          for ((argument, conversionChar, flags) <- argTypeWithConversion)
-            checkArgTypeWithConversion(0, conversionChar, argument, flags, parts.head.indexOf('%'))
-      } else {
-        val partWithArgs = parts.tail.zip(args)
-        for (i <- (0 until args.size)){
-          val (part, arg) = partWithArgs(i)
-          val argTypeWithConversion = checkPart(part, 0, Some((i, arg)), Some(args.size))
-          if (!reporter.hasReported())
-            for ((argument, conversionChar, flags) <- argTypeWithConversion)
-              checkArgTypeWithConversion(i + 1, conversionChar, argument, flags, parts(i).indexOf('%'))
-        }
-      }
-    }
-
-    parts.mkString
-  }
-}
diff --git a/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala b/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala
index 8f4aa6af8783..b8e6300f4e04 100644
--- a/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala
+++ b/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala
@@ -13,155 +13,139 @@ import dotty.tools.dotc.core.Types._
 import dotty.tools.dotc.transform.MegaPhase.MiniPhase
 import dotty.tools.dotc.typer.ConstFold
 
-/**
-  * MiniPhase to transform s and raw string interpolators from using StringContext to string
-  * concatenation. Since string concatenation uses the Java String builder, we get a performance
-  * improvement in terms of these two interpolators.
-  *
-  * More info here:
-  * https://medium.com/@dkomanov/scala-string-interpolation-performance-21dc85e83afd
-  */
-class StringInterpolatorOpt extends MiniPhase {
-  import tpd._
+/** MiniPhase to transform s and raw string interpolators from using StringContext to string
+ *  concatenation. Since string concatenation uses the Java String builder, we get a performance
+ *  improvement in terms of these two interpolators.
+ *
+ *  More info here:
+ *  https://medium.com/@dkomanov/scala-string-interpolation-performance-21dc85e83afd
+ */
+class StringInterpolatorOpt extends MiniPhase:
+  import tpd.*
 
-  override def phaseName: String = "stringInterpolatorOpt"
+  override def phaseName: String = StringInterpolatorOpt.name
 
-  override def checkPostCondition(tree: tpd.Tree)(using Context): Unit = {
-    tree match {
+  override def description: String = StringInterpolatorOpt.description
+
+  override def checkPostCondition(tree: tpd.Tree)(using Context): Unit =
+    tree match
       case tree: RefTree =>
         val sym = tree.symbol
-        assert(sym != defn.StringContext_raw && sym != defn.StringContext_s,
+        assert(sym != defn.StringContext_raw && sym != defn.StringContext_s && sym != defn.StringContext_f,
           i"$tree in ${ctx.owner.showLocated} should have been rewritten by phase $phaseName")
       case _ =>
-    }
-  }
 
   /** Matches a list of constant literals */
-  private object Literals {
-    def unapply(tree: SeqLiteral)(using Context): Option[List[Literal]] = {
-      tree.elems match {
-        case literals if literals.forall(_.isInstanceOf[Literal]) =>
-          Some(literals.map(_.asInstanceOf[Literal]))
+  private object Literals:
+    def unapply(tree: SeqLiteral)(using Context): Option[List[Literal]] =
+      tree.elems match
+        case literals if literals.forall(_.isInstanceOf[Literal]) => Some(literals.map(_.asInstanceOf[Literal]))
         case _ => None
-      }
-    }
-  }
 
-  private object StringContextApply {
-    def unapply(tree: Select)(using Context): Boolean = {
-      tree.symbol.eq(defn.StringContextModule_apply) &&
-      tree.qualifier.symbol.eq(defn.StringContextModule)
-    }
-  }
+  private object StringContextApply:
+    def unapply(tree: Select)(using Context): Boolean =
+      (tree.symbol eq defn.StringContextModule_apply) && (tree.qualifier.symbol eq defn.StringContextModule)
 
   /** Matches an s or raw string interpolator */
-  private object SOrRawInterpolator {
-    def unapply(tree: Tree)(using Context): Option[(List[Literal], List[Tree])] = {
-      tree match {
-        case Apply(Select(Apply(StringContextApply(), List(Literals(strs))), _),
-        List(SeqLiteral(elems, _))) if elems.length == strs.length - 1 =>
-          Some(strs, elems)
+  private object SOrRawInterpolator:
+    def unapply(tree: Tree)(using Context): Option[(List[Literal], List[Tree])] =
+      tree match
+        case Apply(Select(Apply(StringContextApply(), List(Literals(strs))), _), List(SeqLiteral(elems, _)))
+        if elems.length == strs.length - 1 => Some(strs, elems)
         case _ => None
-      }
-    }
-  }
 
   //Extract the position from InvalidUnicodeEscapeException
   //which due to bincompat reasons is unaccessible.
   //TODO: remove once there is less restrictive bincompat
-  private object InvalidEscapePosition {
-    def unapply(t: Throwable): Option[Int] = t match {
+  private object InvalidEscapePosition:
+    def unapply(t: Throwable): Option[Int] = t match
       case iee: StringContext.InvalidEscapeException => Some(iee.index)
-      case il: IllegalArgumentException => il.getMessage() match {
-          case s"""invalid unicode escape at index $index of $_""" => index.toIntOption
-          case _ => None
-      }
+      case iae: IllegalArgumentException => iae.getMessage() match
+        case s"""invalid unicode escape at index $index of $_""" => index.toIntOption
+        case _ => None
       case _ => None
-    }
-  }
 
-  /**
-    * Match trees that resemble s and raw string interpolations. In the case of the s
-    * interpolator, escapes the string constants. Exposes the string constants as well as
-    * the variable references.
-    */
-  private object StringContextIntrinsic {
-    def unapply(tree: Apply)(using Context): Option[(List[Literal], List[Tree])] = {
-      tree match {
+  /** Match trees that resemble s and raw string interpolations. In the case of the s
+   *  interpolator, escapes the string constants. Exposes the string constants as well as
+   *  the variable references.
+   */
+  private object StringContextIntrinsic:
+    def unapply(tree: Apply)(using Context): Option[(List[Literal], List[Tree])] =
+      tree match
         case SOrRawInterpolator(strs, elems) =>
-          if (tree.symbol == defn.StringContext_raw) Some(strs, elems)
-          else { // tree.symbol == defn.StringContextS
+          if tree.symbol == defn.StringContext_raw then Some(strs, elems)
+          else // tree.symbol == defn.StringContextS
             import dotty.tools.dotc.util.SourcePosition
             var stringPosition: SourcePosition = null
-            try {
-              val escapedStrs = strs.map(str => {
+            try
+              val escapedStrs = strs.map { str =>
                 stringPosition = str.sourcePos
                 val escaped = StringContext.processEscapes(str.const.stringValue)
                 cpy.Literal(str)(Constant(escaped))
-              })
+              }
               Some(escapedStrs, elems)
-            } catch {
-              case t @ InvalidEscapePosition(p) => {
+            catch
+              case t @ InvalidEscapePosition(p) =>
                 val errorSpan = stringPosition.span.startPos.shift(p)
                 val errorPosition = stringPosition.withSpan(errorSpan)
                 report.error(t.getMessage() + "\n", errorPosition)
                 None
-              }
-            }
-          }
         case _ => None
-      }
-    }
-  }
 
-  override def transformApply(tree: Apply)(using Context): Tree = {
+  override def transformApply(tree: Apply)(using Context): Tree =
+    def mkConcat(strs: List[Literal], elems: List[Tree]): Tree =
+      val stri = strs.iterator
+      val elemi = elems.iterator
+      var result: Tree = stri.next
+      def concat(tree: Tree): Unit =
+        result = result.select(defn.String_+).appliedTo(tree).withSpan(tree.span)
+      while elemi.hasNext
+      do
+        concat(elemi.next)
+        val str = stri.next
+        if !str.const.stringValue.isEmpty then concat(str)
+      result
+    end mkConcat
     val sym = tree.symbol
-    val isInterpolatedMethod = // Test names first to avoid loading scala.StringContext if not used
-      (sym.name == nme.raw_ && sym.eq(defn.StringContext_raw)) ||
-      (sym.name == nme.f && sym.eq(defn.StringContext_f)) ||
-      (sym.name == nme.s && sym.eq(defn.StringContext_s))
-    if (isInterpolatedMethod)
-      tree match {
+    // Test names first to avoid loading scala.StringContext if not used, and common names first
+    val isInterpolatedMethod =
+      sym.name match
+        case nme.s    => sym eq defn.StringContext_s
+        case nme.raw_ => sym eq defn.StringContext_raw
+        case nme.f    => sym eq defn.StringContext_f
+        case _        => false
+    // Perform format checking and normalization, then make it StringOps(fmt).format(args1) with tweaked args
+    def transformF(fun: Tree, args: Tree): Tree =
+      val (fmt, args1) = FormatInterpolatorTransform.checked(fun, args)
+      resolveConstructor(defn.StringOps.typeRef, List(fmt))
+        .select(nme.format)
+        .appliedTo(args1)
+    // Starting with Scala 2.13, s and raw are macros in the standard
+    // library, so we need to expand them manually.
+    // sc.s(args)    -->   standardInterpolator(processEscapes, args, sc.parts)
+    // sc.raw(args)  -->   standardInterpolator(x => x,         args, sc.parts)
+    def transformS(fun: Tree, args: Tree, isRaw: Boolean): Tree =
+      val pre = fun match
+        case Select(pre, _) => pre
+        case intp: Ident    => tpd.desugarIdentPrefix(intp)
+      val stringToString = defn.StringContextModule_processEscapes.info.asInstanceOf[MethodType]
+      val process = tpd.Lambda(stringToString, args =>
+        if isRaw then args.head else ref(defn.StringContextModule_processEscapes).appliedToTermArgs(args)
+      )
+      evalOnce(pre) { sc =>
+        val parts = sc.select(defn.StringContext_parts)
+        ref(defn.StringContextModule_standardInterpolator)
+          .appliedToTermArgs(List(process, args, parts))
+      }
+    end transformS
+    // begin transformApply
+    if isInterpolatedMethod then
+      (tree: @unchecked) match
         case StringContextIntrinsic(strs: List[Literal], elems: List[Tree]) =>
-          val stri = strs.iterator
-          val elemi = elems.iterator
-          var result: Tree = stri.next
-          def concat(tree: Tree): Unit = {
-            result = result.select(defn.String_+).appliedTo(tree).withSpan(tree.span)
-          }
-          while (elemi.hasNext) {
-            concat(elemi.next)
-            val str = stri.next
-            if (!str.const.stringValue.isEmpty) concat(str)
-          }
-          result
-        case Apply(intp, args :: Nil) if sym.eq(defn.StringContext_f) =>
-          val partsStr = StringContextChecker.checkedParts(intp, args).mkString
-          resolveConstructor(defn.StringOps.typeRef, List(Literal(Constant(partsStr))))
-            .select(nme.format)
-            .appliedTo(args)
-        // Starting with Scala 2.13, s and raw are macros in the standard
-        // library, so we need to expand them manually.
-        // sc.s(args)    -->   standardInterpolator(processEscapes, args, sc.parts)
-        // sc.raw(args)  -->   standardInterpolator(x => x,         args, sc.parts)
+          mkConcat(strs, elems)
         case Apply(intp, args :: Nil) =>
-          val pre = intp match {
-            case Select(pre, _) => pre
-            case intp: Ident => tpd.desugarIdentPrefix(intp)
-          }
-          val isRaw = sym eq defn.StringContext_raw
-          val stringToString = defn.StringContextModule_processEscapes.info.asInstanceOf[MethodType]
-
-          val process = tpd.Lambda(stringToString, args =>
-            if (isRaw) args.head else ref(defn.StringContextModule_processEscapes).appliedToTermArgs(args))
-
-          evalOnce(pre) { sc =>
-            val parts = sc.select(defn.StringContext_parts)
-
-            ref(defn.StringContextModule_standardInterpolator)
-              .appliedToTermArgs(List(process, args, parts))
-          }
-      }
+          if sym eq defn.StringContext_f then transformF(intp, args)
+          else transformS(intp, args, isRaw = sym eq defn.StringContext_raw)
     else
       tree.tpe match
         case _: ConstantType => tree
@@ -169,12 +153,12 @@ class StringInterpolatorOpt extends MiniPhase {
           ConstFold.Apply(tree).tpe match
             case ConstantType(x) => Literal(x).withSpan(tree.span).ensureConforms(tree.tpe)
             case _ => tree
-  }
 
-  override def transformSelect(tree: Select)(using Context): Tree = {
+  override def transformSelect(tree: Select)(using Context): Tree =
     ConstFold.Select(tree).tpe match
       case ConstantType(x) => Literal(x).withSpan(tree.span).ensureConforms(tree.tpe)
       case _ => tree
-  }
 
-}
+object StringInterpolatorOpt:
+  val name: String = "stringInterpolatorOpt"
+  val description: String = "optimize s, f and raw string interpolators"
diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala
index 001f8a711913..13310299ed00 100644
--- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala
+++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala
@@ -72,7 +72,7 @@ case class Typ(tp: Type, decomposed: Boolean = true) extends Space
 case class Prod(tp: Type, unappTp: TermRef, params: List[Space]) extends Space
 
 /** Union of spaces */
-case class Or(spaces: List[Space]) extends Space
+case class Or(spaces: Seq[Space]) extends Space
 
 /** abstract space logic */
 trait SpaceLogic {
@@ -105,53 +105,46 @@ trait SpaceLogic {
   def signature(unapp: TermRef, scrutineeTp: Type, argLen: Int): List[Type]
 
   /** Get components of decomposable types */
-  def decompose(tp: Type): List[Space]
+  def decompose(tp: Type): List[Typ]
 
   /** Whether the extractor covers the given type */
-  def covers(unapp: TermRef, scrutineeTp: Type): Boolean
+  def covers(unapp: TermRef, scrutineeTp: Type, argLen: Int): Boolean
 
   /** Display space in string format */
   def show(sp: Space): String
 
-  /** Simplify space using the laws, there's no nested union after simplify
-   *
-   *  @param aggressive if true and OR space has less than 5 components, `simplify` will
-   *                    collapse `sp1 | sp2` to `sp1` if `sp2` is a subspace of `sp1`.
-   *
-   *                    This reduces noise in counterexamples.
-   */
-  def simplify(space: Space, aggressive: Boolean = false)(using Context): Space = trace(s"simplify ${show(space)}, aggressive = $aggressive --> ", debug, x => show(x.asInstanceOf[Space]))(space match {
+  /** Simplify space such that a space equal to `Empty` becomes `Empty` */
+  def simplify(space: Space)(using Context): Space = trace(s"simplify ${show(space)} --> ", debug, show)(space match {
     case Prod(tp, fun, spaces) =>
-      val sp = Prod(tp, fun, spaces.map(simplify(_)))
-      if (sp.params.contains(Empty)) Empty
+      val sps = spaces.map(simplify(_))
+      if (sps.contains(Empty)) Empty
       else if (canDecompose(tp) && decompose(tp).isEmpty) Empty
-      else sp
+      else Prod(tp, fun, sps)
     case Or(spaces) =>
-      val buf = new mutable.ListBuffer[Space]
-      def include(s: Space) = if s != Empty then buf += s
-      for space <- spaces do
-        simplify(space) match
-          case Or(ss) => ss.foreach(include)
-          case s => include(s)
-      val set = buf.toList
-
-      if (set.isEmpty) Empty
-      else if (set.size == 1) set.toList(0)
-      else if (aggressive && spaces.size < 5) {
-        val res = set.map(sp => (sp, set.filter(_ ne sp))).find {
-          case (sp, sps) =>
-            isSubspace(sp, Or(sps))
-        }
-        if (res.isEmpty) Or(set)
-        else simplify(Or(res.get._2), aggressive)
-      }
-      else Or(set)
+      val spaces2 = spaces.map(simplify(_)).filter(_ != Empty)
+      if spaces2.isEmpty then Empty
+      else if spaces2.lengthCompare(1) == 0 then spaces2.head
+      else Or(spaces2)
     case Typ(tp, _) =>
       if (canDecompose(tp) && decompose(tp).isEmpty) Empty
       else space
     case _ => space
   })
 
+  /** Remove a space if it's a subspace of remaining spaces
+   *
+   *  Note: `dedup` will return the same result if the sequence >= 10
+   */
+  def dedup(spaces: Seq[Space])(using Context): Seq[Space] =
+    if (spaces.lengthCompare(1) <= 0 || spaces.lengthCompare(10) >= 0) spaces
+    else {
+      val res = spaces.map(sp => (sp, spaces.filter(_ ne sp))).find {
+        case (sp, sps) => isSubspace(sp, Or(LazyList(sps: _*)))
+      }
+      if (res.isEmpty) spaces
+      else res.get._2
+    }
+
   /** Flatten space to get rid of `Or` for pretty print */
   def flatten(space: Space)(using Context): Seq[Space] = space match {
     case Prod(tp, fun, spaces) =>
@@ -172,7 +165,7 @@ trait SpaceLogic {
   }
 
   /** Is `a` a subspace of `b`? Equivalent to `a - b == Empty`, but faster */
-  def isSubspace(a: Space, b: Space)(using Context): Boolean = trace(s"${show(a)} < ${show(b)}", debug) {
+  def isSubspace(a: Space, b: Space)(using Context): Boolean = trace(s"isSubspace(${show(a)}, ${show(b)})", debug) {
     def tryDecompose1(tp: Type) = canDecompose(tp) && isSubspace(Or(decompose(tp)), b)
     def tryDecompose2(tp: Type) = canDecompose(tp) && isSubspace(a, Or(decompose(tp)))
 
@@ -183,6 +176,8 @@ trait SpaceLogic {
         ss.forall(isSubspace(_, b))
       case (Typ(tp1, _), Typ(tp2, _)) =>
         isSubType(tp1, tp2)
+        || canDecompose(tp1) && tryDecompose1(tp1)
+        || canDecompose(tp2) && tryDecompose2(tp2)
       case (Typ(tp1, _), Or(ss)) =>  // optimization: don't go to subtraction too early
         ss.exists(isSubspace(a, _)) || tryDecompose1(tp1)
       case (_, Or(_)) =>
@@ -191,7 +186,7 @@ trait SpaceLogic {
         isSubType(tp1, tp2)
       case (Typ(tp1, _), Prod(tp2, fun, ss)) =>
         isSubType(tp1, tp2)
-        && covers(fun, tp1)
+        && covers(fun, tp1, ss.length)
         && isSubspace(Prod(tp2, fun, signature(fun, tp2, ss.length).map(Typ(_, false))), b)
       case (Prod(_, fun1, ss1), Prod(_, fun2, ss2)) =>
         isSameUnapply(fun1, fun2) && ss1.zip(ss2).forall((isSubspace _).tupled)
@@ -199,14 +194,14 @@ trait SpaceLogic {
   }
 
   /** Intersection of two spaces  */
-  def intersect(a: Space, b: Space)(using Context): Space = trace(s"${show(a)} & ${show(b)}", debug, x => show(x.asInstanceOf[Space])) {
+  def intersect(a: Space, b: Space)(using Context): Space = trace(s"${show(a)} & ${show(b)}", debug, show) {
     def tryDecompose1(tp: Type) = intersect(Or(decompose(tp)), b)
     def tryDecompose2(tp: Type) = intersect(a, Or(decompose(tp)))
 
     (a, b) match {
       case (Empty, _) | (_, Empty) => Empty
-      case (_, Or(ss)) => Or(ss.map(intersect(a, _)).filterConserve(_ ne Empty))
-      case (Or(ss), _) => Or(ss.map(intersect(_, b)).filterConserve(_ ne Empty))
+      case (_, Or(ss)) => Or(ss.map(intersect(a, _)).filter(_ ne Empty))
+      case (Or(ss), _) => Or(ss.map(intersect(_, b)).filter(_ ne Empty))
       case (Typ(tp1, _), Typ(tp2, _)) =>
         if (isSubType(tp1, tp2)) a
         else if (isSubType(tp2, tp1)) b
@@ -217,21 +212,27 @@ trait SpaceLogic {
         if (isSubType(tp2, tp1)) b
         else if (canDecompose(tp1)) tryDecompose1(tp1)
         else if (isSubType(tp1, tp2)) a // problematic corner case: inheriting a case class
-        else Empty
+        else intersectUnrelatedAtomicTypes(tp1, tp2) match
+          case Typ(tp, _) => Prod(tp, fun, ss)
+          case sp         => sp
       case (Prod(tp1, fun, ss), Typ(tp2, _)) =>
         if (isSubType(tp1, tp2)) a
         else if (canDecompose(tp2)) tryDecompose2(tp2)
         else if (isSubType(tp2, tp1)) a  // problematic corner case: inheriting a case class
-        else Empty
+        else intersectUnrelatedAtomicTypes(tp1, tp2) match
+          case Typ(tp, _) => Prod(tp, fun, ss)
+          case sp         => sp
       case (Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) =>
-        if (!isSameUnapply(fun1, fun2)) Empty
+        if (!isSameUnapply(fun1, fun2)) intersectUnrelatedAtomicTypes(tp1, tp2) match
+          case Typ(tp, _) => Prod(tp, fun1, ss1)
+          case sp         => sp
         else if (ss1.zip(ss2).exists(p => simplify(intersect(p._1, p._2)) == Empty)) Empty
         else Prod(tp1, fun1, ss1.zip(ss2).map((intersect _).tupled))
     }
   }
 
   /** The space of a not covered by b */
-  def minus(a: Space, b: Space)(using Context): Space = trace(s"${show(a)} - ${show(b)}", debug, x => show(x.asInstanceOf[Space])) {
+  def minus(a: Space, b: Space)(using Context): Space = trace(s"${show(a)} - ${show(b)}", debug, show) {
     def tryDecompose1(tp: Type) = minus(Or(decompose(tp)), b)
     def tryDecompose2(tp: Type) = minus(a, Or(decompose(tp)))
 
@@ -245,16 +246,16 @@ trait SpaceLogic {
         else a
       case (Typ(tp1, _), Prod(tp2, fun, ss)) =>
         // rationale: every instance of `tp1` is covered by `tp2(_)`
-        if isSubType(tp1, tp2) && covers(fun, tp1) then
+        if isSubType(tp1, tp2) && covers(fun, tp1, ss.length) then
           minus(Prod(tp1, fun, signature(fun, tp1, ss.length).map(Typ(_, false))), b)
         else if canDecompose(tp1) then
           tryDecompose1(tp1)
         else
           a
-      case (_, Or(ss)) =>
-        ss.foldLeft(a)(minus)
       case (Or(ss), _) =>
         Or(ss.map(minus(_, b)))
+      case (_, Or(ss)) =>
+        ss.foldLeft(a)(minus)
       case (Prod(tp1, fun, ss), Typ(tp2, _)) =>
         // uncovered corner case: tp2 :< tp1, may happen when inheriting case class
         if (isSubType(tp1, tp2))
@@ -263,12 +264,11 @@ trait SpaceLogic {
            Empty
         else if (canDecompose(tp2))
           tryDecompose2(tp2)
-        else if (isSubType(tp2, tp1) &&covers(fun, tp2))
-          minus(a, Prod(tp1, fun, signature(fun, tp1, ss.length).map(Typ(_, false))))
         else
           a
       case (Prod(tp1, fun1, ss1), Prod(tp2, fun2, ss2)) =>
         if (!isSameUnapply(fun1, fun2)) return a
+        if (fun1.symbol.name == nme.unapply && ss1.length != ss2.length) return a
 
         val range = (0 until ss1.size).toList
         val cache = Array.fill[Space](ss2.length)(null)
@@ -282,7 +282,10 @@ trait SpaceLogic {
         else if cache.forall(sub => isSubspace(sub, Empty)) then Empty
         else
           // `(_, _, _) - (Some, None, _)` becomes `(None, _, _) | (_, Some, _) | (_, _, Empty)`
-          Or(range.map { i => Prod(tp1, fun1, ss1.updated(i, sub(i))) })
+          val spaces = LazyList(range: _*).flatMap { i =>
+            flatten(sub(i)).map(s => Prod(tp1, fun1, ss1.updated(i, s)))
+          }
+          Or(spaces)
     }
   }
 }
@@ -292,13 +295,13 @@ object SpaceEngine {
   /** Is the unapply or unapplySeq irrefutable?
    *  @param  unapp   The unapply function reference
    */
-  def isIrrefutable(unapp: TermRef)(using Context): Boolean = {
+  def isIrrefutable(unapp: TermRef, argLen: Int)(using Context): Boolean = {
     val unappResult = unapp.widen.finalResultType
     unappResult.isRef(defn.SomeClass)
     || unappResult <:< ConstantType(Constant(true)) // only for unapply
     || (unapp.symbol.is(Synthetic) && unapp.symbol.owner.linkedClass.is(Case))  // scala2 compatibility
     || unapplySeqTypeElemTp(unappResult).exists // only for unapplySeq
-    || productArity(unappResult) > 0
+    || isProductMatch(unappResult, argLen)
     || {
       val isEmptyTp = extractorMemberType(unappResult, nme.isEmpty, NoSourcePosition)
       isEmptyTp <:< ConstantType(Constant(false))
@@ -308,10 +311,10 @@ object SpaceEngine {
   /** Is the unapply or unapplySeq irrefutable?
    *  @param  unapp   The unapply function tree
    */
-  def isIrrefutable(unapp: tpd.Tree)(using Context): Boolean = {
+  def isIrrefutable(unapp: tpd.Tree, argLen: Int)(using Context): Boolean = {
     val fun1 = tpd.funPart(unapp)
     val funRef = fun1.tpe.asInstanceOf[TermRef]
-    isIrrefutable(funRef)
+    isIrrefutable(funRef, argLen)
   }
 }
 
@@ -326,7 +329,6 @@ class SpaceEngine(using Context) extends SpaceLogic {
   private val scalaConsType        = defn.ConsClass.typeRef
 
   private val constantNullType     = ConstantType(Constant(null))
-  private val constantNullSpace    = Typ(constantNullType)
 
   /** Does the given tree stand for the literal `null`? */
   def isNullLit(tree: Tree): Boolean = tree match {
@@ -334,13 +336,6 @@ class SpaceEngine(using Context) extends SpaceLogic {
     case _ => false
   }
 
-  /** Does the given space contain just the value `null`? */
-  def isNullSpace(space: Space): Boolean = space match {
-    case Typ(tpe, _) => tpe.dealias == constantNullType || tpe.isNullType
-    case Or(spaces) => spaces.forall(isNullSpace)
-    case _ => false
-  }
-
   override def intersectUnrelatedAtomicTypes(tp1: Type, tp2: Type): Space = trace(s"atomic intersection: ${AndType(tp1, tp2).show}", debug) {
     // Precondition: !isSubType(tp1, tp2) && !isSubType(tp2, tp1).
     if (!ctx.explicitNulls && (tp1.isNullType || tp2.isNullType)) {
@@ -351,9 +346,7 @@ class SpaceEngine(using Context) extends SpaceLogic {
       val res = TypeComparer.provablyDisjoint(tp1, tp2)
 
       if (res) Empty
-      else if (tp1.isSingleton) Typ(tp1, true)
-      else if (tp2.isSingleton) Typ(tp2, true)
-      else Typ(AndType(tp1, tp2), true)
+      else Typ(AndType(tp1, tp2), decomposed = true)
     }
   }
 
@@ -361,18 +354,15 @@ class SpaceEngine(using Context) extends SpaceLogic {
   def project(pat: Tree): Space = pat match {
     case Literal(c) =>
       if (c.value.isInstanceOf[Symbol])
-        Typ(c.value.asInstanceOf[Symbol].termRef, false)
+        Typ(c.value.asInstanceOf[Symbol].termRef, decomposed = false)
       else
-        Typ(ConstantType(c), false)
+        Typ(ConstantType(c), decomposed = false)
 
     case pat: Ident if isBackquoted(pat) =>
-      Typ(pat.tpe, false)
-
-    case Ident(nme.WILDCARD) =>
-      Or(Typ(erase(pat.tpe.stripAnnots), false) :: constantNullSpace :: Nil)
+      Typ(pat.tpe, decomposed = false)
 
     case Ident(_) | Select(_, _) =>
-      Typ(erase(pat.tpe.stripAnnots), false)
+      Typ(erase(pat.tpe.stripAnnots.widenSkolem, isValue = true), decomposed = false)
 
     case Alternative(trees) =>
       Or(trees.map(project(_)))
@@ -387,36 +377,41 @@ class SpaceEngine(using Context) extends SpaceLogic {
       val fun1 = funPart(fun)
       val funRef = fun1.tpe.asInstanceOf[TermRef]
       if (fun.symbol.name == nme.unapplySeq)
-        if (fun.symbol.owner == scalaSeqFactoryClass)
+        val (arity, elemTp, resultTp) = unapplySeqInfo(fun.tpe.widen.finalResultType, fun.srcPos)
+        if (fun.symbol.owner == scalaSeqFactoryClass && scalaListType.appliedTo(elemTp) <:< pat.tpe)
+          // The exhaustivity and reachability logic already handles decomposing sum types (into its subclasses)
+          // and product types (into its components).  To get better counter-examples for patterns that are of type
+          // List (or a super-type of list, like LinearSeq) we project them into spaces that use `::` and Nil.
+          // Doing so with a pattern of `case Seq() =>` with a scrutinee of type `Vector()` doesn't work because the
+          // space is then discarded leading to a false positive reachability warning, see #13931.
           projectSeq(pats)
         else {
-          val (arity, elemTp, resultTp) = unapplySeqInfo(fun.tpe.widen.finalResultType, fun.srcPos)
           if (elemTp.exists)
-            Prod(erase(pat.tpe.stripAnnots), funRef, projectSeq(pats) :: Nil)
+            Prod(erase(pat.tpe.stripAnnots, isValue = false), funRef, projectSeq(pats) :: Nil)
           else
-            Prod(erase(pat.tpe.stripAnnots), funRef, pats.take(arity - 1).map(project) :+ projectSeq(pats.drop(arity - 1)))
+            Prod(erase(pat.tpe.stripAnnots, isValue = false), funRef, pats.take(arity - 1).map(project) :+ projectSeq(pats.drop(arity - 1)))
         }
       else
-        Prod(erase(pat.tpe.stripAnnots), funRef, pats.map(project))
+        Prod(erase(pat.tpe.stripAnnots, isValue = false), funRef, pats.map(project))
 
     case Typed(pat @ UnApply(_, _, _), _) =>
       project(pat)
 
-    case Typed(expr, _) =>
-      Typ(erase(expr.tpe.stripAnnots), true)
+    case Typed(_, tpt) =>
+      Typ(erase(tpt.tpe.stripAnnots, isValue = true), decomposed = false)
 
     case This(_) =>
-      Typ(pat.tpe.stripAnnots, false)
+      Typ(pat.tpe.stripAnnots, decomposed = false)
 
     case EmptyTree =>         // default rethrow clause of try/catch, check tests/patmat/try2.scala
-      Typ(WildcardType, false)
+      Typ(WildcardType, decomposed = false)
 
     case Block(Nil, expr) =>
       project(expr)
 
     case _ =>
       // Pattern is an arbitrary expression; assume a skolem (i.e. an unknown value) of the pattern type
-      Typ(pat.tpe.narrow, false)
+      Typ(pat.tpe.narrow, decomposed = false)
   }
 
   private def project(tp: Type): Space = tp match {
@@ -464,29 +459,37 @@ class SpaceEngine(using Context) extends SpaceLogic {
    *       case (IntExpr(_), IntExpr(_)) =>
    *       case (BooleanExpr(_), BooleanExpr(_)) =>
    *     }
+   *
+   *  @param inArray whether `tp` is a type argument to `Array`
+   *  @param isValue whether `tp` is the type which match against values
+   *
+   *  If `isValue` is true, then pattern-bound symbols are erased to its upper bound.
+   *  This is needed to avoid spurious unreachable warnings. See tests/patmat/i6197.scala.
    */
-  private def erase(tp: Type, inArray: Boolean = false): Type = trace(i"$tp erased to", debug) {
+  private def erase(tp: Type, inArray: Boolean = false, isValue: Boolean = false): Type = trace(i"$tp erased to", debug) {
 
     tp match {
       case tp @ AppliedType(tycon, args) =>
         if tycon.typeSymbol.isPatternBound then return WildcardType
 
         val args2 =
-          if (tycon.isRef(defn.ArrayClass)) args.map(arg => erase(arg, inArray = true))
-          else args.map(arg => erase(arg, inArray = false))
-        tp.derivedAppliedType(erase(tycon, inArray), args2)
+          if (tycon.isRef(defn.ArrayClass)) args.map(arg => erase(arg, inArray = true, isValue = false))
+          else args.map(arg => erase(arg, inArray = false, isValue = false))
+        tp.derivedAppliedType(erase(tycon, inArray, isValue = false), args2)
 
       case tp @ OrType(tp1, tp2) =>
-        OrType(erase(tp1, inArray), erase(tp2, inArray), tp.isSoft)
+        OrType(erase(tp1, inArray, isValue), erase(tp2, inArray, isValue), tp.isSoft)
 
       case AndType(tp1, tp2) =>
-        AndType(erase(tp1, inArray), erase(tp2, inArray))
+        AndType(erase(tp1, inArray, isValue), erase(tp2, inArray, isValue))
 
       case tp @ RefinedType(parent, _, _) =>
-        erase(parent)
+        erase(parent, inArray, isValue)
 
       case tref: TypeRef if tref.symbol.isPatternBound =>
-        if (inArray) tref.underlying else WildcardType
+        if inArray then tref.underlying
+        else if isValue then tref.superType
+        else WildcardType
 
       case _ => tp
     }
@@ -509,15 +512,45 @@ class SpaceEngine(using Context) extends SpaceLogic {
     }
   }
 
+  /** Numeric literals, while being constant values of unrelated types (e.g. Char and Int),
+   *  when used in a case may end up matching at runtime, because their equals may returns true.
+   *  Because these are universally available, general purpose types, it would be good to avoid
+   *  returning false positive warnings, such as in `(c: Char) match { case 67 => ... }` emitting a
+   *  reachability warning on the case.  So the type `ConstantType(Constant(67, IntTag))` is
+   *  converted to `ConstantType(Constant(67, CharTag))`.  #12805 */
+  def convertConstantType(tp: Type, pt: Type): Type = tp match
+    case tp @ ConstantType(const) =>
+      val converted = const.convertTo(pt)
+      if converted == null then tp else ConstantType(converted)
+    case _ => tp
+
+  def isPrimToBox(tp: Type, pt: Type) =
+    tp.classSymbol.isPrimitiveValueClass && (defn.boxedType(tp).classSymbol eq pt.classSymbol)
+
+  /** Adapt types by performing primitive value unboxing or boxing, or numeric constant conversion.  #12805
+   *
+   *  This makes these isSubType cases work like this:
+   *  {{{
+   *   1      <:< Integer  => ( : Integer) <:< Integer  = true
+   *  ONE     <:< Int      => ( : Int)     <:< Int      = true
+   *  Integer <:< (1: Int) => ( : Int)     <:< (1: Int) = false
+   *  }}}
+   */
+  def adaptType(tp1: Type, tp2: Type): Type = trace(i"adaptType($tp1, $tp2)", show = true) {
+    if      isPrimToBox(tp1, tp2) then defn.boxedType(tp1).narrow
+    else if isPrimToBox(tp2, tp1) then defn.unboxedType(tp1).narrow
+    else convertConstantType(tp1, tp2)
+  }
+
+  private val isSubspaceCache = mutable.HashMap.empty[(Space, Space, Context), Boolean]
+
+  override def isSubspace(a: Space, b: Space)(using Context): Boolean =
+    isSubspaceCache.getOrElseUpdate((a, b, ctx), super.isSubspace(a, b))
+
   /** Is `tp1` a subtype of `tp2`?  */
-  def isSubType(tp1: Type, tp2: Type): Boolean = {
-    debug.println(TypeComparer.explained(_.isSubType(tp1, tp2)))
-    val res = if (ctx.explicitNulls) {
-      tp1 <:< tp2
-    } else {
-      (tp1 != constantNullType || tp2 == constantNullType) && tp1 <:< tp2
-    }
-    res
+  def isSubType(tp1: Type, tp2: Type): Boolean = trace(i"$tp1 <:< $tp2", debug, show = true) {
+    if tp1 == constantNullType && !ctx.explicitNulls then tp2 == constantNullType
+    else adaptType(tp1, tp2) <:< tp2
   }
 
   def isSameUnapply(tp1: TermRef, tp2: TermRef): Boolean =
@@ -535,7 +568,7 @@ class SpaceEngine(using Context) extends SpaceLogic {
       case mt: MethodType => mt
       case pt: PolyType   =>
         inContext(ctx.fresh.setExploreTyperState()) {
-          val tvars = pt.paramInfos.map(newTypeVar)
+          val tvars = pt.paramInfos.map(newTypeVar(_))
           val mt = pt.instantiate(tvars).asInstanceOf[MethodType]
           scrutineeTp <:< mt.paramInfos(0)
           // force type inference to infer a narrower type: could be singleton
@@ -590,21 +623,34 @@ class SpaceEngine(using Context) extends SpaceLogic {
   }
 
   /** Whether the extractor covers the given type */
-  def covers(unapp: TermRef, scrutineeTp: Type): Boolean =
-    SpaceEngine.isIrrefutable(unapp) || unapp.symbol == defn.TypeTest_unapply && {
+  def covers(unapp: TermRef, scrutineeTp: Type, argLen: Int): Boolean =
+    SpaceEngine.isIrrefutable(unapp, argLen) || unapp.symbol == defn.TypeTest_unapply && {
       val AppliedType(_, _ :: tp :: Nil) = unapp.prefix.widen.dealias
       scrutineeTp <:< tp
     }
 
   /** Decompose a type into subspaces -- assume the type can be decomposed */
-  def decompose(tp: Type): List[Space] =
+  def decompose(tp: Type): List[Typ] =
     tp.dealias match {
       case AndType(tp1, tp2) =>
-        intersect(Typ(tp1, false), Typ(tp2, false)) match {
-          case Or(spaces) => spaces
-          case Empty => Nil
-          case space => List(space)
-        }
+        def decomposeComponent(tpA: Type, tpB: Type): List[Typ] =
+          decompose(tpA).flatMap {
+            case Typ(tp, _) =>
+              if tp <:< tpB then
+                Typ(tp, decomposed = true) :: Nil
+              else if tpB <:< tp then
+                Typ(tpB, decomposed = true) :: Nil
+              else if TypeComparer.provablyDisjoint(tp, tpB) then
+                Nil
+              else
+                Typ(AndType(tp, tpB), decomposed = true) :: Nil
+          }
+
+        if canDecompose(tp1) then
+          decomposeComponent(tp1, tp2)
+        else
+          decomposeComponent(tp2, tp1)
+
       case OrType(tp1, tp2) => List(Typ(tp1, true), Typ(tp2, true))
       case tp if tp.isRef(defn.BooleanClass) =>
         List(
@@ -643,7 +689,6 @@ class SpaceEngine(using Context) extends SpaceLogic {
         parts.map(Typ(_, true))
     }
 
-
   /** Abstract sealed types, or-types, Boolean and Java enums can be decomposed */
   def canDecompose(tp: Type): Boolean =
     val res = tp.dealias match
@@ -659,7 +704,7 @@ class SpaceEngine(using Context) extends SpaceLogic {
         || cls.isAllOf(JavaEnumTrait)
         || tp.isRef(defn.BooleanClass)
         || tp.isRef(defn.UnitClass)
-    debug.println(s"decomposable: ${tp.show} = $res")
+    //debug.println(s"decomposable: ${tp.show} = $res")
     res
 
   /** Show friendly type name with current scope in mind
@@ -743,6 +788,7 @@ class SpaceEngine(using Context) extends SpaceLogic {
   }
 
   def show(ss: Seq[Space]): String = ss.map(show).mkString(", ")
+
   /** Display spaces */
   def show(s: Space): String = {
     def params(tp: Type): List[Type] = tp.classSymbol.primaryConstructor.info.firstParamTypes
@@ -764,7 +810,7 @@ class SpaceEngine(using Context) extends SpaceLogic {
 
         val sym = tp.classSymbol
 
-        if (ctx.definitions.isTupleType(tp))
+        if (ctx.definitions.isTupleNType(tp))
           params(tp).map(_ => "_").mkString("(", ", ", ")")
         else if (scalaListType.isRef(sym))
           if (flattenList) "_*" else "_: List"
@@ -778,10 +824,10 @@ class SpaceEngine(using Context) extends SpaceLogic {
         else if (decomposed) "_: " + showType(tp, showTypeArgs = true)
         else "_"
       case Prod(tp, fun, params) =>
-        if (ctx.definitions.isTupleType(tp))
+        if (ctx.definitions.isTupleNType(tp))
           "(" + params.map(doShow(_)).mkString(", ") + ")"
         else if (tp.isRef(scalaConsType.symbol))
-          if (flattenList) params.map(doShow(_, flattenList)).mkString(", ")
+          if (flattenList) params.map(doShow(_, flattenList)).filter(_.nonEmpty).mkString(", ")
           else params.map(doShow(_, flattenList = true)).filter(!_.isEmpty).mkString("List(", ", ", ")")
         else {
           val sym = fun.symbol
@@ -797,23 +843,30 @@ class SpaceEngine(using Context) extends SpaceLogic {
   }
 
   private def exhaustivityCheckable(sel: Tree): Boolean = {
+    val seen = collection.mutable.Set.empty[Type]
+
     // Possible to check everything, but be compatible with scalac by default
     def isCheckable(tp: Type): Boolean =
-      !tp.hasAnnotation(defn.UncheckedAnnot) && {
-        val tpw = tp.widen.dealias
-        ctx.settings.YcheckAllPatmat.value ||
-        tpw.typeSymbol.is(Sealed) ||
-        tpw.isInstanceOf[OrType] ||
-        (tpw.isInstanceOf[AndType] && {
-          val and = tpw.asInstanceOf[AndType]
-          isCheckable(and.tp1) || isCheckable(and.tp2)
-        }) ||
-        tpw.isRef(defn.BooleanClass) ||
-        tpw.typeSymbol.isAllOf(JavaEnumTrait) ||
-        (defn.isTupleType(tpw) && tpw.argInfos.exists(isCheckable(_)))
+      val tpw = tp.widen.dealias
+      val classSym = tpw.classSymbol
+      classSym.is(Sealed) ||
+      tpw.isInstanceOf[OrType] ||
+      (tpw.isInstanceOf[AndType] && {
+        val and = tpw.asInstanceOf[AndType]
+        isCheckable(and.tp1) || isCheckable(and.tp2)
+      }) ||
+      tpw.isRef(defn.BooleanClass) ||
+      classSym.isAllOf(JavaEnumTrait) ||
+      classSym.is(Case) && {
+        if seen.add(tpw) then productSelectorTypes(tpw, sel.srcPos).exists(isCheckable(_))
+        else true // recursive case class: return true and other members can still fail the check
       }
 
-    val res = isCheckable(sel.tpe)
+    val res = !sel.tpe.hasAnnotation(defn.UncheckedAnnot) && {
+      ctx.settings.YcheckAllPatmat.value
+      || isCheckable(sel.tpe)
+    }
+
     debug.println(s"exhaustivity checkable: ${sel.show} = $res")
     res
   }
@@ -827,12 +880,28 @@ class SpaceEngine(using Context) extends SpaceLogic {
       }
     }.apply(false, tp)
 
+  /** Return the underlying type of non-module, non-constant, non-enum case singleton types.
+   *  Also widen ExprType to its result type, and rewrap any annotation wrappers.
+   *  For example, with `val opt = None`, widen `opt.type` to `None.type`. */
+  def toUnderlying(tp: Type)(using Context): Type = trace(i"toUnderlying($tp)", show = true)(tp match {
+    case _: ConstantType                            => tp
+    case tp: TermRef if tp.symbol.is(Module)        => tp
+    case tp: TermRef if tp.symbol.isAllOf(EnumCase) => tp
+    case tp: SingletonType                          => toUnderlying(tp.underlying)
+    case tp: ExprType                               => toUnderlying(tp.resultType)
+    case AnnotatedType(tp, annot)                   => AnnotatedType(toUnderlying(tp), annot)
+    case _                                          => tp
+  })
+
   def checkExhaustivity(_match: Match): Unit = {
     val Match(sel, cases) = _match
-    val selTyp = sel.tpe.widen.dealias
+    debug.println(i"checking exhaustivity of ${_match}")
 
     if (!exhaustivityCheckable(sel)) return
 
+    val selTyp = toUnderlying(sel.tpe).dealias
+    debug.println(i"selTyp = $selTyp")
+
     val patternSpace = Or(cases.foldLeft(List.empty[Space]) { (acc, x) =>
       val space = if (x.guard.isEmpty) project(x.pat) else Empty
       debug.println(s"${x.pat.show} ====> ${show(space)}")
@@ -842,14 +911,15 @@ class SpaceEngine(using Context) extends SpaceLogic {
     val checkGADTSAT = shouldCheckExamples(selTyp)
 
     val uncovered =
-      flatten(simplify(minus(project(selTyp), patternSpace), aggressive = true)).filter { s =>
+      flatten(simplify(minus(project(selTyp), patternSpace))).filter({ s =>
         s != Empty && (!checkGADTSAT || satisfiable(s))
-      }
+      })
 
 
     if uncovered.nonEmpty then
       val hasMore = uncovered.lengthCompare(6) > 0
-      report.warning(PatternMatchExhaustivity(show(uncovered.take(6)), hasMore), sel.srcPos)
+      val deduped = dedup(uncovered.take(6))
+      report.warning(PatternMatchExhaustivity(show(deduped), hasMore), sel.srcPos)
   }
 
   private def redundancyCheckable(sel: Tree): Boolean =
@@ -863,58 +933,58 @@ class SpaceEngine(using Context) extends SpaceLogic {
     && !sel.tpe.widen.isRef(defn.QuotedTypeClass)
 
   def checkRedundancy(_match: Match): Unit = {
-    val Match(sel, cases) = _match
-    val selTyp = sel.tpe.widen.dealias
+    val Match(sel, _) = _match
+    val cases = _match.cases.toIndexedSeq
+    debug.println(i"checking redundancy in $_match")
 
     if (!redundancyCheckable(sel)) return
 
-    val targetSpace =
-      if (ctx.explicitNulls || selTyp.classSymbol.isPrimitiveValueClass)
-        project(selTyp)
-      else
-        project(OrType(selTyp, constantNullType, soft = false))
+    val selTyp = toUnderlying(sel.tpe).dealias
+    debug.println(i"selTyp = $selTyp")
 
-    // in redundancy check, take guard as false in order to soundly approximate
-    def projectPrevCases(cases: List[CaseDef]): List[Space] =
-      cases.map { x =>
-        if (x.guard.isEmpty) project(x.pat)
-        else Empty
-      }
+    val isNullable = selTyp.classSymbol.isNullableClass
+    val targetSpace = if isNullable
+      then project(OrType(selTyp, constantNullType, soft = false))
+      else project(selTyp)
+    debug.println(s"targetSpace: ${show(targetSpace)}")
 
-    val spaces = projectPrevCases(cases)
+    var i        = 0
+    val len      = cases.length
+    var prevs    = List.empty[Space]
+    var deferred = List.empty[Tree]
 
-    (1 until cases.length).foreach { i =>
-      val pat = cases(i).pat
+    while (i < len) {
+      val CaseDef(pat, guard, _) = cases(i)
 
-      if (pat != EmptyTree) { // rethrow case of catch uses EmptyTree
-        val prevs = Or(spaces.take(i))
-        val curr = project(pat)
+      debug.println(i"case pattern: $pat")
 
-        debug.println(s"---------------reachable? ${show(curr)}")
-        debug.println(s"prev: ${show(prevs)}")
+      val curr = project(pat)
+      debug.println(i"reachable? ${show(curr)}")
 
-        var covered = simplify(intersect(curr, targetSpace))
-        debug.println(s"covered: $covered")
+      val prev = simplify(Or(prevs))
+      debug.println(s"prev: ${show(prev)}")
 
-        // `covered == Empty` may happen for primitive types with auto-conversion
-        // see tests/patmat/reader.scala  tests/patmat/byte.scala
-        if (covered == Empty && !isNullLit(pat)) covered = curr
+      val covered = simplify(intersect(curr, targetSpace))
+      debug.println(s"covered: ${show(covered)}")
 
-        if (isSubspace(covered, prevs)) {
+      if prev == Empty && covered == Empty then // defer until a case is reachable
+        deferred ::= pat
+      else {
+        for (pat <- deferred.reverseIterator)
           report.warning(MatchCaseUnreachable(), pat.srcPos)
+        if pat != EmptyTree // rethrow case of catch uses EmptyTree
+            && isSubspace(covered, prev)
+        then {
+          val nullOnly = isNullable && i == len - 1 && isWildcardArg(pat)
+          val msg = if nullOnly then MatchCaseOnlyNullWarning() else MatchCaseUnreachable()
+          report.warning(msg, pat.srcPos)
         }
-
-        // if last case is `_` and only matches `null`, produce a warning
-        // If explicit nulls are enabled, this check isn't needed because most of the cases
-        // that would trigger it would also trigger unreachability warnings.
-        if (!ctx.explicitNulls && i == cases.length - 1 && !isNullLit(pat) ) {
-          simplify(minus(covered, prevs)) match {
-            case Typ(`constantNullType`, _) =>
-              report.warning(MatchCaseOnlyNullWarning(), pat.srcPos)
-            case _ =>
-          }
-        }
+        deferred = Nil
       }
+
+      // in redundancy check, take guard as false in order to soundly approximate
+      prevs ::= (if guard.isEmpty then covered else Empty)
+      i += 1
     }
   }
 }
diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala b/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala
index 483622c2fb0c..da4c57afb3e6 100644
--- a/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala
+++ b/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala
@@ -54,6 +54,8 @@ class AddLocalJSFakeNews extends MiniPhase { thisPhase =>
 
   override def phaseName: String = AddLocalJSFakeNews.name
 
+  override def description: String = AddLocalJSFakeNews.description
+
   override def isEnabled(using Context): Boolean =
     ctx.settings.scalajs.value
 
@@ -96,4 +98,5 @@ class AddLocalJSFakeNews extends MiniPhase { thisPhase =>
 
 object AddLocalJSFakeNews {
   val name: String = "addLocalJSFakeNews"
+  val description: String = "adds fake new invocations to local JS classes in calls to `createLocalJSClass`"
 }
diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala
index d714151c66d5..c2685ee3c431 100644
--- a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala
+++ b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala
@@ -239,6 +239,8 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase =>
 
   override def phaseName: String = ExplicitJSClasses.name
 
+  override def description: String = ExplicitJSClasses.description
+
   private var MyState: Store.Location[MyState] = _
   private def myState(using Context) = ctx.store(MyState)
 
@@ -720,6 +722,7 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase =>
 
 object ExplicitJSClasses {
   val name: String = "explicitJSClasses"
+  val description: String = "make all JS classes explicit"
 
   val LocalJSClassValueName: UniqueNameKind = new UniqueNameKind("$jsclass")
 
diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala
index 6825ddcc1fef..57e5fc636ddc 100644
--- a/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala
+++ b/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala
@@ -26,9 +26,9 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn
 
 /** Utilities for JS exports handling. */
 object JSExportUtils {
-  private final val ExportPrefix = "$js$exported$"
-  private final val MethodExportPrefix = ExportPrefix + "meth$"
-  private final val PropExportPrefix = ExportPrefix + "prop$"
+  private inline val ExportPrefix = "$js$exported$"
+  private inline val MethodExportPrefix = ExportPrefix + "meth$"
+  private inline val PropExportPrefix = ExportPrefix + "prop$"
 
   /** Creates a name for an export specification. */
   def makeExportName(jsName: String, isProp: Boolean): TermName = {
diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala
index 0651e33c4d7d..a78adaff6522 100644
--- a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala
+++ b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala
@@ -92,6 +92,24 @@ object JSSymUtils {
     }
   }
 
+  /** Info about a Scala method param when called as JS method.
+   *
+   *  @param info
+   *    Parameter type (type of a single element if repeated).
+   *  @param repeated
+   *    Whether the parameter is repeated.
+   *  @param capture
+   *    Whether the parameter is a capture.
+   */
+  final class JSParamInfo(
+    val info: Type,
+    val repeated: Boolean = false,
+    val capture: Boolean = false
+  ) {
+    override def toString(): String =
+      s"ParamSpec($info, repeated = $repeated, capture = $capture)"
+  }
+
   extension (sym: Symbol) {
     /** Is this symbol a JavaScript type? */
     def isJSType(using Context): Boolean =
@@ -138,33 +156,6 @@ object JSSymUtils {
     def isJSBracketCall(using Context): Boolean =
       sym.hasAnnotation(jsdefn.JSBracketCallAnnot)
 
-    /** Is this symbol a default param accessor for a JS method?
-     *
-     *  For default param accessors of *constructors*, we need to test whether
-     *  the companion *class* of the owner is a JS type; not whether the owner
-     *  is a JS type.
-     */
-    def isJSDefaultParam(using Context): Boolean = {
-      sym.name.is(DefaultGetterName) && {
-        val owner = sym.owner
-        val methName = sym.name.exclude(DefaultGetterName)
-        if (methName == nme.CONSTRUCTOR) {
-          owner.linkedClass.isJSType
-        } else {
-          def isAttachedMethodExposed: Boolean =
-            owner.info.decl(methName).hasAltWith(_.symbol.isJSExposed)
-          owner.isJSType && (!owner.isNonNativeJSClass || isAttachedMethodExposed)
-        }
-      }
-    }
-
-    /** Is this symbol a default param accessor for the constructor of a native JS class? */
-    def isJSNativeCtorDefaultParam(using Context): Boolean = {
-      sym.name.is(DefaultGetterName)
-        && sym.name.exclude(DefaultGetterName) == nme.CONSTRUCTOR
-        && sym.owner.linkedClass.hasAnnotation(jsdefn.JSNativeAnnot)
-    }
-
     def jsCallingConvention(using Context): JSCallingConvention =
       JSCallingConvention.of(sym)
 
@@ -190,6 +181,43 @@ object JSSymUtils {
     def defaultJSName(using Context): String =
       if (sym.isTerm) sym.asTerm.name.unexpandedName.getterName.toString()
       else sym.name.unexpandedName.stripModuleClassSuffix.toString()
+
+    def jsParamInfos(using Context): List[JSParamInfo] = {
+      assert(sym.is(Method), s"trying to take JS param info of non-method: $sym")
+
+      def paramNamesAndTypes(using Context): List[(Names.TermName, Type)] =
+        sym.info.paramNamess.flatten.zip(sym.info.paramInfoss.flatten)
+
+      val paramInfosAtElimRepeated = atPhase(elimRepeatedPhase) {
+        val list =
+          for ((name, info) <- paramNamesAndTypes) yield {
+            val v =
+              if (info.isRepeatedParam) Some(info.repeatedToSingle.widenDealias)
+              else None
+            name -> v
+          }
+        list.toMap
+      }
+
+      val paramInfosAtElimEVT = atPhase(elimErasedValueTypePhase) {
+        paramNamesAndTypes.toMap
+      }
+
+      for ((paramName, paramInfoNow) <- paramNamesAndTypes) yield {
+        paramInfosAtElimRepeated.get(paramName) match {
+          case None =>
+            // This is a capture parameter introduced by erasure or lambdalift
+            new JSParamInfo(paramInfoNow, capture = true)
+
+          case Some(Some(info)) =>
+            new JSParamInfo(info, repeated = true)
+
+          case Some(None) =>
+            val info = paramInfosAtElimEVT.getOrElse(paramName, paramInfoNow)
+            new JSParamInfo(info)
+        }
+      }
+    }
   }
 
   private object JSUnaryOpMethodName {
diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala
index 0c4e4a13cfce..c6f1feba5e98 100644
--- a/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala
+++ b/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala
@@ -111,7 +111,9 @@ class JUnitBootstrappers extends MiniPhase {
   import JUnitBootstrappers._
   import ast.tpd._
 
-  def phaseName: String = "junitBootstrappers"
+  override def phaseName: String = JUnitBootstrappers.name
+
+  override def description: String = JUnitBootstrappers.description
 
   override def isEnabled(using Context): Boolean =
     super.isEnabled && ctx.settings.scalajs.value
@@ -223,7 +225,7 @@ class JUnitBootstrappers extends MiniPhase {
 
     DefDef(sym, {
       val metadata = for (test <- tests) yield {
-        val name = Literal(Constant(test.name.toString))
+        val name = Literal(Constant(test.name.mangledString))
         val ignored = Literal(Constant(test.hasAnnotation(junitdefn.IgnoreAnnotClass)))
         val testAnnot = test.getAnnotation(junitdefn.TestAnnotClass).get
 
@@ -266,7 +268,7 @@ class JUnitBootstrappers extends MiniPhase {
           val tp = junitdefn.NoSuchMethodExceptionType
           Throw(resolveConstructor(tp, nameParamRef :: Nil))
         } { (test, next) =>
-          If(Literal(Constant(test.name.toString)).select(defn.Any_equals).appliedTo(nameParamRef),
+          If(Literal(Constant(test.name.mangledString)).select(defn.Any_equals).appliedTo(nameParamRef),
             genTestInvocation(testClass, test, ref(castInstanceSym)),
             next)
         }
@@ -312,6 +314,8 @@ class JUnitBootstrappers extends MiniPhase {
 }
 
 object JUnitBootstrappers {
+  val name: String = "junitBootstrappers"
+  val description: String = "generate JUnit-specific bootstrapper classes for Scala.js"
 
   private object junitNme {
     val beforeClass: TermName = termName("beforeClass")
diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala
index 8e9e11aef048..c9cf71554bc6 100644
--- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala
+++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala
@@ -64,6 +64,8 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP
 
   override def phaseName: String = PrepJSInterop.name
 
+  override def description: String = PrepJSInterop.description
+
   override def isEnabled(using Context): Boolean =
     ctx.settings.scalajs.value
 
@@ -123,6 +125,8 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP
 
       checkInternalAnnotations(sym)
 
+      stripJSAnnotsOnExported(sym)
+
       /* Checks related to @js.native:
        * - if @js.native, verify that it is allowed in this context, and if
        *   yes, compute and store the JS native load spec
@@ -299,6 +303,14 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP
 
           super.transform(tree)
 
+        case _: Export =>
+          if enclosingOwner is OwnerKind.JSNative then
+            report.error("Native JS traits, classes and objects cannot contain exported definitions.", tree)
+          else if enclosingOwner is OwnerKind.JSTrait then
+            report.error("Non-native JS traits cannot contain exported definitions.", tree)
+
+          super.transform(tree)
+
         case _ =>
           super.transform(tree)
       }
@@ -457,7 +469,8 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP
       val kind = {
         if (!isJSNative) {
           if (sym.is(ModuleClass)) OwnerKind.JSMod
-          else OwnerKind.JSClass
+          else if (sym.is(Trait)) OwnerKind.JSTrait
+          else OwnerKind.JSNonTraitClass
         } else {
           if (sym.is(ModuleClass)) OwnerKind.JSNativeMod
           else OwnerKind.JSNativeClass
@@ -814,7 +827,29 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP
       super.transform(tree)
     }
 
+    /** Removes annotations from exported definitions (e.g. `export foo.bar`):
+     *  - `js.native`
+     *  - `js.annotation.*`
+     */
+    private def stripJSAnnotsOnExported(sym: Symbol)(using Context): Unit =
+      if !sym.is(Exported) then
+        return // only remove annotations from exported definitions
+
+      val JSNativeAnnot = jsdefn.JSNativeAnnot
+      val JSAnnotPackage = jsdefn.JSAnnotPackage
+
+      extension (sym: Symbol) def isJSAnnot =
+        (sym eq JSNativeAnnot) || (sym.owner eq JSAnnotPackage)
+
+      val newAnnots = sym.annotations.filterConserve(!_.symbol.isJSAnnot)
+      if newAnnots ne sym.annotations then
+        sym.annotations = newAnnots
+    end stripJSAnnotsOnExported
+
     private def checkRHSCallsJSNative(tree: ValOrDefDef, longKindStr: String)(using Context): Unit = {
+      if tree.symbol.is(Exported) then
+        return // we already report an error that exports are not allowed here, this prevents extra errors.
+
       // Check that the rhs is exactly `= js.native`
       tree.rhs match {
         case sel: Select if sel.symbol == jsdefn.JSPackage_native =>
@@ -960,6 +995,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP
 
 object PrepJSInterop {
   val name: String = "prepjsinterop"
+  val description: String = "additional checks and transformations for Scala.js"
 
   private final class OwnerKind private (private val baseKinds: Int) extends AnyVal {
     import OwnerKind._
@@ -992,10 +1028,12 @@ object PrepJSInterop {
     val JSNativeClass = new OwnerKind(0x04)
     /** A native JS object, which extends js.Any. */
     val JSNativeMod = new OwnerKind(0x08)
-    /** A non-native JS class/trait. */
-    val JSClass = new OwnerKind(0x10)
+    /** A non-native JS class (not a trait). */
+    val JSNonTraitClass = new OwnerKind(0x10)
+    /** A non-native JS trait. */
+    val JSTrait = new OwnerKind(0x20)
     /** A non-native JS object. */
-    val JSMod = new OwnerKind(0x20)
+    val JSMod = new OwnerKind(0x40)
 
     // Compound kinds
 
@@ -1005,12 +1043,12 @@ object PrepJSInterop {
     /** A native JS class/trait/object. */
     val JSNative = JSNativeClass | JSNativeMod
     /** A non-native JS class/trait/object. */
-    val JSNonNative = JSClass | JSMod
+    val JSNonNative = JSNonTraitClass | JSTrait | JSMod
     /** A JS type, i.e., something extending js.Any. */
     val JSType = JSNative | JSNonNative
 
     /** Any kind of class/trait, i.e., a Scala or JS class/trait. */
-    val AnyClass = ScalaClass | JSNativeClass | JSClass
+    val AnyClass = ScalaClass | JSNativeClass | JSNonTraitClass | JSTrait
   }
 
   /** Tests if the symbol extend `js.Any`.
diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala
index 48a5738b4074..1e324de33675 100644
--- a/compiler/src/dotty/tools/dotc/typer/Applications.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala
@@ -118,7 +118,7 @@ object Applications {
   }
 
   def tupleComponentTypes(tp: Type)(using Context): List[Type] =
-    tp.widenExpr.dealias match
+    tp.widenExpr.dealias.normalized match
     case tp: AppliedType =>
       if defn.isTupleClass(tp.tycon.typeSymbol) then
         tp.args
@@ -173,6 +173,7 @@ object Applications {
       val elemTp = unapplySeqTypeElemTp(tp)
       if (elemTp.exists) args.map(Function.const(elemTp))
       else if (isProductSeqMatch(tp, args.length, pos)) productSeqSelectors(tp, args.length, pos)
+      else if tp.derivesFrom(defn.NonEmptyTupleClass) then foldApplyTupleType(tp)
       else fallback
     }
 
@@ -189,14 +190,26 @@ object Applications {
         getUnapplySelectors(getTp, args, pos)
       else if (unapplyResult.widenSingleton isRef defn.BooleanClass)
         Nil
-      else if (defn.isProductSubType(unapplyResult))
+      else if (defn.isProductSubType(unapplyResult) && productArity(unapplyResult, pos) != 0)
         productSelectorTypes(unapplyResult, pos)
           // this will cause a "wrong number of arguments in pattern" error later on,
           // which is better than the message in `fail`.
+      else if unapplyResult.derivesFrom(defn.NonEmptyTupleClass) then
+        foldApplyTupleType(unapplyResult)
       else fail
     }
   }
 
+  def foldApplyTupleType(tp: Type)(using Context): List[Type] =
+    object tupleFold extends TypeAccumulator[List[Type]]:
+      override def apply(accum: List[Type], t: Type): List[Type] =
+        t match
+          case AppliedType(tycon, x :: x2 :: Nil) if tycon.typeSymbol == defn.PairClass =>
+            apply(x :: accum, x2)
+          case x => foldOver(accum, x)
+    end tupleFold
+    tupleFold(Nil, tp).reverse
+
   def wrapDefs(defs: mutable.ListBuffer[Tree], tree: Tree)(using Context): Tree =
     if (defs != null && defs.nonEmpty) tpd.Block(defs.toList, tree) else tree
 
@@ -210,6 +223,7 @@ object Applications {
         case Select(receiver, _) => receiver
         case mr => mr.tpe.normalizedPrefix match {
           case mr: TermRef => ref(mr)
+          case mr: ThisType => singleton(mr)
           case mr =>
             if testOnly then
               // In this case it is safe to skolemize now; we will produce a stable prefix for the actual call.
@@ -261,7 +275,20 @@ object Applications {
   /** Splice new method reference `meth` into existing application `app` */
   private def spliceMeth(meth: Tree, app: Tree)(using Context): Tree = app match {
     case Apply(fn, args) =>
-      spliceMeth(meth, fn).appliedToArgs(args)
+      // Constructors always have one leading non-implicit parameter list.
+      // Empty list is inserted for constructors where the first parameter list is implicit.
+      //
+      // Therefore, we need to ignore the first empty argument list.
+      // This is needed for the test tests/neg/i12344.scala
+      //
+      // see NamerOps.normalizeIfConstructor
+      //
+      if args == Nil
+         && !fn.isInstanceOf[Apply]
+         && app.tpe.isImplicitMethod
+         && fn.symbol.isConstructor
+      then meth
+      else spliceMeth(meth, fn).appliedToArgs(args)
     case TypeApply(fn, targs) =>
       // Note: It is important that the type arguments `targs` are passed in new trees
       // instead of being spliced in literally. Otherwise, a type argument to a default
@@ -397,7 +424,7 @@ trait Applications extends Compatibility {
      */
     @threadUnsafe lazy val methType: Type = liftedFunType.widen match {
       case funType: MethodType => funType
-      case funType: PolyType => constrained(funType).resultType
+      case funType: PolyType => instantiateWithTypeVars(funType)
       case tp => tp //was: funType
     }
 
@@ -419,15 +446,18 @@ trait Applications extends Compatibility {
 
     protected def init(): Unit = methType match {
       case methType: MethodType =>
-        // apply the result type constraint, unless method type is dependent
         val resultApprox = resultTypeApprox(methType)
-        if (!constrainResult(methRef.symbol, resultApprox, resultType))
-          if (ctx.typerState.isCommittable)
-            // defer the problem until after the application;
-            // it might be healed by an implicit conversion
-            ()
-          else
-            fail(TypeMismatch(methType.resultType, resultType))
+        val sym = methRef.symbol
+        if ctx.typerState.isCommittable then
+          // Here we call `resultType` only to accumulate constraints (even if
+          // it fails, we might be able to heal the expression to conform to the
+          // result type) so don't check for views since `viewExists` doesn't
+          // have any side-effect and would only slow the compiler down (cf #14333).
+          NoViewsAllowed.constrainResult(sym, resultApprox, resultType)
+        else if !constrainResult(sym, resultApprox, resultType) then
+          // Here we actually record that this alternative failed so that
+          // overloading resolution might prune it.
+          fail(TypeMismatch(methType.resultType, resultType, None))
 
         // match all arguments with corresponding formal parameters
         matchArgs(orderedArgs, methType.paramInfos, 0)
@@ -614,7 +644,6 @@ trait Applications extends Compatibility {
 
   /** The degree to which an argument has to match a formal parameter */
   enum ArgMatch:
-    case SubType       // argument is a relaxed subtype of formal
     case Compatible    // argument is compatible with formal
     case CompatibleCAP // capture-converted argument is compatible with formal
 
@@ -635,21 +664,38 @@ trait Applications extends Compatibility {
         // matches expected type
         false
       case argtpe =>
-        def SAMargOK = formal match {
-          case SAMType(sam) => argtpe <:< sam.toFunctionType(isJava = formal.classSymbol.is(JavaDefined))
-          case _ => false
-        }
-        if argMatch == ArgMatch.SubType then
-          argtpe relaxed_<:< formal.widenExpr
-        else
-          isCompatible(argtpe, formal)
-          || ctx.mode.is(Mode.ImplicitsEnabled) && SAMargOK
-          || argMatch == ArgMatch.CompatibleCAP
-              && {
-                val argtpe1 = argtpe.widen
-                val captured = captureWildcards(argtpe1)
-                (captured ne argtpe1) && isCompatible(captured, formal.widenExpr)
-              }
+        val argtpe1 = argtpe.widen
+
+        def SAMargOK =
+          defn.isFunctionType(argtpe1) && formal.match
+            case SAMType(sam) => argtpe <:< sam.toFunctionType(isJava = formal.classSymbol.is(JavaDefined))
+            case _ => false
+
+        isCompatible(argtpe, formal)
+        // Only allow SAM-conversion to PartialFunction if implicit conversions
+        // are enabled. This is necessary to avoid ambiguity between an overload
+        // taking a PartialFunction and one taking a Function1 because
+        // PartialFunction extends Function1 but Function1 is SAM-convertible to
+        // PartialFunction. Concretely, given:
+        //
+        //   def foo(a: Int => Int): Unit = println("1")
+        //   def foo(a: PartialFunction[Int, Int]): Unit = println("2")
+        //
+        // - `foo(x => x)` will print 1, because the PartialFunction overload
+        //   won't be seen as applicable in the first call to
+        //   `resolveOverloaded`, this behavior happens to match what Java does
+        //   since PartialFunction is not a SAM type according to Java
+        //   (`isDefined` is abstract).
+        // - `foo { case x if x % 2 == 0 => x }` will print 2, because both
+        //    overloads are applicable, but PartialFunction is a subtype of
+        //    Function1 so it's more specific.
+        || (!formal.isRef(defn.PartialFunctionClass) || ctx.mode.is(Mode.ImplicitsEnabled)) && SAMargOK
+        || argMatch == ArgMatch.CompatibleCAP
+            && {
+              val argtpe1 = argtpe.widen
+              val captured = captureWildcards(argtpe1)
+              (captured ne argtpe1) && isCompatible(captured, formal.widenExpr)
+            }
 
     /** The type of the given argument */
     protected def argType(arg: Arg, formal: Type): Type
@@ -714,7 +760,10 @@ trait Applications extends Compatibility {
       typedArgBuf += seqToRepeated(SeqLiteral(args, elemtpt))
     }
 
-    def harmonizeArgs(args: List[TypedArg]): List[Tree] = harmonize(args)
+    def harmonizeArgs(args: List[TypedArg]): List[Tree] =
+      // harmonize args only if resType depends on parameter types
+      if (isFullyDefined(methodType.resType, ForceDegree.none)) args
+      else harmonize(args)
 
     override def appPos: SrcPos = app.srcPos
 
@@ -851,24 +900,12 @@ trait Applications extends Compatibility {
       record("typedApply")
       val fun1 = typedExpr(tree.fun, originalProto)
 
-      // Warning: The following lines are dirty and fragile.
-      // We record that auto-tupling or untupling was demanded as a side effect in adapt.
-      // If it was, we assume the tupled-dual proto-type in the rest of the application,
-      // until, possibly, we have to fall back to insert an implicit on the qualifier.
-      // This crucially relies on he fact that `proto` is used only in a single call of `adapt`,
-      // otherwise we would get possible cross-talk between different `adapt` calls using the same
-      // prototype. A cleaner alternative would be to return a modified prototype from `adapt` together with
-      // a modified tree but this would be more convoluted and less efficient.
-      val proto = if (originalProto.hasTupledDual) originalProto.tupledDual else originalProto
-
-      // If some of the application's arguments are function literals without explicitly declared
-      // parameter types, relate the normalized result type of the application with the
-      // expected type through `constrainResult`. This can add more constraints which
-      // help sharpen the inferred parameter types for the argument function literal(s).
-      // This tweak is needed to make i1378 compile.
-      if (tree.args.exists(untpd.isFunctionWithUnknownParamType(_)))
-        if (!constrainResult(tree.symbol, fun1.tpe.widen, proto.derivedFunProto(resultType = pt)))
-          typr.println(i"result failure for $tree with type ${fun1.tpe.widen}, expected = $pt")
+      // If adaptation created a tupled dual of `originalProto`, pick the right version
+      // (tupled or not) of originalProto to proceed.
+      val proto =
+        if originalProto.hasTupledDual && needsTupledDual(fun1.tpe, originalProto)
+        then originalProto.tupledDual
+        else originalProto
 
       /** Type application where arguments come from prototype, and no implicits are inserted */
       def simpleApply(fun1: Tree, proto: FunProto)(using Context): Tree =
@@ -887,8 +924,9 @@ trait Applications extends Compatibility {
        *  part. Return an optional value to indicate success.
        */
       def tryWithImplicitOnQualifier(fun1: Tree, proto: FunProto)(using Context): Option[Tree] =
-        if (ctx.mode.is(Mode.SynthesizeExtMethodReceiver))
+        if ctx.mode.is(Mode.SynthesizeExtMethodReceiver) || proto.hasErrorArg then
           // Suppress insertion of apply or implicit conversion on extension method receiver
+          // or if argument is erroneous by itself.
           None
         else
           tryInsertImplicitOnQualifier(fun1, proto, ctx.typerState.ownedVars) flatMap { fun2 =>
@@ -977,7 +1015,7 @@ trait Applications extends Compatibility {
      *     { val xs = es; e' = e' + args }
      */
     def typedOpAssign(using Context): Tree = {
-      val (lhs1, name, rhss) = tree match
+      val (lhs1, name, rhss) = (tree: @unchecked) match
         case Apply(Select(lhs, name), rhss) => (typedExpr(lhs), name, rhss)
         case Apply(untpd.TypedSplice(Select(lhs1, name)), rhss) => (lhs1, name, rhss)
       val liftedDefs = new mutable.ListBuffer[Tree]
@@ -1097,6 +1135,40 @@ trait Applications extends Compatibility {
       tree
   }
 
+  /** Is `tp` a unary function type or an overloaded type with only unary function
+   *  types as alternatives?
+   */
+  def isUnary(tp: Type)(using Context): Boolean = tp match {
+    case tp: MethodicType =>
+      tp.firstParamTypes match {
+        case ptype :: Nil => !ptype.isRepeatedParam
+        case _ => false
+      }
+    case tp: TermRef =>
+      tp.denot.alternatives.forall(alt => isUnary(alt.info))
+    case _ =>
+      false
+  }
+
+  /** Should we tuple or untuple the argument before application?
+   *  If auto-tupling is enabled then
+   *
+   *   - we tuple n-ary arguments where n > 0 if the function consists
+   *     only of unary alternatives
+   *   - we untuple tuple arguments of infix operations if the function
+   *     does not consist only of unary alternatives.
+   */
+  def needsTupledDual(funType: Type, pt: FunProto)(using Context): Boolean =
+    pt.args match
+      case untpd.Tuple(elems) :: Nil =>
+        elems.length > 1
+        && pt.applyKind == ApplyKind.InfixTuple
+        && !isUnary(funType)
+      case args =>
+        args.lengthCompare(1) > 0
+        && isUnary(funType)
+        && Feature.autoTuplingEnabled
+
   /** If `tree` is a complete application of a compiler-generated `apply`
    *  or `copy` method of an enum case, widen its type to the underlying
    *  type by means of a type ascription, as long as the widened type is
@@ -1299,7 +1371,7 @@ trait Applications extends Compatibility {
         for (argType <- argTypes) assert(!isBounds(argType), unapplyApp.tpe.show)
         val bunchedArgs = argTypes match {
           case argType :: Nil =>
-            if (args.lengthCompare(1) > 0 && Feature.autoTuplingEnabled) untpd.Tuple(args) :: Nil
+            if (args.lengthCompare(1) > 0 && Feature.autoTuplingEnabled && defn.isTupleNType(argType)) untpd.Tuple(args) :: Nil
             else args
           case _ => args
         }
@@ -1507,7 +1579,7 @@ trait Applications extends Compatibility {
             case tp2: MethodType => true // (3a)
             case tp2: PolyType if tp2.resultType.isInstanceOf[MethodType] => true // (3a)
             case tp2: PolyType => // (3b)
-              explore(isAsSpecificValueType(tp1, constrained(tp2).resultType))
+              explore(isAsSpecificValueType(tp1, instantiateWithTypeVars(tp2)))
             case _ => // 3b)
               isAsSpecificValueType(tp1, tp2)
     }
@@ -1567,7 +1639,7 @@ trait Applications extends Compatibility {
     /** Widen the result type of synthetic given methods from the implementation class to the
      *  type that's implemented. Example
      *
-     *      given I[X] as T { ... }
+     *      given I[X]: T with { ... }
      *
      *  This desugars to
      *
@@ -1577,7 +1649,7 @@ trait Applications extends Compatibility {
      *  To compare specificity we should compare with `T`, not with its implementation `I[X]`.
      *  No such widening is performed for given aliases, which are not synthetic. E.g.
      *
-     *      given J[X] as T = rhs
+     *      given J[X]: T = rhs
      *
      *  already has the right result type `T`. Neither is widening performed for given
      *  objects, since these are anyway taken to be more specific than methods
@@ -1588,8 +1660,8 @@ trait Applications extends Compatibility {
         mt.derivedLambdaType(mt.paramNames, mt.paramInfos, widenGiven(mt.resultType, alt))
       case pt: PolyType =>
         pt.derivedLambdaType(pt.paramNames, pt.paramInfos, widenGiven(pt.resultType, alt))
-      case _ =>
-        if (alt.symbol.isAllOf(SyntheticGivenMethod)) tp.widenToParents
+      case rt =>
+        if alt.symbol.isCoDefinedGiven(rt.typeSymbol) then tp.widenToParents
         else tp
     }
 
@@ -1674,7 +1746,7 @@ trait Applications extends Compatibility {
       resultType.revealIgnored match {
         case resultType: ValueType =>
           altType.widen match {
-            case tp: PolyType => resultConforms(altSym, constrained(tp).resultType, resultType)
+            case tp: PolyType => resultConforms(altSym, instantiateWithTypeVars(tp), resultType)
             case tp: MethodType => constrainResult(altSym, tp.resultType, resultType)
             case _ => true
           }
@@ -1863,17 +1935,10 @@ trait Applications extends Compatibility {
           else
             alts
 
-        def narrowByTrees(alts: List[TermRef], args: List[Tree], resultType: Type): List[TermRef] = {
-          val alts2 = alts.filterConserve(alt =>
-            isApplicableMethodRef(alt, args, resultType, keepConstraint = false, ArgMatch.SubType)
+        def narrowByTrees(alts: List[TermRef], args: List[Tree], resultType: Type): List[TermRef] =
+          alts.filterConserve(alt =>
+            isApplicableMethodRef(alt, args, resultType, keepConstraint = false, ArgMatch.CompatibleCAP)
           )
-          if (alts2.isEmpty && !ctx.isAfterTyper)
-            alts.filterConserve(alt =>
-              isApplicableMethodRef(alt, args, resultType, keepConstraint = false, ArgMatch.CompatibleCAP)
-            )
-          else
-            alts2
-        }
 
         record("resolveOverloaded.FunProto", alts.length)
         val alts1 = narrowBySize(alts)
@@ -2061,7 +2126,11 @@ trait Applications extends Compatibility {
             else defn.FunctionOf(commonParamTypes, WildcardType)
           overload.println(i"pretype arg $arg with expected type $commonFormal")
           if (commonParamTypes.forall(isFullyDefined(_, ForceDegree.flipBottom)))
-            withMode(Mode.ImplicitsEnabled)(pt.typedArg(arg, commonFormal))
+            withMode(Mode.ImplicitsEnabled) {
+              // We can cache the adapted argument here because the expected type
+              // is a common type shared by all overloading candidates.
+              pt.cacheArg(arg, pt.typedArg(arg, commonFormal))
+            }
         }
         recur(altFormals.map(_.tail), args1)
       case _ =>
diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala
index 820f466723e2..be38221ef167 100644
--- a/compiler/src/dotty/tools/dotc/typer/Checking.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala
@@ -20,6 +20,7 @@ import ErrorReporting.errorTree
 import rewrites.Rewrites.patch
 import util.Spans.Span
 import Phases.refchecksPhase
+import Constants.Constant
 
 import util.SrcPos
 import util.Spans.Span
@@ -33,9 +34,12 @@ import NameKinds.DefaultGetterName
 import NameOps._
 import SymDenotations.{NoCompleter, NoDenotation}
 import Applications.unapplyArgs
+import Inferencing.isFullyDefined
 import transform.patmat.SpaceEngine.isIrrefutable
-import config.Feature._
+import config.Feature
+import config.Feature.sourceVersion
 import config.SourceVersion._
+import transform.TypeUtils.*
 
 import collection.mutable
 import reporting._
@@ -191,6 +195,30 @@ object Checking {
       report.errorOrMigrationWarning(em"$tp is not a legal $what\nsince it${rstatus.msg}", pos)
   }
 
+  /** Given a parent `parent` of a class `cls`, if `parent` is a trait check that
+   *  the superclass of `cls` derives from the superclass of `parent`.
+   *
+   *  An exception is made if `cls` extends `Any`, and `parent` is `java.io.Serializable`
+   *  or `java.lang.Comparable`. These two classes are treated by Scala as universal
+   *  traits. E.g. the following is OK:
+   *
+   *      ... extends Any with java.io.Serializable
+   *
+   *  The standard library relies on this idiom.
+   */
+  def checkTraitInheritance(parent: Symbol, cls: ClassSymbol, pos: SrcPos)(using Context): Unit =
+    parent match {
+      case parent: ClassSymbol if parent.is(Trait) =>
+        val psuper = parent.superClass
+        val csuper = cls.superClass
+        val ok = csuper.derivesFrom(psuper) ||
+          parent.is(JavaDefined) && csuper == defn.AnyClass &&
+          (parent == defn.JavaSerializableClass || parent == defn.ComparableClass)
+        if (!ok)
+          report.error(em"illegal trait inheritance: super$csuper does not derive from $parent's super$psuper", pos)
+      case _ =>
+    }
+
   /** A type map which checks that the only cycles in a type are F-bounds
    *  and that protects all F-bounded references by LazyRefs.
    */
@@ -403,11 +431,11 @@ object Checking {
       return
 
     def qualifies(sym: Symbol) = sym.name.isTypeName && !sym.is(Private)
-    val abstractTypeNames =
-      for (parent <- parents; mbr <- parent.abstractTypeMembers if qualifies(mbr.symbol))
-      yield mbr.name.asTypeName
-
     withMode(Mode.CheckCyclic) {
+      val abstractTypeNames =
+        for (parent <- parents; mbr <- parent.abstractTypeMembers if qualifies(mbr.symbol))
+        yield mbr.name.asTypeName
+
       for name <- abstractTypeNames do
         try
           val mbr = joint.member(name)
@@ -425,6 +453,7 @@ object Checking {
   /** Check that symbol's definition is well-formed. */
   def checkWellFormed(sym: Symbol)(using Context): Unit = {
     def fail(msg: Message) = report.error(msg, sym.srcPos)
+    def warn(msg: Message) = report.warning(msg, sym.srcPos)
 
     def checkWithDeferred(flag: FlagSet) =
       if (sym.isOneOf(flag))
@@ -448,7 +477,7 @@ object Checking {
     if (sym.is(Implicit)) {
       if (sym.owner.is(Package))
         fail(TopLevelCantBeImplicit(sym))
-      if (sym.isType)
+      if sym.isType && (!sym.isClass || sym.is(Trait)) then
         fail(TypesAndTraitsCantBeImplicit())
     }
     if sym.is(Transparent) then
@@ -464,8 +493,15 @@ object Checking {
       fail(em"only classes can be ${(sym.flags & ClassOnlyFlags).flagsString}")
     if (sym.is(AbsOverride) && !sym.owner.is(Trait))
       fail(AbstractOverrideOnlyInTraits(sym))
-    if (sym.is(Trait) && sym.is(Final))
-      fail(TraitsMayNotBeFinal(sym))
+    if sym.is(Trait) then
+      if sym.is(Final) then
+        fail(TraitsMayNotBeFinal(sym))
+      else if sym.is(Open) then
+        warn(RedundantModifier(Open))
+    if sym.isAllOf(Abstract | Open) then
+      warn(RedundantModifier(Open))
+    if sym.is(Open) && sym.isLocal then
+      warn(RedundantModifier(Open))
     // Skip ModuleVal since the annotation will also be on the ModuleClass
     if sym.hasAnnotation(defn.TailrecAnnot) then
       if !sym.isOneOf(Method | ModuleVal) then
@@ -505,6 +541,9 @@ object Checking {
     checkCombination(Abstract, Override)
     checkCombination(Private, Override)
     checkCombination(Lazy, Inline)
+    // The issue with `erased inline` is that the erased semantics get lost
+    // as the code is inlined and the reference is removed before the erased usage check.
+    checkCombination(Erased, Inline)
     checkNoConflict(Lazy, ParamAccessor, s"parameter may not be `lazy`")
   }
 
@@ -687,6 +726,50 @@ object Checking {
         checkValue(tree)
       case _ =>
     tree
+
+  /** Check that experimental language imports in `trees`
+   *  are done only in experimental scopes, or in a top-level
+   *  scope with only @experimental definitions.
+   */
+  def checkExperimentalImports(trees: List[Tree])(using Context): Unit =
+
+    def nonExperimentalStat(trees: List[Tree]): Tree = trees match
+      case (_: Import | EmptyTree) :: rest =>
+        nonExperimentalStat(rest)
+      case (tree @ TypeDef(_, impl: Template)) :: rest if tree.symbol.isPackageObject =>
+        nonExperimentalStat(impl.body).orElse(nonExperimentalStat(rest))
+      case (tree: PackageDef) :: rest =>
+        nonExperimentalStat(tree.stats).orElse(nonExperimentalStat(rest))
+      case (tree: MemberDef) :: rest =>
+        if tree.symbol.isExperimental || tree.symbol.is(Synthetic) then
+          nonExperimentalStat(rest)
+        else
+          tree
+      case tree :: rest =>
+        tree
+      case Nil =>
+        EmptyTree
+
+    for case imp @ Import(qual, selectors) <- trees do
+      def isAllowedImport(sel: untpd.ImportSelector) =
+        val name = Feature.experimental(sel.name)
+        name == Feature.scala2macros || name == Feature.erasedDefinitions
+
+      languageImport(qual) match
+        case Some(nme.experimental)
+        if !ctx.owner.isInExperimentalScope && !selectors.forall(isAllowedImport) =>
+          def check(stable: => String) =
+            Feature.checkExperimentalFeature("features", imp.srcPos,
+              s"\n\nNote: the scope enclosing the import is not considered experimental because it contains the\nnon-experimental $stable")
+          if ctx.owner.is(Package) then
+            // allow top-level experimental imports if all definitions are @experimental
+            nonExperimentalStat(trees) match
+              case EmptyTree =>
+              case tree: MemberDef => check(i"${tree.symbol}")
+              case tree => check(i"expression ${tree}")
+          else Feature.checkExperimentalFeature("features", imp.srcPos)
+        case _ =>
+  end checkExperimentalImports
 }
 
 trait Checking {
@@ -742,7 +825,7 @@ trait Checking {
             recur(pat1, pt)
           case UnApply(fn, _, pats) =>
             check(pat, pt) &&
-            (isIrrefutable(fn) || fail(pat, pt)) && {
+            (isIrrefutable(fn, pats.length) || fail(pat, pt)) && {
               val argPts = unapplyArgs(fn.tpe.widen.finalResultType, fn, pats, pat.srcPos)
               pats.corresponds(argPts)(recur)
             }
@@ -802,12 +885,13 @@ trait Checking {
    *  that is concurrently compiled in another source file.
    */
   def checkNoModuleClash(sym: Symbol)(using Context): Unit =
-    if sym.effectiveOwner.is(Package)
-       && sym.owner.info.member(sym.name.moduleClassName).symbol.isAbsent()
+    val effectiveOwner = sym.effectiveOwner
+    if effectiveOwner.is(Package)
+       && effectiveOwner.info.member(sym.name.moduleClassName).symbol.isAbsent()
     then
-      val conflicting = sym.owner.info.member(sym.name.toTypeName).symbol
+      val conflicting = effectiveOwner.info.member(sym.name.toTypeName).symbol
       if conflicting.exists then
-        report.error(AlreadyDefined(sym.name, sym.owner, conflicting), sym.srcPos)
+        report.error(AlreadyDefined(sym.name, effectiveOwner, conflicting), sym.srcPos)
 
  /**  Check that `tp` is a class type.
   *   Also, if `traitReq` is true, check that `tp` is a trait.
@@ -829,22 +913,14 @@ trait Checking {
   /** If `sym` is an old-style implicit conversion, check that implicit conversions are enabled.
    *  @pre  sym.is(GivenOrImplicit)
    */
-  def checkImplicitConversionDefOK(sym: Symbol)(using Context): Unit = {
-    def check(): Unit =
+  def checkImplicitConversionDefOK(sym: Symbol)(using Context): Unit =
+    if sym.isOldStyleImplicitConversion(directOnly = true) then
       checkFeature(
         nme.implicitConversions,
         i"Definition of implicit conversion $sym",
         ctx.owner.topLevelClass,
         sym.srcPos)
 
-    sym.info.stripPoly match {
-      case mt @ MethodType(_ :: Nil)
-      if !mt.isImplicitMethod && !sym.is(Synthetic) => // it's an old-styleconversion
-        check()
-      case _ =>
-    }
-  }
-
   /** If `tree` is an application of a new-style implicit conversion (using the apply
    *  method of a `scala.Conversion` instance), check that implicit conversions are
    *  enabled.
@@ -906,7 +982,7 @@ trait Checking {
                    description: => String,
                    featureUseSite: Symbol,
                    pos: SrcPos)(using Context): Unit =
-    if !enabled(name) then
+    if !Feature.enabled(name) then
       report.featureWarning(name.toString, description, featureUseSite, required = false, pos)
 
   /** Check that `tp` is a class type and that any top-level type arguments in this type
@@ -954,7 +1030,9 @@ trait Checking {
           def doubleDefError(decl: Symbol, other: Symbol): Unit =
             if (!decl.info.isErroneous && !other.info.isErroneous)
               report.error(DoubleDefinition(decl, other, cls), decl.srcPos)
-          if (decl is Synthetic) doubleDefError(other, decl)
+          if decl.name.is(DefaultGetterName) && ctx.reporter.errorsReported then
+            () // do nothing; we already have reported an error that overloaded variants cannot have default arguments
+          else if (decl is Synthetic) doubleDefError(other, decl)
           else doubleDefError(decl, other)
         }
         if decl.hasDefaultParams && other.hasDefaultParams then
@@ -981,18 +1059,6 @@ trait Checking {
         report.error(i"""$called is already implemented by super${caller.superClass},
                    |its constructor cannot be called again""", call.srcPos)
 
-      if (caller.is(Module)) {
-        val traverser = new TreeTraverser {
-          def traverse(tree: Tree)(using Context) = tree match {
-            case tree: RefTree if tree.isTerm && (tree.tpe.classSymbol eq caller) =>
-              report.error("super constructor cannot be passed a self reference", tree.srcPos)
-            case _ =>
-              traverseChildren(tree)
-          }
-        }
-        traverser.traverse(call)
-      }
-
       // Check that constructor call is of the form _.(args1)...(argsN).
       // This guards against calls resulting from inserted implicits or applies.
       def checkLegalConstructorCall(tree: Tree, encl: Tree, kind: String): Unit = tree match {
@@ -1090,7 +1156,7 @@ trait Checking {
         }
       case _ =>
     }
-    tp.foreachPart(check, stopAtStatic = true)
+    tp.foreachPart(check, StopAt.Static)
     if (ok) tp else UnspecifiedErrorType
   }
 
@@ -1143,21 +1209,33 @@ trait Checking {
   /** Check arguments of compiler-defined annotations */
   def checkAnnotArgs(tree: Tree)(using Context): tree.type =
     val cls = Annotations.annotClass(tree)
-    def needsStringLit(arg: Tree) =
-      report.error(em"@${cls.name} needs a string literal as argument", arg.srcPos)
     tree match
       case Apply(tycon, arg :: Nil) if cls == defn.TargetNameAnnot =>
         arg match
+          case Literal(Constant("")) =>
+            report.error(em"target name cannot be empty", arg.srcPos)
           case Literal(_) => // ok
-          case _ => needsStringLit(arg)
+          case _ =>
+            report.error(em"@${cls.name} needs a string literal as argument", arg.srcPos)
       case _ =>
     tree
 
   /** 1. Check that all case classes that extend `scala.reflect.Enum` are `enum` cases
    *  2. Check that parameterised `enum` cases do not extend java.lang.Enum.
    *  3. Check that only a static `enum` base class can extend java.lang.Enum.
+   *  4. Check that the user does not implement an `ordinal` method in the body of an enum class.
    */
   def checkEnum(cdef: untpd.TypeDef, cls: Symbol, firstParent: Symbol)(using Context): Unit = {
+    def existingDef(sym: Symbol, clazz: ClassSymbol)(using Context): Symbol = // adapted from SyntheticMembers
+      val existing = sym.matchingMember(clazz.thisType)
+      if existing != sym && !existing.is(Deferred) then existing else NoSymbol
+    def checkExistingOrdinal(using Context) =
+      val decl = existingDef(defn.Enum_ordinal, cls.asClass)
+      if decl.exists then
+        if decl.owner == cls then
+          report.error(em"the ordinal method of enum $cls can not be defined by the user", decl.srcPos)
+        else
+          report.error(em"enum $cls can not inherit the concrete ordinal method of ${decl.owner}", cdef.srcPos)
     def isEnumAnonCls =
       cls.isAnonymousClass
       && cls.owner.isTerm
@@ -1177,6 +1255,8 @@ trait Checking {
         // this test allows inheriting from `Enum` by hand;
         // see enum-List-control.scala.
         report.error(ClassCannotExtendEnum(cls, firstParent), cdef.srcPos)
+    if cls.isEnumClass && !isJavaEnum then
+      checkExistingOrdinal
   }
 
   /** Check that the firstParent for an enum case derives from the declaring enum class, if not, adds it as a parent
@@ -1298,6 +1378,25 @@ trait Checking {
     if !tp.derivesFrom(defn.MatchableClass) && sourceVersion.isAtLeast(`future-migration`) then
       val kind = if pattern then "pattern selector" else "value"
       report.warning(MatchableWarning(tp, pattern), pos)
+
+  def checkCanThrow(tp: Type, span: Span)(using Context): Unit =
+    if Feature.enabled(Feature.saferExceptions) && tp.isCheckedException then
+      ctx.typer.implicitArgTree(defn.CanThrowClass.typeRef.appliedTo(tp), span)
+
+  /** Check that catch can generate a good CanThrow exception */
+  def checkCatch(pat: Tree, guard: Tree)(using Context): Unit = pat match
+    case Typed(_: Ident, tpt) if isFullyDefined(tpt.tpe, ForceDegree.none) && guard.isEmpty =>
+      // OK
+    case Bind(_, pat1) =>
+      checkCatch(pat1, guard)
+    case _ =>
+      val req =
+        if guard.isEmpty then "for cases of the form `ex: T` where `T` is fully defined"
+        else "if no pattern guard is given"
+      report.error(
+        em"""Implementation restriction: cannot generate CanThrow capability for this kind of catch.
+            |CanThrow capabilities can only be generated $req.""",
+        pat.srcPos)
 }
 
 trait ReChecking extends Checking {
@@ -1310,6 +1409,8 @@ trait ReChecking extends Checking {
   override def checkAnnotApplicable(annot: Tree, sym: Symbol)(using Context): Boolean = true
   override def checkMatchable(tp: Type, pos: SrcPos, pattern: Boolean)(using Context): Unit = ()
   override def checkNoModuleClash(sym: Symbol)(using Context) = ()
+  override def checkCanThrow(tp: Type, span: Span)(using Context): Unit = ()
+  override def checkCatch(pat: Tree, guard: Tree)(using Context): Unit = ()
 }
 
 trait NoChecking extends ReChecking {
diff --git a/compiler/src/dotty/tools/dotc/typer/ConstFold.scala b/compiler/src/dotty/tools/dotc/typer/ConstFold.scala
index 4633c187912f..3e09642d291d 100644
--- a/compiler/src/dotty/tools/dotc/typer/ConstFold.scala
+++ b/compiler/src/dotty/tools/dotc/typer/ConstFold.scala
@@ -29,12 +29,12 @@ object ConstFold:
   def Apply[T <: Apply](tree: T)(using Context): T =
     tree.fun match
       case Select(xt, op) if foldedBinops.contains(op) =>
-        xt.tpe.widenTermRefExpr.normalized match
-          case ConstantType(x) =>
+        xt match
+          case ConstantTree(x) =>
             tree.args match
               case yt :: Nil =>
-                yt.tpe.widenTermRefExpr.normalized match
-                  case ConstantType(y) => tree.withFoldedType(foldBinop(op, x, y))
+                yt match
+                  case ConstantTree(y) => tree.withFoldedType(foldBinop(op, x, y))
                   case _ => tree
               case _ => tree
           case _ => tree
@@ -46,8 +46,8 @@ object ConstFold:
 
   def Select[T <: Select](tree: T)(using Context): T =
     if foldedUnops.contains(tree.name) then
-      tree.qualifier.tpe.widenTermRefExpr.normalized match
-        case ConstantType(x) => tree.withFoldedType(foldUnop(tree.name, x))
+      tree.qualifier match
+        case ConstantTree(x) => tree.withFoldedType(foldUnop(tree.name, x))
         case _ => tree
     else tree
 
@@ -59,6 +59,17 @@ object ConstFold:
       tree.withFoldedType(Constant(targ.tpe))
     case _ => tree
 
+  private object ConstantTree:
+    def unapply(tree: Tree)(using Context): Option[Constant] =
+      tree match
+        case Inlined(_, Nil, expr) => unapply(expr)
+        case Typed(expr, _) => unapply(expr)
+        case Literal(c) if c.tag == Constants.NullTag => Some(c)
+        case _ =>
+          tree.tpe.widenTermRefExpr.normalized.simplified match
+            case ConstantType(c) => Some(c)
+            case _ => None
+
   extension [T <: Tree](tree: T)(using Context)
     private def withFoldedType(c: Constant | Null): T =
       if c == null then tree else tree.withType(ConstantType(c)).asInstanceOf[T]
@@ -164,15 +175,24 @@ object ConstFold:
     case _ => null
   }
   private def foldStringOp(op: Name, x: Constant, y: Constant): Constant = op match {
-    case nme.ADD  => Constant(x.stringValue + y.stringValue)
+    case nme.ADD => Constant(x.stringValue + y.stringValue)
     case nme.EQ  => Constant(x.stringValue == y.stringValue)
+    case nme.NE  => Constant(x.stringValue != y.stringValue)
     case _ => null
   }
 
+  private def foldNullOp(op: Name, x: Constant, y: Constant): Constant =
+    assert(x.tag == NullTag || y.tag == NullTag)
+    op match
+      case nme.EQ => Constant(x.tag == y.tag)
+      case nme.NE => Constant(x.tag != y.tag)
+      case _ => null
+
   private def foldBinop(op: Name, x: Constant, y: Constant): Constant =
     val optag =
       if (x.tag == y.tag) x.tag
       else if (x.isNumeric && y.isNumeric) math.max(x.tag, y.tag)
+      else if (x.tag == NullTag || y.tag == NullTag) NullTag
       else NoTag
 
     try optag match
@@ -182,6 +202,7 @@ object ConstFold:
       case  FloatTag                              => foldFloatOp(op, x, y)
       case  DoubleTag                             => foldDoubleOp(op, x, y)
       case  StringTag                             => foldStringOp(op, x, y)
+      case  NullTag                               => foldNullOp(op, x, y)
       case  _                                     => null
       catch case ex: ArithmeticException => null // the code will crash at runtime,
                                                  // but that is better than the
diff --git a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala
new file mode 100644
index 000000000000..970b771623f6
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala
@@ -0,0 +1,216 @@
+package dotty.tools
+package dotc
+package transform
+
+import core.*
+import Symbols.*, Types.*, Contexts.*, Flags.*, SymUtils.*, Decorators.*, reporting.*
+import util.SrcPos
+import config.{ScalaVersion, NoScalaVersion, Feature, ScalaRelease}
+import MegaPhase.MiniPhase
+import scala.util.{Failure, Success}
+import ast.tpd
+
+class CrossVersionChecks extends MiniPhase:
+  import tpd.*
+
+  override def phaseName: String = CrossVersionChecks.name
+
+  override def description: String = CrossVersionChecks.description
+
+  override def runsAfterGroupsOf: Set[String] = Set(FirstTransform.name)
+    // We assume all type trees except TypeTree have been eliminated
+
+  // Note: if a symbol has both @deprecated and @migration annotations and both
+  // warnings are enabled, only the first one checked here will be emitted.
+  // I assume that's a consequence of some code trying to avoid noise by suppressing
+  // warnings after the first, but I think it'd be better if we didn't have to
+  // arbitrarily choose one as more important than the other.
+  private def checkUndesiredProperties(sym: Symbol, pos: SrcPos)(using Context): Unit =
+    checkDeprecated(sym, pos)
+    checkExperimental(sym, pos)
+    checkSinceAnnot(sym, pos)
+
+    val xMigrationValue = ctx.settings.Xmigration.value
+    if xMigrationValue != NoScalaVersion then
+      checkMigration(sym, pos, xMigrationValue)
+
+
+  /** If @deprecated is present, and the point of reference is not enclosed
+   * in either a deprecated member or a scala bridge method, issue a warning.
+   */
+  private def checkDeprecated(sym: Symbol, pos: SrcPos)(using Context): Unit =
+
+    /** is the owner an enum or its companion and also the owner of sym */
+    def isEnumOwner(owner: Symbol)(using Context) =
+      // pre: sym is an enumcase
+      if owner.isEnumClass then owner.companionClass eq sym.owner
+      else if owner.is(ModuleClass) && owner.companionClass.isEnumClass then owner eq sym.owner
+      else false
+
+    def isDeprecatedOrEnum(owner: Symbol)(using Context) =
+      // pre: sym is an enumcase
+      owner.isDeprecated
+      || isEnumOwner(owner)
+
+    /**Scan the chain of outer declaring scopes from the current context
+     * a deprecation warning will be skipped if one of the following holds
+     * for a given declaring scope:
+     * - the symbol associated with the scope is also deprecated.
+     * - if and only if `sym` is an enum case, the scope is either
+     *   a module that declares `sym`, or the companion class of the
+     *   module that declares `sym`.
+     */
+    def skipWarning(using Context) =
+      ctx.owner.ownersIterator.exists(if sym.isEnumCase then isDeprecatedOrEnum else _.isDeprecated)
+
+    for annot <- sym.getAnnotation(defn.DeprecatedAnnot) do
+      if !skipWarning then
+        val msg = annot.argumentConstant(0).map(": " + _.stringValue).getOrElse("")
+        val since = annot.argumentConstant(1).map(" since " + _.stringValue).getOrElse("")
+        report.deprecationWarning(s"${sym.showLocated} is deprecated${since}${msg}", pos)
+
+  private def checkExperimental(sym: Symbol, pos: SrcPos)(using Context): Unit =
+    if sym.isExperimental && !ctx.owner.isInExperimentalScope then
+      Feature.checkExperimentalDef(sym, pos)
+
+  private def checkExperimentalSignature(sym: Symbol, pos: SrcPos)(using Context): Unit =
+    class Checker extends TypeTraverser:
+      def traverse(tp: Type): Unit =
+        if tp.typeSymbol.isExperimental then
+          Feature.checkExperimentalDef(tp.typeSymbol, pos)
+        else
+          traverseChildren(tp)
+    if !sym.isInExperimentalScope then
+      new Checker().traverse(sym.info)
+
+  private def checkExperimentalAnnots(sym: Symbol)(using Context): Unit =
+    if !sym.isInExperimentalScope then
+      for annot <- sym.annotations if annot.symbol.isExperimental do
+        Feature.checkExperimentalDef(annot.symbol, annot.tree)
+
+  private def checkSinceAnnot(sym: Symbol, pos: SrcPos)(using Context): Unit =
+    for
+      annot <- sym.getAnnotation(defn.SinceAnnot)
+      releaseName <- annot.argumentConstantString(0)
+    do
+      ScalaRelease.parse(releaseName) match
+        case Some(release) if release > ctx.scalaRelease =>
+          report.error(
+            i"$sym was added in Scala release ${releaseName.show}, therefore it cannot be used in the code targeting Scala ${ctx.scalaRelease.show}",
+            pos)
+        case None =>
+          report.error(i"$sym has an unparsable release name: '${releaseName}'", annot.tree.srcPos)
+        case _ =>
+
+  private def checkSinceAnnotInSignature(sym: Symbol, pos: SrcPos)(using Context) =
+    new TypeTraverser:
+      def traverse(tp: Type) =
+        if tp.typeSymbol.hasAnnotation(defn.SinceAnnot) then
+          checkSinceAnnot(tp.typeSymbol, pos)
+        else
+          traverseChildren(tp)
+    .traverse(sym.info)
+
+  /** If @migration is present (indicating that the symbol has changed semantics between versions),
+   *  emit a warning.
+   */
+  private def checkMigration(sym: Symbol, pos: SrcPos, xMigrationValue: ScalaVersion)(using Context): Unit =
+    for annot <- sym.getAnnotation(defn.MigrationAnnot) do
+      val migrationVersion = ScalaVersion.parse(annot.argumentConstant(1).get.stringValue)
+      migrationVersion match
+        case Success(symVersion) if xMigrationValue < symVersion =>
+          val msg = annot.argumentConstant(0).get.stringValue
+          report.warning(SymbolChangedSemanticsInVersion(sym, symVersion, msg), pos)
+        case Failure(ex) =>
+          report.warning(SymbolHasUnparsableVersionNumber(sym, ex.getMessage), pos)
+        case _ =>
+
+  /** Check that a deprecated val or def does not override a
+   *  concrete, non-deprecated method.  If it does, then
+   *  deprecation is meaningless.
+   */
+  private def checkDeprecatedOvers(tree: Tree)(using Context): Unit = {
+    val symbol = tree.symbol
+    if (symbol.isDeprecated) {
+      val concrOvers =
+        symbol.allOverriddenSymbols.filter(sym =>
+          !sym.isDeprecated && !sym.is(Deferred))
+      if (!concrOvers.isEmpty)
+        report.deprecationWarning(
+          symbol.toString + " overrides concrete, non-deprecated symbol(s):" +
+            concrOvers.map(_.name).mkString("    ", ", ", ""), tree.srcPos)
+    }
+  }
+
+  /** Check that classes extending experimental classes or nested in experimental classes have the @experimental annotation. */
+  private def checkExperimentalInheritance(cls: ClassSymbol)(using Context): Unit =
+    if !cls.isAnonymousClass && !cls.hasAnnotation(defn.ExperimentalAnnot) then
+      cls.info.parents.find(_.typeSymbol.isExperimental) match
+        case Some(parent) =>
+          report.error(em"extension of experimental ${parent.typeSymbol} must have @experimental annotation", cls.srcPos)
+        case _ =>
+  end checkExperimentalInheritance
+
+  override def transformValDef(tree: ValDef)(using Context): ValDef =
+    checkDeprecatedOvers(tree)
+    checkExperimentalAnnots(tree.symbol)
+    checkExperimentalSignature(tree.symbol, tree)
+    checkSinceAnnot(tree.symbol, tree.srcPos)
+    checkSinceAnnotInSignature(tree.symbol, tree)
+    tree
+
+  override def transformDefDef(tree: DefDef)(using Context): DefDef =
+    checkDeprecatedOvers(tree)
+    checkExperimentalAnnots(tree.symbol)
+    checkExperimentalSignature(tree.symbol, tree)
+    checkSinceAnnotInSignature(tree.symbol, tree)
+    tree
+
+  override def transformTemplate(tree: Template)(using Context): Tree =
+    val cls = ctx.owner.asClass
+    checkExperimentalInheritance(cls)
+    checkExperimentalAnnots(cls)
+    tree
+
+  override def transformIdent(tree: Ident)(using Context): Ident = {
+    checkUndesiredProperties(tree.symbol, tree.srcPos)
+    tree
+  }
+
+  override def transformSelect(tree: Select)(using Context): Select = {
+    checkUndesiredProperties(tree.symbol, tree.srcPos)
+    tree
+  }
+
+  override def transformNew(tree: New)(using Context): New = {
+    checkUndesiredProperties(tree.tpe.typeSymbol, tree.srcPos)
+    tree
+  }
+
+  override def transformTypeTree(tree: TypeTree)(using Context): TypeTree = {
+    val tpe = tree.tpe
+    tpe.foreachPart {
+      case TypeRef(_, sym: Symbol)  =>
+        checkDeprecated(sym, tree.srcPos)
+        checkExperimental(sym, tree.srcPos)
+        checkSinceAnnot(sym, tree.srcPos)
+      case TermRef(_, sym: Symbol)  =>
+        checkDeprecated(sym, tree.srcPos)
+        checkExperimental(sym, tree.srcPos)
+        checkSinceAnnot(sym, tree.srcPos)
+      case _ =>
+    }
+    tree
+  }
+
+  override def transformTypeDef(tree: TypeDef)(using Context): TypeDef = {
+    checkExperimentalAnnots(tree.symbol)
+    checkSinceAnnot(tree.symbol, tree.srcPos)
+    tree
+  }
+
+end CrossVersionChecks
+
+object CrossVersionChecks:
+  val name: String = "crossVersionChecks"
+  val description: String = "check issues related to deprecated and experimental"
diff --git a/compiler/src/dotty/tools/dotc/typer/Deriving.scala b/compiler/src/dotty/tools/dotc/typer/Deriving.scala
index 269924c21114..39a775d61f8c 100644
--- a/compiler/src/dotty/tools/dotc/typer/Deriving.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Deriving.scala
@@ -83,7 +83,10 @@ trait Deriving {
      */
     private def processDerivedInstance(derived: untpd.Tree): Unit = {
       val originalTypeClassType = typedAheadType(derived, AnyTypeConstructorProto).tpe
-      val typeClassType = checkClassType(underlyingClassRef(originalTypeClassType), derived.srcPos, traitReq = false, stablePrefixReq = true)
+      val underlyingClassType = underlyingClassRef(originalTypeClassType)
+      val typeClassType = checkClassType(
+          underlyingClassType.orElse(originalTypeClassType),
+          derived.srcPos, traitReq = false, stablePrefixReq = true)
       val typeClass = typeClassType.classSymbol
       val typeClassParams = typeClass.typeParams
       val typeClassArity = typeClassParams.length
@@ -160,7 +163,7 @@ trait Deriving {
         val clsParamInfos = clsType.typeParams
         val clsArity = clsParamInfos.length
         val alignedClsParamInfos = clsParamInfos.takeRight(instanceArity)
-        val alignedTypeClassParamInfos = typeClassParamInfos.take(alignedClsParamInfos.length)
+        val alignedTypeClassParamInfos = typeClassParamInfos.takeRight(alignedClsParamInfos.length)
 
 
         if ((instanceArity == clsArity || instanceArity > 0) && sameParamKinds(alignedClsParamInfos, alignedTypeClassParamInfos)) {
diff --git a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala
index 232ad8456c0a..72a0cfe9dcbe 100644
--- a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala
@@ -186,7 +186,7 @@ trait Dynamic {
       val base =
         untpd.Apply(
           untpd.TypedSplice(selectable.select(selectorName)).withSpan(fun.span),
-          (Literal(Constant(name.toString)) :: Nil).map(untpd.TypedSplice(_)))
+          (Literal(Constant(name.encode.toString)) :: Nil).map(untpd.TypedSplice(_)))
 
       val scall =
         if (vargss.isEmpty) base
diff --git a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala
index 413ae14762b8..733977734329 100644
--- a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala
+++ b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala
@@ -51,6 +51,16 @@ object ErrorReporting {
       case _ =>
         report.error(em"missing arguments for $meth", tree.srcPos)
 
+  def matchReductionAddendum(tps: Type*)(using Context): String =
+    val collectMatchTrace = new TypeAccumulator[String]:
+      def apply(s: String, tp: Type): String =
+        if s.nonEmpty then s
+        else tp match
+          case tp: AppliedType if tp.isMatchAlias => MatchTypeTrace.record(tp.tryNormalize)
+          case tp: MatchType => MatchTypeTrace.record(tp.tryNormalize)
+          case _ => foldOver(s, tp)
+    tps.foldLeft("")(collectMatchTrace)
+
   class Errors(using Context) {
 
     /** An explanatory note to be added to error messages
@@ -121,7 +131,7 @@ object ErrorReporting {
         case If(_, _, elsep @ Literal(Constant(()))) if elsep.span.isSynthetic =>
           "\nMaybe you are missing an else part for the conditional?"
         case _ => ""
-      errorTree(tree, TypeMismatch(treeTp, pt, implicitFailure.whyNoConversion, missingElse))
+      errorTree(tree, TypeMismatch(treeTp, pt, Some(tree), implicitFailure.whyNoConversion, missingElse))
     }
 
     /** A subtype log explaining why `found` does not conform to `expected` */
@@ -132,18 +142,17 @@ object ErrorReporting {
           |conforms to
           |  $expected
           |but the comparison trace ended with `false`:
-          """
+          |"""
       val c = ctx.typerState.constraint
       val constraintText =
         if c.domainLambdas.isEmpty then
           "the empty constraint"
         else
           i"""a constraint with:
-             |${c.contentsToString}"""
-      i"""
-        |${TypeComparer.explained(_.isSubType(found, expected), header)}
-        |
-        |The tests were made under $constraintText"""
+             |$c"""
+      i"""${TypeComparer.explained(_.isSubType(found, expected), header)}
+         |
+         |The tests were made under $constraintText"""
 
     /** Format `raw` implicitNotFound or implicitAmbiguous argument, replacing
      *  all occurrences of `${X}` where `X` is in `paramNames` with the
@@ -230,7 +239,6 @@ object ErrorReporting {
   def err(using Context): Errors = new Errors
 }
 
-
 class ImplicitSearchError(
   arg: tpd.Tree,
   pt: Type,
@@ -239,6 +247,7 @@ class ImplicitSearchError(
   ignoredInstanceNormalImport: => Option[SearchSuccess],
   importSuggestionAddendum: => String
 )(using ctx: Context) {
+
   def missingArgMsg = arg.tpe match {
     case ambi: AmbiguousImplicits =>
       (ambi.alt1, ambi.alt2) match {
@@ -253,7 +262,9 @@ class ImplicitSearchError(
       val shortMessage = userDefinedImplicitNotFoundParamMessage
         .orElse(userDefinedImplicitNotFoundTypeMessage)
         .getOrElse(defaultImplicitNotFoundMessage)
-      formatMsg(shortMessage)() ++ hiddenImplicitsAddendum
+      formatMsg(shortMessage)()
+      ++ hiddenImplicitsAddendum
+      ++ ErrorReporting.matchReductionAddendum(pt)
   }
 
   private def formatMsg(shortForm: String)(headline: String = shortForm) = arg match {
@@ -303,7 +314,7 @@ class ImplicitSearchError(
   }
 
   private def defaultImplicitNotFoundMessage = {
-    em"no implicit argument of type $pt was found${location("for")}"
+    ex"no implicit argument of type $pt was found${location("for")}"
   }
 
   /** Construct a custom error message given an ambiguous implicit
@@ -402,7 +413,7 @@ class ImplicitSearchError(
 
   private def hiddenImplicitsAddendum: String =
     def hiddenImplicitNote(s: SearchSuccess) =
-      em"\n\nNote: given instance ${s.ref.symbol.showLocated} was not considered because it was not imported with `import given`."
+      em"\n\nNote: ${s.ref.symbol.showLocated} was not considered because it was not imported with `import given`."
 
     val normalImports = ignoredInstanceNormalImport.map(hiddenImplicitNote)
 
diff --git a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala
index 32849de6f560..c04b5f1d2d85 100644
--- a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala
+++ b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala
@@ -48,7 +48,7 @@ abstract class Lifter {
     else {
       val name = UniqueName.fresh(prefix)
       // don't instantiate here, as the type params could be further constrained, see tests/pos/pickleinf.scala
-      var liftedType = expr.tpe.widen
+      var liftedType = expr.tpe.widen.deskolemized
       if (liftedFlags.is(Method)) liftedType = ExprType(liftedType)
       val lifted = newSymbol(ctx.owner, name, liftedFlags | Synthetic, liftedType, coord = spanCoord(expr.span))
       defs += liftedDef(lifted, expr)
@@ -88,8 +88,10 @@ abstract class Lifter {
     methRef.widen match {
       case mt: MethodType =>
         args.lazyZip(mt.paramNames).lazyZip(mt.paramInfos).map { (arg, name, tp) =>
-          val lifter = if (tp.isInstanceOf[ExprType]) exprLifter else this
-          lifter.liftArg(defs, arg, if (name.firstPart contains '$') EmptyTermName else name)
+          if tp.hasAnnotation(defn.InlineParamAnnot) then arg
+          else
+            val lifter = if (tp.isInstanceOf[ExprType]) exprLifter else this
+            lifter.liftArg(defs, arg, if (name.firstPart contains '$') EmptyTermName else name)
         }
       case _ =>
         args.mapConserve(liftArg(defs, _))
diff --git a/compiler/src/dotty/tools/dotc/typer/FrontEnd.scala b/compiler/src/dotty/tools/dotc/typer/FrontEnd.scala
deleted file mode 100644
index 0f51b19ddf91..000000000000
--- a/compiler/src/dotty/tools/dotc/typer/FrontEnd.scala
+++ /dev/null
@@ -1,133 +0,0 @@
-package dotty.tools
-package dotc
-package typer
-
-import core._
-import Phases._
-import Contexts._
-import Symbols._
-import Decorators._
-import ImportInfo.withRootImports
-import parsing.JavaParsers.JavaParser
-import parsing.Parsers.Parser
-import config.Config
-import config.Printers.{typr, default}
-import util.Stats._
-import util.{ SourcePosition, NoSourcePosition }
-import scala.util.control.NonFatal
-import ast.Trees._
-
-class FrontEnd extends Phase {
-
-  override def phaseName: String = FrontEnd.name
-  override def isTyper: Boolean = true
-  import ast.tpd
-
-  override def allowsImplicitSearch: Boolean = true
-
-  /** The contexts for compilation units that are parsed but not yet entered */
-  private var remaining: List[Context] = Nil
-
-  /** The position of the first XML literal encountered while parsing,
-   *  NoSourcePosition if there were no XML literals.
-   */
-  private var firstXmlPos: SourcePosition = NoSourcePosition
-
-  /** Does a source file ending with `.scala` belong to a compilation unit
-   *  that is parsed but not yet entered?
-   */
-  def stillToBeEntered(name: String): Boolean =
-    remaining.exists(_.compilationUnit.toString.endsWith(name + ".scala"))
-
-  def monitor(doing: String)(body: => Unit)(using Context): Unit =
-    try body
-    catch
-      case NonFatal(ex) =>
-        report.echo(s"exception occurred while $doing ${ctx.compilationUnit}")
-        throw ex
-
-  def parse(using Context): Unit = monitor("parsing") {
-    val unit = ctx.compilationUnit
-
-    unit.untpdTree =
-      if (unit.isJava) new JavaParser(unit.source).parse()
-      else {
-        val p = new Parser(unit.source)
-       //  p.in.debugTokenStream = true
-        val tree = p.parse()
-        if (p.firstXmlPos.exists && !firstXmlPos.exists)
-          firstXmlPos = p.firstXmlPos
-        tree
-      }
-
-    val printer = if (ctx.settings.Xprint.value.contains("parser")) default else typr
-    printer.println("parsed:\n" + unit.untpdTree.show)
-    if (Config.checkPositions)
-      unit.untpdTree.checkPos(nonOverlapping = !unit.isJava && !ctx.reporter.hasErrors)
-  }
-
-  def enterSyms(using Context): Unit = monitor("indexing") {
-    val unit = ctx.compilationUnit
-    ctx.typer.index(unit.untpdTree)
-    typr.println("entered: " + unit.source)
-  }
-
-  def typeCheck(using Context): Unit = monitor("typechecking") {
-    try
-      val unit = ctx.compilationUnit
-      if !unit.suspended then
-        unit.tpdTree = ctx.typer.typedExpr(unit.untpdTree)
-        typr.println("typed: " + unit.source)
-        record("retained untyped trees", unit.untpdTree.treeSize)
-        record("retained typed trees after typer", unit.tpdTree.treeSize)
-    catch
-      case ex: CompilationUnit.SuspendException =>
-  }
-
-  def javaCheck(using Context): Unit = monitor("checking java") {
-    val unit = ctx.compilationUnit
-    if unit.isJava then
-      JavaChecks.check(unit.tpdTree)
-  }
-
-
-  private def firstTopLevelDef(trees: List[tpd.Tree])(using Context): Symbol = trees match
-    case PackageDef(_, defs) :: _    => firstTopLevelDef(defs)
-    case Import(_, _) :: defs        => firstTopLevelDef(defs)
-    case (tree @ TypeDef(_, _)) :: _ => tree.symbol
-    case _ => NoSymbol
-
-  protected def discardAfterTyper(unit: CompilationUnit)(using Context): Boolean =
-    unit.isJava || unit.suspended
-
-  override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] =
-    val unitContexts =
-      for unit <- units yield
-        report.inform(s"compiling ${unit.source}")
-        ctx.fresh.setCompilationUnit(unit).withRootImports
-    unitContexts.foreach(parse(using _))
-    record("parsedTrees", ast.Trees.ntrees)
-    remaining = unitContexts
-    while remaining.nonEmpty do
-      enterSyms(using remaining.head)
-      remaining = remaining.tail
-
-    if firstXmlPos.exists && !defn.ScalaXmlPackageClass.exists then
-      report.error("""To support XML literals, your project must depend on scala-xml.
-                  |See https://github.com/scala/scala-xml for more information.""".stripMargin,
-        firstXmlPos)
-
-    unitContexts.foreach(typeCheck(using _))
-    record("total trees after typer", ast.Trees.ntrees)
-    unitContexts.foreach(javaCheck(using _)) // after typechecking to avoid cycles
-
-    val newUnits = unitContexts.map(_.compilationUnit).filterNot(discardAfterTyper)
-    ctx.run.checkSuspendedUnits(newUnits)
-    newUnits
-
-  def run(using Context): Unit = unsupported("run")
-}
-
-object FrontEnd {
-  val name: String = "typer"
-}
diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala
index 2b01a7245969..4824031f12bc 100644
--- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala
@@ -25,6 +25,7 @@ import Constants._
 import ProtoTypes._
 import ErrorReporting._
 import Inferencing.{fullyDefinedType, isFullyDefined}
+import Scopes.newScope
 import Trees._
 import transform.SymUtils._
 import transform.TypeUtils._
@@ -66,10 +67,10 @@ object Implicits:
   }
   object Candidate {
     type Kind = Int
-    final val None = 0
-    final val Value = 1
-    final val Conversion = 2
-    final val Extension = 4
+    inline val None = 0
+    inline val Value = 1
+    inline val Conversion = 2
+    inline val Extension = 4
   }
 
   /** If `expected` is a selection prototype, does `tp` have an extension
@@ -287,7 +288,7 @@ object Implicits:
       }
 
     override def isAccessible(ref: TermRef)(using Context): Boolean =
-      ref.symbol.exists && !ref.symbol.is(Private)
+      ref.symbol.exists
 
     override def toString: String =
       i"OfTypeImplicits($tp), companions = ${companionRefs.showAsList}%, %; refs = $refs%, %."
@@ -330,11 +331,25 @@ object Implicits:
       (this eq finalImplicits) || (outerImplicits eq finalImplicits)
     }
 
+    private def combineEligibles(ownEligible: List[Candidate], outerEligible: List[Candidate]): List[Candidate] =
+      if ownEligible.isEmpty then outerEligible
+      else if outerEligible.isEmpty then ownEligible
+      else
+        val shadowed = ownEligible.map(_.ref.implicitName).toSet
+        ownEligible ::: outerEligible.filterConserve(cand => !shadowed.contains(cand.ref.implicitName))
+
+    def uncachedEligible(tp: Type)(using Context): List[Candidate] =
+      Stats.record("uncached eligible")
+      if monitored then record(s"check uncached eligible refs in irefCtx", refs.length)
+      val ownEligible = filterMatching(tp)
+      if isOuterMost then ownEligible
+      else combineEligibles(ownEligible, outerImplicits.uncachedEligible(tp))
+
     /** The implicit references that are eligible for type `tp`. */
     def eligible(tp: Type): List[Candidate] =
       if (tp.hash == NotCached)
         Stats.record(i"compute eligible not cached ${tp.getClass}")
-        Stats.record(i"compute eligible not cached")
+        Stats.record("compute eligible not cached")
         computeEligible(tp)
       else {
         val eligibles = eligibleCache.lookup(tp)
@@ -354,14 +369,8 @@ object Implicits:
     private def computeEligible(tp: Type): List[Candidate] = /*>|>*/ trace(i"computeEligible $tp in $refs%, %", implicitsDetailed) /*<|<*/ {
       if (monitored) record(s"check eligible refs in irefCtx", refs.length)
       val ownEligible = filterMatching(tp)
-      if (isOuterMost) ownEligible
-      else if ownEligible.isEmpty then outerImplicits.eligible(tp)
-      else
-        val outerEligible = outerImplicits.eligible(tp)
-        if outerEligible.isEmpty then ownEligible
-        else
-          val shadowed = ownEligible.map(_.ref.implicitName).toSet
-          ownEligible ::: outerEligible.filterConserve(cand => !shadowed.contains(cand.ref.implicitName))
+      if isOuterMost then ownEligible
+      else combineEligibles(ownEligible, outerImplicits.eligible(tp))
     }
 
     override def isAccessible(ref: TermRef)(using Context): Boolean =
@@ -462,20 +471,26 @@ object Implicits:
       val ctx1 = ctx.fresh.setExploreTyperState()
       ctx1.typerState.constraint = constraint
       inContext(ctx1) {
-        val map = new TypeMap {
-          def apply(t: Type): Type = t match {
+        val map = new TypeMap:
+          def apply(t: Type): Type = t match
             case t: TypeParamRef =>
-              constraint.entry(t) match {
-                case NoType => t
-                case bounds: TypeBounds => TypeComparer.fullBounds(t)
+              constraint.entry(t) match
+                case NoType | _: TypeBounds => t
                 case t1 => t1
-              }
             case t: TypeVar =>
               t.instanceOpt.orElse(apply(t.origin))
             case _ =>
               mapOver(t)
-          }
-        }
+
+          override def mapArgs(args: List[Type], tparams: List[ParamInfo]) =
+            args.mapConserve {
+              case t: TypeParamRef =>
+                constraint.entry(t) match
+                  case bounds: TypeBounds => TypeComparer.fullBounds(t)
+                  case _ => this(t)
+              case t => this(t)
+            }
+        end map
         map(tp)
       }
 
@@ -489,10 +504,20 @@ object Implicits:
   @sharable val NoMatchingImplicitsFailure: SearchFailure =
     SearchFailure(NoMatchingImplicits, NoSpan)(using NoContext)
 
+  @sharable object ImplicitSearchTooLarge extends NoMatchingImplicits(NoType, EmptyTree, OrderingConstraint.empty)
+
+  @sharable val ImplicitSearchTooLargeFailure: SearchFailure =
+    SearchFailure(ImplicitSearchTooLarge, NoSpan)(using NoContext)
+
   /** An ambiguous implicits failure */
   class AmbiguousImplicits(val alt1: SearchSuccess, val alt2: SearchSuccess, val expectedType: Type, val argument: Tree) extends SearchFailureType {
     def explanation(using Context): String =
-      em"both ${err.refStr(alt1.ref)} and ${err.refStr(alt2.ref)} $qualify"
+      var str1 = err.refStr(alt1.ref)
+      var str2 = err.refStr(alt2.ref)
+      if str1 == str2 then
+        str1 = ctx.printer.toTextRef(alt1.ref).show
+        str2 = ctx.printer.toTextRef(alt2.ref).show
+      em"both $str1 and $str2 $qualify"
     override def whyNoConversion(using Context): String =
       if !argument.isEmpty && argument.tpe.widen.isRef(defn.NothingClass) then
         ""
@@ -557,7 +582,6 @@ trait ImplicitRunInfo:
 
     object collectParts extends TypeTraverser:
 
-      private var provisional: Boolean = _
       private var parts: mutable.LinkedHashSet[Type] = _
       private val partSeen = util.HashSet[Type]()
 
@@ -582,19 +606,18 @@ trait ImplicitRunInfo:
             case t: ConstantType =>
               traverse(t.underlying)
             case t: TypeParamRef =>
+              assert(!ctx.typerState.constraint.contains(t), i"`wildApprox` failed to remove uninstantiated $t")
               traverse(t.underlying)
-              if ctx.typerState.constraint.contains(t) then provisional = true
             case t: TermParamRef =>
               traverse(t.underlying)
             case t =>
               traverseChildren(t)
 
-      def apply(tp: Type): (collection.Set[Type], Boolean) =
-        provisional = false
+      def apply(tp: Type): collection.Set[Type] =
         parts = mutable.LinkedHashSet()
         partSeen.clear()
         traverse(tp)
-        (parts, provisional)
+        parts
     end collectParts
 
     val seen = util.HashSet[Type]()
@@ -669,12 +692,11 @@ trait ImplicitRunInfo:
     end collectCompanions
 
     def recur(tp: Type): OfTypeImplicits =
-      val (parts, provisional) = collectParts(tp)
+      val parts = collectParts(tp)
       val companions = collectCompanions(tp, parts)
       val result = OfTypeImplicits(tp, companions)(runContext)
       if Config.cacheImplicitScopes
         && tp.hash != NotCached
-        && !provisional
         && (tp eq rootTp)              // first type traversed is always cached
            || !incomplete.contains(tp) // other types are cached if they are not incomplete
       then implicitScopeCache(tp) = result
@@ -727,7 +749,7 @@ trait ImplicitRunInfo:
       case null =>
         record(i"implicitScope")
         val liftToAnchors = new TypeMap:
-          override def stopAtStatic = true
+          override def stopAt = StopAt.Static
           private val seen = util.HashSet[Type]()
 
           def applyToUnderlying(t: TypeProxy) =
@@ -788,16 +810,8 @@ trait Implicits:
    */
   def inferView(from: Tree, to: Type)(using Context): SearchResult = {
     record("inferView")
-    val wfromtp = from.tpe.widen
-    if    to.isAny
-       || to.isAnyRef
-       || to.isRef(defn.UnitClass)
-       || wfromtp.isRef(defn.NothingClass)
-       || wfromtp.isRef(defn.NullClass)
-       || !ctx.mode.is(Mode.ImplicitsEnabled)
-       || from.isInstanceOf[Super]
-       || (wfromtp eq NoPrefix)
-    then NoMatchingImplicitsFailure
+    if !ctx.mode.is(Mode.ImplicitsEnabled) || from.isInstanceOf[Super] then
+      NoMatchingImplicitsFailure
     else {
       def adjust(to: Type) = to.stripTypeVar.widenExpr match {
         case SelectionProto(name, memberProto, compat, true) =>
@@ -874,7 +888,8 @@ trait Implicits:
     def ignoredInstanceNormalImport = arg.tpe match
       case fail: SearchFailureType =>
         if (fail.expectedType eq pt) || isFullyDefined(fail.expectedType, ForceDegree.none) then
-          inferImplicit(fail.expectedType, fail.argument, arg.span) match {
+          inferImplicit(fail.expectedType, fail.argument, arg.span)(
+            using findHiddenImplicitsCtx(ctx)) match {
             case s: SearchSuccess => Some(s)
             case f: SearchFailure =>
               f.reason match {
@@ -924,6 +939,8 @@ trait Implicits:
           apply(t.widen)
         case t: RefinedType =>
           apply(t.parent)
+        case t: LazyRef =>
+          t
         case _ =>
           if (variance > 0) mapOver(t) else t
       }
@@ -963,7 +980,7 @@ trait Implicits:
     trace(s"search implicit ${pt.show}, arg = ${argument.show}: ${argument.tpe.show}", implicits, show = true) {
       record("inferImplicit")
       assert(ctx.phase.allowsImplicitSearch,
-        if (argument.isEmpty) i"missing implicit parameter of type $pt after typer"
+        if (argument.isEmpty) i"missing implicit parameter of type $pt after typer at phase ${ctx.phase.phaseName}"
         else i"type error: ${argument.tpe} does not conform to $pt${err.whyNoMatchStr(argument.tpe, pt)}")
 
       if pt.unusableForInference
@@ -971,16 +988,31 @@ trait Implicits:
       then return NoMatchingImplicitsFailure
 
       val result0 =
-        try ImplicitSearch(pt, argument, span).bestImplicit
+        // If we are searching implicits when resolving an import symbol, start the search
+        // in the first enclosing context that does not have the same scope and owner as the current
+        // context. Without that precaution, an eligible implicit in the current scope
+        // would cause a cyclic reference error (if the import is named) or cause a
+        // spurious import skip (if the import is a wildcard import). See i12802 for a test case.
+        var searchCtx = ctx
+        if ctx.owner.isImport then
+          while
+            searchCtx = searchCtx.outer
+            (searchCtx.scope eq ctx.scope) && (searchCtx.owner eq ctx.owner.owner)
+          do ()
+
+        try ImplicitSearch(pt, argument, span)(using searchCtx).bestImplicit
         catch case ce: CyclicReference =>
           ce.inImplicitSearch = true
           throw ce
+      end result0
 
       val result =
         result0 match {
           case result: SearchSuccess =>
-            result.tstate.commit()
-            ctx.gadt.restore(result.gstate)
+            if result.tstate ne ctx.typerState then
+              result.tstate.commit()
+            if result.gstate ne ctx.gadt then
+              ctx.gadt.restore(result.gstate)
             if hasSkolem(false, result.tree) then
               report.error(SkolemInInferred(result.tree, pt, argument), ctx.source.atSpan(span))
             implicits.println(i"success: $result")
@@ -1018,13 +1050,22 @@ trait Implicits:
       val generated: Tree = tpd.ref(ref).withSpan(span.startPos)
       val locked = ctx.typerState.ownedVars
       val adapted =
-        if (argument.isEmpty)
-          adapt(generated, pt.widenExpr, locked)
+        if argument.isEmpty then
+          if defn.isContextFunctionType(pt) then
+            // need to go through typed, to build the context closure
+            typed(untpd.TypedSplice(generated), pt, locked)
+          else
+            // otherwise we can skip typing and go directly to adapt
+            adapt(generated, pt.widenExpr, locked)
         else {
           def untpdGenerated = untpd.TypedSplice(generated)
+          def producesConversion(info: Type): Boolean = info match
+            case info: PolyType => producesConversion(info.resType)
+            case info: MethodType if info.isImplicitMethod => producesConversion(info.resType)
+            case _ => info.derivesFrom(defn.ConversionClass)
           def tryConversion(using Context) = {
             val untpdConv =
-              if (ref.symbol.is(Given))
+              if ref.symbol.is(Given) && producesConversion(ref.symbol.info) then
                 untpd.Select(
                   untpd.TypedSplice(
                     adapt(generated,
@@ -1105,23 +1146,47 @@ trait Implicits:
 
     val isNotGiven: Boolean = wildProto.classSymbol == defn.NotGivenClass
 
+    private def searchTooLarge(): Boolean = ctx.searchHistory match
+      case root: SearchRoot =>
+        root.nestedSearches = 1
+        false
+      case h =>
+        val limit = ctx.settings.XimplicitSearchLimit.value
+        val nestedSearches = h.root.nestedSearches
+        val result = nestedSearches > limit
+        if result then
+          var c = ctx
+          while c.outer.typer eq ctx.typer do c = c.outer
+          report.warning(ImplicitSearchTooLargeWarning(limit, h.openSearchPairs), ctx.source.atSpan(span))(using c)
+        else
+          h.root.nestedSearches = nestedSearches + 1
+        result
+
     /** Try to type-check implicit reference, after checking that this is not
       * a diverging search
       */
     def tryImplicit(cand: Candidate, contextual: Boolean): SearchResult =
       if checkDivergence(cand) then
         SearchFailure(new DivergingImplicit(cand.ref, wideProto, argument), span)
-      else {
+      else if searchTooLarge() then
+        ImplicitSearchTooLargeFailure
+      else
         val history = ctx.searchHistory.nest(cand, pt)
-        val result =
-          typedImplicit(cand, pt, argument, span)(using nestedContext().setNewTyperState().setFreshGADTBounds.setSearchHistory(history))
-        result match {
+        val typingCtx =
+          nestedContext().setNewTyperState().setFreshGADTBounds.setSearchHistory(history)
+        val result = typedImplicit(cand, pt, argument, span)(using typingCtx)
+        result match
           case res: SearchSuccess =>
             ctx.searchHistory.defineBynameImplicit(wideProto, res)
           case _ =>
+            // Since the search failed, the local typerstate will be discarded
+            // without being committed, but type variables local to that state
+            // might still appear in an error message, so we run `gc()` here to
+            // make sure we don't forget their instantiation. This leads to more
+            // precise error messages in tests/neg/missing-implicit3.check and
+            // tests/neg/implicitSearch.check
+            typingCtx.typerState.gc()
             result
-        }
-      }
 
     /** Search a list of eligible implicit references */
     private def searchImplicit(eligible: List[Candidate], contextual: Boolean): SearchResult =
@@ -1150,7 +1215,29 @@ trait Implicits:
             // compare the extension methods instead of their wrappers.
             def stripExtension(alt: SearchSuccess) = methPart(stripApply(alt.tree)).tpe
             (stripExtension(alt1), stripExtension(alt2)) match
-              case (ref1: TermRef, ref2: TermRef) => diff = compare(ref1, ref2)
+              case (ref1: TermRef, ref2: TermRef) =>
+                // ref1 and ref2 might refer to type variables owned by
+                // alt1.tstate and alt2.tstate respectively, to compare the
+                // alternatives correctly we need a TyperState that includes
+                // constraints from both sides, see
+                // tests/*/extension-specificity2.scala for test cases.
+                val constraintsIn1 = alt1.tstate.constraint ne ctx.typerState.constraint
+                val constraintsIn2 = alt2.tstate.constraint ne ctx.typerState.constraint
+                def exploreState(alt: SearchSuccess): TyperState =
+                  alt.tstate.fresh(committable = false)
+                val comparisonState =
+                  if constraintsIn1 && constraintsIn2 then
+                    exploreState(alt1).mergeConstraintWith(alt2.tstate)
+                  else if constraintsIn1 then
+                    exploreState(alt1)
+                  else if constraintsIn2 then
+                    exploreState(alt2)
+                  else
+                    ctx.typerState
+
+                diff = inContext(ctx.withTyperState(comparisonState)) {
+                  compare(ref1, ref2)
+                }
               case _ =>
           if diff < 0 then alt2
           else if diff > 0 then alt1
@@ -1188,7 +1275,9 @@ trait Implicits:
 
             negateIfNot(tryImplicit(cand, contextual)) match {
               case fail: SearchFailure =>
-                if (fail.isAmbiguous)
+                if fail eq ImplicitSearchTooLargeFailure then
+                  fail
+                else if (fail.isAmbiguous)
                   if migrateTo3 then
                     val result = rank(remaining, found, NoMatchingImplicitsFailure :: rfailures)
                     if (result.isSuccess)
@@ -1247,78 +1336,158 @@ trait Implicits:
              |Consider using the scala.util.NotGiven class to implement similar functionality.""",
              ctx.source.atSpan(span))
 
-      /** A relation that influences the order in which implicits are tried.
+      /** Compare the length of the baseClasses of two symbols (except for objects,
+       *  where we use the length of the companion class instead if it's bigger).
+       *
+       *  This relation is meant to approximate `Applications#compareOwner` while also
+       *  inducing a total ordering: `compareOwner` returns `0` for unrelated symbols
+       *  and therefore only induces a partial ordering, meaning it cannot be used
+       *  as a sorting function (see `java.util.Comparator#compare`).
+       */
+      def compareBaseClassesLength(sym1: Symbol, sym2: Symbol): Int =
+        def len(sym: Symbol) =
+          if sym.is(ModuleClass) && sym.companionClass.exists then
+            Math.max(sym.asClass.baseClassesLength, sym.companionClass.asClass.baseClassesLength)
+          else if sym.isClass then
+            sym.asClass.baseClassesLength
+          else
+            0
+        len(sym1) - len(sym2)
+
+      /** A relation that influences the order in which eligible implicits are tried.
+       *
        *  We prefer (in order of importance)
        *   1. more deeply nested definitions
        *   2. definitions with fewer implicit parameters
-       *   3. definitions in subclasses
+       *   3. definitions whose owner has more parents (see `compareBaseClassesLength`)
        *  The reason for (2) is that we want to fail fast if the search type
        *  is underconstrained. So we look for "small" goals first, because that
        *  will give an ambiguity quickly.
        */
-      def prefer(cand1: Candidate, cand2: Candidate): Boolean =
-        val level1 = cand1.level
-        val level2 = cand2.level
-        if level1 > level2 then return true
-        if level1 < level2 then return false
-        val sym1 = cand1.ref.symbol
-        val sym2 = cand2.ref.symbol
+      def compareEligibles(e1: Candidate, e2: Candidate): Int =
+        if e1 eq e2 then return 0
+        val cmpLevel = e1.level - e2.level
+        if cmpLevel != 0 then return -cmpLevel // 1.
+        val sym1 = e1.ref.symbol
+        val sym2 = e2.ref.symbol
         val arity1 = sym1.info.firstParamTypes.length
         val arity2 = sym2.info.firstParamTypes.length
-        if arity1 < arity2 then return true
-        if arity1 > arity2 then return false
-        compareOwner(sym1.owner, sym2.owner) == 1
+        val cmpArity = arity1 - arity2
+        if cmpArity != 0 then return cmpArity // 2.
+        val cmpBcs = compareBaseClassesLength(sym1.owner, sym2.owner)
+        -cmpBcs // 3.
 
-      /** Sort list of implicit references according to `prefer`.
+      /** Check if `ord` respects the contract of `Ordering`.
+       *
+       *  More precisely, we check that its `compare` method respects the invariants listed
+       *  in https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html#compare-T-T-
+       */
+      def validateOrdering(ord: Ordering[Candidate]): Unit =
+        for
+          x <- eligible
+          y <- eligible
+          cmpXY = Integer.signum(ord.compare(x, y))
+          cmpYX = Integer.signum(ord.compare(y, x))
+          z <- eligible
+          cmpXZ = Integer.signum(ord.compare(x, z))
+          cmpYZ = Integer.signum(ord.compare(y, z))
+        do
+          def reportViolation(msg: String): Unit =
+            Console.err.println(s"Internal error: comparison function violated ${msg.stripMargin}")
+          def showCandidate(c: Candidate): String =
+            s"$c (${c.ref.symbol.showLocated})"
+
+          if cmpXY != -cmpYX then
+            reportViolation(
+              s"""signum(cmp(x, y)) == -signum(cmp(y, x)) given:
+                 |x = ${showCandidate(x)}
+                 |y = ${showCandidate(y)}
+                 |cmpXY = $cmpXY
+                 |cmpYX = $cmpYX""")
+          if cmpXY != 0 && cmpXY == cmpYZ && cmpXZ != cmpXY then
+            reportViolation(
+              s"""transitivity given:
+                 |x = ${showCandidate(x)}
+                 |y = ${showCandidate(y)}
+                 |z = ${showCandidate(z)}
+                 |cmpXY = $cmpXY
+                 |cmpXZ = $cmpXZ
+                 |cmpYZ = $cmpYZ""")
+          if cmpXY == 0 && cmpXZ != cmpYZ then
+            reportViolation(
+              s"""cmp(x, y) == 0 implies that signum(cmp(x, z)) == signum(cmp(y, z)) given:
+                 |x = ${showCandidate(x)}
+                 |y = ${showCandidate(y)}
+                 |z = ${showCandidate(z)}
+                 |cmpXY = $cmpXY
+                 |cmpXZ = $cmpXZ
+                 |cmpYZ = $cmpYZ""")
+      end validateOrdering
+
+      /** Sort list of implicit references according to `compareEligibles`.
        *  This is just an optimization that aims at reducing the average
        *  number of candidates to be tested.
        */
-      def sort(eligible: List[Candidate]) = eligible match {
+      def sort(eligible: List[Candidate]) = eligible match
         case Nil => eligible
         case e1 :: Nil => eligible
         case e1 :: e2 :: Nil =>
-          if (prefer(e2, e1)) e2 :: e1 :: Nil
+          if compareEligibles(e2, e1) < 0 then e2 :: e1 :: Nil
           else eligible
         case _ =>
-          try eligible.sortWith(prefer)
+          val ord: Ordering[Candidate] = (a, b) => compareEligibles(a, b)
+          try eligible.sorted(using ord)
           catch case ex: IllegalArgumentException =>
-            // diagnostic to see what went wrong
-            for
-              e1 <- eligible
-              e2 <- eligible
-              if prefer(e1, e2)
-              e3 <- eligible
-              if prefer(e2, e3) && !prefer(e1, e3)
-            do
-              val es = List(e1, e2, e3)
-              println(i"transitivity violated for $es%, %\n ${es.map(_.implicitRef.underlyingRef.symbol.showLocated)}%, %")
+            // This exception being thrown probably means that our comparison
+            // function is broken, check if that's the case
+            validateOrdering(ord)
             throw ex
-      }
 
       rank(sort(eligible), NoMatchingImplicitsFailure, Nil)
     end searchImplicit
 
+    def isUnderSpecifiedArgument(tp: Type): Boolean =
+      tp.isRef(defn.NothingClass) || tp.isRef(defn.NullClass) || (tp eq NoPrefix)
+
+    private def isUnderspecified(tp: Type): Boolean = tp.stripTypeVar match
+      case tp: WildcardType =>
+        !tp.optBounds.exists || isUnderspecified(tp.optBounds.hiBound)
+      case tp: ViewProto =>
+        isUnderspecified(tp.resType)
+        || tp.resType.isRef(defn.UnitClass)
+        || isUnderSpecifiedArgument(tp.argType.widen)
+      case _ =>
+        tp.isAny || tp.isAnyRef
+
     private def searchImplicit(contextual: Boolean): SearchResult =
-      val eligible =
-        if contextual then ctx.implicits.eligible(wildProto)
-        else implicitScope(wildProto).eligible
-      searchImplicit(eligible, contextual) match
-        case result: SearchSuccess =>
-          result
-        case failure: SearchFailure =>
-          failure.reason match
-            case _: AmbiguousImplicits => failure
-            case reason =>
-              if contextual then
-                searchImplicit(contextual = false).recoverWith {
-                  failure2 => failure2.reason match
-                    case _: AmbiguousImplicits => failure2
-                    case _ =>
-                      reason match
-                        case (_: DivergingImplicit) => failure
-                        case _ => List(failure, failure2).maxBy(_.tree.treeSize)
-                }
-              else failure
+      if isUnderspecified(wildProto) then
+        NoMatchingImplicitsFailure
+      else
+        val eligible =
+          if contextual then
+            if ctx.gadt.isNarrowing then
+              withoutMode(Mode.ImplicitsEnabled) {
+                ctx.implicits.uncachedEligible(wildProto)
+              }
+            else ctx.implicits.eligible(wildProto)
+          else implicitScope(wildProto).eligible
+        searchImplicit(eligible, contextual) match
+          case result: SearchSuccess =>
+            result
+          case failure: SearchFailure =>
+            failure.reason match
+              case _: AmbiguousImplicits => failure
+              case reason =>
+                if contextual then
+                  searchImplicit(contextual = false).recoverWith {
+                    failure2 => failure2.reason match
+                      case _: AmbiguousImplicits => failure2
+                      case _ =>
+                        reason match
+                          case (_: DivergingImplicit) => failure
+                          case _ => List(failure, failure2).maxBy(_.tree.treeSize)
+                  }
+                else failure
     end searchImplicit
 
     /** Find a unique best implicit reference */
@@ -1497,13 +1666,17 @@ case class OpenSearch(cand: Candidate, pt: Type, outer: SearchHistory)(using Con
 end OpenSearch
 
 /**
- * The the state corresponding to the outermost context of an implicit searcch.
+ * The state corresponding to the outermost context of an implicit search.
  */
 final class SearchRoot extends SearchHistory:
   val root = this
   val byname = false
   def openSearchPairs = Nil
 
+  /** How many expressions were constructed so far in the current toplevel implicit search?
+   */
+  var nestedSearches: Int = 0
+
   /** The dictionary of recursive implicit types and corresponding terms for this search. */
   var myImplicitDictionary: mutable.Map[Type, (TermRef, tpd.Tree)] = null
   private def implicitDictionary =
@@ -1626,7 +1799,7 @@ final class SearchRoot extends SearchHistory:
             // }
 
             val parents = List(defn.ObjectType, defn.SerializableType)
-            val classSym = newNormalizedClassSymbol(ctx.owner, LazyImplicitName.fresh().toTypeName, Synthetic | Final, parents, coord = span)
+            val classSym = newNormalizedClassSymbol(ctx.owner, LazyImplicitName.fresh().toTypeName, Synthetic | Final, parents, newScope, coord = span)
             val vsyms = pruned.map(_._1.symbol)
             val nsyms = vsyms.map(vsym => newSymbol(classSym, vsym.name, EmptyFlags, vsym.info, coord = span).entered)
             val vsymMap = (vsyms zip nsyms).toMap
diff --git a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala
index a975dd206d22..e8a923427b38 100644
--- a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala
+++ b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala
@@ -151,7 +151,7 @@ class ImportInfo(symf: Context ?=> Symbol,
     else
       for
         renamed <- reverseMapping.keys
-        denot <- pre.member(reverseMapping(renamed)).altsWith(_.isOneOf(GivenOrImplicit))
+        denot <- pre.member(reverseMapping(renamed)).altsWith(_.isOneOf(GivenOrImplicitVal))
       yield
         val original = reverseMapping(renamed)
         val ref = TermRef(pre, original, denot)
diff --git a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala
index 7c11508ece65..bc82c9501c76 100644
--- a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala
+++ b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala
@@ -167,12 +167,15 @@ trait ImportSuggestions:
       allCandidates.map(_.implicitRef.underlyingRef.symbol).toSet
     }
 
+    def testContext(): Context =
+      ctx.fresh.retractMode(Mode.ImplicitsEnabled).setExploreTyperState()
+
     /** Test whether the head of a given instance matches the expected type `pt`,
      *  ignoring any dependent implicit arguments.
      */
     def shallowTest(ref: TermRef): Boolean =
       System.currentTimeMillis < deadLine
-      && inContext(ctx.fresh.setExploreTyperState()) {
+      && inContext(testContext()) {
         def test(pt: Type): Boolean = pt match
           case ViewProto(argType, OrType(rt1, rt2)) =>
             // Union types do not constrain results, since comparison with a union
@@ -209,7 +212,7 @@ trait ImportSuggestions:
         try
           timer.schedule(task, testOneImplicitTimeOut)
           typedImplicit(candidate, expectedType, argument, span)(
-            using ctx.fresh.setExploreTyperState()).isSuccess
+            using testContext()).isSuccess
         finally
           if task.cancel() then // timer task has not run yet
             assert(!ctx.run.isCancelled)
@@ -270,11 +273,14 @@ trait ImportSuggestions:
   /** The `ref` parts of this list of pairs, discarding subsequent elements that
    *  have the same String part. Elements are sorted by their String parts.
    */
-  extension (refs: List[(TermRef, String)]) def distinctRefs(using Context): List[TermRef] = refs match
-    case (ref, str) :: refs1 =>
-      ref :: refs1.dropWhile(_._2 == str).distinctRefs
-    case Nil =>
-      Nil
+  extension (refs: List[(TermRef, String)]) def distinctRefs(using Context): List[TermRef] =
+    val buf = new mutable.ListBuffer[TermRef]
+    var last = ""
+    for (ref, str) <- refs do
+      if last != str then
+        buf += ref
+        last = str
+    buf.toList
 
   /** The best `n` references in `refs`, according to `compare`
    *  `compare` is a partial order. If there's a tie, we take elements
diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
index 59ee126f382d..17df9c93f9a9 100644
--- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala
@@ -7,7 +7,7 @@ import ast._
 import Contexts._, Types._, Flags._, Symbols._
 import Trees._
 import ProtoTypes._
-import NameKinds.UniqueName
+import NameKinds.{AvoidNameKind, UniqueName}
 import util.Spans._
 import util.{Stats, SimpleIdentityMap}
 import Decorators._
@@ -33,7 +33,10 @@ object Inferencing {
    */
   def isFullyDefined(tp: Type, force: ForceDegree.Value)(using Context): Boolean = {
     val nestedCtx = ctx.fresh.setNewTyperState()
-    val result = new IsFullyDefinedAccumulator(force)(using nestedCtx).process(tp)
+    val result =
+      try new IsFullyDefinedAccumulator(force)(using nestedCtx).process(tp)
+      catch case ex: StackOverflowError =>
+        false // can happen for programs with illegal recursions, e.g. neg/recursive-lower-constraint.scala
     if (result) nestedCtx.typerState.commit()
     result
   }
@@ -43,7 +46,7 @@ object Inferencing {
    */
   def canDefineFurther(tp: Type)(using Context): Boolean =
     val prevConstraint = ctx.typerState.constraint
-    isFullyDefined(tp, force = ForceDegree.all)
+    isFullyDefined(tp, force = ForceDegree.failBottom)
     && (ctx.typerState.constraint ne prevConstraint)
 
   /** The fully defined type, where all type variables are forced.
@@ -89,10 +92,16 @@ object Inferencing {
   }
 
   /** If `tp` is top-level type variable with a lower bound in the current constraint,
-   *  instantiate it from below. We also look for TypeVars whereever their instantiation
-   *  could uncover new type members.
+   *  instantiate it from below. We also look for TypeVars in other places where
+   *  their instantiation could uncover new type members. However that search is best
+   *  effort only. It might miss type variables that appear in structures involving
+   *  alias types and type projections.
+   *  @param applied  Test is done in a `tryInsertImplicitOnQualifier` application.
+   *                  In this case, we always try to instantiate TypeVars in type arguments.
+   *                  If `applied` is false, we only try that in arguments that may affect
+   *                  the result type.
    */
-  def couldInstantiateTypeVar(tp: Type)(using Context): Boolean = tp.dealias match
+  def couldInstantiateTypeVar(tp: Type, applied: Boolean = false)(using Context): Boolean = tp.dealias match
     case tvar: TypeVar
     if !tvar.isInstantiated
        && ctx.typerState.constraint.contains(tvar)
@@ -115,14 +124,14 @@ object Inferencing {
                 case _ => Nil
             case _ => Nil
         case _ => Nil
-      couldInstantiateTypeVar(tycon)
-      || argsInResult.exists(couldInstantiateTypeVar)
+      couldInstantiateTypeVar(tycon, applied)
+      || (if applied then args else argsInResult).exists(couldInstantiateTypeVar(_, applied))
     case RefinedType(parent, _, _) =>
-      couldInstantiateTypeVar(parent)
+      couldInstantiateTypeVar(parent, applied)
     case tp: AndOrType =>
-      couldInstantiateTypeVar(tp.tp1) || couldInstantiateTypeVar(tp.tp2)
+      couldInstantiateTypeVar(tp.tp1, applied) || couldInstantiateTypeVar(tp.tp2, applied)
     case AnnotatedType(tp, _) =>
-      couldInstantiateTypeVar(tp)
+      couldInstantiateTypeVar(tp, applied)
     case _ =>
       false
 
@@ -173,12 +182,14 @@ object Inferencing {
         && ctx.typerState.constraint.contains(tvar)
         && {
           val direction = instDirection(tvar.origin)
-          if direction != 0 then
+          if minimizeSelected then
+            if direction <= 0 && tvar.hasLowerBound then
+              instantiate(tvar, fromBelow = true)
+            else if direction >= 0 && tvar.hasUpperBound then
+              instantiate(tvar, fromBelow = false)
+            // else hold off instantiating unbounded unconstrained variable
+          else if direction != 0 then
             instantiate(tvar, fromBelow = direction < 0)
-          else if minimizeSelected then
-            if tvar.hasLowerBound then instantiate(tvar, fromBelow = true)
-            else if tvar.hasUpperBound then instantiate(tvar, fromBelow = false)
-            else () // hold off instantiating unbounded unconstrained variables
           else if variance >= 0 && (force.ifBottom == IfBottom.ok || tvar.hasLowerBound) then
             instantiate(tvar, fromBelow = true)
           else if variance >= 0 && force.ifBottom == IfBottom.fail then
@@ -326,7 +337,7 @@ object Inferencing {
     @tailrec def boundVars(tree: Tree, acc: List[TypeVar]): List[TypeVar] = tree match {
       case Apply(fn, _) => boundVars(fn, acc)
       case TypeApply(fn, targs) =>
-        val tvars = targs.filter(_.isInstanceOf[TypeVarBinder[?]]).tpes.collect {
+        val tvars = targs.filter(_.isInstanceOf[InferredTypeTree]).tpes.collect {
           case tvar: TypeVar
           if !tvar.isInstantiated &&
              ctx.typerState.ownedVars.contains(tvar) &&
@@ -396,20 +407,21 @@ object Inferencing {
     val vs = variances(tp)
     val patternBindings = new mutable.ListBuffer[(Symbol, TypeParamRef)]
     vs foreachBinding { (tvar, v) =>
-      if (v == 1) tvar.instantiate(fromBelow = false)
-      else if (v == -1) tvar.instantiate(fromBelow = true)
-      else {
-        val bounds = TypeComparer.fullBounds(tvar.origin)
-        if (bounds.hi <:< bounds.lo || bounds.hi.classSymbol.is(Final) || fromScala2x)
-          tvar.instantiate(fromBelow = false)
+      if !tvar.isInstantiated then
+        if (v == 1) tvar.instantiate(fromBelow = false)
+        else if (v == -1) tvar.instantiate(fromBelow = true)
         else {
-          // We do not add the created symbols to GADT constraint immediately, since they may have inter-dependencies.
-          // Instead, we simultaneously add them later on.
-          val wildCard = newPatternBoundSymbol(UniqueName.fresh(tvar.origin.paramName), bounds, span, addToGadt = false)
-          tvar.instantiateWith(wildCard.typeRef)
-          patternBindings += ((wildCard, tvar.origin))
+          val bounds = TypeComparer.fullBounds(tvar.origin)
+          if (bounds.hi <:< bounds.lo || bounds.hi.classSymbol.is(Final) || fromScala2x)
+            tvar.instantiate(fromBelow = false)
+          else {
+            // We do not add the created symbols to GADT constraint immediately, since they may have inter-dependencies.
+            // Instead, we simultaneously add them later on.
+            val wildCard = newPatternBoundSymbol(UniqueName.fresh(tvar.origin.paramName), bounds, span, addToGadt = false)
+            tvar.instantiateWith(wildCard.typeRef)
+            patternBindings += ((wildCard, tvar.origin))
+          }
         }
-      }
     }
     val res = patternBindings.toList.map { (boundSym, _) =>
       // substitute bounds of pattern bound variables to deal with possible F-bounds
@@ -425,12 +437,19 @@ object Inferencing {
 
   type VarianceMap = SimpleIdentityMap[TypeVar, Integer]
 
-  /** All occurrences of type vars in this type that satisfy predicate
-   *  `include` mapped to their variances (-1/0/1) in this type, where
+  /** All occurrences of type vars in `tp` that satisfy predicate
+   *  `include` mapped to their variances (-1/0/1) in both `tp` and
+   *  `pt.finalResultType`, where
    *  -1 means: only contravariant occurrences
    *  +1 means: only covariant occurrences
    *  0 means: mixed or non-variant occurrences
    *
+   *  We need to take the occurrences in `pt` into account because a type
+   *  variable created when typing the current tree might only appear in the
+   *  bounds of a type variable in the expected type, for example when
+   *  `ConstraintHandling#legalBound` creates type variables when approximating
+   *  a bound.
+   *
    *  Note: We intentionally use a relaxed version of variance here,
    *  where the variance does not change under a prefix of a named type
    *  (the strict version makes prefixes invariant). This turns out to be
@@ -441,7 +460,7 @@ object Inferencing {
    *
    *  we want to instantiate U to x.type right away. No need to wait further.
    */
-  private def variances(tp: Type)(using Context): VarianceMap = {
+  private def variances(tp: Type, pt: Type = WildcardType)(using Context): VarianceMap = {
     Stats.record("variances")
     val constraint = ctx.typerState.constraint
 
@@ -474,21 +493,29 @@ object Inferencing {
       def traverse(tp: Type) = { vmap1 = accu(vmap1, tp) }
       vmap.foreachBinding { (tvar, v) =>
         val param = tvar.origin
-        val e = constraint.entry(param)
-        accu.setVariance(v)
-        if (v >= 0) {
-          traverse(e.bounds.lo)
-          constraint.lower(param).foreach(p => traverse(constraint.typeVarOfParam(p)))
-        }
-        if (v <= 0) {
-          traverse(e.bounds.hi)
-          constraint.upper(param).foreach(p => traverse(constraint.typeVarOfParam(p)))
-        }
+        constraint.entry(param) match
+          case TypeBounds(lo, hi) =>
+            accu.setVariance(v)
+            if v >= 0 then
+              traverse(lo)
+              constraint.lower(param).foreach(p => traverse(constraint.typeVarOfParam(p)))
+            if v <= 0 then
+              traverse(hi)
+              constraint.upper(param).foreach(p => traverse(constraint.typeVarOfParam(p)))
+          case _ =>
       }
       if (vmap1 eq vmap) vmap else propagate(vmap1)
     }
 
-    propagate(accu(SimpleIdentityMap.empty, tp))
+    propagate(accu(accu(SimpleIdentityMap.empty, tp), pt.finalResultType))
+  }
+
+  /** Run the transformation after dealiasing but return the original type if it was a no-op. */
+  private def derivedOnDealias(tp: Type)(transform: Type => Type)(using Context) = {
+    val dealiased = tp.dealias
+    val transformed = transform(dealiased)
+    if transformed eq dealiased then tp // return the original type, not the result of dealiasing
+    else transformed
   }
 
   /** Replace every top-level occurrence of a wildcard type argument by
@@ -496,23 +523,17 @@ object Inferencing {
     *  $i is a skolem of type `scala.internal.TypeBox`, and `CAP` is its
     *  type member. See the documentation of `TypeBox` for a rationale why we do this.
     */
-  def captureWildcards(tp: Type)(using Context): Type = tp match {
+  def captureWildcards(tp: Type)(using Context): Type = derivedOnDealias(tp) {
     case tp @ AppliedType(tycon, args) if tp.hasWildcardArg =>
-      tycon.typeParams match {
-        case tparams @ ((_: Symbol) :: _) =>
-          val boundss = tparams.map(_.paramInfo.substApprox(tparams.asInstanceOf[List[TypeSymbol]], args))
-          val args1 = args.zipWithConserve(boundss) { (arg, bounds) =>
-            arg match {
-              case TypeBounds(lo, hi) =>
-                val skolem = SkolemType(defn.TypeBoxClass.typeRef.appliedTo(lo | bounds.loBound, hi & bounds.hiBound))
-                TypeRef(skolem, defn.TypeBox_CAP)
-              case arg => arg
-            }
-          }
-          tp.derivedAppliedType(tycon, args1)
-        case _ =>
-          tp
+      val tparams = tycon.typeParamSymbols
+      val args1 = args.zipWithConserve(tparams.map(_.paramInfo.substApprox(tparams, args))) {
+        case (TypeBounds(lo, hi), bounds) =>
+          val skolem = SkolemType(defn.TypeBoxClass.typeRef.appliedTo(lo | bounds.loBound, hi & bounds.hiBound))
+          TypeRef(skolem, defn.TypeBox_CAP)
+        case (arg, _) =>
+          arg
       }
+      if tparams.isEmpty then tp else tp.derivedAppliedType(tycon, args1)
     case tp: AndOrType => tp.derivedAndOrType(captureWildcards(tp.tp1), captureWildcards(tp.tp2))
     case tp: RefinedType => tp.derivedRefinedType(captureWildcards(tp.parent), tp.refinedName, tp.refinedInfo)
     case tp: RecType => tp.derivedRecType(captureWildcards(tp.parent))
@@ -532,8 +553,9 @@ trait Inferencing { this: Typer =>
    *  @param locked  the set of type variables of the current typer state that cannot be interpolated
    *                 at the present time
    *  Eligible for interpolation are all type variables owned by the current typerstate
-   *  that are not in locked. Type variables occurring co- (respectively, contra-) variantly in the type
-   *  are minimized (respectvely, maximized). Non occurring type variables are minimized if they
+   *  that are not in `locked` and whose `nestingLevel` is `>= ctx.nestingLevel`.
+   *  Type variables occurring co- (respectively, contra-) variantly in the tree type
+   *  or expected type are minimized (respectively, maximized). Non occurring type variables are minimized if they
    *  have a lower bound different from Nothing, maximized otherwise. Type variables appearing
    *  non-variantly in the type are left untouched.
    *
@@ -558,7 +580,7 @@ trait Inferencing { this: Typer =>
     if ((ownedVars ne locked) && !ownedVars.isEmpty) {
       val qualifying = ownedVars -- locked
       if (!qualifying.isEmpty) {
-        typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, owned vars = ${state.ownedVars.toList}%, %, qualifying = ${qualifying.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}")
+        typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, pt = $pt, owned vars = ${state.ownedVars.toList}%, %, qualifying = ${qualifying.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}")
         val resultAlreadyConstrained =
           tree.isInstanceOf[Apply] || tree.tpe.isInstanceOf[MethodOrPoly]
         if (!resultAlreadyConstrained)
@@ -566,7 +588,7 @@ trait Inferencing { this: Typer =>
             // This is needed because it could establish singleton type upper bounds. See i2998.scala.
 
         val tp = tree.tpe.widen
-        val vs = variances(tp)
+        val vs = variances(tp, pt)
 
         // Avoid interpolating variables occurring in tree's type if typerstate has unreported errors.
         // Reason: The errors might reflect unsatisfiable constraints. In that
@@ -593,22 +615,38 @@ trait Inferencing { this: Typer =>
         def constraint = state.constraint
         type InstantiateQueue = mutable.ListBuffer[(TypeVar, Boolean)]
         val toInstantiate = new InstantiateQueue
-        for (tvar <- qualifying)
-          if (!tvar.isInstantiated && constraint.contains(tvar)) {
+        for tvar <- qualifying do
+          if !tvar.isInstantiated && constraint.contains(tvar) && tvar.nestingLevel >= ctx.nestingLevel then
+            constrainIfDependentParamRef(tvar, tree)
             // Needs to be checked again, since previous interpolations could already have
             // instantiated `tvar` through unification.
             val v = vs(tvar)
-            if (v == null) {
-              typr.println(i"interpolate non-occurring $tvar in $state in $tree: $tp, fromBelow = ${tvar.hasLowerBound}, $constraint")
-              toInstantiate += ((tvar, tvar.hasLowerBound))
-            }
+            if v == null then
+              // Even though `tvar` is non-occurring in `v`, the specific
+              // instantiation we pick still matters because `tvar` might appear
+              // in the bounds of a non-`qualifying` type variable in the
+              // constraint.
+              // In particular, if `tvar` was created as the upper or lower
+              // bound of an existing variable by `LevelAvoidMap`, we
+              // instantiate it in the direction corresponding to the
+              // original variable which might be further constrained later.
+              // Otherwise, we simply rely on `hasLowerBound`.
+              val name = tvar.origin.paramName
+              val fromBelow =
+                name.is(AvoidNameKind.UpperBound) ||
+                !name.is(AvoidNameKind.LowerBound) && tvar.hasLowerBound
+              typr.println(i"interpolate non-occurring $tvar in $state in $tree: $tp, fromBelow = $fromBelow, $constraint")
+              toInstantiate += ((tvar, fromBelow))
+            else if v.intValue != 0 then
+              typr.println(i"interpolate $tvar in $state in $tree: $tp, fromBelow = ${v.intValue == 1}, $constraint")
+              toInstantiate += ((tvar, v.intValue == 1))
+            else if tvar.nestingLevel > ctx.nestingLevel then
+              // Invariant: a type variable of level N can only appear
+              // in the type of a tree whose enclosing scope is level <= N.
+              typr.println(i"instantiate nonvariant $tvar of level ${tvar.nestingLevel} to a type variable of level <= ${ctx.nestingLevel}, $constraint")
+              comparing(_.atLevel(ctx.nestingLevel, tvar.origin))
             else
-              if (v.intValue != 0) {
-                typr.println(i"interpolate $tvar in $state in $tree: $tp, fromBelow = ${v.intValue == 1}, $constraint")
-                toInstantiate += ((tvar, v.intValue == 1))
-              }
-              else typr.println(i"no interpolation for nonvariant $tvar in $state")
-          }
+              typr.println(i"no interpolation for nonvariant $tvar in $state")
 
         /** Instantiate all type variables in `buf` in the indicated directions.
          *  If a type variable A is instantiated from below, and there is another
@@ -645,13 +683,16 @@ trait Inferencing { this: Typer =>
             while buf.nonEmpty do
               val first @ (tvar, fromBelow) = buf.head
               buf.dropInPlace(1)
-              val suspend = buf.exists{ (following, _) =>
-                if fromBelow then
-                  constraint.isLess(following.origin, tvar.origin)
-                else
-                  constraint.isLess(tvar.origin, following.origin)
-              }
-              if suspend then suspended += first else tvar.instantiate(fromBelow)
+              if !tvar.isInstantiated then
+                val suspend = buf.exists{ (following, _) =>
+                  if fromBelow then
+                    constraint.isLess(following.origin, tvar.origin)
+                  else
+                    constraint.isLess(tvar.origin, following.origin)
+                }
+                if suspend then suspended += first else tvar.instantiate(fromBelow)
+              end if
+            end while
             doInstantiate(suspended)
         end doInstantiate
         doInstantiate(toInstantiate)
@@ -659,6 +700,33 @@ trait Inferencing { this: Typer =>
     }
     tree
   }
+
+  /** If `tvar` represents a parameter of a dependent method type in the current `call`
+   *  approximate it from below with the type of the actual argument. Skolemize that
+   *  type if necessary to make it a Singleton.
+   */
+  private def constrainIfDependentParamRef(tvar: TypeVar, call: Tree)(using Context): Unit =
+    if tvar.origin.paramName.is(NameKinds.DepParamName) then
+      representedParamRef(tvar.origin) match
+        case ref: TermParamRef =>
+          def findArg(tree: Tree)(using Context): Tree = tree match
+            case Apply(fn, args) =>
+              if fn.tpe.widen eq ref.binder then
+                if ref.paramNum < args.length then args(ref.paramNum)
+                else EmptyTree
+              else findArg(fn)
+            case TypeApply(fn, _) => findArg(fn)
+            case Block(_, expr) => findArg(expr)
+            case Inlined(_, _, expr) => findArg(expr)
+            case _ => EmptyTree
+
+          val arg = findArg(call)
+          if !arg.isEmpty then
+            var argType = arg.tpe.widenIfUnstable
+            if !argType.isSingleton then argType = SkolemType(argType)
+            argType <:< tvar
+        case _ =>
+  end constrainIfDependentParamRef
 }
 
 /** An enumeration controlling the degree of forcing in "is-dully-defined" checks. */
diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala
index d5893f44f631..c99ccbd24428 100644
--- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala
@@ -20,7 +20,9 @@ import ProtoTypes.shallowSelectionProto
 import Annotations.Annotation
 import SymDenotations.SymDenotation
 import Inferencing.isFullyDefined
+import Scopes.newScope
 import config.Printers.inlining
+import config.Feature
 import ErrorReporting.errorTree
 import dotty.tools.dotc.util.{SimpleIdentityMap, SimpleIdentitySet, EqHashMap, SourceFile, SourcePosition, SrcPos}
 import dotty.tools.dotc.parsing.Parsers.Parser
@@ -33,9 +35,13 @@ import util.Spans.Span
 import dotty.tools.dotc.transform.{Splicer, TreeMapWithStages}
 import quoted.QuoteUtils
 
+import scala.annotation.constructorOnly
+
 object Inliner {
   import tpd._
 
+  private type DefBuffer = mutable.ListBuffer[ValOrDefDef]
+
   /** `sym` is an inline method with a known body to inline.
    */
   def hasBodyToInline(sym: SymDenotation)(using Context): Boolean =
@@ -70,6 +76,7 @@ object Inliner {
         || (ctx.phase == Phases.typerPhase && needsTransparentInlining(tree))
       )
       && !ctx.typer.hasInliningErrors
+      && !ctx.base.stopInlining
   }
 
   private def needsTransparentInlining(tree: Tree)(using Context): Boolean =
@@ -92,6 +99,10 @@ object Inliner {
       if (tree.symbol == defn.CompiletimeTesting_typeChecks) return Intrinsics.typeChecks(tree)
       if (tree.symbol == defn.CompiletimeTesting_typeCheckErrors) return Intrinsics.typeCheckErrors(tree)
 
+    if tree.symbol.isExperimental then
+      Feature.checkExperimentalDef(tree.symbol, tree)
+
+    if tree.symbol.isConstructor then return tree // error already reported for the inline constructor definition
 
     /** Set the position of all trees logically contained in the expansion of
      *  inlined call `call` to the position of `call`. This transform is necessary
@@ -124,6 +135,14 @@ object Inliner {
       case Apply(fn, args) =>
         cpy.Apply(tree)(liftBindings(fn, liftPos), args)
       case TypeApply(fn, args) =>
+        fn.tpe.widenTermRefExpr match
+          case tp: PolyType =>
+            val targBounds = tp.instantiateParamInfos(args.map(_.tpe))
+            for (arg, bounds: TypeBounds) <- args.zip(targBounds) if !bounds.contains(arg.tpe) do
+              val boundsStr =
+                if bounds == TypeBounds.empty then " <: Any. Note that this type is higher-kinded."
+                else bounds.show
+              report.error(em"${arg.tpe} does not conform to bound$boundsStr", arg)
         cpy.TypeApply(tree)(liftBindings(fn, liftPos), args)
       case Select(qual, name) =>
         cpy.Select(tree)(liftBindings(qual, liftPos), name)
@@ -140,6 +159,7 @@ object Inliner {
         val body = bodyToInline(tree.symbol) // can typecheck the tree and thereby produce errors
         new Inliner(tree, body).inlined(tree.srcPos)
       else
+        ctx.base.stopInlining = true
         val (reason, setting) =
           if reachedInlinedTreesLimit then ("inlined trees", ctx.settings.XmaxInlinedTrees)
           else ("successive inlines", ctx.settings.XmaxInlines)
@@ -150,6 +170,10 @@ object Inliner {
               |You can use ${setting.name} to change the limit.""",
           (tree :: enclosingInlineds).last.srcPos
         )
+    if ctx.base.stopInlining && enclosingInlineds.isEmpty then
+      ctx.base.stopInlining = false
+        // we have completely backed out of the call that overflowed;
+        // reset so that further inline calls can be expanded
     tree2
   }
 
@@ -175,7 +199,7 @@ object Inliner {
 
     val UnApply(fun, implicits, patterns) = unapp
     val sym = unapp.symbol
-    val cls = newNormalizedClassSymbol(ctx.owner, tpnme.ANON_CLASS, Synthetic | Final, List(defn.ObjectType), coord = sym.coord)
+    val cls = newNormalizedClassSymbol(ctx.owner, tpnme.ANON_CLASS, Synthetic | Final, List(defn.ObjectType), newScope, coord = sym.coord)
     val constr = newConstructor(cls, Synthetic, Nil, Nil, coord = sym.coord).entered
 
     val targs = fun match
@@ -285,8 +309,9 @@ object Inliner {
    */
   def inlineCallTrace(callSym: Symbol, pos: SourcePosition)(using Context): Tree = {
     assert(ctx.source == pos.source)
-    if (callSym.is(Macro)) ref(callSym.topLevelClass.owner).select(callSym.topLevelClass.name).withSpan(pos.span)
-    else Ident(callSym.topLevelClass.typeRef).withSpan(pos.span)
+    val topLevelCls = callSym.topLevelClass
+    if (callSym.is(Macro)) ref(topLevelCls.owner).select(topLevelCls.name)(using ctx.withOwner(topLevelCls.owner)).withSpan(pos.span)
+    else Ident(topLevelCls.typeRef).withSpan(pos.span)
   }
 
   object Intrinsics {
@@ -312,7 +337,7 @@ object Inliner {
       ConstFold(underlyingCodeArg).tpe.widenTermRefExpr match {
         case ConstantType(Constant(code: String)) =>
           val source2 = SourceFile.virtual("tasty-reflect", code)
-          inContext(ctx.fresh.setNewTyperState().setTyper(new Typer).setSource(source2)) {
+          inContext(ctx.fresh.setNewTyperState().setTyper(new Typer(ctx.nestingLevel + 1)).setSource(source2)) {
             val tree2 = new Parser(source2).block()
             if ctx.reporter.allErrors.nonEmpty then
               ctx.reporter.allErrors.map((ErrorKind.Parser, _))
@@ -345,9 +370,9 @@ object Inliner {
         lit(error.pos.column),
         if kind == ErrorKind.Parser then parserErrorKind else typerErrorKind)
 
-    private def packErrors(errors: List[(ErrorKind, Error)])(using Context): Tree =
+    private def packErrors(errors: List[(ErrorKind, Error)], pos: SrcPos)(using Context): Tree =
       val individualErrors: List[Tree] = errors.map(packError)
-      val errorTpt = ref(defn.CompiletimeTesting_ErrorClass)
+      val errorTpt = ref(defn.CompiletimeTesting_ErrorClass).withSpan(pos.span)
       mkList(individualErrors, errorTpt)
 
     /** Expand call to scala.compiletime.testing.typeChecks */
@@ -358,11 +383,12 @@ object Inliner {
     /** Expand call to scala.compiletime.testing.typeCheckErrors */
     def typeCheckErrors(tree: Tree)(using Context): Tree =
       val errors = compileForErrors(tree)
-      packErrors(errors)
+      packErrors(errors, tree)
 
     /** Expand call to scala.compiletime.codeOf */
     def codeOf(arg: Tree, pos: SrcPos)(using Context): Tree =
-      Literal(Constant(arg.show)).withSpan(pos.span)
+      val ctx1 = ctx.fresh.setSetting(ctx.settings.color, "never")
+      Literal(Constant(arg.show(using ctx1))).withSpan(pos.span)
   }
 
   extension (tp: Type) {
@@ -444,14 +470,14 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
   /** A binding for the parameter of an inline method. This is a `val` def for
    *  by-value parameters and a `def` def for by-name parameters. `val` defs inherit
    *  inline annotations from their parameters. The generated `def` is appended
-   *  to `bindingsBuf`.
+   *  to `buf`.
    *  @param name        the name of the parameter
    *  @param formal      the type of the parameter
    *  @param arg         the argument corresponding to the parameter
-   *  @param bindingsBuf the buffer to which the definition should be appended
+   *  @param buf         the buffer to which the definition should be appended
    */
   private def paramBindingDef(name: Name, formal: Type, arg0: Tree,
-                              bindingsBuf: mutable.ListBuffer[ValOrDefDef])(using Context): ValOrDefDef = {
+                              buf: DefBuffer)(using Context): ValOrDefDef = {
     val isByName = formal.dealias.isInstanceOf[ExprType]
     val arg = arg0 match {
       case Typed(arg1, tpt) if tpt.tpe.isRepeatedParam && arg1.tpe.derivesFrom(defn.ArrayClass) =>
@@ -467,6 +493,8 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
     var bindingFlags: FlagSet = InlineProxy
     if formal.widenExpr.hasAnnotation(defn.InlineParamAnnot) then
       bindingFlags |= Inline
+    if formal.widenExpr.hasAnnotation(defn.ErasedParamAnnot) then
+      bindingFlags |= Erased
     if isByName then
       bindingFlags |= Method
     val boundSym = newSym(InlineBinderName.fresh(name.asTermName), bindingFlags, bindingType).asTerm
@@ -478,23 +506,25 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
       else ValDef(boundSym, newArg)
     }.withSpan(boundSym.span)
     inlining.println(i"parameter binding: $binding, $argIsBottom")
-    bindingsBuf += binding
+    buf += binding
     binding
   }
 
-  /** Populate `paramBinding` and `bindingsBuf` by matching parameters with
+  /** Populate `paramBinding` and `buf` by matching parameters with
    *  corresponding arguments. `bindingbuf` will be further extended later by
    *  proxies to this-references. Issue an error if some arguments are missing.
    */
   private def computeParamBindings(
-      tp: Type, targs: List[Tree], argss: List[List[Tree]], formalss: List[List[Type]]): Boolean =
+      tp: Type, targs: List[Tree],
+      argss: List[List[Tree]], formalss: List[List[Type]],
+      buf: DefBuffer): Boolean =
     tp match
       case tp: PolyType =>
         tp.paramNames.lazyZip(targs).foreach { (name, arg) =>
           paramSpan(name) = arg.span
           paramBinding(name) = arg.tpe.stripTypeVar
         }
-        computeParamBindings(tp.resultType, targs.drop(tp.paramNames.length), argss, formalss)
+        computeParamBindings(tp.resultType, targs.drop(tp.paramNames.length), argss, formalss, buf)
       case tp: MethodType =>
         if argss.isEmpty then
           report.error(i"missing arguments for inline method $inlinedMethod", call.srcPos)
@@ -506,9 +536,9 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
               case _: SingletonType if isIdempotentPath(arg) =>
                 arg.tpe
               case _ =>
-                paramBindingDef(name, formal, arg, bindingsBuf).symbol.termRef
+                paramBindingDef(name, formal, arg, buf).symbol.termRef
           }
-          computeParamBindings(tp.resultType, targs, argss.tail, formalss.tail)
+          computeParamBindings(tp.resultType, targs, argss.tail, formalss.tail, buf)
       case _ =>
         assert(targs.isEmpty)
         assert(argss.isEmpty)
@@ -525,9 +555,11 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
     var lastSelf: Symbol = NoSymbol
     var lastLevel: Int = 0
     for ((level, selfSym) <- sortedProxies) {
-      lazy val rhsClsSym = selfSym.info.widenDealias.classSymbol
       val rhs = selfSym.info.dealias match
-        case info: TermRef if info.isStable =>
+        case info: TermRef
+        if info.isStable && (lastSelf.exists || isPureExpr(inlineCallPrefix)) =>
+          // If this is the first proxy, optimize to `ref(info)` only if call prefix is pure.
+          // Otherwise we might forget side effects. See run/i12829.scala.
           ref(info)
         case info =>
           val rhsClsSym = info.widenDealias.classSymbol
@@ -536,8 +568,11 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
           else if lastSelf.exists then
             ref(lastSelf).outerSelect(lastLevel - level, selfSym.info)
           else
-            inlineCallPrefix
-      val binding = ValDef(selfSym.asTerm, QuoteUtils.changeOwnerOfTree(rhs, selfSym)).withSpan(selfSym.span)
+            inlineCallPrefix match
+              case Super(_, _) => This(rhsClsSym.asClass)
+              case _ => inlineCallPrefix
+      val binding = accountForOpaques(
+        ValDef(selfSym.asTerm, QuoteUtils.changeOwnerOfTree(rhs, selfSym)).withSpan(selfSym.span))
       bindingsBuf += binding
       inlining.println(i"proxy at $level: $selfSym = ${bindingsBuf.last}")
       lastSelf = selfSym
@@ -545,10 +580,110 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
     }
   }
 
+  /** A list of pairs between TermRefs appearing in thisProxy bindings that
+   *  refer to objects with opaque type aliases and local proxy symbols
+   *  that contain refined versions of these TermRefs where the aliases
+   *  are exposed.
+   */
+  private val opaqueProxies = new mutable.ListBuffer[(TermRef, TermRef)]
+
+  /** Map first halves of opaqueProxies pairs to second halves, using =:= as equality */
+  def mapRef(ref: TermRef): Option[TermRef] =
+    opaqueProxies.collectFirst {
+      case (from, to) if from.symbol == ref.symbol && from =:= ref => to
+    }
+
+  /** If `tp` contains TermRefs that refer to objects with opaque
+   *  type aliases, add proxy definitions to `opaqueProxies` that expose these aliases.
+   */
+  def addOpaqueProxies(tp: Type, span: Span, forThisProxy: Boolean)(using Context): Unit =
+    tp.foreachPart {
+      case ref: TermRef =>
+        for cls <- ref.widen.classSymbols do
+          if cls.containsOpaques
+             && (forThisProxy || inlinedMethod.isContainedIn(cls))
+             && mapRef(ref).isEmpty
+          then
+            def openOpaqueAliases(selfType: Type): List[(Name, Type)] = selfType match
+              case RefinedType(parent, rname, TypeAlias(alias)) =>
+                val opaq = cls.info.member(rname).symbol
+                if opaq.isOpaqueAlias then
+                  (rname, alias.stripLazyRef.asSeenFrom(ref, cls))
+                  :: openOpaqueAliases(parent)
+                else Nil
+              case _ =>
+                Nil
+            val refinements = openOpaqueAliases(cls.givenSelfType)
+            val refinedType = refinements.foldLeft(ref: Type) ((parent, refinement) =>
+              RefinedType(parent, refinement._1, TypeAlias(refinement._2))
+            )
+            val refiningSym = newSym(InlineBinderName.fresh(), Synthetic, refinedType).asTerm
+            val refiningDef = ValDef(refiningSym, tpd.ref(ref).cast(refinedType)).withSpan(span)
+            inlining.println(i"add opaque alias proxy $refiningDef for $ref in $tp")
+            bindingsBuf += refiningDef
+            opaqueProxies += ((ref, refiningSym.termRef))
+      case _ =>
+    }
+
+  /** Map all TermRefs that match left element in `opaqueProxies` to the
+   *  corresponding right element.
+   */
+  val mapOpaques = TreeTypeMap(
+      typeMap = new TypeMap:
+          override def stopAt = StopAt.Package
+          def apply(t: Type) = mapOver {
+            t match
+              case ref: TermRef => mapRef(ref).getOrElse(ref)
+              case _ => t
+          }
+    )
+
+  /** If `binding` contains TermRefs that refer to objects with opaque
+   *  type aliases, add proxy definitions that expose these aliases
+   *  and substitute such TermRefs with the proxies. Example from pos/opaque-inline1.scala:
+   *
+   *  object refined:
+   *    opaque type Positive = Int
+   *    inline def Positive(value: Int): Positive = f(value)
+   *    def f(x: Positive): Positive = x
+   *  def run: Unit = { val x = 9; val nine = refined.Positive(x) }
+   *
+   *  This generates the following proxies:
+   *
+   *    val $proxy1: refined.type{type Positive = Int} =
+   *      refined.$asInstanceOf$[refined.type{type Positive = Int}]
+   *    val refined$_this: ($proxy1 : refined.type{type Positive = Int}) =
+   *      $proxy1
+   *
+   *  and every reference to `refined` in the inlined expression is replaced by
+   *  `refined_$this`.
+   */
+  def accountForOpaques(binding: ValDef)(using Context): ValDef =
+    addOpaqueProxies(binding.symbol.info, binding.span, forThisProxy = true)
+    if opaqueProxies.isEmpty then binding
+    else
+      binding.symbol.info = mapOpaques.typeMap(binding.symbol.info)
+      mapOpaques.transform(binding).asInstanceOf[ValDef]
+        .showing(i"transformed this binding exposing opaque aliases: $result", inlining)
+  end accountForOpaques
+
+  /** If value argument contains references to objects that contain opaque types,
+   *  map them to their opaque proxies.
+   */
+  def mapOpaquesInValueArg(arg: Tree)(using Context): Tree =
+    val argType = arg.tpe.widen
+    addOpaqueProxies(argType, arg.span, forThisProxy = false)
+    if opaqueProxies.nonEmpty then
+      val mappedType = mapOpaques.typeMap(argType)
+      if mappedType ne argType then arg.cast(AndType(arg.tpe, mappedType))
+      else arg
+    else arg
+
   private def canElideThis(tpe: ThisType): Boolean =
-    inlineCallPrefix.tpe == tpe && ctx.owner.isContainedIn(tpe.cls) ||
-    tpe.cls.isContainedIn(inlinedMethod) ||
-    tpe.cls.is(Package)
+    inlineCallPrefix.tpe == tpe && ctx.owner.isContainedIn(tpe.cls)
+    || tpe.cls.isContainedIn(inlinedMethod)
+    || tpe.cls.is(Package)
+    || tpe.cls.isStaticOwner && !(tpe.cls.seesOpaques && inlinedMethod.isContainedIn(tpe.cls))
 
   /** Very similar to TreeInfo.isPureExpr, but with the following inliner-only exceptions:
    *  - synthetic case class apply methods, when the case class constructor is empty, are
@@ -637,19 +772,32 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
       for (param <- tpe.cls.typeParams)
         paramProxy(param.typeRef) = adaptToPrefix(param.typeRef)
     case tpe: NamedType
-    if tpe.symbol.is(Param) && tpe.symbol.owner == inlinedMethod && !paramProxy.contains(tpe) =>
+    if tpe.symbol.is(Param)
+        && tpe.symbol.owner == inlinedMethod
+        && (tpe.symbol.isTerm || inlinedMethod.paramSymss.exists(_.contains(tpe.symbol)))
+          // this test is needed to rule out nested LambdaTypeTree parameters
+          // with the same name as the method's parameters. Note that the nested
+          // LambdaTypeTree parameters also have the inlineMethod as owner. C.f. i13460.scala.
+        && !paramProxy.contains(tpe) =>
       paramBinding.get(tpe.name) match
         case Some(bound) => paramProxy(tpe) = bound
         case _ =>  // can happen for params bound by type-lambda trees.
+
+      // The widened type may contain param types too (see tests/pos/i12379a.scala)
+      if tpe.isTerm then registerType(tpe.widenTermRefExpr)
     case _ =>
   }
 
+  private val registerTypes = new TypeTraverser:
+    override def stopAt = StopAt.Package
+    override def traverse(t: Type) =
+      registerType(t)
+      traverseChildren(t)
+
   /** Register type of leaf node */
-  private def registerLeaf(tree: Tree): Unit = tree match {
-    case _: This | _: Ident | _: TypeTree =>
-      tree.typeOpt.foreachPart(registerType, stopAtStatic = true)
+  private def registerLeaf(tree: Tree): Unit = tree match
+    case _: This | _: Ident | _: TypeTree => registerTypes.traverse(tree.typeOpt)
     case _ =>
-  }
 
   /** Make `tree` part of inlined expansion. This means its owner has to be changed
    *  from its `originalOwner`, and, if it comes from outside the inlined method
@@ -680,7 +828,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
           return Intrinsics.codeOf(arg, call.srcPos)
       case _ =>
 
-  	// Special handling of `constValue[T]` and `constValueOpt[T]`
+  	// Special handling of `constValue[T]`, `constValueOpt[T]`, and `summonInline[T]`
     if (callTypeArgs.length == 1)
       if (inlinedMethod == defn.Compiletime_constValue) {
         val constVal = tryConstValue
@@ -697,6 +845,19 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
           else New(defn.SomeClass.typeRef.appliedTo(constVal.tpe), constVal :: Nil)
         )
       }
+      else if (inlinedMethod == defn.Compiletime_summonInline) {
+        def searchImplicit(tpt: Tree) =
+          val evTyper = new Typer(ctx.nestingLevel + 1)
+          val evCtx = ctx.fresh.setTyper(evTyper)
+          val evidence = evTyper.inferImplicitArg(tpt.tpe, tpt.span)(using evCtx)
+          evidence.tpe match
+            case fail: Implicits.SearchFailureType =>
+              val msg = evTyper.missingArgMsg(evidence, tpt.tpe, "")
+              errorTree(call, em"$msg")
+            case _ =>
+              evidence
+        return searchImplicit(callTypeArgs.head)
+      }
 
     def paramTypess(call: Tree, acc: List[List[Type]]): List[List[Type]] = call match
       case Apply(fn, args) =>
@@ -706,12 +867,25 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
       case TypeApply(fn, _) => paramTypess(fn, acc)
       case _ => acc
 
-    // Compute bindings for all parameters, appending them to bindingsBuf
-    if !computeParamBindings(inlinedMethod.info, callTypeArgs, callValueArgss, paramTypess(call, Nil)) then
-      return call
+    val paramBindings =
+      val mappedCallValueArgss = callValueArgss.nestedMapConserve(mapOpaquesInValueArg)
+      if mappedCallValueArgss ne callValueArgss then
+        inlining.println(i"mapped value args = ${mappedCallValueArgss.flatten}%, %")
+
+      val paramBindingsBuf = new DefBuffer
+      // Compute bindings for all parameters, appending them to bindingsBuf
+      if !computeParamBindings(
+          inlinedMethod.info, callTypeArgs,
+          mappedCallValueArgss, paramTypess(call, Nil),
+          paramBindingsBuf)
+      then
+        return call
+
+      paramBindingsBuf.toList
+    end paramBindings
 
     // make sure prefix is executed if it is impure
-    if (!isIdempotentExpr(inlineCallPrefix)) registerType(inlinedMethod.owner.thisType)
+    if !isIdempotentExpr(inlineCallPrefix) then registerType(inlinedMethod.owner.thisType)
 
     // Register types of all leaves of inlined body so that the `paramProxy` and `thisProxy` maps are defined.
     rhsToInline.foreachSubTree(registerLeaf)
@@ -719,6 +893,9 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
     // Compute bindings for all this-proxies, appending them to bindingsBuf
     computeThisBindings()
 
+    // Parameter bindings come after this bindings, reflecting order of evaluation
+    bindingsBuf ++= paramBindings
+
     val inlineTyper = new InlineTyper(ctx.reporter.errorCount)
 
     val inlineCtx = inlineContext(call).fresh.setTyper(inlineTyper).setNewScope
@@ -762,11 +939,13 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
     val inliner = new InlinerMap(
       typeMap =
         new DeepTypeMap {
+          override def stopAt =
+            if opaqueProxies.isEmpty then StopAt.Static else StopAt.Package
           def apply(t: Type) = t match {
             case t: ThisType => thisProxy.getOrElse(t.cls, t)
             case t: TypeRef => paramProxy.getOrElse(t, mapOver(t))
             case t: SingletonType =>
-              if t.termSymbol.isAllOf(Inline | Param) then mapOver(t.widenTermRefExpr)
+              if t.termSymbol.isAllOf(Inline | Param) then apply(t.widenTermRefExpr)
               else paramProxy.getOrElse(t, mapOver(t))
             case t => mapOver(t)
           }
@@ -806,6 +985,11 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
       substTo = Nil
     )(using inlineCtx)
 
+    inlining.println(
+      i"""inliner transform with
+         |thisProxy = ${thisProxy.toList.map(_._1)}%, % --> ${thisProxy.toList.map(_._2)}%, %
+         |paramProxy = ${paramProxy.toList.map(_._1.typeSymbol.showLocated)}%, % --> ${paramProxy.toList.map(_._2)}%, %""")
+
     // Apply inliner to `rhsToInline`, split off any implicit bindings from result, and
     // make them part of `bindingsBuf`. The expansion is then the tree that remains.
     val expansion = inliner.transform(rhsToInline)
@@ -818,11 +1002,13 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
         }
         // Usually `error` is called from within a rewrite method. In this
         // case we need to report the error at the point of the outermost enclosing inline
-        // call. This way, a defensively written rewrite methid can always
+        // call. This way, a defensively written rewrite method can always
         // report bad inputs at the point of call instead of revealing its internals.
         val callToReport = if (enclosingInlineds.nonEmpty) enclosingInlineds.last else call
         val ctxToReport = ctx.outersIterator.dropWhile(enclosingInlineds(using _).nonEmpty).next
-        inContext(ctxToReport) {
+        // The context in which we report should still use the existing context reporter
+        val ctxOrigReporter = ctxToReport.fresh.setReporter(ctx.reporter)
+        inContext(ctxOrigReporter) {
           report.error(message, callToReport.srcPos)
         }
       case _ =>
@@ -880,7 +1066,17 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
 
       // Take care that only argument bindings go into `bindings`, since positions are
       // different for bindings from arguments and bindings from body.
-      tpd.Inlined(call, finalBindings, finalExpansion)
+      val res = tpd.Inlined(call, finalBindings, finalExpansion)
+      if opaqueProxies.isEmpty then res
+      else
+        val target =
+          if inlinedMethod.is(Transparent) then call.tpe & res.tpe
+          else call.tpe
+        res.ensureConforms(target)
+          // Make sure that the sealing with the declared type
+          // is type correct. Without it we might get problems since the
+          // expression's type is the opaque alias but the call's type is
+          // the opaque type itself. An example is in pos/opaque-inline1.scala.
     }
   }
 
@@ -1012,7 +1208,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
     private object InlineableArg {
       lazy val paramProxies = paramProxy.values.toSet
       def unapply(tree: Trees.Ident[?])(using Context): Option[Tree] = {
-        def search(buf: mutable.ListBuffer[ValOrDefDef]) = buf.find(_.name == tree.name)
+        def search(buf: DefBuffer) = buf.find(_.name == tree.name)
         if (paramProxies.contains(tree.typeOpt))
           search(bindingsBuf) match {
             case Some(bind: ValOrDefDef) if bind.symbol.is(Inline) =>
@@ -1051,7 +1247,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
             cpy.Inlined(cl)(call, bindings, recur(expr))
           case _ => ddef.tpe.widen match
             case mt: MethodType if ddef.paramss.head.length == args.length =>
-              val bindingsBuf = new mutable.ListBuffer[ValOrDefDef]
+              val bindingsBuf = new DefBuffer
               val argSyms = mt.paramNames.lazyZip(mt.paramInfos).lazyZip(args).map { (name, paramtp, arg) =>
                 arg.tpe.dealias match {
                   case ref @ TermRef(NoPrefix, _) => ref.symbol
@@ -1078,6 +1274,11 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
      */
     type MatchRedux = Option[(List[MemberDef], Tree)]
 
+    /** Same as MatchRedux, but also includes a boolean
+     *  that is true if the guard can be checked at compile time.
+     */
+    type MatchReduxWithGuard = Option[(List[MemberDef], Tree, Boolean)]
+
     /** Reduce an inline match
      *   @param     mtch          the match tree
      *   @param     scrutinee     the scrutinee expression, assumed to be pure, or
@@ -1115,7 +1316,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
         }
 
         def searchImplicit(sym: TermSymbol, tpt: Tree) = {
-          val evTyper = new Typer
+          val evTyper = new Typer(ctx.nestingLevel + 1)
           val evCtx = ctx.fresh.setTyper(evTyper)
           val evidence = evTyper.inferImplicitArg(tpt.tpe, tpt.span)(using evCtx)
           evidence.tpe match {
@@ -1125,7 +1326,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
             case fail: Implicits.SearchFailureType =>
               false
             case _ =>
-              //inliner.println(i"inferred implicit $sym: ${sym.info} with $evidence: ${evidence.tpe.widen}, ${evCtx.gadt.constraint}, ${evCtx.typerState.constraint}")
+              //inlining.println(i"inferred implicit $sym: ${sym.info} with $evidence: ${evidence.tpe.widen}, ${evCtx.gadt.constraint}, ${evCtx.typerState.constraint}")
               newTermBinding(sym, evidence)
               true
           }
@@ -1247,6 +1448,8 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
               case _ =>
                 false
             }
+          case Alternative(pats) =>
+            pats.exists(reducePattern(caseBindingMap, scrut, _))
           case Inlined(EmptyTree, Nil, ipat) =>
             reducePattern(caseBindingMap, scrut, ipat)
           case _ => false
@@ -1257,7 +1460,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
       val scrutineeSym = newSym(InlineScrutineeName.fresh(), Synthetic, scrutType).asTerm
       val scrutineeBinding = normalizeBinding(ValDef(scrutineeSym, scrutinee))
 
-      def reduceCase(cdef: CaseDef): MatchRedux = {
+      def reduceCase(cdef: CaseDef): MatchReduxWithGuard = {
         val caseBindingMap = new mutable.ListBuffer[(Symbol, MemberDef)]()
 
         def substBindings(
@@ -1276,21 +1479,26 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
         val gadtCtx = ctx.fresh.setFreshGADTBounds.addMode(Mode.GadtConstraintInference)
         if (reducePattern(caseBindingMap, scrutineeSym.termRef, cdef.pat)(using gadtCtx)) {
           val (caseBindings, from, to) = substBindings(caseBindingMap.toList, mutable.ListBuffer(), Nil, Nil)
-          val guardOK = cdef.guard.isEmpty || {
-            typer.typed(cdef.guard.subst(from, to), defn.BooleanType) match {
-              case ConstantValue(true) => true
-              case _ => false
+          val (guardOK, canReduceGuard) =
+            if cdef.guard.isEmpty then (true, true)
+            else typer.typed(cdef.guard.subst(from, to), defn.BooleanType) match {
+              case ConstantValue(v: Boolean) => (v, true)
+              case _ => (false, false)
             }
-          }
-          if (guardOK) Some((caseBindings.map(_.subst(from, to)), cdef.body.subst(from, to)))
-          else None
+          if guardOK then Some((caseBindings.map(_.subst(from, to)), cdef.body.subst(from, to), canReduceGuard))
+          else if canReduceGuard then None
+          else Some((caseBindings.map(_.subst(from, to)), cdef.body.subst(from, to), canReduceGuard))
         }
         else None
       }
 
       def recur(cases: List[CaseDef]): MatchRedux = cases match {
         case Nil => None
-        case cdef :: cases1 => reduceCase(cdef) `orElse` recur(cases1)
+        case cdef :: cases1 =>
+          reduceCase(cdef) match
+            case None => recur(cases1)
+            case r @ Some((caseBindings, rhs, canReduceGuard)) if canReduceGuard => Some((caseBindings, rhs))
+            case _ => None
       }
 
       recur(cases)
@@ -1306,7 +1514,7 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
    *  4. Make sure inlined code is type-correct.
    *  5. Make sure that the tree's typing is idempotent (so that future -Ycheck passes succeed)
    */
-  class InlineTyper(initialErrorCount: Int) extends ReTyper {
+  class InlineTyper(initialErrorCount: Int, @constructorOnly nestingLevel: Int = ctx.nestingLevel + 1) extends ReTyper(nestingLevel) {
     import reducer._
 
     override def ensureAccessible(tpe: Type, superAccess: Boolean, pos: SrcPos)(using Context): Type = {
@@ -1322,20 +1530,34 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
       super.ensureAccessible(tpe, superAccess, pos)
     }
 
+    /** Enter implicits in scope so that they can be found in implicit search.
+     *  This is important for non-transparent inlines
+     */
+    override def index(trees: List[untpd.Tree])(using Context): Context =
+      for case tree: untpd.MemberDef <- trees do
+        if tree.symbol.isOneOf(Flags.GivenOrImplicit) then
+          ctx.scope.openForMutations.enter(tree.symbol)
+      ctx
+
     override def typedIdent(tree: untpd.Ident, pt: Type)(using Context): Tree =
       inlineIfNeeded(tryInlineArg(tree.asInstanceOf[tpd.Tree]) `orElse` super.typedIdent(tree, pt))
 
     override def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = {
-      assert(tree.hasType, tree)
       val qual1 = typed(tree.qualifier, shallowSelectionProto(tree.name, pt, this))
       val resNoReduce = untpd.cpy.Select(tree)(qual1, tree.name).withType(tree.typeOpt)
-      val resMaybeReduced = constToLiteral(reducer.reduceProjection(resNoReduce))
-      if (resNoReduce ne resMaybeReduced)
-        typed(resMaybeReduced, pt) // redo typecheck if reduction changed something
+      val reducedProjection = reducer.reduceProjection(resNoReduce)
+      if reducedProjection.isType then
+        //if the projection leads to a typed tree then we stop reduction
+        resNoReduce
       else
-        val res = resMaybeReduced
-        ensureAccessible(res.tpe, tree.qualifier.isInstanceOf[untpd.Super], tree.srcPos)
-        inlineIfNeeded(res)
+        val res = constToLiteral(reducedProjection)
+        if resNoReduce ne res then
+          typed(res, pt) // redo typecheck if reduction changed something
+        else if res.symbol.isInlineMethod then
+          inlineIfNeeded(res)
+        else
+          ensureAccessible(res.tpe, tree.qualifier.isInstanceOf[untpd.Super], tree.srcPos)
+          res
     }
 
     override def typedIf(tree: untpd.If, pt: Type)(using Context): Tree =
@@ -1366,14 +1588,18 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
       super.typedValDef(vdef1, sym)
 
     override def typedApply(tree: untpd.Apply, pt: Type)(using Context): Tree =
-      val res = constToLiteral(betaReduce(super.typedApply(tree, pt))) match {
+      def cancelQuotes(tree: Tree): Tree =
+        tree match
+          case Quoted(Spliced(inner)) => inner
+          case _ => tree
+      val res = cancelQuotes(constToLiteral(betaReduce(super.typedApply(tree, pt)))) match {
         case res: Apply if res.symbol == defn.QuotedRuntime_exprSplice
                         && level == 0
                         && !hasInliningErrors =>
           val expanded = expandMacro(res.args.head, tree.srcPos)
           typedExpr(expanded) // Inline calls and constant fold code generated by the macro
         case res =>
-          inlineIfNeeded(res)
+          specializeEq(inlineIfNeeded(res))
       }
       if res.symbol == defn.QuotedRuntime_exprQuote then
         ctx.compilationUnit.needsQuotePickling = true
@@ -1440,13 +1666,16 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
         }
       }
 
-    override def newLikeThis: Typer = new InlineTyper(initialErrorCount)
+    override def newLikeThis(nestingLevel: Int): Typer = new InlineTyper(initialErrorCount, nestingLevel)
 
     /** True if this inline typer has already issued errors */
     override def hasInliningErrors(using Context) = ctx.reporter.errorCount > initialErrorCount
 
     private def inlineIfNeeded(tree: Tree)(using Context): Tree =
-      if Inliner.needsInlining(tree) then Inliner.inlineCall(tree)
+      val meth = tree.symbol
+      if meth.isAllOf(DeferredInline) then
+        errorTree(tree, i"Deferred inline ${meth.showLocated} cannot be invoked")
+      else if Inliner.needsInlining(tree) then Inliner.inlineCall(tree)
       else tree
 
     override def typedUnadapted(tree: untpd.Tree, pt: Type, locked: TypeVars)(using Context): Tree =
@@ -1455,6 +1684,21 @@ class Inliner(call: tpd.Tree, rhsToInline: tpd.Tree)(using Context) {
         case tree => tree
   }
 
+  def specializeEq(tree: Tree): Tree =
+    tree match
+      case Apply(sel @ Select(arg1, opName), arg2 :: Nil)
+      if sel.symbol == defn.Any_== || sel.symbol == defn.Any_!= =>
+        defn.ScalaValueClasses().find { cls =>
+          arg1.tpe.derivesFrom(cls) && arg2.tpe.derivesFrom(cls)
+        } match {
+          case Some(cls) =>
+            val newOp = cls.requiredMethod(opName, List(cls.typeRef))
+            arg1.select(newOp).withSpan(sel.span).appliedTo(arg2).withSpan(tree.span)
+          case None => tree
+        }
+      case _ =>
+        tree
+
   /** Drop any side-effect-free bindings that are unused in expansion or other reachable bindings.
    *  Inline def bindings that are used only once.
    */
diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala
index 3be5fae4df98..4bca5c50cf77 100644
--- a/compiler/src/dotty/tools/dotc/typer/Namer.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala
@@ -18,6 +18,8 @@ import tpd.tpes
 import Variances.alwaysInvariant
 import config.{Config, Feature}
 import config.Printers.typr
+import parsing.JavaParsers.JavaParser
+import parsing.Parsers.Parser
 import Annotations._
 import Inferencing._
 import transform.ValueClasses._
@@ -27,6 +29,8 @@ import reporting._
 import config.Feature.sourceVersion
 import config.SourceVersion._
 
+import scala.annotation.constructorOnly
+
 /** This class creates symbols from definitions and imports and gives them
  *  lazy types.
  *
@@ -86,13 +90,6 @@ class Namer { typer: Typer =>
    */
   lazy val nestedTyper: mutable.AnyRefMap[Symbol, Typer] = new mutable.AnyRefMap
 
-  /** The scope of the typer.
-   *  For nested typers this is a place parameters are entered during completion
-   *  and where they survive until typechecking. A context with this typer also
-   *  has this scope.
-   */
-  val scope: MutableScope = newScope
-
   /** We are entering symbols coming from a SourceLoader */
   private var lateCompile = false
 
@@ -241,7 +238,7 @@ class Namer { typer: Typer =>
 
     tree match {
       case tree: TypeDef if tree.isClassDef =>
-        val flags = checkFlags(tree.mods.flags &~ Implicit)
+        val flags = checkFlags(tree.mods.flags)
         val name = checkNoConflict(tree.name, flags.is(Private), tree.span).asTypeName
         val cls =
           createOrRefine[ClassSymbol](tree, name, flags, ctx.owner,
@@ -412,6 +409,58 @@ class Namer { typer: Typer =>
   def isEnumConstant(vd: ValDef)(using Context): Boolean =
     vd.mods.isAllOf(JavaEnumValue)
 
+  /** Ensure that the first type in a list of parent types Ps points to a non-trait class.
+   *  If that's not already the case, add one. The added class type CT is determined as follows.
+   *  First, let C be the unique class such that
+   *  - there is a parent P_i such that P_i derives from C, and
+   *  - for every class D: If some parent P_j, j <= i derives from D, then C derives from D.
+   *  Then, let CT be the smallest type which
+   *  - has C as its class symbol, and
+   *  - for all parents P_i: If P_i derives from C then P_i <:< CT.
+   *
+   * Tweak: It could be that at the point where the method is called, some superclass
+   * is still missing its parents. Parents are set to Nil when completion starts and are
+   * set to the actual parents later. If a superclass completes a subclass in one
+   * of its parents, the parents of the superclass or some intervening class might
+   * not yet be set. This situation can be detected by asking for the baseType of Any -
+   * if that type does not exist, one of the base classes of this class misses its parents.
+   * If this situation arises, the computation of the superclass might be imprecise.
+   * For instance, in i12722.scala, the superclass of `IPersonalCoinOps` is computed
+   * as `Object`, where `JsObject` would be correct. The problem cannot be solved locally,
+ * but we detect the situation and mark the superclass with a `@ProvisionalSuperClass`
+   * annotation in this case. When typechecking the class, we then run ensureFirstIsClass
+   * again and possibly improve the computed super class.
+ * An alternative fix would compute superclasses at typer instead of at completion. But
+   * that breaks too many invariants. For instance, we rely on correct @Child annotations
+   * after completion, and these in turn need the superclass.
+   */
+  def ensureFirstIsClass(cls: ClassSymbol, parents: List[Type])(using Context): List[Type] =
+
+    def realClassParent(sym: Symbol): ClassSymbol =
+      if !sym.isClass then defn.ObjectClass
+      else if !sym.is(Trait) then sym.asClass
+      else sym.info.parents match
+        case parentRef :: _ => realClassParent(parentRef.typeSymbol)
+        case nil => defn.ObjectClass
+
+    def improve(candidate: ClassSymbol, parent: Type): ClassSymbol =
+      val pcls = realClassParent(parent.classSymbol)
+      if (pcls derivesFrom candidate) pcls else candidate
+
+    parents match
+      case p :: _ if p.classSymbol.isRealClass => parents
+      case _ =>
+        val pcls = parents.foldLeft(defn.ObjectClass)(improve)
+        typr.println(i"ensure first is class $parents%, % --> ${parents map (_ baseType pcls)}%, %")
+        val bases = parents.map(_.baseType(pcls))
+        var first = TypeComparer.glb(defn.ObjectType :: bases)
+        val isProvisional = parents.exists(!_.baseType(defn.AnyClass).exists)
+        if isProvisional then
+          typr.println(i"provisional superclass $first for $cls")
+          first = AnnotatedType(first, Annotation(defn.ProvisionalSuperClassAnnot))
+        checkFeasibleParent(first, cls.srcPos, em" in inferred superclass $first") :: parents
+  end ensureFirstIsClass
+
   /** Add child annotation for `child` to annotations of `cls`. The annotation
    *  is added at the correct insertion point, so that Child annotations appear
    *  in reverse order of their start positions.
@@ -617,7 +666,7 @@ class Namer { typer: Typer =>
           val classSym = ctx.effectiveScope.lookup(className)
           val moduleName = className.toTermName
           if needsConstructorProxies(classSym) && ctx.effectiveScope.lookupEntry(moduleName) == null then
-            enterSymbol(constructorCompanion(classSym.asClass))
+            enterSymbol(classConstructorCompanion(classSym.asClass))
       else if ctx.owner.is(PackageClass) then
         for case cdef @ TypeDef(moduleName, _) <- moduleDef.values do
           val moduleSym = ctx.effectiveScope.lookup(moduleName)
@@ -634,12 +683,13 @@ class Namer { typer: Typer =>
             val moduleName = className.toTermName
             val companionVals = ctx.effectiveScope.lookupAll(moduleName.encode)
             if companionVals.isEmpty && needsConstructorProxies(classSym) then
-              enterSymbol(constructorCompanion(classSym.asClass))
+              enterSymbol(classConstructorCompanion(classSym.asClass))
             else
               for moduleSym <- companionVals do
                 if moduleSym.is(Module) && !moduleSym.isDefinedInCurrentRun then
                   val companion =
-                    if needsConstructorProxies(classSym) then constructorCompanion(classSym.asClass)
+                    if needsConstructorProxies(classSym) then
+                      classConstructorCompanion(classSym.asClass)
                     else newModuleSymbol(
                       ctx.owner, moduleName, EmptyFlags, EmptyFlags, (_, _) => NoType)
                   enterSymbol(companion)
@@ -655,15 +705,44 @@ class Namer { typer: Typer =>
     ctxWithStats
   }
 
-  /** Index symbols in `tree` while asserting the `lateCompile` flag.
-   *  This will cause any old top-level symbol with the same fully qualified
-   *  name as a newly created symbol to be replaced.
+  /** Parse the source and index symbols in the compilation unit's untpdTree
+   *  while asserting the `lateCompile` flag. This will cause any old
+   *  top-level symbol with the same fully qualified name as a newly created
+   *  symbol to be replaced.
+   *
+   *  Will call the callback with an implementation of type checking
+   *  That will set the tpdTree and root tree for the compilation unit.
    */
-  def lateEnter(tree: Tree)(using Context): Context = {
-    val saved = lateCompile
-    lateCompile = true
-    try index(tree :: Nil) finally lateCompile = saved
-  }
+  def lateEnterUnit(typeCheckCB: (() => Unit) => Unit)(using Context) =
+    val unit = ctx.compilationUnit
+
+    /** Index symbols in unit.untpdTree with lateCompile flag = true */
+    def lateEnter()(using Context): Context =
+      val saved = lateCompile
+      lateCompile = true
+      try index(unit.untpdTree :: Nil) finally lateCompile = saved
+
+    /** Set the tpdTree and root tree of the compilation unit */
+    def lateTypeCheck()(using Context) =
+      unit.tpdTree = typer.typedExpr(unit.untpdTree)
+      val phase = new transform.SetRootTree()
+      phase.run
+
+    unit.untpdTree =
+      if (unit.isJava) new JavaParser(unit.source).parse()
+      else new Parser(unit.source).parse()
+
+    atPhase(Phases.typerPhase) {
+      inContext(PrepareInlineable.initContext(ctx)) {
+        // inline body annotations are set in namer, capturing the current context
+        // we need to prepare the context for inlining.
+        lateEnter()
+        typeCheckCB { () =>
+          lateTypeCheck()
+        }
+      }
+    }
+  end lateEnterUnit
 
   /** The type bound on wildcard imports of an import list, with special values
    *    Nothing  if no wildcard imports of this kind exist
@@ -698,12 +777,12 @@ class Namer { typer: Typer =>
         if (sym.is(Module)) moduleValSig(sym)
         else valOrDefDefSig(original, sym, Nil, identity)(using localContext(sym).setNewScope)
       case original: DefDef =>
-        val typer1 = ctx.typer.newLikeThis
+        val typer1 = ctx.typer.newLikeThis(ctx.nestingLevel + 1)
         nestedTyper(sym) = typer1
-        typer1.defDefSig(original, sym)(using localContext(sym).setTyper(typer1))
+        typer1.defDefSig(original, sym, this)(using localContext(sym).setTyper(typer1))
       case imp: Import =>
         try
-          val expr1 = typedImportQualifier(imp, typedAheadExpr)
+          val expr1 = typedImportQualifier(imp, typedAheadExpr(_, _)(using ctx.withOwner(sym)))
           ImportType(expr1)
         catch case ex: CyclicReference =>
           typr.println(s"error while completing ${imp.expr}")
@@ -733,6 +812,15 @@ class Namer { typer: Typer =>
             completer.complete(denot)
     }
 
+    private var completedTypeParamSyms: List[TypeSymbol] = null
+
+    def setCompletedTypeParams(tparams: List[TypeSymbol]) =
+      completedTypeParamSyms = tparams
+
+    override def completerTypeParams(sym: Symbol)(using Context): List[TypeSymbol] =
+      if completedTypeParamSyms != null then completedTypeParamSyms
+      else Nil
+
     protected def addAnnotations(sym: Symbol): Unit = original match {
       case original: untpd.MemberDef =>
         lazy val annotCtx = annotContext(original, sym)
@@ -927,9 +1015,12 @@ class Namer { typer: Typer =>
       val unsafeInfo = if (isDerived) rhsBodyType else abstracted(rhsBodyType)
 
       def opaqueToBounds(info: Type): Type =
-        if sym.isOpaqueAlias && info.typeParams.nonEmpty && info.hkResult.typeParams.nonEmpty then
-          report.error(em"opaque type alias cannot have multiple type parameter lists", rhs.srcPos)
-        sym.opaqueToBounds(info, rhs1, tparamSyms)
+        if sym.isOpaqueAlias then
+          if info.typeParams.nonEmpty && info.hkResult.typeParams.nonEmpty then
+            report.error(em"opaque type alias cannot have multiple type parameter lists", rhs.srcPos)
+          sym.opaqueToBounds(info, rhs1, tparamSyms)
+        else
+          info
 
       if (isDerived) sym.info = unsafeInfo
       else {
@@ -953,7 +1044,7 @@ class Namer { typer: Typer =>
   }
 
   class ClassCompleter(cls: ClassSymbol, original: TypeDef)(ictx: Context) extends Completer(original)(ictx) {
-    withDecls(newScope)
+    withDecls(newScope(using ictx))
 
     protected implicit val completerCtx: Context = localContext(cls)
 
@@ -972,162 +1063,195 @@ class Namer { typer: Typer =>
 
     def init(): Context = index(params)
 
-    /** Add forwarders as required by the export statements in this class */
-    private def processExports(using Context): Unit = {
+    /** The forwarders defined by export `exp` */
+    private def exportForwarders(exp: Export)(using Context): List[tpd.MemberDef] =
+      val buf = new mutable.ListBuffer[tpd.MemberDef]
+      val Export(expr, selectors) = exp
+      if expr.isEmpty then
+        report.error(em"Export selector must have prefix and `.`", exp.srcPos)
+        return Nil
+
+      val path = typedAheadExpr(expr, AnySelectionProto)
+      checkLegalExportPath(path, selectors)
+      lazy val wildcardBound = importBound(selectors, isGiven = false)
+      lazy val givenBound = importBound(selectors, isGiven = true)
+
+      def canForward(mbr: SingleDenotation): CanForward = {
+        import CanForward.*
+        val sym = mbr.symbol
+        if !sym.isAccessibleFrom(path.tpe) then
+          No("is not accessible")
+        else if sym.isConstructor || sym.is(ModuleClass) || sym.is(Bridge) || sym.is(ConstructorProxy) then
+          Skip
+        else if cls.derivesFrom(sym.owner) && (sym.owner == cls || !sym.is(Deferred)) then
+          No(i"is already a member of $cls")
+        else if sym.is(Override) then
+          sym.allOverriddenSymbols.find(
+            other => cls.derivesFrom(other.owner) && !other.is(Deferred)
+          ) match
+              case Some(other) => No(i"overrides ${other.showLocated}, which is already a member of $cls")
+              case None => Yes
+        else if sym.isAllOf(JavaModule) then
+          Skip
+        else Yes
+      }
 
-      /** A string indicating that no forwarders for this kind of symbol are emitted */
-      val SKIP = "(skip)"
+      def foreachDefaultGetterOf(sym: TermSymbol, op: TermSymbol => Unit): Unit =
+        var n = 0
+        for params <- sym.paramSymss; param <- params do
+          if param.isTerm then
+            if param.is(HasDefault) then
+              val getterName = DefaultGetterName(sym.name, n)
+              val getter = path.tpe.member(DefaultGetterName(sym.name, n)).symbol
+              assert(getter.exists, i"$path does not have a default getter named $getterName")
+              op(getter.asTerm)
+            n += 1
+
+      /** Add a forwarder with name `alias` or its type name equivalent to `mbr`,
+       *  provided `mbr` is accessible and of the right implicit/non-implicit kind.
+       */
+      def addForwarder(alias: TermName, mbr: SingleDenotation, span: Span): Unit =
+
+        def adaptForwarderParams(acc: List[List[tpd.Tree]], tp: Type, prefss: List[List[tpd.Tree]])
+          : List[List[tpd.Tree]] = tp match
+            case mt: MethodType
+            if mt.paramInfos.nonEmpty && mt.paramInfos.last.isRepeatedParam =>
+              // Note: in this branch we use the assumptions
+              // that `prefss.head` corresponds to `mt.paramInfos` and
+              // that `prefss.tail` corresponds to `mt.resType`
+              val init :+ vararg = prefss.head
+              val prefs = init :+ ctx.typeAssigner.seqToRepeated(vararg)
+              adaptForwarderParams(prefs :: acc, mt.resType, prefss.tail)
+            case mt: MethodOrPoly =>
+              adaptForwarderParams(prefss.head :: acc, mt.resultType, prefss.tail)
+            case _ =>
+              acc.reverse ::: prefss
 
-      /** The forwarders defined by export `exp`.
-       */
-      def exportForwarders(exp: Export): List[tpd.MemberDef] = {
-        val buf = new mutable.ListBuffer[tpd.MemberDef]
-        val Export(expr, selectors) = exp
-        if expr.isEmpty then
-          report.error(em"Export selector must have prefix and `.`", exp.srcPos)
-          return Nil
-
-        val path = typedAheadExpr(expr, AnySelectionProto)
-        checkLegalExportPath(path, selectors)
-        lazy val wildcardBound = importBound(selectors, isGiven = false)
-        lazy val givenBound = importBound(selectors, isGiven = true)
-
-        def whyNoForwarder(mbr: SingleDenotation): String = {
+        if canForward(mbr) == CanForward.Yes then
           val sym = mbr.symbol
-          if (!sym.isAccessibleFrom(path.tpe)) "is not accessible"
-          else if (sym.isConstructor || sym.is(ModuleClass) || sym.is(Bridge) || sym.is(ConstructorProxy)) SKIP
-          else if (cls.derivesFrom(sym.owner) &&
-                   (sym.owner == cls || !sym.is(Deferred))) i"is already a member of $cls"
-          else if (sym.is(Override))
-            sym.allOverriddenSymbols.find(
-             other => cls.derivesFrom(other.owner) && !other.is(Deferred)) match {
-               case Some(other) => i"overrides ${other.showLocated}, which is already a member of $cls"
-               case None => ""
-            }
-          else ""
-        }
-
-        /** Add a forwarder with name `alias` or its type name equivalent to `mbr`,
-         *  provided `mbr` is accessible and of the right implicit/non-implicit kind.
-         */
-        def addForwarder(alias: TermName, mbr: SingleDenotation, span: Span): Unit =
-
-          def adaptForwarderParams(acc: List[List[tpd.Tree]], tp: Type, prefss: List[List[tpd.Tree]])
-            : List[List[tpd.Tree]] = tp match
-              case mt: MethodType
-              if mt.paramInfos.nonEmpty && mt.paramInfos.last.isRepeatedParam =>
-                // Note: in this branch we use the assumptions
-                // that `prefss.head` corresponds to `mt.paramInfos` and
-                // that `prefss.tail` corresponds to `mt.resType`
-                val init :+ vararg = prefss.head
-                val prefs = init :+ ctx.typeAssigner.seqToRepeated(vararg)
-                adaptForwarderParams(prefs :: acc, mt.resType, prefss.tail)
-              case mt: MethodOrPoly =>
-                adaptForwarderParams(prefss.head :: acc, mt.resultType, prefss.tail)
-              case _ =>
-                acc.reverse ::: prefss
-
-          if whyNoForwarder(mbr) == "" then
-            val sym = mbr.symbol
-            val forwarder =
-              if mbr.isType then
-                val forwarderName = checkNoConflict(alias.toTypeName, isPrivate = false, span)
-                var target = path.tpe.select(sym)
-                if target.typeParams.nonEmpty then
-                  target = target.EtaExpand(target.typeParams)
-                newSymbol(
-                  cls, forwarderName,
-                  Exported | Final,
-                  TypeAlias(target),
-                  coord = span)
-                // Note: This will always create unparameterzied aliases. So even if the original type is
-                // a parameterized class, say `C[X]` the alias will read `type C = d.C`. We currently do
-                // allow such type aliases. If we forbid them at some point (requiring the referred type to be
-                // fully applied), we'd have to change the scheme here as well.
-              else {
-                def refersToPrivate(tp: Type): Boolean = tp match
-                  case tp: TermRef => tp.termSymbol.is(Private) || refersToPrivate(tp.prefix)
-                  case _ => false
-                val (maybeStable, mbrInfo) =
-                  if sym.isStableMember && sym.isPublic && !refersToPrivate(path.tpe) then
-                    (StableRealizable, ExprType(path.tpe.select(sym)))
-                  else
-                    (EmptyFlags, mbr.info.ensureMethodic)
-                var mbrFlags = Exported | Method | Final | maybeStable | sym.flags & RetainedExportFlags
-                if sym.is(ExtensionMethod) then mbrFlags |= ExtensionMethod
-                val forwarderName = checkNoConflict(alias, isPrivate = false, span)
-                newSymbol(cls, forwarderName, mbrFlags, mbrInfo, coord = span)
-              }
-            forwarder.info = avoidPrivateLeaks(forwarder)
-            forwarder.addAnnotations(sym.annotations)
-            val forwarderDef =
-              if (forwarder.isType) tpd.TypeDef(forwarder.asType)
-              else {
-                import tpd._
-                val ref = path.select(sym.asTerm)
-                val ddef = tpd.DefDef(forwarder.asTerm, prefss =>
-                  ref.appliedToArgss(adaptForwarderParams(Nil, sym.info, prefss))
-                )
-                if forwarder.isInlineMethod then
-                  PrepareInlineable.registerInlineInfo(forwarder, ddef.rhs)
-                ddef
-              }
-
-            buf += forwarderDef.withSpan(span)
-        end addForwarder
-
-        def addForwardersNamed(name: TermName, alias: TermName, span: Span): Unit = {
-          val size = buf.size
-          val mbrs = List(name, name.toTypeName).flatMap(path.tpe.member(_).alternatives)
-          mbrs.foreach(addForwarder(alias, _, span))
-          if (buf.size == size) {
-            val reason = mbrs.map(whyNoForwarder).dropWhile(_ == SKIP) match {
-              case Nil => ""
-              case why :: _ => i"\n$path.$name cannot be exported because it $why"
-            }
-            report.error(i"""no eligible member $name at $path$reason""", ctx.source.atSpan(span))
-          }
-        }
-
-        def addWildcardForwardersNamed(name: TermName, span: Span): Unit =
-          List(name, name.toTypeName)
-            .flatMap(path.tpe.memberBasedOnFlags(_, excluded = Private|Given|ConstructorProxy).alternatives)
-            .foreach(addForwarder(name, _, span)) // ignore if any are not added
-
-        def addWildcardForwarders(seen: List[TermName], span: Span): Unit =
-          val nonContextual = mutable.HashSet(seen: _*)
-          for mbr <- path.tpe.membersBasedOnFlags(required = EmptyFlags, excluded = PrivateOrSynthetic) do
-            if !mbr.symbol.isSuperAccessor then
+          val hasDefaults = sym.hasDefaultParams // compute here to ensure HasDefaultParams and NoDefaultParams flags are set
+          val forwarder =
+            if mbr.isType then
+              val forwarderName = checkNoConflict(alias.toTypeName, isPrivate = false, span)
+              var target = path.tpe.select(sym)
+              if target.typeParams.nonEmpty then
+                target = target.EtaExpand(target.typeParams)
+              newSymbol(
+                cls, forwarderName,
+                Exported | Final,
+                TypeAlias(target),
+                coord = span)
+              // Note: This will always create unparameterized aliases. So even if the original type is
+              // a parameterized class, say `C[X]` the alias will read `type C = d.C`. We currently do
+              // allow such type aliases. If we forbid them at some point (requiring the referred type to be
+              // fully applied), we'd have to change the scheme here as well.
+            else
+              def refersToPrivate(tp: Type): Boolean = tp match
+                case tp: TermRef => tp.termSymbol.is(Private) || refersToPrivate(tp.prefix)
+                case _ => false
+              val (maybeStable, mbrInfo) =
+                if sym.isStableMember && sym.isPublic && !refersToPrivate(path.tpe) then
+                  (StableRealizable, ExprType(path.tpe.select(sym)))
+                else
+                  (EmptyFlags, mbr.info.ensureMethodic)
+              var flagMask = RetainedExportFlags
+              if sym.isTerm then flagMask |= HasDefaultParams | NoDefaultParams
+              var mbrFlags = Exported | Method | Final | maybeStable | sym.flags & flagMask
+              if sym.is(ExtensionMethod) then mbrFlags |= ExtensionMethod
+              val forwarderName = checkNoConflict(alias, isPrivate = false, span)
+              newSymbol(cls, forwarderName, mbrFlags, mbrInfo, coord = span)
+
+          forwarder.info = avoidPrivateLeaks(forwarder)
+          forwarder.addAnnotations(sym.annotations.filterConserve(_.symbol != defn.BodyAnnot))
+
+          if forwarder.isType then
+            buf += tpd.TypeDef(forwarder.asType).withSpan(span)
+          else
+            import tpd._
+            val ref = path.select(sym.asTerm)
+            val ddef = tpd.DefDef(forwarder.asTerm, prefss =>
+                ref.appliedToArgss(adaptForwarderParams(Nil, sym.info, prefss)))
+            if forwarder.isInlineMethod then
+              PrepareInlineable.registerInlineInfo(forwarder, ddef.rhs)
+            buf += ddef.withSpan(span)
+            if hasDefaults then
+              foreachDefaultGetterOf(sym.asTerm,
+                getter => addForwarder(getter.name.asTermName, getter, span))
+      end addForwarder
+
+      def addForwardersNamed(name: TermName, alias: TermName, span: Span): Unit =
+        val size = buf.size
+        val mbrs = List(name, name.toTypeName).flatMap(path.tpe.member(_).alternatives)
+        mbrs.foreach(addForwarder(alias, _, span))
+        if buf.size == size then
+          val reason = mbrs.map(canForward).collect {
+            case CanForward.No(whyNot) => i"\n$path.$name cannot be exported because it $whyNot"
+          }.headOption.getOrElse("")
+          report.error(i"""no eligible member $name at $path$reason""", ctx.source.atSpan(span))
+
+      def addWildcardForwardersNamed(name: TermName, span: Span): Unit =
+        List(name, name.toTypeName)
+          .flatMap(path.tpe.memberBasedOnFlags(_, excluded = Private|Given|ConstructorProxy).alternatives)
+          .foreach(addForwarder(name, _, span)) // ignore if any are not added
+
+      def addWildcardForwarders(seen: List[TermName], span: Span): Unit =
+        val nonContextual = mutable.HashSet(seen: _*)
+        val fromCaseClass = path.tpe.widen.classSymbols.exists(_.is(Case))
+        def isCaseClassSynthesized(mbr: Symbol) =
+          fromCaseClass && defn.caseClassSynthesized.contains(mbr)
+        for mbr <- path.tpe.membersBasedOnFlags(required = EmptyFlags, excluded = PrivateOrSynthetic) do
+          if !mbr.symbol.isSuperAccessor
               // Scala 2 superaccessors have neither Synthetic nor Artfact set, so we
               // need to filter them out here (by contrast, Scala 3 superaccessors are Artifacts)
-              val alias = mbr.name.toTermName
-              if mbr.symbol.is(Given) then
-                if !seen.contains(alias) && mbr.matchesImportBound(givenBound) then
-                  addForwarder(alias, mbr, span)
-              else if !nonContextual.contains(alias) && mbr.matchesImportBound(wildcardBound) then
-                nonContextual += alias
-                addWildcardForwardersNamed(alias, span)
-
-        def addForwarders(sels: List[untpd.ImportSelector], seen: List[TermName]): Unit = sels match
-          case sel :: sels1 =>
-            if sel.isWildcard then
-              addWildcardForwarders(seen, sel.span)
-            else
-              if sel.rename != nme.WILDCARD then
-                addForwardersNamed(sel.name, sel.rename, sel.span)
-              addForwarders(sels1, sel.name :: seen)
-          case _ =>
+              // Symbols from base traits of case classes that will get synthesized implementations
+              // at PostTyper are also excluded.
+            && !isCaseClassSynthesized(mbr.symbol)
+            && !mbr.symbol.name.is(DefaultGetterName)
+              // default getters are exported with the members they belong to
+          then
+            val alias = mbr.name.toTermName
+            if mbr.symbol.is(Given) then
+              if !seen.contains(alias) && mbr.matchesImportBound(givenBound) then
+                addForwarder(alias, mbr, span)
+            else if !nonContextual.contains(alias) && mbr.matchesImportBound(wildcardBound) then
+              nonContextual += alias
+              addWildcardForwardersNamed(alias, span)
+
+      def addForwarders(sels: List[untpd.ImportSelector], seen: List[TermName]): Unit = sels match
+        case sel :: sels1 =>
+          if sel.isWildcard then
+            addWildcardForwarders(seen, sel.span)
+          else
+            if sel.rename != nme.WILDCARD then
+              addForwardersNamed(sel.name, sel.rename, sel.span)
+            addForwarders(sels1, sel.name :: seen)
+        case _ =>
 
-        addForwarders(selectors, Nil)
-        val forwarders = buf.toList
-        exp.pushAttachment(ExportForwarders, forwarders)
-        forwarders
-      }
+      addForwarders(selectors, Nil)
+      val forwarders = buf.toList
+      exp.pushAttachment(ExportForwarders, forwarders)
+      forwarders
+    end exportForwarders
 
-      for case exp @ Export(_, _) <- rest do
-        for forwarder <- exportForwarders(exp) do
-          forwarder.symbol.entered
-    }
+    /** Add forwarders as required by the export statements in this class */
+    private def processExports(using Context): Unit =
+
+      def process(stats: List[Tree])(using Context): Unit = stats match
+        case (stat: Export) :: stats1 =>
+          for forwarder <- exportForwarders(stat) do
+            forwarder.symbol.entered
+          process(stats1)
+        case (stat: Import) :: stats1 =>
+          process(stats1)(using ctx.importContext(stat, symbolOfTree(stat)))
+        case stat :: stats1 =>
+          process(stats1)
+        case Nil =>
+
+      // Do a quick scan whether we need to process at all. This avoids creating
+      // import contexts for nothing.
+      if rest.exists(_.isInstanceOf[Export]) then
+        process(rest)
+    end processExports
 
     /** Ensure constructor is completed so that any parameter accessors
      *  which have type trees deriving from its parameters can be
@@ -1240,37 +1364,6 @@ class Namer { typer: Typer =>
         }
       }
 
-      /** Ensure that the first type in a list of parent types Ps points to a non-trait class.
-       *  If that's not already the case, add one. The added class type CT is determined as follows.
-       *  First, let C be the unique class such that
-       *  - there is a parent P_i such that P_i derives from C, and
-       *  - for every class D: If some parent P_j, j <= i derives from D, then C derives from D.
-       *  Then, let CT be the smallest type which
-       *  - has C as its class symbol, and
-       *  - for all parents P_i: If P_i derives from C then P_i <:< CT.
-       */
-      def ensureFirstIsClass(parents: List[Type]): List[Type] =
-
-        def realClassParent(sym: Symbol): ClassSymbol =
-          if !sym.isClass then defn.ObjectClass
-          else if !sym.is(Trait) then sym.asClass
-          else sym.info.parents match
-            case parentRef :: _ => realClassParent(parentRef.typeSymbol)
-            case nil => defn.ObjectClass
-
-        def improve(candidate: ClassSymbol, parent: Type): ClassSymbol =
-          val pcls = realClassParent(parent.classSymbol)
-          if (pcls derivesFrom candidate) pcls else candidate
-
-        parents match
-          case p :: _ if p.classSymbol.isRealClass => parents
-          case _ =>
-            val pcls = parents.foldLeft(defn.ObjectClass)(improve)
-            typr.println(i"ensure first is class $parents%, % --> ${parents map (_ baseType pcls)}%, %")
-            val first = TypeComparer.glb(defn.ObjectType :: parents.map(_.baseType(pcls)))
-            checkFeasibleParent(first, cls.srcPos, em" in inferred superclass $first") :: parents
-      end ensureFirstIsClass
-
       /** If `parents` contains references to traits that have supertraits with implicit parameters
        *  add those supertraits in linearization order unless they are already covered by other
        *  parent types. For instance, in
@@ -1313,7 +1406,7 @@ class Namer { typer: Typer =>
       val parentTypes = defn.adjustForTuple(cls, cls.typeParams,
         defn.adjustForBoxedUnit(cls,
           addUsingTraits(
-            ensureFirstIsClass(parents.map(checkedParentType(_)))
+            ensureFirstIsClass(cls, parents.map(checkedParentType(_)))
           )
         )
       )
@@ -1348,6 +1441,12 @@ class Namer { typer: Typer =>
     }
   }
 
+  /** Possible actions to perform when deciding on a forwarder for a member */
+  private enum CanForward:
+    case Yes
+    case No(whyNot: String)
+    case Skip  // for members that never have forwarders
+
   class SuspendCompleter extends LazyType, SymbolLoaders.SecondCompleter {
 
     final override def complete(denot: SymDenotation)(using Context): Unit =
@@ -1368,14 +1467,10 @@ class Namer { typer: Typer =>
   }
 
   def typedAheadType(tree: Tree, pt: Type = WildcardType)(using Context): tpd.Tree =
-    inMode(ctx.mode &~ Mode.PatternOrTypeBits | Mode.Type) {
-      typedAhead(tree, typer.typed(_, pt))
-    }
+    typedAhead(tree, typer.typedType(_, pt))
 
   def typedAheadExpr(tree: Tree, pt: Type = WildcardType)(using Context): tpd.Tree =
-    withoutMode(Mode.PatternOrTypeBits) {
-      typedAhead(tree, typer.typed(_, pt))
-    }
+    typedAhead(tree, typer.typedExpr(_, pt))
 
   def typedAheadAnnotation(tree: Tree)(using Context): tpd.Tree =
     typedAheadExpr(tree, defn.AnnotationClass.typeRef)
@@ -1414,166 +1509,7 @@ class Namer { typer: Typer =>
    */
   def valOrDefDefSig(mdef: ValOrDefDef, sym: Symbol, paramss: List[List[Symbol]], paramFn: Type => Type)(using Context): Type = {
 
-    def inferredType = {
-      /** A type for this definition that might be inherited from elsewhere:
-       *  If this is a setter parameter, the corresponding getter type.
-       *  If this is a class member, the conjunction of all result types
-       *  of overridden methods.
-       *  NoType if neither case holds.
-       */
-      val inherited =
-        if (sym.owner.isTerm) NoType
-        else
-          // TODO: Look only at member of supertype instead?
-          lazy val schema = paramFn(WildcardType)
-          val site = sym.owner.thisType
-          val bcs = sym.owner.info.baseClasses
-          if bcs.isEmpty then
-            assert(ctx.reporter.errorsReported)
-            NoType
-          else bcs.tail.foldLeft(NoType: Type) { (tp, cls) =>
-            def instantiatedResType(info: Type, paramss: List[List[Symbol]]): Type = info match
-              case info: PolyType =>
-                paramss match
-                  case TypeSymbols(tparams) :: paramss1 if info.paramNames.length == tparams.length =>
-                    instantiatedResType(info.instantiate(tparams.map(_.typeRef)), paramss1)
-                  case _ =>
-                    NoType
-              case info: MethodType =>
-                paramss match
-                  case TermSymbols(vparams) :: paramss1 if info.paramNames.length == vparams.length =>
-                    instantiatedResType(info.instantiate(vparams.map(_.termRef)), paramss1)
-                  case _ =>
-                    NoType
-              case _ =>
-                if paramss.isEmpty then info.widenExpr
-                else NoType
-
-            val iRawInfo =
-              cls.info.nonPrivateDecl(sym.name).matchingDenotation(site, schema, sym.targetName).info
-            val iResType = instantiatedResType(iRawInfo, paramss).asSeenFrom(site, cls)
-            if (iResType.exists)
-              typr.println(i"using inherited type for ${mdef.name}; raw: $iRawInfo, inherited: $iResType")
-            tp & iResType
-          }
-      end inherited
-
-      /** If this is a default getter, the type of the corresponding method parameter,
-       *  otherwise NoType.
-       */
-      def defaultParamType = sym.name match
-        case DefaultGetterName(original, idx) =>
-          val meth: Denotation =
-            if (original.isConstructorName && (sym.owner.is(ModuleClass)))
-              sym.owner.companionClass.info.decl(nme.CONSTRUCTOR)
-            else
-              ctx.defContext(sym).denotNamed(original)
-          def paramProto(paramss: List[List[Type]], idx: Int): Type = paramss match {
-            case params :: paramss1 =>
-              if (idx < params.length) params(idx)
-              else paramProto(paramss1, idx - params.length)
-            case nil =>
-              NoType
-          }
-          val defaultAlts = meth.altsWith(_.hasDefaultParams)
-          if (defaultAlts.length == 1)
-            paramProto(defaultAlts.head.info.widen.paramInfoss, idx)
-          else
-            NoType
-        case _ =>
-          NoType
-
-      /** The expected type for a default argument. This is normally the `defaultParamType`
-       *  with references to internal parameters replaced by wildcards. This replacement
-       *  makes it possible that the default argument can have a more specific type than the
-       *  parameter. For instance, we allow
-       *
-       *      class C[A](a: A) { def copy[B](x: B = a): C[B] = C(x) }
-       *
-       *  However, if the default parameter type is a context function type, we
-       *  have to make sure that wildcard types do not leak into the implicitly
-       *  generated closure's result type. Test case is pos/i12019.scala. If there
-       *  would be a leakage with the wildcard approximation, we pick the original
-       *  default parameter type as expected type.
-       */
-      def expectedDefaultArgType =
-        val originalTp = defaultParamType
-        val approxTp = wildApprox(originalTp)
-        approxTp.stripPoly match
-          case atp @ defn.ContextFunctionType(_, resType, _)
-          if !defn.isNonRefinedFunction(atp) // in this case `resType` is lying, gives us only the non-dependent upper bound
-              || resType.existsPart(_.isInstanceOf[WildcardType], stopAtStatic = true, forceLazy = false) =>
-            originalTp
-          case _ =>
-            approxTp
-
-      // println(s"final inherited for $sym: ${inherited.toString}") !!!
-      // println(s"owner = ${sym.owner}, decls = ${sym.owner.info.decls.show}")
-      // TODO Scala 3.1: only check for inline vals (no final ones)
-      def isInlineVal = sym.isOneOf(FinalOrInline, butNot = Method | Mutable)
-
-      var rhsCtx = ctx.fresh.addMode(Mode.InferringReturnType)
-      if sym.isInlineMethod then rhsCtx = rhsCtx.addMode(Mode.InlineableBody)
-      if sym.is(ExtensionMethod) then rhsCtx = rhsCtx.addMode(Mode.InExtensionMethod)
-      val typeParams = paramss.collect { case TypeSymbols(tparams) => tparams }.flatten
-      if (typeParams.nonEmpty) {
-        // we'll be typing an expression from a polymorphic definition's body,
-        // so we must allow constraining its type parameters
-        // compare with typedDefDef, see tests/pos/gadt-inference.scala
-        rhsCtx.setFreshGADTBounds
-        rhsCtx.gadt.addToConstraint(typeParams)
-      }
-
-      def typedAheadRhs(pt: Type) =
-        PrepareInlineable.dropInlineIfError(sym,
-          typedAheadExpr(mdef.rhs, pt)(using rhsCtx))
-
-      def rhsType =
-        // For default getters, we use the corresponding parameter type as an
-        // expected type but we run it through `wildApprox` to allow default
-        // parameters like in `def mkList[T](value: T = 1): List[T]`.
-        val defaultTp = defaultParamType
-        val pt = inherited.orElse(expectedDefaultArgType).orElse(WildcardType).widenExpr
-        val tp = typedAheadRhs(pt).tpe
-        if (defaultTp eq pt) && (tp frozen_<:< defaultTp) then
-          // When possible, widen to the default getter parameter type to permit a
-          // larger choice of overrides (see `default-getter.scala`).
-          // For justification on the use of `@uncheckedVariance`, see
-          // `default-getter-variance.scala`.
-          AnnotatedType(defaultTp, Annotation(defn.UncheckedVarianceAnnot))
-        else tp.widenTermRefExpr.simplified match
-          case ctp: ConstantType if isInlineVal => ctp
-          case tp =>
-            TypeComparer.widenInferred(tp, pt)
-
-      // Replace aliases to Unit by Unit itself. If we leave the alias in
-      // it would be erased to BoxedUnit.
-      def dealiasIfUnit(tp: Type) = if (tp.isRef(defn.UnitClass)) defn.UnitType else tp
-
-      // Approximate a type `tp` with a type that does not contain skolem types.
-      val deskolemize = new ApproximatingTypeMap {
-        def apply(tp: Type) = /*trace(i"deskolemize($tp) at $variance", show = true)*/
-          tp match {
-            case tp: SkolemType => range(defn.NothingType, atVariance(1)(apply(tp.info)))
-            case _ => mapOver(tp)
-          }
-      }
-
-      def cookedRhsType = deskolemize(dealiasIfUnit(rhsType))
-      def lhsType = fullyDefinedType(cookedRhsType, "right-hand side", mdef.span)
-      //if (sym.name.toString == "y") println(i"rhs = $rhsType, cooked = $cookedRhsType")
-      if (inherited.exists)
-        if (isInlineVal) lhsType else inherited
-      else {
-        if (sym.is(Implicit))
-          mdef match {
-            case _: DefDef => missingType(sym, "result ")
-            case _: ValDef if sym.owner.isType => missingType(sym, "")
-            case _ =>
-          }
-        lhsType orElse WildcardType
-      }
-    }
+    def inferredType = inferredResultType(mdef, sym, paramss, paramFn, WildcardType)
     lazy val termParamss = paramss.collect { case TermSymbols(vparams) => vparams }
 
     val tptProto = mdef.tpt match {
@@ -1591,22 +1527,7 @@ class Namer { typer: Typer =>
             // This case applies if the closure result type contains uninstantiated
             // type variables. In this case, constrain the closure result from below
             // by the parameter-capture-avoiding type of the body.
-            val rhsType = typedAheadExpr(mdef.rhs, tpt.tpe).tpe
-
-            // The following part is important since otherwise we might instantiate
-            // the closure result type with a plain functon type that refers
-            // to local parameters. An example where this happens in `dependent-closures.scala`
-            // If the code after `val rhsType` is commented out, this file fails pickling tests.
-            // AVOIDANCE TODO: Follow up why this happens, and whether there
-            // are better ways to achieve this. It would be good if we could get rid of this code.
-            // It seems at least partially redundant with the nesting level checking on TypeVar
-            // instantiation.
-            val hygienicType = TypeOps.avoid(rhsType, termParamss.flatten)
-            if (!hygienicType.isValueType || !(hygienicType <:< tpt.tpe))
-              report.error(i"return type ${tpt.tpe} of lambda cannot be made hygienic;\n" +
-                i"it is not a supertype of the hygienic type $hygienicType", mdef.srcPos)
-            //println(i"lifting $rhsType over $termParamss -> $hygienicType = ${tpt.tpe}")
-            //println(TypeComparer.explained { implicit ctx => hygienicType <:< tpt.tpe })
+            typedAheadExpr(mdef.rhs, tpt.tpe).tpe
           case _ =>
         }
         WildcardType
@@ -1620,7 +1541,7 @@ class Namer { typer: Typer =>
   }
 
   /** The type signature of a DefDef with given symbol */
-  def defDefSig(ddef: DefDef, sym: Symbol)(using Context): Type = {
+  def defDefSig(ddef: DefDef, sym: Symbol, completer: Namer#Completer)(using Context): Type = {
     // Beware: ddef.name need not match sym.name if sym was freshened!
     val isConstructor = sym.name == nme.CONSTRUCTOR
 
@@ -1649,20 +1570,182 @@ class Namer { typer: Typer =>
     //   5. Info of CP is copied to DP and DP is completed.
     index(ddef.leadingTypeParams)
     if (isConstructor) sym.owner.typeParams.foreach(_.ensureCompleted())
-    for (tparam <- ddef.leadingTypeParams) typedAheadExpr(tparam)
-
+    val completedTypeParams =
+      for tparam <- ddef.leadingTypeParams yield typedAheadExpr(tparam).symbol
+    if completedTypeParams.forall(_.isType) then
+      completer.setCompletedTypeParams(completedTypeParams.asInstanceOf[List[TypeSymbol]])
     ddef.trailingParamss.foreach(completeParams)
     val paramSymss = normalizeIfConstructor(ddef.paramss.nestedMap(symbolOfTree), isConstructor)
     sym.setParamss(paramSymss)
-    def wrapMethType(restpe: Type): Type = {
+    def wrapMethType(restpe: Type): Type =
       instantiateDependent(restpe, paramSymss)
-      methodType(paramSymss, restpe, isJava = ddef.mods.is(JavaDefined))
-    }
-    if (isConstructor) {
+      methodType(paramSymss, restpe, ddef.mods.is(JavaDefined))
+    if isConstructor then
       // set result type tree to unit, but take the current class as result type of the symbol
       typedAheadType(ddef.tpt, defn.UnitType)
-      wrapMethType(effectiveResultType(sym, paramSymss, NoType))
-    }
-    else valOrDefDefSig(ddef, sym, paramSymss, wrapMethType)
+      wrapMethType(effectiveResultType(sym, paramSymss))
+    else
+      valOrDefDefSig(ddef, sym, paramSymss, wrapMethType)
   }
+
+  def inferredResultType(
+      mdef: ValOrDefDef,
+      sym: Symbol,
+      paramss: List[List[Symbol]],
+      paramFn: Type => Type,
+      fallbackProto: Type
+    )(using Context): Type =
+
+    /** A type for this definition that might be inherited from elsewhere:
+     *  If this is a setter parameter, the corresponding getter type.
+     *  If this is a class member, the conjunction of all result types
+     *  of overridden methods.
+     *  NoType if neither case holds.
+     */
+    val inherited =
+      if (sym.owner.isTerm) NoType
+      else
+        // TODO: Look only at member of supertype instead?
+        lazy val schema = paramFn(WildcardType)
+        val site = sym.owner.thisType
+        val bcs = sym.owner.info.baseClasses
+        if bcs.isEmpty then
+          assert(ctx.reporter.errorsReported)
+          NoType
+        else bcs.tail.foldLeft(NoType: Type) { (tp, cls) =>
+          def instantiatedResType(info: Type, paramss: List[List[Symbol]]): Type = info match
+            case info: PolyType =>
+              paramss match
+                case TypeSymbols(tparams) :: paramss1 if info.paramNames.length == tparams.length =>
+                  instantiatedResType(info.instantiate(tparams.map(_.typeRef)), paramss1)
+                case _ =>
+                  NoType
+            case info: MethodType =>
+              paramss match
+                case TermSymbols(vparams) :: paramss1 if info.paramNames.length == vparams.length =>
+                  instantiatedResType(info.instantiate(vparams.map(_.termRef)), paramss1)
+                case _ =>
+                  NoType
+            case _ =>
+              if paramss.isEmpty then info.widenExpr
+              else NoType
+
+          val iRawInfo =
+            cls.info.nonPrivateDecl(sym.name).matchingDenotation(site, schema, sym.targetName).info
+          val iResType = instantiatedResType(iRawInfo, paramss).asSeenFrom(site, cls)
+          if (iResType.exists)
+            typr.println(i"using inherited type for ${mdef.name}; raw: $iRawInfo, inherited: $iResType")
+          tp & iResType
+        }
+    end inherited
+
+    /** If this is a default getter, the type of the corresponding method parameter,
+     *  otherwise NoType.
+     */
+    def defaultParamType = sym.name match
+      case DefaultGetterName(original, idx) =>
+        val meth: Denotation =
+          if (original.isConstructorName && (sym.owner.is(ModuleClass)))
+            sym.owner.companionClass.info.decl(nme.CONSTRUCTOR)
+          else
+            ctx.defContext(sym).denotNamed(original)
+        def paramProto(paramss: List[List[Type]], idx: Int): Type = paramss match {
+          case params :: paramss1 =>
+            if (idx < params.length) params(idx)
+            else paramProto(paramss1, idx - params.length)
+          case nil =>
+            NoType
+        }
+        val defaultAlts = meth.altsWith(_.hasDefaultParams)
+        if (defaultAlts.length == 1)
+          paramProto(defaultAlts.head.info.widen.paramInfoss, idx)
+        else
+          NoType
+      case _ =>
+        NoType
+
+    /** The expected type for a default argument. This is normally the `defaultParamType`
+     *  with references to internal parameters replaced by wildcards. This replacement
+     *  makes it possible that the default argument can have a more specific type than the
+     *  parameter. For instance, we allow
+     *
+     *      class C[A](a: A) { def copy[B](x: B = a): C[B] = C(x) }
+     *
+     *  However, if the default parameter type is a context function type, we
+     *  have to make sure that wildcard types do not leak into the implicitly
+     *  generated closure's result type. Test case is pos/i12019.scala. If there
+     *  would be a leakage with the wildcard approximation, we pick the original
+     *  default parameter type as expected type.
+     */
+    def expectedDefaultArgType =
+      val originalTp = defaultParamType
+      val approxTp = wildApprox(originalTp)
+      approxTp.stripPoly match
+        case atp @ defn.ContextFunctionType(_, resType, _)
+        if !defn.isNonRefinedFunction(atp) // in this case `resType` is lying, gives us only the non-dependent upper bound
+            || resType.existsPart(_.isInstanceOf[WildcardType], StopAt.Static, forceLazy = false) =>
+          originalTp
+        case _ =>
+          approxTp
+
+    // println(s"final inherited for $sym: ${inherited.toString}") !!!
+    // println(s"owner = ${sym.owner}, decls = ${sym.owner.info.decls.show}")
+    // TODO Scala 3.1: only check for inline vals (no final ones)
+    def isInlineVal = sym.isOneOf(FinalOrInline, butNot = Method | Mutable)
+
+    var rhsCtx = ctx.fresh.addMode(Mode.InferringReturnType)
+    if sym.isInlineMethod then rhsCtx = rhsCtx.addMode(Mode.InlineableBody)
+    if sym.is(ExtensionMethod) then rhsCtx = rhsCtx.addMode(Mode.InExtensionMethod)
+    val typeParams = paramss.collect { case TypeSymbols(tparams) => tparams }.flatten
+    if (typeParams.nonEmpty) {
+      // we'll be typing an expression from a polymorphic definition's body,
+      // so we must allow constraining its type parameters
+      // compare with typedDefDef, see tests/pos/gadt-inference.scala
+      rhsCtx.setFreshGADTBounds
+      rhsCtx.gadt.addToConstraint(typeParams)
+    }
+
+    def typedAheadRhs(pt: Type) =
+      PrepareInlineable.dropInlineIfError(sym,
+        typedAheadExpr(mdef.rhs, pt)(using rhsCtx))
+
+    def rhsType =
+      // For default getters, we use the corresponding parameter type as an
+      // expected type but we run it through `wildApprox` to allow default
+      // parameters like in `def mkList[T](value: T = 1): List[T]`.
+      val defaultTp = defaultParamType
+      val pt = inherited.orElse(expectedDefaultArgType).orElse(fallbackProto).widenExpr
+      val tp = typedAheadRhs(pt).tpe
+      if (defaultTp eq pt) && (tp frozen_<:< defaultTp) then
+        // When possible, widen to the default getter parameter type to permit a
+        // larger choice of overrides (see `default-getter.scala`).
+        // For justification on the use of `@uncheckedVariance`, see
+        // `default-getter-variance.scala`.
+        AnnotatedType(defaultTp, Annotation(defn.UncheckedVarianceAnnot))
+      else
+        // don't strip @uncheckedVariance annot for default getters
+        TypeOps.simplify(tp.widenTermRefExpr,
+            if defaultTp.exists then TypeOps.SimplifyKeepUnchecked() else null) match
+          case ctp: ConstantType if isInlineVal => ctp
+          case tp => TypeComparer.widenInferred(tp, pt)
+
+    // Replace aliases to Unit by Unit itself. If we leave the alias in
+    // it would be erased to BoxedUnit.
+    def dealiasIfUnit(tp: Type) = if (tp.isRef(defn.UnitClass)) defn.UnitType else tp
+
+    def cookedRhsType = dealiasIfUnit(rhsType).deskolemized
+    def lhsType = fullyDefinedType(cookedRhsType, "right-hand side", mdef.span)
+    //if (sym.name.toString == "y") println(i"rhs = $rhsType, cooked = $cookedRhsType")
+    if (inherited.exists)
+      if (isInlineVal) lhsType else inherited
+    else {
+      if (sym.is(Implicit))
+        mdef match {
+          case _: DefDef => missingType(sym, "result ")
+          case _: ValDef if sym.owner.isType => missingType(sym, "")
+          case _ =>
+        }
+      lhsType orElse WildcardType
+    }
+  end inferredResultType
 }
diff --git a/compiler/src/dotty/tools/dotc/typer/PrepareInlineable.scala b/compiler/src/dotty/tools/dotc/typer/PrepareInlineable.scala
index 5b52fa883403..8fea810921e4 100644
--- a/compiler/src/dotty/tools/dotc/typer/PrepareInlineable.scala
+++ b/compiler/src/dotty/tools/dotc/typer/PrepareInlineable.scala
@@ -26,6 +26,19 @@ import dotty.tools.dotc.transform.TreeMapWithStages._
 object PrepareInlineable {
   import tpd._
 
+  private val InlineAccessorsKey = new Property.Key[InlineAccessors]
+
+  def initContext(ctx: Context): Context =
+    ctx.fresh.setProperty(InlineAccessorsKey, new InlineAccessors)
+
+  def makeInlineable(tree: Tree)(using Context): Tree =
+    ctx.property(InlineAccessorsKey).get.makeInlineable(tree)
+
+  def addAccessorDefs(cls: Symbol, body: List[Tree])(using Context): List[Tree] =
+    ctx.property(InlineAccessorsKey) match
+      case Some(inlineAccessors) => inlineAccessors.addAccessorDefs(cls, body)
+      case _ => body
+
   class InlineAccessors extends AccessProxies {
 
     /** If an inline accessor name wraps a unique inline name, this is taken as indication
@@ -80,7 +93,7 @@ object PrepareInlineable {
       def preTransform(tree: Tree)(using Context): Tree = tree match {
         case tree: RefTree if needsAccessor(tree.symbol) =>
           if (tree.symbol.isConstructor) {
-            report.error("Implementation restriction: cannot use private constructors in inlineinline methods", tree.srcPos)
+            report.error("Implementation restriction: cannot use private constructors in inline methods", tree.srcPos)
             tree // TODO: create a proper accessor for the private constructor
           }
           else useAccessor(tree)
@@ -251,7 +264,7 @@ object PrepareInlineable {
             if inlined.isInlineMethod then
               inlinedBody = dropInlineIfError(inlined,
                 checkInlineMethod(inlined,
-                  ctx.compilationUnit.inlineAccessors.makeInlineable(inlinedBody)))
+                  PrepareInlineable.makeInlineable(inlinedBody)))
             inlining.println(i"Body to inline for $inlined: $inlinedBody")
             inlinedBody
           })
@@ -259,8 +272,6 @@ object PrepareInlineable {
     }
 
   private def checkInlineMethod(inlined: Symbol, body: Tree)(using Context): body.type = {
-    if (inlined.owner.isClass && inlined.owner.seesOpaques)
-      report.error(em"Implementation restriction: No inline methods allowed where opaque type aliases are in scope", inlined.srcPos)
     if Inliner.inInlineMethod(using ctx.outer) then
       report.error(ex"Implementation restriction: nested inline methods are not supported", inlined.srcPos)
 
diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala
index 0d2c60c677b0..fcbcf179d97e 100644
--- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala
+++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala
@@ -8,7 +8,7 @@ import Contexts._, Types._, Denotations._, Names._, StdNames._, NameOps._, Symbo
 import NameKinds.DepParamName
 import Trees._
 import Constants._
-import util.{Stats, SimpleIdentityMap}
+import util.{Stats, SimpleIdentityMap, SimpleIdentitySet}
 import Decorators._
 import Uniques._
 import config.Printers.typr
@@ -64,19 +64,8 @@ object ProtoTypes {
                 i"""normalizedCompatible for $poly, $pt = $result
                    |constraint was: ${ctx.typerState.constraint}
                    |constraint now: ${newctx.typerState.constraint}""")
-            if result
-                && (ctx.typerState.constraint ne newctx.typerState.constraint)
-                && {
-                  val existingVars = ctx.typerState.uninstVars.toSet
-                  newctx.typerState.uninstVars.forall(existingVars.contains)
-                }
-            then newctx.typerState.commit()
-              // If the new constrait contains fresh type variables we cannot keep it,
-              // since those type variables are not instantiated anywhere in the source.
-              // See pos/i6682a.scala for a test case. See pos/11243.scala and pos/i5773b.scala
-              // for tests where it matters that we keep the constraint otherwise.
-              // TODO: A better solution would clean the new constraint, so that it "avoids"
-              // the problematic type variables. But we have not implemented such an algorithm yet.
+            if result && (ctx.typerState.constraint ne newctx.typerState.constraint) then
+              newctx.typerState.commit()
             result
           case _ => testCompat
       else explore(testCompat)
@@ -293,6 +282,9 @@ object ProtoTypes {
     /** A map in which typed arguments can be stored to be later integrated in `typedArgs`. */
     var typedArg: SimpleIdentityMap[untpd.Tree, Tree] = SimpleIdentityMap.empty
 
+    /** The argument that produced errors during typing */
+    var errorArgs: SimpleIdentitySet[untpd.Tree] = SimpleIdentitySet.empty
+
     /** The tupled or untupled version of this prototype, if it has been computed */
     var tupledDual: Type = NoType
 
@@ -352,6 +344,30 @@ object ProtoTypes {
       case _ => false
     }
 
+    /** Did an argument produce an error when typing? This means: an error was reported
+     *  and a tree got an error type. Errors of adaptation where a tree has a good type
+     *  but that type does not conform to the expected type are not counted.
+     */
+    def hasErrorArg = !state.errorArgs.isEmpty
+
+    /** Does the tree have embedded error trees that are not at the outside?
+     *  A nested tree t1 is "at the outside" relative to a tree t2 if
+     *    - t1 and t2 have the same span, or
+     *    - t2 is an ascription (t22: T) and t1 is at the outside of t22
+     *    - t2 is a closure (...) => t22 and t1 is at the outside of t22
+     */
+    def hasInnerErrors(t: Tree): Boolean = t match
+      case Typed(expr, tpe) => hasInnerErrors(expr)
+      case closureDef(mdef) => hasInnerErrors(mdef.rhs)
+      case _ =>
+        t.existsSubTree { t1 =>
+          if t1.typeOpt.isError && t1.span.toSynthetic != t.span.toSynthetic then
+            typr.println(i"error subtree $t1 of $t with ${t1.typeOpt}, spans = ${t1.span}, ${t.span}")
+            true
+          else
+            false
+        }
+
     private def cacheTypedArg(arg: untpd.Tree, typerFn: untpd.Tree => Tree, force: Boolean)(using Context): Tree = {
       var targ = state.typedArg(arg)
       if (targ == null)
@@ -368,8 +384,12 @@ object ProtoTypes {
             targ = arg.withType(WildcardType)
           case _ =>
             targ = typerFn(arg)
-            if (!ctx.reporter.hasUnreportedErrors)
+            if ctx.reporter.hasUnreportedErrors then
+              if hasInnerErrors(targ) then
+                state.errorArgs += arg
+            else
               state.typedArg = state.typedArg.updated(arg, targ)
+              state.errorArgs -= arg
         }
       targ
     }
@@ -385,21 +405,49 @@ object ProtoTypes {
      *                before it is typed. The second Int parameter is the parameter index.
      */
     def typedArgs(norm: (untpd.Tree, Int) => untpd.Tree = sameTree)(using Context): List[Tree] =
-      if (state.typedArgs.size == args.length) state.typedArgs
-      else {
-        val prevConstraint = protoCtx.typerState.constraint
-
-        try
-          inContext(protoCtx) {
-            val args1 = args.mapWithIndexConserve((arg, idx) =>
-              cacheTypedArg(arg, arg => typer.typed(norm(arg, idx)), force = false))
-            if !args1.exists(arg => isUndefined(arg.tpe)) then state.typedArgs = args1
-            args1
-          }
-        finally
-          if (protoCtx.typerState.constraint ne prevConstraint)
-            ctx.typerState.mergeConstraintWith(protoCtx.typerState)
-      }
+      if state.typedArgs.size == args.length then state.typedArgs
+      else
+        val passedCtx = ctx
+        val passedTyperState = ctx.typerState
+        inContext(protoCtx.withUncommittedTyperState) {
+          val protoTyperState = ctx.typerState
+          val oldConstraint = protoTyperState.constraint
+          val args1 = args.mapWithIndexConserve((arg, idx) =>
+            cacheTypedArg(arg, arg => typer.typed(norm(arg, idx)), force = false))
+          val newConstraint = protoTyperState.constraint
+
+          if !args1.exists(arg => isUndefined(arg.tpe)) then state.typedArgs = args1
+
+          // We only need to propagate constraints if we typed the arguments in a different
+          // TyperState and if that created additional constraints.
+          if (passedTyperState ne protoTyperState) && (oldConstraint ne newConstraint) then
+            // To respect the pre-condition of `mergeConstraintWith` and keep
+            // `protoTyperState` committable we must ensure that it does not
+            // contain any type variable which don't already exist in the passed
+            // TyperState. This is achieved by instantiating any such type
+            // variable. NOTE: this does not suffice to discard type variables
+            // in ancestors of `protoTyperState`, if this situation ever
+            // comes up, an assertion in TyperState will trigger and this code
+            // will need to be generalized.
+            if protoTyperState.isCommittable then
+              val passedConstraint = passedTyperState.constraint
+              val newLambdas = newConstraint.domainLambdas.filter(tl =>
+                !passedConstraint.contains(tl) || passedConstraint.hasConflictingTypeVarsFor(tl, newConstraint))
+              val newTvars = newLambdas.flatMap(_.paramRefs).map(newConstraint.typeVarOfParam)
+
+              args1.foreach(arg => Inferencing.instantiateSelected(arg.tpe, newTvars))
+
+              // `instantiateSelected` can leave some type variables uninstantiated,
+              // so we maximize them in a second pass.
+              newTvars.foreach {
+                case tvar: TypeVar if !tvar.isInstantiated =>
+                  tvar.instantiate(fromBelow = false)
+                case _ =>
+              }
+            passedTyperState.mergeConstraintWith(protoTyperState)(using passedCtx)
+          end if
+          args1
+        }
 
     /** Type single argument and remember the unadapted result in `myTypedArg`.
      *  used to avoid repeated typings of trees when backtracking.
@@ -424,6 +472,10 @@ object ProtoTypes {
       if (t == null) NoType else t.tpe
     }
 
+    /** Cache the typed argument */
+    def cacheArg(arg: untpd.Tree, targ: Tree) =
+      state.typedArg = state.typedArg.updated(arg, targ)
+
     /** The same proto-type but with all arguments combined in a single tuple */
     def tupledDual: FunProto = state.tupledDual match {
       case pt: FunProto =>
@@ -561,7 +613,7 @@ object ProtoTypes {
     override def isMatchedBy(tp: Type, keepConstraint: Boolean)(using Context): Boolean =
       canInstantiate(tp) || tp.member(nme.apply).hasAltWith(d => canInstantiate(d.info))
 
-    def derivedPolyProto(targs: List[Tree], resultType: Type): PolyProto =
+    def derivedPolyProto(targs: List[Tree], resType: Type): PolyProto =
       if ((targs eq this.targs) && (resType eq this.resType)) this
       else PolyProto(targs, resType)
 
@@ -607,7 +659,11 @@ object ProtoTypes {
    *  for each parameter.
    *  @return  The added type lambda, and the list of created type variables.
    */
-  def constrained(tl: TypeLambda, owningTree: untpd.Tree, alwaysAddTypeVars: Boolean)(using Context): (TypeLambda, List[TypeTree]) = {
+  def constrained(using Context)(
+    tl: TypeLambda, owningTree: untpd.Tree,
+    alwaysAddTypeVars: Boolean,
+    nestingLevel: Int = ctx.nestingLevel
+  ): (TypeLambda, List[TypeTree]) = {
     val state = ctx.typerState
     val addTypeVars = alwaysAddTypeVars || !owningTree.isEmpty
     if (tl.isInstanceOf[PolyType])
@@ -618,8 +674,8 @@ object ProtoTypes {
     def newTypeVars(tl: TypeLambda): List[TypeTree] =
       for (paramRef <- tl.paramRefs)
       yield {
-        val tt = TypeVarBinder().withSpan(owningTree.span)
-        val tvar = TypeVar(paramRef, state)
+        val tt = InferredTypeTree().withSpan(owningTree.span)
+        val tvar = TypeVar(paramRef, state, nestingLevel)
         state.ownedVars += tvar
         tt.withType(tvar)
       }
@@ -638,34 +694,66 @@ object ProtoTypes {
   def constrained(tl: TypeLambda)(using Context): TypeLambda =
     constrained(tl, EmptyTree)._1
 
-  def newTypeVar(bounds: TypeBounds)(using Context): TypeVar = {
-    val poly = PolyType(DepParamName.fresh().toTypeName :: Nil)(
+  /** Instantiate `tl` with fresh type variables added to the constraint. */
+  def instantiateWithTypeVars(tl: TypeLambda)(using Context): Type =
+    val targs = constrained(tl, ast.tpd.EmptyTree, alwaysAddTypeVars = true)._2
+    tl.instantiate(targs.tpes)
+
+  /** A fresh type variable added to the current constraint.
+   *  @param  bounds        The initial bounds of the variable
+   *  @param  name          The name of the variable, defaults to a fresh `DepParamName`
+   *  @param  nestingLevel  See `TypeVar#nestingLevel`
+   *  @param  represents    If it exists, a ParamRef that this TypeVar represents,
+   *                        to be retrieved using `representedParamRef`,
+   *                        e.g. in the substitution generated by `resultTypeApprox`.
+   *  If `represents` exists, it is stored in the result type of the PolyType
+   *  that backs the TypeVar, to be retrieved by `representedParamRef`.
+   */
+  def newTypeVar(using Context)(
+      bounds: TypeBounds, name: TypeName = DepParamName.fresh().toTypeName,
+      nestingLevel: Int = ctx.nestingLevel, represents: Type = NoType): TypeVar =
+    val poly = PolyType(name :: Nil)(
         pt => bounds :: Nil,
-        pt => defn.AnyType)
-    constrained(poly, untpd.EmptyTree, alwaysAddTypeVars = true)
+        pt => represents.orElse(defn.AnyType))
+    constrained(poly, untpd.EmptyTree, alwaysAddTypeVars = true, nestingLevel)
       ._2.head.tpe.asInstanceOf[TypeVar]
-  }
 
-  /** Create a new TypeVar that represents a dependent method parameter singleton */
-  def newDepTypeVar(tp: Type)(using Context): TypeVar =
-    newTypeVar(TypeBounds.upper(AndType(tp.widenExpr, defn.SingletonClass.typeRef)))
+  /** If `param` was created using `newTypeVar(..., represents = X)`, returns X.
+   *  This is used in:
+   *  - `Inferencing#constrainIfDependentParamRef` to retrieve the dependent function
+   *    parameter for which the variable was substituted.
+   *  - `ConstraintHandling#LevelAvoidMap#legalVar` to retrieve the type variable that was
+   *    avoided in a previous call to `legalVar`.
+   */
+  def representedParamRef(param: TypeParamRef)(using Context): Type =
+    param.binder.resultType match
+      case ref: ParamRef => ref
+      case _ => NoType
+
+  /** Create a new TypeVar that represents a dependent method parameter singleton `ref` */
+  def newDepTypeVar(ref: TermParamRef)(using Context): TypeVar =
+    newTypeVar(
+      TypeBounds.upper(AndType(ref.underlying.widenExpr, defn.SingletonClass.typeRef)),
+      represents = ref)
 
   /** The result type of `mt`, where all references to parameters of `mt` are
    *  replaced by either wildcards or TypeParamRefs.
    */
   def resultTypeApprox(mt: MethodType, wildcardOnly: Boolean = false)(using Context): Type =
     if mt.isResultDependent then
-      def replacement(tp: Type) =
+      def replacement(ref: TermParamRef) =
         if wildcardOnly
            || ctx.mode.is(Mode.TypevarsMissContext)
-           || !tp.widenExpr.isValueTypeOrWildcard
-        then WildcardType
-        else newDepTypeVar(tp)
-      mt.resultType.substParams(mt, mt.paramInfos.map(replacement))
+           || !ref.underlying.widenExpr.isValueTypeOrWildcard
+        then
+          WildcardType(ref.underlying.substParams(mt, mt.paramRefs.map(_ => WildcardType)).toBounds)
+        else
+          newDepTypeVar(ref)
+      mt.resultType.substParams(mt, mt.paramRefs.map(replacement))
     else mt.resultType
 
   /** The normalized form of a type
-   *   - unwraps polymorphic types, tracking their parameters in the current constraint
+   *   - instantiate polymorphic types with fresh type variables in the current constraint
    *   - skips implicit parameters of methods and functions;
    *     if result type depends on implicit parameter, replace with wildcard.
    *   - converts non-dependent method types to the corresponding function types
@@ -684,7 +772,7 @@ object ProtoTypes {
     Stats.record("normalize")
     tp.widenSingleton match {
       case poly: PolyType =>
-        normalize(constrained(poly).resultType, pt)
+        normalize(instantiateWithTypeVars(poly), pt)
       case mt: MethodType =>
         if (mt.isImplicitMethod) normalize(resultTypeApprox(mt, wildcardOnly = true), pt)
         else if (mt.isResultDependent) tp
@@ -783,6 +871,17 @@ object ProtoTypes {
       tp.derivedViewProto(
           wildApprox(tp.argType, theMap, seen, internal),
           wildApprox(tp.resultType, theMap, seen, internal))
+    case tp: FunProto =>
+      val args = tp.args.mapconserve(arg =>
+        val argTp = tp.typeOfArg(arg) match
+          case NoType => WildcardType
+          case tp => wildApprox(tp, theMap, seen, internal)
+        arg.withType(argTp))
+      val resTp = wildApprox(tp.resultType, theMap, seen, internal)
+      if (args eq tp.args) && (resTp eq tp.resultType) then
+        tp
+      else
+        FunProtoTyped(args, resTp)(ctx.typer, tp.applyKind)
     case tp: IgnoredProto =>
       WildcardType
     case  _: ThisType | _: BoundType => // default case, inlined for speed
diff --git a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala
index 18c08bb8a3ad..86cd7e5f24f3 100644
--- a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala
+++ b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala
@@ -56,19 +56,24 @@ trait QuotesAndSplices {
     else if !qctx.tpe.isStable then
       report.error(em"Quotes require stable Quotes, but found non stable $qctx", qctx.srcPos)
 
-    val tree1 =
-      if ctx.mode.is(Mode.Pattern) then
-        typedQuotePattern(tree, pt, qctx)
-      else if tree.quoted.isType then
-        val msg = em"Consider using canonical type constructor scala.quoted.Type.of[${tree.quoted}] instead"
-        if sourceVersion.isAtLeast(`future-migration`) then report.error(msg, tree.srcPos)
-        else report.warning(msg, tree.srcPos)
-        typedTypeApply(untpd.TypeApply(untpd.ref(defn.QuotedTypeModule_of.termRef), tree.quoted :: Nil), pt)(using quoteContext).select(nme.apply).appliedTo(qctx)
-      else
-        typedApply(untpd.Apply(untpd.ref(defn.QuotedRuntime_exprQuote.termRef), tree.quoted), pt)(using pushQuotes(qctx)).select(nme.apply).appliedTo(qctx)
-    tree1.withSpan(tree.span)
+    if ctx.mode.is(Mode.Pattern) then
+      typedQuotePattern(tree, pt, qctx).withSpan(tree.span)
+    else if tree.quoted.isType then
+      val msg = em"Consider using canonical type constructor scala.quoted.Type.of[${tree.quoted}] instead"
+      if sourceVersion.isAtLeast(`future-migration`) then report.error(msg, tree.srcPos)
+      else report.warning(msg, tree.srcPos)
+      val typeOfTree = untpd.TypeApply(untpd.ref(defn.QuotedTypeModule_of.termRef), tree.quoted :: Nil).withSpan(tree.span)
+      makeInlineable(typedTypeApply(typeOfTree, pt)(using quoteContext).select(nme.apply).appliedTo(qctx).withSpan(tree.span))
+    else
+      val exprQuoteTree = untpd.Apply(untpd.ref(defn.QuotedRuntime_exprQuote.termRef), tree.quoted)
+      makeInlineable(typedApply(exprQuoteTree, pt)(using pushQuotes(qctx)).select(nme.apply).appliedTo(qctx).withSpan(tree.span))
   }
 
+  private def makeInlineable(tree: Tree)(using Context): Tree =
+    inContext(ctx.withOwner(ctx.owner.skipLocalOwners)) {
+      PrepareInlineable.makeInlineable(tree)
+    }
+
   /** Translate `${ t: Expr[T] }` into expression `t.splice` while tracking the quotation level in the context */
   def typedSplice(tree: untpd.Splice, pt: Type)(using Context): Tree = {
     record("typedSplice")
@@ -159,7 +164,7 @@ trait QuotesAndSplices {
 
     if ctx.mode.is(Mode.QuotedPattern) && level == 1 then
       report.error(
-            """`$` for quote pattern varable is not supported anymore.
+            """`$` for quote pattern variable is not supported anymore.
                |Use lower cased variable name without the `$` instead.""".stripMargin,
             tree.srcPos)
       ref(defn.NothingType)
@@ -323,7 +328,7 @@ trait QuotesAndSplices {
           tdef.symbol.addAnnotation(Annotation(New(ref(defn.QuotedRuntimePatterns_fromAboveAnnot.typeRef)).withSpan(tdef.span)))
         val bindingType = getBinding(tdef.symbol).symbol.typeRef
         val bindingTypeTpe = AppliedType(defn.QuotedTypeClass.typeRef, bindingType :: Nil)
-        val sym = newPatternBoundSymbol(nameOfSyntheticGiven, bindingTypeTpe, tdef.span, flags = ImplicitTerm)(using ctx0)
+        val sym = newPatternBoundSymbol(nameOfSyntheticGiven, bindingTypeTpe, tdef.span, flags = ImplicitVal)(using ctx0)
         buff += Bind(sym, untpd.Ident(nme.WILDCARD).withType(bindingTypeTpe)).withSpan(tdef.span)
         super.transform(tdef)
       }
diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala
index a744ca39f41f..85be8c32227a 100644
--- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala
+++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala
@@ -22,7 +22,7 @@ import Nullables._
  *
  *  Otherwise, everything is as in Typer.
  */
-class ReTyper extends Typer with ReChecking {
+class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking {
   import tpd._
 
   private def assertTyped(tree: untpd.Tree)(using Context): Unit =
@@ -114,6 +114,9 @@ class ReTyper extends Typer with ReChecking {
       super.handleUnexpectedFunType(tree, fun)
   }
 
+  override def addCanThrowCapabilities(expr: untpd.Tree, cases: List[CaseDef])(using Context): untpd.Tree =
+    expr
+
   override def typedUnadapted(tree: untpd.Tree, pt: Type, locked: TypeVars)(using Context): Tree =
     try super.typedUnadapted(tree, pt, locked)
     catch {
@@ -134,4 +137,5 @@ class ReTyper extends Typer with ReChecking {
   override protected def addAccessorDefs(cls: Symbol, body: List[Tree])(using Context): List[Tree] = body
   override protected def checkEqualityEvidence(tree: tpd.Tree, pt: Type)(using Context): Unit = ()
   override protected def matchingApply(methType: MethodOrPoly, pt: FunProto)(using Context): Boolean = true
+  override protected def typedScala2MacroBody(call: untpd.Tree)(using Context): Tree = promote(call)
 }
diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
index 7d9e25c33b2d..834cfba2a8a0 100644
--- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
+++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala
@@ -4,19 +4,18 @@ package typer
 
 import transform._
 import core._
-import Symbols._, Types._, Contexts._, Flags._, Names._, NameOps._
+import Symbols._, Types._, Contexts._, Flags._, Names._, NameOps._, NameKinds._
 import StdNames._, Denotations._, SymUtils._, Phases._, SymDenotations._
 import NameKinds.DefaultGetterName
 import Annotations._
 import util.Spans._
-import util.{Store, SrcPos}
+import util.SrcPos
 import scala.collection.{ mutable, immutable }
 import ast._
 import MegaPhase._
 import config.Printers.{checks, noPrinter}
-import scala.util.{Try, Failure, Success}
-import config.{ScalaVersion, NoScalaVersion}
 import Decorators._
+import OverridingPairs.isOverridingPair
 import typer.ErrorReporting._
 import config.Feature.{warnOnMigration, migrateTo3}
 import config.Printers.refcheck
@@ -29,6 +28,7 @@ object RefChecks {
   import tpd._
 
   val name: String = "refchecks"
+  val description: String = "checks related to abstract members and overriding"
 
   private val defaultMethodFilter = new NameFilter {
     def apply(pre: Type, name: Name)(using Context): Boolean = name.is(DefaultGetterName)
@@ -96,7 +96,7 @@ object RefChecks {
 
   /** Check that self type of this class conforms to self types of parents
    *  and required classes. Also check that only `enum` constructs extend
-   *  `java.lang.Enum`.
+   *  `java.lang.Enum` and that no user-written class extends `ContextFunctionN`.
    */
   private def checkParents(cls: Symbol, parentTrees: List[Tree])(using Context): Unit = cls.info match {
     case cinfo: ClassInfo =>
@@ -132,6 +132,8 @@ object RefChecks {
             case _ =>
               false
           }
+      if psyms.exists(defn.isContextFunctionClass) then
+        report.error(CannotExtendContextFunction(cls), cls.sourcePos)
 
       /** Check that arguments passed to trait parameters conform to the parameter types
        *  in the current class. This is necessary since parameter types might be narrowed
@@ -188,6 +190,54 @@ object RefChecks {
 
   // Override checking ------------------------------------------------------------
 
+  /** A class for checking all overriding pairs of `class` with a given check function */
+  class OverridingPairsChecker(clazz: ClassSymbol, self: Type)(using Context) extends OverridingPairs.Cursor(clazz):
+
+    override def matches(sym1: Symbol, sym2: Symbol): Boolean =
+      isOverridingPair(sym1, sym2, self)
+
+    private def inLinearizationOrder(sym1: Symbol, sym2: Symbol, parent: Symbol): Boolean =
+      val owner1 = sym1.owner
+      val owner2 = sym2.owner
+      def precedesIn(bcs: List[ClassSymbol]): Boolean = (bcs: @unchecked) match
+        case bc :: bcs1 =>
+          if owner1 eq bc then true
+          else if owner2 eq bc then false
+          else precedesIn(bcs1)
+        case _ =>
+          false
+      precedesIn(parent.asClass.baseClasses)
+
+    // We can exclude pairs safely from checking only under two additional conditions
+    //   - their signatures also match in the parent class.
+    //     See neg/i12828.scala for an example where this matters.
+    //   - The overriding/overridden pair appears in linearization order.
+    //     See neg/i5094.scala for an example where this matters.
+    override def canBeHandledByParent(sym1: Symbol, sym2: Symbol, parent: Symbol): Boolean =
+      isOverridingPair(sym1, sym2, parent.thisType)
+        .showing(i"already handled ${sym1.showLocated}: ${sym1.asSeenFrom(parent.thisType).signature}, ${sym2.showLocated}: ${sym2.asSeenFrom(parent.thisType).signature} = $result", refcheck)
+      && inLinearizationOrder(sym1, sym2, parent)
+
+    def checkAll(checkOverride: (Symbol, Symbol) => Unit) =
+      while hasNext do
+        checkOverride(overriding, overridden)
+        next()
+
+      // The OverridingPairs cursor does assume that concrete overrides abstract
+      // We have to check separately for an abstract definition in a subclass that
+      // overrides a concrete definition in a superclass. E.g. the following (inspired
+      // from neg/i11130.scala) needs to be rejected as well:
+      //
+      //   class A { type T = B }
+      //   class B extends A { override type T }
+      for dcl <- clazz.info.decls.iterator do
+        if dcl.is(Deferred) then
+          for other <- dcl.allOverriddenSymbols do
+            if !other.is(Deferred) then
+              checkOverride(dcl, other)
+    end checkAll
+  end OverridingPairsChecker
+
   /** 1. Check all members of class `clazz` for overriding conditions.
    *  That is for overriding member M and overridden member O:
    *
@@ -210,6 +260,7 @@ object RefChecks {
    *    1.9. If M is erased, O is erased. If O is erased, M is erased or inline.
    *    1.10.  If O is inline (and deferred, otherwise O would be final), M must be inline
    *    1.11.  If O is a Scala-2 macro, M must be a Scala-2 macro.
+   *    1.12.  If O is non-experimental, M must be non-experimental.
    *  2. Check that only abstract classes have deferred members
    *  3. Check that concrete classes do not have deferred definitions
    *     that are not implemented in a subclass.
@@ -260,31 +311,6 @@ object RefChecks {
       i"${if (showLocation) sym1.showLocated else sym1}$infoStr"
     }
 
-    def compatibleTypes(member: Symbol, memberTp: Type, other: Symbol, otherTp: Type, fallBack: => Boolean = false): Boolean =
-      try
-        if (member.isType) // intersection of bounds to refined types must be nonempty
-          memberTp.bounds.hi.hasSameKindAs(otherTp.bounds.hi) &&
-          ((memberTp frozen_<:< otherTp) ||
-            !member.owner.derivesFrom(other.owner) && {
-              // if member and other come from independent classes or traits, their
-              // bounds must have non-empty-intersection
-              val jointBounds = (memberTp.bounds & otherTp.bounds).bounds
-              jointBounds.lo frozen_<:< jointBounds.hi
-            })
-        else
-          // releaxed override check for explicit nulls if one of the symbols is Java defined,
-          // force `Null` being a subtype of reference types during override checking
-          val relaxedCtxForNulls =
-            if ctx.explicitNulls && (member.is(JavaDefined) || other.is(JavaDefined)) then
-              ctx.retractMode(Mode.SafeNulls)
-            else ctx
-          member.name.is(DefaultGetterName) // default getters are not checked for compatibility
-          || memberTp.overrides(otherTp, member.matchNullaryLoosely || other.matchNullaryLoosely || fallBack)(using relaxedCtxForNulls)
-      catch case ex: MissingType =>
-        // can happen when called with upwardsSelf as qualifier of memberTp and otherTp,
-        // because in that case we might access types that are not members of the qualifier.
-        false
-
     /* Check that all conditions for overriding `other` by `member`
        * of class `clazz` are met.
        */
@@ -314,10 +340,23 @@ object RefChecks {
       }
 
       def compatTypes(memberTp: Type, otherTp: Type): Boolean =
-        compatibleTypes(member, memberTp, other, otherTp,
-          fallBack = warnOnMigration(
-            overrideErrorMsg("no longer has compatible type"),
-            (if (member.owner == clazz) member else clazz).srcPos))
+        try
+          isOverridingPair(member, memberTp, other, otherTp,
+            fallBack = warnOnMigration(
+              overrideErrorMsg("no longer has compatible type"),
+              (if (member.owner == clazz) member else clazz).srcPos))
+        catch case ex: MissingType =>
+          // can happen when called with upwardsSelf as qualifier of memberTp and otherTp,
+          // because in that case we might access types that are not members of the qualifier.
+          false
+
+      /** Do types of term members `member` and `other` as seen from `self` match?
+       *  If not we treat them as not a real override and don't issue override
+       *  error messages. Also, bridges are not generated in this case.
+       *  Type members are always assumed to match.
+       */
+      def trueMatch: Boolean =
+        member.isType || memberTp(self).matches(otherTp(self))
 
       def emitOverrideError(fullmsg: Message) =
         if (!(hasErrors && member.is(Synthetic) && member.is(Module))) {
@@ -329,7 +368,7 @@ object RefChecks {
         }
 
       def overrideError(msg: String, compareTypes: Boolean = false) =
-        if (noErrorType)
+        if trueMatch && noErrorType then
           emitOverrideError(overrideErrorMsg(msg, compareTypes))
 
       def autoOverride(sym: Symbol) =
@@ -356,24 +395,6 @@ object RefChecks {
 
       //Console.println(infoString(member) + " overrides " + infoString(other) + " in " + clazz);//DEBUG
 
-      // return if we already checked this combination elsewhere
-      if (member.owner != clazz) {
-        def deferredCheck = member.is(Deferred) || !other.is(Deferred)
-        def subOther(s: Symbol) = s derivesFrom other.owner
-        def subMember(s: Symbol) = s derivesFrom member.owner
-
-        if (subOther(member.owner) && deferredCheck)
-          //Console.println(infoString(member) + " shadows1 " + infoString(other) " in " + clazz);//DEBUG
-          return
-        val parentSymbols = clazz.info.parents.map(_.typeSymbol)
-        if (parentSymbols exists (p => subOther(p) && subMember(p) && deferredCheck))
-          //Console.println(infoString(member) + " shadows2 " + infoString(other) + " in " + clazz);//DEBUG
-          return
-        if (parentSymbols forall (p => subOther(p) == subMember(p)))
-          //Console.println(infoString(member) + " shadows " + infoString(other) + " in " + clazz);//DEBUG
-          return
-      }
-
       /* Is the intersection between given two lists of overridden symbols empty? */
       def intersectionIsEmpty(syms1: Iterator[Symbol], syms2: Iterator[Symbol]) = {
         val set2 = syms2.toSet
@@ -408,7 +429,7 @@ object RefChecks {
         overrideError("cannot be used here - class definitions cannot be overridden")
       else if (!other.is(Deferred) && member.isClass)
         overrideError("cannot be used here - classes can only override abstract types")
-      else if (other.isEffectivelyFinal) // (1.2)
+      else if other.isEffectivelyFinal then // (1.2)
         overrideError(i"cannot override final member ${other.showLocated}")
       else if (member.is(ExtensionMethod) && !other.is(ExtensionMethod)) // (1.3)
         overrideError("is an extension method, cannot override a normal method")
@@ -429,9 +450,11 @@ object RefChecks {
           member.setFlag(Override)
         else if (member.isType && self.memberInfo(member) =:= self.memberInfo(other))
           () // OK, don't complain about type aliases which are equal
-        else if (member.owner != clazz && other.owner != clazz &&
-                 !(other.owner derivesFrom member.owner))
-          emitOverrideError(
+        else if member.owner != clazz
+             && other.owner != clazz
+             && !other.owner.derivesFrom(member.owner)
+        then
+          overrideError(
             s"$clazz inherits conflicting members:\n  "
               + infoStringWithLocation(other) + "  and\n  " + infoStringWithLocation(member)
               + "\n(Note: this can be resolved by declaring an override in " + clazz + ".)")
@@ -441,8 +464,7 @@ object RefChecks {
           overrideError("needs `override` modifier")
       else if (other.is(AbsOverride) && other.isIncompleteIn(clazz) && !member.is(AbsOverride))
         overrideError("needs `abstract override` modifiers")
-      else if (member.is(Override) && other.is(Accessor) &&
-        other.accessedFieldOrGetter.is(Mutable, butNot = Lazy))
+      else if member.is(Override) && other.is(Accessor, butNot = Deferred) && other.accessedFieldOrGetter.is(Mutable, butNot = Lazy) then
         overrideError("cannot override a mutable variable")
       else if (member.isAnyOverride &&
         !(member.owner.thisType.baseClasses exists (_ isSubClass other.owner)) &&
@@ -475,6 +497,8 @@ object RefChecks {
           overrideError(i"needs to be declared with @targetName(${"\""}${other.targetName}${"\""}) so that external names match")
         else
           overrideError("cannot have a @targetName annotation since external names would be different")
+      else if !other.isExperimental && member.hasAnnotation(defn.ExperimentalAnnot) then // (1.12)
+        overrideError("may not override non-experimental member")
       else
         checkOverrideDeprecated()
     }
@@ -490,46 +514,7 @@ object RefChecks {
           }*/
     }
 
-    val opc = new OverridingPairs.Cursor(clazz):
-
-      /** We declare a match if either we have a full match including matching names
-       *  or we have a loose match with different target name but the types are the same.
-       *  This leaves two possible sorts of discrepancies to be reported as errors
-       *  in `checkOveride`:
-       *
-       *    - matching names, target names, and signatures but different types
-       *    - matching names and types, but different target names
-       */
-      override def matches(sym1: Symbol, sym2: Symbol): Boolean =
-        !(sym1.owner.is(JavaDefined, butNot = Trait) && sym2.owner.is(JavaDefined, butNot = Trait)) && // javac already handles these checks
-        (sym1.isType || {
-          val sd1 = sym1.asSeenFrom(clazz.thisType)
-          val sd2 = sym2.asSeenFrom(clazz.thisType)
-          sd1.matchesLoosely(sd2)
-          && (sym1.hasTargetName(sym2.targetName)
-             || compatibleTypes(sym1, sd1.info, sym2, sd2.info))
-        })
-    end opc
-
-    while opc.hasNext do
-      checkOverride(opc.overriding, opc.overridden)
-      opc.next()
-
-    // The OverridingPairs cursor does assume that concrete overrides abstract
-    // We have to check separately for an abstract definition in a subclass that
-    // overrides a concrete definition in a superclass. E.g. the following (inspired
-    // from neg/i11130.scala) needs to be rejected as well:
-    //
-    //   class A { type T = B }
-    //   class B extends A { override type T }
-    for
-      dcl <- clazz.info.decls.iterator
-      if dcl.is(Deferred)
-      other <- dcl.allOverriddenSymbols
-      if !other.is(Deferred)
-    do
-      checkOverride(dcl, other)
-
+    OverridingPairsChecker(clazz, self).checkAll(checkOverride)
     printMixinOverrideErrors()
 
     // Verifying a concrete class has nothing unimplemented.
@@ -572,7 +557,8 @@ object RefChecks {
         def isConcrete(sym: Symbol) = sym.exists && !sym.isOneOf(NotConcrete)
         clazz.nonPrivateMembersNamed(mbr.name)
           .filterWithPredicate(
-            impl => isConcrete(impl.symbol) && mbrDenot.matchesLoosely(impl))
+            impl => isConcrete(impl.symbol)
+              && mbrDenot.matchesLoosely(impl, alwaysCompareTypes = true))
           .exists
 
       /** The term symbols in this class and its baseclasses that are
@@ -605,7 +591,7 @@ object RefChecks {
           // Grouping missing methods by the declaring class
           val regrouped = missingMethods.groupBy(_.owner).toList
           def membersStrings(members: List[Symbol]) =
-            members.sortBy(_.name.toString).map(_.showDcl + " = ???")
+            members.sortBy(_.name.toString).map(_.asSeenFrom(clazz.thisType).showDcl + " = ???")
 
           if (regrouped.tail.isEmpty)
             membersStrings(regrouped.head._2)
@@ -915,84 +901,6 @@ object RefChecks {
       }
   }
 
-  // Note: if a symbol has both @deprecated and @migration annotations and both
-  // warnings are enabled, only the first one checked here will be emitted.
-  // I assume that's a consequence of some code trying to avoid noise by suppressing
-  // warnings after the first, but I think it'd be better if we didn't have to
-  // arbitrarily choose one as more important than the other.
-  private def checkUndesiredProperties(sym: Symbol, pos: SrcPos)(using Context): Unit =
-    checkDeprecated(sym, pos)
-
-    val xMigrationValue = ctx.settings.Xmigration.value
-    if xMigrationValue != NoScalaVersion then
-      checkMigration(sym, pos, xMigrationValue)
-
-
-  /** If @deprecated is present, and the point of reference is not enclosed
-   * in either a deprecated member or a scala bridge method, issue a warning.
-   */
-  private def checkDeprecated(sym: Symbol, pos: SrcPos)(using Context): Unit =
-
-    /** is the owner an enum or its companion and also the owner of sym */
-    def isEnumOwner(owner: Symbol)(using Context) =
-      // pre: sym is an enumcase
-      if owner.isEnumClass then owner.companionClass eq sym.owner
-      else if owner.is(ModuleClass) && owner.companionClass.isEnumClass then owner eq sym.owner
-      else false
-
-    def isDeprecatedOrEnum(owner: Symbol)(using Context) =
-      // pre: sym is an enumcase
-      owner.isDeprecated
-      || isEnumOwner(owner)
-
-    /**Scan the chain of outer declaring scopes from the current context
-     * a deprecation warning will be skipped if one the following holds
-     * for a given declaring scope:
-     * - the symbol associated with the scope is also deprecated.
-     * - if and only if `sym` is an enum case, the scope is either
-     *   a module that declares `sym`, or the companion class of the
-     *   module that declares `sym`.
-     */
-    def skipWarning(using Context) =
-      ctx.owner.ownersIterator.exists(if sym.isEnumCase then isDeprecatedOrEnum else _.isDeprecated)
-
-    for annot <- sym.getAnnotation(defn.DeprecatedAnnot) do
-      if !skipWarning then
-        val msg = annot.argumentConstant(0).map(": " + _.stringValue).getOrElse("")
-        val since = annot.argumentConstant(1).map(" since " + _.stringValue).getOrElse("")
-        report.deprecationWarning(s"${sym.showLocated} is deprecated${since}${msg}", pos)
-
-  /** If @migration is present (indicating that the symbol has changed semantics between versions),
-   *  emit a warning.
-   */
-  private def checkMigration(sym: Symbol, pos: SrcPos, xMigrationValue: ScalaVersion)(using Context): Unit =
-    for annot <- sym.getAnnotation(defn.MigrationAnnot) do
-      val migrationVersion = ScalaVersion.parse(annot.argumentConstant(1).get.stringValue)
-      migrationVersion match
-        case Success(symVersion) if xMigrationValue < symVersion =>
-          val msg = annot.argumentConstant(0).get.stringValue
-          report.warning(SymbolChangedSemanticsInVersion(sym, symVersion, msg), pos)
-        case Failure(ex) =>
-          report.warning(SymbolHasUnparsableVersionNumber(sym, ex.getMessage), pos)
-        case _ =>
-
-  /** Check that a deprecated val or def does not override a
-   *  concrete, non-deprecated method.  If it does, then
-   *  deprecation is meaningless.
-   */
-  private def checkDeprecatedOvers(tree: Tree)(using Context): Unit = {
-    val symbol = tree.symbol
-    if (symbol.isDeprecated) {
-      val concrOvers =
-        symbol.allOverriddenSymbols.filter(sym =>
-          !sym.isDeprecated && !sym.is(Deferred))
-      if (!concrOvers.isEmpty)
-        report.deprecationWarning(
-          symbol.toString + " overrides concrete, non-deprecated symbol(s):" +
-            concrOvers.map(_.name).mkString("    ", ", ", ""), tree.srcPos)
-    }
-  }
-
   /** Check that we do not "override" anything with a private method
    *  or something that becomes a private method. According to the Scala
    *  modeling this is non-sensical since private members don't override.
@@ -1005,7 +913,7 @@ object RefChecks {
    */
   def checkNoPrivateOverrides(tree: Tree)(using Context): Unit =
     val sym = tree.symbol
-    if sym.owner.isClass
+    if sym.maybeOwner.isClass
        && sym.is(Private)
        && (sym.isOneOf(MethodOrLazyOrMutable) || !sym.is(Local)) // in these cases we'll produce a getter later
        && !sym.isConstructor
@@ -1017,41 +925,52 @@ object RefChecks {
           report.error(i"private $sym cannot override ${other.showLocated}", sym.srcPos)
   end checkNoPrivateOverrides
 
-  type LevelAndIndex = immutable.Map[Symbol, (LevelInfo, Int)]
+  /** Check that unary method definition do not receive parameters.
+   *  They can only receive inferred parameters such as type parameters and implicit parameters.
+   */
+  def checkUnaryMethods(sym: Symbol)(using Context): Unit =
+    /** Check that the only term parameters are contextual or implicit */
+    def checkParameters(tpe: Type): Unit =
+      tpe match
+        case tpe: MethodType =>
+          if tpe.isImplicitMethod || tpe.isContextualMethod then
+            checkParameters(tpe.resType)
+          else
+            val what =
+              if tpe.paramNames.isEmpty then "empty parameter list.\n\nPossible fix: remove the `()` arguments."
+              else "parameters"
+            report.warning(s"unary_ method cannot take $what", sym.sourcePos)
+        case tpe: PolyType =>
+          checkParameters(tpe.resType)
+        case _ =>
+          // ok
 
-  class OptLevelInfo {
-    def levelAndIndex: LevelAndIndex = Map()
-    def enterReference(sym: Symbol, span: Span): Unit = ()
-  }
+    /** Skip leading type and contextual parameters, then skip the
+     *  self parameter, and finally check the parameter
+     */
+    def checkExtensionParameters(tpe: Type): Unit =
+      tpe match
+        case tpe: MethodType =>
+          assert(tpe.paramNames.length == 1)
+          if tpe.isContextualMethod then checkExtensionParameters(tpe.resType)
+          else checkParameters(tpe.resType)
+        case tpe: PolyType =>
+          checkExtensionParameters(tpe.resType)
+
+    def isUnaryPrefixName(name: Name) = name match
+      case name: SimpleName =>
+        name.startsWith("unary_") && nme.raw.isUnary(name.drop(6))
+      case _ =>
+        false
 
-  /** A class to help in forward reference checking */
-  class LevelInfo(outerLevelAndIndex: LevelAndIndex, stats: List[Tree])(using Context)
-  extends OptLevelInfo {
-    override val levelAndIndex: LevelAndIndex =
-      stats.foldLeft(outerLevelAndIndex, 0) {(mi, stat) =>
-        val (m, idx) = mi
-        val m1 = stat match {
-          case stat: MemberDef => m.updated(stat.symbol, (this, idx))
-          case _ => m
-        }
-        (m1, idx + 1)
-      }._1
-    var maxIndex: Int = Int.MinValue
-    var refSpan: Span = _
-    var refSym: Symbol = _
-
-    override def enterReference(sym: Symbol, span: Span): Unit =
-      if (sym.exists && sym.owner.isTerm)
-        levelAndIndex.get(sym) match {
-          case Some((level, idx)) if (level.maxIndex < idx) =>
-            level.maxIndex = idx
-            level.refSpan = span
-            level.refSym = sym
-          case _ =>
-        }
-  }
+    if isUnaryPrefixName(sym.name) then
+      if sym.is(Extension) || sym.name.is(ExtMethName) then
+        // if is method from `extension` or value class
+        checkExtensionParameters(sym.info)
+      else
+        checkParameters(sym.info)
 
-  val NoLevelInfo: RefChecks.OptLevelInfo = new OptLevelInfo()
+  end checkUnaryMethods
 
   /** Verify that references in the user-defined `@implicitNotFound` message are valid.
    *  (i.e. they refer to a type variable that really occurs in the signature of the annotated symbol.)
@@ -1173,44 +1092,28 @@ class RefChecks extends MiniPhase { thisPhase =>
 
   override def phaseName: String = RefChecks.name
 
-  // Needs to run after ElimRepeated for override checks involving varargs methods
-  override def runsAfter: Set[String] = Set(ElimRepeated.name)
-
-  private var LevelInfo: Store.Location[OptLevelInfo] = _
-  private def currentLevel(using Context): OptLevelInfo = ctx.store(LevelInfo)
-
-  override def initContext(ctx: FreshContext): Unit =
-    LevelInfo = ctx.addLocation(NoLevelInfo)
+  override def description: String = RefChecks.description
 
-  override def prepareForStats(trees: List[Tree])(using Context): Context =
-    if (ctx.owner.isTerm)
-      ctx.fresh.updateStore(LevelInfo, new LevelInfo(currentLevel.levelAndIndex, trees))
-    else ctx
+  override def runsAfter: Set[String] = Set(ElimRepeated.name)
+    // Needs to run after ElimRepeated for override checks involving varargs methods
 
   override def transformValDef(tree: ValDef)(using Context): ValDef = {
-    checkNoPrivateOverrides(tree)
-    checkDeprecatedOvers(tree)
-    val sym = tree.symbol
-    if (sym.exists && sym.owner.isTerm) {
-      tree.rhs match {
-        case Ident(nme.WILDCARD) => report.error(UnboundPlaceholderParameter(), sym.srcPos)
-        case _ =>
-      }
-      if (!sym.is(Lazy))
-        currentLevel.levelAndIndex.get(sym) match {
-          case Some((level, symIdx)) if symIdx <= level.maxIndex =>
-            report.error(ForwardReferenceExtendsOverDefinition(sym, level.refSym),
-              ctx.source.atSpan(level.refSpan))
+    if tree.symbol.exists then
+      checkNoPrivateOverrides(tree)
+      val sym = tree.symbol
+      if (sym.exists && sym.owner.isTerm) {
+        tree.rhs match {
+          case Ident(nme.WILDCARD) => report.error(UnboundPlaceholderParameter(), sym.srcPos)
           case _ =>
         }
-    }
+      }
     tree
   }
 
   override def transformDefDef(tree: DefDef)(using Context): DefDef = {
     checkNoPrivateOverrides(tree)
-    checkDeprecatedOvers(tree)
     checkImplicitNotFoundAnnotation.defDef(tree.symbol.denot)
+    checkUnaryMethods(tree.symbol)
     tree
   }
 
@@ -1229,43 +1132,6 @@ class RefChecks extends MiniPhase { thisPhase =>
       report.error(ex, tree.srcPos)
       tree
   }
-
-  override def transformIdent(tree: Ident)(using Context): Ident = {
-    checkUndesiredProperties(tree.symbol, tree.srcPos)
-    currentLevel.enterReference(tree.symbol, tree.span)
-    tree
-  }
-
-  override def transformSelect(tree: Select)(using Context): Select = {
-    checkUndesiredProperties(tree.symbol, tree.srcPos)
-    tree
-  }
-
-  override def transformApply(tree: Apply)(using Context): Apply = {
-    if (isSelfConstrCall(tree)) {
-      assert(currentLevel.isInstanceOf[LevelInfo], s"${ctx.owner}/" + i"$tree")
-      val level = currentLevel.asInstanceOf[LevelInfo]
-      if (level.maxIndex > 0) {
-        // An implementation restriction to avoid VerifyErrors and lazyvals mishaps; see SI-4717
-        report.debuglog("refsym = " + level.refSym)
-        report.error("forward reference not allowed from self constructor invocation",
-          ctx.source.atSpan(level.refSpan))
-      }
-    }
-    tree
-  }
-
-  override def transformNew(tree: New)(using Context): New = {
-    val tpe = tree.tpe
-    val sym = tpe.typeSymbol
-    checkUndesiredProperties(sym, tree.srcPos)
-    currentLevel.enterReference(sym, tree.span)
-    tpe.dealias.foreachPart {
-      case TermRef(_, s: Symbol) => currentLevel.enterReference(s, tree.span)
-      case _ =>
-    }
-    tree
-  }
 }
 
 /* todo: rewrite and re-enable
diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala
index 0406c98d790b..fe4feecd6100 100644
--- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala
@@ -39,9 +39,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
               if defn.SpecialClassTagClasses.contains(sym) then
                 classTag.select(sym.name.toTermName)
               else
-                val clsOfType = erasure(tp) match
-                  case JavaArrayType(elemType) => defn.ArrayOf(elemType)
-                  case etp => etp
+                val clsOfType = escapeJavaArray(erasure(tp))
                 classTag.select(nme.apply).appliedToType(tp).appliedTo(clsOf(clsOfType))
             tag.withSpan(span)
           case tp => EmptyTree
@@ -52,7 +50,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
     (formal, span) => formal.argInfos match {
       case arg1 :: arg2 :: Nil if !defn.isBottomClass(arg2.typeSymbol) =>
         val tp1 = fullyDefinedType(arg1, "TypeTest argument", span)
-        val tp2 = fullyDefinedType(arg2, "TypeTest argument", span)
+        val tp2 = fullyDefinedType(arg2, "TypeTest argument", span).normalized
         val sym2 = tp2.typeSymbol
         if tp1 <:< tp2 then
           // optimization when we know the typetest will always succeed
@@ -72,7 +70,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
               ref(defn.NoneModule))
           }
           val tpe = MethodType(List(nme.s))(_ => List(tp1), mth => defn.OptionClass.typeRef.appliedTo(mth.newParamRef(0) & tp2))
-          val meth = newSymbol(ctx.owner, nme.ANON_FUN, Synthetic | Method, tpe, coord = span)
+          val meth = newAnonFun(ctx.owner, tpe, coord = span)
           val typeTestType = defn.TypeTestClass.typeRef.appliedTo(List(tp1, tp2))
           Closure(meth, tss => body(tss.head).changeOwner(ctx.owner, meth), targetType = typeTestType).withSpan(span)
       case _ =>
@@ -104,7 +102,8 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
     def canComparePredefinedClasses(cls1: ClassSymbol, cls2: ClassSymbol): Boolean =
 
       def cmpWithBoxed(cls1: ClassSymbol, cls2: ClassSymbol) =
-        cls2 == defn.boxedType(cls1.typeRef).symbol
+        cls2 == defn.NothingClass
+        || cls2 == defn.boxedType(cls1.typeRef).symbol
         || cls1.isNumericValueClass && cls2.derivesFrom(defn.BoxedNumberClass)
 
       if cls1.isPrimitiveValueClass then
@@ -131,7 +130,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
       else if cls2 == defn.NullClass then
         cls1.derivesFrom(defn.ObjectClass)
       else
-        false
+        cls1 == defn.NothingClass || cls2 == defn.NothingClass
     end canComparePredefinedClasses
 
     /** Some simulated `CanEqual` instances for predefined types. It's more efficient
@@ -160,13 +159,12 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
 
     def success(t: Tree) =
       New(defn.ValueOfClass.typeRef.appliedTo(t.tpe), t :: Nil).withSpan(span)
-
     formal.argInfos match
       case arg :: Nil =>
-        fullyDefinedType(arg.dealias, "ValueOf argument", span).normalized match
+        fullyDefinedType(arg, "ValueOf argument", span).normalized.dealias match
           case ConstantType(c: Constant) =>
             success(Literal(c))
-          case TypeRef(_, sym) if sym == defn.UnitClass =>
+          case tp: TypeRef if tp.isRef(defn.UnitClass) =>
             success(Literal(Constant(())))
           case n: TermRef =>
             success(ref(n))
@@ -219,6 +217,12 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
       case _ => true
     loop(formal)
 
+  private def checkRefinement(formal: Type, name: TypeName, expected: Type, span: Span)(using Context): Unit =
+    val actual = formal.lookupRefined(name)
+    if actual.exists && !(expected =:= actual)
+    then report.error(
+      em"$name mismatch, expected: $expected, found: $actual.", ctx.source.atSpan(span))
+
   private def mkMirroredMonoType(mirroredType: HKTypeLambda)(using Context): Type =
     val monoMap = new TypeMap:
       def apply(t: Type) = t match
@@ -245,25 +249,19 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
           val cls = mirroredType.classSymbol
           val accessors = cls.caseAccessors.filterNot(_.isAllOf(PrivateLocal))
           val elemLabels = accessors.map(acc => ConstantType(Constant(acc.name.toString)))
+          val nestedPairs = TypeOps.nestedPairs(accessors.map(mirroredType.resultType.memberInfo(_).widenExpr))
           val (monoType, elemsType) = mirroredType match
             case mirroredType: HKTypeLambda =>
-              def accessorType(acc: Symbol) =
-                if cls.typeParams.hasSameLengthAs(mirroredType.paramRefs) then
-                  acc.info.subst(cls.typeParams, mirroredType.paramRefs)
-                else
-                  acc.info
-              val elems =
-                mirroredType.derivedLambdaType(
-                  resType = TypeOps.nestedPairs(accessors.map(accessorType))
-                )
-              (mkMirroredMonoType(mirroredType), elems)
+              (mkMirroredMonoType(mirroredType), mirroredType.derivedLambdaType(resType = nestedPairs))
             case _ =>
-              val elems = TypeOps.nestedPairs(accessors.map(mirroredType.memberInfo(_).widenExpr))
-              (mirroredType, elems)
+              (mirroredType, nestedPairs)
+          val elemsLabels = TypeOps.nestedPairs(elemLabels)
+          checkRefinement(formal, tpnme.MirroredElemTypes, elemsType, span)
+          checkRefinement(formal, tpnme.MirroredElemLabels, elemsLabels, span)
           val mirrorType =
             mirrorCore(defn.Mirror_ProductClass, monoType, mirroredType, cls.name, formal)
               .refinedWith(tpnme.MirroredElemTypes, TypeAlias(elemsType))
-              .refinedWith(tpnme.MirroredElemLabels, TypeAlias(TypeOps.nestedPairs(elemLabels)))
+              .refinedWith(tpnme.MirroredElemLabels, TypeAlias(elemsLabels))
           val mirrorRef =
             if (cls.is(Scala2x)) anonymousMirror(monoType, ExtendsProductMirror, span)
             else companionPath(mirroredType, span)
@@ -273,18 +271,18 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
 
   private def sumMirror(mirroredType: Type, formal: Type, span: Span)(using Context): Tree =
     val cls = mirroredType.classSymbol
-    val useCompanion = cls.useCompanionAsMirror
+    val useCompanion = cls.useCompanionAsSumMirror
 
     if cls.isGenericSum(if useCompanion then cls.linkedClass else ctx.owner) then
       val elemLabels = cls.children.map(c => ConstantType(Constant(c.name.toString)))
 
       def solve(sym: Symbol): Type = sym match
-        case caseClass: ClassSymbol =>
-          assert(caseClass.is(Case))
-          if caseClass.is(Module) then
-            caseClass.sourceModule.termRef
+        case childClass: ClassSymbol =>
+          assert(childClass.isOneOf(Case | Sealed))
+          if childClass.is(Module) then
+            childClass.sourceModule.termRef
           else
-            caseClass.primaryConstructor.info match
+            childClass.primaryConstructor.info match
               case info: PolyType =>
                 // Compute the the full child type by solving the subtype constraint
                 // `C[X1, ..., Xn] <: P`, where
@@ -301,13 +299,13 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
                     case tp => tp
                   resType <:< target
                   val tparams = poly.paramRefs
-                  val variances = caseClass.typeParams.map(_.paramVarianceSign)
+                  val variances = childClass.typeParams.map(_.paramVarianceSign)
                   val instanceTypes = tparams.lazyZip(variances).map((tparam, variance) =>
                     TypeComparer.instanceType(tparam, fromBelow = variance < 0))
                   resType.substParams(poly, instanceTypes)
-                instantiate(using ctx.fresh.setExploreTyperState().setOwner(caseClass))
+                instantiate(using ctx.fresh.setExploreTyperState().setOwner(childClass))
               case _ =>
-                caseClass.typeRef
+                childClass.typeRef
         case child => child.termRef
       end solve
 
@@ -322,9 +320,9 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
           (mirroredType, elems)
 
       val mirrorType =
-          mirrorCore(defn.Mirror_SumClass, monoType, mirroredType, cls.name, formal)
-          .refinedWith(tpnme.MirroredElemTypes, TypeAlias(elemsType))
-          .refinedWith(tpnme.MirroredElemLabels, TypeAlias(TypeOps.nestedPairs(elemLabels)))
+        mirrorCore(defn.Mirror_SumClass, monoType, mirroredType, cls.name, formal)
+            .refinedWith(tpnme.MirroredElemTypes, TypeAlias(elemsType))
+            .refinedWith(tpnme.MirroredElemLabels, TypeAlias(TypeOps.nestedPairs(elemLabels)))
       val mirrorRef =
         if useCompanion then companionPath(mirroredType, span)
         else anonymousMirror(monoType, ExtendsSumMirror, span)
@@ -337,7 +335,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
       (using Context): Tree =
     if checkFormal(formal) then
       formal.member(tpnme.MirroredType).info match
-        case TypeBounds(mirroredType, _) => synth(mirroredType.stripTypeVar, formal, span)
+        case TypeBounds(mirroredType, _) => synth(TypeOps.stripTypeVars(mirroredType), formal, span)
         case other => EmptyTree
     else EmptyTree
 
@@ -367,6 +365,119 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
           synthesizedSumMirror(formal, span)
       case _ => EmptyTree
 
+  private def escapeJavaArray(tp: Type)(using Context): Type = tp match
+    case JavaArrayType(elemTp) => defn.ArrayOf(escapeJavaArray(elemTp))
+    case _                     => tp
+
+  private enum ManifestKind:
+    case Full, Opt, Clss
+
+    /** The kind that should be used for an array element, if we are `OptManifest` then this
+     *  prevents wildcards arguments of Arrays being converted to `NoManifest`
+     */
+    def arrayElem = if this == Full then this else Clss
+
+  end ManifestKind
+
+  /** Manifest factory that does enough to satisfy the equality semantics for
+   *  - `scala.reflect.OptManifest` (only runtime class is recorded)
+   *  - `scala.reflect.Manifest` (runtime class of arguments are recorded, with wildcard upper bounds wrapped)
+   *  however,`toString` may be different.
+   *
+   * There are some differences to `ClassTag`,
+   *  e.g. in Scala 2 `manifest[Int @unchecked]` will fail, but `classTag[Int @unchecked]` succeeds.
+   */
+  private def manifestFactoryOf(kind: ManifestKind): SpecialHandler = (formal, span) =>
+    import ManifestKind.*
+
+    /* Creates a tree that calls the factory method called constructor in object scala.reflect.Manifest */
+    def factoryManifest(constructor: TermName, tparg: Type, args: Tree*): Tree =
+      if args.contains(EmptyTree) then
+        EmptyTree
+      else
+        val factory = if kind == Full then defn.ManifestFactoryModule else defn.ClassManifestFactoryModule
+        applyOverloaded(ref(factory), constructor, args.toList, tparg :: Nil, Types.WildcardType).withSpan(span)
+
+    /* Creates a tree representing one of the singleton manifests.*/
+    def singletonManifest(name: TermName) =
+      ref(defn.ManifestFactoryModule).select(name).ensureApplied.withSpan(span)
+
+    def synthArrayManifest(elemTp: Type, kind: ManifestKind, topLevel: Boolean): Tree =
+      factoryManifest(nme.arrayType, elemTp, synthesize(elemTp, kind.arrayElem, topLevel))
+
+    /** manifests generated from wildcards can not equal Int,Long,Any,AnyRef,AnyVal etc,
+     *  so we wrap their upper bound.
+     */
+    def synthWildcardManifest(tp: Manifestable, hi: Type, topLevel: Boolean): Tree =
+      factoryManifest(nme.wildcardType, tp, singletonManifest(nme.Nothing), synthesize(hi, Full, topLevel))
+
+    /** `Nil` if not full manifest */
+    def synthArgManifests(tp: Manifestable): List[Tree] = tp match
+      case AppliedType(_, args) if kind == Full && tp.typeSymbol.isClass =>
+        args.map(synthesize(_, Full, topLevel = false))
+      case _ =>
+        Nil
+
+    /** This type contains all top-level types supported by Scala 2's algorithm */
+    type Manifestable =
+      ThisType | TermRef | ConstantType | TypeRef | AppliedType | TypeBounds | RecType | RefinedType | AndType
+
+    def canManifest(tp: Manifestable, topLevel: Boolean) =
+      val sym = tp.typeSymbol
+      !sym.isAbstractType
+      && hasStableErasure(tp)
+      && !(topLevel && defn.isBottomClassAfterErasure(sym))
+
+    /** adapted from `syntheticClassTag` */
+    def synthManifest(tp: Manifestable, kind: ManifestKind, topLevel: Boolean) = tp match
+      case defn.ArrayOf(elemTp)              => synthArrayManifest(elemTp, kind, topLevel)
+      case TypeBounds(_, hi) if kind == Full => synthWildcardManifest(tp, hi, topLevel)
+
+      case tp if canManifest(tp, topLevel) =>
+        val sym = tp.typeSymbol
+        if sym.isPrimitiveValueClass || defn.SpecialManifestClasses.contains(sym) then
+          singletonManifest(sym.name.toTermName)
+        else
+          erasure(tp) match
+            case JavaArrayType(elemTp) =>
+              synthArrayManifest(escapeJavaArray(elemTp), kind, topLevel)
+
+            case etp =>
+              val clsArg = clsOf(etp).asInstance(defn.ClassType(tp)) // cast needed to resolve overloading
+              factoryManifest(nme.classType, tp, (clsArg :: synthArgManifests(tp))*)
+
+      case _ =>
+        EmptyTree
+
+    end synthManifest
+
+    def manifestOfType(tp0: Type, kind: ManifestKind, topLevel: Boolean): Tree = tp0.dealiasKeepAnnots match
+      case tp1: Manifestable => synthManifest(tp1, kind, topLevel)
+      case tp1               => EmptyTree
+
+    def synthesize(tp: Type, kind: ManifestKind, topLevel: Boolean): Tree =
+      manifestOfType(tp, kind, topLevel) match
+        case EmptyTree if kind == Opt => ref(defn.NoManifestModule)
+        case result                   => result
+
+    formal.argInfos match
+      case arg :: Nil =>
+        val manifest = synthesize(fullyDefinedType(arg, "Manifest argument", span), kind, topLevel = true)
+        if manifest != EmptyTree then
+          report.deprecationWarning(
+            i"""Compiler synthesis of Manifest and OptManifest is deprecated, instead
+               |replace with the type `scala.reflect.ClassTag[$arg]`.
+               |Alternatively, consider using the new metaprogramming features of Scala 3,
+               |see https://docs.scala-lang.org/scala3/reference/metaprogramming.html""", ctx.source.atSpan(span))
+        manifest
+      case _ =>
+        EmptyTree
+
+  end manifestFactoryOf
+
+  val synthesizedManifest: SpecialHandler = manifestFactoryOf(ManifestKind.Full)
+  val synthesizedOptManifest: SpecialHandler = manifestFactoryOf(ManifestKind.Opt)
+
   val specialHandlers = List(
     defn.ClassTagClass        -> synthesizedClassTag,
     defn.TypeTestClass        -> synthesizedTypeTest,
@@ -374,7 +485,10 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
     defn.ValueOfClass         -> synthesizedValueOf,
     defn.Mirror_ProductClass  -> synthesizedProductMirror,
     defn.Mirror_SumClass      -> synthesizedSumMirror,
-    defn.MirrorClass          -> synthesizedMirror)
+    defn.MirrorClass          -> synthesizedMirror,
+    defn.ManifestClass        -> synthesizedManifest,
+    defn.OptManifestClass     -> synthesizedOptManifest,
+  )
 
   def tryAll(formal: Type, span: Span)(using Context): Tree =
     def recur(handlers: SpecialHandlers): Tree = handlers match
diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala
index 8ce74a6061cc..3dcec413540f 100644
--- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala
+++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala
@@ -4,7 +4,7 @@ package typer
 
 import core._
 import ast._
-import Contexts._, Constants._, Types._, Symbols._, Names._, Flags._, Decorators._
+import Contexts._, ContextOps._, Constants._, Types._, Symbols._, Names._, Flags._, Decorators._
 import ErrorReporting._, Annotations._, Denotations._, SymDenotations._, StdNames._
 import util.Spans._
 import util.SrcPos
@@ -12,7 +12,6 @@ import config.Printers.typr
 import ast.Trees._
 import NameOps._
 import ProtoTypes._
-import Inferencing.couldInstantiateTypeVar
 import collection.mutable
 import reporting._
 import Checking.{checkNoPrivateLeaks, checkNoWildcard}
@@ -117,12 +116,15 @@ trait TypeAssigner {
 
   /** The type of the selection `tree`, where `qual1` is the typed qualifier part. */
   def selectionType(tree: untpd.RefTree, qual1: Tree)(using Context): Type =
-    var qualType = qual1.tpe.widenIfUnstable
-    if !qualType.hasSimpleKind && tree.name != nme.CONSTRUCTOR then
-      // constructors are selected on typeconstructor, type arguments are passed afterwards
-      qualType = errorType(em"$qualType takes type parameters", qual1.srcPos)
-    else if !qualType.isInstanceOf[TermType] then
-      qualType = errorType(em"$qualType is illegal as a selection prefix", qual1.srcPos)
+    val qualType0 = qual1.tpe.widenIfUnstable
+    val qualType =
+      if !qualType0.hasSimpleKind && tree.name != nme.CONSTRUCTOR then
+        // constructors are selected on typeconstructor, type arguments are passed afterwards
+        errorType(em"$qualType0 takes type parameters", qual1.srcPos)
+      else if !qualType0.isInstanceOf[TermType] && !qualType0.isError then
+        errorType(em"$qualType0 is illegal as a selection prefix", qual1.srcPos)
+      else
+        qualType0
 
     def arrayElemType = qual1.tpe.widen match
       case JavaArrayType(elemtp) => elemtp
@@ -143,7 +145,12 @@ trait TypeAssigner {
       // this is exactly what Erasure will do.
       case _ =>
         val pre = maybeSkolemizePrefix(qualType, name)
-        val mbr = qualType.findMember(name, pre)
+        val mbr =
+          if ctx.isJava then
+            ctx.javaFindMember(name, pre)
+          else
+            qualType.findMember(name, pre)
+
         if reallyExists(mbr) then qualType.select(name, mbr)
         else if qualType.isErroneous || name.toTermName == nme.ERROR then UnspecifiedErrorType
         else NoType
@@ -155,7 +162,7 @@ trait TypeAssigner {
     val qualType = qual.tpe.widenIfUnstable
     def kind = if tree.isType then "type" else "value"
     val foundWithoutNull = qualType match
-      case OrNull(qualType1) =>
+      case OrNull(qualType1) if qualType1 <:< defn.ObjectType =>
         val name = tree.name
         val pre = maybeSkolemizePrefix(qualType1, name)
         reallyExists(qualType1.findMember(name, pre))
@@ -228,19 +235,18 @@ trait TypeAssigner {
         else errorType("not a legal qualifying class for this", tree.srcPos))
   }
 
-  def assignType(tree: untpd.Super, qual: Tree, mixinClass: Symbol = NoSymbol)(using Context): Super = {
-    val mix = tree.mix
-    qual.tpe match {
-      case err: ErrorType => untpd.cpy.Super(tree)(qual, mix).withType(err)
+  def superType(qualType: Type, mix: untpd.Ident, mixinClass: Symbol, pos: SrcPos)(using Context) =
+    qualType match
+      case err: ErrorType => err
       case qtype @ ThisType(_) =>
         val cls = qtype.cls
         def findMixinSuper(site: Type): Type = site.parents filter (_.typeSymbol.name == mix.name) match {
           case p :: Nil =>
             p.typeConstructor
           case Nil =>
-            errorType(SuperQualMustBeParent(mix, cls), tree.srcPos)
+            errorType(SuperQualMustBeParent(mix, cls), pos)
           case p :: q :: _ =>
-            errorType("ambiguous parent class qualifier", tree.srcPos)
+            errorType("ambiguous parent class qualifier", pos)
         }
         val owntype =
           if (mixinClass.exists) mixinClass.appliedRef
@@ -250,9 +256,11 @@ trait TypeAssigner {
             val ps = cls.classInfo.parents
             if (ps.isEmpty) defn.AnyType else ps.reduceLeft((x: Type, y: Type) => x & y)
           }
-        tree.withType(SuperType(cls.thisType, owntype))
-    }
-  }
+        SuperType(cls.thisType, owntype)
+
+  def assignType(tree: untpd.Super, qual: Tree, mixinClass: Symbol = NoSymbol)(using Context): Super =
+    untpd.cpy.Super(tree)(qual, tree.mix)
+      .withType(superType(qual.tpe, tree.mix, mixinClass, tree.srcPos))
 
   /** Substitute argument type `argType` for parameter `pref` in type `tp`,
    *  skolemizing the argument type if it is not stable and `pref` occurs in `tp`.
@@ -340,7 +348,17 @@ trait TypeAssigner {
             }
           }
           else {
-            val argTypes = args.tpes
+            // Make sure arguments don't contain the type `pt` itself.
+            // make a copy of the argument if that's the case.
+            // This is done to compensate for the fact that normally every
+            // reference to a polytype would have to be a fresh copy of that type,
+            // but we want to avoid that because it would increase compilation cost.
+            // See pos/i6682a.scala for a test case where the defensive copying matters.
+            val ensureFresh = new TypeMap:
+              def apply(tp: Type) = mapOver(
+                if tp eq pt then pt.newLikeThis(pt.paramNames, pt.paramInfos, pt.resType)
+                else tp)
+            val argTypes = args.tpes.mapConserve(ensureFresh)
             if (sameLength(argTypes, paramNames)) pt.instantiate(argTypes)
             else wrongNumberOfTypeArgs(fn.tpe, pt.typeParams, args, tree.srcPos)
           }
diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala
index 4433ac7dee90..09acde2ac845 100644
--- a/compiler/src/dotty/tools/dotc/typer/Typer.scala
+++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala
@@ -39,7 +39,8 @@ import annotation.tailrec
 import Implicits._
 import util.Stats.record
 import config.Printers.{gadts, typr, debug}
-import config.Feature._
+import config.Feature
+import config.Feature.{sourceVersion, migrateTo3}
 import config.SourceVersion._
 import rewrites.Rewrites.patch
 import NavigateAST._
@@ -50,6 +51,8 @@ import Nullables._
 import NullOpsDecorator._
 import config.Config
 
+import scala.annotation.constructorOnly
+
 object Typer {
 
   /** The precedence of bindings which determines which of several bindings will be
@@ -66,6 +69,9 @@ object Typer {
     if (!tree.isEmpty && !tree.isInstanceOf[untpd.TypedSplice] && ctx.typerState.isGlobalCommittable)
       assert(tree.span.exists, i"position not set for $tree # ${tree.uniqueId} of ${tree.getClass} in ${tree.source}")
 
+  /** An attachment for GADT constraints that were inferred for a pattern. */
+  val InferredGadtConstraints = new Property.StickyKey[core.GadtConstraint]
+
   /** A context property that indicates the owner of any expressions to be typed in the context
    *  if that owner is different from the context's owner. Typically, a context with a class
    *  as owner would have a local dummy as ExprOwner value.
@@ -77,6 +83,12 @@ object Typer {
    */
   private val InsertedApply = new Property.Key[Unit]
 
+  /** An attachment on a result of an implicit conversion or extension method
+   *  that was added by tryInsertImplicitOnQualifier. Needed to prevent infinite
+   *  expansions in error cases (e.g. in fuzzy/i9293.scala).
+   */
+  private val InsertedImplicitOnQualifier = new Property.Key[Unit]
+
   /** An attachment on a tree `t` occurring as part of a `t()` where
    *  the `()` was dropped by the Typer.
    */
@@ -100,7 +112,11 @@ object Typer {
     tree.putAttachment(HiddenSearchFailure,
       fail :: tree.attachmentOrElse(HiddenSearchFailure, Nil))
 }
-class Typer extends Namer
+/** Typecheck trees, the main entry point is `typed`.
+ *
+ *  @param nestingLevel The nesting level of the `scope` of this Typer.
+ */
+class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer
                with TypeAssigner
                with Applications
                with Implicits
@@ -115,6 +131,13 @@ class Typer extends Namer
   import tpd.{cpy => _, _}
   import untpd.cpy
 
+  /** The scope of the typer.
+   *  For nested typers (cf `Namer#nestedTyper`), this is a place parameters are
+   *  entered during completion and where they survive until typechecking. A
+   *  context with this typer also has this scope.
+   */
+  val scope: MutableScope = newScope(nestingLevel)
+
   /** A temporary data item valid for a single typed ident:
    *  The set of all root import symbols that have been
    *  encountered as a qualifier of an import so far.
@@ -131,7 +154,7 @@ class Typer extends Namer
   private var foundUnderScala2: Type = NoType
 
   // Overridden in derived typers
-  def newLikeThis: Typer = new Typer
+  def newLikeThis(nestingLevel: Int): Typer = new Typer(nestingLevel)
 
   /** Find the type of an identifier with given `name` in given context `ctx`.
    *   @param name       the name of the identifier
@@ -149,19 +172,32 @@ class Typer extends Namer
       if !suppressErrors then report.error(msg, pos)
 
     /** A symbol qualifies if it really exists and is not a package class.
-     *  In addition, if we are in a constructor of a pattern, we ignore all definitions
-     *  which are methods and not accessors (note: if we don't do that
-     *  case x :: xs in class List would return the :: method).
-     *
      *  Package classes are part of their parent's scope, because otherwise
      *  we could not reload them via `_.member`. On the other hand, accessing a
      *  package as a type from source is always an error.
+     *
+     *  In addition:
+     *    - if we are in a constructor of a pattern, we ignore all definitions
+     *      which are methods and not accessors (note: if we don't do that
+     *      case x :: xs in class List would return the :: method).
+     *    - Members of the empty package can be accessed only from within the empty package.
+     *      Note: it would be cleaner to never nest package definitions in empty package definitions,
+     *      but then we'd have to give up the fiction that a compilation unit consists of
+     *      a single tree (because a source file may have both toplevel classes which go
+     *      into the empty package and package definitions, which would have to stay outside).
+     *      Since the principle of a single tree per compilation unit is assumed by many
+     *      tools, we did not want to take that step.
      */
     def qualifies(denot: Denotation): Boolean =
       reallyExists(denot)
       && (!pt.isInstanceOf[UnapplySelectionProto]
           || denot.hasAltWith(sd => !sd.symbol.is(Method, butNot = Accessor)))
       && !denot.symbol.is(PackageClass)
+      && {
+        var owner = denot.symbol.maybeOwner
+        if owner.isPackageObject then owner = owner.owner
+        !owner.isEmptyPackage || ctx.owner.enclosingPackageClass.isEmptyPackage
+      }
 
     /** Find the denotation of enclosing `name` in given context `ctx`.
      *  @param previous    A denotation that was found in a more deeply nested scope,
@@ -275,7 +311,7 @@ class Typer extends Namer
        */
       def isDefinedInCurrentUnit(denot: Denotation)(using Context): Boolean = denot match {
         case MultiDenotation(d1, d2) => isDefinedInCurrentUnit(d1) || isDefinedInCurrentUnit(d2)
-        case denot: SingleDenotation => denot.symbol.source == ctx.compilationUnit.source
+        case denot: SingleDenotation => ctx.compilationUnit != null && denot.symbol.source == ctx.compilationUnit.source
       }
 
       /** Is `denot` the denotation of a self symbol? */
@@ -300,7 +336,7 @@ class Typer extends Namer
            *  A package object should always be skipped if we look for a term.
            *  That way we make sure we consider all overloaded alternatives of
            *  a definition, even if they are in different source files.
-           *  If we are looking for a type, a package object should ne skipped
+           *  If we are looking for a type, a package object should be skipped
            *  only if it does not contain opaque definitions. Package objects
            *  with opaque definitions are significant, since opaque aliases
            *  are only seen if the prefix is the this-type of the package object.
@@ -364,7 +400,9 @@ class Typer extends Namer
             if (qualifies(defDenot)) {
               val found =
                 if (isSelfDenot(defDenot)) curOwner.enclosingClass.thisType
-                else {
+                else if (ctx.isJava && defDenot.symbol.isStatic) {
+                  defDenot.symbol.namedType
+                } else {
                   val effectiveOwner =
                     if (curOwner.isTerm && defDenot.symbol.maybeOwner.isType)
                       // Don't mix NoPrefix and thisType prefixes, since type comparer
@@ -377,7 +415,8 @@ class Typer extends Namer
               if !curOwner.is(Package) || isDefinedInCurrentUnit(defDenot) then
                 result = checkNewOrShadowed(found, Definition) // no need to go further out, we found highest prec entry
                 found match
-                  case found: NamedType if curOwner.isClass && isInherited(found.denot) =>
+                  case found: NamedType
+                  if curOwner.isClass && isInherited(found.denot) && !ctx.compilationUnit.isJava =>
                     checkNoOuterDefs(found.denot, ctx, ctx)
                   case _ =>
               else
@@ -459,6 +498,8 @@ class Typer extends Namer
     if ctx.mode.is(Mode.Pattern) then
       if name == nme.WILDCARD then
         return tree.withType(pt)
+      if name == tpnme.WILDCARD then
+        return tree.withType(defn.AnyType)
       if untpd.isVarPattern(tree) && name.isTermName then
         return typed(desugar.patternVar(tree), pt)
     else if ctx.mode.is(Mode.QuotedPattern) then
@@ -571,13 +612,17 @@ class Typer extends Namer
       checkLegalValue(select, pt)
       ConstFold(select)
     else if couldInstantiateTypeVar(qual.tpe.widen) then
-      // try again with more defined qualifier type
+       // there's a simply visible type variable in the result; try again with a more defined qualifier type
+       // There's a second trial where we try to instantiate all type variables in `qual.tpe.widen`,
+       // but that is done only after we search for extension methods or conversions.
       typedSelect(tree, pt, qual)
     else
       val tree1 = tryExtensionOrConversion(
-          tree, pt, IgnoredProto(pt), qual, ctx.typerState.ownedVars, this, privateOK = true)
+          tree, pt, IgnoredProto(pt), qual, ctx.typerState.ownedVars, this, inSelect = true)
       if !tree1.isEmpty then
         tree1
+      else if canDefineFurther(qual.tpe.widen) then
+        typedSelect(tree, pt, qual)
       else if qual.tpe.derivesFrom(defn.DynamicClass)
         && selName.isTermName && !isDynamicExpansion(tree)
       then
@@ -602,7 +647,7 @@ class Typer extends Namer
       val qual = typedExpr(tree.qualifier, shallowSelectionProto(tree.name, pt, this))
       val qual1 = if Nullables.unsafeNullsEnabled then
         qual.tpe match {
-          case OrNull(tpe1) =>
+          case OrNull(tpe1) if tpe1 <:< defn.ObjectType =>
             qual.cast(AndType(qual.tpe, tpe1))
           case tp =>
             if tp.isNullType
@@ -695,14 +740,18 @@ class Typer extends Namer
       else if (target.isRef(defn.FloatClass))
         tree.kind match {
           case Whole(16) => // cant parse hex literal as float
-          case _         => return lit(floatFromDigits(digits))
+          case _         =>
+            val float = floatFromDigits(digits)
+            if digits.toIntOption.exists(_ != float.toInt) then
+              report.warning(LossyWideningConstantConversion(defn.IntType, target), tree.srcPos)
+            return lit(float)
         }
       else if (target.isRef(defn.DoubleClass))
         tree.kind match {
           case Whole(16) => // cant parse hex literal as double
           case _         => return lit(doubleFromDigits(digits))
         }
-      else if genericNumberLiteralsEnabled
+      else if Feature.genericNumberLiteralsEnabled
           && target.isValueType && isFullyDefined(target, ForceDegree.none)
       then
         // If expected type is defined with a FromDigits instance, use that one
@@ -771,6 +820,9 @@ class Typer extends Namer
         typed(cpy.Block(tree)(clsDef :: Nil, New(Ident(x), Nil)), pt)
       case _ =>
         var tpt1 = typedType(tree.tpt)
+        val tsym = tpt1.tpe.underlyingClassRef(refinementOK = false).typeSymbol
+        if tsym.is(Package) then
+          report.error(em"$tsym cannot be instantiated", tpt1.srcPos)
         tpt1 = tpt1.withType(ensureAccessible(tpt1.tpe, superAccess = false, tpt1.srcPos))
         tpt1 match {
           case AppliedTypeTree(_, targs) =>
@@ -804,11 +856,9 @@ class Typer extends Namer
       val underlyingTreeTpe =
         if (isRepeatedParamType(tpt)) TypeTree(defn.SeqType.appliedTo(pt :: Nil))
         else tpt
-
       val expr1 =
-        if (isRepeatedParamType(tpt)) tree.expr.withType(defn.SeqType.appliedTo(pt :: Nil))
-        else if (isWildcard) tree.expr.withType(tpt.tpe)
-        else typed(tree.expr, tpt.tpe.widenSkolem)
+        if isWildcard then tree.expr.withType(underlyingTreeTpe.tpe)
+        else typed(tree.expr, underlyingTreeTpe.tpe.widenSkolem)
       assignType(cpy.Typed(tree)(expr1, tpt), underlyingTreeTpe)
         .withNotNullInfo(expr1.notNullInfo)
     }
@@ -834,8 +884,10 @@ class Typer extends Namer
               // We need to make sure its type is no longer nullable
               expr0.castToNonNullable
           else expr0
-        val fromCls = if expr1.tpe.derivesFrom(defn.ArrayClass)
-          then defn.ArrayClass else defn.SeqClass
+        val fromCls =
+          if expr1.tpe.derivesFrom(defn.ArrayClass)
+          then defn.ArrayClass
+          else defn.SeqClass
         val tpt1 = TypeTree(expr1.tpe.widen.translateToRepeated(fromCls)).withSpan(tree.tpt.span)
         assignType(cpy.Typed(tree)(expr1, tpt1), tpt1)
       }
@@ -845,7 +897,7 @@ class Typer extends Namer
         wildName = nme.WILDCARD_STAR)
     }
     else {
-      def typedTpt = checkSimpleKinded(typedType(tree.tpt))
+      def typedTpt = checkSimpleKinded(typedType(tree.tpt, mapPatternBounds = true))
       def handlePattern: Tree = {
         val tpt1 = typedTpt
         if !ctx.isAfterTyper && pt != defn.ImplicitScrutineeTypeRef then
@@ -874,28 +926,32 @@ class Typer extends Namer
    *  exists, rewrite to `tt(e)`.
    *  @pre We are in pattern-matching mode (Mode.Pattern)
    */
-  def tryWithTypeTest(tree: Typed, pt: Type)(using Context): Tree = tree.tpt.tpe.dealias match {
-    case tref: TypeRef if !tref.symbol.isClass && !ctx.isAfterTyper && !(tref =:= pt) =>
-      def withTag(tpe: Type): Option[Tree] = {
-        require(ctx.mode.is(Mode.Pattern))
-        withoutMode(Mode.Pattern)(
-          inferImplicit(tpe, EmptyTree, tree.tpt.span)
-        ) match
-          case SearchSuccess(clsTag, _, _, _) =>
-            withMode(Mode.InTypeTest) {
-              Some(typed(untpd.Apply(untpd.TypedSplice(clsTag), untpd.TypedSplice(tree.expr)), pt))
-            }
-          case _ =>
-            None
-      }
-      val tag = withTag(defn.TypeTestClass.typeRef.appliedTo(pt, tref))
-          .orElse(withTag(defn.ClassTagClass.typeRef.appliedTo(tref)))
+  def tryWithTypeTest(tree: Typed, pt: Type)(using Context): Tree =
+    def withTag(tpe: Type): Option[Tree] = {
+      require(ctx.mode.is(Mode.Pattern))
+      withoutMode(Mode.Pattern)(
+        inferImplicit(tpe, EmptyTree, tree.tpt.span)
+      ) match
+        case SearchSuccess(clsTag, _, _, _) =>
+          withMode(Mode.InTypeTest) {
+            Some(typed(untpd.Apply(untpd.TypedSplice(clsTag), untpd.TypedSplice(tree.expr)), pt))
+          }
+        case _ =>
+          None
+    }
+    def tagged(tpe: Type) = {
+      val tag = withTag(defn.TypeTestClass.typeRef.appliedTo(pt, tpe))
+          .orElse(withTag(defn.ClassTagClass.typeRef.appliedTo(tpe)))
           .getOrElse(tree)
-      if tag.symbol.owner == defn.ClassTagClass && config.Feature.sourceVersion.isAtLeast(config.SourceVersion.future) then
+      if tag.symbol.maybeOwner == defn.ClassTagClass && config.Feature.sourceVersion.isAtLeast(config.SourceVersion.future) then
         report.warning("Use of `scala.reflect.ClassTag` for type testing may be unsound. Consider using `scala.reflect.TypeTest` instead.", tree.srcPos)
       tag
-    case _ => tree
-  }
+    }
+    tree.tpt.tpe.dealias match {
+      case tpe @ AppliedType(tref: TypeRef, _) if !tref.symbol.isClass && !ctx.isAfterTyper && !(tpe =:= pt) => tagged(tpe)
+      case tref: TypeRef if !tref.symbol.isClass && !ctx.isAfterTyper && !(tref =:= pt) => tagged(tref)
+      case _ => tree
+    }
 
 
   def typedNamedArg(tree: untpd.NamedArg, pt: Type)(using Context): NamedArg = {
@@ -1045,7 +1101,15 @@ class Typer extends Namer
         val expr1 = ascribeType(expr, pt)
         cpy.Block(block)(stats, expr1) withType expr1.tpe // no assignType here because avoid is redundant
       case _ =>
-        Typed(tree, TypeTree(pt.simplified))
+        val target = pt.simplified
+        val targetTpt = InferredTypeTree().withType(target)
+        if tree.tpe <:< target then Typed(tree, targetTpt)
+        else
+          // This case should not normally arise. It currently does arise in test cases
+          // pos/t4080b.scala and pos/i7067.scala. In that case, a type ascription is wrong
+          // and would not pass Ycheck. We have to use a cast instead. TODO: follow-up why
+          // the cases arise and eliminate them, if possible.
+          tree.cast(targetTpt)
     }
     def noLeaks(t: Tree): Boolean = escapingRefs(t, localSyms).isEmpty
     if (noLeaks(tree)) tree
@@ -1097,10 +1161,10 @@ class Typer extends Namer
    *     def double(x: Char): String = s"$x$x"
    *     "abc" flatMap double
    */
-  private def decomposeProtoFunction(pt: Type, defaultArity: Int, tree: untpd.Tree)(using Context): (List[Type], untpd.Tree) = {
+  private def decomposeProtoFunction(pt: Type, defaultArity: Int, pos: SrcPos)(using Context): (List[Type], untpd.Tree) = {
     def typeTree(tp: Type) = tp match {
-      case _: WildcardType => untpd.TypeTree()
-      case _ => untpd.TypeTree(tp)
+      case _: WildcardType => new untpd.InferredTypeTree()
+      case _ => untpd.InferredTypeTree(tp)
     }
     def interpolateWildcards = new TypeMap {
       def apply(t: Type): Type = t match
@@ -1108,33 +1172,73 @@ class Typer extends Namer
           newTypeVar(apply(bounds.orElse(TypeBounds.empty)).bounds)
         case _ => mapOver(t)
     }
-    val pt1 = pt.stripTypeVar.dealias
+
+    val pt1 = pt.stripTypeVar.dealias.normalized
     if (pt1 ne pt1.dropDependentRefinement)
        && defn.isContextFunctionType(pt1.nonPrivateMember(nme.apply).info.finalResultType)
     then
       report.error(
         i"""Implementation restriction: Expected result type $pt1
            |is a curried dependent context function type. Such types are not yet supported.""",
-        tree.srcPos)
+        pos)
     pt1 match {
-      case pt1 if defn.isNonRefinedFunction(pt1) =>
-        // if expected parameter type(s) are wildcards, approximate from below.
-        // if expected result type is a wildcard, approximate from above.
-        // this can type the greatest set of admissible closures.
-        (pt1.argTypesLo.init, typeTree(interpolateWildcards(pt1.argTypesHi.last)))
-      case SAMType(sam @ MethodTpe(_, formals, restpe)) =>
-        (formals,
-         if (sam.isResultDependent)
-           untpd.DependentTypeTree(syms => restpe.substParams(sam, syms.map(_.termRef)))
-         else
-           typeTree(restpe))
       case tp: TypeParamRef =>
-        decomposeProtoFunction(ctx.typerState.constraint.entry(tp).bounds.hi, defaultArity, tree)
-      case _ =>
-        (List.tabulate(defaultArity)(alwaysWildcardType), untpd.TypeTree())
+        decomposeProtoFunction(ctx.typerState.constraint.entry(tp).bounds.hi, defaultArity, pos)
+      case _ => pt1.findFunctionType match {
+        case pt1 if defn.isNonRefinedFunction(pt1) =>
+          // if expected parameter type(s) are wildcards, approximate from below.
+          // if expected result type is a wildcard, approximate from above.
+          // this can type the greatest set of admissible closures.
+          (pt1.argTypesLo.init, typeTree(interpolateWildcards(pt1.argTypesHi.last)))
+        case RefinedType(parent, nme.apply, mt @ MethodTpe(_, formals, restpe))
+        if defn.isNonRefinedFunction(parent) && formals.length == defaultArity =>
+          (formals, untpd.DependentTypeTree(syms => restpe.substParams(mt, syms.map(_.termRef))))
+        case SAMType(mt @ MethodTpe(_, formals, restpe)) =>
+          (formals,
+           if (mt.isResultDependent)
+             untpd.DependentTypeTree(syms => restpe.substParams(mt, syms.map(_.termRef)))
+           else
+             typeTree(restpe))
+        case _ =>
+          (List.tabulate(defaultArity)(alwaysWildcardType), untpd.TypeTree())
+      }
     }
   }
 
+  /** The parameter type for a parameter in a lambda that does
+   *  not have an explicit type given, and where the type is not known from the context.
+   *  In this case the parameter type I needs to be inferred from the "target type" T
+   *  known from the callee `f` if the lambda is of a form like `x => f(x)`.
+   *  If `T` exists, we know that `S <: I <: T`, where `S` is the expected type.
+   *
+   *  The inference makes two attempts:
+   *
+   *    1. Compute the target type `T` and make it known that `S <: T`.
+   *       If the expected type `S` can be fully defined under ForceDegree.flipBottom,
+   *       pick this one (this might use the fact that S <: T for an upper approximation).
+   *    2. Otherwise, if the target type `T` can be fully defined under ForceDegree.flipBottom,
+   *       pick this one.
+   *
+   *  If both attempts fail, return `NoType`.
+   */
+  def inferredFromTarget(
+      param: untpd.ValDef, formal: Type, calleeType: Type, paramIndex: Name => Int)(using Context): Type =
+    val target = calleeType.widen match
+      case mtpe: MethodType =>
+        val pos = paramIndex(param.name)
+        if pos < mtpe.paramInfos.length then
+          mtpe.paramInfos(pos)
+            // This works only if vararg annotations match up.
+            // See neg/i14367.scala for an example where the inferred type is mispredicted. 
+            // Nevertheless, the alternative would be to give up completely, so this is
+            // defensible.
+        else NoType
+      case _ => NoType
+    if target.exists then formal <:< target
+    if isFullyDefined(formal, ForceDegree.flipBottom) then formal
+    else if target.exists && isFullyDefined(target, ForceDegree.flipBottom) then target
+    else NoType
+
   def typedFunction(tree: untpd.Function, pt: Type)(using Context): Tree =
     if (ctx.mode is Mode.Type) typedFunctionType(tree, pt)
     else typedFunctionValue(tree, pt)
@@ -1216,10 +1320,10 @@ class Typer extends Namer
      */
     var fnBody = tree.body
 
-    def refersTo(arg: untpd.Tree, param: untpd.ValDef): Boolean = arg match {
+    def refersTo(arg: untpd.Tree, param: untpd.ValDef): Boolean = arg match
       case Ident(name) => name == param.name
+      case Typed(arg1, _) if untpd.isWildcardStarArg(arg) => refersTo(arg1, param)
       case _ => false
-    }
 
     /** If parameter `param` appears exactly once as an argument in `args`,
      *  the singleton list consisting of its position in `args`, otherwise `Nil`.
@@ -1307,41 +1411,7 @@ class Typer extends Namer
       case _ =>
     }
 
-    val (protoFormals, resultTpt) = decomposeProtoFunction(pt, params.length, tree)
-
-    /** The inferred parameter type for a parameter in a lambda that does
-     *  not have an explicit type given.
-     *  An inferred parameter type I has two possible sources:
-     *   - the type S known from the context
-     *   - the "target type" T known from the callee `f` if the lambda is of a form like `x => f(x)`
-     *  If `T` exists, we know that `S <: I <: T`.
-     *
-     *  The inference makes three attempts:
-     *
-     *    1. If the expected type `S` is already fully defined under ForceDegree.failBottom
-     *       pick this one.
-     *    2. Compute the target type `T` and make it known that `S <: T`.
-     *       If the expected type `S` can be fully defined under ForceDegree.flipBottom,
-     *       pick this one (this might use the fact that S <: T for an upper approximation).
-     *    3. Otherwise, if the target type `T` can be fully defined under ForceDegree.flipBottom,
-     *       pick this one.
-     *
-     *  If all attempts fail, issue a "missing parameter type" error.
-     */
-    def inferredParamType(param: untpd.ValDef, formal: Type): Type =
-      if isFullyDefined(formal, ForceDegree.failBottom) then return formal
-      val target = calleeType.widen match
-        case mtpe: MethodType =>
-          val pos = paramIndex(param.name)
-          if pos < mtpe.paramInfos.length then
-            val ptype = mtpe.paramInfos(pos)
-            if ptype.isRepeatedParam then NoType else ptype
-          else NoType
-        case _ => NoType
-      if target.exists then formal <:< target
-      if isFullyDefined(formal, ForceDegree.flipBottom) then formal
-      else if target.exists && isFullyDefined(target, ForceDegree.flipBottom) then target
-      else errorType(AnonymousFunctionMissingParamType(param, params, tree, formal), param.srcPos)
+    val (protoFormals, resultTpt) = decomposeProtoFunction(pt, params.length, tree.srcPos)
 
     def protoFormal(i: Int): Type =
       if (protoFormals.length == params.length) protoFormals(i)
@@ -1367,9 +1437,19 @@ class Typer extends Namer
         val inferredParams: List[untpd.ValDef] =
           for ((param, i) <- params.zipWithIndex) yield
             if (!param.tpt.isEmpty) param
-            else cpy.ValDef(param)(
-              tpt = untpd.TypeTree(
-                inferredParamType(param, protoFormal(i)).translateFromRepeated(toArray = false)))
+            else
+              val formal = protoFormal(i)
+              val knownFormal = isFullyDefined(formal, ForceDegree.failBottom)
+              val paramType =
+                if knownFormal then formal
+                else inferredFromTarget(param, formal, calleeType, paramIndex)
+                  .orElse(errorType(AnonymousFunctionMissingParamType(param, tree, formal), param.srcPos))
+              val paramTpt = untpd.TypedSplice(
+                  (if knownFormal then InferredTypeTree() else untpd.TypeTree())
+                    .withType(paramType.translateFromRepeated(toArray = false))
+                    .withSpan(param.span.endPos)
+                )
+              cpy.ValDef(param)(tpt = paramTpt)
         desugar.makeClosure(inferredParams, fnBody, resultTpt, isContextual, tree.span)
       }
     typed(desugared, pt)
@@ -1382,7 +1462,7 @@ class Typer extends Namer
       if (tree.tpt.isEmpty)
         meth1.tpe.widen match {
           case mt: MethodType =>
-            pt.stripNull match {
+            pt.findFunctionType match {
               case pt @ SAMType(sam)
               if !defn.isFunctionType(pt) && mt <:< sam =>
                 // SAMs of the form C[?] where C is a class cannot be conversion targets.
@@ -1399,10 +1479,6 @@ class Typer extends Namer
                   else
                     report.error(ex"result type of lambda is an underspecified SAM type $pt", tree.srcPos)
                     pt
-                if (pt.classSymbol.isOneOf(FinalOrSealed)) {
-                  val offendingFlag = pt.classSymbol.flags & FinalOrSealed
-                  report.error(ex"lambda cannot implement $offendingFlag ${pt.classSymbol}", tree.srcPos)
-                }
                 TypeTree(targetTpe)
               case _ =>
                 if (mt.isParamDependent)
@@ -1439,7 +1515,7 @@ class Typer extends Namer
           typedMatchFinish(tree, tpd.EmptyTree, defn.ImplicitScrutineeTypeRef, cases1, pt)
         }
         else {
-          val (protoFormals, _) = decomposeProtoFunction(pt, 1, tree)
+          val (protoFormals, _) = decomposeProtoFunction(pt, 1, tree.srcPos)
           val checkMode =
             if (pt.isRef(defn.PartialFunctionClass)) desugar.MatchCheck.None
             else desugar.MatchCheck.Exhaustive
@@ -1448,8 +1524,10 @@ class Typer extends Namer
       case _ =>
         if tree.isInline then checkInInlineContext("inline match", tree.srcPos)
         val sel1 = typedExpr(tree.selector)
-        val selType = fullyDefinedType(sel1.tpe, "pattern selector", tree.span).widen
-
+        val rawSelectorTpe = fullyDefinedType(sel1.tpe, "pattern selector", tree.span)
+        val selType = rawSelectorTpe match
+          case c: ConstantType if tree.isInline => c
+          case otherTpe => otherTpe.widen
         /** Extractor for match types hidden behind an AppliedType/MatchAlias */
         object MatchTypeInDisguise {
           def unapply(tp: AppliedType): Option[MatchType] = tp match {
@@ -1488,6 +1566,11 @@ class Typer extends Namer
                   case defn.MatchCase(patternTp, _) => tpt.tpe frozen_=:= patternTp
                   case _ => false
                 }
+              case (id @ Ident(nme.WILDCARD), pt) =>
+                pt match {
+                  case defn.MatchCase(patternTp, _) => defn.AnyType frozen_=:= patternTp
+                  case _ => false
+                }
               case _ => false
             }
 
@@ -1564,9 +1647,9 @@ class Typer extends Namer
       super.transform(trt.withType(stripTypeVars(trt.tpe))) match {
         case b: Bind =>
           val sym = b.symbol
-          if (sym.name != tpnme.WILDCARD)
-            if (ctx.scope.lookup(b.name) == NoSymbol) ctx.enter(sym)
-            else report.error(new DuplicateBind(b, cdef), b.srcPos)
+          assert(sym.name != tpnme.WILDCARD)
+          if ctx.scope.lookup(b.name) == NoSymbol then ctx.enter(sym)
+          else report.error(new DuplicateBind(b, cdef), b.srcPos)
           if (!ctx.isAfterTyper) {
             val bounds = ctx.gadt.fullBounds(sym)
             if (bounds != null) sym.info = bounds
@@ -1592,7 +1675,7 @@ class Typer extends Namer
   /** Type a case. */
   def typedCase(tree: untpd.CaseDef, sel: Tree, wideSelType: Type, pt: Type)(using Context): CaseDef = {
     val originalCtx = ctx
-    val gadtCtx: Context = ctx.fresh.setFreshGADTBounds
+    val gadtCtx: Context = ctx.fresh.setFreshGADTBounds.setNewScope
 
     def caseRest(pat: Tree)(using Context) = {
       val pt1 = instantiateMatchTypeProto(pat, pt) match {
@@ -1602,6 +1685,13 @@ class Typer extends Namer
       val pat1 = indexPattern(tree).transform(pat)
       val guard1 = typedExpr(tree.guard, defn.BooleanType)
       var body1 = ensureNoLocalRefs(typedExpr(tree.body, pt1), pt1, ctx.scope.toList)
+      if ctx.gadt.isNarrowing then
+        // Store GADT constraint to later retrieve it (in PostTyper, for now).
+        // GADT constraints are necessary to correctly check bounds of type app,
+        // see tests/pos/i12226 and issue #12226. It might be possible that this
+        // will end up taking too much memory. If it does, we should just limit
+        // how much GADT constraints we infer - it's always sound to infer less.
+        pat1.putAttachment(InferredGadtConstraints, ctx.gadt)
       if (pt1.isValueType) // insert a cast if body does not conform to expected type if we disregard gadt bounds
         body1 = body1.ensureConforms(pt1)(using originalCtx)
       assignType(cpy.CaseDef(tree)(pat1, guard1, body1), pat1, body1)
@@ -1610,7 +1700,7 @@ class Typer extends Namer
     val pat1 = typedPattern(tree.pat, wideSelType)(using gadtCtx)
     caseRest(pat1)(
       using Nullables.caseContext(sel, pat1)(
-        using gadtCtx.fresh.setNewScope))
+        using gadtCtx))
   }
 
   def typedLabeled(tree: untpd.Labeled)(using Context): Labeled = {
@@ -1622,7 +1712,9 @@ class Typer extends Namer
   /** Type a case of a type match */
   def typedTypeCase(cdef: untpd.CaseDef, selType: Type, pt: Type)(using Context): CaseDef = {
     def caseRest(using Context) = {
-      val pat1 = withMode(Mode.Pattern)(checkSimpleKinded(typedType(cdef.pat)))
+      val pat1 = withMode(Mode.Pattern) {
+        checkSimpleKinded(typedType(cdef.pat, mapPatternBounds = true))
+      }
       val pat2 = indexPattern(cdef).transform(pat1)
       var body1 = typedType(cdef.body, pt)
       if !body1.isType then
@@ -1633,66 +1725,25 @@ class Typer extends Namer
     caseRest(using ctx.fresh.setFreshGADTBounds.setNewScope)
   }
 
-  def typedReturn(tree: untpd.Return)(using Context): Return = {
+  def typedReturn(tree: untpd.Return)(using Context): Return =
 
-    /** If `pt` is a context function type, its return type. If the CFT
-     * is dependent, instantiate with the parameters of the associated
-     * anonymous function.
-     * @param  paramss  the parameters of the anonymous functions
-     *                  enclosing the return expression
-     */
-    def instantiateCFT(pt: Type, paramss: => List[List[Symbol]]): Type =
-      val ift = defn.asContextFunctionType(pt)
-      if ift.exists then
-        ift.nonPrivateMember(nme.apply).info match
-          case appType: MethodType =>
-            instantiateCFT(appType.instantiate(paramss.head.map(_.termRef)), paramss.tail)
-      else pt
-
-    def returnProto(owner: Symbol): Type =
-      if (owner.isConstructor) defn.UnitType
-      else
-        // We need to get the return type of the enclosing function, with all parameters replaced
-        // by the local type and value parameters. It would be nice if we could look up that
-        // type simply in the tpt field of the enclosing function. But the tree argument in
-        // a context is an untyped tree, so we cannot extract its type.
-        def instantiateRT(info: Type, psymss: List[List[Symbol]]): Type = info match
-          case info: PolyType =>
-            instantiateRT(info.instantiate(psymss.head.map(_.typeRef)), psymss.tail)
-          case info: MethodType =>
-            instantiateRT(info.instantiate(psymss.head.map(_.termRef)), psymss.tail)
-          case info =>
-            info.widenExpr
-        val rt = instantiateRT(owner.info, owner.paramSymss)
-        def iftParamss = ctx.owner.ownersIterator
-          .filter(_.is(Method, butNot = Accessor))
-          .takeWhile(_.isAnonymousFunction)
-          .toList
-          .reverse
-          .map(_.paramSymss.head)
-        instantiateCFT(rt, iftParamss)
-
-    def enclMethInfo(cx: Context): (Tree, Type) = {
+    def enclMethInfo(cx: Context): (Tree, Type) =
       val owner = cx.owner
-      if (owner.isType) {
+      if owner.isType then
         report.error(ReturnOutsideMethodDefinition(owner), tree.srcPos)
         (EmptyTree, WildcardType)
-      }
-      else if (owner != cx.outer.owner && owner.isRealMethod)
-        if (owner.isInlineMethod)
+      else if owner != cx.outer.owner && owner.isRealMethod then
+        if owner.isInlineMethod then
           (EmptyTree, errorType(NoReturnFromInlineable(owner), tree.srcPos))
-        else if (!owner.isCompleted)
+        else if !owner.isCompleted then
           (EmptyTree, errorType(MissingReturnTypeWithReturnStatement(owner), tree.srcPos))
-        else {
-          val from = Ident(TermRef(NoPrefix, owner.asTerm))
-          val proto = returnProto(owner)
-          (from, proto)
-        }
+        else
+          (Ident(TermRef(NoPrefix, owner.asTerm)), owner.returnProto)
       else enclMethInfo(cx.outer)
-    }
+
     val (from, proto) =
-      if (tree.from.isEmpty) enclMethInfo(ctx)
-      else {
+      if tree.from.isEmpty then enclMethInfo(ctx)
+      else
         val from = tree.from.asInstanceOf[tpd.Tree]
         val proto =
           if (ctx.erasedTypes) from.symbol.info.finalResultType
@@ -1700,10 +1751,9 @@ class Typer extends Namer
                             // because we do not know the internal type params and method params.
                             // Hence no adaptation is possible, and we assume WildcardType as prototype.
         (from, proto)
-      }
     val expr1 = typedExpr(tree.expr orElse untpd.unitLiteral.withSpan(tree.span), proto)
     assignType(cpy.Return(tree)(expr1, from))
-  }
+  end typedReturn
 
   def typedWhileDo(tree: untpd.WhileDo)(using Context): Tree =
     inContext(Nullables.whileContext(tree.span)) {
@@ -1715,10 +1765,38 @@ class Typer extends Namer
         .withNotNullInfo(body1.notNullInfo.retractedInfo.seq(cond1.notNullInfoIf(false)))
     }
 
+  /** Add givens reflecting `CanThrow` capabilities for all checked exceptions matched
+   *  by `cases`. The givens appear in nested blocks with earlier cases leading to
+   *  more deeply nested givens. This way, given priority will be the same as pattern priority.
+   *  The functionality is enabled if the experimental.saferExceptions language feature is enabled.
+   */
+  def addCanThrowCapabilities(expr: untpd.Tree, cases: List[CaseDef])(using Context): untpd.Tree =
+    def makeCanThrow(tp: Type): untpd.Tree =
+      untpd.ValDef(
+          EvidenceParamName.fresh(),
+          untpd.TypeTree(defn.CanThrowClass.typeRef.appliedTo(tp)),
+          untpd.ref(defn.Compiletime_erasedValue))
+        .withFlags(Given | Final | Erased)
+        .withSpan(expr.span)
+    val caughtExceptions =
+      if Feature.enabled(Feature.saferExceptions) then
+        for
+          CaseDef(pat, guard, _) <- cases
+          if pat.tpe.widen.isCheckedException
+        yield
+          checkCatch(pat, guard)
+          pat.tpe.widen
+      else Seq.empty
+
+    if caughtExceptions.isEmpty then expr
+    else
+      val capabilityProof = caughtExceptions.reduce(OrType(_, _, true))
+      untpd.Block(makeCanThrow(capabilityProof), expr)
+
   def typedTry(tree: untpd.Try, pt: Type)(using Context): Try = {
     val expr2 :: cases2x = harmonic(harmonize, pt) {
-      val expr1 = typed(tree.expr, pt.dropIfProto)
       val cases1 = typedCases(tree.cases, EmptyTree, defn.ThrowableType, pt.dropIfProto)
+      val expr1 = typed(addCanThrowCapabilities(tree.expr, cases1), pt.dropIfProto)
       expr1 :: cases1
     }
     val finalizer1 = typed(tree.finalizer, defn.UnitType)
@@ -1737,6 +1815,7 @@ class Typer extends Namer
 
   def typedThrow(tree: untpd.Throw)(using Context): Tree = {
     val expr1 = typed(tree.expr, defn.ThrowableType)
+    checkCanThrow(expr1.tpe.widen, tree.span)
     Throw(expr1).withSpan(tree.span)
   }
 
@@ -1776,8 +1855,15 @@ class Typer extends Namer
         bindings1, expansion1)
   }
 
+  def completeTypeTree(tree: untpd.TypeTree, pt: Type, original: untpd.Tree)(using Context): TypeTree =
+    tree.withSpan(original.span).withAttachmentsFrom(original)
+      .withType(
+        if isFullyDefined(pt, ForceDegree.flipBottom) then pt
+        else if ctx.reporter.errorsReported then UnspecifiedErrorType
+        else errorType(i"cannot infer type; expected type $pt is not fully defined", tree.srcPos))
+
   def typedTypeTree(tree: untpd.TypeTree, pt: Type)(using Context): Tree =
-    tree match {
+    tree match
       case tree: untpd.DerivedTypeTree =>
         tree.ensureCompletions
         tree.getAttachment(untpd.OriginalSymbol) match {
@@ -1791,11 +1877,7 @@ class Typer extends Namer
             errorTree(tree, "Something's wrong: missing original symbol for type tree")
         }
       case _ =>
-        tree.withType(
-          if (isFullyDefined(pt, ForceDegree.flipBottom)) pt
-          else if (ctx.reporter.errorsReported) UnspecifiedErrorType
-          else errorType(i"cannot infer type; expected type $pt is not fully defined", tree.srcPos))
-    }
+        completeTypeTree(InferredTypeTree(), pt, tree)
 
   def typedSingletonTypeTree(tree: untpd.SingletonTypeTree)(using Context): SingletonTypeTree = {
     val ref1 = typedExpr(tree.ref)
@@ -1832,7 +1914,7 @@ class Typer extends Namer
   def typedAppliedTypeTree(tree: untpd.AppliedTypeTree)(using Context): Tree = {
     tree.args match
       case arg :: _ if arg.isTerm =>
-        if dependentEnabled then
+        if Feature.dependentEnabled then
           return errorTree(tree, i"Not yet implemented: T(...)")
         else
           return errorTree(tree, dependentStr)
@@ -1872,8 +1954,7 @@ class Typer extends Namer
             arg match {
               case untpd.WildcardTypeBoundsTree()
               if tparam.paramInfo.isLambdaSub &&
-                 tpt1.tpe.typeParamSymbols.nonEmpty &&
-                 !ctx.mode.is(Mode.Pattern) =>
+                 tpt1.tpe.typeParamSymbols.nonEmpty =>
                 // An unbounded `_` automatically adapts to type parameter bounds. This means:
                 // If we have wildcard application C[?], where `C` is a class replace
                 // with C[? >: L <: H] where `L` and `H` are the bounds of the corresponding
@@ -1882,7 +1963,7 @@ class Typer extends Namer
                 // wildcard identifiers `_` instead.
                 TypeTree(tparamBounds).withSpan(arg.span)
               case _ =>
-                typed(desugaredArg, argPt)
+                typedType(desugaredArg, argPt, mapPatternBounds = true)
             }
           else desugaredArg.withType(UnspecifiedErrorType)
         }
@@ -1898,14 +1979,20 @@ class Typer extends Namer
       }
       var checkedArgs = preCheckKinds(args1, paramBounds)
         // check that arguments conform to bounds is done in phase PostTyper
-      if (tpt1.symbol == defn.andType)
+      val tycon = tpt1.symbol
+      if (tycon == defn.andType)
         checkedArgs = checkedArgs.mapconserve(arg =>
           checkSimpleKinded(checkNoWildcard(arg)))
-      else if (tpt1.symbol == defn.orType)
+      else if (tycon == defn.orType)
         checkedArgs = checkedArgs.mapconserve(arg =>
           checkSimpleKinded(checkNoWildcard(arg)))
+      else if tycon == defn.throwsAlias
+          && checkedArgs.length == 2
+          && checkedArgs(1).tpe.derivesFrom(defn.RuntimeExceptionClass)
+      then
+        report.error(em"throws clause cannot be defined for RuntimeException", checkedArgs(1).srcPos)
       else if (ctx.isJava)
-        if (tpt1.symbol eq defn.ArrayClass) then
+        if tycon eq defn.ArrayClass then
           checkedArgs match {
             case List(arg) =>
               val elemtp = arg.tpe.translateJavaArrayElementType
@@ -1929,7 +2016,7 @@ class Typer extends Namer
     typeIndexedLambdaTypeTree(tree, tparams, body)
 
   def typedTermLambdaTypeTree(tree: untpd.TermLambdaTypeTree)(using Context): Tree =
-    if dependentEnabled then
+    if Feature.dependentEnabled then
       errorTree(tree, i"Not yet implemented: (...) =>> ...")
     else
       errorTree(tree, dependentStr)
@@ -1949,35 +2036,18 @@ class Typer extends Namer
     assignType(cpy.ByNameTypeTree(tree)(result1), result1)
   }
 
-  def typedTypeBoundsTree(tree: untpd.TypeBoundsTree, pt: Type)(using Context): Tree = {
+  def typedTypeBoundsTree(tree: untpd.TypeBoundsTree, pt: Type)(using Context): Tree =
     val TypeBoundsTree(lo, hi, alias) = tree
     val lo1 = typed(lo)
     val hi1 = typed(hi)
     val alias1 = typed(alias)
-
     val lo2 = if (lo1.isEmpty) typed(untpd.TypeTree(defn.NothingType)) else lo1
     val hi2 = if (hi1.isEmpty) typed(untpd.TypeTree(defn.AnyType)) else hi1
-
     if !alias1.isEmpty then
       val bounds = TypeBounds(lo2.tpe, hi2.tpe)
       if !bounds.contains(alias1.tpe) then
         report.error(em"type ${alias1.tpe} outside bounds $bounds", tree.srcPos)
-
-    val tree1 = assignType(cpy.TypeBoundsTree(tree)(lo2, hi2, alias1), lo2, hi2, alias1)
-    if (ctx.mode.is(Mode.Pattern))
-      // Associate a pattern-bound type symbol with the wildcard.
-      // The bounds of the type symbol can be constrained when comparing a pattern type
-      // with an expected type in typedTyped. The type symbol and the defining Bind node
-      // are eliminated once the enclosing pattern has been typechecked; see `indexPattern`
-      // in `typedCase`.
-      //val ptt = if (lo.isEmpty && hi.isEmpty) pt else
-      if (ctx.isAfterTyper) tree1
-      else {
-        val wildcardSym = newPatternBoundSymbol(tpnme.WILDCARD, tree1.tpe & pt, tree.span)
-        untpd.Bind(tpnme.WILDCARD, tree1).withType(wildcardSym.typeRef)
-      }
-    else tree1
-  }
+    assignType(cpy.TypeBoundsTree(tree)(lo2, hi2, alias1), lo2, hi2, alias1)
 
   def typedBind(tree: untpd.Bind, pt: Type)(using Context): Tree = {
     if !isFullyDefined(pt, ForceDegree.all) then
@@ -2054,10 +2124,21 @@ class Typer extends Namer
   def annotContext(mdef: untpd.Tree, sym: Symbol)(using Context): Context = {
     def isInner(owner: Symbol) = owner == sym || sym.is(Param) && owner == sym.owner
     val outer = ctx.outersIterator.dropWhile(c => isInner(c.owner)).next()
-    outer.property(ExprOwner) match {
+    var adjusted = outer.property(ExprOwner) match {
       case Some(exprOwner) if outer.owner.isClass => outer.exprContext(mdef, exprOwner)
       case _ => outer
     }
+    sym.owner.infoOrCompleter match
+      case completer: Namer#Completer if sym.is(Param) =>
+        val tparams = completer.completerTypeParams(sym)
+        if tparams.nonEmpty then
+          // Create a new local context with a dummy owner and a scope containing the
+          // type parameters of the enclosing method or class. Thus annotations can see
+          // these type parameters. See i12953.scala for a test case.
+          val dummyOwner = newLocalDummy(sym.owner)
+          adjusted = adjusted.fresh.setOwner(dummyOwner).setScope(newScopeWith(tparams*))
+      case _ =>
+    adjusted
   }
 
   def completeAnnotations(mdef: untpd.MemberDef, sym: Symbol)(using Context): Unit = {
@@ -2066,12 +2147,43 @@ class Typer extends Namer
     lazy val annotCtx = annotContext(mdef, sym)
     // necessary in order to mark the typed ahead annotations as definitely typed:
     for (annot <- mdef.mods.annotations)
-      checkAnnotApplicable(typedAnnotation(annot)(using annotCtx), sym)
+      val annot1 = typedAnnotation(annot)(using annotCtx)
+      checkAnnotApplicable(annot1, sym)
+      if Annotations.annotClass(annot1) == defn.NowarnAnnot then
+        registerNowarn(annot1, mdef)
   }
 
   def typedAnnotation(annot: untpd.Tree)(using Context): Tree =
     checkAnnotArgs(typed(annot, defn.AnnotationClass.typeRef))
 
+  def registerNowarn(tree: Tree, mdef: untpd.Tree)(using Context): Unit =
+    val annot = Annotations.Annotation(tree)
+    def argPos = annot.argument(0).getOrElse(tree).sourcePos
+    var verbose = false
+    val filters = annot.argumentConstantString(0) match
+      case None => annot.argument(0) match
+        case Some(t: Select) if t.name.is(DefaultGetterName) =>
+          // default argument used for `@nowarn` and `@nowarn()`
+          List(MessageFilter.Any)
+        case _ =>
+          report.warning(s"filter needs to be a compile-time constant string", argPos)
+          List(MessageFilter.None)
+      case Some("") =>
+        List(MessageFilter.Any)
+      case Some("verbose") | Some("v") =>
+        verbose = true
+        List(MessageFilter.Any)
+      case Some(s) =>
+        WConf.parseFilters(s).left.map(parseErrors =>
+          report.warning (s"Invalid message filter\n${parseErrors.mkString ("\n")}", argPos)
+            List(MessageFilter.None)
+        ).merge
+    val range = mdef.sourcePos
+    val sup = Suppression(tree.sourcePos, filters, range.start, range.end, verbose)
+    // invalid suppressions, don't report as unused
+    if filters == List(MessageFilter.None) then sup.markUsed()
+    ctx.run.suppressions.addSuppression(sup)
+
   def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = {
     val ValDef(name, tpt, _) = vdef
     completeAnnotations(vdef, sym)
@@ -2153,6 +2265,8 @@ class Typer extends Namer
       PrepareInlineable.registerInlineInfo(sym, rhsToInline)
 
     if sym.isConstructor then
+      if sym.is(Inline) then
+        report.error("constructors cannot be `inline`", ddef)
       if sym.isPrimaryConstructor then
         if sym.owner.is(Case) then
           for
@@ -2269,11 +2383,20 @@ class Typer extends Namer
           result = maybeCall(result, psym)
       }
       else checkParentCall(result, cls)
-      checkTraitInheritance(psym, cls, tree.srcPos)
       if (cls is Case) checkCaseInheritance(psym, cls, tree.srcPos)
       result
     }
 
+    def ensureCorrectSuperClass(): Unit =
+      val parents0 = cls.classInfo.declaredParents
+      parents0 match
+        case AnnotatedType(sc, ann) :: rest if ann.symbol == defn.ProvisionalSuperClassAnnot =>
+          val parents1 = ensureFirstIsClass(cls, rest)
+          if parents1.head ne sc then
+            typr.println(i"improved provisional superclass $sc to ${parents1.head}")
+          cls.info = cls.classInfo.derivedClassInfo(declaredParents = parents1)
+        case _ =>
+
     /** Augment `ptrees` to have the same class symbols as `parents`. Generate TypeTrees
      *  or New trees to fill in any parents for which no tree exists yet.
      */
@@ -2313,6 +2436,7 @@ class Typer extends Namer
       }
     }
 
+    ensureCorrectSuperClass()
     completeAnnotations(cdef, cls)
     val constr1 = typed(constr).asInstanceOf[DefDef]
     val parents0 = parentTrees(
@@ -2345,7 +2469,7 @@ class Typer extends Namer
         ctx.phase.isTyper &&
         cdef1.symbol.ne(defn.DynamicClass) &&
         cdef1.tpe.derivesFrom(defn.DynamicClass) &&
-        !dynamicsEnabled
+        !Feature.dynamicsEnabled
       if (reportDynamicInheritance) {
         val isRequired = parents1.exists(_.tpe.isRef(defn.DynamicClass))
         report.featureWarning(nme.dynamics.toString, "extension of type scala.Dynamic", cls, isRequired, cdef.srcPos)
@@ -2385,7 +2509,7 @@ class Typer extends Namer
       // 4. Polymorphic type defs override nothing.
 
   protected def addAccessorDefs(cls: Symbol, body: List[Tree])(using Context): List[Tree] =
-    ctx.compilationUnit.inlineAccessors.addAccessorDefs(cls, body)
+    PrepareInlineable.addAccessorDefs(cls, body)
 
   /** If this is a real class, make sure its first parent is a
    *  constructor call. Cannot simply use a type. Overridden in ReTyper.
@@ -2435,8 +2559,8 @@ class Typer extends Namer
                 |The selector is not a member of an object or package.""")
     else typd(imp.expr, AnySelectionProto)
 
-  def typedImport(imp: untpd.Import, sym: Symbol)(using Context): Import =
-    val expr1 = typedImportQualifier(imp, typedExpr)
+  def typedImport(imp: untpd.Import, sym: Symbol)(using Context): Tree =
+    val expr1 = typedImportQualifier(imp, typedExpr(_, _)(using ctx.withOwner(sym)))
     checkLegalImportPath(expr1)
     val selectors1 = typedSelectors(imp.selectors)
     assignType(cpy.Import(imp)(expr1, selectors1), sym)
@@ -2453,6 +2577,10 @@ class Typer extends Namer
     pid1 match
       case pid1: RefTree if pkg.is(Package) =>
         inContext(ctx.packageContext(tree, pkg)) {
+          // If it exists, complete the class containing the top-level definitions
+          // before typing any statement in the package to avoid cycles as in i13669.scala
+          val topLevelClassName = desugar.packageObjectName(ctx.source).moduleClassName
+          pkg.moduleClass.info.decls.lookup(topLevelClassName).ensureCompleted()
           var stats1 = typedStats(tree.stats, pkg.moduleClass)._1
           if (!ctx.isAfterTyper)
             stats1 = stats1 ++ typedBlockStats(MainProxies.mainProxies(stats1))._1
@@ -2467,6 +2595,8 @@ class Typer extends Namer
 
   def typedAnnotated(tree: untpd.Annotated, pt: Type)(using Context): Tree = {
     val annot1 = typedExpr(tree.annot, defn.AnnotationClass.typeRef)
+    if Annotations.annotClass(annot1) == defn.NowarnAnnot then
+      registerNowarn(annot1, tree)
     val arg1 = typed(tree.arg, pt)
     if (ctx.mode is Mode.Type) {
       if arg1.isType then
@@ -2558,7 +2688,10 @@ class Typer extends Namer
     val untpd.InfixOp(l, op, r) = tree
     val result =
       if (ctx.mode.is(Mode.Type))
-        typedAppliedTypeTree(cpy.AppliedTypeTree(tree)(op, l :: r :: Nil))
+        typedAppliedTypeTree(
+          if op.name == tpnme.throws && Feature.enabled(Feature.saferExceptions)
+          then desugar.throws(l, op, r)
+          else cpy.AppliedTypeTree(tree)(op, l :: r :: Nil))
       else if (ctx.mode.is(Mode.Pattern))
         typedUnApply(cpy.Apply(tree)(op, l :: r :: Nil), pt)
       else {
@@ -2576,7 +2709,10 @@ class Typer extends Namer
           wrapDefs(defs, lift(app))
         else app
       }
-    checkValidInfix(tree, result.symbol)
+    // issue 10383: we stripBlock because e.g. default arguments desugar to blocks during typing,
+    // and the block itself doesn't have a symbol (because a Block isn't a ProxyTree),
+    // but the last expression in the block does have the right symbol
+    checkValidInfix(tree, stripBlock(result).symbol)
     result
   }
 
@@ -2589,7 +2725,9 @@ class Typer extends Namer
       val pts =
         if (arity == pt.tupleArity) pt.tupleElementTypes
         else List.fill(arity)(defn.AnyType)
-      val elems = tree.trees.lazyZip(pts).map(typed(_, _))
+      val elems = tree.trees.lazyZip(pts).map(
+        if ctx.mode.is(Mode.Type) then typedType(_, _, mapPatternBounds = true)
+        else typed(_, _))
       if (ctx.mode.is(Mode.Type))
         elems.foldRight(TypeTree(defn.EmptyTupleModule.termRef): Tree)((elemTpt, elemTpts) =>
           AppliedTypeTree(TypeTree(defn.PairClass.typeRef), List(elemTpt, elemTpts)))
@@ -2709,7 +2847,7 @@ class Typer extends Namer
           case tree: untpd.TypedSplice => typedTypedSplice(tree)
           case tree: untpd.UnApply => typedUnApply(tree, pt)
           case tree: untpd.Tuple => typedTuple(tree, pt)
-          case tree: untpd.DependentTypeTree => typed(untpd.TypeTree().withSpan(tree.span), pt)
+          case tree: untpd.DependentTypeTree => completeTypeTree(untpd.TypeTree(), pt, tree)
           case tree: untpd.InfixOp => typedInfixOp(tree, pt)
           case tree: untpd.ParsedTry => typedTry(tree, pt)
           case tree @ untpd.PostfixOp(qual, Ident(nme.WILDCARD)) => typedAsFunction(tree, pt)
@@ -2717,6 +2855,7 @@ class Typer extends Namer
           case tree: untpd.Quote => typedQuote(tree, pt)
           case tree: untpd.Splice => typedSplice(tree, pt)
           case tree: untpd.TypSplice => typedTypSplice(tree, pt)
+          case tree: untpd.MacroTree => report.error("Unexpected macro", tree.srcPos); tpd.nullLiteral  // ill-formed code may reach here
           case _ => typedUnadapted(desugar(tree), pt, locked)
         }
 
@@ -2777,7 +2916,7 @@ class Typer extends Namer
     // see tests/pos/i7778b.scala
 
     val paramTypes = {
-      val hasWildcard = formals.exists(_.existsPart(_.isInstanceOf[WildcardType], stopAtStatic = true))
+      val hasWildcard = formals.exists(_.existsPart(_.isInstanceOf[WildcardType], StopAt.Static))
       if hasWildcard then formals.map(_ => untpd.TypeTree())
       else formals.map(untpd.TypeTree)
     }
@@ -2919,8 +3058,24 @@ class Typer extends Namer
 
   def typedExpr(tree: untpd.Tree, pt: Type = WildcardType)(using Context): Tree =
     withoutMode(Mode.PatternOrTypeBits)(typed(tree, pt))
-  def typedType(tree: untpd.Tree, pt: Type = WildcardType)(using Context): Tree = // todo: retract mode between Type and Pattern?
-    withMode(Mode.Type)(typed(tree, pt))
+
+  def typedType(tree: untpd.Tree, pt: Type = WildcardType, mapPatternBounds: Boolean = false)(using Context): Tree =
+    val tree1 = withMode(Mode.Type) { typed(tree, pt) }
+    if mapPatternBounds && ctx.mode.is(Mode.Pattern) && !ctx.isAfterTyper then
+      tree1 match
+        case tree1: TypeBoundsTree =>
+          // Associate a pattern-bound type symbol with the wildcard.
+          // The bounds of the type symbol can be constrained when comparing a pattern type
+          // with an expected type in typedTyped. The type symbol and the defining Bind node
+          // are eliminated once the enclosing pattern has been typechecked; see `indexPattern`
+          // in `typedCase`.
+          val boundName = WildcardParamName.fresh().toTypeName
+          val wildcardSym = newPatternBoundSymbol(boundName, tree1.tpe & pt, tree.span)
+          untpd.Bind(boundName, tree1).withType(wildcardSym.typeRef)
+        case tree1 =>
+          tree1
+    else tree1
+
   def typedPattern(tree: untpd.Tree, selType: Type = WildcardType)(using Context): Tree =
     withMode(Mode.Pattern)(typed(tree, selType))
 
@@ -3019,24 +3174,34 @@ class Typer extends Namer
     }
   }
 
-  /** If this tree is a select node `qual.name` that does not conform to `pt`,
-   *  try to insert an implicit conversion `c` around `qual` so that
-   *  `c(qual).name` conforms to `pt`.
+  /** If this tree is a select node `qual.name` (possibly applied to type variables)
+   *  that does not conform to `pt`, try two mitigations:
+   *   1. Instantiate any TypeVars in the widened type of `tree` with their lower bounds.
+   *   2. Try to insert an implicit conversion `c` around `qual` so that
+   *   `c(qual).name` conforms to `pt`.
    */
   def tryInsertImplicitOnQualifier(tree: Tree, pt: Type, locked: TypeVars)(using Context): Option[Tree] = trace(i"try insert impl on qualifier $tree $pt") {
-    tree match {
+    tree match
       case tree @ Select(qual, name) if name != nme.CONSTRUCTOR =>
-        val selProto = SelectionProto(name, pt, NoViewsAllowed, privateOK = false)
-        if selProto.isMatchedBy(qual.tpe) then None
+        if couldInstantiateTypeVar(qual.tpe.widen, applied = true)
+        then
+          Some(adapt(tree, pt, locked))
         else
-          tryEither {
-            val tree1 = tryExtensionOrConversion(tree, pt, pt, qual, locked, NoViewsAllowed, privateOK = false)
-            if tree1.isEmpty then None
-            else Some(adapt(tree1, pt, locked))
-          } { (_, _) => None
-          }
+          val selProto = SelectionProto(name, pt, NoViewsAllowed, privateOK = false)
+          if selProto.isMatchedBy(qual.tpe) || tree.hasAttachment(InsertedImplicitOnQualifier) then
+            None
+          else
+            tryEither {
+              val tree1 = tryExtensionOrConversion(tree, pt, pt, qual, locked, NoViewsAllowed, inSelect = false)
+              if tree1.isEmpty then None
+              else
+                tree1.putAttachment(InsertedImplicitOnQualifier, ())
+                Some(adapt(tree1, pt, locked))
+            } { (_, _) => None
+            }
+      case TypeApply(fn, args) if args.forall(_.isInstanceOf[untpd.InferredTypeTree]) =>
+        tryInsertImplicitOnQualifier(fn, pt, locked)
       case _ => None
-    }
   }
 
   /** Given a selection `qual.name`, try to convert to an extension method
@@ -3044,10 +3209,10 @@ class Typer extends Namer
    *  @return The converted tree, or `EmptyTree` is not successful.
    */
   def tryExtensionOrConversion
-      (tree: untpd.Select, pt: Type, mbrProto: Type, qual: Tree, locked: TypeVars, compat: Compatibility, privateOK: Boolean)
+      (tree: untpd.Select, pt: Type, mbrProto: Type, qual: Tree, locked: TypeVars, compat: Compatibility, inSelect: Boolean)
       (using Context): Tree =
 
-    def selectionProto = SelectionProto(tree.name, mbrProto, compat, privateOK)
+    def selectionProto = SelectionProto(tree.name, mbrProto, compat, privateOK = inSelect)
 
     def tryExtension(using Context): Tree =
       findRef(tree.name, WildcardType, ExtensionMethod, EmptyFlags, qual.srcPos) match
@@ -3067,7 +3232,10 @@ class Typer extends Namer
       if !app.isEmpty && !nestedCtx.reporter.hasErrors then
         nestedCtx.typerState.commit()
         return app
-      for err <- nestedCtx.reporter.allErrors.take(1) do
+      val errs = nestedCtx.reporter.allErrors
+      val remembered = // report AmbiguousReferences as priority, otherwise last error
+        (errs.filter(_.msg.isInstanceOf[AmbiguousReference]) ++ errs).take(1)
+      for err <- remembered do
         rememberSearchFailure(qual,
           SearchFailure(app.withType(FailedExtension(app, selectionProto, err.msg))))
     catch case ex: TypeError => nestedFailure(ex)
@@ -3085,12 +3253,13 @@ class Typer extends Namer
                 return typedSelect(tree, pt, found)
             case failure: SearchFailure =>
               if failure.isAmbiguous then
-                return (
-                  if canDefineFurther(qual.tpe.widen) then
-                    tryExtensionOrConversion(tree, pt, mbrProto, qual, locked, compat, privateOK)
+                return
+                  if !inSelect // in a selection we will do the canDefineFurther afterwards
+                     && canDefineFurther(qual.tpe.widen)
+                  then
+                    tryExtensionOrConversion(tree, pt, mbrProto, qual, locked, compat, inSelect)
                   else
                     err.typeMismatch(qual, selProto, failure.reason) // TODO: report NotAMember instead, but need to be aware of failure
-                )
               rememberSearchFailure(qual, failure)
         }
       catch case ex: TypeError => nestedFailure(ex)
@@ -3135,7 +3304,7 @@ class Typer extends Namer
    */
   def adapt(tree: Tree, pt: Type, locked: TypeVars, tryGadtHealing: Boolean = true)(using Context): Tree =
     try
-      trace(i"adapting $tree to $pt ${if (tryGadtHealing) "" else "(tryGadtHealing=false)" }\n", typr, show = true) {
+      trace(i"adapting $tree to $pt ${if (tryGadtHealing) "" else "(tryGadtHealing=false)" }", typr, show = true) {
         record("adapt")
         adapt1(tree, pt, locked, tryGadtHealing)
       }
@@ -3202,37 +3371,6 @@ class Typer extends Namer
       }
     }
 
-    def isUnary(tp: Type): Boolean = tp match {
-      case tp: MethodicType =>
-        tp.firstParamTypes match {
-          case ptype :: Nil => !ptype.isRepeatedParam
-          case _ => false
-        }
-      case tp: TermRef =>
-        tp.denot.alternatives.forall(alt => isUnary(alt.info))
-      case _ =>
-        false
-    }
-
-    /** Should we tuple or untuple the argument before application?
-     *  If auto-tupling is enabled then
-     *
-     *   - we tuple n-ary arguments where n > 0 if the function consists
-     *     only of unary alternatives
-     *   - we untuple tuple arguments of infix operations if the function
-     *     does not consist only of unary alternatives.
-     */
-    def needsTupledDual(funType: Type, pt: FunProto): Boolean =
-      pt.args match
-        case untpd.Tuple(elems) :: Nil =>
-          elems.length > 1
-          && pt.applyKind == ApplyKind.InfixTuple
-          && !isUnary(funType)
-        case args =>
-          args.lengthCompare(1) > 0
-          && isUnary(funType)
-          && autoTuplingEnabled
-
     def adaptToArgs(wtp: Type, pt: FunProto): Tree = wtp match {
       case wtp: MethodOrPoly =>
         def methodStr = methPart(tree).symbol.showLocated
@@ -3271,7 +3409,7 @@ class Typer extends Namer
         replaceSingletons(tp)
       }
       wtp.paramInfos.foreach(instantiate)
-      val constr = ctx.typerState.constraint
+      val saved = ctx.typerState.snapshot()
 
       def dummyArg(tp: Type) = untpd.Ident(nme.???).withTypeUnchecked(tp)
 
@@ -3371,8 +3509,9 @@ class Typer extends Namer
         if (propFail.exists) {
           // If there are several arguments, some arguments might already
           // have influenced the context, binding variables, but later ones
-          // might fail. In that case the constraint needs to be reset.
-          ctx.typerState.constraint = constr
+          // might fail. In that case the constraint and instantiated variables
+          // need to be reset.
+          ctx.typerState.resetTo(saved)
 
           // If method has default params, fall back to regular application
           // where all inferred implicits are passed as named args.
@@ -3439,7 +3578,7 @@ class Typer extends Namer
       def isAutoApplied(sym: Symbol): Boolean =
         sym.isConstructor
         || sym.matchNullaryLoosely
-        || warnOnMigration(MissingEmptyArgumentList(sym.show), tree.srcPos)
+        || Feature.warnOnMigration(MissingEmptyArgumentList(sym.show), tree.srcPos)
            && { patch(tree.span.endPos, "()"); true }
 
       // Reasons NOT to eta expand:
@@ -3536,13 +3675,33 @@ class Typer extends Namer
                 typr.println(i"adapt to subtype ${tree.tpe} !<:< $pt")
                 //typr.println(TypeComparer.explained(tree.tpe <:< pt))
                 adaptToSubType(wtp)
-          case CompareResult.OKwithGADTUsed if pt.isValueType =>
+          case CompareResult.OKwithGADTUsed
+          if pt.isValueType
+             && !inContext(ctx.fresh.setGadt(EmptyGadtConstraint)) {
+               val res = (tree.tpe.widenExpr frozen_<:< pt)
+               if res then
+                 // we overshot; a cast is not needed, after all.
+                 gadts.println(i"unnecessary GADTused for $tree: ${tree.tpe.widenExpr} vs $pt in ${ctx.source}")
+               res
+              } =>
             // Insert an explicit cast, so that -Ycheck in later phases succeeds.
             // I suspect, but am not 100% sure that this might affect inferred types,
             // if the expected type is a supertype of the GADT bound. It would be good to come
             // up with a test case for this.
-            tree.cast(pt)
+            val target =
+              if tree.tpe.isSingleton then
+                val conj = AndType(tree.tpe, pt)
+                if tree.tpe.isStable && !conj.isStable then
+                  // this is needed for -Ycheck. Without the annotation Ycheck will
+                  // skolemize the result type which will lead to different types before
+                  // and after checking. See i11955.scala.
+                  AnnotatedType(conj, Annotation(defn.UncheckedStableAnnot))
+                else conj
+              else pt
+            gadts.println(i"insert GADT cast from $tree to $target")
+            tree.cast(target)
           case _ =>
+            //typr.println(i"OK ${tree.tpe}\n${TypeComparer.explained(_.isSubType(tree.tpe, pt))}") // uncomment for unexpected successes
             tree
     }
 
@@ -3632,6 +3791,12 @@ class Typer extends Namer
         case ConstantType(x) =>
           val converted = x.convertTo(pt)
           if converted != null && (converted ne x) then
+            val cls = pt.classSymbol
+            if x.tag == IntTag && cls == defn.FloatClass && x.intValue.toFloat.toInt != x.intValue
+              || x.tag == LongTag && cls == defn.FloatClass  && x.longValue.toFloat.toLong != x.longValue
+              || x.tag == LongTag && cls == defn.DoubleClass && x.longValue.toDouble.toLong != x.longValue
+            then
+              report.warning(LossyWideningConstantConversion(x.tpe, pt), tree.srcPos)
             return adaptConstant(tree, ConstantType(converted))
         case _ =>
 
@@ -3672,13 +3837,22 @@ class Typer extends Namer
         if target <:< pt then
           return readapt(tree.cast(target))
 
+      // if unsafeNulls is enabled, try to strip nulls from Java function calls
+      if Nullables.unsafeNullsEnabled then
+        tree match
+          case _: Apply | _: Select if tree.symbol.is(JavaDefined) =>
+            wtp match
+              case OrNull(wtp1) => return readapt(tree.cast(wtp1))
+              case _ =>
+          case _ =>
+
       def recover(failure: SearchFailureType) =
-        if canDefineFurther(wtp) then readapt(tree)
+        if canDefineFurther(wtp) || canDefineFurther(pt) then readapt(tree)
         else err.typeMismatch(tree, pt, failure)
 
       pt match
         case pt: SelectionProto =>
-          if ctx.gadt.nonEmpty then
+          if ctx.gadt.isNarrowing then
             // try GADT approximation if we're trying to select a member
             // Member lookup cannot take GADTs into account b/c of cache, so we
             // approximate types based on GADT constraints instead. For an example,
@@ -3688,8 +3862,14 @@ class Typer extends Namer
             gadts.println(i"GADT-approximated $wtp ~~ $gadtApprox")
             if pt.isMatchedBy(gadtApprox) then
               gadts.println(i"Member selection healed by GADT approximation")
-              tpd.Typed(tree, TypeTree(gadtApprox))
+              tree.cast(gadtApprox)
             else tree
+          else if tree.tpe.derivesFrom(defn.PairClass) && !defn.isTupleNType(tree.tpe.widenDealias) then
+            // If this is a generic tuple we need to cast it to make the TupleN members accessible.
+            // This only works for generic tuples of known size up to 22.
+            defn.tupleTypes(tree.tpe.widenTermRefExpr, Definitions.MaxTupleArity) match
+              case Some(elems) => tree.cast(defn.tupleType(elems))
+              case None => tree
           else tree // other adaptations for selections are handled in typedSelect
         case _ if ctx.mode.is(Mode.ImplicitsEnabled) && tree.tpe.isValueType =>
           checkConversionsSpecific(pt, tree.srcPos)
@@ -3770,7 +3950,7 @@ class Typer extends Namer
         case ref: TermRef =>
           pt match {
             case pt: FunProto
-            if needsTupledDual(ref, pt) && autoTuplingEnabled =>
+            if needsTupledDual(ref, pt) && Feature.autoTuplingEnabled =>
               adapt(tree, pt.tupledDual, locked)
             case _ =>
               adaptOverloaded(ref)
@@ -3835,9 +4015,15 @@ class Typer extends Namer
 
         // approximate type params with bounds
         def approx = new ApproximatingTypeMap {
+          var alreadyExpanding: List[TypeRef] = Nil
           def apply(tp: Type) = tp.dealias match
             case tp: TypeRef if !tp.symbol.isClass =>
-              expandBounds(tp.info.bounds)
+              if alreadyExpanding contains tp then tp else
+                val saved = alreadyExpanding
+                alreadyExpanding ::= tp
+                val res = expandBounds(tp.info.bounds)
+                alreadyExpanding = saved
+                res
             case _ =>
               mapOver(tp)
         }
@@ -3859,12 +4045,12 @@ class Typer extends Namer
         }
 
         if isDefiniteNotSubtype then
-          // We could check whether `equals` is overriden.
+          // We could check whether `equals` is overridden.
           // Reasons for not doing so:
           // - it complicates the protocol
           // - such code patterns usually implies hidden errors in the code
           // - it's safe/sound to reject the code
-          report.error(TypeMismatch(tree.tpe, pt, "\npattern type is incompatible with expected type"), tree.srcPos)
+          report.error(TypeMismatch(tree.tpe, pt, Some(tree), "\npattern type is incompatible with expected type"), tree.srcPos)
         else
           val cmp =
             untpd.Apply(
@@ -3895,7 +4081,7 @@ class Typer extends Namer
         report.warning(PureExpressionInStatementPosition(original, exprOwner), original.srcPos)
 
   /** Types the body Scala 2 macro declaration `def f = macro ` */
-  private def typedScala2MacroBody(call: untpd.Tree)(using Context): Tree =
+  protected def typedScala2MacroBody(call: untpd.Tree)(using Context): Tree =
     // TODO check that call is to a method with valid signature
     def typedPrefix(tree: untpd.RefTree)(splice: Context ?=> Tree => Tree)(using Context): Tree = {
       tryAlternatively {
diff --git a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala
new file mode 100644
index 000000000000..070bf9fff12f
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala
@@ -0,0 +1,112 @@
+package dotty.tools
+package dotc
+package typer
+
+import core._
+import Phases._
+import Contexts._
+import Symbols._
+import Decorators._
+import ImportInfo.withRootImports
+import parsing.JavaParsers.JavaParser
+import parsing.Parsers.Parser
+import parsing.{Parser => ParserPhase}
+import config.Config
+import config.Printers.{typr, default}
+import util.Stats._
+import util.{ SourcePosition, NoSourcePosition }
+import scala.util.control.NonFatal
+import ast.Trees._
+
+/**
+ *
+ * @param addRootImports Set to false in the REPL. Calling [[ImportInfo.withRootImports]] on the [[Context]]
+ *                       for each [[CompilationUnit]] causes dotty.tools.repl.ScriptedTests to fail.
+ */
+class TyperPhase(addRootImports: Boolean = true) extends Phase {
+
+  override def phaseName: String = TyperPhase.name
+
+  override def description: String = TyperPhase.description
+
+  override def isTyper: Boolean = true
+
+  import ast.tpd
+
+  override def allowsImplicitSearch: Boolean = true
+
+  // Run regardless of parsing errors
+  override def isRunnable(implicit ctx: Context): Boolean = true
+
+  def enterSyms(using Context): Unit = monitor("indexing") {
+    val unit = ctx.compilationUnit
+    ctx.typer.index(unit.untpdTree)
+    typr.println("entered: " + unit.source)
+  }
+
+  def typeCheck(using Context): Unit = monitor("typechecking") {
+    try
+      val unit = ctx.compilationUnit
+      if !unit.suspended then
+        unit.tpdTree = ctx.typer.typedExpr(unit.untpdTree)
+        typr.println("typed: " + unit.source)
+        record("retained untyped trees", unit.untpdTree.treeSize)
+        record("retained typed trees after typer", unit.tpdTree.treeSize)
+        ctx.run.suppressions.reportSuspendedMessages(unit.source)
+    catch
+      case ex: CompilationUnit.SuspendException =>
+  }
+
+  def javaCheck(using Context): Unit = monitor("checking java") {
+    val unit = ctx.compilationUnit
+    if unit.isJava then
+      JavaChecks.check(unit.tpdTree)
+  }
+
+  protected def discardAfterTyper(unit: CompilationUnit)(using Context): Boolean =
+    unit.isJava || unit.suspended
+
+  override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] =
+    val unitContexts =
+      for unit <- units yield
+        val newCtx0 = ctx.fresh.setPhase(this.start).setCompilationUnit(unit)
+        val newCtx = PrepareInlineable.initContext(newCtx0)
+        report.inform(s"typing ${unit.source}")
+        if (addRootImports)
+          newCtx.withRootImports
+        else
+          newCtx
+
+    unitContexts.foreach(enterSyms(using _))
+
+    ctx.base.parserPhase match {
+      case p: ParserPhase =>
+        if p.firstXmlPos.exists && !defn.ScalaXmlPackageClass.exists then
+          report.error(
+            """To support XML literals, your project must depend on scala-xml.
+              |See https://github.com/scala/scala-xml for more information.""".stripMargin,
+            p.firstXmlPos)
+      case _ =>
+    }
+
+    unitContexts.foreach(typeCheck(using _))
+    record("total trees after typer", ast.Trees.ntrees)
+    unitContexts.foreach(javaCheck(using _)) // after typechecking to avoid cycles
+
+    val newUnits = unitContexts.map(_.compilationUnit).filterNot(discardAfterTyper)
+    ctx.run.checkSuspendedUnits(newUnits)
+    newUnits
+
+  def run(using Context): Unit = unsupported("run")
+}
+
+object TyperPhase {
+  val name: String = "typer"
+  val description: String = "type the trees"
+}
+
+@deprecated(message = "FrontEnd has been split into TyperPhase and Parser. Refer to one or the other.")
+object FrontEnd {
+  // For backwards compatibility: some plugins refer to FrontEnd so that they can schedule themselves after it.
+  val name: String = TyperPhase.name
+}
diff --git a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala
index f0ad18ee18c5..8c796cfb6fa9 100644
--- a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala
+++ b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala
@@ -133,7 +133,7 @@ class VarianceChecker(using Context) {
     def apply(status: Option[VarianceError], tp: Type): Option[VarianceError] = trace(s"variance checking $tp of $base at $variance", variances) {
       try
         if (status.isDefined) status
-        else tp.normalized match {
+        else tp match {
           case tp: TypeRef =>
             val sym = tp.symbol
             if (sym.isOneOf(VarianceFlags) && base.isContainedIn(sym.owner)) checkVarianceOfSymbol(sym)
diff --git a/compiler/src/dotty/tools/dotc/util/Attachment.scala b/compiler/src/dotty/tools/dotc/util/Attachment.scala
index 582ff6da0557..8f0e5dd97dfe 100644
--- a/compiler/src/dotty/tools/dotc/util/Attachment.scala
+++ b/compiler/src/dotty/tools/dotc/util/Attachment.scala
@@ -117,7 +117,7 @@ object Attachment {
     }
 
     final def pushAttachment[V](key: Key[V], value: V)(using ctx: Context): Unit = {
-      assert(!hasAttachment(key) || ctx.reporter.errorsReported, s"duplicate attachment for key $key")
+      assert(!hasAttachment(key) || ctx.base.errorsToBeReported, s"duplicate attachment for key $key")
       next = new Link(key, value, next)
     }
 
diff --git a/compiler/src/dotty/tools/dotc/util/Chars.scala b/compiler/src/dotty/tools/dotc/util/Chars.scala
index 986b1fda58ec..4c54dc73459e 100644
--- a/compiler/src/dotty/tools/dotc/util/Chars.scala
+++ b/compiler/src/dotty/tools/dotc/util/Chars.scala
@@ -11,10 +11,10 @@ import java.lang.Character.UPPERCASE_LETTER
 /** Contains constants and classifier methods for characters */
 object Chars {
 
-  final val LF = '\u000A'
-  final val FF = '\u000C'
-  final val CR = '\u000D'
-  final val SU = '\u001A'
+  inline val LF = '\u000A'
+  inline val FF = '\u000C'
+  inline val CR = '\u000D'
+  inline val SU = '\u001A'
 
   /** Convert a character digit to an Int according to given base,
     *  -1 if no success
diff --git a/compiler/src/dotty/tools/dotc/util/CommentParsing.scala b/compiler/src/dotty/tools/dotc/util/CommentParsing.scala
index d8184c0ce5aa..c25660a6225a 100644
--- a/compiler/src/dotty/tools/dotc/util/CommentParsing.scala
+++ b/compiler/src/dotty/tools/dotc/util/CommentParsing.scala
@@ -8,11 +8,11 @@ package dotty.tools.dotc.util
 import scala.collection.mutable
 
 /** The comment parsing in `dotc` is used by both the comment cooking and the
-  * dottydoc tool.
+  * scaladoc tool.
   *
   * The comment cooking is used to expand comments with `@inheritdoc` and
   * `@define` annotations. The rest of the comment is untouched and later
-  * handled by dottydoc.
+  * handled by scaladoc.
   */
 object CommentParsing {
   import Chars._
diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala
index e7406f9ab094..e99754c7267b 100644
--- a/compiler/src/dotty/tools/dotc/util/HashSet.scala
+++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala
@@ -7,6 +7,11 @@ object HashSet:
    */
   inline val DenseLimit = 8
 
+  def from[T](xs: IterableOnce[T]): HashSet[T] =
+    val set = new HashSet[T]()
+    set ++= xs
+    set
+
 /** A hash set that allows some privileged protected access to its internals
  *  @param  initialCapacity  Indicates the initial number of slots in the hash table.
  *                           The actual number of slots is always a power of 2, so the
diff --git a/compiler/src/dotty/tools/dotc/util/MutableSet.scala b/compiler/src/dotty/tools/dotc/util/MutableSet.scala
index bedb079f18ca..6e3ae7628eb6 100644
--- a/compiler/src/dotty/tools/dotc/util/MutableSet.scala
+++ b/compiler/src/dotty/tools/dotc/util/MutableSet.scala
@@ -8,7 +8,7 @@ abstract class MutableSet[T] extends ReadOnlySet[T]:
   def +=(x: T): Unit
 
   /** Like `+=` but return existing element equal to `x` of it exists,
-   *  `x` itself otherwose.
+   *  `x` itself otherwise.
    */
   def put(x: T): T
 
diff --git a/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala b/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala
index 16bd2decd908..4dd897dd082a 100644
--- a/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala
+++ b/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala
@@ -27,7 +27,7 @@ final class ReusableInstance[T <: AnyRef] private (make: => T) {
 }
 
 object ReusableInstance {
-  private final val InitialSize = 4
+  private inline val InitialSize = 4
 
   def apply[T <: AnyRef](make: => T): ReusableInstance[T] = new ReusableInstance[T](make)
 }
diff --git a/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala b/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala
index 437f7e140f68..c7dc8632f194 100644
--- a/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala
+++ b/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala
@@ -6,6 +6,7 @@ import collection.mutable.ListBuffer
  *  It has linear complexity for `apply`, `updated`, and `remove`.
  */
 abstract class SimpleIdentityMap[K <: AnyRef, +V >: Null <: AnyRef] extends (K => V) {
+  final def isEmpty: Boolean = this eq SimpleIdentityMap.myEmpty
   def size: Int
   def apply(k: K): V
   def remove(k: K): SimpleIdentityMap[K, V]
diff --git a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
index 5b3544b894c4..45ee3652fe16 100644
--- a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
+++ b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala
@@ -7,28 +7,46 @@ import collection.mutable
  */
 abstract class SimpleIdentitySet[+Elem <: AnyRef] {
   def size: Int
-  final def isEmpty: Boolean = size == 0
   def + [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[E]
   def - [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[Elem]
   def contains[E >: Elem <: AnyRef](x: E): Boolean
   def foreach(f: Elem => Unit): Unit
   def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean
+  def map[B <: AnyRef](f: Elem => B): SimpleIdentitySet[B]
   def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A
   def toList: List[Elem]
+
+  final def isEmpty: Boolean = size == 0
+
+  def forall[E >: Elem <: AnyRef](p: E => Boolean): Boolean = !exists(!p(_))
+
+  def filter(p: Elem => Boolean): SimpleIdentitySet[Elem] =
+    val z: SimpleIdentitySet[Elem] = SimpleIdentitySet.empty
+    (z /: this)((s, x) => if p(x) then s + x else s)
+
   def ++ [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
     if (this.size == 0) that
     else if (that.size == 0) this
     else ((this: SimpleIdentitySet[E]) /: that)(_ + _)
+
   def -- [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
     if (that.size == 0) this
     else
       ((SimpleIdentitySet.empty: SimpleIdentitySet[E]) /: this) { (s, x) =>
         if (that.contains(x)) s else s + x
       }
-  override def toString: String = toList.mkString("(", ", ", ")")
+  override def toString: String = toList.mkString("{", ", ", "}")
 }
 
 object SimpleIdentitySet {
+
+  def apply[Elem <: AnyRef](elems: Elem*): SimpleIdentitySet[Elem] =
+    elems.foldLeft(empty: SimpleIdentitySet[Elem])(_ + _)
+
+  extension [E <: AnyRef](xs: SimpleIdentitySet[E])
+    def intersect(ys: SimpleIdentitySet[E]): SimpleIdentitySet[E] =
+      xs.filter(ys.contains)
+
   object empty extends SimpleIdentitySet[Nothing] {
     def size: Int = 0
     def + [E <: AnyRef](x: E): SimpleIdentitySet[E] =
@@ -38,6 +56,7 @@ object SimpleIdentitySet {
     def contains[E <: AnyRef](x: E): Boolean = false
     def foreach(f: Nothing => Unit): Unit = ()
     def exists[E <: AnyRef](p: E => Boolean): Boolean = false
+    def map[B <: AnyRef](f: Nothing => B): SimpleIdentitySet[B] = empty
     def /: [A, E <: AnyRef](z: A)(f: (A, E) => A): A = z
     def toList = Nil
   }
@@ -52,6 +71,8 @@ object SimpleIdentitySet {
     def foreach(f: Elem => Unit): Unit = f(x0.asInstanceOf[Elem])
     def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean =
       p(x0.asInstanceOf[E])
+    def map[B <: AnyRef](f: Elem => B): SimpleIdentitySet[B] =
+      Set1(f(x0.asInstanceOf[Elem]))
     def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A =
       f(z, x0.asInstanceOf[E])
     def toList = x0.asInstanceOf[Elem] :: Nil
@@ -69,6 +90,8 @@ object SimpleIdentitySet {
     def foreach(f: Elem => Unit): Unit = { f(x0.asInstanceOf[Elem]); f(x1.asInstanceOf[Elem]) }
     def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean =
       p(x0.asInstanceOf[E]) || p(x1.asInstanceOf[E])
+    def map[B <: AnyRef](f: Elem => B): SimpleIdentitySet[B] =
+      Set2(f(x0.asInstanceOf[Elem]), f(x1.asInstanceOf[Elem]))
     def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A =
       f(f(z, x0.asInstanceOf[E]), x1.asInstanceOf[E])
     def toList = x0.asInstanceOf[Elem] :: x1.asInstanceOf[Elem] :: Nil
@@ -97,6 +120,8 @@ object SimpleIdentitySet {
     }
     def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean =
       p(x0.asInstanceOf[E]) || p(x1.asInstanceOf[E]) || p(x2.asInstanceOf[E])
+    def map[B <: AnyRef](f: Elem => B): SimpleIdentitySet[B] =
+      Set3(f(x0.asInstanceOf[Elem]), f(x1.asInstanceOf[Elem]), f(x2.asInstanceOf[Elem]))
     def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A =
       f(f(f(z, x0.asInstanceOf[E]), x1.asInstanceOf[E]), x2.asInstanceOf[E])
     def toList = x0.asInstanceOf[Elem] :: x1.asInstanceOf[Elem] :: x2.asInstanceOf[Elem] :: Nil
@@ -139,6 +164,8 @@ object SimpleIdentitySet {
     }
     def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean =
       xs.asInstanceOf[Array[E]].exists(p)
+    def map[B <: AnyRef](f: Elem => B): SimpleIdentitySet[B] =
+      SetN(xs.map(x => f(x.asInstanceOf[Elem]).asInstanceOf[AnyRef]))
     def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A =
       xs.asInstanceOf[Array[E]].foldLeft(z)(f)
     def toList: List[Elem] = {
diff --git a/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala b/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala
index ebeb9f700273..42286aef5d31 100644
--- a/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala
+++ b/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala
@@ -22,7 +22,7 @@ class SixteenNibbles(val bits: Long) extends AnyVal {
 }
 
 object SixteenNibbles {
-  final val Width = 4
-  final val Mask = (1 << Width) - 1
+  inline val Width = 4
+  inline val Mask = (1 << Width) - 1
   final val LongMask: Long = Mask.toLong
 }
diff --git a/compiler/src/dotty/tools/dotc/util/SourceFile.scala b/compiler/src/dotty/tools/dotc/util/SourceFile.scala
index 73fd53289a9b..bddcd02e7ab9 100644
--- a/compiler/src/dotty/tools/dotc/util/SourceFile.scala
+++ b/compiler/src/dotty/tools/dotc/util/SourceFile.scala
@@ -144,6 +144,9 @@ class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends
     if lineIndicesCache eq null then
       lineIndicesCache = calculateLineIndicesFromContents()
     lineIndicesCache
+
+  def initialized = lineIndicesCache != null
+
   def setLineIndicesFromLineSizes(sizes: Array[Int]): Unit =
     val lines = sizes.length
     val indices = new Array[Int](lines + 1)
diff --git a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala
index 9bc21fdf408f..7808c9a57acd 100644
--- a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala
+++ b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala
@@ -68,7 +68,7 @@ extends SrcPos, interfaces.SourcePosition, Showable {
     if outer == null || outer == NoSourcePosition then this else outer.outermost
 
   /** Inner most position that is contained within the `outermost` position.
-   *  Most precise position that that comes from the call site.
+   *  Most precise position that comes from the call site.
    */
   def nonInlined: SourcePosition = {
     val om = outermost
diff --git a/compiler/src/dotty/tools/dotc/util/Spans.scala b/compiler/src/dotty/tools/dotc/util/Spans.scala
index c21070f79e95..0a28c367ff29 100644
--- a/compiler/src/dotty/tools/dotc/util/Spans.scala
+++ b/compiler/src/dotty/tools/dotc/util/Spans.scala
@@ -15,9 +15,9 @@ import language.implicitConversions
  */
 object Spans {
 
-  private final val StartEndBits = 26
-  private final val StartEndMask = (1L << StartEndBits) - 1
-  private final val SyntheticPointDelta = (1 << (64 - StartEndBits * 2)) - 1
+  private inline val StartEndBits = 26
+  private inline val StartEndMask = (1L << StartEndBits) - 1
+  private inline val SyntheticPointDelta = (1 << (64 - StartEndBits * 2)) - 1
 
   /** The maximal representable offset in a span */
   final val MaxOffset = StartEndMask.toInt
@@ -85,6 +85,7 @@ object Spans {
       || containsInner(this, that.end)
       || containsInner(that, this.start)
       || containsInner(that, this.end)
+      || this.start == that.start && this.end == that.end   // exact match in one point
       )
     }
 
diff --git a/compiler/src/dotty/tools/dotc/util/Stats.scala b/compiler/src/dotty/tools/dotc/util/Stats.scala
index 684083fa77ba..60465e519452 100644
--- a/compiler/src/dotty/tools/dotc/util/Stats.scala
+++ b/compiler/src/dotty/tools/dotc/util/Stats.scala
@@ -9,7 +9,7 @@ import collection.mutable
 
 @sharable object Stats {
 
-  final val enabled = false
+  inline val enabled = false
 
   var monitored: Boolean = false
 
@@ -43,7 +43,7 @@ import collection.mutable
     else op
   }
 
-  final val GroupChar = '/'
+  inline val GroupChar = '/'
 
   /** Aggregate all counts of all keys with a common prefix, followed by `:` */
   private def aggregate(): Unit = {
@@ -62,6 +62,7 @@ import collection.mutable
         aggregate()
         println()
         println(hits.toList.sortBy(_._2).map{ case (x, y) => s"$x -> $y" } mkString "\n")
+        hits.clear()
       }
     }
     else op
diff --git a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala
index 265f6e78cad2..3dc5761c0244 100644
--- a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala
+++ b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala
@@ -1,10 +1,10 @@
-/** Taken from the original implementation of WeakHashSet in scala-reflect
+/** Adapted from the original implementation of WeakHashSet in scala-reflect
  */
 package dotty.tools.dotc.util
 
 import java.lang.ref.{ReferenceQueue, WeakReference}
 
-import scala.annotation.tailrec
+import scala.annotation.{ constructorOnly, tailrec }
 import scala.collection.mutable
 
 /**
@@ -17,12 +17,10 @@ import scala.collection.mutable
  * This set implementation is not in general thread safe without external concurrency control. However it behaves
  * properly when GC concurrently collects elements in this set.
  */
-final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) extends mutable.Set[A] {
+abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Double = 0.5) extends MutableSet[A] {
 
   import WeakHashSet._
 
-  def this() = this(initialCapacity = WeakHashSet.defaultInitialCapacity, loadFactor = WeakHashSet.defaultLoadFactor)
-
   type This = WeakHashSet[A]
 
   /**
@@ -30,12 +28,12 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e
    * the removeStaleEntries() method works through the queue to remove
    * stale entries from the table
    */
-  private val queue = new ReferenceQueue[A]
+  protected val queue = new ReferenceQueue[A]
 
   /**
    * the number of elements in this set
    */
-  private var count = 0
+  protected var count = 0
 
   /**
    * from a specified initial capacity compute the capacity we'll use as being the next
@@ -52,40 +50,26 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e
   /**
    * the underlying table of entries which is an array of Entry linked lists
    */
-  private var table = new Array[Entry[A]](computeCapacity)
+  protected var table = new Array[Entry[A]](computeCapacity)
 
   /**
    * the limit at which we'll increase the size of the hash table
    */
-  private var threshold = computeThreshold
+  protected var threshold = computeThreshold
 
   private def computeThreshold: Int = (table.size * loadFactor).ceil.toInt
 
-  def get(elem: A): Option[A] = Option(findEntry(elem))
+  protected def hash(key: A): Int
+  protected def isEqual(x: A, y: A): Boolean = x.equals(y)
 
-  /**
-   * find the bucket associated with an element's hash code
-   */
-  private def bucketFor(hash: Int): Int = {
-    // spread the bits around to try to avoid accidental collisions using the
-    // same algorithm as java.util.HashMap
-    var h = hash
-    h ^= h >>> 20 ^ h >>> 12
-    h ^= h >>> 7 ^ h >>> 4
-
-    // this is finding h % table.length, but takes advantage of the
-    // fact that table length is a power of 2,
-    // if you don't do bit flipping in your head, if table.length
-    // is binary 100000.. (with n 0s) then table.length - 1
-    // is 1111.. with n 1's.
-    // In other words this masks on the last n bits in the hash
-    h & (table.length - 1)
-  }
+  /** Turn hashcode `x` into a table index */
+  protected def index(x: Int): Int = x & (table.length - 1)
 
   /**
    * remove a single entry from a linked list in a given bucket
    */
   private def remove(bucket: Int, prevEntry: Entry[A], entry: Entry[A]): Unit = {
+    Stats.record(statsItem("remove"))
     prevEntry match {
       case null => table(bucket) = entry.tail
       case _ => prevEntry.tail = entry.tail
@@ -96,14 +80,14 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e
   /**
    * remove entries associated with elements that have been gc'ed
    */
-  private def removeStaleEntries(): Unit = {
+  protected def removeStaleEntries(): Unit = {
     def poll(): Entry[A] = queue.poll().asInstanceOf[Entry[A]]
 
     @tailrec
     def queueLoop(): Unit = {
       val stale = poll()
       if (stale != null) {
-        val bucket = bucketFor(stale.hash)
+        val bucket = index(stale.hash)
 
         @tailrec
         def linkedListLoop(prevEntry: Entry[A], entry: Entry[A]): Unit = if (stale eq entry) remove(bucket, prevEntry, entry)
@@ -121,7 +105,8 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e
   /**
    * Double the size of the internal table
    */
-  private def resize(): Unit = {
+  protected def resize(): Unit = {
+    Stats.record(statsItem("resize"))
     val oldTable = table
     table = new Array[Entry[A]](oldTable.size * 2)
     threshold = computeThreshold
@@ -132,7 +117,7 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e
       def linkedListLoop(entry: Entry[A]): Unit = entry match {
         case null => ()
         case _ =>
-          val bucket = bucketFor(entry.hash)
+          val bucket = index(entry.hash)
           val oldNext = entry.tail
           entry.tail = table(bucket)
           table(bucket) = entry
@@ -145,103 +130,76 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e
     tableLoop(0)
   }
 
-  def contains(elem: A): Boolean = findEntry(elem) ne null
-
-  // from scala.reflect.internal.Set, find an element or null if it isn't contained
-  def findEntry(elem: A): A = elem match {
+  def lookup(elem: A): A | Null = elem match {
     case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
     case _    =>
+      Stats.record(statsItem("lookup"))
       removeStaleEntries()
-      val hash = elem.hashCode
-      val bucket = bucketFor(hash)
+      val bucket = index(hash(elem))
 
       @tailrec
       def linkedListLoop(entry: Entry[A]): A = entry match {
         case null                    => null.asInstanceOf[A]
         case _                       =>
           val entryElem = entry.get
-          if (elem.equals(entryElem)) entryElem
+          if (isEqual(elem, entryElem)) entryElem
           else linkedListLoop(entry.tail)
       }
 
       linkedListLoop(table(bucket))
   }
-  // add an element to this set unless it's already in there and return the element
-  def findEntryOrUpdate(elem: A): A = elem match {
+
+  protected def addEntryAt(bucket: Int, elem: A, elemHash: Int, oldHead: Entry[A]): A = {
+    Stats.record(statsItem("addEntryAt"))
+    table(bucket) = new Entry(elem, elemHash, oldHead, queue)
+    count += 1
+    if (count > threshold) resize()
+    elem
+  }
+
+  def put(elem: A): A = elem match {
     case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
     case _    =>
+      Stats.record(statsItem("put"))
       removeStaleEntries()
-      val hash = elem.hashCode
-      val bucket = bucketFor(hash)
+      val h = hash(elem)
+      val bucket = index(h)
       val oldHead = table(bucket)
 
-      def add() = {
-        table(bucket) = new Entry(elem, hash, oldHead, queue)
-        count += 1
-        if (count > threshold) resize()
-        elem
-      }
-
       @tailrec
       def linkedListLoop(entry: Entry[A]): A = entry match {
-        case null                    => add()
+        case null                    => addEntryAt(bucket, elem, h, oldHead)
         case _                       =>
           val entryElem = entry.get
-          if (elem.equals(entryElem)) entryElem
+          if (isEqual(elem, entryElem)) entryElem
           else linkedListLoop(entry.tail)
       }
 
       linkedListLoop(oldHead)
   }
 
-  // add an element to this set unless it's already in there and return this set
-  override def addOne(elem: A): this.type = elem match {
-    case null => throw new NullPointerException("WeakHashSet cannot hold nulls")
-    case _    =>
-      removeStaleEntries()
-      val hash = elem.hashCode
-      val bucket = bucketFor(hash)
-      val oldHead = table(bucket)
+  def +=(elem: A): Unit = put(elem)
 
-      def add(): Unit = {
-        table(bucket) = new Entry(elem, hash, oldHead, queue)
-        count += 1
-        if (count > threshold) resize()
-      }
-
-      @tailrec
-      def linkedListLoop(entry: Entry[A]): Unit = entry match {
-        case null                        => add()
-        case _ if elem.equals(entry.get) => ()
-        case _                           => linkedListLoop(entry.tail)
-      }
-
-      linkedListLoop(oldHead)
-      this
-  }
-
-  // remove an element from this set and return this set
-  override def subtractOne(elem: A): this.type = elem match {
-    case null => this
+  def -=(elem: A): Unit = elem match {
+    case null =>
     case _ =>
+      Stats.record(statsItem("-="))
       removeStaleEntries()
-      val bucket = bucketFor(elem.hashCode)
+      val bucket = index(hash(elem))
 
 
 
       @tailrec
       def linkedListLoop(prevEntry: Entry[A], entry: Entry[A]): Unit = entry match {
         case null => ()
-        case _ if elem.equals(entry.get) => remove(bucket, prevEntry, entry)
+        case _ if isEqual(elem, entry.get) => remove(bucket, prevEntry, entry)
         case _ => linkedListLoop(entry, entry.tail)
       }
 
       linkedListLoop(null, table(bucket))
-      this
   }
 
-  // empty this set
-  override def clear(): Unit = {
+  def clear(): Unit = {
     table = new Array[Entry[A]](table.size)
     threshold = computeThreshold
     count = 0
@@ -251,21 +209,11 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e
     queueLoop()
   }
 
-  // true if this set is empty
-  override def empty: This = new WeakHashSet[A](initialCapacity, loadFactor)
-
-  // the number of elements in this set
-  override def size: Int = {
+  def size: Int = {
     removeStaleEntries()
     count
   }
 
-  override def isEmpty: Boolean = size == 0
-  override def foreach[U](f: A => U): Unit = iterator foreach f
-
-  // It has the `()` because iterator runs `removeStaleEntries()`
-  override def toList(): List[A] = iterator.toList
-
   // Iterator over all the elements in this set in no particular order
   override def iterator: Iterator[A] = {
     removeStaleEntries()
@@ -318,6 +266,12 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e
     }
   }
 
+  protected def statsItem(op: String): String = {
+    val prefix = "WeakHashSet."
+    val suffix = getClass.getSimpleName
+    s"$prefix$op $suffix"
+  }
+
   /**
    * Diagnostic information about the internals of this set. Not normally
    * needed by ordinary code, but may be useful for diagnosing performance problems
@@ -338,9 +292,9 @@ final class WeakHashSet[A <: AnyRef](initialCapacity: Int, loadFactor: Double) e
           assert(entry.get != null, s"$entry had a null value indicated that gc activity was happening during diagnostic validation or that a null value was inserted")
           computedCount += 1
           val cachedHash = entry.hash
-          val realHash = entry.get.hashCode
+          val realHash = hash(entry.get)
           assert(cachedHash == realHash, s"for $entry cached hash was $cachedHash but should have been $realHash")
-          val computedBucket = bucketFor(realHash)
+          val computedBucket = index(realHash)
           assert(computedBucket == bucket, s"for $entry the computed bucket was $computedBucket but should have been $bucket")
 
           entry = entry.tail
@@ -386,11 +340,6 @@ object WeakHashSet {
    * A single entry in a WeakHashSet. It's a WeakReference plus a cached hash code and
    * a link to the next Entry in the same bucket
    */
-  private class Entry[A](element: A, val hash:Int, var tail: Entry[A], queue: ReferenceQueue[A]) extends WeakReference[A](element, queue)
-
-  private final val defaultInitialCapacity = 16
-  private final val defaultLoadFactor = .75
+  class Entry[A](@constructorOnly element: A, val hash:Int, var tail: Entry[A], @constructorOnly queue: ReferenceQueue[A]) extends WeakReference[A](element, queue)
 
-  def apply[A <: AnyRef](initialCapacity: Int = defaultInitialCapacity, loadFactor: Double = defaultLoadFactor): WeakHashSet[A] =
-    new WeakHashSet(initialCapacity, loadFactor)
 }
diff --git a/compiler/src/dotty/tools/io/ClassPath.scala b/compiler/src/dotty/tools/io/ClassPath.scala
index d7218108e944..eeefabddd19f 100644
--- a/compiler/src/dotty/tools/io/ClassPath.scala
+++ b/compiler/src/dotty/tools/io/ClassPath.scala
@@ -132,7 +132,8 @@ object ClassPath {
       dir.list.filter(x => filt(x.name) && (x.isDirectory || isJarOrZip(x))).map(_.path).toList
 
     if (pattern == "*") lsDir(Directory("."))
-    else if (pattern.endsWith(wildSuffix)) lsDir(Directory(pattern dropRight 2))
+    // On Windows the JDK supports forward slash or backslash in classpath entries
+    else if (pattern.endsWith(wildSuffix) || pattern.endsWith("/*")) lsDir(Directory(pattern dropRight 2))
     else if (pattern.contains('*')) {
       try {
         val regexp = ("^" + pattern.replace("""\*""", """.*""") + "$").r
diff --git a/compiler/src/dotty/tools/io/Jar.scala b/compiler/src/dotty/tools/io/Jar.scala
index 063de2ca8137..6baaec175b14 100644
--- a/compiler/src/dotty/tools/io/Jar.scala
+++ b/compiler/src/dotty/tools/io/Jar.scala
@@ -72,6 +72,7 @@ class Jar(file: File) {
     case null   => errorFn("No such entry: " + entry) ; null
     case x      => x
   }
+
   override def toString: String = "" + file
 }
 
diff --git a/compiler/src/dotty/tools/io/JarArchive.scala b/compiler/src/dotty/tools/io/JarArchive.scala
index f35493299cd6..80af5ef390b5 100644
--- a/compiler/src/dotty/tools/io/JarArchive.scala
+++ b/compiler/src/dotty/tools/io/JarArchive.scala
@@ -10,6 +10,8 @@ import scala.jdk.CollectionConverters._
  */
 class JarArchive private (root: Directory) extends PlainDirectory(root) {
   def close(): Unit = jpath.getFileSystem().close()
+  def allFileNames(): Iterator[String] = 
+    java.nio.file.Files.walk(jpath).iterator().asScala.map(_.toString)
 }
 
 object JarArchive {
diff --git a/compiler/src/dotty/tools/repl/AbstractFileClassLoader.scala b/compiler/src/dotty/tools/repl/AbstractFileClassLoader.scala
index a3f4047fb646..7263758b60c5 100644
--- a/compiler/src/dotty/tools/repl/AbstractFileClassLoader.scala
+++ b/compiler/src/dotty/tools/repl/AbstractFileClassLoader.scala
@@ -18,7 +18,7 @@ import io.AbstractFile
 import java.net.{URL, URLConnection, URLStreamHandler}
 import java.util.Collections
 
-class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader) extends ClassLoader(parent):
+class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) extends ClassLoader(parent):
   private def findAbstractFile(name: String) = root.lookupPath(name.split('/').toIndexedSeq, directory = false)
 
   override protected def findResource(name: String) =
diff --git a/compiler/src/dotty/tools/repl/JLineTerminal.scala b/compiler/src/dotty/tools/repl/JLineTerminal.scala
index 5f43a7483841..807ae2bf5eec 100644
--- a/compiler/src/dotty/tools/repl/JLineTerminal.scala
+++ b/compiler/src/dotty/tools/repl/JLineTerminal.scala
@@ -28,7 +28,7 @@ final class JLineTerminal extends java.io.Closeable {
   private def blue(str: String)(using Context) =
     if (ctx.settings.color.value != "never") Console.BLUE + str + Console.RESET
     else str
-  private def prompt(using Context)        = blue("scala> ")
+  private def prompt(using Context)        = blue("\nscala> ")
   private def newLinePrompt(using Context) = blue("     | ")
 
   /** Blockingly read line from `System.in`
diff --git a/compiler/src/dotty/tools/repl/Main.scala b/compiler/src/dotty/tools/repl/Main.scala
index 127ccd10b467..7eb906edc586 100644
--- a/compiler/src/dotty/tools/repl/Main.scala
+++ b/compiler/src/dotty/tools/repl/Main.scala
@@ -1,6 +1,7 @@
 package dotty.tools.repl
 
 /** Main entry point to the REPL */
+// To test, run bin/scala
 object Main {
   def main(args: Array[String]): Unit =
     new ReplDriver(args).tryRunning
diff --git a/compiler/src/dotty/tools/repl/ParseResult.scala b/compiler/src/dotty/tools/repl/ParseResult.scala
index 352c568533c4..5d414ecf9b34 100644
--- a/compiler/src/dotty/tools/repl/ParseResult.scala
+++ b/compiler/src/dotty/tools/repl/ParseResult.scala
@@ -7,7 +7,7 @@ import dotc.core.Contexts._
 import dotc.core.StdNames.str
 import dotc.parsing.Parsers.Parser
 import dotc.parsing.Tokens
-import dotc.reporting.Diagnostic
+import dotc.reporting.{Diagnostic, StoreReporter}
 import dotc.util.SourceFile
 
 import scala.annotation.internal.sharable
@@ -16,7 +16,7 @@ import scala.annotation.internal.sharable
 sealed trait ParseResult
 
 /** An error free parsing resulting in a list of untyped trees */
-case class Parsed(source: SourceFile, trees: List[untpd.Tree]) extends ParseResult
+case class Parsed(source: SourceFile, trees: List[untpd.Tree], reporter: StoreReporter) extends ParseResult
 
 /** A parsing result containing syntax `errors` */
 case class SyntaxErrors(sourceCode: String,
@@ -80,10 +80,16 @@ case object Imports extends Command {
   val command: String = ":imports"
 }
 
+case class Settings(arg: String) extends Command
+object Settings {
+  val command: String = ":settings"
+}
+
 /** Reset the session to the initial state from when the repl program was
  *  started
  */
-case object Reset extends Command {
+case class Reset(arg: String) extends Command
+object Reset {
   val command: String = ":reset"
 }
 
@@ -105,7 +111,8 @@ case object Help extends Command {
       |:type        evaluate the type of the given expression
       |:doc         print the documentation for the given expression
       |:imports                 show import history
-      |:reset                   reset the repl to its initial state, forgetting all session entries
+      |:reset [options]         reset the repl to its initial state, forgetting all session entries
+      |:settings       update compiler options, if possible
     """.stripMargin
 }
 
@@ -124,11 +131,12 @@ object ParseResult {
     Quit.command -> (_ => Quit),
     Quit.alias -> (_ => Quit),
     Help.command -> (_  => Help),
-    Reset.command -> (_  => Reset),
+    Reset.command -> (arg  => Reset(arg)),
     Imports.command -> (_  => Imports),
     Load.command -> (arg => Load(arg)),
     TypeOf.command -> (arg => TypeOf(arg)),
-    DocOf.command -> (arg => DocOf(arg))
+    DocOf.command -> (arg => DocOf(arg)),
+    Settings.command -> (arg => Settings(arg)),
   )
 
   def apply(source: SourceFile)(implicit state: State): ParseResult = {
@@ -154,7 +162,7 @@ object ParseResult {
               reporter.removeBufferedMessages,
               stats)
           else
-            Parsed(source, stats)
+            Parsed(source, stats, reporter)
         }
     }
   }
diff --git a/compiler/src/dotty/tools/repl/Rendering.scala b/compiler/src/dotty/tools/repl/Rendering.scala
index e07cb867062d..bb5496f0cc8b 100644
--- a/compiler/src/dotty/tools/repl/Rendering.scala
+++ b/compiler/src/dotty/tools/repl/Rendering.scala
@@ -31,21 +31,16 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None) {
 
   private val MaxStringElements: Int = 1000  // no need to mkString billions of elements
 
-  /** A `MessageRenderer` for the REPL without file positions */
-  private val messageRenderer = new MessageRendering {
-    override def posStr(pos: SourcePosition, diagnosticLevel: String, message: Message)(using Context): String = ""
-  }
-
-  private var myClassLoader: ClassLoader = _
+  private var myClassLoader: AbstractFileClassLoader = _
 
   private var myReplStringOf: Object => String = _
 
 
   /** Class loader used to load compiled code */
   private[repl] def classLoader()(using Context) =
-    if (myClassLoader != null) myClassLoader
+    if (myClassLoader != null && myClassLoader.root == ctx.settings.outputDir.value) myClassLoader
     else {
-      val parent = parentClassLoader.getOrElse {
+      val parent = Option(myClassLoader).orElse(parentClassLoader).getOrElse {
         val compilerClasspath = ctx.platform.classPath(using ctx).asURLs
         // We can't use the system classloader as a parent because it would
         // pollute the user classpath with everything passed to the JVM
@@ -62,11 +57,21 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None) {
         // We need to use the ScalaRunTime class coming from the scala-library
         // on the user classpath, and not the one available in the current
         // classloader, so we use reflection instead of simply calling
-        // `ScalaRunTime.replStringOf`.
+        // `ScalaRunTime.replStringOf`. Probe for new API without extraneous newlines.
+        // For old API, try to clean up extraneous newlines by stripping suffix and maybe prefix newline.
         val scalaRuntime = Class.forName("scala.runtime.ScalaRunTime", true, myClassLoader)
-        val meth = scalaRuntime.getMethod("replStringOf", classOf[Object], classOf[Int])
-
-        (value: Object) => meth.invoke(null, value, Integer.valueOf(MaxStringElements)).asInstanceOf[String]
+        val renderer = "stringOf"  // was: replStringOf
+        try {
+          val meth = scalaRuntime.getMethod(renderer, classOf[Object], classOf[Int], classOf[Boolean])
+          val truly = java.lang.Boolean.TRUE
+
+          (value: Object) => meth.invoke(null, value, Integer.valueOf(MaxStringElements), truly).asInstanceOf[String]
+        } catch {
+          case _: NoSuchMethodException =>
+            val meth = scalaRuntime.getMethod(renderer, classOf[Object], classOf[Int])
+
+            (value: Object) => meth.invoke(null, value, Integer.valueOf(MaxStringElements)).asInstanceOf[String]
+        }
       }
       myClassLoader
     }
@@ -88,7 +93,8 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None) {
   private[repl] def replStringOf(value: Object)(using Context): String = {
     assert(myReplStringOf != null,
       "replStringOf should only be called on values creating using `classLoader()`, but `classLoader()` has not been called so far")
-    truncate(myReplStringOf(value))
+    val res = myReplStringOf(value)
+    if res == null then "null // non-null reference has null-valued toString" else truncate(res)
   }
 
   /** Load the value of the symbol using reflection.
@@ -102,26 +108,18 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None) {
       resObj
         .getDeclaredMethods.find(_.getName == sym.name.encode.toString)
         .map(_.invoke(null))
-    val string = value.map(replStringOf(_).trim)
+    val string = value.map(replStringOf(_))
     if (!sym.is(Flags.Method) && sym.info == defn.UnitType)
       None
     else
       string.map { s =>
-        if (s.startsWith(str.REPL_SESSION_LINE))
-          s.drop(str.REPL_SESSION_LINE.length).dropWhile(c => c.isDigit || c == '$')
+        if (s.startsWith(REPL_WRAPPER_NAME_PREFIX))
+          s.drop(REPL_WRAPPER_NAME_PREFIX.length).dropWhile(c => c.isDigit || c == '$')
         else
           s
       }
   }
 
-  /** Formats errors using the `messageRenderer` */
-  def formatError(dia: Diagnostic)(implicit state: State): Diagnostic =
-    new Diagnostic(
-      messageRenderer.messageAndPos(dia.msg, dia.pos, messageRenderer.diagnosticLevel(dia))(using state.context),
-      dia.pos,
-      dia.level
-    )
-
   def renderTypeDef(d: Denotation)(using Context): Diagnostic =
     infoDiagnostic("// defined " ++ d.symbol.showUser, d)
 
@@ -156,11 +154,14 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None) {
     val cause = ite.getCause match
       case e: ExceptionInInitializerError => e.getCause
       case e => e
-    def isWrapperCode(ste: StackTraceElement) =
-      ste.getClassName == d.symbol.owner.name.show
+    // detect
+    //at repl$.rs$line$2$.(rs$line$2:1)
+    //at repl$.rs$line$2.res1(rs$line$2)
+    def isWrapperInitialization(ste: StackTraceElement) =
+      ste.getClassName.startsWith(REPL_WRAPPER_NAME_PREFIX)  // d.symbol.owner.name.show is simple name
       && (ste.getMethodName == nme.STATIC_CONSTRUCTOR.show || ste.getMethodName == nme.CONSTRUCTOR.show)
 
-    cause.formatStackTracePrefix(!isWrapperCode(_))
+    cause.formatStackTracePrefix(!isWrapperInitialization(_))
   end renderError
 
   private def infoDiagnostic(msg: String, d: Denotation)(using Context): Diagnostic =
@@ -169,6 +170,7 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None) {
 }
 
 object Rendering {
+  final val REPL_WRAPPER_NAME_PREFIX = str.REPL_SESSION_LINE
 
   extension (s: Symbol)
     def showUser(using Context): String = {
diff --git a/compiler/src/dotty/tools/repl/ReplCompillationUnit.scala b/compiler/src/dotty/tools/repl/ReplCompilationUnit.scala
similarity index 100%
rename from compiler/src/dotty/tools/repl/ReplCompillationUnit.scala
rename to compiler/src/dotty/tools/repl/ReplCompilationUnit.scala
diff --git a/compiler/src/dotty/tools/repl/ReplCompiler.scala b/compiler/src/dotty/tools/repl/ReplCompiler.scala
index e81ea47cfff9..9249172b5d4d 100644
--- a/compiler/src/dotty/tools/repl/ReplCompiler.scala
+++ b/compiler/src/dotty/tools/repl/ReplCompiler.scala
@@ -14,6 +14,7 @@ import dotty.tools.dotc.core.Symbols._
 import dotty.tools.dotc.reporting.Diagnostic
 import dotty.tools.dotc.transform.{PostTyper, Staging}
 import dotty.tools.dotc.typer.ImportInfo._
+import dotty.tools.dotc.typer.TyperPhase
 import dotty.tools.dotc.util.Spans._
 import dotty.tools.dotc.util.{ParsedComment, SourceFile}
 import dotty.tools.dotc.{CompilationUnit, Compiler, Run}
@@ -32,37 +33,41 @@ import scala.collection.mutable
 class ReplCompiler extends Compiler {
 
   override protected def frontendPhases: List[List[Phase]] = List(
-    List(new REPLFrontEnd),
+    List(new TyperPhase(addRootImports = false)),
     List(new CollectTopLevelImports),
     List(new PostTyper),
   )
 
-  def newRun(initCtx: Context, state: State): Run = new Run(this, initCtx) {
-
-    /** Import previous runs and user defined imports */
-    override protected def rootContext(using Context): Context = {
-      def importContext(imp: tpd.Import)(using Context) =
-        ctx.importContext(imp, imp.symbol)
-
-      def importPreviousRun(id: Int)(using Context) = {
-        // we first import the wrapper object id
-        val path = nme.EMPTY_PACKAGE ++ "." ++ objectNames(id)
-        val ctx0 = ctx.fresh
-          .setNewScope
-          .withRootImports(RootRef(() => requiredModuleRef(path)) :: Nil)
-
-        // then its user defined imports
-        val imports = state.imports.getOrElse(id, Nil)
-        if imports.isEmpty then ctx0
-        else imports.foldLeft(ctx0.fresh.setNewScope)((ctx, imp) =>
-          importContext(imp)(using ctx))
-      }
+  def newRun(initCtx: Context, state: State): Run =
+    val run = new Run(this, initCtx) {
+      /** Import previous runs and user defined imports */
+      override protected def rootContext(using Context): Context = {
+        def importContext(imp: tpd.Import)(using Context) =
+          ctx.importContext(imp, imp.symbol)
+
+        def importPreviousRun(id: Int)(using Context) = {
+          // we first import the wrapper object id
+          val path = nme.EMPTY_PACKAGE ++ "." ++ objectNames(id)
+          val ctx0 = ctx.fresh
+            .setNewScope
+            .withRootImports(RootRef(() => requiredModuleRef(path)) :: Nil)
+
+          // then its user defined imports
+          val imports = state.imports.getOrElse(id, Nil)
+          if imports.isEmpty then ctx0
+          else imports.foldLeft(ctx0.fresh.setNewScope)((ctx, imp) =>
+            importContext(imp)(using ctx))
+        }
 
-      val rootCtx = super.rootContext.withRootImports
-      (1 to state.objectIndex).foldLeft(rootCtx)((ctx, id) =>
-        importPreviousRun(id)(using ctx))
+        val rootCtx = super.rootContext.fresh
+          .setOwner(defn.EmptyPackageClass)
+          .withRootImports
+        (1 to state.objectIndex).foldLeft(rootCtx)((ctx, id) =>
+          importPreviousRun(id)(using ctx))
+      }
     }
-  }
+    run.suppressions.initSuspendedMessages(state.context.run)
+    run
 
   private val objectNames = mutable.Map.empty[Int, TermName]
 
@@ -83,6 +88,17 @@ class ReplCompiler extends Compiler {
     var valIdx = state.valIndex
     val defs = new mutable.ListBuffer[Tree]
 
+    /** If the user inputs a definition whose name is of the form REPL_RES_PREFIX and a number,
+     *  such as `val res9 = 1`, we bump `valIdx` to avoid name clashes.  lampepfl/dotty#3536 */
+    def maybeBumpValIdx(tree: Tree): Unit = tree match
+      case apply: Apply   => for a <- apply.args  do maybeBumpValIdx(a)
+      case tuple: Tuple   => for t <- tuple.trees do maybeBumpValIdx(t)
+      case patDef: PatDef => for p <- patDef.pats do maybeBumpValIdx(p)
+      case tree: NameTree => tree.name.show.stripPrefix(str.REPL_RES_PREFIX).toIntOption match
+        case Some(n) if n >= valIdx => valIdx = n + 1
+        case _                      =>
+      case _              =>
+
     flattened.foreach {
       case expr @ Assign(id: Ident, _) =>
         // special case simple reassignment (e.g. x = 3)
@@ -96,6 +112,7 @@ class ReplCompiler extends Compiler {
         val vd = ValDef(resName, TypeTree(), expr).withSpan(expr.span)
         defs += vd
       case other =>
+        maybeBumpValIdx(other)
         defs += other
     }
 
@@ -148,6 +165,7 @@ class ReplCompiler extends Compiler {
   private def runCompilationUnit(unit: CompilationUnit, state: State): Result[(CompilationUnit, State)] = {
     val ctx = state.context
     ctx.run.compileUnits(unit :: Nil)
+    ctx.run.printSummary() // this outputs "2 errors found" like normal - but we might decide that's needlessly noisy for the REPL
 
     if (!ctx.reporter.hasErrors) (unit, state).result
     else ctx.reporter.removeBufferedMessages(using ctx).errors
@@ -235,7 +253,7 @@ class ReplCompiler extends Compiler {
       }
 
       ParseResult(sourceFile)(state) match {
-        case Parsed(_, trees) =>
+        case Parsed(_, trees, _) =>
           wrap(trees).result
         case SyntaxErrors(_, reported, trees) =>
           if (errorsAllowed) wrap(trees).result
diff --git a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala
index 7abaa86a77dc..f69a7ca142fb 100644
--- a/compiler/src/dotty/tools/repl/ReplDriver.scala
+++ b/compiler/src/dotty/tools/repl/ReplDriver.scala
@@ -5,25 +5,31 @@ import java.nio.charset.StandardCharsets
 
 import dotty.tools.dotc.ast.Trees._
 import dotty.tools.dotc.ast.{tpd, untpd}
+import dotty.tools.dotc.config.CommandLineParser.tokenize
+import dotty.tools.dotc.config.Properties.{javaVersion, javaVmName, simpleVersionString}
 import dotty.tools.dotc.core.Contexts._
+import dotty.tools.dotc.core.Decorators._
 import dotty.tools.dotc.core.Phases.{unfusedPhases, typerPhase}
 import dotty.tools.dotc.core.Denotations.Denotation
 import dotty.tools.dotc.core.Flags._
 import dotty.tools.dotc.core.Mode
 import dotty.tools.dotc.core.NameKinds.SimpleNameKind
+import dotty.tools.dotc.core.NameKinds.DefaultGetterName
 import dotty.tools.dotc.core.NameOps._
 import dotty.tools.dotc.core.Names.Name
 import dotty.tools.dotc.core.StdNames._
 import dotty.tools.dotc.core.Symbols.{Symbol, defn}
+import dotty.tools.dotc.interfaces
 import dotty.tools.dotc.interactive.Completion
 import dotty.tools.dotc.printing.SyntaxHighlighting
-import dotty.tools.dotc.reporting.MessageRendering
+import dotty.tools.dotc.reporting.{ConsoleReporter, MessageRendering, StoreReporter}
 import dotty.tools.dotc.reporting.{Message, Diagnostic}
 import dotty.tools.dotc.util.Spans.Span
 import dotty.tools.dotc.util.{SourceFile, SourcePosition}
 import dotty.tools.dotc.{CompilationUnit, Driver}
 import dotty.tools.dotc.config.CompilerCommand
 import dotty.tools.io._
+import dotty.tools.runner.ScalaClassLoader.*
 import org.jline.reader._
 
 import scala.annotation.tailrec
@@ -57,7 +63,7 @@ case class State(objectIndex: Int,
 /** Main REPL instance, orchestrating input, compilation and presentation */
 class ReplDriver(settings: Array[String],
                  out: PrintStream = Console.out,
-                 classLoader: Option[ClassLoader] = None) extends Driver {
+                 classLoader: Option[ClassLoader] = None) extends Driver:
 
   /** Overridden to `false` in order to not have to give sources on the
    *  commandline
@@ -65,14 +71,21 @@ class ReplDriver(settings: Array[String],
   override def sourcesRequired: Boolean = false
 
   /** Create a fresh and initialized context with IDE mode enabled */
-  private def initialCtx = {
-    val rootCtx = initCtx.fresh.addMode(Mode.ReadPositions | Mode.Interactive | Mode.ReadComments)
+  private def initialCtx(settings: List[String]) = {
+    val rootCtx = initCtx.fresh.addMode(Mode.ReadPositions | Mode.Interactive)
     rootCtx.setSetting(rootCtx.settings.YcookComments, true)
+    rootCtx.setSetting(rootCtx.settings.YreadComments, true)
+    setupRootCtx(this.settings ++ settings, rootCtx)
+  }
+
+  private def setupRootCtx(settings: Array[String], rootCtx: Context) = {
     setup(settings, rootCtx) match
-      case Some((files, ictx)) =>
+      case Some((files, ictx)) => inContext(ictx) {
         shouldStart = true
-        ictx.base.initialize()(using ictx)
+        if files.nonEmpty then out.println(i"Ignoring spurious arguments: $files%, %")
+        ictx.base.initialize()
         ictx
+      }
       case None =>
         shouldStart = false
         rootCtx
@@ -87,8 +100,8 @@ class ReplDriver(settings: Array[String],
    *  such, when the user enters `:reset` this method should be called to reset
    *  everything properly
    */
-  protected def resetToInitial(): Unit = {
-    rootCtx = initialCtx
+  protected def resetToInitial(settings: List[String] = Nil): Unit = {
+    rootCtx = initialCtx(settings)
     if (rootCtx.settings.outputDir.isDefault(using rootCtx))
       rootCtx = rootCtx.fresh
         .setSetting(rootCtx.settings.outputDir, new VirtualDirectory(""))
@@ -122,6 +135,10 @@ class ReplDriver(settings: Array[String],
   final def runUntilQuit(initialState: State = initialState): State = {
     val terminal = new JLineTerminal
 
+    out.println(
+      s"""Welcome to Scala $simpleVersionString ($javaVersion, Java $javaVmName).
+         |Type in expressions for evaluation. Or try :help.""".stripMargin)
+
     /** Blockingly read a line, getting back a parse result */
     def readLine(state: State): ParseResult = {
       val completer: Completer = { (_, line, candidates) =>
@@ -145,15 +162,17 @@ class ReplDriver(settings: Array[String],
       else loop(interpret(res)(state))
     }
 
-    try withRedirectedOutput { loop(initialState) }
+    try runBody { loop(initialState) }
     finally terminal.close()
   }
 
-  final def run(input: String)(implicit state: State): State = withRedirectedOutput {
+  final def run(input: String)(implicit state: State): State = runBody {
     val parsed = ParseResult(input)(state)
     interpret(parsed)
   }
 
+  private def runBody(body: => State): State = rendering.classLoader()(using rootCtx).asContext(withRedirectedOutput(body))
+
   // TODO: i5069
   final def bind(name: String, value: Any)(implicit state: State): State = state
 
@@ -172,8 +191,8 @@ class ReplDriver(settings: Array[String],
     }
   }
 
-  private def newRun(state: State) = {
-    val run = compiler.newRun(rootCtx.fresh.setReporter(newStoreReporter), state)
+  private def newRun(state: State, reporter: StoreReporter = newStoreReporter) = {
+    val run = compiler.newRun(rootCtx.fresh.setReporter(reporter), state)
     state.copy(context = run.runContext)
   }
 
@@ -206,7 +225,7 @@ class ReplDriver(settings: Array[String],
   }
 
   private def interpret(res: ParseResult)(implicit state: State): State = {
-    val newState = res match {
+    res match {
       case parsed: Parsed if parsed.trees.nonEmpty =>
         compile(parsed, state)
 
@@ -223,11 +242,6 @@ class ReplDriver(settings: Array[String],
       case _ => // new line, empty tree
         state
     }
-    inContext(newState.context) {
-      if (!ctx.settings.XreplDisableDisplay.value)
-        out.println()
-      newState
-    }
   }
 
   /** Compile `parsed` trees and evolve `state` in accordance */
@@ -240,8 +254,14 @@ class ReplDriver(settings: Array[String],
     def extractTopLevelImports(ctx: Context): List[tpd.Import] =
       unfusedPhases(using ctx).collectFirst { case phase: CollectTopLevelImports => phase.imports }.get
 
+    def contextWithNewImports(ctx: Context, imports: List[tpd.Import]): Context =
+      if imports.isEmpty then ctx
+      else
+        imports.foldLeft(ctx.fresh.setNewScope)((ctx, imp) =>
+          ctx.importContext(imp, imp.symbol(using ctx)))
+
     implicit val state = {
-      val state0 = newRun(istate)
+      val state0 = newRun(istate, parsed.reporter)
       state0.copy(context = state0.context.withSource(parsed.source))
     }
     compiler
@@ -255,11 +275,13 @@ class ReplDriver(settings: Array[String],
             var allImports = newState.imports
             if (newImports.nonEmpty)
               allImports += (newState.objectIndex -> newImports)
-            val newStateWithImports = newState.copy(imports = allImports)
+            val newStateWithImports = newState.copy(
+              imports = allImports,
+              context = contextWithNewImports(newState.context, newImports)
+            )
 
             val warnings = newState.context.reporter
               .removeBufferedMessages(using newState.context)
-              .map(rendering.formatError)
 
             inContext(newState.context) {
               val (updatedState, definitions) =
@@ -276,8 +298,7 @@ class ReplDriver(settings: Array[String],
 
               (definitions ++ warnings)
                 .sorted
-                .map(_.msg)
-                .foreach(out.println)
+                .foreach(printDiagnostic)
 
               updatedState
             }
@@ -306,6 +327,7 @@ class ReplDriver(settings: Array[String],
           .membersBasedOnFlags(required = Method, excluded = Accessor | ParamAccessor | Synthetic | Private)
           .filterNot { denot =>
             defn.topClasses.contains(denot.symbol.owner) || denot.symbol.isConstructor
+             || denot.symbol.name.is(DefaultGetterName)
           }
 
       val vals =
@@ -368,8 +390,8 @@ class ReplDriver(settings: Array[String],
       out.println(Help.text)
       state
 
-    case Reset =>
-      resetToInitial()
+    case Reset(arg) =>
+      resetToInitial(tokenize(arg))
       initialState
 
     case Imports =>
@@ -412,6 +434,16 @@ class ReplDriver(settings: Array[String],
       }
       state
 
+    case Settings(arg) => arg match
+      case "" =>
+        given ctx: Context = state.context
+        for (s <- ctx.settings.userSetSettings(ctx.settingsState).sortBy(_.name))
+          out.println(s"${s.name} = ${if s.value == "" then "\"\"" else s.value}")
+        state
+      case _  =>
+        rootCtx = setupRootCtx(tokenize(arg).toArray, rootCtx)
+        state.copy(context = rootCtx)
+
     case Quit =>
       // end of the world!
       state
@@ -419,7 +451,21 @@ class ReplDriver(settings: Array[String],
 
   /** shows all errors nicely formatted */
   private def displayErrors(errs: Seq[Diagnostic])(implicit state: State): State = {
-    errs.map(rendering.formatError).map(_.msg).foreach(out.println)
+    errs.foreach(printDiagnostic)
     state
   }
-}
+
+  /** Like ConsoleReporter, but without file paths, -Xprompt displaying,
+   *  and using a PrintStream rather than a PrintWriter so messages aren't re-encoded. */
+  private object ReplConsoleReporter extends ConsoleReporter.AbstractConsoleReporter {
+    override def posFileStr(pos: SourcePosition) = "" // omit file paths
+    override def printMessage(msg: String): Unit = out.println(msg)
+    override def flush()(using Context): Unit    = out.flush()
+  }
+
+  /** Print warnings & errors using ReplConsoleReporter, and info straight to out */
+  private def printDiagnostic(dia: Diagnostic)(implicit state: State) = dia.level match
+    case interfaces.Diagnostic.INFO => out.println(dia.msg) // print REPL's special info diagnostics directly to out
+    case _                          => ReplConsoleReporter.doReport(dia)(using state.context)
+
+end ReplDriver
diff --git a/compiler/src/dotty/tools/repl/ReplFrontEnd.scala b/compiler/src/dotty/tools/repl/ReplFrontEnd.scala
deleted file mode 100644
index 59df49e853cb..000000000000
--- a/compiler/src/dotty/tools/repl/ReplFrontEnd.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-package dotty.tools
-package repl
-
-import dotc.typer.FrontEnd
-import dotc.CompilationUnit
-import dotc.core.Contexts._
-import dotc.typer.ImportInfo.withRootImports
-
-/** A customized `FrontEnd` for the REPL
- *
- *  This customized front end does not perform parsing as part of its `runOn`
- *  method. This allows us to keep the parsing separate from the rest of the
- *  compiler pipeline.
- */
-private[repl] class REPLFrontEnd extends FrontEnd {
-
-  override def isRunnable(using Context): Boolean = true
-
-  override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = {
-    assert(units.size == 1) // REPl runs one compilation unit at a time
-    val unit = units.head
-    val unitContext = ctx.fresh.setCompilationUnit(unit).withRootImports
-    enterSyms(using unitContext)
-    typeCheck(using unitContext)
-    List(unit)
-  }
-}
diff --git a/compiler/src/dotty/tools/repl/ScriptEngine.scala b/compiler/src/dotty/tools/repl/ScriptEngine.scala
index 8a1d3e7c2148..501dbd0263d7 100644
--- a/compiler/src/dotty/tools/repl/ScriptEngine.scala
+++ b/compiler/src/dotty/tools/repl/ScriptEngine.scala
@@ -3,7 +3,7 @@ package repl
 
 import java.io.{Reader, StringWriter}
 import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine => JScriptEngine, ScriptEngineFactory, ScriptException, SimpleBindings}
-import dotc.core.StdNames.str
+import dotc.core.StdNames.{nme, str}
 
 /** A JSR 223 (Scripting API) compatible wrapper around the REPL for improved
  *  interoperability with software that supports it.
@@ -37,7 +37,7 @@ class ScriptEngine extends AbstractScriptEngine {
     val vid = state.valIndex
     state = driver.run(script)(state)
     val oid = state.objectIndex
-    Class.forName(s"${str.REPL_SESSION_LINE}$oid", true, rendering.classLoader()(using state.context))
+    Class.forName(s"${Rendering.REPL_WRAPPER_NAME_PREFIX}$oid", true, rendering.classLoader()(using state.context))
       .getDeclaredMethods.find(_.getName == s"${str.REPL_RES_PREFIX}$vid")
       .map(_.invoke(null))
       .getOrElse(null)
diff --git a/compiler/src/dotty/tools/runner/ObjectRunner.scala b/compiler/src/dotty/tools/runner/ObjectRunner.scala
new file mode 100644
index 000000000000..112b55f2e464
--- /dev/null
+++ b/compiler/src/dotty/tools/runner/ObjectRunner.scala
@@ -0,0 +1,48 @@
+package dotty.tools
+package runner
+
+import java.net.URL
+import scala.util.control.NonFatal
+import java.lang.reflect.InvocationTargetException
+import java.lang.reflect.UndeclaredThrowableException
+import java.util.concurrent.ExecutionException
+
+/**
+ * This is a copy of the implementation of the scala/scala scala.tools.nsc.CommonRunner trait
+ */
+trait CommonRunner {
+  /** Run a given object, specified by name, using a
+   *  specified classpath and argument list.
+   *
+   *  @throws java.lang.ClassNotFoundException
+   *  @throws java.lang.NoSuchMethodException
+   *  @throws java.lang.reflect.InvocationTargetException
+   */
+  def run(urls: Seq[URL], objectName: String, arguments: Seq[String]): Unit = {
+    import RichClassLoader._
+    ScalaClassLoader.fromURLsParallelCapable(urls).run(objectName, arguments)
+  }
+
+  /** Catches any non-fatal exception thrown by run (in the case of InvocationTargetException,
+   *  unwrapping it) and returns it in an Option.
+   */
+  def runAndCatch(urls: Seq[URL], objectName: String, arguments: Seq[String]): Option[Throwable] =
+    try   { run(urls, objectName, arguments) ; None }
+    catch { case NonFatal(e) => Some(rootCause(e)) }
+
+  private def rootCause(x: Throwable): Throwable = x match {
+    case  _: InvocationTargetException |
+          _: ExceptionInInitializerError |
+          _: UndeclaredThrowableException |
+          _: ExecutionException
+            if x.getCause != null =>
+              rootCause(x.getCause)
+    case _ => x
+  }
+}
+
+/** An object that runs another object specified by name.
+ *
+ *  @author  Lex Spoon
+ */
+object ObjectRunner extends CommonRunner
diff --git a/compiler/src/dotty/tools/runner/ScalaClassLoader.scala b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala
new file mode 100644
index 000000000000..af9e54f3506d
--- /dev/null
+++ b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala
@@ -0,0 +1,82 @@
+package dotty.tools
+package runner
+
+import scala.language.implicitConversions
+
+import java.lang.ClassLoader
+import java.lang.invoke.{MethodHandles, MethodType}
+import java.lang.reflect.Modifier
+import java.net.{ URL, URLClassLoader }
+import java.lang.reflect.{ InvocationTargetException, UndeclaredThrowableException }
+
+import scala.annotation.internal.sharable
+import scala.annotation.tailrec
+import scala.util.control.Exception.catching
+
+final class RichClassLoader(private val self: ClassLoader) extends AnyVal {
+  /** Execute an action with this classloader as context classloader. */
+  private def asContext[T](action: => T): T = ScalaClassLoader.asContext(self)(action)
+
+  /** Load and link a class with this classloader */
+  def tryToLoadClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, initialize = false)
+
+  /** Load, link and initialize a class with this classloader */
+  def tryToInitializeClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, initialize = true)
+
+  private def tryClass[T <: AnyRef](path: String, initialize: Boolean): Option[Class[T]] =
+    catching(classOf[ClassNotFoundException], classOf[SecurityException]) opt
+      Class.forName(path, initialize, self).asInstanceOf[Class[T]]
+
+  /** Run the main method of a class to be loaded by this classloader */
+  def run(objectName: String, arguments: Seq[String]): Unit = {
+    val clsToRun = tryToInitializeClass(objectName).getOrElse(throw new ClassNotFoundException(objectName))
+    val method = clsToRun.getMethod("main", classOf[Array[String]])
+    if !Modifier.isStatic(method.getModifiers) then
+      throw new NoSuchMethodException(s"$objectName.main is not static")
+    try asContext(method.invoke(null, Array(arguments.toArray: AnyRef): _*))
+    catch unwrapHandler({ case ex => throw ex })
+  }
+
+  @tailrec private def unwrapThrowable(x: Throwable): Throwable = x match {
+    case  _: InvocationTargetException |      // thrown by reflectively invoked method or constructor
+          _: ExceptionInInitializerError |    // thrown when running a static initializer (e.g. a scala module constructor)
+          _: UndeclaredThrowableException |   // invocation on a proxy instance if its invocation handler's `invoke` throws an exception
+          _: ClassNotFoundException |         // no definition for a class instantiated by name
+          _: NoClassDefFoundError             // the definition existed when the executing class was compiled, but can no longer be found
+            if x.getCause != null =>
+              unwrapThrowable(x.getCause)
+    case _ => x
+  }
+
+  // Transforms an exception handler into one which will only receive the unwrapped
+  // exceptions (for the values of wrap covered in unwrapThrowable.)
+  private def unwrapHandler[T](pf: PartialFunction[Throwable, T]): PartialFunction[Throwable, T] =
+    pf.compose({ case ex => unwrapThrowable(ex) })
+}
+
+object RichClassLoader {
+  implicit def wrapClassLoader(loader: ClassLoader): RichClassLoader = new RichClassLoader(loader)
+}
+
+object ScalaClassLoader {
+  def setContext(cl: ClassLoader) = Thread.currentThread.setContextClassLoader(cl)
+
+  def fromURLsParallelCapable(urls: Seq[URL], parent: ClassLoader = null): URLClassLoader =
+    new URLClassLoader(urls.toArray, if parent == null then bootClassLoader else parent)
+
+  @sharable private[this] val bootClassLoader: ClassLoader =
+    if scala.util.Properties.isJavaAtLeast("9") then
+      try
+        MethodHandles.lookup().findStatic(classOf[ClassLoader], "getPlatformClassLoader", MethodType.methodType(classOf[ClassLoader])).invoke().asInstanceOf[ClassLoader]
+      catch case _: Throwable => null
+    else null
+
+  extension (classLoader: ClassLoader)
+    /** Execute an action with this classloader as context classloader. */
+    def asContext[T](action: => T): T =
+      val saved = Thread.currentThread.getContextClassLoader
+      try
+        setContext(classLoader)
+        action
+      finally setContext(saved)
+}
diff --git a/compiler/src/dotty/tools/scripting/Main.scala b/compiler/src/dotty/tools/scripting/Main.scala
old mode 100644
new mode 100755
index ded809b9f675..3a86155f1fe9
--- a/compiler/src/dotty/tools/scripting/Main.scala
+++ b/compiler/src/dotty/tools/scripting/Main.scala
@@ -2,7 +2,7 @@ package dotty.tools.scripting
 
 import java.io.File
 import java.nio.file.{Path, Paths}
-import dotty.tools.dotc.config.Properties.isWin 
+import dotty.tools.dotc.config.Properties.isWin
 
 /** Main entry point to the Scripting execution engine */
 object Main:
@@ -10,9 +10,11 @@ object Main:
       All arguments afterwards are script arguments.*/
   private def distinguishArgs(args: Array[String]): (Array[String], File, Array[String], Boolean, Boolean) =
     val (leftArgs, rest) = args.splitAt(args.indexOf("-script"))
-    assert(rest.size >= 2,s"internal error: rest == Array(${rest.mkString(",")})")
+    assert(rest.size >= 2, s"internal error: rest == Array(${rest.mkString(",")})")
 
     val file = File(rest(1))
+    // write script path to script.path property, so the called script can see it
+    sys.props("script.path") = file.toPath.toAbsolutePath.toString
     val scriptArgs = rest.drop(2)
     var saveJar = false
     var invokeFlag = true // by default, script main method is invoked
@@ -32,10 +34,12 @@ object Main:
   def main(args: Array[String]): Unit =
     val (compilerArgs, scriptFile, scriptArgs, saveJar, invokeFlag) = distinguishArgs(args)
     val driver = ScriptingDriver(compilerArgs, scriptFile, scriptArgs)
-    try driver.compileAndRun { (outDir:Path, classpath:String, mainClass: String) =>
+    try driver.compileAndRun { (outDir:Path, classpathEntries:Seq[Path], mainClass: String) =>
+      // write expanded classpath to java.class.path property, so the called script can see it
+      sys.props("java.class.path") = classpathEntries.map(_.toString).mkString(pathsep)
       if saveJar then
         // write a standalone jar to the script parent directory
-        writeJarfile(outDir, scriptFile, scriptArgs, classpath, mainClass)
+        writeJarfile(outDir, scriptFile, scriptArgs, classpathEntries, mainClass)
       invokeFlag
     }
     catch
@@ -47,10 +51,7 @@ object Main:
         throw e.getCause
 
   private def writeJarfile(outDir: Path, scriptFile: File, scriptArgs:Array[String],
-      classpath:String, mainClassName: String): Unit =
-
-    val javaClasspath = sys.props("java.class.path")
-    val runtimeClasspath = s"${classpath}$pathsep$javaClasspath"
+      classpathEntries:Seq[Path], mainClassName: String): Unit =
 
     val jarTargetDir: Path = Option(scriptFile.toPath.toAbsolutePath.getParent) match {
       case None => sys.error(s"no parent directory for script file [$scriptFile]")
@@ -60,7 +61,7 @@ object Main:
     def scriptBasename = scriptFile.getName.takeWhile(_!='.')
     val jarPath = s"$jarTargetDir/$scriptBasename.jar"
 
-    val cpPaths = runtimeClasspath.split(pathsep).map(_.toUrl)
+    val cpPaths = classpathEntries.map { _.toString.toUrl }
 
     import java.util.jar.Attributes.Name
     val cpString:String = cpPaths.distinct.mkString(" ")
@@ -88,11 +89,11 @@ object Main:
         case s if s.startsWith("./") => s.drop(2)
         case s => s
       }
-   
+
     // convert to absolute path relative to cwd.
     def absPath: String = norm match
       case str if str.isAbsolute => norm
-      case _ => Paths.get(userDir,norm).toString.norm
+      case _ => Paths.get(userDir, norm).toString.norm
 
     def toUrl: String = Paths.get(absPath).toUri.toURL.toString
 
diff --git a/compiler/src/dotty/tools/scripting/ScriptingDriver.scala b/compiler/src/dotty/tools/scripting/ScriptingDriver.scala
old mode 100644
new mode 100755
index af0858acf658..bce54183807b
--- a/compiler/src/dotty/tools/scripting/ScriptingDriver.scala
+++ b/compiler/src/dotty/tools/scripting/ScriptingDriver.scala
@@ -1,24 +1,18 @@
 package dotty.tools.scripting
 
-import java.nio.file.{ Files, Path }
+import java.nio.file.{ Files, Paths, Path }
 import java.io.File
-import java.net.{ URL, URLClassLoader }
-import java.lang.reflect.{ Modifier, Method }
+import java.net.{ URLClassLoader }
 
-import scala.jdk.CollectionConverters._
-
-import dotty.tools.dotc.{ Driver, Compiler }
-import dotty.tools.dotc.core.Contexts, Contexts.{ Context, ContextBase, ctx }
-import dotty.tools.dotc.config.CompilerCommand
-import dotty.tools.io.{ PlainDirectory, Directory }
-import dotty.tools.dotc.reporting.Reporter
-import dotty.tools.dotc.config.Settings.Setting._
-
-import sys.process._
+import dotty.tools.dotc.Driver
+import dotty.tools.dotc.core.Contexts, Contexts.{ Context, ctx }
+import dotty.tools.io.{ PlainDirectory, Directory, ClassPath }
+import Util.*
 
 class ScriptingDriver(compilerArgs: Array[String], scriptFile: File, scriptArgs: Array[String]) extends Driver:
-  def compileAndRun(pack:(Path, String, String) => Boolean = null): Unit =
+  def compileAndRun(pack:(Path, Seq[Path], String) => Boolean = null): Unit =
     val outDir = Files.createTempDirectory("scala3-scripting")
+    outDir.toFile.deleteOnExit()
     setup(compilerArgs :+ scriptFile.getAbsolutePath, initCtx.fresh) match
       case Some((toCompile, rootCtx)) =>
         given Context = rootCtx.fresh.setSetting(rootCtx.settings.outputDir,
@@ -28,11 +22,13 @@ class ScriptingDriver(compilerArgs: Array[String], scriptFile: File, scriptArgs:
           throw ScriptingException("Errors encountered during compilation")
 
         try
-          val (mainClass, mainMethod) = detectMainClassAndMethod(outDir, ctx.settings.classpath.value, scriptFile)
+          val classpath = s"${ctx.settings.classpath.value}${pathsep}${sys.props("java.class.path")}"
+          val classpathEntries: Seq[Path] = ClassPath.expandPath(classpath, expandStar=true).map { Paths.get(_) }
+          val (mainClass, mainMethod) = detectMainClassAndMethod(outDir, classpathEntries, scriptFile.toString)
           val invokeMain: Boolean =
             Option(pack) match
               case Some(func) =>
-                func(outDir, ctx.settings.classpath.value, mainClass)
+                func(outDir, classpathEntries, mainClass)
               case None =>
                 true
             end match
@@ -45,57 +41,6 @@ class ScriptingDriver(compilerArgs: Array[String], scriptFile: File, scriptArgs:
       case None =>
   end compileAndRun
 
-  private def deleteFile(target: File): Unit =
-    if target.isDirectory then
-      for member <- target.listFiles.toList
-      do deleteFile(member)
-    target.delete()
-  end deleteFile
-
-  private def detectMainClassAndMethod(outDir: Path, classpath: String,
-      scriptFile: File): (String, Method) =
-    val outDirURL = outDir.toUri.toURL
-    val classpathUrls = classpath.split(pathsep).map(File(_).toURI.toURL)
-    val cl = URLClassLoader(classpathUrls :+ outDirURL)
-
-    def collectMainMethods(target: File, path: String): List[(String, Method)] =
-      val nameWithoutExtension = target.getName.takeWhile(_ != '.')
-      val targetPath =
-        if path.nonEmpty then s"${path}.${nameWithoutExtension}"
-        else nameWithoutExtension
-
-      if target.isDirectory then
-        for
-          packageMember <- target.listFiles.toList
-          membersMainMethod <- collectMainMethods(packageMember, targetPath)
-        yield membersMainMethod
-      else if target.getName.endsWith(".class") then
-        val cls = cl.loadClass(targetPath)
-        try
-          val method = cls.getMethod("main", classOf[Array[String]])
-          if Modifier.isStatic(method.getModifiers) then List((cls.getName, method)) else Nil
-        catch
-          case _: java.lang.NoSuchMethodException => Nil
-      else Nil
-    end collectMainMethods
-
-    val candidates = for
-      file <- outDir.toFile.listFiles.toList
-      method <- collectMainMethods(file, "")
-    yield method
-
-    candidates match
-      case Nil =>
-        throw ScriptingException(s"No main methods detected in script ${scriptFile}")
-      case _ :: _ :: _ =>
-        throw ScriptingException("A script must contain only one main method. " +
-          s"Detected the following main methods:\n${candidates.mkString("\n")}")
-      case m :: Nil => m
-    end match
-  end detectMainClassAndMethod
-
-  def pathsep = sys.props("path.separator")
-
 end ScriptingDriver
 
 case class ScriptingException(msg: String) extends RuntimeException(msg)
diff --git a/compiler/src/dotty/tools/scripting/StringDriver.scala b/compiler/src/dotty/tools/scripting/StringDriver.scala
new file mode 100755
index 000000000000..6ac0bce9766a
--- /dev/null
+++ b/compiler/src/dotty/tools/scripting/StringDriver.scala
@@ -0,0 +1,45 @@
+package dotty.tools.scripting
+
+import java.nio.file.{ Files, Paths, Path }
+
+import dotty.tools.dotc.Driver
+import dotty.tools.dotc.core.Contexts, Contexts.{ Context, ctx }
+import dotty.tools.io.{ PlainDirectory, Directory, ClassPath }
+import Util.*
+
+class StringDriver(compilerArgs: Array[String], scalaSource: String) extends Driver:
+  override def sourcesRequired: Boolean = false
+
+  def compileAndRun(classpath: List[String] = Nil): Unit =
+    val outDir = Files.createTempDirectory("scala3-expression")
+    outDir.toFile.deleteOnExit()
+
+    setup(compilerArgs, initCtx.fresh) match
+      case Some((toCompile, rootCtx)) =>
+        given Context = rootCtx.fresh.setSetting(rootCtx.settings.outputDir,
+          new PlainDirectory(Directory(outDir)))
+
+        val compiler = newCompiler
+        compiler.newRun.compileFromStrings(List(scalaSource))
+
+        val output = ctx.settings.outputDir.value
+        if ctx.reporter.hasErrors then
+          throw StringDriverException("Errors encountered during compilation")
+
+        try
+          val classpath = s"${ctx.settings.classpath.value}${pathsep}${sys.props("java.class.path")}"
+          val classpathEntries: Seq[Path] = ClassPath.expandPath(classpath, expandStar=true).map { Paths.get(_) }
+          sys.props("java.class.path") = classpathEntries.map(_.toString).mkString(pathsep)
+          val (mainClass, mainMethod) = detectMainClassAndMethod(outDir, classpathEntries, scalaSource)
+          mainMethod.invoke(null, Array.empty[String])
+        catch
+          case e: java.lang.reflect.InvocationTargetException =>
+            throw e.getCause
+        finally
+          deleteFile(outDir.toFile)
+      case None =>
+  end compileAndRun
+
+end StringDriver
+
+case class StringDriverException(msg: String) extends RuntimeException(msg)
diff --git a/compiler/src/dotty/tools/scripting/Util.scala b/compiler/src/dotty/tools/scripting/Util.scala
new file mode 100755
index 000000000000..9529eb9ad791
--- /dev/null
+++ b/compiler/src/dotty/tools/scripting/Util.scala
@@ -0,0 +1,60 @@
+package dotty.tools.scripting
+
+import java.nio.file.{ Path }
+import java.io.File
+import java.net.{ URLClassLoader }
+import java.lang.reflect.{ Modifier, Method }
+
+object Util:
+
+  def deleteFile(target: File): Unit =
+    if target.isDirectory then
+      for member <- target.listFiles.toList
+      do deleteFile(member)
+    target.delete()
+  end deleteFile
+
+  def detectMainClassAndMethod(outDir: Path, classpathEntries: Seq[Path], srcFile: String): (String, Method) =
+    val classpathUrls = (classpathEntries :+ outDir).map { _.toUri.toURL }
+    val cl = URLClassLoader(classpathUrls.toArray)
+
+    def collectMainMethods(target: File, path: String): List[(String, Method)] =
+      val nameWithoutExtension = target.getName.takeWhile(_ != '.')
+      val targetPath =
+        if path.nonEmpty then s"${path}.${nameWithoutExtension}"
+        else nameWithoutExtension
+
+      if target.isDirectory then
+        for
+          packageMember <- target.listFiles.toList
+          membersMainMethod <- collectMainMethods(packageMember, targetPath)
+        yield membersMainMethod
+      else if target.getName.endsWith(".class") then
+        val cls = cl.loadClass(targetPath)
+        try
+          val method = cls.getMethod("main", classOf[Array[String]])
+          if Modifier.isStatic(method.getModifiers) then List((cls.getName, method)) else Nil
+        catch
+          case _: java.lang.NoSuchMethodException => Nil
+      else Nil
+    end collectMainMethods
+
+    val mains = for
+      file <- outDir.toFile.listFiles.toList
+      method <- collectMainMethods(file, "")
+    yield method
+
+    mains match
+      case Nil =>
+        throw StringDriverException(s"No main methods detected for [${srcFile}]")
+      case _ :: _ :: _ =>
+        throw StringDriverException(
+          s"internal error: Detected the following main methods:\n${mains.mkString("\n")}")
+      case m :: Nil => m
+    end match
+  end detectMainClassAndMethod
+
+  def pathsep = sys.props("path.separator")
+
+end Util
+
diff --git a/compiler/src/scala/quoted/runtime/impl/Matcher.scala b/compiler/src/scala/quoted/runtime/impl/Matcher.scala
deleted file mode 100644
index c6176cbcf830..000000000000
--- a/compiler/src/scala/quoted/runtime/impl/Matcher.scala
+++ /dev/null
@@ -1,438 +0,0 @@
-package scala.quoted
-package runtime.impl
-
-import scala.annotation.internal.sharable
-import scala.annotation.{Annotation, compileTimeOnly}
-
-/** Matches a quoted tree against a quoted pattern tree.
- *  A quoted pattern tree may have type and term holes in addition to normal terms.
- *
- *
- *  Semantics:
- *
- *  We use `'{..}` for expression, `'[..]` for types and `⟨..⟩` for patterns nested in expressions.
- *  The semantics are defined as a list of reduction rules that are tried one by one until one matches.
- *
- *   Operations:
- *   - `s =?= p` checks if a scrutinee `s` matches the pattern `p` while accumulating extracted parts of the code.
- *   - `isColosedUnder(x1, .., xn)('{e})` returns true if and only if all the references in `e` to names defined in the patttern are contained in the set `{x1, ... xn}`.
- *   - `lift(x1, .., xn)('{e})` returns `(y1, ..., yn) => [xi = $yi]'{e}` where `yi` is an `Expr` of the type of `xi`.
- *   - `withEnv(x1 -> y1, ..., xn -> yn)(matching)` evaluates mathing recording that `xi` is equivalent to `yi`.
- *   - `matched` denotes that the the match succedded and `matched('{e})` denotes that a matech succeded and extracts `'{e}`
- *   - `&&&` matches if both sides match. Concatenates the extracted expressions of both sides.
- *
- *   Note: that not all quoted terms bellow are valid expressions
- *
- *   ```scala
- *   /* Term hole */
- *   '{ e } =?= '{ hole[T] }  &&  typeOf('{e}) <:< T && isColosedUnder()('{e})  ===>   matched('{e})
- *
- *   /* Higher order term hole */
- *   '{ e } =?= '{ hole[(T1, ..., Tn) => T](x1, ..., xn) }  &&  isColosedUnder(x1, ... xn)('{e})  ===>   matched(lift(x1, ..., xn)('{e}))
- *
- *   /* Match literal */
- *   '{ lit } =?= '{ lit }   ===>   matched
- *
- *   /* Match type ascription (a) */
- *   '{ e: T } =?= '{ p }   ===>   '{e} =?= '{p}
- *
- *   /* Match type ascription (b) */
- *   '{ e } =?= '{ p: P }   ===>   '{e} =?= '{p}
- *
- *   /* Match selection */
- *   '{ e.x } =?= '{ p.x }   ===>   '{e} =?= '{p}
- *
- *   /* Match reference */
- *   '{ x } =?= '{ x }   ===>   matched
- *
- *   /* Match application */
- *   '{e0(e1, ..., en)} =?= '{p0(p1, ..., p2)}   ===>   '{e0} =?= '{p0} &&& '{e1} =?= '{p1} &&& ... %% '{en} =?= '{pn}
- *
- *   /* Match type application */
- *   '{e[T1, ..., Tn]} =?= '{p[P1, ..., Pn]}   ===>   '{e} =?= '{p} &&& '[T1] =?= '{P1} &&& ... %% '[Tn] =?= '[Pn]
- *
- *   /* Match block flattening */
- *   '{ {e0; e1; ...; en}; em } =?= '{ {p0; p1; ...; pm}; em }   ===>   '{ e0; {e1; ...; en; em} } =?= '{ p0; {p1; ...; pm; em} }
- *
- *   /* Match block */
- *   '{ e1; e2 } =?= '{ p1; p2 }   ===>   '{e1} =?= '{p1} &&& '{e2} =?= '{p2}
- *
- *   /* Match def block */
- *   '{ e1; e2 } =?= '{ p1; p2 }   ===>   withEnv(symOf(e1) -> symOf(p1))('{e1} =?= '{p1} &&& '{e2} =?= '{p2})
- *
- *   /* Match if */
- *   '{ if e0 then e1 else e2 } =?= '{ if p0 then p1 else p2 }   ===>  '{e0} =?= '{p0} &&& '{e1} =?= '{p1} &&& '{e2} =?= '{p2}
- *
- *   /* Match while */
- *   '{ while e0 do e1 } =?= '{ while p0 do p1 }   ===>  '{e0} =?= '{p0} &&& '{e1} =?= '{p1}
- *
- *   /* Match assign */
- *   '{ e0 = e1 } =?= '{ p0 = p1 }   ==>   '{e0} =?= '{p0} &&& '{e1} =?= '{p1}
- *
- *   /* Match new */
- *   '{ new T } =?= '{ new T }   ===>   matched
- *
- *   /* Match this */
- *   '{ C.this } =?= '{ C.this }   ===>   matched
- *
- *   /* Match super */
- *   '{ e.super } =?= '{ p.super }   ===>   '{e} =?= '{p}
- *
- *   /* Match varargs */
- *   '{ e: _* } =?= '{ p: _* }   ===>   '{e} =?= '{p}
- *
- *   /* Match val */
- *   '{ val x: T = e1; e2 } =?= '{ val y: P = p1; p2 }   ===>   withEnv(x -> y)('[T] =?= '[P] &&& '{e1} =?= '{p1} &&& '{e2} =?= '{p2})
- *
- *   /* Match def */
- *   '{ def x0(x1: T1, ..., xn: Tn): T0 = e1; e2 } =?= '{ def y0(y1: P1, ..., yn: Pn): P0 = p1; p2 }   ===>   withEnv(x0 -> y0, ..., xn -> yn)('[T0] =?= '[P0] &&& ... &&& '[Tn] =?= '[Pn] &&& '{e1} =?= '{p1} &&& '{e2} =?= '{p2})
- *
- *   // Types
- *
- *   /* Match type */
- *   '[T] =?= '[P] && T <:< P   ===>   matched
- *
- *   ```
- */
-object Matcher {
-
-  abstract class QuoteMatcher[QCtx <: Quotes & Singleton](val qctx: QCtx) {
-
-    // TODO improve performance
-
-    // TODO use flag from qctx.reflect. Maybe -debug or add -debug-macros
-    private inline val debug = false
-
-    import qctx.reflect._
-    import Matching._
-
-    def patternHoleSymbol: Symbol
-    def higherOrderHoleSymbol: Symbol
-
-    /** A map relating equivalent symbols from the scrutinee and the pattern
-     *  For example in
-     *  ```
-     *  '{val a = 4; a * a} match case '{ val x = 4; x * x }
-     *  ```
-     *  when matching `a * a` with `x * x` the environment will contain `Map(a -> x)`.
-     */
-    private type Env = Map[Symbol, Symbol]
-
-    inline private def withEnv[T](env: Env)(inline body: Env ?=> T): T = body(using env)
-
-    def termMatch(scrutineeTerm: Term, patternTerm: Term): Option[Tuple] =
-      given Env = Map.empty
-      scrutineeTerm =?= patternTerm
-
-    def typeTreeMatch(scrutineeTypeTree: TypeTree, patternTypeTree: TypeTree): Option[Tuple] =
-      given Env = Map.empty
-      scrutineeTypeTree =?= patternTypeTree
-
-    /** Check that all trees match with `mtch` and concatenate the results with &&& */
-    private def matchLists[T](l1: List[T], l2: List[T])(mtch: (T, T) => Matching): Matching = (l1, l2) match {
-      case (x :: xs, y :: ys) => mtch(x, y) &&& matchLists(xs, ys)(mtch)
-      case (Nil, Nil) => matched
-      case _ => notMatched
-    }
-
-    extension (scrutinees: List[Tree])
-      /** Check that all trees match with =?= and concatenate the results with &&& */
-      private def =?= (patterns: List[Tree])(using Env): Matching =
-        matchLists(scrutinees, patterns)(_ =?= _)
-
-    extension (scrutinee0: Tree)
-      /** Check that the trees match and return the contents from the pattern holes.
-       *  Return None if the trees do not match otherwise return Some of a tuple containing all the contents in the holes.
-       *
-       *  @param scrutinee The tree beeing matched
-       *  @param pattern The pattern tree that the scrutinee should match. Contains `patternHole` holes.
-       *  @param `summon[Env]` Set of tuples containing pairs of symbols (s, p) where s defines a symbol in `scrutinee` which corresponds to symbol p in `pattern`.
-       *  @return `None` if it did not match or `Some(tup: Tuple)` if it matched where `tup` contains the contents of the holes.
-       */
-      private def =?= (pattern0: Tree)(using Env): Matching = {
-
-        /* Match block flattening */ // TODO move to cases
-        /** Normalize the tree */
-        def normalize(tree: Tree): Tree = tree match {
-          case Block(Nil, expr) => normalize(expr)
-          case Block(stats1, Block(stats2, expr)) =>
-            expr match
-              case _: Closure => tree
-              case _ => normalize(Block(stats1 ::: stats2, expr))
-          case Inlined(_, Nil, expr) => normalize(expr)
-          case _ => tree
-        }
-
-        val scrutinee = normalize(scrutinee0)
-        val pattern = normalize(pattern0)
-
-        /** Check that both are `val` or both are `lazy val` or both are `var` **/
-        def checkValFlags(): Boolean = {
-          import Flags._
-          val sFlags = scrutinee.symbol.flags
-          val pFlags = pattern.symbol.flags
-          sFlags.is(Lazy) == pFlags.is(Lazy) && sFlags.is(Mutable) == pFlags.is(Mutable)
-        }
-
-        (scrutinee, pattern) match {
-
-          /* Term hole */
-          // Match a scala.internal.Quoted.patternHole typed as a repeated argument and return the scrutinee tree
-          case (scrutinee @ Typed(s, tpt1), Typed(TypeApply(patternHole, tpt :: Nil), tpt2))
-              if patternHole.symbol == patternHoleSymbol &&
-                 s.tpe <:< tpt.tpe &&
-                 tpt2.tpe.derivesFrom(defn.RepeatedParamClass) =>
-            matched(scrutinee.asExpr)
-
-          /* Term hole */
-          // Match a scala.internal.Quoted.patternHole and return the scrutinee tree
-          case (ClosedPatternTerm(scrutinee), TypeApply(patternHole, tpt :: Nil))
-              if patternHole.symbol == patternHoleSymbol &&
-                 scrutinee.tpe <:< tpt.tpe =>
-            matched(scrutinee.asExpr)
-
-          /* Higher order term hole */
-          // Matches an open term and wraps it into a lambda that provides the free variables
-          case (scrutinee, pattern @ Apply(TypeApply(Ident("higherOrderHole"), List(Inferred())), Repeated(args, _) :: Nil))
-              if pattern.symbol == higherOrderHoleSymbol =>
-
-            def bodyFn(lambdaArgs: List[Tree]): Tree = {
-              val argsMap = args.map(_.symbol).zip(lambdaArgs.asInstanceOf[List[Term]]).toMap
-              new TreeMap {
-                override def transformTerm(tree: Term)(owner: Symbol): Term =
-                  tree match
-                    case tree: Ident => summon[Env].get(tree.symbol).flatMap(argsMap.get).getOrElse(tree)
-                    case tree => super.transformTerm(tree)(owner)
-              }.transformTree(scrutinee)(Symbol.spliceOwner)
-            }
-            val names = args.map {
-              case Block(List(DefDef("$anonfun", _, _, Some(Apply(Ident(name), _)))), _) => name
-              case arg => arg.symbol.name
-            }
-            val argTypes = args.map(x => x.tpe.widenTermRefByName)
-            val resType = pattern.tpe
-            val res = Lambda(Symbol.spliceOwner, MethodType(names)(_ => argTypes, _ => resType), (meth, x) => bodyFn(x).changeOwner(meth))
-            matched(res.asExpr)
-
-          //
-          // Match two equivalent trees
-          //
-
-          /* Match literal */
-          case (Literal(constant1), Literal(constant2)) if constant1 == constant2 =>
-            matched
-
-          /* Match type ascription (a) */
-          case (Typed(expr1, _), pattern) =>
-            expr1 =?= pattern
-
-          /* Match type ascription (b) */
-          case (scrutinee, Typed(expr2, _)) =>
-            scrutinee =?= expr2
-
-          /* Match selection */
-          case (ref: Ref, Select(qual2, _)) if symbolMatch(scrutinee, pattern) =>
-            ref match
-              case Select(qual1, _) => qual1 =?= qual2
-              case ref: Ident =>
-                ref.tpe match
-                  case TermRef(qual: TermRef, _) => Ref.term(qual) =?= qual2
-                  case _ => matched
-
-          /* Match reference */
-          case (_: Ref, _: Ident) if symbolMatch(scrutinee, pattern) =>
-            matched
-
-          /* Match application */
-          case (Apply(fn1, args1), Apply(fn2, args2)) =>
-            fn1 =?= fn2 &&& args1 =?= args2
-
-          /* Match type application */
-          case (TypeApply(fn1, args1), TypeApply(fn2, args2)) =>
-            fn1 =?= fn2 &&& args1 =?= args2
-
-          /* Match block */
-          case (Block(stat1 :: stats1, expr1), Block(stat2 :: stats2, expr2)) =>
-            val newEnv = (stat1, stat2) match {
-              case (stat1: Definition, stat2: Definition) =>
-                summon[Env] + (stat1.symbol -> stat2.symbol)
-              case _ =>
-                summon[Env]
-            }
-            withEnv(newEnv) {
-              stat1 =?= stat2 &&& Block(stats1, expr1) =?= Block(stats2, expr2)
-            }
-
-          /* Match if */
-          case (If(cond1, thenp1, elsep1), If(cond2, thenp2, elsep2)) =>
-            cond1 =?= cond2 &&& thenp1 =?= thenp2 &&& elsep1 =?= elsep2
-
-          /* Match while */
-          case (While(cond1, body1), While(cond2, body2)) =>
-            cond1 =?= cond2 &&& body1 =?= body2
-
-          /* Match assign */
-          case (Assign(lhs1, rhs1), Assign(lhs2, rhs2)) =>
-            lhs1 =?= lhs2 &&& rhs1 =?= rhs2
-
-          /* Match new */
-          case (New(tpt1), New(tpt2)) if tpt1.tpe.typeSymbol == tpt2.tpe.typeSymbol =>
-            matched
-
-          /* Match this */
-          case (This(_), This(_)) if scrutinee.symbol == pattern.symbol =>
-            matched
-
-          /* Match super */
-          case (Super(qual1, mix1), Super(qual2, mix2)) if mix1 == mix2 =>
-            qual1 =?= qual2
-
-          /* Match varargs */
-          case (Repeated(elems1, _), Repeated(elems2, _)) if elems1.size == elems2.size =>
-            elems1 =?= elems2
-
-          /* Match type */
-          // TODO remove this?
-          case (scrutinee: TypeTree, pattern: TypeTree) if scrutinee.tpe <:< pattern.tpe =>
-            matched
-
-          /* Match val */
-          case (ValDef(_, tpt1, rhs1), ValDef(_, tpt2, rhs2)) if checkValFlags() =>
-            def rhsEnv = summon[Env] + (scrutinee.symbol -> pattern.symbol)
-            tpt1 =?= tpt2 &&& treeOptMatches(rhs1, rhs2)(using rhsEnv)
-
-          /* Match def */
-          case (DefDef(_, paramss1, tpt1, Some(rhs1)), DefDef(_, paramss2, tpt2, Some(rhs2))) =>
-            def rhsEnv =
-              val paramSyms: List[(Symbol, Symbol)] =
-                for
-                  (clause1, clause2) <- paramss1.zip(paramss2)
-                  (param1, param2) <- clause1.params.zip(clause2.params)
-                yield
-                  param1.symbol -> param2.symbol
-              val oldEnv: Env = summon[Env]
-              val newEnv: List[(Symbol, Symbol)] = (scrutinee.symbol -> pattern.symbol) :: paramSyms
-              oldEnv ++ newEnv
-
-            matchLists(paramss1, paramss2)(_ =?= _)
-              &&& tpt1 =?= tpt2
-              &&& withEnv(rhsEnv)(rhs1 =?= rhs2)
-
-          case (Closure(_, tpt1), Closure(_, tpt2)) =>
-            // TODO match tpt1 with tpt2?
-            matched
-
-          case (NamedArg(name1, arg1), NamedArg(name2, arg2)) if name1 == name2 =>
-            arg1 =?= arg2
-
-          // No Match
-          case _ =>
-            if (debug)
-              println(
-                s""">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
-                   |Scrutinee
-                   |  ${scrutinee.show}
-                   |did not match pattern
-                   |  ${pattern.show}
-                   |
-                   |with environment: ${summon[Env]}
-                   |
-                   |Scrutinee: ${scrutinee.show(using Printer.TreeStructure)}
-                   |Pattern: ${pattern.show(using Printer.TreeStructure)}
-                   |
-                   |""".stripMargin)
-            notMatched
-        }
-      }
-    end extension
-
-    extension (scrutinee: ParamClause)
-      /** Check that all parameters in the clauses clauses match with =?= and concatenate the results with &&& */
-      private def =?= (pattern: ParamClause)(using Env)(using DummyImplicit): Matching =
-        (scrutinee, pattern) match
-          case (TermParamClause(params1), TermParamClause(params2)) => matchLists(params1, params2)(_ =?= _)
-          case (TypeParamClause(params1), TypeParamClause(params2)) => matchLists(params1, params2)(_ =?= _)
-          case _ => notMatched
-
-    /** Does the scrutenne symbol match the pattern symbol? It matches if:
-     *   - They are the same symbol
-     *   - The scrutinee has is in the environment and they are equivalent
-     *   - The scrutinee overrides the symbol of the pattern
-     */
-    private def symbolMatch(scrutineeTree: Tree, patternTree: Tree)(using Env): Boolean =
-      val scrutinee = scrutineeTree.symbol
-      val devirtualizedScrutinee = scrutineeTree match
-        case Select(qual, _) =>
-          val sym = scrutinee.overridingSymbol(qual.tpe.typeSymbol)
-          if sym.exists then sym
-          else scrutinee
-        case _ => scrutinee
-      val pattern = patternTree.symbol
-
-      devirtualizedScrutinee == pattern
-      || summon[Env].get(devirtualizedScrutinee).contains(pattern)
-      || devirtualizedScrutinee.allOverriddenSymbols.contains(pattern)
-
-    private object ClosedPatternTerm {
-      /** Matches a term that does not contain free variables defined in the pattern (i.e. not defined in `Env`) */
-      def unapply(term: Term)(using Env): Option[term.type] =
-        if freePatternVars(term).isEmpty then Some(term) else None
-
-      /** Return all free variables of the term defined in the pattern (i.e. defined in `Env`) */
-      def freePatternVars(term: Term)(using env: Env): Set[Symbol] =
-        val accumulator = new TreeAccumulator[Set[Symbol]] {
-          def foldTree(x: Set[Symbol], tree: Tree)(owner: Symbol): Set[Symbol] =
-            tree match
-              case tree: Ident if env.contains(tree.symbol) => foldOverTree(x + tree.symbol, tree)(owner)
-              case _ => foldOverTree(x, tree)(owner)
-        }
-        accumulator.foldTree(Set.empty, term)(Symbol.spliceOwner)
-    }
-
-    private object IdentArgs {
-      def unapply(args: List[Term]): Option[List[Ident]] =
-        args.foldRight(Option(List.empty[Ident])) {
-          case (id: Ident, Some(acc)) => Some(id :: acc)
-          case (Block(List(DefDef("$anonfun", TermParamClause(params) :: Nil, Inferred(), Some(Apply(id: Ident, args)))), Closure(Ident("$anonfun"), None)), Some(acc))
-              if params.zip(args).forall(_.symbol == _.symbol) =>
-            Some(id :: acc)
-          case _ => None
-        }
-    }
-
-    private def treeOptMatches(scrutinee: Option[Tree], pattern: Option[Tree])(using Env): Matching = {
-      (scrutinee, pattern) match {
-        case (Some(x), Some(y)) => x =?= y
-        case (None, None) => matched
-        case _ => notMatched
-      }
-    }
-
-  }
-
-  /** Result of matching a part of an expression */
-  private opaque type Matching = Option[Tuple]
-
-  private object Matching {
-
-    def notMatched: Matching = None
-    val matched: Matching = Some(Tuple())
-    def matched(x: Any): Matching = Some(Tuple1(x))
-
-    extension (self: Matching)
-      def asOptionOfTuple: Option[Tuple] = self
-
-      /** Concatenates the contents of two successful matchings or return a `notMatched` */
-      def &&& (that: => Matching): Matching = self match {
-        case Some(x) =>
-          that match {
-            case Some(y) => Some(x ++ y)
-            case _ => None
-          }
-        case _ => None
-      }
-    end extension
-
-  }
-
-}
diff --git a/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala
new file mode 100644
index 000000000000..6d9ff6ca68a8
--- /dev/null
+++ b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala
@@ -0,0 +1,474 @@
+package scala.quoted
+package runtime.impl
+
+import scala.annotation.internal.sharable
+import scala.annotation.{Annotation, compileTimeOnly}
+
+import dotty.tools.dotc
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.Contexts.*
+import dotty.tools.dotc.core.Flags.*
+import dotty.tools.dotc.core.Names.*
+import dotty.tools.dotc.core.Types.*
+import dotty.tools.dotc.core.StdNames.nme
+import dotty.tools.dotc.core.Symbols.*
+
+/** Matches a quoted tree against a quoted pattern tree.
+ *  A quoted pattern tree may have type and term holes in addition to normal terms.
+ *
+ *
+ *  Semantics:
+ *
+ *  We use `'{..}` for expressions, `'[..]` for types and `⟨..⟩` for patterns nested in expressions.
+ *  The semantics are defined as a list of reduction rules that are tried one by one until one matches.
+ *
+ *   Operations:
+ *   - `s =?= p` checks if a scrutinee `s` matches the pattern `p` while accumulating extracted parts of the code.
+ *   - `isClosedUnder(x1, .., xn)('{e})` returns true if and only if all the references in `e` to names defined in the pattern are contained in the set `{x1, ... xn}`.
+ *   - `lift(x1, .., xn)('{e})` returns `(y1, ..., yn) => [xi = $yi]'{e}` where `yi` is an `Expr` of the type of `xi`.
+ *   - `withEnv(x1 -> y1, ..., xn -> yn)(matching)` evaluates matching recording that `xi` is equivalent to `yi`.
+ *   - `matched` denotes that the match succeeded and `matched('{e})` denotes that a match succeeded and extracts `'{e}`
+ *   - `&&&` matches if both sides match. Concatenates the extracted expressions of both sides.
+ *
+ *   Note that not all quoted terms below are valid expressions
+ *
+ *   ```scala
+ *   /* Term hole */
+ *   '{ e } =?= '{ hole[T] }  &&  typeOf('{e}) <:< T && isClosedUnder()('{e})  ===>   matched('{e})
+ *
+ *   /* Higher order term hole */
+ *   '{ e } =?= '{ hole[(T1, ..., Tn) => T](x1, ..., xn) }  &&  isClosedUnder(x1, ... xn)('{e})  ===>   matched(lift(x1, ..., xn)('{e}))
+ *
+ *   /* Match literal */
+ *   '{ lit } =?= '{ lit }   ===>   matched
+ *
+ *   /* Match type ascription (a) */
+ *   '{ e: T } =?= '{ p }   ===>   '{e} =?= '{p}
+ *
+ *   /* Match type ascription (b) */
+ *   '{ e } =?= '{ p: P }   ===>   '{e} =?= '{p}
+ *
+ *   /* Match selection */
+ *   '{ e.x } =?= '{ p.x }   ===>   '{e} =?= '{p}
+ *
+ *   /* Match reference */
+ *   '{ x } =?= '{ x }   ===>   matched
+ *
+ *   /* Match application */
+ *   '{e0(e1, ..., en)} =?= '{p0(p1, ..., pn)}   ===>   '{e0} =?= '{p0} &&& '{e1} =?= '{p1} &&& ... &&& '{en} =?= '{pn}
+ *
+ *   /* Match type application */
+ *   '{e[T1, ..., Tn]} =?= '{p[P1, ..., Pn]}   ===>   '{e} =?= '{p} &&& '[T1] =?= '[P1] &&& ... &&& '[Tn] =?= '[Pn]
+ *
+ *   /* Match block flattening */
+ *   '{ {e0; e1; ...; en}; em } =?= '{ {p0; p1; ...; pn}; pm }   ===>   '{ e0; {e1; ...; en; em} } =?= '{ p0; {p1; ...; pn; pm} }
+ *
+ *   /* Match block */
+ *   '{ e1; e2 } =?= '{ p1; p2 }   ===>   '{e1} =?= '{p1} &&& '{e2} =?= '{p2}
+ *
+ *   /* Match def block */
+ *   '{ e1; e2 } =?= '{ p1; p2 }   ===>   withEnv(symOf(e1) -> symOf(p1))('{e1} =?= '{p1} &&& '{e2} =?= '{p2})
+ *
+ *   /* Match if */
+ *   '{ if e0 then e1 else e2 } =?= '{ if p0 then p1 else p2 }   ===>  '{e0} =?= '{p0} &&& '{e1} =?= '{p1} &&& '{e2} =?= '{p2}
+ *
+ *   /* Match while */
+ *   '{ while e0 do e1 } =?= '{ while p0 do p1 }   ===>  '{e0} =?= '{p0} &&& '{e1} =?= '{p1}
+ *
+ *   /* Match assign */
+ *   '{ e0 = e1 } =?= '{ p0 = p1 }   ==>   '{e0} =?= '{p0} &&& '{e1} =?= '{p1}
+ *
+ *   /* Match new */
+ *   '{ new T } =?= '{ new T }   ===>   matched
+ *
+ *   /* Match this */
+ *   '{ C.this } =?= '{ C.this }   ===>   matched
+ *
+ *   /* Match super */
+ *   '{ e.super } =?= '{ p.super }   ===>   '{e} =?= '{p}
+ *
+ *   /* Match varargs */
+ *   '{ e: _* } =?= '{ p: _* }   ===>   '{e} =?= '{p}
+ *
+ *   /* Match val */
+ *   '{ val x: T = e1; e2 } =?= '{ val y: P = p1; p2 }   ===>   withEnv(x -> y)('[T] =?= '[P] &&& '{e1} =?= '{p1} &&& '{e2} =?= '{p2})
+ *
+ *   /* Match def */
+ *   '{ def x0(x1: T1, ..., xn: Tn): T0 = e1; e2 } =?= '{ def y0(y1: P1, ..., yn: Pn): P0 = p1; p2 }   ===>   withEnv(x0 -> y0, ..., xn -> yn)('[T0] =?= '[P0] &&& ... &&& '[Tn] =?= '[Pn] &&& '{e1} =?= '{p1} &&& '{e2} =?= '{p2})
+ *
+ *   // Types
+ *
+ *   /* Match type */
+ *   '[T] =?= '[P] && T <:< P   ===>   matched
+ *
+ *   ```
+ */
+object QuoteMatcher {
+  import tpd.*
+
+  // TODO improve performance
+
+  // TODO use flag from Context. Maybe -debug or add -debug-macros
+  private inline val debug = false
+
+  import Matching._
+
+  /** A map relating equivalent symbols from the scrutinee and the pattern
+    *  For example in
+    *  ```
+    *  '{val a = 4; a * a} match case '{ val x = 4; x * x }
+    *  ```
+    *  when matching `a * a` with `x * x` the environment will contain `Map(a -> x)`.
+    */
+  private type Env = Map[Symbol, Symbol]
+
+  private def withEnv[T](env: Env)(body: Env ?=> T): T = body(using env)
+
+  def treeMatch(scrutineeTerm: Tree, patternTerm: Tree)(using Context): Option[Tuple] =
+    given Env = Map.empty
+    scrutineeTerm =?= patternTerm
+
+  /** Check that all trees match with `mtch` and concatenate the results with &&& */
+  private def matchLists[T](l1: List[T], l2: List[T])(mtch: (T, T) => Matching): Matching = (l1, l2) match {
+    case (x :: xs, y :: ys) => mtch(x, y) &&& matchLists(xs, ys)(mtch)
+    case (Nil, Nil) => matched
+    case _ => notMatched
+  }
+
+  extension (scrutinees: List[Tree])
+    private def =?= (patterns: List[Tree])(using Env, Context): Matching =
+      matchLists(scrutinees, patterns)(_ =?= _)
+
+  extension (scrutinee0: Tree)
+
+    /** Check that the trees match and return the contents from the pattern holes.
+      *  Return None if the trees do not match otherwise return Some of a tuple containing all the contents in the holes.
+      *
+      *  @param scrutinee The tree being matched
+      *  @param pattern The pattern tree that the scrutinee should match. Contains `patternHole` holes.
+      *  @param `summon[Env]` Set of tuples containing pairs of symbols (s, p) where s defines a symbol in `scrutinee` which corresponds to symbol p in `pattern`.
+      *  @return `None` if it did not match or `Some(tup: Tuple)` if it matched where `tup` contains the contents of the holes.
+      */
+    private def =?= (pattern0: Tree)(using Env, Context): Matching =
+
+      /* Match block flattening */ // TODO move to cases
+      /** Normalize the tree */
+      def normalize(tree: Tree): Tree = tree match {
+        case Block(Nil, expr) => normalize(expr)
+        case Block(stats1, Block(stats2, expr)) =>
+          expr match
+            case _: Closure => tree
+            case _ => normalize(Block(stats1 ::: stats2, expr))
+        case Inlined(_, Nil, expr) => normalize(expr)
+        case _ => tree
+      }
+
+      val scrutinee = normalize(scrutinee0)
+      val pattern = normalize(pattern0)
+
+      /** Check that both are `val` or both are `lazy val` or both are `var` **/
+      def checkValFlags(): Boolean = {
+        val sFlags = scrutinee.symbol.flags
+        val pFlags = pattern.symbol.flags
+        sFlags.is(Lazy) == pFlags.is(Lazy) && sFlags.is(Mutable) == pFlags.is(Mutable)
+      }
+
+      // TODO remove
+      object TypeTreeTypeTest:
+        def unapply(x: Tree): Option[Tree & x.type] = x match
+          case x: (TypeBoundsTree & x.type) => None
+          case x: (Tree & x.type) if x.isType => Some(x)
+          case _ => None
+      end TypeTreeTypeTest
+
+      val res = pattern match
+
+        /* Term hole */
+        // Match a scala.internal.Quoted.patternHole typed as a repeated argument and return the scrutinee tree
+        case Typed(TypeApply(patternHole, tpt :: Nil), tpt2)
+            if patternHole.symbol.eq(defn.QuotedRuntimePatterns_patternHole) &&
+               tpt2.tpe.derivesFrom(defn.RepeatedParamClass) =>
+          scrutinee match
+            case Typed(s, tpt1) if s.tpe <:< tpt.tpe => matched(scrutinee)
+            case _ => notMatched
+
+        /* Term hole */
+        // Match a scala.internal.Quoted.patternHole and return the scrutinee tree
+        case TypeApply(patternHole, tpt :: Nil)
+            if patternHole.symbol.eq(defn.QuotedRuntimePatterns_patternHole) &&
+                scrutinee.tpe <:< tpt.tpe =>
+          scrutinee match
+            case ClosedPatternTerm(scrutinee) => matched(scrutinee)
+            case _ => notMatched
+
+
+        /* Higher order term hole */
+        // Matches an open term and wraps it into a lambda that provides the free variables
+        case Apply(TypeApply(Ident(_), List(TypeTree())), SeqLiteral(args, _) :: Nil)
+            if pattern.symbol.eq(defn.QuotedRuntimePatterns_higherOrderHole) =>
+          val names: List[TermName] = args.map {
+            case Block(List(DefDef(nme.ANON_FUN, _, _, Apply(Ident(name), _))), _) => name.asTermName
+            case arg => arg.symbol.name.asTermName
+          }
+          val argTypes = args.map(x => x.tpe.widenTermRefExpr)
+          val methTpe = MethodType(names)(_ => argTypes, _ => pattern.tpe)
+          val meth = newAnonFun(ctx.owner, methTpe)
+          def bodyFn(lambdaArgss: List[List[Tree]]): Tree = {
+            val argsMap = args.map(_.symbol).zip(lambdaArgss.head).toMap
+            val body = new TreeMap {
+              override def transform(tree: Tree)(using Context): Tree =
+                tree match
+                  case tree: Ident => summon[Env].get(tree.symbol).flatMap(argsMap.get).getOrElse(tree)
+                  case tree => super.transform(tree)
+            }.transform(scrutinee)
+            TreeOps(body).changeNonLocalOwners(meth)
+          }
+          matched(Closure(meth, bodyFn))
+
+        /* Match type ascription (b) */
+        case Typed(expr2, _) =>
+          scrutinee =?= expr2
+
+        case _ =>
+          scrutinee match
+            /* Match type ascription (a) */
+            case Typed(expr1, _) =>
+              expr1 =?= pattern
+
+            /* Match literal */
+            case Literal(constant1) =>
+              pattern match
+                case Literal(constant2) if constant1 == constant2 => matched
+                case _ => notMatched
+
+            case ref: RefTree =>
+              pattern match
+                /* Match selection */
+                case Select(qual2, _) if symbolMatch(scrutinee, pattern) =>
+                  ref match
+                    case Select(qual1, _) => qual1 =?= qual2
+                    case ref: Ident =>
+                      ref.tpe match
+                        case TermRef(qual: TermRef, _) => tpd.ref(qual) =?= qual2
+                        case _ => matched
+                /* Match reference */
+                case _: Ident if symbolMatch(scrutinee, pattern) => matched
+                /* Match type */
+                case TypeTreeTypeTest(pattern) if scrutinee.tpe <:< pattern.tpe => matched
+                case _ => notMatched
+
+            /* Match application */
+            case Apply(fn1, args1) =>
+              pattern match
+                case Apply(fn2, args2) =>
+                  fn1 =?= fn2 &&& args1 =?= args2
+                case _ => notMatched
+
+            /* Match type application */
+            case TypeApply(fn1, args1) =>
+              pattern match
+                case TypeApply(fn2, args2) =>
+                  fn1 =?= fn2 &&& args1 =?= args2
+                case _ => notMatched
+
+            /* Match block */
+            case Block(stat1 :: stats1, expr1) =>
+              pattern match
+                case Block(stat2 :: stats2, expr2) =>
+                  val newEnv = (stat1, stat2) match {
+                    case (stat1: MemberDef, stat2: MemberDef) =>
+                      summon[Env] + (stat1.symbol -> stat2.symbol)
+                    case _ =>
+                      summon[Env]
+                  }
+                  withEnv(newEnv) {
+                    stat1 =?= stat2 &&& Block(stats1, expr1) =?= Block(stats2, expr2)
+                  }
+                case _ => notMatched
+
+            /* Match if */
+            case If(cond1, thenp1, elsep1) =>
+              pattern match
+                case If(cond2, thenp2, elsep2) =>
+                  cond1 =?= cond2 &&& thenp1 =?= thenp2 &&& elsep1 =?= elsep2
+                case _ => notMatched
+
+            /* Match while */
+            case WhileDo(cond1, body1) =>
+              pattern match
+                case WhileDo(cond2, body2) => cond1 =?= cond2 &&& body1 =?= body2
+                case _ => notMatched
+
+            /* Match assign */
+            case Assign(lhs1, rhs1) =>
+              pattern match
+                case Assign(lhs2, rhs2) => lhs1 =?= lhs2 &&& rhs1 =?= rhs2
+                case _ => notMatched
+
+            /* Match new */
+            case New(tpt1) =>
+              pattern match
+                case New(tpt2) if tpt1.tpe.typeSymbol == tpt2.tpe.typeSymbol => matched
+                case _ => notMatched
+
+            /* Match this */
+            case This(_) =>
+              pattern match
+                case This(_) if scrutinee.symbol == pattern.symbol => matched
+                case _ => notMatched
+
+            /* Match super */
+            case Super(qual1, mix1) =>
+              pattern match
+                case Super(qual2, mix2) if mix1 == mix2 => qual1 =?= qual2
+                case _ => notMatched
+
+            /* Match varargs */
+            case SeqLiteral(elems1, _) =>
+              pattern match
+                case SeqLiteral(elems2, _) if elems1.size == elems2.size => elems1 =?= elems2
+                case _ => notMatched
+
+            /* Match type */
+            // TODO remove this?
+            case TypeTreeTypeTest(scrutinee) =>
+              pattern match
+                case TypeTreeTypeTest(pattern) if scrutinee.tpe <:< pattern.tpe => matched
+                case _ => notMatched
+
+            /* Match val */
+            case scrutinee @ ValDef(_, tpt1, _) =>
+              pattern match
+                case pattern @ ValDef(_, tpt2, _) if checkValFlags() =>
+                  def rhsEnv = summon[Env] + (scrutinee.symbol -> pattern.symbol)
+                  tpt1 =?= tpt2 &&& withEnv(rhsEnv)(scrutinee.rhs =?= pattern.rhs)
+                case _ => notMatched
+
+            /* Match def */
+            case scrutinee @ DefDef(_, paramss1, tpt1, _) =>
+              pattern match
+                case pattern @ DefDef(_, paramss2, tpt2, _) =>
+                  def rhsEnv: Env =
+                    val paramSyms: List[(Symbol, Symbol)] =
+                      for
+                        (clause1, clause2) <- paramss1.zip(paramss2)
+                        (param1, param2) <- clause1.zip(clause2)
+                      yield
+                        param1.symbol -> param2.symbol
+                    val oldEnv: Env = summon[Env]
+                    val newEnv: List[(Symbol, Symbol)] = (scrutinee.symbol -> pattern.symbol) :: paramSyms
+                    oldEnv ++ newEnv
+                  matchLists(paramss1, paramss2)(_ =?= _)
+                    &&& tpt1 =?= tpt2
+                    &&& withEnv(rhsEnv)(scrutinee.rhs =?= pattern.rhs)
+                case _ => notMatched
+
+            case Closure(_, _, tpt1) =>
+              pattern match
+                case Closure(_, _, tpt2) => matched // TODO match tpt1 with tpt2?
+                case _ => notMatched
+
+            case NamedArg(name1, arg1) =>
+              pattern match
+                case NamedArg(name2, arg2) if name1 == name2 => arg1 =?= arg2
+                case _ => notMatched
+
+            case EmptyTree =>
+              if pattern.isEmpty then matched
+              else notMatched
+
+            // No Match
+            case _ =>
+              notMatched
+
+      if (debug && res == notMatched)
+        val quotes = QuotesImpl()
+        println(
+          s""">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
+              |Scrutinee
+              |  ${scrutinee.show}
+              |did not match pattern
+              |  ${pattern.show}
+              |
+              |with environment: ${summon[Env]}
+              |
+              |Scrutinee: ${quotes.reflect.Printer.TreeStructure.show(scrutinee.asInstanceOf)}
+              |Pattern: ${quotes.reflect.Printer.TreeStructure.show(pattern.asInstanceOf)}
+              |
+              |""".stripMargin)
+
+      res
+    end =?=
+
+  end extension
+
+  /** Does the scrutinee symbol match the pattern symbol? It matches if:
+    *   - They are the same symbol
+    *   - The scrutinee is in the environment and they are equivalent
+    *   - The scrutinee overrides the symbol of the pattern
+    */
+  private def symbolMatch(scrutineeTree: Tree, patternTree: Tree)(using Env, Context): Boolean =
+    val scrutinee = scrutineeTree.symbol
+
+    def overridingSymbol(ofclazz: Symbol): Symbol =
+      if ofclazz.isClass then scrutinee.denot.overridingSymbol(ofclazz.asClass)
+      else NoSymbol
+
+    val devirtualizedScrutinee = scrutineeTree match
+      case Select(qual, _) =>
+        val sym = overridingSymbol(qual.tpe.typeSymbol)
+        if sym.exists then sym
+        else scrutinee
+      case _ => scrutinee
+    val pattern = patternTree.symbol
+
+
+    devirtualizedScrutinee == pattern
+    || summon[Env].get(devirtualizedScrutinee).contains(pattern)
+    || devirtualizedScrutinee.allOverriddenSymbols.contains(pattern)
+
+  private object ClosedPatternTerm {
+    /** Matches a term that does not contain free variables defined in the pattern (i.e. not defined in `Env`) */
+    def unapply(term: Tree)(using Env, Context): Option[term.type] =
+      if freePatternVars(term).isEmpty then Some(term) else None
+
+    /** Return all free variables of the term defined in the pattern (i.e. defined in `Env`) */
+    def freePatternVars(term: Tree)(using Env, Context): Set[Symbol] =
+      val accumulator = new TreeAccumulator[Set[Symbol]] {
+        def apply(x: Set[Symbol], tree: Tree)(using Context): Set[Symbol] =
+          tree match
+            case tree: Ident if summon[Env].contains(tree.symbol) => foldOver(x + tree.symbol, tree)
+            case _ => foldOver(x, tree)
+      }
+      accumulator.apply(Set.empty, term)
+  }
+
+  /** Result of matching a part of an expression */
+  private opaque type Matching = Option[Tuple]
+
+  private object Matching {
+
+    def notMatched: Matching = None
+
+    val matched: Matching = Some(Tuple())
+
+    def matched(tree: Tree)(using Context): Matching =
+      Some(Tuple1(new ExprImpl(tree, SpliceScope.getCurrent)))
+
+    extension (self: Matching)
+      def asOptionOfTuple: Option[Tuple] = self
+
+      /** Concatenates the contents of two successful matchings or return a `notMatched` */
+      def &&& (that: => Matching): Matching = self match {
+        case Some(x) =>
+          that match {
+            case Some(y) => Some(x ++ y)
+            case _ => None
+          }
+        case _ => None
+      }
+    end extension
+
+  }
+
+}
diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala
index 9a1df9d01f2d..000f84199c27 100644
--- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala
+++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala
@@ -45,6 +45,14 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
     def matches(that: scala.quoted.Expr[Any]): Boolean =
       treeMatch(reflect.asTerm(self), reflect.asTerm(that)).nonEmpty
 
+    def valueOrAbort(using fromExpr: FromExpr[T]): T =
+      def reportError =
+        val tree = reflect.asTerm(self)
+        val code = reflect.Printer.TreeCode.show(tree)
+        val msg = s"Expected a known value. \n\nThe value of: $code\ncould not be extracted using $fromExpr"
+        reflect.report.throwError(msg, self)
+      fromExpr.unapply(self)(using QuotesImpl.this).getOrElse(reportError)
+
   end extension
 
   extension (self: scala.quoted.Expr[Any])
@@ -70,6 +78,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
 
     object CompilationInfo extends CompilationInfoModule:
       def isWhileTyping: Boolean = !ctx.isAfterTyper
+      def XmacroSettings: List[String] = ctx.settings.XmacroSettings.value
     end CompilationInfo
 
     extension (expr: Expr[Any])
@@ -97,7 +106,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
             new ExprImpl(self, SpliceScope.getCurrent)
           else self match
             case TermTypeTest(self) => throw new Exception("Expected an expression. This is a partially applied Term. Try eta-expanding the term first.")
-            case _ => throw new Exception("Expected a Term but was: " + self)
+            case _ => throw new Exception("Expected a Term but was: " + Printer.TreeStructure.show(self))
       end extension
 
       extension (self: Tree)
@@ -186,10 +195,11 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
 
     object StatementTypeTest extends TypeTest[Tree, Statement]:
       def unapply(x: Tree): Option[Statement & x.type] = x match
-        case _: tpd.PatternTree => None
-        case _ =>
-          if x.isTerm then TermTypeTest.unapply(x)
-          else DefinitionTypeTest.unapply(x)
+        case TermTypeTest(x: x.type) => Some(x)
+        case DefinitionTypeTest(x: x.type) => Some(x)
+        case ImportTypeTest(x: x.type) => Some(x)
+        case ExportTypeTest(x: x.type) => Some(x)
+        case _ => None
     end StatementTypeTest
 
     type Definition = tpd.MemberDef
@@ -286,15 +296,15 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
       def unapply(vdef: ValDef): (String, TypeTree, Option[Term]) =
         (vdef.name.toString, vdef.tpt, optional(vdef.rhs))
 
-      def let(owner: Symbol, name: String, rhs: Term)(body: Ident => Term): Term =
+      def let(owner: Symbol, name: String, rhs: Term)(body: Ref => Term): Term =
         val vdef = tpd.SyntheticValDef(name.toTermName, rhs)(using ctx.withOwner(owner))
-        val ref = tpd.ref(vdef.symbol).asInstanceOf[Ident]
+        val ref = tpd.ref(vdef.symbol).asInstanceOf[Ref]
         Block(List(vdef), body(ref))
 
-      def let(owner: Symbol, terms: List[Term])(body: List[Ident] => Term): Term =
+      def let(owner: Symbol, terms: List[Term])(body: List[Ref] => Term): Term =
         val ctx1 = ctx.withOwner(owner)
         val vdefs = terms.map(term => tpd.SyntheticValDef("x".toTermName, term)(using ctx1))
-        val refs = vdefs.map(vdef => tpd.ref(vdef.symbol).asInstanceOf[Ident])
+        val refs = vdefs.map(vdef => tpd.ref(vdef.symbol).asInstanceOf[Ref])
         Block(vdefs, body(refs))
     end ValDef
 
@@ -332,13 +342,13 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
 
     object TermTypeTest extends TypeTest[Tree, Term]:
       def unapply(x: Tree): Option[Term & x.type] = x match
-        case _ if UnapplyTypeTest.unapply(x).isDefined => None
-        case _: tpd.PatternTree => None
-        case x: (tpd.Tree & x.type) if x.isTerm => Some(x)
+        case x: tpd.PatternTree => None
         case x: (tpd.SeqLiteral & x.type) => Some(x)
         case x: (tpd.Inlined & x.type) => Some(x)
         case x: (tpd.NamedArg & x.type) => Some(x)
-        case _ => None
+        case x: (tpd.Typed & x.type) =>
+          TypedTypeTest.unapply(x) // Matches `Typed` but not `TypedOrTest`
+        case _ => if x.isTerm then Some(x) else None
     end TermTypeTest
 
     object Term extends TermModule:
@@ -376,7 +386,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
               case t => t
             }
             val closureTpe = Types.MethodType(mtpe.paramNames, mtpe.paramInfos, closureResType)
-            val closureMethod = dotc.core.Symbols.newSymbol(owner, nme.ANON_FUN, Synthetic | Method, closureTpe)
+            val closureMethod = dotc.core.Symbols.newAnonFun(owner, closureTpe)
             tpd.Closure(closureMethod, tss => new tpd.TreeOps(self).appliedToTermArgs(tss.head).etaExpand(closureMethod))
           case _ => self
         }
@@ -441,6 +451,20 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
       end extension
     end IdentMethods
 
+    type Wildcard = tpd.Ident
+
+    object WildcardTypeTest extends TypeTest[Tree, Wildcard]:
+      def unapply(x: Tree): Option[Wildcard & x.type] = x match
+        case x: (tpd.Ident & x.type) if x.name == nme.WILDCARD => Some(x)
+        case _ => None
+    end WildcardTypeTest
+
+    object Wildcard extends WildcardModule:
+      def apply(): Wildcard =
+        withDefaultPos(untpd.Ident(nme.WILDCARD).withType(dotc.core.Symbols.defn.AnyType))
+      def unapply(pattern: Wildcard): true = true
+    end Wildcard
+
     type Select = tpd.Select
 
     object SelectTypeTest extends TypeTest[Tree, Select]:
@@ -648,7 +672,10 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
 
     object TypedTypeTest extends TypeTest[Tree, Typed]:
       def unapply(x: Tree): Option[Typed & x.type] = x match
-        case x: (tpd.Typed & x.type) => Some(x)
+        case x: (tpd.Typed & x.type) =>
+          x.expr match
+            case TermTypeTest(_) => Some(x)
+            case _ => None
         case _ => None
     end TypedTypeTest
 
@@ -668,6 +695,31 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
       end extension
     end TypedMethods
 
+    type TypedOrTest = tpd.Typed
+
+    object TypedOrTestTypeTest extends TypeTest[Tree, TypedOrTest]:
+      def unapply(x: Tree): Option[TypedOrTest & x.type] = x match
+        case x: (tpd.Typed & x.type) => Some(x)
+        case _ => None
+    end TypedOrTestTypeTest
+
+    object TypedOrTest extends TypedOrTestModule:
+      def apply(expr: Term, tpt: TypeTree): Typed =
+        withDefaultPos(tpd.Typed(xCheckMacroValidExpr(expr), tpt))
+      def copy(original: Tree)(expr: Term, tpt: TypeTree): Typed =
+        tpd.cpy.Typed(original)(xCheckMacroValidExpr(expr), tpt)
+      def unapply(x: Typed): (Term, TypeTree) =
+        (x.expr, x.tpt)
+    end TypedOrTest
+
+    given TypedOrTestMethods: TypedOrTestMethods with
+      extension (self: Typed)
+        def tree: Tree = self.expr
+        def tpt: TypeTree = self.tpt
+      end extension
+    end TypedOrTestMethods
+
+
     type Assign = tpd.Assign
 
     object AssignTypeTest extends TypeTest[Tree, Assign]:
@@ -702,9 +754,9 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
 
     object Block extends BlockModule:
       def apply(stats: List[Statement], expr: Term): Block =
-        withDefaultPos(tpd.Block(stats, expr))
+        xCheckMacroBlockOwners(withDefaultPos(tpd.Block(stats, expr)))
       def copy(original: Tree)(stats: List[Statement], expr: Term): Block =
-        tpd.cpy.Block(original)(stats, expr)
+        xCheckMacroBlockOwners(tpd.cpy.Block(original)(stats, expr))
       def unapply(x: Block): (List[Statement], Term) =
         (x.statements, x.expr)
     end Block
@@ -742,8 +794,8 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
 
     object Lambda extends LambdaModule:
       def apply(owner: Symbol, tpe: MethodType, rhsFn: (Symbol, List[Tree]) => Tree): Block =
-        val meth = dotc.core.Symbols.newSymbol(owner, nme.ANON_FUN, Synthetic | Method, tpe)
-        tpd.Closure(meth, tss => xCheckMacroedOwners(xCheckMacroValidExpr(rhsFn(meth, tss.head.map(withDefaultPos))), meth))
+        val meth = dotc.core.Symbols.newAnonFun(owner, tpe)
+        withDefaultPos(tpd.Closure(meth, tss => xCheckMacroedOwners(xCheckMacroValidExpr(rhsFn(meth, tss.head.map(withDefaultPos))), meth)))
 
       def unapply(tree: Block): Option[(List[ValDef], Term)] = tree match {
         case Block((ddef @ DefDef(_, tpd.ValDefs(params) :: Nil, _, Some(body))) :: Nil, Closure(meth, _))
@@ -1021,7 +1073,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
 
     object TypeIdentTypeTest extends TypeTest[Tree, TypeIdent]:
       def unapply(x: Tree): Option[TypeIdent & x.type] = x match
-        case tpt: (tpd.Ident & x.type) if tpt.isType => Some(tpt)
+        case tpt: (tpd.Ident & x.type) if tpt.isType && tpt.name != nme.WILDCARD => Some(tpt)
         case _ => None
     end TypeIdentTypeTest
 
@@ -1324,10 +1376,12 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
         def tpe: TypeBounds = self.tpe.asInstanceOf[Types.TypeBounds]
         def low: TypeTree = self match
           case self: tpd.TypeBoundsTree => self.lo
-          case self: tpd.TypeTree => tpd.TypeTree(self.tpe.asInstanceOf[Types.TypeBounds].lo).withSpan(self.span)
+          case self: tpd.TypeTree => makeTypeDef(self.tpe.asInstanceOf[Types.TypeBounds].lo)
         def hi: TypeTree = self match
           case self: tpd.TypeBoundsTree => self.hi
-          case self: tpd.TypeTree => tpd.TypeTree(self.tpe.asInstanceOf[Types.TypeBounds].hi).withSpan(self.span)
+          case self: tpd.TypeTree => makeTypeDef(self.tpe.asInstanceOf[Types.TypeBounds].hi)
+        private def makeTypeDef(tpe: Types.Type) =
+          tpd.TypeTree(tpe)(using ctx.withSource(self.source)).withSpan(self.span)
       end extension
     end TypeBoundsTreeMethods
 
@@ -1335,7 +1389,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
 
     object WildcardTypeTreeTypeTest extends TypeTest[Tree, WildcardTypeTree]:
       def unapply(x: Tree): Option[WildcardTypeTree & x.type] = x match
-        case x: (tpd.Ident & x.type) if x.name == nme.WILDCARD => Some(x)
+        case x: (tpd.Ident & x.type) if x.isType && x.name == nme.WILDCARD => Some(x)
         case _ => None
     end WildcardTypeTreeTypeTest
 
@@ -1423,17 +1477,17 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
       end extension
     end BindMethods
 
-    type Unapply = tpd.UnApply | tpd.Typed // tpd.Typed containing a tpd.UnApply as expression
+    type Unapply = tpd.UnApply
 
     object UnapplyTypeTest extends TypeTest[Tree, Unapply]:
-      def unapply(x: Tree): Option[Unapply & x.type] =
-        x match // keep in sync with UnapplyMethodsImpl.selfUnApply
-          case x: (tpd.UnApply & x.type) => Some(x)
-          case x: (tpd.Typed & x.type) if x.expr.isInstanceOf[tpd.UnApply] => Some(x)
-          case _ => None
+      def unapply(x: Tree): Option[Unapply & x.type] = x match
+        case x: (tpd.UnApply & x.type) => Some(x)
+        case _ => None
     end UnapplyTypeTest
 
     object Unapply extends UnapplyModule:
+      def apply(fun: Term, implicits: List[Term], patterns: List[Tree]): Unapply =
+        withDefaultPos(tpd.UnApply(fun, implicits, patterns, dotc.core.Symbols.defn.NothingType))
       def copy(original: Tree)(fun: Term, implicits: List[Term], patterns: List[Tree]): Unapply =
         withDefaultPos(tpd.cpy.UnApply(original)(fun, implicits, patterns))
       def unapply(x: Unapply): (Term, List[Term], List[Tree]) =
@@ -1442,14 +1496,10 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
 
     given UnapplyMethods: UnapplyMethods with
       extension (self: Unapply)
-        def fun: Term = selfUnApply(self).fun
-        def implicits: List[Term] = selfUnApply(self).implicits
-        def patterns: List[Tree] = effectivePatterns(selfUnApply(self).patterns)
-      end extension
-      private def selfUnApply(self: Unapply): tpd.UnApply =
-        self match // keep in sync with UnapplyTypeTest
-          case self: tpd.UnApply => self
-          case self: tpd.Typed => self.expr.asInstanceOf[tpd.UnApply]
+        def fun: Term = self.fun
+        def implicits: List[Term] = self.implicits
+        def patterns: List[Tree] = effectivePatterns(self.patterns)
+      end extension
       private def effectivePatterns(patterns: List[Tree]): List[Tree] =
         patterns match
           case patterns0 :+ dotc.ast.Trees.SeqLiteral(elems, _) => patterns0 ::: elems
@@ -1691,11 +1741,19 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
           val tpNoRefinement = self.dropDependentRefinement
           tpNoRefinement != self
           && dotc.core.Symbols.defn.isNonRefinedFunction(tpNoRefinement)
+        def isTupleN: Boolean =
+          dotc.core.Symbols.defn.isTupleNType(self)
         def select(sym: Symbol): TypeRepr = self.select(sym)
         def appliedTo(targ: TypeRepr): TypeRepr =
           dotc.core.Types.decorateTypeApplications(self).appliedTo(targ)
         def appliedTo(targs: List[TypeRepr]): TypeRepr =
           dotc.core.Types.decorateTypeApplications(self).appliedTo(targs)
+        def substituteTypes(from: List[Symbol], to: List[TypeRepr]): TypeRepr =
+          self.subst(from, to)
+
+        def typeArgs: List[TypeRepr] = self match
+          case AppliedType(_, args) => args
+          case _ => List.empty
       end extension
     end TypeReprMethods
 
@@ -1726,7 +1784,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
 
     given NamedTypeMethods: NamedTypeMethods with
       extension (self: NamedType)
-        def qualifier: TypeRepr = self.prefix
+        def qualifier: TypeRepr = self.prefix.widenSkolem
         def name: String = self.name.toString
       end extension
     end NamedTypeMethods
@@ -1825,13 +1883,13 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
 
     object AppliedType extends AppliedTypeModule:
       def unapply(x: AppliedType): (TypeRepr, List[TypeRepr]) =
-        (x.tycon, x.args)
+        (AppliedTypeMethods.tycon(x), AppliedTypeMethods.args(x))
     end AppliedType
 
     given AppliedTypeMethods: AppliedTypeMethods with
       extension (self: AppliedType)
-        def tycon: TypeRepr = self.tycon
-        def args: List[TypeRepr] = self.args
+        def tycon: TypeRepr = self.tycon.stripTypeVar
+        def args: List[TypeRepr] = self.args.mapConserve(_.stripTypeVar)
       end extension
     end AppliedTypeMethods
 
@@ -2427,6 +2485,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
 
         def name: String = self.denot.name.toString
         def fullName: String = self.denot.fullName.toString
+
         def pos: Option[Position] =
           if self.exists then Some(self.sourcePos) else None
 
@@ -2454,7 +2513,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
           }
 
         def isDefinedInCurrentRun: Boolean =
-          self.topLevelClass.asClass.isDefinedInCurrentRun
+          self.exists && self.topLevelClass.asClass.isDefinedInCurrentRun
         def isLocalDummy: Boolean = self.denot.isLocalDummy
         def isRefinementClass: Boolean = self.denot.isRefinementClass
         def isAliasType: Boolean = self.denot.isAliasType
@@ -2482,13 +2541,22 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
 
         def declaredFields: List[Symbol] = self.unforcedDecls.filter(isField)
 
-        def memberField(name: String): Symbol =
-          appliedTypeRef(self).allMembers.iterator.map(_.symbol).find {
+        /** The prefix on which a member lookup should be performed. */
+        private def lookupPrefix: TypeRepr =
+          if self.isClass then
+            self.thisType // Needed to handle self-types (as in tests/run-macros/self)
+          else
+            self.namedType
+
+        def memberField(name: String): Symbol = fieldMember(name)
+        def fieldMember(name: String): Symbol =
+          lookupPrefix.allMembers.iterator.map(_.symbol).find {
             sym => isField(sym) && sym.name.toString == name
           }.getOrElse(dotc.core.Symbols.NoSymbol)
 
-        def memberFields: List[Symbol] =
-          appliedTypeRef(self).allMembers.iterator.map(_.symbol).collect {
+        def memberFields: List[Symbol] = fieldMembers
+        def fieldMembers: List[Symbol] =
+          lookupPrefix.allMembers.iterator.map(_.symbol).collect {
             case sym if isField(sym) => sym.asTerm
           }.toList
 
@@ -2502,13 +2570,15 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
             case sym if isMethod(sym) => sym.asTerm
           }.toList
 
-        def memberMethod(name: String): List[Symbol] =
-          appliedTypeRef(self).allMembers.iterator.map(_.symbol).collect {
+        def memberMethod(name: String): List[Symbol] = methodMember(name)
+        def methodMember(name: String): List[Symbol] =
+          lookupPrefix.allMembers.iterator.map(_.symbol).collect {
             case sym if isMethod(sym) && sym.name.toString == name => sym.asTerm
           }.toList
 
-        def memberMethods: List[Symbol] =
-          appliedTypeRef(self).allMembers.iterator.map(_.symbol).collect {
+        def memberMethods: List[Symbol] = methodMembers
+        def methodMembers: List[Symbol] =
+          lookupPrefix.allMembers.iterator.map(_.symbol).collect {
             case sym if isMethod(sym) => sym.asTerm
           }.toList
 
@@ -2522,10 +2592,12 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
             case sym if sym.isType => sym.asType
           }.toList
 
-        def memberType(name: String): Symbol =
+        def memberType(name: String): Symbol = typeMember(name)
+        def typeMember(name: String): Symbol =
           self.unforcedDecls.find(sym => sym.name == name.toTypeName)
 
-        def memberTypes: List[Symbol] =
+        def memberTypes: List[Symbol] = typeMembers
+        def typeMembers: List[Symbol] =
           self.unforcedDecls.filter(_.isType)
 
         def declarations: List[Symbol] =
@@ -2733,6 +2805,9 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
     given SourceFileMethods: SourceFileMethods with
       extension (self: SourceFile)
         def jpath: java.nio.file.Path = self.file.jpath
+        def getJPath: Option[java.nio.file.Path] = Option(self.file.jpath)
+        def name: String = self.name
+        def path: String = self.path
         def content: Option[String] =
           // TODO detect when we do not have a source and return None
           Some(new String(self.content()))
@@ -2751,14 +2826,23 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
         dotc.report.error(msg, pos)
 
       def throwError(msg: String): Nothing =
+        errorAndAbort(msg)
+
+      def throwError(msg: String, expr: Expr[Any]): Nothing =
+        errorAndAbort(msg, expr)
+
+      def throwError(msg: String, pos: Position): Nothing =
+        errorAndAbort(msg, pos)
+
+      def errorAndAbort(msg: String): Nothing =
         error(msg)
         throw new scala.quoted.runtime.StopMacroExpansion
 
-      def throwError(msg: String, expr: Expr[Any]): Nothing =
+      def errorAndAbort(msg: String, expr: Expr[Any]): Nothing =
         error(msg, expr)
         throw new scala.quoted.runtime.StopMacroExpansion
 
-      def throwError(msg: String, pos: Position): Nothing =
+      def errorAndAbort(msg: String, pos: Position): Nothing =
         error(msg, pos)
         throw new scala.quoted.runtime.StopMacroExpansion
 
@@ -2835,6 +2919,28 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
             case _ => traverseChildren(t)
       }.traverse(tree)
 
+    /** Checks that all definitions in this block have the same owner.
+     *  Nested definitions are ignored and assumed to be correct by construction.
+     */
+    private def xCheckMacroBlockOwners(tree: Tree): tree.type =
+      if xCheckMacro then
+        val defs = new tpd.TreeAccumulator[List[Tree]] {
+          def apply(defs: List[Tree], tree: Tree)(using Context): List[Tree] =
+            tree match
+              case tree: tpd.DefTree => tree :: defs
+              case _ => foldOver(defs, tree)
+        }.apply(Nil, tree)
+        val defOwners = defs.groupBy(_.symbol.owner)
+        assert(defOwners.size <= 1,
+          s"""Block contains definition with different owners.
+            |Found definitions ${defOwners.size} distinct owners: ${defOwners.keys.mkString(", ")}
+            |
+            |Block: ${Printer.TreeCode.show(tree)}
+            |
+            |${defOwners.map((owner, trees) => s"Definitions owned by $owner: \n${trees.map(Printer.TreeCode.show).mkString("\n")}").mkString("\n\n")}
+            |""".stripMargin)
+      tree
+
     private def xCheckMacroValidExprs(terms: List[Term]): terms.type =
       if xCheckMacro then terms.foreach(xCheckMacroValidExpr)
       terms
@@ -2942,24 +3048,17 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler
         ctx1.gadt.addToConstraint(typeHoles)
         ctx1
 
-    val qctx1 = QuotesImpl()(using ctx1)
-
-    val matcher = new Matcher.QuoteMatcher[qctx1.type](qctx1) {
-      def patternHoleSymbol: qctx1.reflect.Symbol = dotc.core.Symbols.defn.QuotedRuntimePatterns_patternHole.asInstanceOf
-      def higherOrderHoleSymbol: qctx1.reflect.Symbol = dotc.core.Symbols.defn.QuotedRuntimePatterns_higherOrderHole.asInstanceOf
-    }
-
-    val matchings =
-      if pat1.isType then matcher.termMatch(scrutinee.asInstanceOf[matcher.qctx.reflect.Term], pat1.asInstanceOf[matcher.qctx.reflect.Term])
-      else matcher.termMatch(scrutinee.asInstanceOf[matcher.qctx.reflect.Term], pat1.asInstanceOf[matcher.qctx.reflect.Term])
+    val matchings = QuoteMatcher.treeMatch(scrutinee, pat1)(using ctx1)
 
-    // val matchings = matcher.termMatch(scrutinee, pattern)
     if typeHoles.isEmpty then matchings
     else {
       // After matching and doing all subtype checks, we have to approximate all the type bindings
       // that we have found, seal them in a quoted.Type and add them to the result
       def typeHoleApproximation(sym: Symbol) =
-        ctx1.gadt.approximation(sym, !sym.hasAnnotation(dotc.core.Symbols.defn.QuotedRuntimePatterns_fromAboveAnnot)).asInstanceOf[qctx1.reflect.TypeRepr].asType
+        val fromAboveAnnot = sym.hasAnnotation(dotc.core.Symbols.defn.QuotedRuntimePatterns_fromAboveAnnot)
+        val fullBounds = ctx1.gadt.fullBounds(sym)
+        val tp = if fromAboveAnnot then fullBounds.hi else fullBounds.lo
+        reflect.TypeReprMethods.asType(tp)
       matchings.map { tup =>
         Tuple.fromIArray(typeHoles.map(typeHoleApproximation).toArray.asInstanceOf[IArray[Object]]) ++ tup
       }
diff --git a/compiler/src/scala/quoted/runtime/impl/ScopeException.scala b/compiler/src/scala/quoted/runtime/impl/ScopeException.scala
index ae16d7434a18..d65328bb5405 100644
--- a/compiler/src/scala/quoted/runtime/impl/ScopeException.scala
+++ b/compiler/src/scala/quoted/runtime/impl/ScopeException.scala
@@ -33,4 +33,9 @@ object ScopeException:
             |
             |Use stack:
             |${currentScope.stack.mkString("\t", "\n\t", "\n")}
+            |
+            |Hint: A common reason for this to happen is when a `def` that creates a `'{...}`
+            |      captures an outer instance of `Quotes`. If this `def` is called in a splice
+            |      it will not track the `Quotes` provided by that particular splice.
+            |      To fix it add a `given Quotes` to this `def`.
           """.stripMargin)
diff --git a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala
index b55ae6cbb166..0bea8f0ab643 100644
--- a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala
+++ b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala
@@ -33,6 +33,7 @@ object Extractors {
     if (flags.is(Flags.ExtensionMethod)) flagList += "Flags.ExtensionMethod"
     if (flags.is(Flags.FieldAccessor)) flagList += "Flags.FieldAccessor"
     if (flags.is(Flags.Final)) flagList += "Flags.Final"
+    if (flags.is(Flags.Given)) flagList += "Flags.Given"
     if (flags.is(Flags.HasDefault)) flagList += "Flags.HasDefault"
     if (flags.is(Flags.Implicit)) flagList += "Flags.Implicit"
     if (flags.is(Flags.Infix)) flagList += "Flags.Infix"
@@ -71,10 +72,14 @@ object Extractors {
     def result(): String = sb.result()
 
     def visitTree(x: Tree): this.type = x match {
-      case Ident(name) =>
-        this += "Ident(\"" += name += "\")"
-      case Select(qualifier, name) =>
-        this += "Select(" += qualifier += ", \"" += name += "\")"
+      case tree: Ref =>
+        tree match
+          case Wildcard() =>
+            this += "Wildcard()"
+          case Ident(name) =>
+            this += "Ident(\"" += name += "\")"
+          case Select(qualifier, name) =>
+            this += "Select(" += qualifier += ", \"" += name += "\")"
       case This(qual) =>
         this += "This(" += qual += ")"
       case Super(qual, mix) =>
@@ -170,7 +175,9 @@ object Extractors {
       case Unapply(fun, implicits, patterns) =>
         this += "Unapply(" += fun += ", " ++= implicits += ", " ++= patterns += ")"
       case Alternatives(patterns) =>
-        this += "Alternative(" ++= patterns += ")"
+        this += "Alternatives(" ++= patterns += ")"
+      case TypedOrTest(tree, tpt) =>
+        this += "TypedOrTest(" += tree += ", " += tpt += ")"
     }
 
     def visitConstant(x: Constant): this.type = x match {
@@ -231,6 +238,8 @@ object Extractors {
         this += "TypeBounds(" += lo += ", " += hi += ")"
       case NoPrefix() =>
         this += "NoPrefix()"
+      case MatchCase(pat, rhs) =>
+        this += "MatchCase(" += pat += ", " += rhs += ")"
     }
 
     def visitSignature(sig: Signature): this.type = {
diff --git a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala
index 62fa24d908f1..7f77f8fa5eb2 100644
--- a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala
+++ b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala
@@ -328,7 +328,7 @@ object SourceCode {
         }
         this
 
-      case Ident("_") =>
+      case Wildcard() =>
         this += "_"
 
       case tree: Ident =>
@@ -530,6 +530,9 @@ object SourceCode {
       case Closure(meth, _) =>
         printTree(meth)
 
+      case _:Unapply | _:Alternatives | _:Bind =>
+        printPattern(tree)
+
       case _ =>
         throw new MatchError(tree.show(using Printer.TreeStructure))
 
@@ -893,13 +896,13 @@ object SourceCode {
     }
 
     private def printPattern(pattern: Tree): this.type = pattern match {
-      case Ident("_") =>
+      case Wildcard() =>
         this += "_"
 
-      case Bind(name, Ident("_")) =>
+      case Bind(name, Wildcard()) =>
         this += name
 
-      case Bind(name, Typed(Ident("_"), tpt)) =>
+      case Bind(name, Typed(Wildcard(), tpt)) =>
         this += highlightValDef(name) += ": "
         printTypeTree(tpt)
 
@@ -925,9 +928,13 @@ object SourceCode {
       case Alternatives(trees) =>
         inParens(printPatterns(trees, " | "))
 
-      case Typed(Ident("_"), tpt) =>
-        this += "_: "
-        printTypeTree(tpt)
+      case TypedOrTest(tree1, tpt) =>
+        tree1 match
+          case Wildcard() =>
+            this += "_: "
+            printTypeTree(tpt)
+          case _ =>
+            printPattern(tree1)
 
       case v: Term =>
         printTree(v)
@@ -1049,7 +1056,7 @@ object SourceCode {
 
       case LambdaTypeTree(tparams, body) =>
         printTargsDefs(tparams.zip(tparams), isDef = false)
-        this += highlightTypeDef(" => ")
+        this += highlightTypeDef(" =>> ")
         printTypeOrBoundsTree(body)
 
       case TypeBind(name, _) =>
@@ -1215,19 +1222,18 @@ object SourceCode {
         this += "]"
         printType(tpe.resType)
 
-      case tpe: TypeLambda =>
-        this += "["
-        printList(tpe.paramNames.zip(tpe.paramBounds), ", ",
-          (x: (String, TypeBounds)) => (this += x._1 += " ").printType(x._2))
-        this += "] => "
-        printType(tpe.resType)
-
       case tpe@TypeBounds(lo, hi) =>
         this += "_ >: "
         printType(lo)
         this += " <: "
         printType(hi)
 
+      case MatchCase(pat, rhs) =>
+        this += "case "
+        printType(pat)
+        this += " => "
+        printType(rhs)
+
       case _ =>
         throw new MatchError(tpe.show(using Printer.TypeReprStructure))
     }
@@ -1417,7 +1423,7 @@ object SourceCode {
       case '"' => "\\\""
       case '\'' => "\\\'"
       case '\\' => "\\\\"
-      case _ => if (ch.isControl) "\\0" + Integer.toOctalString(ch) else String.valueOf(ch)
+      case _ => if ch.isControl then f"${"\\"}u${ch.toInt}%04x" else String.valueOf(ch)
     }
 
     private def escapedString(str: String): String = str flatMap escapedChar
diff --git a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala
new file mode 100644
index 000000000000..d8a14f72ebfa
--- /dev/null
+++ b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala
@@ -0,0 +1,173 @@
+package dotty
+package tools
+package coursier
+
+import java.io.File
+import java.nio.file.{Path, Paths, Files}
+import scala.sys.process._
+import org.junit.Test
+import org.junit.BeforeClass
+import org.junit.Assert._
+import scala.collection.mutable.ListBuffer
+
+import java.net.URLClassLoader
+import java.net.URL
+
+class CoursierScalaTests:
+
+  private def scripts(path: String): Array[File] = {
+    val dir = new File(getClass.getResource(path).getPath)
+    assert(dir.exists && dir.isDirectory, "Couldn't load scripts dir")
+    dir.listFiles
+  }
+
+  extension (f: File) private def absPath =
+    f.getAbsolutePath.replace('\\', '/')
+
+  extension (str: String) private def dropExtension =
+    str.reverse.dropWhile(_ != '.').drop(1).reverse
+
+  // classpath tests are managed by scripting.ClasspathTests.scala
+  def testFiles = scripts("/scripting").filter { ! _.getName.startsWith("classpath") }
+
+  // Cannot run tests in parallel, more info here: https://stackoverflow.com/questions/6345660/java-executing-bash-script-error-26-text-file-busy
+  @Test def allTests =
+    def scriptArgs() =
+      val scriptPath = scripts("/scripting").find(_.getName == "showArgs.sc").get.absPath
+      val testScriptArgs = Seq("a", "b", "c", "-repl", "-run", "-script", "-debug")
+
+      val args = scriptPath +: testScriptArgs
+      val output = CoursierScalaTests.csScalaCmd(args*)
+      val expectedOutput = List(
+        "arg  0:[a]",
+        "arg  1:[b]",
+        "arg  2:[c]",
+        "arg  3:[-repl]",
+        "arg  4:[-run]",
+        "arg  5:[-script]",
+        "arg  6:[-debug]",
+      )
+      for (line, expect) <- output zip expectedOutput do
+        printf("expected: %-17s\nactual  : %s\n", expect, line)
+      assertEquals(expectedOutput, output)
+    scriptArgs()
+
+    def scriptPath() =
+      val scriptPath = scripts("/scripting").find(_.getName == "scriptPath.sc").get.absPath
+      val args = scriptPath
+      val output = CoursierScalaTests.csScalaCmd(args)
+      assertTrue(output.mkString("\n").startsWith("script.path:"))
+      assertTrue(output.mkString("\n").endsWith("scriptPath.sc"))
+    scriptPath()
+
+    def scriptEnvDashJDashD() =
+      val scriptPath = scripts("/scripting").find(_.getName == "envtest.sc").get.absPath
+      val args = scriptPath
+      val output = CoursierScalaTests.csScalaCmd("-J-Dkey=World", args)
+      assertEquals(output.mkString("\n"), "Hello World")
+    scriptEnvDashJDashD()
+
+    def version() =
+      val output = CoursierScalaTests.csScalaCmd("-version")
+      assertTrue(output.mkString("\n").contains(sys.env("DOTTY_BOOTSTRAPPED_VERSION")))
+    version()
+
+    def emptyArgsEqualsRepl() =
+      val output = CoursierScalaTests.csScalaCmd()
+      assertTrue(output.mkString("\n").contains("Unable to create a system terminal")) // Scala attempted to create REPL so we can assume it is working
+    emptyArgsEqualsRepl()
+
+    def run() =
+      val output = CoursierScalaTests.csScalaCmd("-classpath", scripts("/run").head.getParentFile.getParent, "-run", "run.myfile")
+      assertEquals(output.mkString("\n"), "Hello")
+    run()
+
+    def runDashJDashD() =
+      val output = CoursierScalaTests.csScalaCmd("-J-Dkey=World", "-classpath", scripts("/run").head.getParentFile.getParent, "-run", "run.envtest")
+      assertEquals(output.mkString("\n"), "Hello World")
+    runDashJDashD()
+
+    def notOnlyOptionsEqualsRun() =
+      val output = CoursierScalaTests.csScalaCmd("-classpath", scripts("/run").head.getParentFile.getParent, "run.myfile")
+      assertEquals(output.mkString("\n"), "Hello")
+    notOnlyOptionsEqualsRun()
+
+    def help() =
+      val output = CoursierScalaTests.csScalaCmd("-help")
+      assertTrue(output.mkString("\n").contains("Usage: scala  "))
+    help()
+
+    def jar() =
+      val source = new File(getClass.getResource("/run/myfile.scala").getPath)
+      val output = CoursierScalaTests.csScalaCmd("-save", source.absPath)
+      assertEquals(output.mkString("\n"), "Hello")
+      assertTrue(source.getParentFile.listFiles.find(_.getName == "myfile.jar").isDefined)
+    jar()
+
+    def runThatJar() =
+      val source = new File(getClass.getResource("/run/myfile.jar").getPath)
+      val output = CoursierScalaTests.csScalaCmd(source.absPath)
+      assertEquals(output.mkString("\n"), "Hello")
+    runThatJar()
+
+    def compileFilesToJarAndRun() =
+      val source = new File(getClass.getResource("/run/myfile.scala").getPath)
+      val prefix = source.getParent
+
+      val o1source = Paths.get(prefix, "automain.jar").toAbsolutePath.toString
+      val output1 = CoursierScalaTests.csScalaCompilerCmd("-d", o1source, source.absPath)
+      assertEquals(output1.mkString("\n"), "")
+
+      val o2source = Paths.get(prefix, "custommain.jar").toAbsolutePath.toString
+      val output2 = CoursierScalaTests.csScalaCompilerCmd("-d", o2source, "-Xmain-class", "run.myfile", source.absPath)
+      assertEquals(output2.mkString("\n"), "")
+
+      val output3 = CoursierScalaTests.csScalaCmd(o1source)
+      assertEquals(output3.mkString("\n"), "Hello")
+
+      val output4 = CoursierScalaTests.csScalaCmd(o2source)
+      assertEquals(output4.mkString("\n"), "Hello")
+    compileFilesToJarAndRun()
+
+    def replWithArgs() =
+      val output = CoursierScalaTests.csScalaCmd("-source", "3.0-migration")
+      assertTrue(output.mkString("\n").contains("Unable to create a system terminal")) // Scala attempted to create REPL so we can assume it is working
+    replWithArgs()
+
+    def argumentFile() =
+      // verify that an arguments file is accepted
+      // verify that setting a user classpath does not remove compiler libraries from the classpath.
+      // arguments file contains "-classpath .", adding current directory to classpath.
+      val source = new File(getClass.getResource("/run/myfile.scala").getPath)
+      val argsFile = new File(getClass.getResource("/run/myargs.txt").getPath)
+      val output = CoursierScalaTests.csScalaCmd(s"@$argsFile", source.absPath)
+      assertEquals(output.mkString("\n"), "Hello")
+    argumentFile()
+
+object CoursierScalaTests:
+
+  def execCmd(command: String, options: String*): List[String] =
+    val cmd = (command :: options.toList).toSeq.mkString(" ")
+    val out = new ListBuffer[String]
+    cmd.!(ProcessLogger(out += _, out += _))
+    out.toList
+
+  def csScalaCmd(options: String*): List[String] =
+    csCmd("dotty.tools.MainGenericRunner", options*)
+
+  def csScalaCompilerCmd(options: String*): List[String] =
+    csCmd("dotty.tools.dotc.Main", options*)
+
+  private def csCmd(entry: String, options: String*): List[String] =
+    val (jOpts, args) = options.partition(_.startsWith("-J"))
+    val newOptions = args match
+      case Nil => args
+      case _ => "--" +: args
+    val newJOpts = jOpts.map(s => s"--java-opt ${s.stripPrefix("-J")}").mkString(" ")
+    execCmd("./cs", (s"""launch "org.scala-lang:scala3-compiler_3:${sys.env("DOTTY_BOOTSTRAPPED_VERSION")}" $newJOpts --main-class "$entry" --property "scala.usejavacp=true"""" +: newOptions)*)
+
+  /** Get coursier script */
+  @BeforeClass def setup(): Unit =
+    val ver = execCmd("uname").head.replace('L', 'l').replace('D', 'd')
+    execCmd("curl", s"-fLo cs https://git.io/coursier-cli-$ver") #&& execCmd("chmod", "+x cs")
+
diff --git a/compiler/test-coursier/run/envtest.scala b/compiler/test-coursier/run/envtest.scala
new file mode 100644
index 000000000000..bf416c9519ce
--- /dev/null
+++ b/compiler/test-coursier/run/envtest.scala
@@ -0,0 +1,4 @@
+package run
+
+object envtest extends App:
+  println("Hello " + sys.props("key"))
diff --git a/compiler/test-coursier/run/myargs.txt b/compiler/test-coursier/run/myargs.txt
new file mode 100755
index 000000000000..a0d2d24986de
--- /dev/null
+++ b/compiler/test-coursier/run/myargs.txt
@@ -0,0 +1 @@
+-classpath .
diff --git a/compiler/test-coursier/run/myfile.scala b/compiler/test-coursier/run/myfile.scala
new file mode 100644
index 000000000000..c9ed2cfb1683
--- /dev/null
+++ b/compiler/test-coursier/run/myfile.scala
@@ -0,0 +1,4 @@
+package run
+
+object myfile extends App:
+  println("Hello")
diff --git a/compiler/test-resources/repl/10886 b/compiler/test-resources/repl/10886
new file mode 100644
index 000000000000..5d06f7995a13
--- /dev/null
+++ b/compiler/test-resources/repl/10886
@@ -0,0 +1,10 @@
+scala> type Channel = "A" | "B"
+// defined alias type Channel = "A" | "B"
+
+scala> type SelChannel[C <: Tuple] = C match { case x *: xs => x | SelChannel[xs] case _ => Nothing }
+
+scala> lazy val a: SelChannel[("A", "B", "C")] = "A"
+lazy val a: "A" | ("B" | ("C" | Nothing))
+
+scala>:type a
+("A" : String) | (("B" : String) | (("C" : String) | Nothing))
diff --git a/compiler/test-resources/repl/1379 b/compiler/test-resources/repl/1379
index 902d6251a62b..978aab344204 100644
--- a/compiler/test-resources/repl/1379
+++ b/compiler/test-resources/repl/1379
@@ -1,4 +1,6 @@
 scala>  object Foo { val bar = new Object { def baz = 1 }; bar.baz }
+-- [E008] Not Found Error: -----------------------------------------------------
 1 |  object Foo { val bar = new Object { def baz = 1 }; bar.baz }
   |                                                     ^^^^^^^
-  |                                       value baz is not a member of Object
\ No newline at end of file
+  |                                       value baz is not a member of Object
+1 error found
diff --git a/compiler/test-resources/repl/defaultClassloader b/compiler/test-resources/repl/defaultClassloader
index ac42150e14d4..bd9a955409b9 100644
--- a/compiler/test-resources/repl/defaultClassloader
+++ b/compiler/test-resources/repl/defaultClassloader
@@ -1,3 +1,2 @@
-scala> val d: java.sql.Date = new java.sql.Date(100L)
-val d: java.sql.Date = 1970-01-01
-
+scala> val d: Long = (new java.sql.Date(100L)).getTime
+val d: Long = 100
diff --git a/compiler/test-resources/repl/errmsgs b/compiler/test-resources/repl/errmsgs
index e4cb6eca36d4..9819c1cb089f 100644
--- a/compiler/test-resources/repl/errmsgs
+++ b/compiler/test-resources/repl/errmsgs
@@ -1,35 +1,59 @@
 scala> class Inv[T](x: T)
 // defined class Inv
 scala> val x: List[String] = List(1)
+-- [E007] Type Mismatch Error: -------------------------------------------------
 1 | val x: List[String] = List(1)
   |                            ^
   |                            Found:    (1 : Int)
   |                            Required: String
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
 scala> val y: List[List[String]] = List(List(1))
+-- [E007] Type Mismatch Error: -------------------------------------------------
 1 | val y: List[List[String]] = List(List(1))
   |                                       ^
   |                                       Found:    (1 : Int)
   |                                       Required: String
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
 scala> val z: (List[String], List[Int]) = (List(1), List("a"))
+-- [E007] Type Mismatch Error: -------------------------------------------------
 1 | val z: (List[String], List[Int]) = (List(1), List("a"))
   |                                          ^
   |                                          Found:    (1 : Int)
   |                                          Required: String
+  |
+  | longer explanation available when compiling with `-explain`
+-- [E007] Type Mismatch Error: -------------------------------------------------
 1 | val z: (List[String], List[Int]) = (List(1), List("a"))
   |                                                   ^^^
   |                                                  Found:    ("a" : String)
   |                                                  Required: Int
+  |
+  | longer explanation available when compiling with `-explain`
+2 errors found
 scala> val a: Inv[String] = new Inv(new Inv(1))
+-- [E007] Type Mismatch Error: -------------------------------------------------
 1 | val a: Inv[String] = new Inv(new Inv(1))
   |                              ^^^^^^^^^^
   |                              Found:    Inv[Int]
   |                              Required: String
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
 scala> val b: Inv[String] = new Inv(1)
+-- [E007] Type Mismatch Error: -------------------------------------------------
 1 | val b: Inv[String] = new Inv(1)
   |                              ^
   |                              Found:    (1 : Int)
   |                              Required: String
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
 scala> abstract class C { type T; val x: T; val s: Unit = { type T = String; var y: T = x; locally { def f() = { type T = Int; val z: T = y }; f() } }; }
+-- [E007] Type Mismatch Error: -------------------------------------------------
 1 | abstract class C { type T; val x: T; val s: Unit = { type T = String; var y: T = x; locally { def f() = { type T = Int; val z: T = y }; f() } }; }
   |                                                                                  ^
   |Found:    (C.this.x : C.this.T)
@@ -37,6 +61,9 @@ scala> abstract class C { type T; val x: T; val s: Unit = { type T = String; var
   |
   |where:    T  is a type in class C
   |          T² is a type in the initializer of value s which is an alias of String
+  |
+  | longer explanation available when compiling with `-explain`
+-- [E007] Type Mismatch Error: -------------------------------------------------
 1 | abstract class C { type T; val x: T; val s: Unit = { type T = String; var y: T = x; locally { def f() = { type T = Int; val z: T = y }; f() } }; }
   |                                                                                                                                    ^
   |Found:    (y : T)
@@ -44,28 +71,53 @@ scala> abstract class C { type T; val x: T; val s: Unit = { type T = String; var
   |
   |where:    T  is a type in the initializer of value s which is an alias of String
   |          T² is a type in method f which is an alias of Int
+  |
+  | longer explanation available when compiling with `-explain`
+2 errors found
 scala> class Foo() { def bar: Int = 1 }; val foo = new Foo(); foo.barr
+-- [E008] Not Found Error: -----------------------------------------------------
 1 | class Foo() { def bar: Int = 1 }; val foo = new Foo(); foo.barr
   |                                                        ^^^^^^^^
   |                 value barr is not a member of Foo - did you mean foo.bar?
+1 error found
 scala> val x: List[Int] = "foo" :: List(1)
+-- [E007] Type Mismatch Error: -------------------------------------------------
 1 | val x: List[Int] = "foo" :: List(1)
   |                    ^^^^^
   |                    Found:    ("foo" : String)
   |                    Required: Int
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
 scala> while (((  foo ))) {}
+-- [E006] Not Found Error: -----------------------------------------------------
 1 | while (((  foo ))) {}
   |            ^^^
   |            Not found: foo
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
 scala> val a: iDontExist = 1
+-- [E006] Not Found Error: -----------------------------------------------------
 1 | val a: iDontExist = 1
   |        ^^^^^^^^^^
   |        Not found: type iDontExist
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
 scala> def foo1(x: => Int) = x _
+-- [E099] Syntax Error: --------------------------------------------------------
 1 | def foo1(x: => Int) = x _
   |                       ^^^
   |Only function types can be followed by _ but the current expression has type Int
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
 scala> def foo2(x: => Int): () => Int = x _
+-- [E099] Syntax Error: --------------------------------------------------------
 1 | def foo2(x: => Int): () => Int = x _
   |                                  ^^^
   |Only function types can be followed by _ but the current expression has type Int
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
diff --git a/compiler/test-resources/repl/errorThenValid b/compiler/test-resources/repl/errorThenValid
deleted file mode 100644
index 3aea44b6392d..000000000000
--- a/compiler/test-resources/repl/errorThenValid
+++ /dev/null
@@ -1,8 +0,0 @@
-scala> val xs = scala.collection.mutable.ListBuffer[Int]
-1 | val xs = scala.collection.mutable.ListBuffer[Int]
-  |          ^
-  |          Missing parameter type
-  |
-  |          I could not infer the type of the parameter elems.
-scala> val xs = scala.collection.mutable.ListBuffer[Int]()
-val xs: scala.collection.mutable.ListBuffer[Int] = ListBuffer()
diff --git a/compiler/test-resources/repl/i11146 b/compiler/test-resources/repl/i11146
new file mode 100644
index 000000000000..1a6b7d414eeb
--- /dev/null
+++ b/compiler/test-resources/repl/i11146
@@ -0,0 +1,25 @@
+scala> class Appendable { def foo = println("Appendable.foo") }
+// defined class Appendable
+
+scala> (new Appendable).foo
+Appendable.foo
+
+scala> def assert(x: Boolean) = println(if x then "not asserted" else "asserted")
+def assert(x: Boolean): Unit
+
+scala> assert(false)
+asserted
+
+scala> class Option; object Option { val baz = 42 }
+// defined class Option
+// defined object Option
+
+scala> Option.baz
+val res0: Int = 42
+
+scala> object fs2 { class Stream[T] { override def toString = "fs2.Stream(..)" }; object Stream { def apply[T](x: T) = new Stream[T] }}
+// defined object fs2
+
+scala> import fs2.Stream
+scala> Stream(1)
+val res1: fs2.Stream[Int] = fs2.Stream(..)
diff --git a/compiler/test-resources/repl/i11377 b/compiler/test-resources/repl/i11377
index c07100ddbcf5..4e971fb89749 100644
--- a/compiler/test-resources/repl/i11377
+++ b/compiler/test-resources/repl/i11377
@@ -5,8 +5,10 @@ val bigArray: Array[Int] = Array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
 scala> val notTruncated = "!" * 999
 val notTruncated: String = !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 scala> val onTruncationLimit = "!" * 1000
-val onTruncationLimit: String = !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ... large output truncated, print value to show all
+val onTruncationLimit: String = !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+scala> val onTruncationLimitPlus = "!" * 1001
+val onTruncationLimitPlus: String = !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ... large output truncated, print value to show all
 scala> val veryBigTruncated = "!" * 10000
 val veryBigTruncated: String = !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! ... large output truncated, print value to show all
 scala> val beh = "\u08A0"*10000
-val beh: String = ࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠ ... large output truncated, print value to show all
\ No newline at end of file
+val beh: String = ࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠࢠ ... large output truncated, print value to show all
diff --git a/compiler/test-resources/repl/i13181 b/compiler/test-resources/repl/i13181
new file mode 100644
index 000000000000..32f6c6e40c1e
--- /dev/null
+++ b/compiler/test-resources/repl/i13181
@@ -0,0 +1,2 @@
+scala> scala.compiletime.codeOf(1+2)
+val res0: String = 1.+(2)
diff --git a/compiler/test-resources/repl/i13208.default.scala b/compiler/test-resources/repl/i13208.default.scala
new file mode 100644
index 000000000000..9a18f9a67c66
--- /dev/null
+++ b/compiler/test-resources/repl/i13208.default.scala
@@ -0,0 +1,10 @@
+scala> try 1
+1 warning found
+-- [E000] Syntax Warning: ------------------------------------------------------
+1 | try 1
+  | ^^^^^
+  | A try without catch or finally is equivalent to putting
+  | its body in a block; no exceptions are handled.
+  |
+  | longer explanation available when compiling with `-explain`
+val res0: Int = 1
diff --git a/compiler/test-resources/repl/i13208.scala b/compiler/test-resources/repl/i13208.scala
new file mode 100644
index 000000000000..ce4fcf0d9ed8
--- /dev/null
+++ b/compiler/test-resources/repl/i13208.scala
@@ -0,0 +1,8 @@
+// scalac: -source:future -deprecation
+scala> type M[X] = X match { case Int => String case _ => Int }
+scala> type N[X] = X match { case List[_] => Int }
+1 warning found
+-- Deprecation Warning: --------------------------------------------------------
+1 | type N[X] = X match { case List[_] => Int }
+  |                                 ^
+  |        `_` is deprecated for wildcard arguments of types: use `?` instead
diff --git a/compiler/test-resources/repl/i1370 b/compiler/test-resources/repl/i1370
index 21193692e355..6582e03b6539 100644
--- a/compiler/test-resources/repl/i1370
+++ b/compiler/test-resources/repl/i1370
@@ -1,4 +1,6 @@
 scala> object Lives { class Private { def foo1: Any = new Private.C1; def foo2: Any = new Private.C2 };  object Private { class C1 private {}; private class C2 {} } }
+-- Error: ----------------------------------------------------------------------
 1 | object Lives { class Private { def foo1: Any = new Private.C1; def foo2: Any = new Private.C2 };  object Private { class C1 private {}; private class C2 {} } }
   |                                                    ^^^^^^^^^^
   |constructor C1 cannot be accessed as a member of Lives.Private.C1 from class Private.
+1 error found
diff --git a/compiler/test-resources/repl/i1374 b/compiler/test-resources/repl/i1374
index a56b15df2818..3d117fdb4ff9 100644
--- a/compiler/test-resources/repl/i1374
+++ b/compiler/test-resources/repl/i1374
@@ -1,5 +1,6 @@
 scala> implicit class Padder(val sb: StringBuilder) extends AnyVal { def pad2(width: Int) = { 1 to width - sb.length foreach { sb append '*' }; sb } }
 // defined class Padder
+def Padder(sb: StringBuilder): Padder
 scala> val greeting = new StringBuilder("Hello, kitteh!")
 val greeting: StringBuilder = Hello, kitteh!
 scala> val a = greeting pad2 20
diff --git a/compiler/test-resources/repl/i2063 b/compiler/test-resources/repl/i2063
index 2cee5d4fa735..7245f677bfe6 100644
--- a/compiler/test-resources/repl/i2063
+++ b/compiler/test-resources/repl/i2063
@@ -1,12 +1,24 @@
 scala> 	class Foo extends Bar // with one tab
+-- [E006] Not Found Error: -----------------------------------------------------
 1 | 	class Foo extends Bar // with one tab
   | 	                  ^^^
   | 	                  Not found: type Bar
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
 scala>                                                                               class Foo extends Bar // with spaces
+-- [E006] Not Found Error: -----------------------------------------------------
 1 |                                                                               class Foo extends Bar // with spaces
   |                                                                                                 ^^^
   |                                                       Not found: type Bar
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
 scala>                                                                                  class Foo extends Bar // with tabs
+-- [E006] Not Found Error: -----------------------------------------------------
 1 |                                                                                  class Foo extends Bar // with tabs
   |                                                                                                    ^^^
   |                                                       Not found: type Bar
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
diff --git a/compiler/test-resources/repl/i2213 b/compiler/test-resources/repl/i2213
index 93638f03f504..b75b2056c959 100644
--- a/compiler/test-resources/repl/i2213
+++ b/compiler/test-resources/repl/i2213
@@ -1,9 +1,13 @@
 scala> def x
+-- [E019] Syntax Error: --------------------------------------------------------
 1 | def x
   |      ^
   |      Missing return type
-
+  |
+  | longer explanation available when compiling with `-explain`
 scala> def x: Int
+-- [E067] Syntax Error: --------------------------------------------------------
 1 | def x: Int
   |     ^
   |Declaration of method x not allowed here: only classes can have declared but undefined members
+1 error found
diff --git a/compiler/test-resources/repl/i2631 b/compiler/test-resources/repl/i2631
index f68b430d829d..a04e2cb15bc6 100644
--- a/compiler/test-resources/repl/i2631
+++ b/compiler/test-resources/repl/i2631
@@ -1,4 +1,6 @@
 scala> class Foo(x : Any) { val foo : Integer = 0; def this() = { this(foo) } }
+-- Error: ----------------------------------------------------------------------
 1 | class Foo(x : Any) { val foo : Integer = 0; def this() = { this(foo) } }
   |                                                                 ^^^
-  |                          foo is not accessible from constructor arguments
\ No newline at end of file
+  |                          foo is not accessible from constructor arguments
+1 error found
diff --git a/compiler/test-resources/repl/i3536 b/compiler/test-resources/repl/i3536
new file mode 100644
index 000000000000..490909db103a
--- /dev/null
+++ b/compiler/test-resources/repl/i3536
@@ -0,0 +1,19 @@
+scala> val res0 = 1
+val res0: Int = 1
+
+scala> res0
+val res1: Int = 1
+
+
+scala> val res99 = 9
+val res99: Int = 9
+
+scala> res99
+val res100: Int = 9
+
+
+scala> val res50 = 5
+val res50: Int = 5
+
+scala> res50
+val res101: Int = 5
diff --git a/compiler/test-resources/repl/i3536_bind b/compiler/test-resources/repl/i3536_bind
new file mode 100644
index 000000000000..96ccdce81356
--- /dev/null
+++ b/compiler/test-resources/repl/i3536_bind
@@ -0,0 +1,4 @@
+scala> val res10 @ Some(_) = Option(1)
+val res10: Some[Int] = Some(1)
+scala> res10
+val res11: Some[Int] = Some(1)
diff --git a/compiler/test-resources/repl/i3536_defdef b/compiler/test-resources/repl/i3536_defdef
new file mode 100644
index 000000000000..04e6045c26c3
--- /dev/null
+++ b/compiler/test-resources/repl/i3536_defdef
@@ -0,0 +1,4 @@
+scala> def res20 = 1
+def res20: Int
+scala> res20
+val res21: Int = 1
diff --git a/compiler/test-resources/repl/i3536_object b/compiler/test-resources/repl/i3536_object
new file mode 100644
index 000000000000..181302939423
--- /dev/null
+++ b/compiler/test-resources/repl/i3536_object
@@ -0,0 +1,4 @@
+scala> object res30 { override def toString = "res30" }
+// defined object res30
+scala> res30
+val res31: res30.type = res30
diff --git a/compiler/test-resources/repl/i3536_patterndef_some b/compiler/test-resources/repl/i3536_patterndef_some
new file mode 100644
index 000000000000..24c8736b57b9
--- /dev/null
+++ b/compiler/test-resources/repl/i3536_patterndef_some
@@ -0,0 +1,5 @@
+scala> val Some((res40, res41)) = Option((1, 0))
+val res40: Int = 1
+val res41: Int = 0
+scala> res40
+val res42: Int = 1
diff --git a/compiler/test-resources/repl/i3536_patterndef_tuple b/compiler/test-resources/repl/i3536_patterndef_tuple
new file mode 100644
index 000000000000..987c28c3a382
--- /dev/null
+++ b/compiler/test-resources/repl/i3536_patterndef_tuple
@@ -0,0 +1,5 @@
+scala> val (res40, res41) = (1, 0)
+val res40: Int = 1
+val res41: Int = 0
+scala> res40
+val res42: Int = 1
diff --git a/compiler/test-resources/repl/i3536_typedef b/compiler/test-resources/repl/i3536_typedef
new file mode 100644
index 000000000000..4db70a4f9c98
--- /dev/null
+++ b/compiler/test-resources/repl/i3536_typedef
@@ -0,0 +1,4 @@
+scala> type res50 = Int
+// defined alias type res50 = Int
+scala> 1
+val res51: Int = 1
diff --git a/compiler/test-resources/repl/i3536_var b/compiler/test-resources/repl/i3536_var
new file mode 100644
index 000000000000..7467ad84c772
--- /dev/null
+++ b/compiler/test-resources/repl/i3536_var
@@ -0,0 +1,4 @@
+scala> var res60 = 1
+var res60: Int = 1
+scala> res60
+val res61: Int = 1
diff --git a/compiler/test-resources/repl/i4184 b/compiler/test-resources/repl/i4184
index 385e98014d11..2c4eb7d12a6f 100644
--- a/compiler/test-resources/repl/i4184
+++ b/compiler/test-resources/repl/i4184
@@ -5,6 +5,8 @@ scala> object bar { class Foo }
 scala> implicit def eqFoo: CanEqual[foo.Foo, foo.Foo] = CanEqual.derived
 def eqFoo: CanEqual[foo.Foo, foo.Foo]
 scala> object Bar { new foo.Foo == new bar.Foo }
+-- Error: ----------------------------------------------------------------------
 1 | object Bar { new foo.Foo == new bar.Foo }
   |              ^^^^^^^^^^^^^^^^^^^^^^^^^^
   |      Values of types foo.Foo and bar.Foo cannot be compared with == or !=
+1 error found
diff --git a/compiler/test-resources/repl/i4217 b/compiler/test-resources/repl/i4217
index 22964adaf6a7..d4e113271073 100644
--- a/compiler/test-resources/repl/i4217
+++ b/compiler/test-resources/repl/i4217
@@ -1,7 +1,11 @@
 scala> def foo(x: Option[Int]) = x match { case None => }
+1 warning found
+-- [E029] Pattern Match Exhaustivity Warning: ----------------------------------
 1 | def foo(x: Option[Int]) = x match { case None => }
   |                           ^
   |                           match may not be exhaustive.
   |
   |                           It would fail on pattern case: Some(_)
-def foo(x: Option[Int]): Unit
\ No newline at end of file
+  |
+  | longer explanation available when compiling with `-explain`
+def foo(x: Option[Int]): Unit
diff --git a/compiler/test-resources/repl/i4566 b/compiler/test-resources/repl/i4566
index dcc7262a0eaf..32d84a6403d4 100644
--- a/compiler/test-resources/repl/i4566
+++ b/compiler/test-resources/repl/i4566
@@ -1,5 +1,9 @@
 scala> object test { type ::[A, B]; def a: Int :: Int = ???; def b: Int = a }
+-- [E007] Type Mismatch Error: -------------------------------------------------
 1 | object test { type ::[A, B]; def a: Int :: Int = ???; def b: Int = a }
   |                                                                    ^
   |                                                      Found:    Int :: Int
   |                                                      Required: Int
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
diff --git a/compiler/test-resources/repl/i5218 b/compiler/test-resources/repl/i5218
index bdf8325b6962..abe63009ef74 100644
--- a/compiler/test-resources/repl/i5218
+++ b/compiler/test-resources/repl/i5218
@@ -3,5 +3,4 @@ val tuple: (Int, String, Long) = (1,2,3)
 scala> 0.0 *: tuple
 val res0: (Double, Int, String, Long) = (0.0,1,2,3)
 scala> tuple ++ tuple
-val res1: Int *: String *: Long *:
-  scala.Tuple.Concat[scala.Tuple$package.EmptyTuple.type, tuple.type] = (1,2,3,1,2,3)
+val res1: Int *: String *: Long *: tuple.type = (1,2,3,1,2,3)
diff --git a/compiler/test-resources/repl/i5733 b/compiler/test-resources/repl/i5733
index e9afea5e98ec..ef578f24522f 100644
--- a/compiler/test-resources/repl/i5733
+++ b/compiler/test-resources/repl/i5733
@@ -1,6 +1,8 @@
 scala> abstract class F { def f(arg: Any): Unit; override def toString = "F" }
 // defined class F
 scala> val f: F = println
+1 warning found
+-- Warning: --------------------------------------------------------------------
 1 | val f: F = println
   |            ^^^^^^^
   |method println is eta-expanded even though F does not have the @FunctionalInterface annotation.
diff --git a/compiler/test-resources/repl/i6474 b/compiler/test-resources/repl/i6474
index 8b11b42859cf..0957bbbe2761 100644
--- a/compiler/test-resources/repl/i6474
+++ b/compiler/test-resources/repl/i6474
@@ -9,9 +9,11 @@ val res0: (Any, Int) = (1,2)
 scala> ((1, 2): Foo2.T[Int][Int]): Foo2.T[Any][Int]
 val res1: (Any, Int) = (1,2)
 scala> (1, 2): Foo3.T[Int][Int]
+-- [E056] Syntax Error: --------------------------------------------------------
 1 | (1, 2): Foo3.T[Int][Int]
   |         ^^^^^^^^^^^^^^^^
   |         Missing type parameter for Foo3.T[Int][Int]
+1 error found
 scala> ((1, 2): Foo3.T[Int][Int][Int]): Foo3.T[Any][Int][Int]
 val res2: (Any, Int) = (1,2)
 scala> object Foo3 { type T[A] = [B] =>> [C] =>> (A, B) }
diff --git a/compiler/test-resources/repl/i6676 b/compiler/test-resources/repl/i6676
index 519225183e43..e60023000b1d 100644
--- a/compiler/test-resources/repl/i6676
+++ b/compiler/test-resources/repl/i6676
@@ -1,25 +1,25 @@
 scala> xml"
+-- Error: ----------------------------------------------------------------------
 1 | xml"
   |    ^
   |    unclosed string literal
-1 | xml"
-  |     ^
-  |     ';' expected, but eof found
 scala> xml""
+-- [E008] Not Found Error: -----------------------------------------------------
 1 | xml""
   | ^^^^^
   | value xml is not a member of StringContext
+1 error found
 scala> xml"""
+-- Error: ----------------------------------------------------------------------
 1 | xml"""
   |    ^
   |    unclosed multi-line string literal
+-- Error: ----------------------------------------------------------------------
 1 | xml"""
   |       ^
   |       unclosed multi-line string literal
 scala> s"
+-- Error: ----------------------------------------------------------------------
 1 | s"
   |  ^
   |  unclosed string literal
-1 | s"
-  |   ^
-  |   ';' expected, but eof found
diff --git a/compiler/test-resources/repl/i7635 b/compiler/test-resources/repl/i7635
deleted file mode 100644
index 1c9e6474a768..000000000000
--- a/compiler/test-resources/repl/i7635
+++ /dev/null
@@ -1,9 +0,0 @@
-scala>  class C { protected val c = 42; override def toString() = s"C($c)" }
-// defined class C
-scala> val x = C()
-val x: C = C(42)
-scala> def foo = 3
-def foo: Int
-scala> foo
-val res0: Int = 3
-scala> import util.Try
diff --git a/compiler/test-resources/repl/i7644 b/compiler/test-resources/repl/i7644
index 780b3847bb4e..8ceaf8b00804 100644
--- a/compiler/test-resources/repl/i7644
+++ b/compiler/test-resources/repl/i7644
@@ -1,14 +1,24 @@
 scala> class T extends CanEqual
+-- [E112] Syntax Error: --------------------------------------------------------
 1 | class T extends CanEqual
   |       ^
   |       Cannot extend sealed trait CanEqual in a different source file
+  |
+  | longer explanation available when compiling with `-explain`
+-- [E056] Syntax Error: --------------------------------------------------------
 1 | class T extends CanEqual
   |                 ^^^^^^^^
   |                 Missing type parameter for CanEqual
+2 errors found
 scala> class T extends CanEqual
+-- [E112] Syntax Error: --------------------------------------------------------
 1 | class T extends CanEqual
   |       ^
   |       Cannot extend sealed trait CanEqual in a different source file
+  |
+  | longer explanation available when compiling with `-explain`
+-- [E056] Syntax Error: --------------------------------------------------------
 1 | class T extends CanEqual
   |                 ^^^^^^^^
   |                 Missing type parameter for CanEqual
+2 errors found
diff --git a/compiler/test-resources/repl/i9227 b/compiler/test-resources/repl/i9227
index 772325640ff7..40f9248898a2 100644
--- a/compiler/test-resources/repl/i9227
+++ b/compiler/test-resources/repl/i9227
@@ -1,5 +1,6 @@
 scala> import scala.quoted._; inline def myMacro[T]: Unit = ${ myMacroImpl[T] }; def myMacroImpl[T](using Quotes): Expr[Unit] = '{}; println(myMacro[Int])
+-- Error: ----------------------------------------------------------------------
 1 | import scala.quoted._; inline def myMacro[T]: Unit = ${ myMacroImpl[T] }; def myMacroImpl[T](using Quotes): Expr[Unit] = '{}; println(myMacro[Int])
   |                                                                                                                                       ^^^^^^^^^^^^
   |      Cannot call macro method myMacroImpl defined in the same source file
-  | This location contains code that was inlined from rs$line$1:1
+1 error found
diff --git a/compiler/test-resources/repl/importFromObj b/compiler/test-resources/repl/importFromObj
index 144859891519..281025af45f5 100644
--- a/compiler/test-resources/repl/importFromObj
+++ b/compiler/test-resources/repl/importFromObj
@@ -5,17 +5,25 @@ scala> object o { val xs = List(1, 2, 3) }
 // defined object o
 scala> import o._
 scala> buf += xs
+-- [E007] Type Mismatch Error: -------------------------------------------------
 1 | buf += xs
   |        ^^
   |        Found:    (o.xs : List[Int])
   |        Required: Int
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
 scala> buf ++= xs
 val res0: scala.collection.mutable.ListBuffer[Int] = ListBuffer(1, 2, 3)
 scala> import util.foobar
+-- [E008] Not Found Error: -----------------------------------------------------
 1 | import util.foobar
   |             ^^^^^^
   |             value foobar is not a member of util
+1 error found
 scala> import util.foobar.bar
+-- [E008] Not Found Error: -----------------------------------------------------
 1 | import util.foobar.bar
   |        ^^^^^^^^^^^
   |        value foobar is not a member of util
+1 error found
diff --git a/compiler/test-resources/repl/notFound b/compiler/test-resources/repl/notFound
index 99379a89660d..689854ef2700 100644
--- a/compiler/test-resources/repl/notFound
+++ b/compiler/test-resources/repl/notFound
@@ -1,8 +1,16 @@
 scala>  Foo
+-- [E006] Not Found Error: -----------------------------------------------------
 1 |  Foo
   |  ^^^
   |  Not found: Foo
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
 scala>  Bar
+-- [E006] Not Found Error: -----------------------------------------------------
 1 |  Bar
   |  ^^^
   |  Not found: Bar
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
diff --git a/compiler/test-resources/repl/nowarn.scala b/compiler/test-resources/repl/nowarn.scala
new file mode 100644
index 000000000000..db6ea24465e7
--- /dev/null
+++ b/compiler/test-resources/repl/nowarn.scala
@@ -0,0 +1,33 @@
+scala> @annotation.nowarn def f = try 1 // @nowarn doesn't work on first line, ctx.run is null in issueIfNotSuppressed
+1 warning found
+-- [E000] Syntax Warning: ------------------------------------------------------
+1 | @annotation.nowarn def f = try 1 // @nowarn doesn't work on first line, ctx.run is null in issueIfNotSuppressed
+  |                            ^^^^^
+  |                   A try without catch or finally is equivalent to putting
+  |                   its body in a block; no exceptions are handled.
+  |
+  | longer explanation available when compiling with `-explain`
+def f: Int
+scala> @annotation.nowarn def f = try 1
+def f: Int
+scala> def f = try 1
+1 warning found
+-- [E000] Syntax Warning: ------------------------------------------------------
+1 | def f = try 1
+  |         ^^^^^
+  |         A try without catch or finally is equivalent to putting
+  |         its body in a block; no exceptions are handled.
+  |
+  | longer explanation available when compiling with `-explain`
+def f: Int
+scala> @annotation.nowarn def f = { 1; 2 }
+def f: Int
+scala> def f = { 1; 2 }
+1 warning found
+-- [E129] Potential Issue Warning: ---------------------------------------------
+1 | def f = { 1; 2 }
+  |           ^
+  |A pure expression does nothing in statement position; you may be omitting necessary parentheses
+  |
+  | longer explanation available when compiling with `-explain`
+def f: Int
diff --git a/compiler/test-resources/repl/overrides b/compiler/test-resources/repl/overrides
index 05ddd3229deb..5bad603801a4 100644
--- a/compiler/test-resources/repl/overrides
+++ b/compiler/test-resources/repl/overrides
@@ -1,10 +1,18 @@
 scala> class B { override def foo(i: Int): Unit = {}; }
+-- [E037] Declaration Error: ---------------------------------------------------
 1 | class B { override def foo(i: Int): Unit = {}; }
   |                        ^
   |                        method foo overrides nothing
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
 scala> class A { def foo: Unit = {}; }
 // defined class A
 scala> class B extends A { override def foo(i: Int): Unit = {}; }
+-- [E038] Declaration Error: ---------------------------------------------------
 1 | class B extends A { override def foo(i: Int): Unit = {}; }
   |                                  ^
   |      method foo has a different signature than the overridden declaration
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
diff --git a/compiler/test-resources/repl/parsing b/compiler/test-resources/repl/parsing
index 2b3da45aa537..76cdd488ca36 100644
--- a/compiler/test-resources/repl/parsing
+++ b/compiler/test-resources/repl/parsing
@@ -1,14 +1,15 @@
-scala> ; 
-scala> ;; 
-scala> 1; 2 
+scala> ;
+scala> ;;
+scala> 1; 2
 val res0: Int = 1
 val res1: Int = 2
-scala> 1; 
+scala> 1;
 val res2: Int = 1
-scala> 1;; 2 
+scala> 1;; 2
 val res3: Int = 1
 val res4: Int = 2
-scala> } 
+scala> }
+-- [E040] Syntax Error: --------------------------------------------------------
 1 | }
   | ^
   | eof expected, but '}' found
diff --git a/compiler/test-resources/repl/reset-command b/compiler/test-resources/repl/reset-command
new file mode 100644
index 000000000000..ad38c9e9a867
--- /dev/null
+++ b/compiler/test-resources/repl/reset-command
@@ -0,0 +1,29 @@
+scala> def f(thread: Thread) = thread.stop()
+there were 1 deprecation warning(s); re-run with -deprecation for details
+def f(thread: Thread): Unit
+
+scala>:reset -deprecation
+
+scala> def f(thread: Thread) = thread.stop()
+1 warning found
+-- Deprecation Warning: --------------------------------------------------------
+1 | def f(thread: Thread) = thread.stop()
+  |                         ^^^^^^^^^^^
+  |method stop in class Thread is deprecated since : see corresponding Javadoc for more information.
+def f(thread: Thread): Unit
+
+scala> def resetNoArgsStillWorks = 1
+def resetNoArgsStillWorks: Int
+
+scala>:reset
+
+scala> resetNoArgsStillWorks
+-- [E006] Not Found Error: -----------------------------------------------------
+1 | resetNoArgsStillWorks
+  | ^^^^^^^^^^^^^^^^^^^^^
+  | Not found: resetNoArgsStillWorks
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
+
+scala>:settings "-Dfoo=bar baz"
diff --git a/compiler/test-resources/repl/settings-command b/compiler/test-resources/repl/settings-command
new file mode 100644
index 000000000000..db4b1af7343f
--- /dev/null
+++ b/compiler/test-resources/repl/settings-command
@@ -0,0 +1,16 @@
+scala> def f(thread: Thread) = thread.stop()
+there were 1 deprecation warning(s); re-run with -deprecation for details
+def f(thread: Thread): Unit
+
+scala>:settings -deprecation foo.scala
+Ignoring spurious arguments: foo.scala
+
+scala> def f(thread: Thread) = thread.stop()
+1 warning found
+-- Deprecation Warning: --------------------------------------------------------
+1 | def f(thread: Thread) = thread.stop()
+  |                         ^^^^^^^^^^^
+  |method stop in class Thread is deprecated since : see corresponding Javadoc for more information.
+def f(thread: Thread): Unit
+
+scala>
diff --git a/compiler/test-resources/repl/settings-outputDir b/compiler/test-resources/repl/settings-outputDir
new file mode 100644
index 000000000000..8f0460a30cfe
--- /dev/null
+++ b/compiler/test-resources/repl/settings-outputDir
@@ -0,0 +1,18 @@
+scala> java.nio.file.Files.exists(java.nio.file.Files.createDirectories(java.nio.file.Paths.get("target", "test-repl-settings-outDir")))
+val res0: Boolean = true
+
+scala> val x = 1
+val x: Int = 1
+
+scala>:settings -d target/test-repl-settings-outDir
+
+scala> val y = 2
+val y: Int = 2
+
+scala> x
+val res1: Int = 1
+
+scala> y
+val res2: Int = 2
+
+scala> java.nio.file.Files.walk(java.nio.file.Paths.get("target", "test-repl-settings-outDir")).sorted(java.util.Comparator.reverseOrder).forEach(java.nio.file.Files.delete)
diff --git a/compiler/test-resources/scripting/argfileClasspath.sc b/compiler/test-resources/scripting/argfileClasspath.sc
new file mode 100755
index 000000000000..c31371ba8934
--- /dev/null
+++ b/compiler/test-resources/scripting/argfileClasspath.sc
@@ -0,0 +1,9 @@
+#!dist/target/pack/bin/scala @compiler/test-resources/scripting/cpArgumentsFile.txt
+
+import java.nio.file.Paths
+
+def main(args: Array[String]): Unit =
+  val cwd = Paths.get(".").toAbsolutePath.toString.replace('\\', '/').replaceAll("/$", "")
+  printf("cwd: %s\n", cwd)
+  printf("classpath: %s\n", sys.props("java.class.path"))
+
diff --git a/compiler/test-resources/scripting/classpathReport.sc b/compiler/test-resources/scripting/classpathReport.sc
new file mode 100755
index 000000000000..a9eacbbba1f7
--- /dev/null
+++ b/compiler/test-resources/scripting/classpathReport.sc
@@ -0,0 +1,12 @@
+#!bin/scala -classpath 'dist/target/pack/lib/*'
+
+import java.nio.file.Paths
+
+def main(args: Array[String]): Unit =
+  val cwd = Paths.get(".").toAbsolutePath.normalize.toString.norm
+  printf("cwd: %s\n", cwd)
+  printf("classpath: %s\n", sys.props("java.class.path").norm)
+
+extension(s: String)
+  def norm: String = s.replace('\\', '/')
+
diff --git a/compiler/test-resources/scripting/cpArgumentsFile.txt b/compiler/test-resources/scripting/cpArgumentsFile.txt
new file mode 100755
index 000000000000..73037eb7d9bc
--- /dev/null
+++ b/compiler/test-resources/scripting/cpArgumentsFile.txt
@@ -0,0 +1 @@
+-classpath dist/target/pack/lib/*
diff --git a/compiler/test-resources/scripting/envtest.sc b/compiler/test-resources/scripting/envtest.sc
new file mode 100755
index 000000000000..b2fde1b32339
--- /dev/null
+++ b/compiler/test-resources/scripting/envtest.sc
@@ -0,0 +1,2 @@
+def main(args: Array[String]): Unit =
+  println("Hello " + util.Properties.propOrNull("key"))
diff --git a/compiler/test-resources/scripting/envtest.scala b/compiler/test-resources/scripting/envtest.scala
new file mode 100644
index 000000000000..bf416c9519ce
--- /dev/null
+++ b/compiler/test-resources/scripting/envtest.scala
@@ -0,0 +1,4 @@
+package run
+
+object envtest extends App:
+  println("Hello " + sys.props("key"))
diff --git a/compiler/test-resources/scripting/hashBang.sc b/compiler/test-resources/scripting/hashBang.sc
old mode 100644
new mode 100755
diff --git a/compiler/test-resources/scripting/mainClassOnStack.sc b/compiler/test-resources/scripting/mainClassOnStack.sc
old mode 100644
new mode 100755
diff --git a/compiler/test-resources/scripting/scriptPath.sc b/compiler/test-resources/scripting/scriptPath.sc
old mode 100644
new mode 100755
index 49ed65a76515..46cd5e8a7385
--- a/compiler/test-resources/scripting/scriptPath.sc
+++ b/compiler/test-resources/scripting/scriptPath.sc
@@ -1,10 +1,22 @@
-#!/usr/bin/env scala
+#!dist/target/pack/bin/scala
 
   def main(args: Array[String]): Unit =
     args.zipWithIndex.foreach { case (arg,i) => printf("arg %d: [%s]\n",i,arg) }
-    val path = Option(sys.props("script.path")) match {
-    case None => printf("no script.path property is defined\n")
+
+    Option(sys.props("script.path")) match {
     case Some(path) =>
-      printf("script.path: %s\n",path)
-      assert(path.endsWith("scriptPath.sc"),s"actual path [$path]")
+      if ! path.endsWith("scriptPath.sc") then
+        printf( s"incorrect script.path defined as [$path]")
+      else
+        printf("script.path: %s\n",path) // report the value
+    case None =>
+      printf("no script.path property is defined\n")
+      // report relevant environment factors that might explain the error
+      val psep: String = Option(sys.props("path.separator")).get
+      val pathEntries = System.getenv("PATH").split(psep).toList
+      System.err.printf("sun.java.command: %s\n", sys.props("sun.java.command"))
+      System.err.printf("first 5 PATH entries:\n%s\n",pathEntries.take(5).mkString("\n"))
     }
+
+  extension(s: String)
+    def norm: String = s.replace('\\', '/')
diff --git a/compiler/test-resources/scripting/showArgs.sc b/compiler/test-resources/scripting/showArgs.sc
new file mode 100755
index 000000000000..28f16a9022b3
--- /dev/null
+++ b/compiler/test-resources/scripting/showArgs.sc
@@ -0,0 +1,6 @@
+#!/usr/bin/env scala
+
+// precise output format expected by BashScriptsTests.scala
+def main(args: Array[String]): Unit =
+  for (a,i) <- args.zipWithIndex do
+    printf(s"arg %2d:[%s]\n",i,a)
diff --git a/compiler/test-resources/scripting/sqlDateError.sc b/compiler/test-resources/scripting/sqlDateError.sc
new file mode 100755
index 000000000000..b9a47d245d1a
--- /dev/null
+++ b/compiler/test-resources/scripting/sqlDateError.sc
@@ -0,0 +1,6 @@
+#!bin/scala -nosave
+
+def main(args: Array[String]): Unit = {
+  println(new java.sql.Date(100L))
+  System.err.println("SCALA_OPTS="+Option(System.getenv("SCALA_OPTS")).getOrElse(""))
+}
diff --git a/compiler/test-resources/scripting/unglobClasspath.sc b/compiler/test-resources/scripting/unglobClasspath.sc
new file mode 100755
index 000000000000..796697cdedf2
--- /dev/null
+++ b/compiler/test-resources/scripting/unglobClasspath.sc
@@ -0,0 +1,8 @@
+#!bin/scala -classpath 'dist/target/pack/lib/*'
+
+// won't compile unless the hashbang line sets classpath
+import org.jline.terminal.Terminal
+
+def main(args: Array[String]) =
+  val cp = sys.props("java.class.path")
+  printf("unglobbed classpath: %s\n", cp)
diff --git a/compiler/test-resources/type-printer/infix b/compiler/test-resources/type-printer/infix
index caac75e62fe2..a7904ae9ec43 100644
--- a/compiler/test-resources/type-printer/infix
+++ b/compiler/test-resources/type-printer/infix
@@ -40,7 +40,7 @@ def foo: Int Mappy Boolean && String
 scala> @scala.annotation.showAsInfix(false) class ||[T,U]
 // defined class ||
 scala> def foo: Int || Boolean = ???
-def foo: ||[Int, Boolean]
+def foo: Int || Boolean
 scala> def foo: Int && Boolean & String = ???
 def foo: Int && Boolean & String
 scala> def foo: (Int && Boolean) & String = ???
diff --git a/compiler/test-resources/type-printer/type-mismatch b/compiler/test-resources/type-printer/type-mismatch
index 83a84d917113..d139f9dff74b 100644
--- a/compiler/test-resources/type-printer/type-mismatch
+++ b/compiler/test-resources/type-printer/type-mismatch
@@ -3,7 +3,11 @@ scala> case class Foo[A](a: A)
 scala> Foo(1)
 val res0: Foo[Int] = Foo(1)
 scala> val x: Foo[String] = res0
+-- [E007] Type Mismatch Error: -------------------------------------------------
 1 | val x: Foo[String] = res0
   |                      ^^^^
   |                      Found:    (res0 : Foo[Int])
   |                      Required: Foo[String]
+  |
+  | longer explanation available when compiling with `-explain`
+1 error found
diff --git a/compiler/test/dotc/comptest.scala b/compiler/test/dotc/comptest.scala
index bb4961ea2a88..bd0d800e641c 100644
--- a/compiler/test/dotc/comptest.scala
+++ b/compiler/test/dotc/comptest.scala
@@ -10,7 +10,7 @@ object comptest extends ParallelTesting {
   def numberOfSlaves = 5
   def safeMode = false
   def isInteractive = true
-  def testFilter = None
+  def testFilter = Nil
   def updateCheckFiles: Boolean = false
 
   val posDir = "./tests/pos/"
diff --git a/compiler/test/dotc/pos-from-tasty.blacklist b/compiler/test/dotc/pos-from-tasty.blacklist
index b0ea2e35caec..e48c7f7ca8ce 100644
--- a/compiler/test/dotc/pos-from-tasty.blacklist
+++ b/compiler/test/dotc/pos-from-tasty.blacklist
@@ -11,3 +11,9 @@ rbtree.scala
 
 # transitive reduction of match types
 i10511.scala
+
+# Violates tightened condition in Retyper#typedTyped
+i11247.scala
+
+# Tree is huge and blows stack for printing Text
+i7034.scala
diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist
index e6fd9deb979d..b42d41f0df85 100644
--- a/compiler/test/dotc/pos-test-pickling.blacklist
+++ b/compiler/test/dotc/pos-test-pickling.blacklist
@@ -16,6 +16,11 @@ tuple-filter.scala
 i7740a.scala
 i7740b.scala
 i6507b.scala
+i12299a.scala
+i13871.scala
+
+# Tree is huge and blows stack for printing Text
+i7034.scala
 
 # Stale symbol: package object scala
 seqtype-cycle
@@ -24,18 +29,17 @@ seqtype-cycle
 i939.scala
 
 # Match types
-typelevel0.scala
-matchtype.scala
-6322.scala
-i7087.scala
-i7868.scala
 i7872.scala
-6709.scala
-6687.scala
 i11236.scala
 i11247.scala
 i11250
 i9999.scala
+8649.scala
+12093.scala
+9757.scala
+9890.scala
+13491.scala
+7512.scala
 
 # Opaque type
 i5720.scala
@@ -64,3 +68,15 @@ i8182.scala
 
 # local lifted value in annotation argument has different position after pickling
 i2797a
+
+# Late instantiation of type variable in tryInsertImplicitOnQualifier
+# allows to simplify a type that was already computed
+i13842.scala
+
+# GADT cast applied to singleton type difference
+i4176-gadt.scala
+
+# GADT difference
+i13974a.scala
+
+java-inherited-type1
diff --git a/compiler/test/dotc/run-test-pickling.blacklist b/compiler/test/dotc/run-test-pickling.blacklist
index d36314e896d8..9f19b439135c 100644
--- a/compiler/test/dotc/run-test-pickling.blacklist
+++ b/compiler/test/dotc/run-test-pickling.blacklist
@@ -1,3 +1,6 @@
+## Many of these tests fail because CompilationTests.pickling does not handle
+## tests containing java files correctly
+
 derive-generic.scala
 eff-dependent.scala
 enum-java
@@ -6,6 +9,8 @@ i7212
 i7868.scala
 i9011.scala
 i9473.scala
+i13433.scala
+i13433b.scala
 macros-in-same-project1
 mixin-forwarder-overload
 t10889
@@ -33,3 +38,9 @@ typeclass-derivation2d.scala
 typeclass-derivation3.scala
 varargs-abstract
 zero-arity-case-class.scala
+i12194.scala
+i12753
+t6138
+t6138-2
+i12656.scala
+trait-static-forwarder
diff --git a/compiler/test/dotty/Properties.scala b/compiler/test/dotty/Properties.scala
index 042773505dc5..4ef2ad7a61c9 100644
--- a/compiler/test/dotty/Properties.scala
+++ b/compiler/test/dotty/Properties.scala
@@ -15,13 +15,18 @@ object Properties {
   val isRunByCI: Boolean = sys.env.isDefinedAt("DOTTY_CI_RUN")
   || sys.env.isDefinedAt("DRONE")  // TODO remove this when we drop Drone
 
+  val testCache: Path =
+    sys.env.get("DOTTY_TEST_CACHE").map(Paths.get(_)).getOrElse {
+      Paths.get(sys.props("user.home"), ".cache", "dotty", "test")
+    }
+
   /** Tests should run interactive? */
   val testsInteractive: Boolean = propIsNullOrTrue("dotty.tests.interactive")
 
   /** Filter out tests not matching the regex supplied by "dotty.tests.filter"
    *  define
    */
-  val testsFilter: Option[String] = sys.props.get("dotty.tests.filter")
+  val testsFilter: List[String] = sys.props.get("dotty.tests.filter").fold(Nil)(_.split(',').toList)
 
   /** Tests should override the checkfiles with the current output */
   val testsUpdateCheckfile: Boolean =
diff --git a/compiler/test/dotty/tools/DottyTypeStealer.scala b/compiler/test/dotty/tools/DottyTypeStealer.scala
index 0a80205fbacf..52bf7a7e221a 100644
--- a/compiler/test/dotty/tools/DottyTypeStealer.scala
+++ b/compiler/test/dotty/tools/DottyTypeStealer.scala
@@ -3,29 +3,97 @@ package dotty.tools
 import dotc.ast.tpd
 import dotc.core.Names._
 import dotc.ast.tpd._
-import dotc.core.Contexts.Context
+import dotc.core.Contexts.{Context, atPhase}
+import dotty.tools.dotc.core.Phases.{typerPhase, erasurePhase}
+import dotc.core.Symbols.Symbol
 import dotc.core.Decorators._
 import dotc.core.Types.Type
 
+import scala.util.CommandLineParser.FromString
+
+/**Pass a string representing a Scala source file,
+ * and then some type signatures referencing prior definitions.
+ *
+ * The type signatures will then be printed as raw data structures.
+ *
+ * @param source top level Scala definitions, e.g. `"class O { type X }"`
+ * @param kind the kind of type we are inspecting [`rhs`, `method`, `class`, `type`]
+ * @param typeStrings Scala type signatures, e.g. `"O#X"`
+ *
+ * @syntax markdown
+ */
+@main def printTypes(source: String, kind: DottyTypeStealer.Kind, typeStrings: String*) = {
+  val (_, tpes) = DottyTypeStealer.stealType(source, kind, typeStrings*)
+  tpes.foreach(t => println(s"$t [${t.getClass}]"))
+}
+
+/**Pass a string representing a Scala source file,
+ * and then some type signatures referencing prior definitions.
+ *
+ * The type signatures will then be printed comparing between phase
+ * `typer` where types are as Scala understands them and phase `erasure`,
+ * which models the JVM types.
+ *
+ * @param source top level Scala definitions, e.g. `"class O { type X }"`
+ * @param kind the kind of type we are inspecting [`rhs`, `method`, `class`, `type`]
+ * @param typeStrings Scala type signatures, e.g. `"O#X"`
+ *
+ * @syntax markdown
+ */
+@main def printTypesAndErasure(source: String, kind: DottyTypeStealer.Kind, typeStrings: String*): Unit =
+  val (ictx, vdefs) = DottyTypeStealer.stealMember("erasure", source, kind, typeStrings*)
+
+  given Context = ictx
+
+  for vdef <- vdefs do
+    println(i"info @ typer   => ${atPhase(typerPhase.next)(vdef.info)}")
+    println(i"info @ erasure => ${atPhase(erasurePhase.next)(vdef.info)}")
+end printTypesAndErasure
+
 object DottyTypeStealer extends DottyTest {
-  def stealType(source: String, typeStrings: String*): (Context, List[Type]) = {
+
+  given FromString[Kind] = kind =>
+    if kind == "" then
+      println(s"assuming kind `${Kind.rhs}`")
+      Kind.rhs
+    else
+      Kind.valueOf(kind)
+
+  enum Kind:
+    case `rhs`, `method`, `class`, `type`
+
+    def format(name: String, arg: String) = this match
+      case `rhs`    => s"val $name: $arg = ???"
+      case `method` => s"def $name $arg = ???"
+      case `class`  => s"class $name $arg"
+      case `type`   => s"type $name $arg"
+
+  def stealType(source: String, kind: Kind, typeStrings: String*): (Context, List[Type]) = {
+    val (scontext, members) = stealMember("typer", source, kind, typeStrings*)
+    given Context = scontext
+    (scontext, members.map(_.info))
+  }
+
+  def stealMember(lastPhase: String, source: String, kind: Kind, typeStrings: String*): (Context, List[Symbol]) = {
     val dummyName = "x_x_x"
-    val vals = typeStrings.zipWithIndex.map{case (s, x)=> s"val ${dummyName}$x: $s = ???"}.mkString("\n")
+    val vals = typeStrings.zipWithIndex.map{case (s, x) => kind.format(dummyName + x, s) }.mkString("\n")
     val gatheredSource = s" ${source}\n object A$dummyName {$vals}"
     var scontext : Context = null
-    var tp: List[Type] = null
-    checkCompile("typer", gatheredSource) {
+    var members: List[Symbol] = null
+    checkCompile(lastPhase, gatheredSource) {
       (tree, context) =>
         given Context = context
-        val findValDef: (List[ValDef], tpd.Tree) => List[ValDef] =
+        val findMemberDef: (List[MemberDef], tpd.Tree) => List[MemberDef] =
           (acc , tree) =>  tree match {
+            case t: DefDef if t.name.startsWith(dummyName) => t :: acc
             case t: ValDef if t.name.startsWith(dummyName) => t :: acc
+            case t: TypeDef if t.name.startsWith(dummyName) => t :: acc
             case _ => acc
           }
-        val d = new DeepFolder[List[ValDef]](findValDef).foldOver(Nil, tree)
-        tp = d.map(_.tpe.widen).reverse
+        val d = new DeepFolder[List[MemberDef]](findMemberDef).foldOver(Nil, tree)
+        members = d.map(_.symbol).reverse
         scontext = context
     }
-    (scontext, tp)
+    (scontext, members)
   }
 }
diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala
index d984b612ced2..9d6ff413f8a2 100644
--- a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala
+++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala
@@ -32,6 +32,22 @@ class TestBCode extends DottyBytecodeTest {
     }
   }
 
+  @Test def byNameParameters = {
+    val source = """
+                   |class Foo {
+                   |  def byNameParam(str: => String): Unit = {}
+                   |}
+                 """.stripMargin
+
+    checkBCode(source) { dir =>
+      val clsIn      = dir.lookupName("Foo.class", directory = false).input
+      val clsNode    = loadClassNode(clsIn)
+      val methodNode: MethodNode = getMethod(clsNode, "byNameParam")
+
+      assert(methodNode.signature == "(Lscala/Function0;)V")
+    }
+  }
+
   /** This test verifies that simple matches are transformed if possible
    *  despite no annotation
    */
@@ -118,6 +134,28 @@ class TestBCode extends DottyBytecodeTest {
     }
   }
 
+  @Test def switchOnStrings = {
+    val source =
+      """
+        |object Foo {
+        |  import scala.annotation.switch
+        |  def foo(s: String) = s match {
+        |    case "AaAa" => println(3)
+        |    case "BBBB" | "c" => println(2)
+        |    case "D" | "E" => println(1)
+        |    case _ => println(0)
+        |  }
+        |}
+      """.stripMargin
+
+    checkBCode(source) { dir =>
+      val moduleIn   = dir.lookupName("Foo$.class", directory = false)
+      val moduleNode = loadClassNode(moduleIn.input)
+      val methodNode = getMethod(moduleNode, "foo")
+      assert(verifySwitch(methodNode))
+    }
+  }
+
   @Test def matchWithDefaultNoThrowMatchError = {
     val source =
       """class Test {
@@ -677,7 +715,7 @@ class TestBCode extends DottyBytecodeTest {
       """.stripMargin)
 
   @Test def objectsInObjDefAreFinal =
-    checkFinalClass("Test$Foo$1$.class",
+    checkFinalClass("Test$Foo$2$.class",
       """
         |object Test {
         |  def bar() = {
@@ -687,7 +725,7 @@ class TestBCode extends DottyBytecodeTest {
       """.stripMargin)
 
   @Test def objectsInClassDefAreFinal =
-    checkFinalClass("Test$Foo$1$.class",
+    checkFinalClass("Test$Foo$2$.class",
       """
         |class Test {
         |  def bar() = {
@@ -697,7 +735,7 @@ class TestBCode extends DottyBytecodeTest {
       """.stripMargin)
 
   @Test def objectsInObjValAreFinal =
-    checkFinalClass("Test$Foo$1$.class",
+    checkFinalClass("Test$Foo$2$.class",
       """
         |class Test {
         |  val bar = {
@@ -759,17 +797,14 @@ class TestBCode extends DottyBytecodeTest {
         FrameEntry(1, List(1), List()),
         VarOp(Opcodes.ILOAD, 1),
         Op(Opcodes.ICONST_5),
-        Jump(Opcodes.IF_ICMPGT, Label(16)),
+        Jump(Opcodes.IF_ICMPGT, Label(13)),
         Field(Opcodes.GETSTATIC, "scala/Predef$", "MODULE$", "Lscala/Predef$;"),
         VarOp(Opcodes.ILOAD, 1),
         Invoke(Opcodes.INVOKESTATIC, "scala/runtime/BoxesRunTime", "boxToInteger", "(I)Ljava/lang/Integer;", false),
         Invoke(Opcodes.INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false),
-        VarOp(Opcodes.ILOAD, 1),
-        Op(Opcodes.ICONST_1),
-        Op(Opcodes.IADD),
-        VarOp(Opcodes.ISTORE, 1),
+        Incr(Opcodes.IINC, 1, 1),
         Jump(Opcodes.GOTO, Label(2)),
-        Label(16),
+        Label(13),
         FrameEntry(3, List(), List()),
         Op(Opcodes.RETURN))
 
@@ -938,7 +973,7 @@ class TestBCode extends DottyBytecodeTest {
 
   @Test
   def invocationReceivers(): Unit = {
-    import Opcodes._
+    import Opcodes.*
 
     checkBCode(List(invocationReceiversTestCode.definitions("Object"))) { dir =>
       val c1 = loadClassNode(dir.lookupName("C1.class", directory = false).input)
diff --git a/compiler/test/dotty/tools/backend/jvm/IincTest.scala b/compiler/test/dotty/tools/backend/jvm/IincTest.scala
new file mode 100644
index 000000000000..ad19a4b489ae
--- /dev/null
+++ b/compiler/test/dotty/tools/backend/jvm/IincTest.scala
@@ -0,0 +1,71 @@
+package dotty.tools.backend.jvm
+
+import org.junit.Test
+import org.junit.Assert._
+
+import scala.tools.asm.Opcodes._
+
+class IincTest extends DottyBytecodeTest {
+  import ASMConverters._
+
+  @Test def increment = test(
+    """{
+      |  var i = x
+      |  i += 1
+      |  i += 54
+      |  i += 127
+      |  i -= 1
+      |  i -= 54
+      |  i -= 128
+      |  i
+      |}""".stripMargin,
+    List(1, 54, 127, -1, -54, -128)
+  )
+
+  @Test def wideIncrement = test(
+    """{
+      |  var i = x
+      |  i += 128
+      |  i += 8765
+      |  i += 32767
+      |  i -= 129
+      |  i -= 8765
+      |  i -= 32768
+      |  i
+      |}""".stripMargin,
+    List(128, 8765, 32767, -129, -8765, -32768)
+  )
+
+  @Test def tooBigForIinc = test(
+    """{
+      |  var i = x
+      |  i += 32768
+      |  i += 56789
+      |  i += 2147483647
+      |  i -= 32769
+      |  i -= 56789
+      |  i -= 2147483647
+      |  i
+      |}""".stripMargin,
+    Nil
+  )
+
+  private def test(code: String, expectedIincs: List[Int])= {
+    val source =
+      s"""class Increment {
+         | def test(x: Int): Int = $code
+         |}
+       """.stripMargin
+
+    checkBCode(source) { dir =>
+      val clsIn   = dir.lookupName("Increment.class", directory = false).input
+      val clsNode = loadClassNode(clsIn)
+      val meth   = getMethod(clsNode, "test")
+
+      val foundIincs = instructionsFromMethod(meth).collect { case iinc: Incr => iinc.incr }
+
+      assertEquals(expectedIincs, foundIincs)
+    }
+  }
+
+}
diff --git a/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala
index 829bc2607feb..741928203d77 100644
--- a/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala
+++ b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala
@@ -397,10 +397,7 @@ class InlineBytecodeTests extends DottyBytecodeTest {
         List(
           IntOp(BIPUSH, 10),
           VarOp(ISTORE, 1),
-          VarOp(ILOAD, 1),
-          Op(ICONST_1),
-          Op(IADD),
-          VarOp(ISTORE, 1),
+          Incr(IINC, 1, 1),
           VarOp(ILOAD, 1),
           Op(IRETURN),
         )
@@ -615,4 +612,108 @@ class InlineBytecodeTests extends DottyBytecodeTest {
 
     }
   }
+
+  @Test def any_eq_specialization = {
+    val source = """class Test:
+                   |  inline def eql(x: Any, y: Any) = x == y
+                   |
+                   |  def testAny(x: Any, y: Any) = eql(x, y)
+                   |  def testAnyExpected(x: Any, y: Any) = x == y
+                   |
+                   |  def testBoolean(x: Boolean, y: Boolean) = eql(x, y)
+                   |  def testBooleanExpected(x: Boolean, y: Boolean) = x == y
+                   |
+                   |  def testByte(x: Byte, y: Byte) = eql(x, y)
+                   |  def testByteExpected(x: Byte, y: Byte) = x == y
+                   |
+                   |  def testShort(x: Short, y: Short) = eql(x, y)
+                   |  def testShortExpected(x: Short, y: Short) = x == y
+                   |
+                   |  def testInt(x: Int, y: Int) = eql(x, y)
+                   |  def testIntExpected(x: Int, y: Int) = x == y
+                   |
+                   |  def testLong(x: Long, y: Long) = eql(x, y)
+                   |  def testLongExpected(x: Long, y: Long) = x == y
+                   |
+                   |  def testFloat(x: Float, y: Float) = eql(x, y)
+                   |  def testFloatExpected(x: Float, y: Float) = x == y
+                   |
+                   |  def testDouble(x: Double, y: Double) = eql(x, y)
+                   |  def testDoubleExpected(x: Double, y: Double) = x == y
+                   |
+                   |  def testChar(x: Char, y: Char) = eql(x, y)
+                   |  def testCharExpected(x: Char, y: Char) = x == y
+                   |
+                   |  def testUnit(x: Unit, y: Unit) = eql(x, y)
+                   |  def testUnitExpected(x: Unit, y: Unit) = x == y
+                 """.stripMargin
+
+    checkBCode(source) { dir =>
+      val clsIn      = dir.lookupName("Test.class", directory = false).input
+      val clsNode    = loadClassNode(clsIn)
+
+      for cls <- List("Boolean", "Byte", "Short", "Int", "Long", "Float", "Double", "Char", "Unit") do
+        val meth1      = getMethod(clsNode, s"test$cls")
+        val meth2      = getMethod(clsNode, s"test${cls}Expected")
+
+        val instructions1 = instructionsFromMethod(meth1)
+        val instructions2 = instructionsFromMethod(meth2)
+
+        assert(instructions1 == instructions2,
+          s"`==` was not properly specialized when inlined in `test$cls`\n" +
+          diffInstructions(instructions1, instructions2))
+    }
+  }
+
+  @Test def any_neq_specialization = {
+    val source = """class Test:
+                   |  inline def neql(x: Any, y: Any) = x != y
+                   |
+                   |  def testAny(x: Any, y: Any) = neql(x, y)
+                   |  def testAnyExpected(x: Any, y: Any) = x != y
+                   |
+                   |  def testBoolean(x: Boolean, y: Boolean) = neql(x, y)
+                   |  def testBooleanExpected(x: Boolean, y: Boolean) = x != y
+                   |
+                   |  def testByte(x: Byte, y: Byte) = neql(x, y)
+                   |  def testByteExpected(x: Byte, y: Byte) = x != y
+                   |
+                   |  def testShort(x: Short, y: Short) = neql(x, y)
+                   |  def testShortExpected(x: Short, y: Short) = x != y
+                   |
+                   |  def testInt(x: Int, y: Int) = neql(x, y)
+                   |  def testIntExpected(x: Int, y: Int) = x != y
+                   |
+                   |  def testLong(x: Long, y: Long) = neql(x, y)
+                   |  def testLongExpected(x: Long, y: Long) = x != y
+                   |
+                   |  def testFloat(x: Float, y: Float) = neql(x, y)
+                   |  def testFloatExpected(x: Float, y: Float) = x != y
+                   |
+                   |  def testDouble(x: Double, y: Double) = neql(x, y)
+                   |  def testDoubleExpected(x: Double, y: Double) = x != y
+                   |
+                   |  def testChar(x: Char, y: Char) = neql(x, y)
+                   |  def testCharExpected(x: Char, y: Char) = x != y
+                   |
+                   |  def testUnit(x: Unit, y: Unit) = neql(x, y)
+                   |  def testUnitExpected(x: Unit, y: Unit) = x != y
+                 """.stripMargin
+
+    checkBCode(source) { dir =>
+      val clsIn      = dir.lookupName("Test.class", directory = false).input
+      val clsNode    = loadClassNode(clsIn)
+
+      for cls <- List("Boolean", "Byte", "Short", "Int", "Long", "Float", "Double", "Char", "Unit") do
+        val meth1      = getMethod(clsNode, s"test$cls")
+        val meth2      = getMethod(clsNode, s"test${cls}Expected")
+
+        val instructions1 = instructionsFromMethod(meth1)
+        val instructions2 = instructionsFromMethod(meth2)
+
+        assert(instructions1 == instructions2,
+          s"`!=` was not properly specialized when inlined in `test$cls`\n" +
+          diffInstructions(instructions1, instructions2))
+    }
+  }
 }
diff --git a/compiler/test/dotty/tools/backend/jvm/StringConcatTest.scala b/compiler/test/dotty/tools/backend/jvm/StringConcatTest.scala
index f288a8d6ff33..613e72b32e52 100644
--- a/compiler/test/dotty/tools/backend/jvm/StringConcatTest.scala
+++ b/compiler/test/dotty/tools/backend/jvm/StringConcatTest.scala
@@ -61,7 +61,7 @@ class StringConcatTest extends DottyBytecodeTest {
       }
 
       assertEquals(List(
-        "()V",
+        "(I)V",
         "toString()Ljava/lang/String;",
         "append(Ljava/lang/String;)Ljava/lang/StringBuilder;",
         "append(Ljava/lang/Object;)Ljava/lang/StringBuilder;",
@@ -82,7 +82,7 @@ class StringConcatTest extends DottyBytecodeTest {
       )
 
       assertEquals(List(
-        "()V",
+        "(I)V",
         "toString()Ljava/lang/String;",
         "append(Ljava/lang/String;)Ljava/lang/StringBuilder;",
         "append(Ljava/lang/String;)Ljava/lang/StringBuilder;",
diff --git a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala
index 3af7d5e676b3..9a6da578348c 100644
--- a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala
+++ b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala
@@ -29,6 +29,7 @@ class BootstrappedOnlyCompilationTests {
       compileFilesInDir("tests/pos-custom-args/semanticdb", defaultOptions.and("-Xsemanticdb")),
       compileDir("tests/pos-special/i7592", defaultOptions.and("-Yretain-trees")),
       compileDir("tests/pos-special/i11331.1", defaultOptions),
+      compileDir("tests/pos-custom-args/i13405", defaultOptions.and("-Xfatal-warnings")),
     ).checkCompile()
   }
 
@@ -118,6 +119,7 @@ class BootstrappedOnlyCompilationTests {
     aggregateTests(
       compileFilesInDir("tests/run-macros", defaultOptions.and("-Xcheck-macros")),
       compileFilesInDir("tests/run-custom-args/Yretain-trees", defaultOptions and "-Yretain-trees"),
+      compileFilesInDir("tests/run-custom-args/Yread-comments", defaultOptions and "-Yread-docs"),
       compileFilesInDir("tests/run-custom-args/run-macros-erased", defaultOptions.and("-language:experimental.erasedDefinitions").and("-Xcheck-macros")),
     )
   }.checkRuns()
diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala
index db05ed611a0e..88eab8d131e6 100644
--- a/compiler/test/dotty/tools/dotc/CompilationTests.scala
+++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala
@@ -37,11 +37,12 @@ class CompilationTests {
       compileFilesInDir("tests/pos-special/spec-t5545", defaultOptions),
       compileFilesInDir("tests/pos-special/strawman-collections", allowDeepSubtypes),
       compileFilesInDir("tests/pos-special/isInstanceOf", allowDeepSubtypes.and("-Xfatal-warnings")),
-      compileFilesInDir("tests/new", defaultOptions),
+      compileFilesInDir("tests/new", defaultOptions.and("-source", "3.1")), // just to see whether 3.1 works
       compileFilesInDir("tests/pos-scala2", scala2CompatMode),
       compileFilesInDir("tests/pos-custom-args/erased", defaultOptions.and("-language:experimental.erasedDefinitions")),
       compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init")),
       compileFilesInDir("tests/pos-deep-subtype", allowDeepSubtypes),
+      compileFilesInDir("tests/pos-custom-args/no-experimental", defaultOptions.and("-Yno-experimental")),
       compileDir("tests/pos-special/java-param-names", defaultOptions.withJavacOnlyOptions("-parameters")),
       compileFile(
         // succeeds despite -Xfatal-warnings because of -nowarn
@@ -54,11 +55,16 @@ class CompilationTests {
       compileDir("tests/pos-special/adhoc-extension", defaultOptions.and("-source", "future", "-feature", "-Xfatal-warnings")),
       compileFile("tests/pos-special/i7575.scala", defaultOptions.andLanguageFeature("dynamics")),
       compileFile("tests/pos-special/kind-projector.scala", defaultOptions.and("-Ykind-projector")),
+      compileFile("tests/pos-special/kind-projector-underscores.scala", defaultOptions.and("-Ykind-projector:underscores")),
       compileFile("tests/run/i5606.scala", defaultOptions.and("-Yretain-trees")),
       compileFile("tests/pos-custom-args/i5498-postfixOps.scala", defaultOptions withoutLanguageFeature "postfixOps"),
       compileFile("tests/pos-custom-args/i8875.scala", defaultOptions.and("-Xprint:getters")),
       compileFile("tests/pos-custom-args/i9267.scala", defaultOptions.and("-Ystop-after:erasure")),
       compileFile("tests/pos-special/extend-java-enum.scala", defaultOptions.and("-source", "3.0-migration")),
+      compileFile("tests/pos-custom-args/help.scala", defaultOptions.and("-help", "-V", "-W", "-X", "-Y")),
+      compileFile("tests/pos-custom-args/i10383.scala", defaultOptions.and("-source", "future", "-deprecation", "-Xfatal-warnings")),
+      compileFile("tests/pos-custom-args/i13044.scala", defaultOptions.and("-Xmax-inlines:33")),
+      compileFile("tests/pos-custom-args/jdk-8-app.scala", defaultOptions.and("-release:8")),
     ).checkCompile()
   }
 
@@ -70,7 +76,8 @@ class CompilationTests {
       compileFile("tests/rewrites/rewrites3x.scala", defaultOptions.and("-rewrite", "-source", "future-migration")),
       compileFile("tests/rewrites/i8982.scala", defaultOptions.and("-indent", "-rewrite")),
       compileFile("tests/rewrites/i9632.scala", defaultOptions.and("-indent", "-rewrite")),
-      compileFile("tests/rewrites/i11895.scala", defaultOptions.and("-indent", "-rewrite"))
+      compileFile("tests/rewrites/i11895.scala", defaultOptions.and("-indent", "-rewrite")),
+      compileFile("tests/rewrites/i12340.scala", unindentOptions.and("-rewrite")),
     ).checkRewrites()
   }
 
@@ -126,12 +133,14 @@ class CompilationTests {
       compileFilesInDir("tests/neg-no-kind-polymorphism", defaultOptions and "-Yno-kind-polymorphism"),
       compileFilesInDir("tests/neg-custom-args/deprecation", defaultOptions.and("-Xfatal-warnings", "-deprecation")),
       compileFilesInDir("tests/neg-custom-args/fatal-warnings", defaultOptions.and("-Xfatal-warnings")),
+      compileFilesInDir("tests/neg-custom-args/nowarn", defaultOptions.and("-deprecation", "-Wunused:nowarn", "-Wconf:msg=@nowarn annotation does not suppress any warnings:e")),
       compileFilesInDir("tests/neg-custom-args/erased", defaultOptions.and("-language:experimental.erasedDefinitions")),
       compileFilesInDir("tests/neg-custom-args/allow-double-bindings", allowDoubleBindings),
       compileFilesInDir("tests/neg-custom-args/allow-deep-subtypes", allowDeepSubtypes),
       compileFilesInDir("tests/neg-custom-args/explicit-nulls", defaultOptions.and("-Yexplicit-nulls")),
       compileFilesInDir("tests/neg-custom-args/no-experimental", defaultOptions.and("-Yno-experimental")),
       compileDir("tests/neg-custom-args/impl-conv", defaultOptions.and("-Xfatal-warnings", "-feature")),
+      compileDir("tests/neg-custom-args/i13946", defaultOptions.and("-Xfatal-warnings", "-feature")),
       compileFile("tests/neg-custom-args/implicit-conversions.scala", defaultOptions.and("-Xfatal-warnings", "-feature")),
       compileFile("tests/neg-custom-args/implicit-conversions-old.scala", defaultOptions.and("-Xfatal-warnings", "-feature")),
       compileFile("tests/neg-custom-args/i3246.scala", scala2CompatMode),
@@ -146,6 +155,7 @@ class CompilationTests {
       compileFile("tests/neg-custom-args/i3882.scala", allowDeepSubtypes),
       compileFile("tests/neg-custom-args/i4372.scala", allowDeepSubtypes),
       compileFile("tests/neg-custom-args/i1754.scala", allowDeepSubtypes),
+      compileFile("tests/neg-custom-args/i12650.scala", allowDeepSubtypes),
       compileFile("tests/neg-custom-args/i9517.scala", defaultOptions.and("-Xprint-types")),
       compileFile("tests/neg-custom-args/i11637.scala", defaultOptions.and("-explain")),
       compileFile("tests/neg-custom-args/interop-polytypes.scala", allowDeepSubtypes.and("-Yexplicit-nulls")),
@@ -166,12 +176,17 @@ class CompilationTests {
       compileDir("tests/neg-custom-args/adhoc-extension", defaultOptions.and("-source", "future", "-feature", "-Xfatal-warnings")),
       compileFile("tests/neg/i7575.scala", defaultOptions.withoutLanguageFeatures.and("-language:_")),
       compileFile("tests/neg-custom-args/kind-projector.scala", defaultOptions.and("-Ykind-projector")),
+      compileFile("tests/neg-custom-args/kind-projector-underscores.scala", defaultOptions.and("-Ykind-projector:underscores")),
       compileFile("tests/neg-custom-args/typeclass-derivation2.scala", defaultOptions.and("-language:experimental.erasedDefinitions")),
       compileFile("tests/neg-custom-args/i5498-postfixOps.scala", defaultOptions withoutLanguageFeature "postfixOps"),
       compileFile("tests/neg-custom-args/deptypes.scala", defaultOptions.and("-language:experimental.dependent")),
       compileFile("tests/neg-custom-args/matchable.scala", defaultOptions.and("-Xfatal-warnings", "-source", "future")),
       compileFile("tests/neg-custom-args/i7314.scala", defaultOptions.and("-Xfatal-warnings", "-source", "future")),
       compileFile("tests/neg-custom-args/feature-shadowing.scala", defaultOptions.and("-Xfatal-warnings", "-feature")),
+      compileDir("tests/neg-custom-args/hidden-type-errors", defaultOptions.and("-explain")),
+      compileFile("tests/neg-custom-args/i13026.scala", defaultOptions.and("-print-lines")),
+      compileFile("tests/neg-custom-args/i13838.scala", defaultOptions.and("-Ximplicit-search-limit", "1000")),
+      compileFile("tests/neg-custom-args/jdk-9-app.scala", defaultOptions.and("-release:8")),
     ).checkExpectedErrors()
   }
 
@@ -191,6 +206,9 @@ class CompilationTests {
       compileFile("tests/run-custom-args/no-useless-forwarders.scala", defaultOptions and "-Xmixin-force-forwarders:false"),
       compileFile("tests/run-custom-args/defaults-serizaliable-no-forwarders.scala", defaultOptions and "-Xmixin-force-forwarders:false"),
       compileFilesInDir("tests/run-custom-args/erased", defaultOptions.and("-language:experimental.erasedDefinitions")),
+      compileFilesInDir("tests/run-custom-args/fatal-warnings", defaultOptions.and("-Xfatal-warnings")),
+      compileDir("tests/run-custom-args/Xmacro-settings/simple", defaultOptions.and("-Xmacro-settings:one,two,three")),
+      compileDir("tests/run-custom-args/Xmacro-settings/compileTimeEnv", defaultOptions.and("-Xmacro-settings:a,b=1,c.b.a=x.y.z=1,myLogger.level=INFO")),
       compileFilesInDir("tests/run-deep-subtype", allowDeepSubtypes),
       compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init"))
     ).checkRuns()
@@ -214,84 +232,6 @@ class CompilationTests {
     ).checkCompile()
   }
 
-  /** The purpose of this test is three-fold, being able to compile dotty
-   *  bootstrapped, and making sure that TASTY can link against a compiled
-   *  version of Dotty, and compiling the compiler using the SemanticDB generation
-   */
-  @Test def tastyBootstrap: Unit = {
-    implicit val testGroup: TestGroup = TestGroup("tastyBootstrap/tests")
-    val libGroup = TestGroup("tastyBootstrap/lib")
-    val tastyCoreGroup = TestGroup("tastyBootstrap/tastyCore")
-    val dotty1Group = TestGroup("tastyBootstrap/dotty1")
-    val dotty2Group = TestGroup("tastyBootstrap/dotty2")
-
-    // Make sure that the directory is clean
-    dotty.tools.io.Directory(defaultOutputDir + "tastyBootstrap").deleteRecursively()
-
-    val opt = TestFlags(
-      List(
-        // compile with bootstrapped library on cp:
-        defaultOutputDir + libGroup + "/lib/",
-        // and bootstrapped tasty-core:
-        defaultOutputDir + tastyCoreGroup + "/tastyCore/",
-        // as well as bootstrapped compiler:
-        defaultOutputDir + dotty1Group + "/dotty1/",
-        // and the other compiler dependencies:
-        Properties.compilerInterface, Properties.scalaLibrary, Properties.scalaAsm,
-        Properties.dottyInterfaces, Properties.jlineTerminal, Properties.jlineReader,
-      ).mkString(File.pathSeparator),
-      Array("-Ycheck-reentrant", "-language:postfixOps", "-Xsemanticdb", "-Yno-experimental")
-    )
-
-    val libraryDirs = List(Paths.get("library/src"), Paths.get("library/src-bootstrapped"))
-    val librarySources = libraryDirs.flatMap(sources(_))
-
-    val lib =
-      compileList("lib", librarySources,
-        defaultOptions.and("-Ycheck-reentrant",
-          "-language:experimental.erasedDefinitions", // support declaration of scala.compiletime.erasedValue
-          //  "-source", "future",  // TODO: re-enable once we allow : @unchecked in pattern definitions. Right now, lots of narrowing pattern definitions fail.
-          ))(libGroup)
-
-    val tastyCoreSources = sources(Paths.get("tasty/src"))
-    val tastyCore = compileList("tastyCore", tastyCoreSources, opt)(tastyCoreGroup)
-
-    val compilerSources = sources(Paths.get("compiler/src")) ++ sources(Paths.get("compiler/src-bootstrapped"))
-    val compilerManagedSources = sources(Properties.dottyCompilerManagedSources)
-
-    val dotty1 = compileList("dotty1", compilerSources ++ compilerManagedSources, opt)(dotty1Group)
-    val dotty2 = compileList("dotty2", compilerSources ++ compilerManagedSources, opt)(dotty2Group)
-
-    val tests = {
-      lib.keepOutput :: tastyCore.keepOutput :: dotty1.keepOutput :: aggregateTests(
-        dotty2,
-        compileShallowFilesInDir("compiler/src/dotty/tools", opt),
-        compileShallowFilesInDir("compiler/src/dotty/tools/dotc", opt),
-        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/ast", opt),
-        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/config", opt),
-        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/parsing", opt),
-        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/printing", opt),
-        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/reporting", opt),
-        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/rewrites", opt),
-        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/transform", opt),
-        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/typer", opt),
-        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/util", opt),
-        compileShallowFilesInDir("compiler/src/dotty/tools/backend", opt),
-        compileShallowFilesInDir("compiler/src/dotty/tools/backend/jvm", opt),
-        compileList("managed-sources", compilerManagedSources, opt)
-      ).keepOutput :: Nil
-    }.map(_.checkCompile())
-
-    def assertExists(path: String) = assertTrue(Files.exists(Paths.get(path)))
-    assertExists(s"out/$libGroup/lib/")
-    assertExists(s"out/$tastyCoreGroup/tastyCore/")
-    assertExists(s"out/$dotty1Group/dotty1/")
-    assertExists(s"out/$dotty2Group/dotty2/")
-    compileList("idempotency", List("tests/idempotency/BootstrapChecker.scala", "tests/idempotency/IdempotencyCheck.scala"), defaultOptions).checkRuns()
-
-    tests.foreach(_.delete())
-  }
-
   // Explicit nulls tests
   @Test def explicitNullsNeg: Unit = {
     implicit val testGroup: TestGroup = TestGroup("explicitNullsNeg")
@@ -323,6 +263,27 @@ class CompilationTests {
     compileFilesInDir("tests/init/neg", options).checkExpectedErrors()
     compileFilesInDir("tests/init/pos", options).checkCompile()
     compileFilesInDir("tests/init/crash", options.without("-Xfatal-warnings")).checkCompile()
+
+    // The regression test for i12128 has some atypical classpath requirements.
+    // The test consists of three files: (a) Reflect_1  (b) Macro_2  (c) Test_3
+    // which must be compiled separately. In addition:
+    //   - the output from (a) must be on the classpath while compiling (b)
+    //   - the output from (b) must be on the classpath while compiling (c)
+    //   - the output from (a) _must not_ be on the classpath while compiling (c)
+    locally {
+      val i12128Group = TestGroup("checkInit/i12128")
+      val i12128Options = options.without("-Xfatal-warnings")
+      val outDir1 = defaultOutputDir + i12128Group + "/Reflect_1/i12128/Reflect_1"
+      val outDir2 = defaultOutputDir + i12128Group + "/Macro_2/i12128/Macro_2"
+
+      val tests = List(
+        compileFile("tests/init/special/i12128/Reflect_1.scala", i12128Options)(i12128Group),
+        compileFile("tests/init/special/i12128/Macro_2.scala", i12128Options.withClasspath(outDir1))(i12128Group),
+        compileFile("tests/init/special/i12128/Test_3.scala", options.withClasspath(outDir2))(i12128Group)
+      ).map(_.keepOutput.checkCompile())
+
+      tests.foreach(_.delete())
+    }
   }
 }
 
diff --git a/compiler/test/dotty/tools/dotc/IdempotencyTests.scala b/compiler/test/dotty/tools/dotc/IdempotencyTests.scala
index 980cfee4634a..38f8c02ee375 100644
--- a/compiler/test/dotty/tools/dotc/IdempotencyTests.scala
+++ b/compiler/test/dotty/tools/dotc/IdempotencyTests.scala
@@ -18,9 +18,8 @@ class IdempotencyTests {
   import IdempotencyTests._
   import CompilationTest.aggregateTests
 
-  val filter = FileFilter.exclude(
-    s"pos${JFile.separator}i6507b"
-  )
+  // ignore flaky tests
+  val filter = FileFilter.NoFilter
 
   @Category(Array(classOf[SlowTests]))
   @Test def idempotency: Unit = {
diff --git a/compiler/test/dotty/tools/dotc/SettingsTests.scala b/compiler/test/dotty/tools/dotc/SettingsTests.scala
index 5303fb070c7a..4625f4f34f72 100644
--- a/compiler/test/dotty/tools/dotc/SettingsTests.scala
+++ b/compiler/test/dotty/tools/dotc/SettingsTests.scala
@@ -21,7 +21,7 @@ class SettingsTests {
     assertEquals(1, reporter.errorCount)
     assertEquals("'not_here' does not exist or is not a directory or .jar file", reporter.allErrors.head.message)
 
-  @Test def jarOutput: Unit = {
+  @Test def jarOutput: Unit =
     val source = "tests/pos/Foo.scala"
     val out = Paths.get("out/jaredFoo.jar").normalize
     if (Files.exists(out)) Files.delete(out)
@@ -29,7 +29,6 @@ class SettingsTests {
     val reporter = Main.process(options)
     assertEquals(0, reporter.errorCount)
     assertTrue(Files.exists(out))
-  }
 
   @Test def `t8124 Don't crash on missing argument`: Unit =
     val source    = Paths.get("tests/pos/Foo.scala").normalize
@@ -45,15 +44,70 @@ class SettingsTests {
       val foo = StringSetting("-foo", "foo", "Foo", "a")
       val bar = IntSetting("-bar", "Bar", 0)
 
-    inContext {
-      val args = List("-foo", "b", "-bar", "1")
-      val summary = Settings.processArguments(args, true)
-      assertTrue(summary.errors.isEmpty)
-      given SettingsState = summary.sstate
+    val args = List("-foo", "b", "-bar", "1")
+    val summary = Settings.processArguments(args, true)
+    assertTrue(summary.errors.isEmpty)
+    withProcessedArgs(summary) {
       assertEquals("b", Settings.foo.value)
       assertEquals(1, Settings.bar.value)
     }
 
+  @Test def `workaround dont crash on many files`: Unit =
+    object Settings extends SettingGroup
+
+    val args = "--" :: List.fill(6000)("file.scala")
+    val summary = Settings.processArguments(args, processAll = true)
+    assertTrue(summary.errors.isEmpty)
+    assertEquals(6000, summary.arguments.size)
+
+  @Test def `dont crash on many files`: Unit =
+    object Settings extends SettingGroup
+
+    val args = List.fill(6000)("file.scala")
+    val summary = Settings.processArguments(args, processAll = true)
+    assertTrue(summary.errors.isEmpty)
+    assertEquals(6000, summary.arguments.size)
+
+  @Test def `dont crash on many options`: Unit =
+    object Settings extends SettingGroup:
+      val option = BooleanSetting("-option", "Some option")
+
+    val limit = 6000
+    val args = List.fill(limit)("-option")
+    val summary = Settings.processArguments(args, processAll = true)
+    assertTrue(summary.errors.isEmpty)
+    assertEquals(limit-1, summary.warnings.size)
+    assertTrue(summary.warnings.head.contains("repeatedly"))
+    assertEquals(0, summary.arguments.size)
+    withProcessedArgs(summary) {
+      assertTrue(Settings.option.value)
+    }
+
+  @Test def `bad option warning consumes an arg`: Unit =
+    object Settings extends SettingGroup:
+      val option = BooleanSetting("-option", "Some option")
+
+    val args = List("-adoption", "dogs", "cats")
+    val summary = Settings.processArguments(args, processAll = true)
+    assertTrue(summary.errors.isEmpty)
+    assertFalse(summary.warnings.isEmpty)
+    assertEquals(2, summary.arguments.size)
+
+  @Test def `bad option settings throws`: Unit =
+    object Settings extends SettingGroup:
+      val option = BooleanSetting("-option", "Some option")
+
+    def checkMessage(s: String): (Throwable => Boolean) = t =>
+      if t.getMessage == s then true
+      else
+        println(s"Expected: $s, Actual: ${t.getMessage}")
+        false
+
+    val default = Settings.defaultState
+    assertThrows[IllegalArgumentException](checkMessage("found: not an option of type java.lang.String, required: Boolean")) {
+      Settings.option.updateIn(default, "not an option")
+    }
+
   @Test def validateChoices: Unit =
     object Settings extends SettingGroup:
       val foo = ChoiceSetting("-foo", "foo", "Foo", List("a", "b"), "a")
@@ -63,25 +117,27 @@ class SettingsTests {
       val quux = ChoiceSetting("-quux", "quux", "Quux", List(), "")
       val quuz = IntChoiceSetting("-quuz", "Quuz", List(), 0)
 
-    inContext {
+    locally {
       val args = List("-foo", "b", "-bar", "1", "-baz", "5")
       val summary = Settings.processArguments(args, true)
       assertTrue(summary.errors.isEmpty)
-      given SettingsState = summary.sstate
-      assertEquals("b", Settings.foo.value)
-      assertEquals(1, Settings.bar.value)
-      assertEquals(5, Settings.baz.value)
+      withProcessedArgs(summary) {
+        assertEquals("b", Settings.foo.value)
+        assertEquals(1, Settings.bar.value)
+        assertEquals(5, Settings.baz.value)
+      }
     }
 
-    inContext {
+    locally {
       val args = List("-foo:b")
       val summary = Settings.processArguments(args, true)
       assertTrue(summary.errors.isEmpty)
-      given SettingsState = summary.sstate
-      assertEquals("b", Settings.foo.value)
+      withProcessedArgs(summary) {
+        assertEquals("b", Settings.foo.value)
+      }
     }
 
-    inContext {
+    locally {
       val args = List("-foo", "c", "-bar", "3", "-baz", "-1")
       val summary = Settings.processArguments(args, true)
       val expectedErrors = List(
@@ -92,14 +148,14 @@ class SettingsTests {
       assertEquals(expectedErrors, summary.errors)
     }
 
-    inContext {
+    locally {
       val args = List("-foo:c")
       val summary = Settings.processArguments(args, true)
       val expectedErrors = List("c is not a valid choice for -foo")
       assertEquals(expectedErrors, summary.errors)
     }
 
-    inContext {
+    locally {
       val args = List("-quux", "a", "-quuz", "0")
       val summary = Settings.processArguments(args, true)
       val expectedErrors = List(
@@ -109,7 +165,19 @@ class SettingsTests {
       assertEquals(expectedErrors, summary.errors)
     }
 
-  private def inContext(f: Context ?=> Unit) = f(using (new ContextBase).initialCtx.fresh)
+  @Test def `Allow IntSetting's to be set with a colon`: Unit =
+    object Settings extends SettingGroup:
+      val foo = IntSetting("-foo", "foo", 80)
+    import Settings._
+
+    val args = List("-foo:100")
+    val summary = processArguments(args, processAll = true)
+    assertTrue(s"Setting args errors:\n  ${summary.errors.take(5).mkString("\n  ")}", summary.errors.isEmpty)
+    withProcessedArgs(summary) {
+      assertEquals(100, foo.value)
+    }
+
+  private def withProcessedArgs(summary: ArgsSummary)(f: SettingsState ?=> Unit) = f(using summary.sstate)
 
   extension [T](setting: Setting[T])
     private def value(using ss: SettingsState): T = setting.valueIn(ss)
diff --git a/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala b/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala
new file mode 100644
index 000000000000..7b1629c426d6
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/TastyBootstrapTests.scala
@@ -0,0 +1,121 @@
+package dotty
+package tools
+package dotc
+
+import org.junit.{ Test, BeforeClass, AfterClass }
+import org.junit.Assert._
+import org.junit.Assume._
+import org.junit.experimental.categories.Category
+
+import java.io.File
+import java.nio.file._
+import java.util.stream.{ Stream => JStream }
+import scala.collection.JavaConverters._
+import scala.util.matching.Regex
+import scala.concurrent.duration._
+import TestSources.sources
+import vulpix._
+
+class TastyBootstrapTests {
+  import ParallelTesting._
+  import TestConfiguration._
+  import CompilationTests._
+  import CompilationTest.aggregateTests
+
+  /** The purpose of this test is three-fold, being able to compile dotty
+   *  bootstrapped, and making sure that TASTY can link against a compiled
+   *  version of Dotty, and compiling the compiler using the SemanticDB generation
+   */
+  @Test def tastyBootstrap: Unit = {
+    implicit val testGroup: TestGroup = TestGroup("tastyBootstrap/tests")
+    val libGroup = TestGroup("tastyBootstrap/lib")
+    val tastyCoreGroup = TestGroup("tastyBootstrap/tastyCore")
+    val dotty1Group = TestGroup("tastyBootstrap/dotty1")
+    val dotty2Group = TestGroup("tastyBootstrap/dotty2")
+
+    // Make sure that the directory is clean
+    dotty.tools.io.Directory(defaultOutputDir + "tastyBootstrap").deleteRecursively()
+
+    val opt = TestFlags(
+      List(
+        // compile with bootstrapped library on cp:
+        defaultOutputDir + libGroup + "/lib/",
+        // and bootstrapped tasty-core:
+        defaultOutputDir + tastyCoreGroup + "/tastyCore/",
+        // as well as bootstrapped compiler:
+        defaultOutputDir + dotty1Group + "/dotty1/",
+        // and the other compiler dependencies:
+        Properties.compilerInterface, Properties.scalaLibrary, Properties.scalaAsm,
+        Properties.dottyInterfaces, Properties.jlineTerminal, Properties.jlineReader,
+      ).mkString(File.pathSeparator),
+      Array("-Ycheck-reentrant", "-language:postfixOps", "-Xsemanticdb")
+    )
+
+    val libraryDirs = List(Paths.get("library/src"), Paths.get("library/src-bootstrapped"))
+    val librarySources = libraryDirs.flatMap(sources(_))
+
+    val lib =
+      compileList("lib", librarySources,
+        defaultOptions.and("-Ycheck-reentrant",
+          "-language:experimental.erasedDefinitions", // support declaration of scala.compiletime.erasedValue
+          //  "-source", "future",  // TODO: re-enable once we allow : @unchecked in pattern definitions. Right now, lots of narrowing pattern definitions fail.
+          ))(libGroup)
+
+    val tastyCoreSources = sources(Paths.get("tasty/src"))
+    val tastyCore = compileList("tastyCore", tastyCoreSources, opt)(tastyCoreGroup)
+
+    val compilerSources = sources(Paths.get("compiler/src")) ++ sources(Paths.get("compiler/src-bootstrapped"))
+    val compilerManagedSources = Properties.dottyCompilerManagedSources match
+      case p if Files.isDirectory(p) => sources(p)
+      case _                         => Nil
+
+    val dotty1 = compileList("dotty1", compilerSources ++ compilerManagedSources, opt)(dotty1Group)
+    val dotty2 = compileList("dotty2", compilerSources ++ compilerManagedSources, opt)(dotty2Group)
+
+    val tests = {
+      lib.keepOutput :: tastyCore.keepOutput :: dotty1.keepOutput :: aggregateTests(
+        dotty2,
+        compileShallowFilesInDir("compiler/src/dotty/tools", opt),
+        compileShallowFilesInDir("compiler/src/dotty/tools/dotc", opt),
+        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/ast", opt),
+        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/config", opt),
+        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/parsing", opt),
+        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/printing", opt),
+        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/reporting", opt),
+        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/rewrites", opt),
+        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/transform", opt),
+        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/typer", opt),
+        compileShallowFilesInDir("compiler/src/dotty/tools/dotc/util", opt),
+        compileShallowFilesInDir("compiler/src/dotty/tools/backend", opt),
+        compileShallowFilesInDir("compiler/src/dotty/tools/backend/jvm", opt),
+        compileList("managed-sources", compilerManagedSources, opt)
+      ).keepOutput :: Nil
+    }.map(_.checkCompile())
+
+    def assertExists(path: String) = assertTrue(Files.exists(Paths.get(path)))
+    assertExists(s"out/$libGroup/lib/")
+    assertExists(s"out/$tastyCoreGroup/tastyCore/")
+    assertExists(s"out/$dotty1Group/dotty1/")
+    assertExists(s"out/$dotty2Group/dotty2/")
+    compileList("idempotency", List("tests/idempotency/BootstrapChecker.scala", "tests/idempotency/IdempotencyCheck.scala"), defaultOptions).checkRuns()
+
+    tests.foreach(_.delete())
+  }
+}
+
+object TastyBootstrapTests extends ParallelTesting {
+  // Test suite configuration --------------------------------------------------
+
+  def maxDuration = 45.seconds
+  def numberOfSlaves = Runtime.getRuntime.availableProcessors()
+  def safeMode = Properties.testsSafeMode
+  def isInteractive = SummaryReport.isInteractive
+  def testFilter = Properties.testsFilter
+  def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile
+
+  implicit val summaryReport: SummaryReporting = new SummaryReport
+  @AfterClass def tearDown(): Unit = {
+    super.cleanup()
+    summaryReport.echoSummary()
+  }
+}
diff --git a/compiler/test/dotty/tools/dotc/classpath/MultiReleaseJarTest.scala b/compiler/test/dotty/tools/dotc/classpath/MultiReleaseJarTest.scala
index b79a34e1f2ab..79aeb87aed1d 100644
--- a/compiler/test/dotty/tools/dotc/classpath/MultiReleaseJarTest.scala
+++ b/compiler/test/dotty/tools/dotc/classpath/MultiReleaseJarTest.scala
@@ -2,18 +2,19 @@ package dotty.tools.dotc.classpath
 
 import dotty.tools.dotc.core.Contexts.Context
 
-import java.io.ByteArrayOutputStream
+import java.io.{ByteArrayOutputStream, IOException}
 import java.nio.file.{FileSystems, Files, Path}
 import java.util.jar.Attributes
 import java.util.jar.Attributes.Name
 
-import org.junit.Test
 import org.junit.Assert._
+import org.junit.Test
 
-import scala.util.Properties
 import scala.collection.JavaConverters._
+import scala.util.Properties
 
 class MultiReleaseJarTest extends dotty.tools.backend.jvm.DottyBytecodeTest {
+
   @Test
   def mrJar(): Unit = {
     if (!Properties.isJavaAtLeast("9")) { println("skipping mrJar() on old JDK"); return }
@@ -61,7 +62,10 @@ class MultiReleaseJarTest extends dotty.tools.backend.jvm.DottyBytecodeTest {
       if Properties.isJavaAtLeast("10") then
         assertEquals(Set("foo1", "foo2", "bar1", "bar2"), apiMethods(jar3, "10"))
     } finally
-      List(jar1, jar2, jar3).foreach(Files.deleteIfExists)
+      List(jar1, jar2, jar3).forall(path =>
+        try Files.deleteIfExists(path)
+        catch case _: IOException => false
+      )
   }
 
   @Test
@@ -82,7 +86,6 @@ class MultiReleaseJarTest extends dotty.tools.backend.jvm.DottyBytecodeTest {
     assertTrue(classExists("java.lang.invoke.LambdaMetafactory", "9"))
   }
 
-
   private def createManifest = {
     val manifest = new java.util.jar.Manifest()
     manifest.getMainAttributes.put(Name.MANIFEST_VERSION, "1.0")
@@ -92,6 +95,7 @@ class MultiReleaseJarTest extends dotty.tools.backend.jvm.DottyBytecodeTest {
     val manifestBytes = os.toByteArray
     manifestBytes
   }
+
   private def createZip(zipLocation: Path, content: List[(String, Array[Byte])]): Unit = {
     val env = new java.util.HashMap[String, String]()
     Files.deleteIfExists(zipLocation)
@@ -113,4 +117,5 @@ class MultiReleaseJarTest extends dotty.tools.backend.jvm.DottyBytecodeTest {
       zipfs.close()
     }
   }
+
 }
diff --git a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala
index a18f96dfd35b..05f218059f02 100644
--- a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala
+++ b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala
@@ -64,4 +64,22 @@ class ScalaSettingsTests:
     assertTrue("Has the feature", set.contains("implicitConversions"))
     assertTrue("Has the feature", set.contains("dynamics"))
 
+  @Test def `WConf setting is parsed`: Unit =
+    import reporting.{Action, Diagnostic, NoExplanation}
+    val sets = new ScalaSettings
+    val args = List("-Wconf:cat=deprecation:s,cat=feature:e", "-Wconf:msg=a problem\\.:s")
+    val sumy = ArgsSummary(sets.defaultState, args, errors = Nil, warnings = Nil)
+    val proc = sets.processArguments(sumy, processAll = true, skipped = Nil)
+    val conf = sets.Wconf.valueIn(proc.sstate)
+    val sut  = reporting.WConf.fromSettings(conf).getOrElse(???)
+    val msg  = NoExplanation("There was a problem!")
+    val depr = new Diagnostic.DeprecationWarning(msg, util.NoSourcePosition)
+    assertEquals(Action.Silent, sut.action(depr))
+    val feat = new Diagnostic.FeatureWarning(msg, util.NoSourcePosition)
+    assertEquals(Action.Error, sut.action(feat))
+    val warn = new Diagnostic.Warning(msg, util.NoSourcePosition)
+    assertEquals(Action.Warning, sut.action(warn))
+    val nowr = new Diagnostic.Warning(NoExplanation("This is a problem."), util.NoSourcePosition)
+    assertEquals(Action.Silent, sut.action(nowr))
+
 end ScalaSettingsTests
diff --git a/compiler/test/dotty/tools/dotc/core/ConstraintsTest.scala b/compiler/test/dotty/tools/dotc/core/ConstraintsTest.scala
new file mode 100644
index 000000000000..5ab162b9f05c
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/core/ConstraintsTest.scala
@@ -0,0 +1,55 @@
+package dotty.tools
+package dotc.core
+
+import vulpix.TestConfiguration
+
+import dotty.tools.dotc.core.Contexts.{*, given}
+import dotty.tools.dotc.core.Decorators.{*, given}
+import dotty.tools.dotc.core.Symbols.*
+import dotty.tools.dotc.core.Types.*
+import dotty.tools.dotc.ast.tpd.*
+import dotty.tools.dotc.typer.ProtoTypes.constrained
+
+import org.junit.Test
+
+import dotty.tools.DottyTest
+
+class ConstraintsTest:
+
+  @Test def mergeParamsTransitivity: Unit =
+    inCompilerContext(TestConfiguration.basicClasspath,
+        scalaSources = "trait A { def foo[S, T, R]: Any  }") {
+      val tvars = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda], EmptyTree, alwaysAddTypeVars = true)._2
+      val List(s, t, r) = tvars.tpes
+
+      val innerCtx = ctx.fresh.setExploreTyperState()
+      inContext(innerCtx) {
+        s <:< t
+      }
+
+      t <:< r
+
+      ctx.typerState.mergeConstraintWith(innerCtx.typerState)
+      assert(s frozen_<:< r,
+        i"Merging constraints `?S <: ?T` and `?T <: ?R` should result in `?S <:< ?R`: ${ctx.typerState.constraint}")
+    }
+  end mergeParamsTransitivity
+
+  @Test def mergeBoundsTransitivity: Unit =
+    inCompilerContext(TestConfiguration.basicClasspath,
+        scalaSources = "trait A { def foo[S, T]: Any  }") {
+      val tvars = constrained(requiredClass("A").typeRef.select("foo".toTermName).info.asInstanceOf[TypeLambda], EmptyTree, alwaysAddTypeVars = true)._2
+      val List(s, t) = tvars.tpes
+
+      val innerCtx = ctx.fresh.setExploreTyperState()
+      inContext(innerCtx) {
+        s <:< t
+      }
+
+      defn.IntType <:< s
+
+      ctx.typerState.mergeConstraintWith(innerCtx.typerState)
+      assert(defn.IntType frozen_<:< t,
+        i"Merging constraints `?S <: ?T` and `Int <: ?S` should result in `Int <:< ?T`: ${ctx.typerState.constraint}")
+    }
+  end mergeBoundsTransitivity
diff --git a/compiler/test/dotty/tools/dotc/core/SealedDescendantsTest.scala b/compiler/test/dotty/tools/dotc/core/SealedDescendantsTest.scala
new file mode 100644
index 000000000000..7d90d0ed8870
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/core/SealedDescendantsTest.scala
@@ -0,0 +1,108 @@
+package dotty.tools.dotc.core
+
+import dotty.tools.dotc.core.Contexts.{Context, ctx}
+import dotty.tools.dotc.core.Symbols.*
+
+import org.junit.Assert._
+import org.junit.Test
+
+import dotty.tools.DottyTest
+
+class SealedDescendantsTest extends DottyTest {
+
+  @Test
+  def zincIssue979: Unit =
+    val source =
+      """
+      sealed trait Z
+      sealed trait A extends Z
+      class B extends A
+      class C extends A
+      class D extends A
+      """
+
+    expectedDescendents(source, "Z",
+      "Z" ::
+      "A" ::
+      "B" ::
+      "C" ::
+      "D" :: Nil
+    )
+  end zincIssue979
+
+  @Test
+  def enumOpt: Unit =
+    val source =
+      """
+      enum Opt[+T] {
+        case Some(t: T)
+        case None
+      }
+      """
+
+    expectedDescendents(source, "Opt",
+      "Opt"       ::
+      "Some"      ::
+      "None.type" :: Nil
+    )
+  end enumOpt
+
+  @Test
+  def hierarchicalSharedChildren: Unit =
+    // Q is a child of both Z and A and should appear once
+    // X is a child of both A and Q and should appear once
+    val source =
+      """
+      sealed trait Z
+      sealed trait A extends Z
+      sealed trait Q extends A with Z
+      trait X extends A with Q
+      case object Y extends Q
+      """
+
+    expectedDescendents(source, "Z",
+      "Z"      ::
+      "A"      ::
+      "Q"      ::
+      "X"      ::
+      "Y.type" :: Nil
+    )
+  end hierarchicalSharedChildren
+
+  @Test
+  def hierarchicalSharedChildrenB: Unit =
+    val source =
+      """
+      sealed trait Z
+      case object A extends Z with D with E
+      sealed trait B extends Z
+      trait C extends B
+      sealed trait D extends B
+      sealed trait E extends D
+      """
+
+    expectedDescendents(source, "Z",
+      "Z"      ::
+      "A.type" ::
+      "B"      ::
+      "C"      ::
+      "D"      ::
+      "E"      :: Nil
+    )
+  end hierarchicalSharedChildrenB
+
+  def expectedDescendents(source: String, root: String, expected: List[String]) =
+    exploreRoot(source, root) { rootCls =>
+      val descendents = rootCls.sealedDescendants.map(sym => s"${sym.name}${if (sym.isTerm) ".type" else ""}")
+      assertEquals(expected.toString, descendents.toString)
+    }
+
+  def exploreRoot(source: String, root: String)(op: Context ?=> ClassSymbol => Unit) =
+    val source0 = source.linesIterator.map(_.trim).mkString("\n|")
+    val source1 = s"""package testsealeddescendants
+                     |$source0""".stripMargin
+    checkCompile("typer", source1) { (_, context) =>
+      given Context = context
+      op(requiredClass(s"testsealeddescendants.$root"))
+    }
+}
diff --git a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala
index 361d5a00b880..064eedd6dceb 100644
--- a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala
+++ b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala
@@ -105,12 +105,16 @@ class CommentPicklingTest {
   }
 
   private class UnpicklingDriver extends Driver {
-    override def initCtx = super.initCtx.addMode(Mode.ReadComments)
+    override def initCtx =
+      val ctx = super.initCtx.fresh
+      ctx.setSetting(ctx.settings.YreadComments, true)
+      ctx
+
     def unpickle[T](args: Array[String], files: List[File])(fn: (List[tpd.Tree], Context) => T): T = {
       implicit val ctx: Context = setup(args, initCtx).map(_._2).getOrElse(initCtx)
       ctx.initialize()
       val trees = files.flatMap { f =>
-        val unpickler = new DottyUnpickler(f.toByteArray())
+        val unpickler = new DottyUnpickler(f.toByteArray(), ctx.tastyVersion)
         unpickler.enter(roots = Set.empty)
         unpickler.rootTrees(using ctx)
       }
diff --git a/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala
index 2a44c681e8f1..927cbe838e6e 100644
--- a/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala
+++ b/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala
@@ -1,7 +1,7 @@
 package dotty.tools.dotc.core.tasty
 
 import java.io.{File => JFile, ByteArrayOutputStream, IOException}
-import java.nio.file.{Files, NoSuchFileException, Path, Paths}
+import java.nio.file.{Files, NoSuchFileException, Paths}
 
 import scala.sys.process._
 
@@ -19,7 +19,7 @@ import dotty.tools.dotc.core.Mode
 import dotty.tools.dotc.core.Names.Name
 import dotty.tools.dotc.interfaces.Diagnostic.ERROR
 import dotty.tools.dotc.reporting.TestReporter
-import dotty.tools.io.{Directory, File, Path}
+import dotty.tools.io.{Directory, File, Path, JarArchive}
 
 import dotty.tools.vulpix.TestConfiguration
 
@@ -42,28 +42,24 @@ class PathPicklingTest {
       assertFalse("Compilation failed.", rep.hasErrors)
     }
 
-    val decompiled =
-      val outstream = new ByteArrayOutputStream()
-      val options = TestConfiguration.defaultOptions
-        .and("-print-tasty")
-        .and("-color:never")
-        .and(s"$out/out.jar")
-      val reporter = TestReporter.reporter(System.out, logLevel = ERROR)
-      val rep = Console.withOut(outstream) {
-        decompiler.Main.process(options.all, reporter)
-      }
-      assertFalse("Decompilation failed.", rep.hasErrors)
-      new String(outstream.toByteArray(), "UTF-8")
+    val printedTasty =
+      val sb = new StringBuffer
+      val jar = JarArchive.open(Path(s"$out/out.jar"), create = false)
+      try
+        for file <- jar.iterator() if file.name.endsWith(".tasty") do
+          sb.append(TastyPrinter.showContents(file.toByteArray, noColor = true))
+      finally jar.close()
+      sb.toString()
 
-    assertTrue(decompiled.contains(": i10430/lib.scala"))
-    assertTrue(decompiled.contains(": i10430/app.scala"))
-    assertTrue(decompiled.contains("[i10430/lib.scala]"))
-    assertTrue(decompiled.contains("[i10430/app.scala]"))
+    assertTrue(printedTasty.contains(": i10430/lib.scala"))
+    assertTrue(printedTasty.contains("[i10430/lib.scala]"))
+    assertFalse(printedTasty.contains(": i10430\\lib.scala"))
+    assertFalse(printedTasty.contains("[i10430\\lib.scala]"))
 
-    assertFalse(decompiled.contains(": i10430\\lib.scala"))
-    assertFalse(decompiled.contains(": i10430\\app.scala"))
-    assertFalse(decompiled.contains("[i10430\\lib.scala]"))
-    assertFalse(decompiled.contains("[i10430\\app.scala]"))
+    assertTrue(printedTasty.contains(": i10430/app.scala"))
+    assertTrue(printedTasty.contains("[i10430/app.scala]"))
+    assertFalse(printedTasty.contains(": i10430\\app.scala"))
+    assertFalse(printedTasty.contains("[i10430\\app.scala]"))
   }
 
   private def delete(file: JFile): Unit = {
diff --git a/compiler/test/dotty/tools/dotc/interactive/CustomCompletionTests.scala b/compiler/test/dotty/tools/dotc/interactive/CustomCompletionTests.scala
index ddd44ea7daf0..ccf211136d51 100644
--- a/compiler/test/dotty/tools/dotc/interactive/CustomCompletionTests.scala
+++ b/compiler/test/dotty/tools/dotc/interactive/CustomCompletionTests.scala
@@ -169,3 +169,4 @@ class CustomCompletionTests extends DottyTest:
 
       assert(offset == prefix.length)
       assert(labels.contains("scala.Function2"))
+
diff --git a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala
index cd0aee578742..9c260cb75cc8 100644
--- a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala
+++ b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala
@@ -19,17 +19,18 @@ import scala.io.Source
 import org.junit.Test
 
 class PrintingTest {
-  val testsDir = "tests/printing"
-  val options = List("-Xprint:typer", "-color:never", "-classpath", TestConfiguration.basicClasspath)
 
-  private def compileFile(path: JPath): Boolean = {
+  def options(phase: String) =
+    List(s"-Xprint:$phase", "-color:never", "-classpath", TestConfiguration.basicClasspath)
+
+  private def compileFile(path: JPath, phase: String): Boolean = {
     val baseFilePath  = path.toString.stripSuffix(".scala")
     val checkFilePath = baseFilePath + ".check"
     val byteStream    = new ByteArrayOutputStream()
     val reporter = TestReporter.reporter(new PrintStream(byteStream), INFO)
 
     try {
-      Main.process((path.toString::options).toArray, reporter, null)
+      Main.process((path.toString::options(phase)).toArray, reporter, null)
     } catch {
       case e: Throwable =>
         println(s"Compile $path exception:")
@@ -40,11 +41,10 @@ class PrintingTest {
     FileDiff.checkAndDump(path.toString, actualLines.toIndexedSeq, checkFilePath)
   }
 
-  @Test
-  def printing: Unit = {
+  def testIn(testsDir: String, phase: String) =
     val res = Directory(testsDir).list.toList
       .filter(f => f.extension == "scala")
-      .map { f => compileFile(f.jpath) }
+      .map { f => compileFile(f.jpath, phase) }
 
     val failed = res.filter(!_)
 
@@ -53,5 +53,12 @@ class PrintingTest {
     assert(failed.length == 0, msg)
 
     println(msg)
-  }
+
+  end testIn
+
+  @Test
+  def printing: Unit = testIn("tests/printing", "typer")
+
+  @Test
+  def untypedPrinting: Unit = testIn("tests/printing/untyped", "parser")
 }
diff --git a/compiler/test/dotty/tools/dotc/reporting/ErrorMessagesTest.scala b/compiler/test/dotty/tools/dotc/reporting/ErrorMessagesTest.scala
index cc7d1eac2004..76f06802c79c 100644
--- a/compiler/test/dotty/tools/dotc/reporting/ErrorMessagesTest.scala
+++ b/compiler/test/dotty/tools/dotc/reporting/ErrorMessagesTest.scala
@@ -8,7 +8,7 @@ import org.junit.Assert._
 
 trait ErrorMessagesTest extends DottyTest {
 
-  private def newContext = {
+  protected def newContext = {
     val rep = new StoreReporter(null)
               with UniqueMessagePositions with HideNonSensicalMessages
     initialCtx.setReporter(rep).setSetting(ctx.settings.color, "never")
diff --git a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala
index 24890279accd..da9b2498d528 100644
--- a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala
+++ b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala
@@ -53,7 +53,7 @@ extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with M
 
   /** Prints the message with the given position indication. */
   def printMessageAndPos(dia: Diagnostic, extra: String)(using Context): Unit = {
-    val msg = messageAndPos(dia.msg, dia.pos, diagnosticLevel(dia))
+    val msg = messageAndPos(dia)
     val extraInfo = inlineInfo(dia.pos)
 
     if (dia.level >= logLevel) {
@@ -73,15 +73,9 @@ extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with M
       case _ => ""
     }
 
-    dia match {
-      case dia: Error => {
-        _errorBuf.append(dia)
-        _consoleReporter.doReport(dia)
-        printMessageAndPos(dia, extra)
-      }
-      case dia =>
-        printMessageAndPos(dia, extra)
-    }
+    if dia.level >= ERROR then _errorBuf.append(dia)
+    if dia.level >= WARNING then _consoleReporter.doReport(dia)
+    printMessageAndPos(dia, extra)
   }
 }
 
diff --git a/compiler/test/dotty/tools/dotc/semanticdb/SemanticdbTests.scala b/compiler/test/dotty/tools/dotc/semanticdb/SemanticdbTests.scala
index 5deeaed114a8..d1f58bf52a91 100644
--- a/compiler/test/dotty/tools/dotc/semanticdb/SemanticdbTests.scala
+++ b/compiler/test/dotty/tools/dotc/semanticdb/SemanticdbTests.scala
@@ -25,6 +25,30 @@ import dotty.tools.dotc.util.SourceFile
 @main def updateExpect =
   SemanticdbTests().runExpectTest(updateExpectFiles = true)
 
+/** Useful for printing semanticdb metac output for one file
+ *
+ *  @param root the output directory containing semanticdb output;
+ *  exactly one semanticdb file should be present
+ *  @param source the single source file producing the semanticdb
+ */
+@main def metac(root: String, source: String) =
+  val rootSrc = Paths.get(root)
+  val sourceSrc = Paths.get(source)
+  val semanticFile = FileSystems.getDefault.getPathMatcher("glob:**.semanticdb")
+  def inputFile(): Path =
+    val ls = Files.walk(rootSrc.resolve("META-INF").resolve("semanticdb"))
+    val files =
+      try ls.filter(p => semanticFile.matches(p)).collect(Collectors.toList).asScala
+      finally ls.close()
+    require(files.sizeCompare(1) == 0, s"No semanticdb files! $rootSrc")
+    files.head
+  val metacSb: StringBuilder = StringBuilder(5000)
+  val semanticdbPath = inputFile()
+  val doc = Tools.loadTextDocumentUnsafe(sourceSrc.toAbsolutePath, semanticdbPath)
+  Tools.metac(doc, Paths.get(doc.uri))(using metacSb)
+  Files.write(rootSrc.resolve("metac.expect"), metacSb.toString.getBytes(StandardCharsets.UTF_8))
+
+
 @Category(Array(classOf[BootstrappedOnlyTests]))
 class SemanticdbTests:
   val javaFile = FileSystems.getDefault.getPathMatcher("glob:**.java")
diff --git a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala
index 58e1133180ec..0c895d436238 100644
--- a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala
+++ b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala
@@ -12,24 +12,25 @@ import dotty.tools.io.Directory
 import java.io._
 import java.nio.file.{Path => JPath}
 
-import scala.io.Source._
 import org.junit.Test
 
 class PatmatExhaustivityTest {
   val testsDir = "tests/patmat"
-  // stop-after: patmatexhaust-huge.scala crash compiler
-  val options = List("-pagewidth", "80", "-color:never", "-Ystop-after:explicitSelf", "-Ycheck-all-patmat", "-classpath", TestConfiguration.basicClasspath)
+  // pagewidth/color: for a stable diff, as the defaults are based on the terminal (e.g. its size)
+  // stop-after: patmatexhaust-huge.scala crashes the compiler (but this also hides other warnings...)
+  val options = List("-pagewidth", "80", "-color:never", "-Ystop-after:explicitSelf", "-classpath", TestConfiguration.basicClasspath)
 
-  private def compile(files: Seq[String]): Seq[String] = {
+  private def compile(files: List[JPath]): Seq[String] = {
+    val opts         = toolArgsFor(files)
     val stringBuffer = new StringWriter()
-    val reporter = TestReporter.simplifiedReporter(new PrintWriter(stringBuffer))
+    val printWriter  = new PrintWriter(stringBuffer)
+    val reporter = TestReporter.simplifiedReporter(printWriter)
 
     try {
-      Main.process((options ++ files).toArray, reporter, null)
+      Main.process((options ::: opts ::: files.map(_.toString)).toArray, reporter, null)
     } catch {
       case e: Throwable =>
-        println(s"Compile $files exception:")
-        e.printStackTrace()
+        e.printStackTrace(printWriter)
     }
 
     stringBuffer.toString.trim.replaceAll("\\s+\n", "\n") match {
@@ -39,35 +40,34 @@ class PatmatExhaustivityTest {
   }
 
   private def compileFile(path: JPath): Boolean = {
-    val actualLines   = compile(path.toString :: Nil)
+    val actualLines   = compile(List(path))
     val baseFilePath  = path.toString.stripSuffix(".scala")
     val checkFilePath = baseFilePath + ".check"
 
-    FileDiff.checkAndDump(path.toString, actualLines, checkFilePath)
+    FileDiff.checkAndDumpOrUpdate(path.toString, actualLines, checkFilePath)
   }
 
   /** A single test with multiple files grouped in a folder */
   private def compileDir(path: JPath): Boolean = {
     val files = Directory(path).list.toList
       .filter(f => f.extension == "scala" || f.extension == "java" )
-      .map(_.jpath.toString)
+      .map(_.jpath)
 
     val actualLines   = compile(files)
     val checkFilePath = s"${path}${File.separator}expected.check"
 
-    FileDiff.checkAndDump(path.toString, actualLines, checkFilePath)
+    FileDiff.checkAndDumpOrUpdate(path.toString, actualLines, checkFilePath)
   }
 
   @Test
   def patmatExhaustivity: Unit = {
     val res = Directory(testsDir).list.toList
       .filter(f => f.extension == "scala" || f.isDirectory)
-      .map { f =>
-        if (f.isDirectory)
-          compileDir(f.jpath)
-        else
-          compileFile(f.jpath)
+      .filter { f =>
+        val path = if f.isDirectory then f.path + "/" else f.path
+        Properties.testsFilter.isEmpty || Properties.testsFilter.exists(path.contains)
       }
+      .map(f => if f.isDirectory then compileDir(f.jpath) else compileFile(f.jpath))
 
     val failed = res.filter(!_)
     val ignored = Directory(testsDir).list.toList.filter(_.extension == "ignore")
diff --git a/compiler/test/dotty/tools/dotc/transform/SpaceEngineTest.scala b/compiler/test/dotty/tools/dotc/transform/SpaceEngineTest.scala
new file mode 100644
index 000000000000..09fb5ee960fd
--- /dev/null
+++ b/compiler/test/dotty/tools/dotc/transform/SpaceEngineTest.scala
@@ -0,0 +1,25 @@
+package dotty.tools
+package dotc
+package transform
+
+import org.junit.*, Assert.*
+
+import core.*, Constants.*, Contexts.*, Decorators.*, Symbols.*, Types.*
+
+class SpaceEngineTest extends DottyTest:
+  @Test def testAdaptTest(): Unit =
+    given Context = ctx
+    val defn = ctx.definitions
+    import defn._
+    val e = patmat.SpaceEngine()
+
+    val BoxedIntType = BoxedIntClass.typeRef
+    val ConstOneType = ConstantType(Constant(1))
+
+    assertTrue(e.isPrimToBox(IntType, BoxedIntType))
+    assertFalse(e.isPrimToBox(BoxedIntType, IntType))
+    assertTrue(e.isPrimToBox(ConstOneType, BoxedIntType))
+
+    assertEquals(BoxedIntType, e.adaptType(IntType, BoxedIntType).widenSingleton)
+    assertEquals(IntType,      e.adaptType(BoxedIntType, IntType).widenSingleton)
+    assertEquals(IntType,      e.adaptType(BoxedIntType, ConstOneType).widenSingleton)
diff --git a/compiler/test/dotty/tools/io/ClasspathTest.scala b/compiler/test/dotty/tools/io/ClasspathTest.scala
new file mode 100755
index 000000000000..35437ef17390
--- /dev/null
+++ b/compiler/test/dotty/tools/io/ClasspathTest.scala
@@ -0,0 +1,70 @@
+package dotty.tools.io
+
+import org.junit.Test
+
+import java.io.File
+import dotty.tools.io.AbstractFile
+import java.nio.file.{Files, Paths}
+import java.nio.file.StandardCopyOption._
+import java.nio.file.attribute.PosixFilePermissions
+import dotty.tools.io.{ PlainDirectory, Directory, ClassPath }
+
+class ClasspathTest {
+
+  def pathsep = sys.props("path.separator")
+
+  //
+  // Cope with wildcard classpath entries, exercised with -classpath
+  //
+  // Verify that Windows users are not forced to use backslash in classpath.
+  //
+  @Test def testWildcards(): Unit =
+    val outDir = Files.createTempDirectory("classpath-test")
+    try
+      val compilerLib = "dist/target/pack/lib"
+      val libdir = Paths.get(compilerLib).toFile
+      if libdir.exists then
+        val libjarFiles = libdir.listFiles.toList.take(5)
+        try
+          for src <- libjarFiles do
+            val dest = Paths.get(s"$outDir/${src.getName}")
+            printf("copy: %s\n", Files.copy(src.toPath, dest))
+        
+          val cp = Seq(s"$outDir/*", "not-a-real-directory/*").mkString(pathsep).replace('\\', '/')
+         
+          val libjars = libjarFiles.map { _.getName }.toSet
+
+          // expand wildcard classpath entries, ignoring invalid entries
+          val entries = ClassPath.expandPath(cp).map { Paths.get(_).toFile.getName }
+
+          // require one-to-one matches
+          assert(libjars == entries.toSet)
+
+          printf("%d entries\n", entries.size)
+          printf("%d libjars\n", libjars.size)
+
+          for entry <- libjars do
+            printf("libdir[%s]\n", entry)
+
+          for entry <- entries do
+            printf("expand[%s]\n", entry)
+
+          // verify that expanded classpath has expected jar names
+          for jar <- libjars do
+            assert(entries.contains(jar))
+
+        catch
+          case _:NullPointerException => // no test if unable to copy jars to outDir
+
+
+    finally
+      deleteFile(outDir.toFile)
+
+
+  private def deleteFile(target: File): Unit =
+    if target.isDirectory then
+      for member <- target.listFiles.toList
+      do deleteFile(member)
+    target.delete()
+  end deleteFile
+}
diff --git a/compiler/test/dotty/tools/repl/DocTests.scala b/compiler/test/dotty/tools/repl/DocTests.scala
index 67f203c9b52b..ea900e866119 100644
--- a/compiler/test/dotty/tools/repl/DocTests.scala
+++ b/compiler/test/dotty/tools/repl/DocTests.scala
@@ -6,78 +6,44 @@ import org.junit.Assert.assertEquals
 
 class DocTests extends ReplTest {
 
-  @Test def docOfDef =
-    eval("/** doc */ def foo = 0").andThen { implicit s =>
-      assertEquals("doc", doc("foo"))
-    }
+  @Test def docOfDef = eval("/** doc */ def foo = 0") andThen assertEquals("doc", doc("foo"))
 
-  @Test def docOfVal =
-    eval("/** doc */ val foo = 0").andThen { implicit s =>
-      assertEquals("doc", doc("foo"))
-    }
+  @Test def docOfVal = eval("/** doc */ val foo = 0") andThen assertEquals("doc", doc("foo"))
 
-  @Test def docOfObject =
-    eval("/** doc */ object Foo").andThen { implicit s =>
-      assertEquals("doc", doc("Foo"))
-    }
+  @Test def docOfObject = eval("/** doc */ object Foo") andThen assertEquals("doc", doc("Foo"))
 
-  @Test def docOfClass =
-    eval("/** doc */ class Foo").andThen { implicit s =>
-      assertEquals("doc", doc("new Foo"))
-    }
+  @Test def docOfClass = eval("/** doc */ class Foo") andThen assertEquals("doc", doc("new Foo"))
 
-  @Test def docOfTrait =
-    eval("/** doc */ trait Foo").andThen { implicit s =>
-      assertEquals("doc", doc("new Foo"))
-    }
-/*
-  @Test def docOfExtension1 =
-    eval("/** doc */ extension (x: Int) def foo = 0").andThen { implicit s =>
-      assertEquals("doc", doc("extension_foo"))
-    }
+  @Test def docOfTrait = eval("/** doc */ trait Foo") andThen assertEquals("doc", doc("new Foo"))
 
-  @Test def docOfExtension2 =
-    eval("extension (x: Int) /** doc */ def foo = 0").andThen { implicit s =>
-      assertEquals("doc", doc("extension_foo"))
-    }
+  /*@Test*/ def docOfExtension1 =
+    eval("/** doc */ extension (x: Int) def foo = 0") andThen assertEquals("doc", doc("extension_foo"))
 
-  @Test def docOfExtension3 =
-    eval("/** doc0 */ extension (x: Int) { /** doc1 */ def foo = 0; /** doc2 */ def bar = 0; def baz = 0 }").andThen { implicit s =>
+  /*@Test*/ def docOfExtension2 =
+    eval("extension (x: Int) /** doc */ def foo = 0") andThen assertEquals("doc", doc("extension_foo"))
+
+  /*@Test*/ def docOfExtension3 =
+    eval("/** doc0 */ extension (x: Int) { /** doc1 */ def foo = 0; /** doc2 */ def bar = 0; def baz = 0 }") andThen {
       assertEquals("doc1", doc("extension_foo"))
       assertEquals("doc2", doc("extension_bar"))
       assertEquals("doc0", doc("extension_baz"))
     }
-*/
-  @Test def docOfDefInObject =
-    eval("object O { /** doc */ def foo = 0 }").andThen { implicit s =>
-      assertEquals("doc", doc("O.foo"))
-    }
 
-  @Test def docOfValInObject =
-    eval("object O { /** doc */ val foo = 0 }").andThen { implicit s =>
-      assertEquals("doc", doc("O.foo"))
-    }
+  @Test def docOfDefInObject = eval("object O { /** doc */ def foo = 0 }") andThen assertEquals("doc", doc("O.foo"))
 
-  @Test def docOfObjectInObject =
-    eval("object O { /** doc */ object Foo }").andThen { implicit s =>
-      assertEquals("doc", doc("O.Foo"))
-    }
+  @Test def docOfValInObject = eval("object O { /** doc */ val foo = 0 }") andThen assertEquals("doc", doc("O.foo"))
 
-  @Test def docOfClassInObject =
-    eval("object O { /** doc */ class Foo }").andThen { implicit s =>
-      assertEquals("doc", doc("new O.Foo"))
-    }
+  @Test def docOfObjectInObject = eval("object O { /** doc */ object Foo }") andThen assertEquals("doc", doc("O.Foo"))
 
-  @Test def docOfTraitInObject =
-    eval("object O { /** doc */ trait Foo }").andThen { implicit s =>
-      assertEquals("doc", doc("new O.Foo"))
-    }
+  @Test def docOfClassInObject = eval("object O { /** doc */ class Foo }") andThen assertEquals("doc", doc("new O.Foo"))
+
+  @Test def docOfTraitInObject = eval("object O { /** doc */ trait Foo }") andThen assertEquals("doc", doc("new O.Foo"))
 
   @Test def docOfDefInClass =
     eval(
       """class C { /** doc */ def foo = 0 }
         |val c = new C
-      """.stripMargin).andThen { implicit s =>
+      """.stripMargin) andThen {
       assertEquals("doc", doc("c.foo"))
     }
 
@@ -85,7 +51,7 @@ class DocTests extends ReplTest {
     eval(
       """class C { /** doc */ val foo = 0 }
         |val c = new C
-      """.stripMargin).andThen { implicit s =>
+      """.stripMargin) andThen {
       assertEquals("doc", doc("c.foo"))
     }
 
@@ -93,7 +59,7 @@ class DocTests extends ReplTest {
     eval(
       """class C { /** doc */ object Foo }
         |val c = new C
-      """.stripMargin).andThen { implicit s =>
+      """.stripMargin) andThen {
       assertEquals("doc", doc("c.Foo"))
     }
 
@@ -101,7 +67,7 @@ class DocTests extends ReplTest {
     eval(
       """class C { /** doc */ class Foo }
         |val c = new C
-      """.stripMargin).andThen { implicit s =>
+      """.stripMargin) andThen {
       assertEquals("doc", doc("new c.Foo"))
     }
 
@@ -109,7 +75,7 @@ class DocTests extends ReplTest {
     eval(
       """class C { /** doc */ trait Foo }
         |val c = new C
-      """.stripMargin).andThen { implicit s =>
+      """.stripMargin) andThen {
       assertEquals("doc", doc("new c.Foo"))
     }
 
@@ -119,7 +85,7 @@ class DocTests extends ReplTest {
         |  /** doc0 */ def foo(x: Int) = x
         |  /** doc1 */ def foo(x: String) = x
         |}
-      """.stripMargin).andThen { implicit s =>
+      """.stripMargin) andThen {
       assertEquals("doc0", doc("O.foo(_: Int)"))
       assertEquals("doc1", doc("O.foo(_: String)"))
     }
@@ -128,7 +94,7 @@ class DocTests extends ReplTest {
     eval(
       """class C { /** doc */ def foo = 0 }
         |object O extends C
-      """.stripMargin).andThen { implicit s =>
+      """.stripMargin) andThen {
       assertEquals("doc", doc("O.foo"))
     }
 
@@ -142,7 +108,7 @@ class DocTests extends ReplTest {
         |  override def foo(x: Int): Int = x
         |  /** overridden doc */ override def foo(x: String): String = x
         |}
-      """.stripMargin).andThen { implicit s =>
+      """.stripMargin) andThen {
       assertEquals("doc0", doc("O.foo(_: Int)"))
       assertEquals("overridden doc", doc("O.foo(_: String)"))
     }
@@ -158,38 +124,34 @@ class DocTests extends ReplTest {
         |    override def bar: Int = 0
         |  }
         |}
-      """.stripMargin).andThen { implicit s =>
+      """.stripMargin) andThen {
       assertEquals("companion", doc("O.foo"))
       assertEquals("doc0", doc("O.foo.bar"))
     }
 
   @Test def docIsCooked =
     eval(
-      """/**
-        | * An object
+      """/** An object
         | * @define Variable some-value
         | */
         |object Foo {
         |  /** Expansion: $Variable */
         |  def hello = "world"
         |}
-      """.stripMargin).andThen { implicit s =>
+      """.stripMargin) andThen {
       assertEquals("Expansion: some-value", doc("Foo.hello"))
     }
 
-  @Test def docOfEmpty =
-    fromInitialState { implicit s =>
+  @Test def docOfEmpty = initially {
     run(":doc")
     assertEquals(":doc ", storedOutput().trim)
   }
 
-  private def eval(code: String): State =
-    fromInitialState { implicit s => run(code) }
+  private def eval(code: String): State = initially(run(code))
 
-  private def doc(expr: String)(implicit s: State): String = {
+  private def doc(expr: String)(using State): String = {
     storedOutput()
     run(s":doc $expr")
     storedOutput().trim
   }
-
 }
diff --git a/compiler/test/dotty/tools/repl/JavaDefinedTests.scala b/compiler/test/dotty/tools/repl/JavaDefinedTests.scala
new file mode 100644
index 000000000000..561111cf7eb1
--- /dev/null
+++ b/compiler/test/dotty/tools/repl/JavaDefinedTests.scala
@@ -0,0 +1,16 @@
+package dotty.tools.repl
+
+import org.junit.Assert._
+import org.junit.Test
+
+class JavaDefinedTests extends ReplTest {
+  @Test def typeOfJavaDefinedString = initially {
+    run("String")
+    assertTrue(storedOutput().contains("Java defined class String is not a value"))
+  }
+
+  @Test def typeOfJavaDefinedClass = initially {
+    run("Class")
+    assertTrue(storedOutput().contains("Java defined class Class is not a value"))
+  }
+}
diff --git a/compiler/test/dotty/tools/repl/LoadTests.scala b/compiler/test/dotty/tools/repl/LoadTests.scala
index 276a684b20e8..c4f1f9f0781d 100644
--- a/compiler/test/dotty/tools/repl/LoadTests.scala
+++ b/compiler/test/dotty/tools/repl/LoadTests.scala
@@ -16,12 +16,9 @@ class LoadTests extends ReplTest {
                  |""".stripMargin,
     defs    = """|Hello, World!
                  |def helloWorld: String
-                 |
-                 |
                  |""".stripMargin,
     runCode = "helloWorld",
     output  = """|val res0: String = Hello, World!
-                 |
                  |""".stripMargin
   )
 
@@ -29,12 +26,9 @@ class LoadTests extends ReplTest {
     file    = """|@main def helloWorld = println("Hello, World!")
                  |""".stripMargin,
     defs    = """|def helloWorld: Unit
-                 |
-                 |
                  |""".stripMargin,
     runCode = "helloWorld",
     output  = """|Hello, World!
-                 |
                  |""".stripMargin
   )
 
@@ -44,26 +38,21 @@ class LoadTests extends ReplTest {
                  |""".stripMargin,
     defs    = """|def helloWorld: Unit
                  |def helloTo(name: String): Unit
-                 |
-                 |
                  |""".stripMargin,
     runCode = """helloWorld; helloTo("Scala")""",
     output  = """|Hello, World!
                  |Hello, Scala!
-                 |
                  |""".stripMargin
   )
 
   def loadTest(file: String, defs: String, runCode: String, output: String) =
-    eval(s":load ${writeFile(file)}").andThen { implicit s =>
+    eval(s":load ${writeFile(file)}") andThen {
       assertMultiLineEquals(defs, storedOutput())
       run(runCode)
       assertMultiLineEquals(output, storedOutput())
     }
 
-  private def eval(code: String): State =
-    fromInitialState { implicit s => run(code) }
-
+  private def eval(code: String): State = initially(run(code))
 }
 
 object LoadTests {
diff --git a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala
index bc06727f2ea4..c02552aac83c 100644
--- a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala
+++ b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala
@@ -6,42 +6,40 @@ import org.junit.Assert.{assertTrue => assert, _}
 import org.junit.{Ignore, Test}
 import dotty.tools.dotc.core.Contexts.Context
 
-class ReplCompilerTests extends ReplTest {
+class ReplCompilerTests extends ReplTest:
   import ReplCompilerTests._
 
   private def lines() =
     storedOutput().trim.linesIterator.toList
 
-  @Test def compileSingle = fromInitialState { implicit state =>
+  @Test def compileSingle = initially {
     run("def foo: 1 = 1")
     assertEquals("def foo: 1", storedOutput().trim)
   }
 
   @Test def compileTwo =
-    fromInitialState { implicit state =>
+    initially {
       run("def foo: 1 = 1")
-    }
-    .andThen { implicit state =>
+    } andThen {
       val s2 = run("def foo(i: Int): i.type = i")
       assertEquals(2, s2.objectIndex)
     }
 
   @Test def inspectWrapper =
-    fromInitialState { implicit state =>
+    initially {
       run("def foo = 1")
-
-    }.andThen { implicit state =>
+    } andThen {
       storedOutput() // discard output
       run("val x = rs$line$1.foo")
       assertEquals("val x: Int = 1", storedOutput().trim)
     }
 
-  @Test def testVar = fromInitialState { implicit state =>
+  @Test def testVar = initially {
     run("var x = 5")
     assertEquals("var x: Int = 5", storedOutput().trim)
   }
 
-  @Test def testRes = fromInitialState { implicit state =>
+  @Test def testRes = initially {
     run {
       """|def foo = 1 + 1
          |val x = 5 + 5
@@ -62,11 +60,10 @@ class ReplCompilerTests extends ReplTest {
   }
 
   @Test def testImportMutable =
-    fromInitialState { implicit state =>
+    initially {
       run("import scala.collection.mutable")
-    }
-    .andThen { implicit state =>
-      assertEquals(1, state.imports.size)
+    } andThen {
+      assertEquals(1, summon[State].imports.size)
       run("""mutable.Map("one" -> 1)""")
       assertEquals(
         "val res0: scala.collection.mutable.Map[String, Int] = HashMap(one -> 1)",
@@ -75,22 +72,26 @@ class ReplCompilerTests extends ReplTest {
     }
 
   @Test def rebindVariable =
-    fromInitialState { implicit s =>
+    initially {
       val state = run("var x = 5")
       assertEquals("var x: Int = 5", storedOutput().trim)
       state
-    }
-    .andThen { implicit s =>
+    } andThen {
       run("x = 10")
       assertEquals("x: Int = 10", storedOutput().trim)
     }
 
-  @Test def i8677 = fromInitialState { implicit state =>
+  @Test def defaultParameter = initially {
+    run("def foo(a: Int = 1): 1 = 1")
+    assertEquals("def foo(a: Int): 1", storedOutput().trim)
+  }
+
+  @Test def i8677 = initially {
     run {
       """|sealed trait T1
          |case class X() extends T1
          |case class Y() extends T1
-         | case object O extends T1
+         |case object O extends T1
          """.stripMargin
     }
 
@@ -106,38 +107,38 @@ class ReplCompilerTests extends ReplTest {
 
   // FIXME: Tests are not run in isolation, the classloader is corrupted after the first exception
   @Ignore @Test def i3305: Unit = {
-    fromInitialState { implicit s =>
+    initially {
       run("null.toString")
       assert(storedOutput().startsWith("java.lang.NullPointerException"))
     }
 
-    fromInitialState { implicit s =>
+    initially {
       run("def foo: Int = 1 + foo; foo")
       assert(storedOutput().startsWith("def foo: Int\njava.lang.StackOverflowError"))
     }
 
-    fromInitialState { implicit s =>
+    initially {
       run("""throw new IllegalArgumentException("Hello")""")
       assert(storedOutput().startsWith("java.lang.IllegalArgumentException: Hello"))
     }
 
-    fromInitialState { implicit s =>
+    initially {
       run("val (x, y) = null")
       assert(storedOutput().startsWith("scala.MatchError: null"))
     }
   }
 
-  @Test def i2789: Unit = fromInitialState { implicit state =>
+  @Test def i2789: Unit = initially {
     run("(x: Int) => println(x)")
     assert(storedOutput().startsWith("val res0: Int => Unit ="))
   }
 
-  @Test def byNameParam: Unit = fromInitialState { implicit state =>
+  @Test def byNameParam: Unit = initially {
     run("def f(g: => Int): Int = g")
     assert(storedOutput().startsWith("def f(g: => Int): Int"))
   }
 
-  @Test def i4051 = fromInitialState { implicit state =>
+  @Test def i4051 = initially {
     val source =
       """val x: PartialFunction[Int, Int] = { case x => x }
         |val y = Map(("A", 1), ("B", 2), ("X", 3)).collect { case (k, v) => v }.toList""".stripMargin
@@ -152,8 +153,9 @@ class ReplCompilerTests extends ReplTest {
   }
 
   @Test def i5897 =
-    fromInitialState { implicit state => run("given Int = 10") }
-    .andThen         { implicit state =>
+    initially {
+      run("given Int = 10")
+    } andThen {
       assertEquals(
         "lazy val given_Int: Int",
         storedOutput().trim
@@ -166,7 +168,7 @@ class ReplCompilerTests extends ReplTest {
     }
 
   @Test def i6200 =
-    fromInitialState { implicit state =>
+    initially {
       run("""
         |trait Ord[T] {
         |  def compare(x: T, y: T): Int
@@ -178,8 +180,8 @@ class ReplCompilerTests extends ReplTest {
         |  def compare(x: Int, y: Int) =
         |  if (x < y) -1 else if (x > y) +1 else 0
         |}
-      """.stripMargin) }
-    .andThen         { implicit state =>
+      """.stripMargin)
+    } andThen {
       assertMultiLineEquals(
         """// defined trait Ord
           |// defined object IntOrd""".stripMargin,
@@ -189,75 +191,113 @@ class ReplCompilerTests extends ReplTest {
       assert(storedOutput().startsWith("val res0: IntOrd.type ="))
     }
 
-  @Test def i7934: Unit = fromInitialState { state =>
-    given Context = state.context
+  @Test def i7934: Unit = contextually {
     assertFalse(ParseResult.isIncomplete("_ + 1"))  // was: assertThrows[NullPointerException]
   }
 
-  @Test def testSingletonPrint = fromInitialState { implicit state =>
+  @Test def `i9374 accept collective extensions`: Unit = contextually {
+    assert(ParseResult.isIncomplete("extension (x: String)"))
+    assert(ParseResult.isIncomplete("extension (x: String) {"))
+  }
+
+  @Test def testSingletonPrint = initially {
     run("""val a = "hello"; val x: a.type = a""")
     assertMultiLineEquals("val a: String = hello\nval x: a.type = hello", storedOutput().trim)
   }
 
-  @Test def i6574 = fromInitialState { implicit state =>
+  @Test def i6574 = initially {
     run("val a: 1 | 0 = 1")
     assertEquals("val a: 1 | 0 = 1", storedOutput().trim)
   }
 
-  @Test def `i10214 must show classic MatchError` = fromInitialState { implicit state =>
+  @Test def `i10214 must show classic MatchError` = initially {
     run("val 1 = 2")
     assertEquals("scala.MatchError: 2 (of class java.lang.Integer)", storedOutput().linesIterator.next())
   }
   @Test def `i10214 must show useful regex MatchError` =
-    fromInitialState { implicit state =>
+    initially {
       run("""val r = raw"\d+".r""")
-    } andThen { implicit state =>
+    } andThen {
       run("""val r() = "abc"""")
-      assertEquals("scala.MatchError: abc (of class java.lang.String)", storedOutput().linesIterator.drop(2).next())
+      assertEquals("scala.MatchError: abc (of class java.lang.String)", storedOutput().linesIterator.drop(1).next())
     }
-  @Test def `i10214 must show MatchError on literal type` = fromInitialState { implicit state =>
+  @Test def `i10214 must show MatchError on literal type` = initially {
     run("val (x: 1) = 2")
     assertEquals("scala.MatchError: 2 (of class java.lang.Integer)", storedOutput().linesIterator.next())
   }
-}
+  @Test def `i12920 must truncate stack trace to user code` = initially {
+    run("???")
+    val all = lines()
+    assertEquals(3, all.length)
+    assertEquals("scala.NotImplementedError: an implementation is missing", all.head)
+    /* avoid asserting much about line number or elided count
+    scala.NotImplementedError: an implementation is missing
+      at scala.Predef$.$qmark$qmark$qmark(Predef.scala:344)
+      ... 28 elided
+     */
+  }
+  @Test def `i14281 context class loader must be REPL class loader` = initially {
+    run("class C ; assert(classOf[C].getClassLoader eq Thread.currentThread.getContextClassLoader)")
+    assertEquals(List("// defined class C"), lines())
+  }
 
-object ReplCompilerTests {
+  @Test def i14491 =
+    initially {
+      run("import language.experimental.fewerBraces")
+    } andThen {
+      run("""|val x = Seq(7,8,9).apply:
+             |  1
+             |""".stripMargin)
+      assertEquals("val x: Int = 8", storedOutput().trim)
+    }
+    initially {
+      run("""|import language.experimental.fewerBraces
+             |import language.experimental.fewerBraces as _
+             |""".stripMargin)
+    } andThen {
+      run("""|val x = Seq(7,8,9).apply:
+             |  1
+             |""".stripMargin)
+      assert("expected error if fewerBraces is unimported",
+        lines().exists(_.contains("missing arguments for method apply")))
+    }
+
+object ReplCompilerTests:
 
   private val pattern = Pattern.compile("\\r[\\n]?|\\n");
 
   // Ensure 'expected' and 'actual' contain the same line separator(s).
-  def assertMultiLineEquals(expected: String, actual: String): Unit = {
+  def assertMultiLineEquals(expected: String, actual: String): Unit =
     val expected0 = pattern.matcher(expected).replaceAll(System.lineSeparator)
     val actual0 = pattern.matcher(actual).replaceAll(System.lineSeparator)
     assertEquals(expected0, actual0)
-  }
 
-}
+end ReplCompilerTests
 
-class ReplXPrintTyperTests extends ReplTest(ReplTest.defaultOptions :+ "-Xprint:typer") {
-  @Test def i9111 = fromInitialState { implicit state =>
+class ReplXPrintTyperTests extends ReplTest(ReplTest.defaultOptions :+ "-Xprint:typer"):
+  @Test def i9111 = initially {
     run("""|enum E {
            |  case A
            |}""".stripMargin)
     assert(storedOutput().trim().endsWith("// defined class E"))
   }
 
-  @Test def i10883 = fromInitialState { implicit state =>
+  @Test def i10883 = initially {
     run("val a = 42")
     assert(storedOutput().trim().endsWith("val a: Int = 42"))
   }
-}
+end ReplXPrintTyperTests
 
-class ReplVerboseTests extends ReplTest(ReplTest.defaultOptions :+ "-verbose") {
-  @Test def i9111 = fromInitialState { implicit state =>
+class ReplVerboseTests extends ReplTest(ReplTest.defaultOptions :+ "-verbose"):
+  @Test def i9111 = initially {
     run("""|enum E {
            |  case A
            |}""".stripMargin)
     assert(storedOutput().trim().endsWith("// defined class E"))
   }
 
-  @Test def i10883 = fromInitialState { implicit state =>
+  @Test def i10883 = initially {
     run("val a = 42")
     assert(storedOutput().trim().endsWith("val a: Int = 42"))
   }
-}
+end ReplVerboseTests
diff --git a/compiler/test/dotty/tools/repl/ReplTest.scala b/compiler/test/dotty/tools/repl/ReplTest.scala
index 0c2517dd0219..d5256986f874 100644
--- a/compiler/test/dotty/tools/repl/ReplTest.scala
+++ b/compiler/test/dotty/tools/repl/ReplTest.scala
@@ -12,6 +12,7 @@ import scala.io.Source
 import scala.util.Using
 import scala.collection.mutable.ArrayBuffer
 
+import dotty.tools.dotc.core.Contexts.Context
 import dotty.tools.dotc.reporting.MessageRendering
 import org.junit.{After, Before}
 import org.junit.Assert._
@@ -33,13 +34,16 @@ extends ReplDriver(options, new PrintStream(out, true, StandardCharsets.UTF_8.na
   @After def cleanup: Unit =
     storedOutput()
 
-  def fromInitialState[A](op: State => A): A =
-    op(initialState)
+  def initially[A](op: State ?=> A): A = op(using initialState)
+
+  def contextually[A](op: Context ?=> A): A = op(using initialState.context)
 
   extension [A](state: State)
-    def andThen(op: State => A): A = op(state)
+    infix def andThen(op: State ?=> A): A = op(using state)
+
+  def testFile(f: JFile): Unit = testScript(f.toString, readLines(f), Some(f))
 
-  def testFile(f: JFile): Unit = {
+  def testScript(name: => String, lines: List[String], scriptFile: Option[JFile] = None): Unit = {
     val prompt = "scala>"
 
     def evaluate(state: State, input: String) =
@@ -50,7 +54,7 @@ extends ReplDriver(options, new PrintStream(out, true, StandardCharsets.UTF_8.na
       }
       catch {
         case ex: Throwable =>
-          System.err.println(s"failed while running script: $f, on:\n$input")
+          System.err.println(s"failed while running script: $name, on:\n$input")
           throw ex
       }
 
@@ -59,36 +63,41 @@ extends ReplDriver(options, new PrintStream(out, true, StandardCharsets.UTF_8.na
         case "" => Nil
         case nonEmptyLine => nonEmptyLine :: Nil
       }
+    def nonBlank(line: String): Boolean = line.exists(!Character.isWhitespace(_))
 
-    val expectedOutput = readLines(f).flatMap(filterEmpties)
+    val expectedOutput = lines.filter(nonBlank)
     val actualOutput = {
-      resetToInitial()
+      val opts = toolArgsParse(lines.take(1))
+      val (optsLine, inputLines) = if opts.isEmpty then ("", lines) else (lines.head, lines.drop(1))
+      resetToInitial(opts)
 
-      val lines = readLines(f)
-      assert(lines.head.startsWith(prompt),
-        s"""Each file has to start with the prompt: "$prompt"""")
-      val inputRes = lines.filter(_.startsWith(prompt))
+      assert(inputLines.head.startsWith(prompt),
+        s"""Each script must start with the prompt: "$prompt"""")
+      val inputRes = inputLines.filter(_.startsWith(prompt))
 
       val buf = new ArrayBuffer[String]
       inputRes.foldLeft(initialState) { (state, input) =>
         val (out, nstate) = evaluate(state, input)
         out.linesIterator.foreach(buf.append)
-
-        assert(out.endsWith("\n"),
-               s"Expected output of $input to end with newline")
-
         nstate
       }
-      buf.toList.flatMap(filterEmpties)
+      (optsLine :: buf.toList).filter(nonBlank)
     }
 
     if !FileDiff.matches(actualOutput, expectedOutput) then
-      println("expected =========>")
-      println(expectedOutput.mkString(EOL))
-      println("actual ===========>")
-      println(actualOutput.mkString(EOL))
-
-      fail(s"Error in file $f, expected output did not match actual")
+      // Some tests aren't file-based but just pass a string, so can't update anything then
+      // Also the files here are the copies in target/ not the original, so you need to vimdiff/mv them...
+      if dotty.Properties.testsUpdateCheckfile && scriptFile != None then
+        val checkFile = scriptFile.get
+        FileDiff.dump(checkFile.toPath.toString, actualOutput)
+        println(s"Wrote updated script file to $checkFile")
+      else
+        println("expected =========>")
+        println(expectedOutput.mkString(EOL))
+        println("actual ===========>")
+        println(actualOutput.mkString(EOL))
+
+        fail(s"Error in script $name, expected output did not match actual")
     end if
   }
 }
diff --git a/compiler/test/dotty/tools/repl/ShadowingBatchTests.scala b/compiler/test/dotty/tools/repl/ShadowingBatchTests.scala
new file mode 100644
index 000000000000..bcfe1c3251a7
--- /dev/null
+++ b/compiler/test/dotty/tools/repl/ShadowingBatchTests.scala
@@ -0,0 +1,71 @@
+package dotty.tools
+package repl
+
+import java.io.File
+import java.nio.file.Files
+
+import org.junit.{ After, AfterClass, BeforeClass, Ignore, Test }
+import org.junit.Assert._
+import io.{ Directory, PlainDirectory }
+import dotc.core.Contexts._
+import dotc.reporting.{ ErrorMessagesTest, StoreReporter }
+import vulpix.TestConfiguration
+
+object ShadowingBatchTests:
+  val dir = Directory(Files.createTempDirectory("batch-shadow"))
+
+  @BeforeClass def suiteStarting: Unit = dir.createDirectory()
+  @AfterClass  def suiteFinished: Unit = dir.deleteRecursively()
+
+class ShadowingBatchTests extends ErrorMessagesTest:
+  import ShadowingBatchTests._
+
+  @After def testFinished: Unit = dir.list.foreach(_.deleteRecursively())
+
+  val compiler = new dotc.Compiler()
+
+  override def initializeCtx(ictx: FreshContext) = inContext(ictx) {
+    super.initializeCtx(ictx)
+    val settings = ictx.settings; import settings._
+    ictx.setSetting(outputDir, new PlainDirectory(dir))
+    ictx.setSetting(classpath, classpath.value + File.pathSeparator + dir.jpath.toAbsolutePath)
+  }
+
+  @Test def file =
+    checkMessages("class C(val c: Int)").expectNoErrors
+    checkMessages("object rsline1 {\n  def line1 = new C().c\n}").expect { (_, msgs) =>
+      assertMessageCount(1, msgs)
+      assertEquals("missing argument for parameter c of constructor C in class C: (c: Int): C", msgs.head.message)
+    }
+    checkMessages("object rsline2 {\n  def line2 = new C(13).c\n}").expectNoErrors
+    checkMessages("object rsline3 {\n  class C { val c = 42 }\n}").expectNoErrors
+    checkMessages("import rsline3._\nobject rsline4 {\n  def line4 = new C().c\n}").expectNoErrors
+
+  @Test def directory =
+    checkMessages("package foo\nclass C").expectNoErrors
+    checkMessages("object rsline1 {\n  def line1 = foo\n}").expect { (_, msgs) =>
+      assertMessageCount(1, msgs)
+      assertEquals("package foo is not a value", msgs.head.message)
+    }
+    checkMessages("object rsline2 {\n  val foo = 2\n}").expectNoErrors
+    checkMessages("import rsline2._\nobject rsline3 {\n  def line3 = foo\n}").expectNoErrors
+
+  @Test def directoryJava =
+    checkMessages("object rsline1 {\n  def line1 = java\n}").expect { (_, msgs) =>
+      assertMessageCount(1, msgs)
+      assertEquals("package java is not a value", msgs.head.message)
+    }
+    checkMessages("object rsline2 {\n  val java = 2\n}").expectNoErrors
+    checkMessages("import rsline2._\nobject rsline3 {\n  def line3 = java\n}").expectNoErrors
+
+  def checkMessages(source: String): Report =
+    ctx = newContext
+    val run = compiler.newRun(using ctx.fresh)
+    run.compileFromStrings(List(source))
+    val runCtx = run.runContext
+    if runCtx.reporter.hasErrors then
+      val rep = runCtx.reporter.asInstanceOf[StoreReporter]
+      val msgs = rep.removeBufferedMessages(using runCtx).map(_.msg).reverse
+      new Report(msgs, runCtx)
+    else new EmptyReport
+end ShadowingBatchTests
diff --git a/compiler/test/dotty/tools/repl/ShadowingTests.scala b/compiler/test/dotty/tools/repl/ShadowingTests.scala
new file mode 100644
index 000000000000..43c182b9ae5b
--- /dev/null
+++ b/compiler/test/dotty/tools/repl/ShadowingTests.scala
@@ -0,0 +1,139 @@
+package dotty.tools
+package repl
+
+import java.io.File
+import java.nio.file.{Path, Files}
+import java.util.Comparator
+
+import org.junit.{Test, Ignore, BeforeClass, AfterClass}
+
+import dotc.Driver
+import dotc.reporting.TestReporter
+import dotc.interfaces.Diagnostic.ERROR
+import vulpix.{TestConfiguration, TestFlags}
+
+/** Test that the REPL can shadow artifacts in the local filesystem on the classpath.
+ *  Since the REPL launches with the current directory on the classpath, stray .class
+ *  files containing definitions in the empty package will be in scope in the REPL.
+ *  Additionally, any subdirectories will be treated as package names in scope.
+ *  As this may come as a surprise to an unsuspecting user, we would like definitions
+ *  from the REPL session to shadow these names.
+ *
+ *  Provided here is a framework for creating the filesystem artifacts to be shadowed
+ *  and running scripted REPL tests with them on the claspath.
+ */
+object ShadowingTests:
+  def classpath = TestConfiguration.basicClasspath + File.pathSeparator + shadowDir
+  def options = ReplTest.commonOptions ++ Array("-classpath", classpath)
+  def shadowDir = dir.toAbsolutePath.toString
+
+  def createSubDir(name: String): Path =
+    val subdir = dir.resolve(name)
+    try Files.createDirectory(subdir)
+    catch case _: java.nio.file.FileAlreadyExistsException =>
+    assert(Files.isDirectory(subdir), s"failed to create shadowed subdirectory $subdir")
+    subdir
+
+  // The directory on the classpath containing artifacts to be shadowed
+  private var dir: Path = null
+
+  @BeforeClass def setupDir: Unit =
+    dir = Files.createTempDirectory("repl-shadow")
+
+  @AfterClass def tearDownDir: Unit =
+    Files.walk(dir).sorted(Comparator.reverseOrder).forEach(Files.delete)
+    dir = null
+
+class ShadowingTests extends ReplTest(options = ShadowingTests.options):
+  // delete contents of shadowDir after each test
+  override def cleanup: Unit =
+    super.cleanup
+    val dir = ShadowingTests.dir
+    Files.walk(dir)
+      .filter(_ != dir)
+      .sorted(Comparator.reverseOrder)
+      .forEach(Files.delete)
+
+  /** Run a scripted REPL test with the compilation artifacts of `shadowed` on the classpath */
+  def shadowedScriptedTest(name: String, shadowed: String, script: String): Unit =
+    compileShadowed(shadowed)
+    testScript(name, script.linesIterator.toList)
+
+  /** Compile the given source text and output to the shadow dir on the classpath */
+  private def compileShadowed(src: String): Unit =
+    val file: Path = Files.createTempFile("repl-shadow-test", ".scala")
+    Files.write(file, src.getBytes)
+
+    val flags =
+      TestFlags(TestConfiguration.basicClasspath, TestConfiguration.noCheckOptions)
+        .and("-d", ShadowingTests.shadowDir)
+    val driver = new Driver
+    val reporter = TestReporter.reporter(System.out, logLevel = ERROR)
+    driver.process(flags.all :+ file.toString, reporter)
+    assert(!reporter.hasErrors, s"compilation of $file failed")
+    Files.delete(file)
+  end compileShadowed
+
+  @Test def i7635 = shadowedScriptedTest(name = "",
+    shadowed = "class C(val c: Int)",
+    script =
+      """|scala> new C().c
+         |-- Error: ----------------------------------------------------------------------
+         |1 | new C().c
+         |  | ^^^^^^^
+         |  | missing argument for parameter c of constructor C in class C: (c: Int): C
+         |1 error found
+         |
+         |scala> new C(13).c
+         |val res0: Int = 13
+         |
+         |scala> class C { val c = 42 }
+         |// defined class C
+         |
+         |scala> new C().c
+         |val res1: Int = 42
+         |""".stripMargin
+  )
+
+  @Test def `shadow subdirectories on classpath` =
+    // NB: Tests of shadowing of subdirectories on the classpath are only valid
+    // when the subdirectories exist prior to initialization of the REPL driver.
+    // In the tests below this is enforced by the call to `testScript` which
+    // in turn invokes `ReplDriver#resetToInitial`. When testing interactively,
+    // the subdirectories may be created before launching the REPL, or during
+    // an existing session followed by the `:reset` command.
+
+    ShadowingTests.createSubDir("foo")
+    testScript(name = "",
+      """|scala> val foo = 3
+         |val foo: Int = 3
+         |
+         |scala> foo
+         |val res0: Int = 3
+         |""".stripMargin.linesIterator.toList
+    )
+
+    ShadowingTests.createSubDir("x")
+    testScript(name = "",
+      """|scala> val (x, y) = (42, "foo")
+         |val x: Int = 42
+         |val y: String = foo
+         |
+         |scala> if (true) x else y
+         |val res0: Matchable = 42
+         |""".stripMargin.linesIterator.toList
+    )
+
+    ShadowingTests.createSubDir("util")
+    testScript(name = "",
+      """|scala> import util.Try
+         |
+         |scala> object util { class Try { override def toString = "you've gotta try!" }  }
+         |// defined object util
+         |
+         |scala> import util.Try
+         |scala> new Try
+         |val res0: util.Try = you've gotta try!
+         |""".stripMargin.linesIterator.toList
+    )
+end ShadowingTests
diff --git a/compiler/test/dotty/tools/repl/TabcompleteTests.scala b/compiler/test/dotty/tools/repl/TabcompleteTests.scala
index cbf267e7c846..ab581d664629 100644
--- a/compiler/test/dotty/tools/repl/TabcompleteTests.scala
+++ b/compiler/test/dotty/tools/repl/TabcompleteTests.scala
@@ -10,22 +10,22 @@ class TabcompleteTests extends ReplTest {
   private def tabComplete(src: String)(implicit state: State): List[String] =
     completions(src.length, src, state).map(_.value).sorted
 
-  @Test def tabCompleteList = fromInitialState { implicit s =>
+  @Test def tabCompleteList = initially {
     val comp = tabComplete("List.r")
     assertEquals(List("range"), comp.distinct)
   }
 
-  @Test def tabCompleteListInstance = fromInitialState { implicit s =>
+  @Test def tabCompleteListInstance = initially {
     val comp = tabComplete("(null: List[Int]).sli")
     assertEquals(List("slice", "sliding"), comp.distinct)
   }
 
-  @Test def tabCompleteModule = fromInitialState{ implicit s =>
+  @Test def tabCompleteModule = initially {
     val comp = tabComplete("scala.Pred")
     assertEquals(List("Predef"), comp)
   }
 
-  @Test def tabCompleteInClass = fromInitialState { implicit s =>
+  @Test def tabCompleteInClass = initially {
     val comp = tabComplete("class Foo { def bar: List[Int] = List.ap")
     assertEquals(List("apply"), comp)
   }
@@ -34,77 +34,70 @@ class TabcompleteTests extends ReplTest {
     val src1 = "class Foo { def bar(xs: List[Int]) = xs.map"
     val src2 = "class Foo { def bar(xs: List[Int]) = xs.mapC"
 
-    fromInitialState { implicit state =>
-      val comp = tabComplete(src1)
-      assertEquals(List("map", "mapConserve"), comp)
-      state
-    }
-    .andThen { implicit state =>
-      val comp = tabComplete(src2)
-      assertEquals(List("mapConserve"), comp)
+    initially {
+      val comp1 = tabComplete(src1)
+      assertEquals(List("map", "mapConserve"), comp1)
+      val comp2 = tabComplete(src2)
+      assertEquals(List("mapConserve"), comp2)
     }
   }
 
-  @Test def i3309 = fromInitialState { implicit s =>
+  @Test def i3309 = initially {
     // We make sure we do not crash
     List("\"", ")", "'", "¨", "£", ":", ",", ";", "@", "}", "[", "]", ".")
       .foreach(tabComplete(_))
   }
 
   @Test def completeFromPreviousState =
-    fromInitialState { implicit state  =>
+    initially {
       val src = "class Foo { def comp3 = 3; def comp1 = 1; def comp2 = 2 }"
       run(src)
-    }
-    .andThen { implicit state =>
+    } andThen {
       val expected = List("comp1", "comp2", "comp3")
       assertEquals(expected, tabComplete("(new Foo).comp"))
     }
 
   @Test def completeFromPreviousState2 =
-    fromInitialState { implicit state  =>
+    initially {
       val src = "def hello = 1"
       run(src)
-    }
-    .andThen { implicit state =>
+    } andThen {
       val expected = List("hello")
       assertEquals(expected, tabComplete("hel"))
     }
 
   @Test def tabCompleteFromPreviousImport =
-    fromInitialState { implicit state =>
+    initially {
       val src = "import java.io.FileDescriptor"
       run(src)
-    }
-    .andThen { implicit state =>
+    } andThen {
       val expected = List("FileDescriptor")
       assertEquals(expected, tabComplete("val foo: FileDesc"))
     }
 
   @Test def tabCompleteRenamedImport =
-    fromInitialState { implicit state =>
+    initially {
       val src = "import java.io.{FileDescriptor => Renamed}"
       run(src)
-    }
-    .andThen { implicit state =>
+    } andThen {
       val expected = List("Renamed")
       assertEquals(expected, tabComplete("val foo: Rena"))
     }
 
-  @Test def tabClosureComplete = fromInitialState { implicit s =>
+  @Test def tabClosureComplete = initially {
     assertEquals(List("map", "mapConserve"), tabComplete("Nil.map"))
     assertEquals(List("map", "mapConserve"), tabComplete("(x: Int => Int) => Nil.map"))
     assertEquals(List("apply"), tabComplete("(x: Int => Int) => x.ap"))
   }
 
-  @Test def importScala = fromInitialState { implicit s =>
+  @Test def importScala = initially {
     val comp = tabComplete("import scala.")
     // check that there are no special symbols leaked: , , ...
     assertEquals(Some("<:<"), comp.find(_.startsWith("<")))
     assert(!comp.contains("package"))
   }
 
-  @Test def `null` = fromInitialState { implicit s =>
+  @Test def `null` = initially {
     val comp = tabComplete("null.")
     assertEquals(
       List("!=", "##", "==", "asInstanceOf", "eq", "equals", "getClass", "hashCode",
@@ -112,7 +105,7 @@ class TabcompleteTests extends ReplTest {
       comp.distinct.sorted)
   }
 
-  @Test def anyRef = fromInitialState { implicit s =>
+  @Test def anyRef = initially {
     val comp = tabComplete("(null: AnyRef).")
     assertEquals(
       List("!=", "##", "->", "==", "asInstanceOf", "ensuring", "eq", "equals", "formatted",
@@ -120,16 +113,22 @@ class TabcompleteTests extends ReplTest {
       comp.distinct.sorted)
   }
 
-  @Test def `???` = fromInitialState { implicit s =>
+  @Test def `???` = initially {
     val comp = tabComplete("???.")
     assertEquals(Nil, comp)
   }
 
-  @Test def moduleCompletion = fromInitialState { implicit s =>
+  @Test def moduleCompletion = initially {
     assertEquals(List("Predef"), tabComplete("object Foo { type T = Pre"))
   }
 
-  @Test def i6415 = fromInitialState { implicit s =>
+  @Test def i6415 = initially {
     assertEquals(List("Predef"), tabComplete("object Foo { opaque type T = Pre"))
   }
+
+  @Test def i12600 = initially {
+    assertEquals(List("select", "show", "simplified", "substituteTypes"),
+      tabComplete("import quoted.* ; def fooImpl(using Quotes): Expr[Int] = { import quotes.reflect.* ; TypeRepr.of[Int].s"))
+  }
+
 }
diff --git a/compiler/test/dotty/tools/repl/TypeTests.scala b/compiler/test/dotty/tools/repl/TypeTests.scala
index abc66ba2c0ab..6a8c38867ff0 100644
--- a/compiler/test/dotty/tools/repl/TypeTests.scala
+++ b/compiler/test/dotty/tools/repl/TypeTests.scala
@@ -4,25 +4,26 @@ import org.junit.Assert._
 import org.junit.Test
 
 class TypeTests extends ReplTest {
-  @Test def typeOf1 = fromInitialState { implicit s =>
+  @Test def typeOf1 = initially {
     run(":type 1")
     assertEquals("Int", storedOutput().trim)
   }
 
-  @Test def typeOfBlock = fromInitialState { implicit s =>
+  @Test def typeOfBlock = initially {
     run(":type { /** omg omg omg */ 1 + 5; 1 }")
     assertEquals("Int", storedOutput().trim)
   }
 
   @Test def typeOfX =
-    fromInitialState { implicit s => run("val x = 5") }
-    .andThen { implicit s =>
+    initially {
+      run("val x = 5")
+    } andThen {
       storedOutput() // discard output
       run(":type x")
       assertEquals("Int", storedOutput().trim)
     }
 
-  @Test def typeOfEmpty = fromInitialState { implicit s =>
+  @Test def typeOfEmpty = initially {
     run(":type")
     assertEquals(":type ", storedOutput().trim)
   }
diff --git a/compiler/test/dotty/tools/scripting/BashScriptsTests.scala b/compiler/test/dotty/tools/scripting/BashScriptsTests.scala
new file mode 100644
index 000000000000..b83b16e01e1f
--- /dev/null
+++ b/compiler/test/dotty/tools/scripting/BashScriptsTests.scala
@@ -0,0 +1,229 @@
+package dotty
+package tools
+package scripting
+
+import java.nio.file.Paths
+import org.junit.{Test, AfterClass}
+import org.junit.Assert.assertEquals
+
+import vulpix.TestConfiguration
+
+import ScriptTestEnv.*
+
+/** Verifies correct handling of command line arguments by `dist/bin/scala` and `dist/bin/scalac`.
+ *   +. arguments following a script path must be treated as script arguments
+ *   +. preserve script command line arguments.
+ *   +. prevent SCALA_OPTS in build environment from infecting tests, via 'SCALA_OPTS= ' prefix
+ *   +. test scripts must not throw exceptions or exit with nonzero.
+ */
+object BashScriptsTests:
+  lazy val argsfile = createArgsFile() // avoid problems caused by drive letter
+  def testFiles = scripts("/scripting")
+
+  @AfterClass def cleanup: Unit = {
+    val af = argsfile.toFile
+    if (af.exists) {
+      af.delete()
+    }
+  }
+  printf("osname[%s]\n", osname)
+  printf("uname[%s]\n", ostypeFull)
+  printf("using JAVA_HOME=%s\n", envJavaHome)
+  printf("using SCALA_HOME=%s\n", envScalaHome)
+  printf("first 5 PATH entries:\n%s\n", adjustedPathEntries.take(5).mkString("\n"))
+  printf("scala path:  [%s]\n", scalaPath)
+  printf("scalac path: [%s]\n", scalacPath)
+
+  val expectedOutput = List(
+    "arg  0:[a]",
+    "arg  1:[b]",
+    "arg  2:[c]",
+    "arg  3:[-repl]",
+    "arg  4:[-run]",
+    "arg  5:[-script]",
+    "arg  6:[-debug]",
+  )
+  val testScriptArgs = Seq(
+    "a", "b", "c", "-repl", "-run", "-script", "-debug"
+  )
+  val showArgsScript = testFiles.find(_.getName == "showArgs.sc").get.absPath
+
+  def testFile(name: String): String = 
+    val file = testFiles.find(_.getName == name) match {
+      case Some(f) =>
+        val ff = f.absPath
+        printf("test file [%s] is [%s]\n", name, ff)
+        ff
+      case None =>
+        printf("test file [%s] not found!\n", name)
+        name.absPath
+    }
+    file
+
+  val Seq(envtestSc, envtestScala) = Seq("envtest.sc", "envtest.scala").map { testFile(_) }
+
+  // create command line with given options, execute specified script, return stdout
+  def callScript(tag: String, script: String, keyPre: String): String =
+    val keyArg = s"$keyPre=$tag"
+    printf("pass tag [%s] via [%s] to script [%s]\n", tag, keyArg, script)
+    val cmd: String = Seq("SCALA_OPTS= ", scalaPath, keyArg, script).mkString(" ")
+    printf("cmd: [%s]\n", cmd)
+    val (validTest, exitCode, stdout, stderr) = bashCommand(cmd)
+    stderr.filter { !_.contains("Inappropriate ioctl") }.foreach { System.err.printf("stderr [%s]\n", _) }
+    stdout.mkString("\n")
+
+
+class BashScriptsTests:
+  import BashScriptsTests.*
+  // classpath tests managed by scripting.ClasspathTests.scala
+
+  ////////////////////////// begin tests //////////////////////
+  
+  /* verify that `dist/bin/scala` correctly passes args to the jvm via -J-D for script envtest.sc */
+  @Test def verifyScJProperty =
+    val tag = "World1"
+    val stdout = callScript(tag, envtestSc, s"-J-Dkey")
+    assertEquals( s"Hello $tag", stdout)
+
+  /* verify that `dist/bin/scala` correctly passes args to the jvm via -J-D for script envtest.scala */
+  @Test def verifyScalaJProperty =
+    val tag = "World2"
+    val stdout = callScript(tag, envtestScala, s"-J-Dkey")
+    assertEquals(s"Hello $tag", stdout)
+
+  /* verify that `dist/bin/scala` can set system properties via -D for envtest.sc */
+  @Test def verifyScDProperty =
+    val tag = "World3"
+    val stdout = callScript(tag, envtestSc, s"-Dkey")
+    assertEquals(s"Hello $tag", stdout)
+
+  /* verify that `dist/bin/scala` can set system properties via -D for envtest.scala */
+  @Test def verifyScalaDProperty =
+    val tag = "World4"
+    val stdout = callScript(tag, envtestScala, s"-Dkey")
+    assertEquals(s"Hello $tag", stdout)
+
+  /* verify that `dist/bin/scala` can set system properties via -D when executing compiled script via -jar envtest.jar */
+  @Test def saveAndRunWithDProperty =
+    val commandline = Seq("SCALA_OPTS= ", scalaPath.relpath, "-save", envtestScala.relpath).mkString(" ")
+    val (_, _, _, _) = bashCommand(commandline) // compile jar, discard output
+    val testJar = testFile("envtest.jar") // jar is created by the previous bashCommand()
+    if (testJar.isFile){
+      printf("compiled envtest.scala to %s\n", testJar.norm)
+    } else {
+      sys.error(s"error: unable to compile envtest.scala to ${testJar.norm}")
+    }
+
+    val tag = "World5"
+    val commandline2 = Seq("SCALA_OPTS= ", scalaPath.relpath, s"-Dkey=$tag", testJar.relpath)
+    printf("cmd[%s]\n", commandline2.mkString(" "))
+    val (validTest, exitCode, stdout, stderr) = bashCommand(commandline2.mkString(" "))
+    assertEquals(s"Hello $tag", stdout.mkString("/n"))
+
+  /* verify `dist/bin/scalac` non-interference with command line args following script name */
+  @Test def verifyScalacArgs =
+    val commandline = (Seq("SCALA_OPTS= ", scalacPath, "-script", showArgsScript) ++ testScriptArgs).mkString(" ")
+    val (validTest, exitCode, stdout, stderr) = bashCommand(commandline)
+    if verifyValid(validTest) then
+      var fail = false
+      printf("\n")
+      for (line, expect) <- stdout zip expectedOutput do
+        printf("expected: %-17s\nactual  : %s\n", expect, line)
+        if line != expect then
+          fail = true
+
+      if fail then
+        assert(stdout == expectedOutput)
+
+  /* verify `dist/bin/scala` non-interference with command line args following script name */
+  @Test def verifyScalaArgs =
+    val commandline = (Seq("SCALA_OPTS= ", scalaPath, showArgsScript) ++ testScriptArgs).mkString(" ")
+    val (validTest, exitCode, stdout, stderr) = bashCommand(commandline)
+    if verifyValid(validTest) then
+      var fail = false
+      printf("\n")
+      for (line, expect) <- stdout zip expectedOutput do
+        printf("expected: %-17s\nactual  : %s\n", expect, line)
+        if line != expect then
+          fail = true
+
+      if fail then
+        assert(stdout == expectedOutput)
+
+  /*
+   * verify that scriptPath.sc sees a valid script.path property,
+   * and that its value is the path to "scriptPath.sc".
+   */
+  @Test def verifyScriptPathProperty =
+    val scriptFile = testFiles.find(_.getName == "scriptPath.sc").get
+    val expected = s"${scriptFile.getName}"
+    printf("===> verify valid system property script.path is reported by script [%s]\n", scriptFile.getName)
+    printf("calling scriptFile: %s\n", scriptFile)
+    val (validTest, exitCode, stdout, stderr) = bashCommand(scriptFile.absPath)
+    if verifyValid(validTest) then
+      stdout.foreach { printf("stdout: [%s]\n", _) }
+      stderr.foreach { printf("stderr: [%s]\n", _) }
+      val valid = stdout.exists { _.endsWith(expected) }
+      if valid then printf("# valid script.path reported by [%s]\n", scriptFile.getName)
+      assert(valid, s"script ${scriptFile.absPath} did not report valid script.path value")
+
+  /*
+   * verify SCALA_OPTS can specify an @argsfile when launching a scala script in `dist/bin/scala`.
+   */
+  @Test def verifyScalaOpts =
+    val scriptFile = testFiles.find(_.getName == "classpathReport.sc").get
+    printf("===> verify SCALA_OPTS='@argsfile' is properly handled by `dist/bin/scala`\n")
+    val envPairs = List(("SCALA_OPTS", s"@$argsfile"))
+    val (validTest, exitCode, stdout, stderr) = bashCommand(scriptFile.absPath, envPairs)
+    printf("stdout: %s\n", stdout.mkString("\n","\n",""))
+    if verifyValid(validTest) then
+      val expected = s"${workingDirectory.norm}"
+      // stdout might be polluted with an ANSI color prefix, so be careful
+      val cwdline = stdout.find( _.trim.matches(".*cwd: .*") ).getOrElse("")
+      printf("cwdline  [%s]\n", cwdline)
+      printf("expected[%s]\n", expected)
+      val valid = cwdline.endsWith(expected)
+      if (!valid) then
+        stdout.foreach { printf("stdout[%s]\n", _) }
+        stderr.foreach { printf("stderr[%s]\n", _) }
+      if valid then printf(s"\n===> success: classpath begins with %s, as reported by [%s]\n", workingDirectory, scriptFile.getName)
+      assert(valid, s"script ${scriptFile.absPath} did not report valid java.class.path first entry")
+
+  /*
+   * verify that individual scripts can override -save with -nosave (needed to address #13760).
+   */
+  @Test def sqlDateTest =
+    val scriptBase = "sqlDateError"
+    val scriptFile = testFiles.find(_.getName == s"$scriptBase.sc").get
+    val testJar = testFile(s"$scriptBase.jar") // jar should not be created when scriptFile runs
+    val tj = Paths.get(testJar).toFile
+    if tj.isFile then tj.delete() // discard residual debris from previous test
+    printf("===> verify '-save' is cancelled by '-nosave' in script hashbang.`\n")
+    val (validTest, exitCode, stdout, stderr) = bashCommand(s"SCALA_OPTS=-save ${scriptFile.absPath}")
+    printf("stdout: %s\n", stdout.mkString("\n","\n",""))
+    if verifyValid(validTest) then
+      // the script should print '1969-12-31' or '1970-01-01', depending on time zone
+      // stdout can be polluted with an ANSI color prefix, in some test environments
+      val valid = stdout.mkString("").matches(""".*\d{4}-\d{2}-\d{2}.*""")
+      if (!valid) then
+        stdout.foreach { printf("stdout[%s]\n", _) }
+        stderr.foreach { printf("stderr[%s]\n", _) }
+      if valid then printf(s"\n===> success: scripts can override -save via -nosave\n")
+      assert(valid, s"script ${scriptFile.absPath} reported unexpected value for java.sql.Date ${stdout.mkString("\n")}")
+      assert(!testJar.exists,s"unexpected, jar file [$testJar] was created")
+
+
+  /*
+   * verify -e println("yo!") works.
+   */
+  @Test def verifyCommandLineExpression =
+    printf("===> verify -e  is properly handled by `dist/bin/scala`\n")
+    val expected = "9"
+    val expression = s"println(3*3)"
+    val cmd = s"bin/scala -e $expression"
+    val (validTest, exitCode, stdout, stderr) = bashCommand(s"""bin/scala -e '$expression'""")
+    val result = stdout.filter(_.nonEmpty).mkString("")
+    printf("stdout: %s\n", result)
+    printf("stderr: %s\n", stderr.mkString("\n","\n",""))
+    if verifyValid(validTest) then
+      assert(result.contains(expected), s"expression [$expression] did not send [$expected] to stdout")
diff --git a/compiler/test/dotty/tools/scripting/ClasspathTests.scala b/compiler/test/dotty/tools/scripting/ClasspathTests.scala
new file mode 100755
index 000000000000..767c30e60ad6
--- /dev/null
+++ b/compiler/test/dotty/tools/scripting/ClasspathTests.scala
@@ -0,0 +1,102 @@
+package dotty
+package tools
+package scripting
+
+import java.io.File
+import java.nio.file.Path
+
+import org.junit.{Test, Ignore, AfterClass}
+import vulpix.TestConfiguration
+import ScriptTestEnv.*
+
+/** Test java command line generated by bin/scala and bin/scalac */
+
+class ClasspathTests:
+  /*
+   * Test disabled (temporarily).
+   * verify classpath reported by called script.
+   * This needs to be reconceptualized.
+   * System property "java.class.path" does not necessarily contain the actual runtime path,
+   * So this test can fail even when the classpath is correct.
+   */
+  @Ignore
+  @Test def hashbangClasspathVerifyTest = {
+    // only interested in classpath test scripts
+    val testScriptName = "classpathReport.sc"
+    val testScript = scripts("/scripting").find { _.getName.matches(testScriptName) } match
+      case None => sys.error(s"test script not found: ${testScriptName}")
+      case Some(file) => file
+
+    val relpath = testScript.toPath.relpath.norm
+    printf("===> hashbangClasspathVerifyTest for script [%s]\n", relpath)
+    printf("bash is [%s]\n", bashExe)
+
+    if packBinScalaExists then
+      val bashCmdline = s"SCALA_OPTS= $relpath"
+      val cmd = Array(bashExe, "-c", bashCmdline)
+
+      cmd.foreach { printf("[%s]\n", _) }
+
+      // classpathReport.sc is expected to produce two lines:
+      // cwd: <working directory>
+      // classpath: <classpath entries>
+      
+      val scriptOutput: Seq[String] = exec(cmd:_*)
+      val scriptCwd: String = findTaggedLine("cwd", scriptOutput) // the value tagged "cwd: "
+      printf("script ran in directory [%s]\n", scriptCwd)
+      val scriptCp = findTaggedLine("classpath", scriptOutput) // the value tagged "classpath: "
+
+      // convert scriptCp to a list of files
+      val hashbangJars: List[File] = scriptCp.split(psep).map { _.toFile }.toList
+      val hashbangClasspathJars = hashbangJars.map { _.name }.sorted.distinct // get jar basenames, remove duplicates
+      val packlibDir = s"$scriptCwd/$packLibDir" // classpathReport.sc specifies a wildcard classpath in this directory 
+      val packlibJars: List[File] = listJars(packlibDir) // classpath entries expected to have been reported by the script
+
+      printf("%d jar files in dist/target/pack/lib\n", packlibJars.size)
+      printf("%d test script jars in classpath\n", hashbangClasspathJars.size)
+
+      val (diff: Set[File], msg: String) = if (packlibJars.size > hashbangClasspathJars.size) {
+        (packlibJars.toSet -- hashbangJars.toSet , "only in packlib classpath")
+      } else {
+        (hashbangJars.toSet -- packlibJars.toSet , "only in hashbang classpath")
+      }
+      // verify that the script hashbang classpath setting was effective at supplementing the classpath
+      // (a minimal subset of jars below dist/target/pack/lib will always be in the classpath)
+      val missingClasspathEntries = if hashbangClasspathJars.size != packlibJars.size then
+        printf("packlib dir [%s]\n", packlibDir)
+        printf("hashbangClasspathJars: %s\n", hashbangJars.map { _.relpath.norm }.mkString("\n ", "\n ", ""))
+        printf("# %s\n", msg)
+        diff.foreach { (f: File) => printf(" %s\n", f.relpath.norm) }
+      else
+        Set.empty[String]
+
+      assert(hashbangClasspathJars.size == packlibJars.size)
+  }
+  /*
+   * verify classpath is unglobbed by MainGenericRunner.
+   */
+  @Test def unglobClasspathVerifyTest = {
+    val testScriptName = "unglobClasspath.sc"
+    val testScript = scripts("/scripting").find { _.name.matches(testScriptName) } match
+      case None => sys.error(s"test script not found: ${testScriptName}")
+      case Some(file) => file
+
+    val relpath = testScript.toPath.relpath.norm
+    printf("===> unglobClasspathVerifyTest for script [%s]\n", relpath)
+    printf("bash is [%s]\n", bashExe)
+
+    if packBinScalaExists then
+      val bashCmdline = s"set +x ; SCALA_OPTS= $relpath"
+      val cmd = Array(bashExe, "-c", bashCmdline)
+
+      cmd.foreach { printf("[%s]\n", _) }
+
+      // test script reports the classpath it sees
+      val scriptOutput = exec(cmd:_*)
+      val scriptCp = findTaggedLine("unglobbed classpath", scriptOutput)
+      printf("%s\n", scriptCp)
+      val classpathJars = scriptCp.split(psep).map { _.getName }.sorted.distinct
+      //classpathJars.foreach { printf("%s\n", _) }
+      assert(classpathJars.size > 1)
+  }
+
diff --git a/compiler/test/dotty/tools/scripting/ExpressionTest.scala b/compiler/test/dotty/tools/scripting/ExpressionTest.scala
new file mode 100755
index 000000000000..136007af835d
--- /dev/null
+++ b/compiler/test/dotty/tools/scripting/ExpressionTest.scala
@@ -0,0 +1,56 @@
+package dotty
+package tools
+package scripting
+
+import java.nio.file.Paths
+import org.junit.{Test, AfterClass}
+import org.junit.Assert.assertEquals
+
+import vulpix.TestConfiguration
+
+import ScriptTestEnv.*
+
+/** 
+ *   +. test scala -e <expression>
+ */
+class ExpressionTest:
+  /*
+   * verify -e <expression> works.
+   */
+  @Test def verifyCommandLineExpression =
+    printf("===> verify -e  is properly handled by `dist/bin/scala`\n")
+    val expected = "9"
+    val expression = s"println(3*3)"
+    val result = getResult(expression)
+    assert(result.contains(expected), s"expression [$expression] did not send [$expected] to stdout")
+
+  @Test def verifyImports: Unit =
+    val expressionLines = List(
+      "import java.nio.file.Paths",
+      "import scala.util.Properties.userDir",
+      "println(Paths.get(userDir).toFile.listFiles.toList.filter(_.isDirectory).size)",
+    )
+    val expression = expressionLines.mkString(";")
+    val success = testExpression(expression){ result =>
+      result.matches("[0-9]+") && result.toInt > 0
+    }
+    assert(success)
+
+  def getResult(expression: String): String =
+    val (_, _, stdout, stderr) = bashCommand(s"$scalaPath -e '$expression'")
+    printf("stdout: %s\n", stdout.mkString("|"))
+    printf("stderr: %s\n", stderr.mkString("\n", "\n", ""))
+    stdout.filter(_.nonEmpty).mkString("")
+    
+  def testExpression(expression: String)(check: (result: String) => Boolean): Boolean =
+    val result = getResult(expression)
+    check(result)
+
+object ExpressionTest:
+
+  def main(args: Array[String]): Unit =
+    val tests = new ExpressionTest
+    println("\n=== verifyCommandLineExpression ===")
+    tests.verifyCommandLineExpression
+    println("\n=== verifyImports ===")
+    tests.verifyImports
diff --git a/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala b/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala
new file mode 100644
index 000000000000..15bcf4034a5f
--- /dev/null
+++ b/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala
@@ -0,0 +1,292 @@
+package dotty
+package tools
+package scripting
+
+import java.io.File
+import java.nio.file.{Path, Paths, Files}
+
+import dotty.tools.dotc.config.Properties.*
+
+import scala.sys.process.*
+import scala.jdk.CollectionConverters.*
+
+/**
+ * Common Code for supporting scripting tests.
+ * To override the path to the bash executable, set TEST_BASH=<path-to-bash>
+ * To specify where `dist/target/pack/bin` resides, set TEST_CWD=<working-directory>
+ * Test scripts run in a bash env, so paths are converted to forward slash via .norm.
+ */
+object ScriptTestEnv {
+  def osname: String = sys.props("os.name").toLowerCase
+  def psep: String = sys.props("path.separator")
+  def userDir: String = sys.props("user.dir").norm
+  def testCwd = envOrElse("TEST_CWD", "").norm // optional working directory TEST_CWD
+  def verbose = envOrElse("VERBOSE", "").nonEmpty
+
+  def whichJava: String = whichExe("java")
+  def whichBash: String = whichExe("bash")
+
+  lazy val workingDirectory: String = {
+    val dirstr = if testCwd.nonEmpty then
+      if verbose then printf("TEST_CWD set to [%s]\n", testCwd)
+      testCwd
+    else 
+      userDir // userDir, if TEST_CWD not set
+
+    // issue warning if things don't look right
+    val test = Paths.get(s"$dirstr/dist/target/pack/bin").normalize
+    if !test.isDirectory then
+      printf("warning: not found below working directory: %s\n", test.norm)
+
+    if verbose then printf("working directory is [%s]\n", dirstr)
+    dirstr
+  }
+
+  def envPath: String = envOrElse("PATH", "")
+  // remove duplicate entries in path
+  def supplementedPath: String = s"dist/target/pack/bin$psep$envJavaHome/bin$psep$envScalaHome/bin$psep$envPath".norm
+  def adjustedPathEntries: List[String] = supplementedPath.norm.split(psep).toList.distinct
+  def adjustedPath: String = adjustedPathEntries.mkString(psep)
+  def envPathEntries: List[String] = envPath.split(psep).toList.distinct
+
+  def bashExe: String = envOrElse("TEST_BASH", whichBash)
+
+  def unameExe = which("uname")
+  def ostypeFull = if unameExe.nonEmpty then exec(unameExe).mkString else ""
+  def ostype = ostypeFull.toLowerCase.takeWhile{ cc => cc >= 'a' && cc <='z' || cc >= 'A' && cc <= 'Z' }
+
+  def cygwin = ostype == "cygwin"
+  def mingw = ostype == "mingw"
+  def msys = ostype == "msys"
+  def winshell: Boolean = cygwin || mingw || msys
+
+  def which(str: String) =
+    var out = ""
+    // must not use adjusted path here! (causes recursive call / stack overflow)
+    envPathEntries.find { entry =>
+      val it = Paths.get(entry).toAbsolutePath.normalize
+      it.toFile.isDirectory && {
+        var testpath = s"$it/$str".norm
+        val test = Paths.get(testpath)
+        if test.toFile.exists then
+          out = testpath
+          true
+        else
+          val test = Paths.get(s"$it/$str.exe".norm)
+          if test.toFile.exists then
+            out = testpath
+            true
+          else
+            false
+        }
+      }
+    out
+
+  def whichExe(basename: String): String = 
+    val exeName = if (osname.toLowerCase.startsWith("windows")) s"$basename.exe" else basename
+    which(exeName)
+
+  /* returned values are:
+   * validTest: Boolean   - false if permissions problems occur, true otherwise
+   * exitVal:   Int       - the conventional return error code, where zero implies "no errors".
+   * stdout: Seq[String]  - the lines captured from STDOUT
+   * stderr: Seq[String]  - the lines captured from STDERR
+   */
+  def bashCommand(cmdstr: String, additionalEnvPairs: List[(String, String)] = Nil): (Boolean, Int, Seq[String], Seq[String]) = {
+    var (stdout, stderr) = (List.empty[String], List.empty[String])
+    if bashExe.toFile.exists then
+      def q = "\""
+      printf("bashCmd: %s -c %s\n", bashExe, s"$q$cmdstr$q")
+      val cmd = Seq(bashExe, "-c", cmdstr)
+      val envPairs = testEnvPairs ++ additionalEnvPairs
+      val proc = Process(cmd, None, envPairs *)
+      val exitVal = proc ! ProcessLogger (
+        (out: String) => stdout ::= out,
+        (err: String) => stderr ::= err
+      )
+      // a misconfigured environment (e.g., script is not executable) can prevent script execution
+      val validTest = !stderr.exists(_.contains("Permission denied"))
+      if ! validTest then
+        System.err.printf("\nunable to execute script, return value is %d\n", exitVal)
+        stderr.foreach { System.err.printf("stderr [%s]\n", _) }
+
+      (validTest, exitVal, stdout.reverse, stderr.reverse)
+    else
+      (false, -1, Nil, Nil)
+  }
+
+  def execCmd(command: String, options: String *): Seq[String] =
+    val cmd = (command :: options.toList).toSeq
+    for {
+      line <- Process(cmd).lazyLines_!
+    } yield line
+
+
+  def packBinDir = "dist/target/pack/bin"
+  def packLibDir = "dist/target/pack/lib"
+  def packBinScalaExists: Boolean = Files.exists(Paths.get(s"$packBinDir/scala"))
+
+  def listJars(dir: String): List[File] =
+    val packlibDir = Paths.get(dir).toFile
+    if packlibDir.isDirectory then
+      packlibDir.listFiles.toList.filter { _.getName.endsWith(".jar") }
+    else
+      Nil
+
+  // script output expected as "<tag>: <value>"
+  def findTaggedLine(tag: String, lines: Seq[String]): String =
+    lines.map { stripColors(_) }.find { _.startsWith(tag) } match
+      case None =>
+        lines.foreach { System.err.printf("line[%s]\n", _) }
+        sys.error(s"no $tag: found in script output")
+      case Some(cwd) => cwd.dropWhile( _ != ' ').trim // discard tag
+
+  def stripColors(line:String): String =
+    // ESC has been seen in the wild replaced by "\u2190"
+    // Also, BOM marker appears as "\uFEFF" (character appears lost in transcription -- TODO confirm against regex below)
+    lazy val colorsRegex = "(\u001b|\u2190)\\[[0-9;]*m|".r
+    colorsRegex.replaceAllIn(line,"")
+
+  def exec(cmd: String *): Seq[String] = Process(cmd).lazyLines_!.toList
+
+  def script2jar(scriptFile: File) = 
+    val jarName = s"${scriptFile.getName.dropExtension}.jar"
+    File(scriptFile.getParent, jarName)
+
+  def showScriptUnderTest(scriptFile: File): Unit =
+    printf("===> test script name [%s]\n", scriptFile.getName)
+
+  def callExecutableJar(script: File, jar: File, scriptArgs: Array[String] = Array.empty[String]) = {
+    import scala.sys.process._
+    val cmd = Array("java", s"-Dscript.path=${script.getName}", "-jar", jar.absPath)
+      ++ scriptArgs
+    Process(cmd).lazyLines_!.foreach { println }
+  }
+
+  ////////////////////////////////////////////////////////////////////////////////
+
+  def createArgsFile(): String =
+    val utfCharset = java.nio.charset.StandardCharsets.UTF_8.name
+    val path = Files.createTempFile("scriptingTest", ".args")
+    val text = s"-classpath ${workingDirectory.absPath}"
+    Files.write(path, text.getBytes(utfCharset))
+    path.toFile.getAbsolutePath.norm
+
+  def fixHome(s: String): String =
+    s.startsWith("~") match {
+      case false => s
+      case true => s.replaceFirst("~", userHome)
+    }
+
+  extension(s: String) {
+    def norm: String = s.replace('\\', '/') // bash expects forward slash
+    def noDrive = if s.secondChar == ":" then s.drop(2).norm else s.norm
+    def toPath: Path = Paths.get(fixHome(s)) // .toAbsolutePath
+    def toFile: File = File(s)
+    def absPath: String = s.toFile.absPath
+    def relpath: String = s.norm.replaceFirst(s"${cwd.norm}/","")
+    def isFile: Boolean = s.toFile.isFile
+    def isDirectory: Boolean = s.toFile.isDirectory
+    def exists: Boolean = s.toFile.exists
+    def name: String = s.toFile.getName
+    def getName: String = s.toFile.getName
+    def dropExtension: String = s.reverse.dropWhile(_ != '.').drop(1).reverse
+    def parent(up: Int): String = s.norm.split("/").reverse.drop(up).reverse.mkString("/")
+    def secondChar: String = s.take(2).drop(1).mkString("")
+  }
+
+  extension(p: Path) {
+    def norm: String = p.normalize.toString.replace('\\', '/')
+    def noDrive = p.norm match {
+      case str if str.drop(1).take(1) == ":" => str.drop(2)
+      case str => str
+    }
+    def name: String = p.toFile.getName
+    def relpath: Path = cwd.relativize(p).normalize
+    def files: Seq[File] = p.toFile.files
+    def parent: String = norm.replaceAll("/[^/]*$", "")
+
+    // convert to absolute path relative to cwd.
+    def absPath: String = if (p.isAbsolute) p.norm else Paths.get(userDir, p.norm).norm
+
+    def isDir: Boolean = Files.isDirectory(p)
+    def isDirectory: Boolean = p.toFile.isDirectory
+    def isFile: Boolean = p.toFile.isFile
+
+    def toUrl: String = Paths.get(absPath).toUri.toURL.toString
+
+    // Treat norm paths with a leading '/' as absolute (Windows java.io.File#isAbsolute treats them as relative)
+    def isAbsolute = p.norm.startsWith("/") || (isWin && p.norm.secondChar == ":")
+  }
+
+  extension(f: File) {
+    def name = f.getName
+    def norm: String = f.toPath.normalize.norm
+    def absPath: String = f.getAbsolutePath.norm
+    def relpath: Path = f.toPath.relpath
+    def files: Seq[File] = f.listFiles.toList
+    def parentDir: Path = f.toPath.getParent
+  }
+
+  lazy val cwd: Path = Paths.get(".").toAbsolutePath.normalize
+
+  lazy val (scalacPath: String, scalaPath: String) = {
+    val scalac = s"$workingDirectory/dist/target/pack/bin/scalac".toPath.normalize
+    val scala = s"$workingDirectory/dist/target/pack/bin/scala".toPath.normalize
+    (scalac.norm, scala.norm)
+  }
+    
+
+  // use optional TEST_BASH if defined, otherwise, bash must be in PATH
+
+  // envScalaHome is:
+  //    dist/target/pack, if present
+  //    else, SCALA_HOME if defined
+  //    else, not defined
+  lazy val envScalaHome =
+    printf("scalacPath: %s\n", scalacPath.norm)
+    if scalacPath.isFile then scalacPath.replaceAll("/bin/scalac", "")
+    else envOrElse("SCALA_HOME", "not-found").norm
+
+  lazy val javaParent: String = whichJava.parent(2)
+  lazy val envJavaHome: String = envOrElse("JAVA_HOME", javaParent)
+  lazy val cyghome = envOrElse("CYGWIN", "")
+  lazy val msyshome = envOrElse("MSYS", "")
+
+  // remove xtrace, if present, add :igncr: if not present
+  lazy val shellopts: String = {
+    val value: String = envOrElse("SHELLOPTS", "braceexpand:hashall:igncr:ignoreeof:monitor:vi")
+    val list: List[String] = value.split(":").toList
+    val minlist = list.filter {
+      case "igncr" | "xtrace" => false
+      case _ => true
+    }
+    if isWin then
+      "igncr" :: minlist
+    else
+      minlist
+  }.mkString(":")
+
+  lazy val testEnvPairs = {
+    val pairs = List(
+      ("JAVA_HOME", envJavaHome),
+      ("SCALA_HOME", envScalaHome),
+      ("PATH", adjustedPath),
+      ("CYGWIN", cyghome),
+      ("MSYS", msyshome),
+      ("SHELLOPTS", shellopts),
+    ).filter { case (name, valu) => valu.nonEmpty }
+    if verbose then for (k, v) <- pairs do printf("%s : %s\n", k ,v)
+    pairs
+  }
+
+  // if unable to execute bash commands, this prevents invalid tests from failing
+  lazy val passInvalidTests = envOrElse("PASS_INVALID_TESTS", "").nonEmpty
+
+  def verifyValid(validTest: Boolean): Boolean =
+      // !validTest implies unable to execute scripts via bash (e.g., permissions, or bash not found, etc.)
+    if !validTest && !passInvalidTests then
+      assert(validTest == true, s"unable to call script via bash -c")
+
+    validTest
+}
diff --git a/compiler/test/dotty/tools/scripting/ScriptingTests.scala b/compiler/test/dotty/tools/scripting/ScriptingTests.scala
index e7399c68f09a..7ddab2b55424 100644
--- a/compiler/test/dotty/tools/scripting/ScriptingTests.scala
+++ b/compiler/test/dotty/tools/scripting/ScriptingTests.scala
@@ -3,57 +3,23 @@ package tools
 package scripting
 
 import java.io.File
+import java.nio.file.Path
 
 import org.junit.Test
 
 import vulpix.TestConfiguration
-
+import ScriptTestEnv.*
 
 /** Runs all tests contained in `compiler/test-resources/scripting/` */
 class ScriptingTests:
-  extension (str: String) def dropExtension =
-    str.reverse.dropWhile(_ != '.').drop(1).reverse
-
-  extension(f: File) def absPath =
-    f.getAbsolutePath.replace('\\','/')
-
-  def testFiles = scripts("/scripting")
-
-  def script2jar(scriptFile: File) = 
-    val jarName = s"${scriptFile.getName.dropExtension}.jar"
-    File(scriptFile.getParent,jarName)
-
-  def showScriptUnderTest(scriptFile: File): Unit =
-    printf("===> test script name [%s]\n",scriptFile.getName)
-
-  val argss: Map[String, Array[String]] = (
-    for
-      argFile <- testFiles
-      if argFile.getName.endsWith(".args")
-      name = argFile.getName.dropExtension
-      scriptArgs = readLines(argFile).toArray
-    yield name -> scriptArgs).toMap
-
-  def scalaFilesWithArgs(extension: String) = (
-    for
-      scriptFile <- testFiles
-      if scriptFile.getName.endsWith(extension)
-      name = scriptFile.getName.dropExtension
-      scriptArgs = argss.getOrElse(name, Array.empty[String])
-    yield scriptFile -> scriptArgs).toList.sortBy { (file,args) => file.getName }
-
-  def callExecutableJar(script: File,jar: File, scriptArgs: Array[String] = Array.empty[String]) = {
-    import scala.sys.process._
-    val cmd = Array("java",s"-Dscript.path=${script.getName}","-jar",jar.absPath)
-      ++ scriptArgs
-    Process(cmd).lazyLines_!.foreach { println }
-  }
+  // classpath tests managed by scripting.ClasspathTests.scala
+  def testFiles = scripts("/scripting").filter { ! _.getName.toLowerCase.contains("classpath") }
 
   /*
    * Call .scala scripts without -save option, verify no jar created
    */
   @Test def scriptingDriverTests =
-    for (scriptFile,scriptArgs) <- scalaFilesWithArgs(".scala") do
+    for (scriptFile, scriptArgs) <- scalaFilesWithArgs(".scala") do
       showScriptUnderTest(scriptFile)
       val unexpectedJar = script2jar(scriptFile)
       unexpectedJar.delete
@@ -65,17 +31,17 @@ class ScriptingTests:
         ),
         scriptFile = scriptFile,
         scriptArgs = scriptArgs
-      ).compileAndRun { (path:java.nio.file.Path,classpath:String, mainClass:String) =>
-        printf("mainClass from ScriptingDriver: %s\n",mainClass)
+      ).compileAndRun { (path:java.nio.file.Path, classpathEntries:Seq[Path], mainClass:String) =>
+        printf("mainClass from ScriptingDriver: %s\n", mainClass)
         true // call compiled script main method
       }
-      assert(! unexpectedJar.exists, s"not expecting jar file: ${unexpectedJar.absPath}")
+      assert( !unexpectedJar.exists, s"not expecting jar file: ${unexpectedJar.absPath}" )
 
   /*
    * Call .sc scripts without -save option, verify no jar created
    */
   @Test def scriptingMainTests =
-    for (scriptFile,scriptArgs) <- scalaFilesWithArgs(".sc") do
+    for (scriptFile, scriptArgs) <- scalaFilesWithArgs(".sc") do
       showScriptUnderTest(scriptFile)
       val unexpectedJar = script2jar(scriptFile)
       unexpectedJar.delete
@@ -87,13 +53,13 @@ class ScriptingTests:
       ) ++ scriptArgs
 
       Main.main(mainArgs)
-      assert(! unexpectedJar.exists, s"not expecting jar file: ${unexpectedJar.absPath}")
+      assert( !unexpectedJar.exists, s"not expecting jar file: ${unexpectedJar.absPath}")
 
   /*
    * Call .sc scripts with -save option, verify jar is created.
    */
   @Test def scriptingJarTest =
-    for (scriptFile,scriptArgs) <- scalaFilesWithArgs(".sc") do
+    for (scriptFile, scriptArgs) <- scalaFilesWithArgs(".sc") do
       showScriptUnderTest(scriptFile)
       val expectedJar = script2jar(scriptFile)
       expectedJar.delete
@@ -107,7 +73,7 @@ class ScriptingTests:
 
       Main.main(mainArgs)
 
-      printf("===> test script jar name [%s]\n",expectedJar.getName)
+      printf("===> test script jar name [%s]\n", expectedJar.getName)
       assert(expectedJar.exists)
 
       callExecutableJar(scriptFile, expectedJar, scriptArgs)
@@ -123,13 +89,13 @@ class ScriptingTests:
     // verify main method not called when false is returned
     printf("testing script compile, with no call to script main method.\n")
     touchedFile.delete
-    assert(!touchedFile.exists, s"unable to delete ${touchedFile}")
+    assert( !touchedFile.exists, s"unable to delete ${touchedFile}" )
     ScriptingDriver(
       compilerArgs = Array("-classpath", TestConfiguration.basicClasspath),
       scriptFile = scriptFile,
       scriptArgs = Array.empty[String]
-    ).compileAndRun { (path:java.nio.file.Path,classpath:String, mainClass:String) =>
-      printf("success: no call to main method in mainClass: %s\n",mainClass)
+    ).compileAndRun { (path:java.nio.file.Path, classpathEntries:Seq[Path], mainClass:String) =>
+      printf("success: no call to main method in mainClass: %s\n", mainClass)
       false // no call to compiled script main method
     }
     touchedFile.delete
@@ -141,14 +107,14 @@ class ScriptingTests:
       compilerArgs = Array("-classpath", TestConfiguration.basicClasspath),
       scriptFile = scriptFile,
       scriptArgs = Array.empty[String]
-    ).compileAndRun { (path:java.nio.file.Path,classpath:String, mainClass:String) =>
-      printf("call main method in mainClass: %s\n",mainClass)
+    ).compileAndRun { (path:java.nio.file.Path, classpathEntries:Seq[Path], mainClass:String) =>
+      printf("call main method in mainClass: %s\n", mainClass)
       true // call compiled script main method, create touchedFile
     }
 
     if touchedFile.exists then
-      printf("success: script created file %s\n",touchedFile)
-    if touchedFile.exists then printf("success: created file %s\n",touchedFile)
+      printf("success: script created file %s\n", touchedFile)
+    if touchedFile.exists then printf("success: created file %s\n", touchedFile)
     assert( touchedFile.exists, s"expected to find file ${touchedFile}" )
    
   /*
@@ -168,16 +134,48 @@ class ScriptingTests:
 
     expectedJar.delete
     Main.main(mainArgs) // create executable jar
-    printf("===> test script jar name [%s]\n",expectedJar.getName)
-    assert(expectedJar.exists,s"unable to create executable jar [$expectedJar]")
+    printf("===> test script jar name [%s]\n", expectedJar.getName)
+    assert(expectedJar.exists, s"unable to create executable jar [$expectedJar]")
 
     touchedFile.delete
-    assert(!touchedFile.exists,s"unable to delete ${touchedFile}")
-    printf("calling executable jar %s\n",expectedJar)
+    assert( !touchedFile.exists, s"unable to delete ${touchedFile}" )
+    printf("calling executable jar %s\n", expectedJar)
     callExecutableJar(scriptFile, expectedJar)
     if touchedFile.exists then
-      printf("success: executable jar created file %s\n",touchedFile)
+      printf("success: executable jar created file %s\n", touchedFile)
     assert( touchedFile.exists, s"expected to find file ${touchedFile}" )
 
+///////////////////////////////////
   def touchFileScript = testFiles.find(_.getName == "touchFile.sc").get
+
   def touchedFile = File("touchedFile.out")
+
+  def script2jar(scriptFile: File) = 
+    val jarName = s"${scriptFile.getName.dropExtension}.jar"
+    File(scriptFile.getParent, jarName)
+
+  def showScriptUnderTest(scriptFile: File): Unit =
+    printf("===> test script name [%s]\n", scriptFile.getName)
+
+  def argss: Map[String, Array[String]] = (
+    for
+      argFile <- testFiles
+      if argFile.getName.endsWith(".args")
+      name = argFile.getName.dropExtension
+      scriptArgs = readLines(argFile).toArray
+    yield name -> scriptArgs).toMap
+
+  def scalaFilesWithArgs(extension: String) = (
+    for
+      scriptFile <- testFiles
+      if scriptFile.getName.endsWith(extension)
+      name = scriptFile.getName.dropExtension
+      scriptArgs = argss.getOrElse(name, Array.empty[String])
+    yield scriptFile -> scriptArgs).toList.sortBy { (file, args) => file.getName }
+
+  def callExecutableJar(script: File, jar: File, scriptArgs: Array[String] = Array.empty[String]) = {
+    import scala.sys.process._
+    val cmd = Array("java", s"-Dscript.path=${script.getName}", "-jar", jar.absPath)
+      ++ scriptArgs
+    Process(cmd).lazyLines_!.foreach { println }
+  }
diff --git a/compiler/test/dotty/tools/utils.scala b/compiler/test/dotty/tools/utils.scala
index b7023a577234..388715f45cc2 100644
--- a/compiler/test/dotty/tools/utils.scala
+++ b/compiler/test/dotty/tools/utils.scala
@@ -1,19 +1,69 @@
-package dotty.tools
+package dotty
+package tools
 
 import java.io.File
+import java.nio.charset.Charset
 import java.nio.charset.StandardCharsets.UTF_8
+import java.nio.file.{Files, Path => JPath}
 
 import scala.io.Source
+import scala.jdk.StreamConverters._
+import scala.reflect.ClassTag
 import scala.util.Using.resource
+import scala.util.chaining.given
+import scala.util.control.{ControlThrowable, NonFatal}
 
 def scripts(path: String): Array[File] = {
   val dir = new File(getClass.getResource(path).getPath)
   assert(dir.exists && dir.isDirectory, "Couldn't load scripts dir")
-  dir.listFiles
+  dir.listFiles.filter { f =>
+    val path = if f.isDirectory then f.getPath + "/" else f.getPath
+    Properties.testsFilter.isEmpty || Properties.testsFilter.exists(path.contains)
+  }
 }
 
-private def withFile[T](file: File)(action: Source => T): T =
-  resource(Source.fromFile(file, UTF_8.name))(action)
+extension (f: File) def absPath =
+  f.getAbsolutePath.replace('\\', '/')
 
-def readLines(f: File): List[String] = withFile(f)(_.getLines.toList)
-def readFile(f: File): String = withFile(f)(_.mkString)
+extension (str: String) def dropExtension =
+  str.reverse.dropWhile(_ != '.').drop(1).reverse
+
+private
+def withFile[T](file: File)(action: Source => T): T = resource(Source.fromFile(file, UTF_8.name))(action)
+def readLines(f: File): List[String]                = withFile(f)(_.getLines.toList)
+def readFile(f: File): String                       = withFile(f)(_.mkString)
+
+private object Unthrown extends ControlThrowable
+
+def assertThrows[T <: Throwable: ClassTag](p: T => Boolean)(body: => Any): Unit =
+  try
+    body
+    throw Unthrown
+  catch
+    case Unthrown        => throw AssertionError("Expression did not throw!")
+    case e: T if p(e)    => ()
+    case failed: T       => throw AssertionError(s"Exception failed check: $failed").tap(_.addSuppressed(failed))
+    case NonFatal(other) => throw AssertionError(s"Wrong exception: expected ${implicitly[ClassTag[T]]} but was ${other.getClass.getName}").tap(_.addSuppressed(other))
+end assertThrows
+
+def toolArgsFor(files: List[JPath], charset: Charset = UTF_8): List[String] =
+  files.flatMap(path => toolArgsParse(resource(Files.lines(path, charset))(_.limit(10).toScala(List))))
+
+// Inspect the first 10 of the given lines for compiler options of the form
+// `// scalac: args`, `/* scalac: args`, ` * scalac: args`.
+// If args string ends in close comment, drop the `*` `/`.
+// If split, parse the args string as a command line.
+// (from scala.tools.partest.nest.Runner#toolArgsFor)
+def toolArgsParse(lines: List[String]): List[String] = {
+  val tag  = "scalac:"
+  val endc = "*" + "/"    // be forgiving of /* scalac: ... */
+  def stripped(s: String) = s.substring(s.indexOf(tag) + tag.length).stripSuffix(endc)
+  val args = lines.to(LazyList).take(10).filter { s =>
+       s.contains("//" + tag)
+    || s.contains("// " + tag)
+    || s.contains("/* " + tag)
+    || s.contains(" * " + tag)
+    // but avoid picking up comments like "% scalac ./a.scala" and "$ scalac a.scala"
+  }.map(stripped).headOption
+  args.map(dotc.config.CommandLineParser.tokenize).getOrElse(Nil)
+}
diff --git a/compiler/test/dotty/tools/vulpix/ChildJVMMain.java b/compiler/test/dotty/tools/vulpix/ChildJVMMain.java
index 65e40f3bbbac..a6873ead1968 100644
--- a/compiler/test/dotty/tools/vulpix/ChildJVMMain.java
+++ b/compiler/test/dotty/tools/vulpix/ChildJVMMain.java
@@ -14,7 +14,7 @@ public class ChildJVMMain {
 
     private static void runMain(String dir) throws Exception {
         Method meth = null;
-        Object[] args = new Object[]{ new String[]{ "jvm" } };
+        Object[] args = new Object[]{ new String[]{ } };
         try {
             String jcp = System.getProperty("java.class.path");
             String sep = File.pathSeparator;
diff --git a/compiler/test/dotty/tools/vulpix/FileDiff.scala b/compiler/test/dotty/tools/vulpix/FileDiff.scala
index af44dbf2076d..4cb68005ecb7 100644
--- a/compiler/test/dotty/tools/vulpix/FileDiff.scala
+++ b/compiler/test/dotty/tools/vulpix/FileDiff.scala
@@ -63,4 +63,24 @@ object FileDiff {
     }
   }
 
+  def checkAndDumpOrUpdate(sourceTitle: String, actualLines: Seq[String], checkFilePath: String): Boolean = {
+    val outFilePath = checkFilePath + ".out"
+    FileDiff.check(sourceTitle, actualLines, checkFilePath) match {
+      case Some(msg) if dotty.Properties.testsUpdateCheckfile =>
+        Files.deleteIfExists(Paths.get(outFilePath))
+        if actualLines.isEmpty
+          then Files.deleteIfExists(Paths.get(checkFilePath))
+          else FileDiff.dump(checkFilePath, actualLines)
+        println("Updated checkfile: " + checkFilePath)
+        true
+      case Some(msg) =>
+        FileDiff.dump(outFilePath, actualLines)
+        println(msg)
+        println(FileDiff.diffMessage(checkFilePath, outFilePath))
+        false
+      case _ =>
+        Files.deleteIfExists(Paths.get(outFilePath))
+        true
+    }
+  }
 }
diff --git a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala
index 5855af9f5b04..bd30e7fff98e 100644
--- a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala
+++ b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala
@@ -2,31 +2,33 @@ package dotty
 package tools
 package vulpix
 
-import java.io.{File => JFile, IOException}
+import java.io.{File => JFile, IOException, PrintStream, ByteArrayOutputStream}
 import java.lang.System.{lineSeparator => EOL}
+import java.net.URL
 import java.nio.file.StandardCopyOption.REPLACE_EXISTING
 import java.nio.file.{Files, NoSuchFileException, Path, Paths}
-import java.nio.charset.StandardCharsets
+import java.nio.charset.{Charset, StandardCharsets}
 import java.text.SimpleDateFormat
 import java.util.{HashMap, Timer, TimerTask}
 import java.util.concurrent.{TimeUnit, TimeoutException, Executors => JExecutors}
 
 import scala.collection.mutable
-import scala.io.Source
+import scala.io.{Codec, Source}
+import scala.jdk.CollectionConverters.*
 import scala.util.{Random, Try, Failure => TryFailure, Success => TrySuccess, Using}
 import scala.util.control.NonFatal
 import scala.util.matching.Regex
 import scala.collection.mutable.ListBuffer
 
 import dotc.{Compiler, Driver}
-import dotc.core.Contexts._
+import dotc.core.Contexts.*
 import dotc.decompiler
 import dotc.report
 import dotc.interfaces.Diagnostic.ERROR
 import dotc.reporting.{Reporter, TestReporter}
 import dotc.reporting.Diagnostic
 import dotc.config.Config
-import dotc.util.DiffUtil
+import dotc.util.{DiffUtil, SourceFile, SourcePosition, Spans, NoSourcePosition}
 import io.AbstractFile
 import dotty.tools.vulpix.TestConfiguration.defaultOptions
 
@@ -44,11 +46,11 @@ trait ParallelTesting extends RunnerOrchestration { self =>
    */
   def isInteractive: Boolean
 
-  /** A string which is used to filter which tests to run, if `None` will run
-   *  all tests. All absolute paths that contain the substring `testFilter`
+  /** A list of strings which is used to filter which tests to run, if `Nil` will run
+   *  all tests. All absolute paths that contain any of the substrings in `testFilter`
    *  will be run
    */
-  def testFilter: Option[String]
+  def testFilter: List[String]
 
   /** Tests should override the checkfiles with the current output */
   def updateCheckFiles: Boolean
@@ -127,10 +129,10 @@ trait ParallelTesting extends RunnerOrchestration { self =>
           }
           sb.toString + "\n\n"
         }
-        case self: SeparateCompilationSource => {
+        case self: SeparateCompilationSource => { // TODO: this won't work when using other versions of compiler
           val command = sb.toString
           val fsb = new StringBuilder(command)
-          self.compilationGroups.foreach { files =>
+          self.compilationGroups.foreach { (_, files) =>
             files.map(_.getPath).foreach { path =>
               fsb.append(delimiter)
               lineLen = 8
@@ -173,31 +175,28 @@ trait ParallelTesting extends RunnerOrchestration { self =>
     flags: TestFlags,
     outDir: JFile
   ) extends TestSource {
-
-    /** Get the files grouped by `_X` as a list of groups, files missing this
-     *  suffix will be put into the same group.
-     *  Files in each group are sorted alphabetically.
-     *
-     *  Filters out all none source files
-     */
-    def compilationGroups: List[Array[JFile]] =
-      dir
-      .listFiles
-      .groupBy { file =>
-        val name = file.getName
-        Try {
-          val potentialNumber = name
-            .substring(0, name.lastIndexOf('.'))
-            .reverse.takeWhile(_ != '_').reverse
-
-          potentialNumber.toInt.toString
-        }
-        .toOption
-        .getOrElse("")
-      }
-      .toList.sortBy(_._1).map(_._2.filter(isSourceFile).sorted)
-
-    def sourceFiles: Array[JFile] = compilationGroups.flatten.toArray
+    case class Group(ordinal: Int, compiler: String, release: String)
+
+    lazy val compilationGroups: List[(Group, Array[JFile])] =
+      val Release = """r([\d\.]+)""".r
+      val Compiler = """c([\d\.]+)""".r
+      val Ordinal = """(\d+)""".r
+      def groupFor(file: JFile): Group =
+        val groupSuffix = file.getName.dropWhile(_ != '_').stripSuffix(".scala").stripSuffix(".java")
+        val groupSuffixParts = groupSuffix.split("_")
+        val ordinal = groupSuffixParts.collectFirst { case Ordinal(n) => n.toInt }.getOrElse(Int.MinValue)
+        val release = groupSuffixParts.collectFirst { case Release(r) => r }.getOrElse("")
+        val compiler = groupSuffixParts.collectFirst { case Compiler(c) => c }.getOrElse("")
+        Group(ordinal, compiler, release)
+
+      dir.listFiles
+        .filter(isSourceFile)
+        .groupBy(groupFor)
+        .toList
+        .sortBy { (g, _) => (g.ordinal, g.compiler, g.release) }
+        .map { (g, f) => (g, f.sorted) }
+
+    def sourceFiles = compilationGroups.map(_._2).flatten.toArray
   }
 
   private trait CompilationLogic { this: Test =>
@@ -216,7 +215,13 @@ trait ParallelTesting extends RunnerOrchestration { self =>
           List(reporter)
 
         case testSource @ SeparateCompilationSource(_, dir, flags, outDir) =>
-          testSource.compilationGroups.map(files => compile(files, flags, suppressErrors, outDir))  // TODO? only `compile` option?
+          testSource.compilationGroups.map { (group, files) =>
+            val flags1 = if group.release.isEmpty then flags else flags.and("-Yscala-release", group.release)
+            if group.compiler.isEmpty then
+              compile(files, flags1, suppressErrors, outDir)
+            else
+              compileWithOtherCompiler(group.compiler, files, flags1, outDir)
+          }
       })
 
     final def countErrorsAndWarnings(reporters: Seq[TestReporter]): (Int, Int) =
@@ -271,7 +276,7 @@ trait ParallelTesting extends RunnerOrchestration { self =>
     /** This callback is executed once the compilation of this test source finished */
     private final def onComplete(testSource: TestSource, reportersOrCrash: Try[Seq[TestReporter]], logger: LoggedRunnable): Unit =
       reportersOrCrash match {
-        case TryFailure(exn) => onFailure(testSource, Nil, logger, Some(s"Fatal compiler crash when compiling: ${testSource.title}:\n${exn.getMessage}\n${exn.getStackTrace.mkString("\n")}"))
+        case TryFailure(exn) => onFailure(testSource, Nil, logger, Some(s"Fatal compiler crash when compiling: ${testSource.title}:\n${exn.getMessage}${exn.getStackTrace.map("\n\tat " + _).mkString}"))
         case TrySuccess(reporters) => maybeFailureMessage(testSource, reporters) match {
           case Some(msg) => onFailure(testSource, reporters, logger, Option(msg).filter(_.nonEmpty))
           case None => onSuccess(testSource, reporters, logger)
@@ -340,12 +345,12 @@ trait ParallelTesting extends RunnerOrchestration { self =>
 
     /** All testSources left after filtering out */
     private val filteredSources =
-      if (!testFilter.isDefined) testSources
+      if (testFilter.isEmpty) testSources
       else testSources.filter {
         case JointCompilationSource(_, files, _, _, _, _) =>
-          files.exists(file => file.getPath.contains(testFilter.get))
+          testFilter.exists(filter => files.exists(file => file.getPath.contains(filter)))
         case SeparateCompilationSource(_, dir, _, _) =>
-          dir.getPath.contains(testFilter.get)
+          testFilter.exists(dir.getPath.contains)
       }
 
     /** Total amount of test sources being compiled by this test */
@@ -441,15 +446,17 @@ trait ParallelTesting extends RunnerOrchestration { self =>
           throw e
 
     protected def compile(files0: Array[JFile], flags0: TestFlags, suppressErrors: Boolean, targetDir: JFile): TestReporter = {
-      val flags = flags0.and("-d", targetDir.getPath)
-        .withClasspath(targetDir.getPath)
-
       def flattenFiles(f: JFile): Array[JFile] =
         if (f.isDirectory) f.listFiles.flatMap(flattenFiles)
         else Array(f)
 
       val files: Array[JFile] = files0.flatMap(flattenFiles)
 
+      val flags = flags0
+        .and(toolArgsFor(files.toList.map(_.toPath), getCharsetFromEncodingOpt(flags0)): _*)
+        .and("-d", targetDir.getPath)
+        .withClasspath(targetDir.getPath)
+
       def compileWithJavac(fs: Array[String]) = if (fs.nonEmpty) {
         val fullArgs = Array(
           "javac",
@@ -500,6 +507,87 @@ trait ParallelTesting extends RunnerOrchestration { self =>
       reporter
     }
 
+    private def parseErrors(errorsText: String, compilerVersion: String, pageWidth: Int) =
+      val errorPattern = """^.*Error: (.*\.scala):(\d+):(\d+).*""".r
+      val brokenClassPattern = """^class file (.*) is broken.*""".r
+      val warnPattern = """^.*Warning: (.*\.scala):(\d+):(\d+).*""".r
+      val summaryPattern = """\d+ (?:warning|error)s? found""".r
+      val indent = "    "
+      var diagnostics = List.empty[Diagnostic.Error]
+      def barLine(start: Boolean) = s"$indent${if start then "╭" else "╰"}${"┄" * pageWidth}${if start then "╮" else "╯"}\n"
+      def errorLine(line: String) = s"$indent┆${String.format(s"%-${pageWidth}s", stripAnsi(line))}┆\n"
+      def stripAnsi(str: String): String = str.replaceAll("\u001b\\[\\d+m", "")
+      def addToLast(str: String): Unit =
+        diagnostics match
+          case head :: tail =>
+            diagnostics = Diagnostic.Error(s"${head.msg.rawMessage}$str", head.pos) :: tail
+          case Nil =>
+      var inError = false
+      for line <- errorsText.linesIterator do
+        line match
+          case error @ warnPattern(filePath, line, column) =>
+            inError = false
+          case error @ errorPattern(filePath, line, column) =>
+            inError = true
+            val lineNum = line.toInt
+            val columnNum = column.toInt
+            val abstractFile = AbstractFile.getFile(filePath)
+            val sourceFile = SourceFile(abstractFile, Codec.UTF8)
+            val offset = sourceFile.lineToOffset(lineNum - 1) + columnNum - 1
+            val span = Spans.Span(offset)
+            val sourcePos = SourcePosition(sourceFile, span)
+            addToLast(barLine(start = false))
+            diagnostics ::= Diagnostic.Error(s"Compilation of $filePath with Scala $compilerVersion failed at line: $line, column: $column.\nFull error output:\n${barLine(start = true)}${errorLine(error)}", sourcePos)
+          case error @ brokenClassPattern(filePath) =>
+            inError = true
+            diagnostics ::= Diagnostic.Error(s"$error\nFull error output:\n${barLine(start = true)}${errorLine(error)}", NoSourcePosition)
+          case summaryPattern() => // Ignored
+          case line if inError => addToLast(errorLine(line))
+          case _ =>
+      addToLast(barLine(start = false))
+      diagnostics.reverse
+
+    protected def compileWithOtherCompiler(compiler: String, files: Array[JFile], flags: TestFlags, targetDir: JFile): TestReporter =
+      def artifactClasspath(organizationName: String, moduleName: String) =
+        import coursier._
+        val dep = Dependency(
+          Module(
+            Organization(organizationName),
+            ModuleName(moduleName),
+            attributes = Map.empty
+          ),
+          version = compiler
+        )
+        Fetch()
+          .addDependencies(dep)
+          .run()
+          .mkString(JFile.pathSeparator)
+
+      val stdlibClasspath = artifactClasspath("org.scala-lang", "scala3-library_3")
+      val scalacClasspath = artifactClasspath("org.scala-lang", "scala3-compiler_3")
+
+      val pageWidth = TestConfiguration.pageWidth - 20
+      val flags1 = flags.copy(defaultClassPath = stdlibClasspath)
+        .withClasspath(targetDir.getPath)
+        .and("-d", targetDir.getPath)
+        .and("-pagewidth", pageWidth.toString)
+
+      val scalacCommand = Array("java", "-cp", scalacClasspath, "dotty.tools.dotc.Main")
+      val command = scalacCommand ++ flags1.all ++ files.map(_.getAbsolutePath)
+      val process = Runtime.getRuntime.exec(command)
+
+      val reporter = TestReporter.reporter(realStdout, logLevel =
+        if (suppressErrors || suppressAllOutput) ERROR + 1 else ERROR)
+      val errorsText = Source.fromInputStream(process.getErrorStream).mkString
+      if process.waitFor() != 0 then
+        val diagnostics = parseErrors(errorsText, compiler, pageWidth)
+        diagnostics.foreach { diag =>
+          val context = (new ContextBase).initialCtx
+          reporter.report(diag)(using context)
+        }
+
+      reporter
+
     protected def compileFromTasty(flags0: TestFlags, suppressErrors: Boolean, targetDir: JFile): TestReporter = {
       val tastyOutput = new JFile(targetDir.getPath + "_from-tasty")
       tastyOutput.mkdir()
@@ -581,9 +669,9 @@ trait ParallelTesting extends RunnerOrchestration { self =>
         else reportPassed()
       }
       else echo {
-        testFilter
-          .map(r => s"""No files matched "$r" in test""")
-          .getOrElse("No tests available under target - erroneous test?")
+        testFilter match
+          case _ :: _ => s"""No files matched "${testFilter.mkString(",")}" in test"""
+          case _      => "No tests available under target - erroneous test?"
       }
 
       this
@@ -663,17 +751,26 @@ trait ParallelTesting extends RunnerOrchestration { self =>
       def compilerCrashed = reporters.exists(_.compilerCrashed)
       lazy val (errorMap, expectedErrors) = getErrorMapAndExpectedCount(testSource.sourceFiles.toIndexedSeq)
       lazy val actualErrors = reporters.foldLeft(0)(_ + _.errorCount)
-      def hasMissingAnnotations = getMissingExpectedErrors(errorMap, reporters.iterator.flatMap(_.errors))
+      lazy val (expected, unexpected) = getMissingExpectedErrors(errorMap, reporters.iterator.flatMap(_.errors))
+      def hasMissingAnnotations = expected.nonEmpty || unexpected.nonEmpty
       def showErrors = "-> following the errors:\n" +
-        reporters.flatMap(_.allErrors.map(e => e.pos.line.toString + ": " + e.message)).mkString(start = "at ", sep = "\n at ", end = "")
-
-      if (compilerCrashed) Some(s"Compiler crashed when compiling: ${testSource.title}")
-      else if (actualErrors == 0) Some(s"\nNo errors found when compiling neg test $testSource")
-      else if (expectedErrors == 0) Some(s"\nNo errors expected/defined in $testSource -- use // error or // nopos-error")
-      else if (expectedErrors != actualErrors) Some(s"\nWrong number of errors encountered when compiling $testSource\nexpected: $expectedErrors, actual: $actualErrors " + showErrors)
-      else if (hasMissingAnnotations) Some(s"\nErrors found on incorrect row numbers when compiling $testSource\n$showErrors")
-      else if (!errorMap.isEmpty) Some(s"\nExpected error(s) have {=}: $errorMap")
-      else None
+        reporters.flatMap(_.allErrors.sortBy(_.pos.line).map(e => s"${e.pos.line + 1}: ${e.message}")).mkString(" at ", "\n at ", "")
+
+      Option {
+        if compilerCrashed then s"Compiler crashed when compiling: ${testSource.title}"
+        else if actualErrors == 0 then s"\nNo errors found when compiling neg test $testSource"
+        else if expectedErrors == 0 then s"\nNo errors expected/defined in $testSource -- use // error or // nopos-error"
+        else if expectedErrors != actualErrors then
+          s"""|Wrong number of errors encountered when compiling $testSource
+              |expected: $expectedErrors, actual: $actualErrors
+              |${expected.mkString("Unfulfilled expectations:\n", "\n", "")}
+              |${unexpected.mkString("Unexpected errors:\n", "\n", "")}
+              |$showErrors
+              |""".stripMargin.trim.linesIterator.mkString("\n", "\n", "")
+        else if hasMissingAnnotations then s"\nErrors found on incorrect row numbers when compiling $testSource\n$showErrors"
+        else if !errorMap.isEmpty then s"\nExpected error(s) have {=}: $errorMap"
+        else null
+      }
     }
 
     override def onSuccess(testSource: TestSource, reporters: Seq[TestReporter], logger: LoggedRunnable): Unit =
@@ -688,72 +785,59 @@ trait ParallelTesting extends RunnerOrchestration { self =>
     //
     // We collect these in a map `"file:row" -> numberOfErrors`, for
     // nopos errors we save them in `"file" -> numberOfNoPosErrors`
-    def getErrorMapAndExpectedCount(files: Seq[JFile]): (HashMap[String, Integer], Int) = {
+    def getErrorMapAndExpectedCount(files: Seq[JFile]): (HashMap[String, Integer], Int) =
+      val comment = raw"//( *)(nopos-|anypos-)?error".r
       val errorMap = new HashMap[String, Integer]()
       var expectedErrors = 0
+      def bump(key: String): Unit =
+        errorMap.get(key) match
+          case null => errorMap.put(key, 1)
+          case n => errorMap.put(key, n+1)
+        expectedErrors += 1
       files.filter(isSourceFile).foreach { file =>
         Using(Source.fromFile(file, StandardCharsets.UTF_8.name)) { source =>
           source.getLines.zipWithIndex.foreach { case (line, lineNbr) =>
-            val errors = line.toSeq.sliding("// error".length).count(_.unwrap == "// error")
-            if (errors > 0)
-              errorMap.put(s"${file.getPath}:$lineNbr", errors)
-
-            val noposErrors = line.toSeq.sliding("// nopos-error".length).count(_.unwrap == "// nopos-error")
-            if (noposErrors > 0) {
-              val nopos = errorMap.get("nopos")
-              val existing: Integer = if (nopos eq null) 0 else nopos
-              errorMap.put("nopos", noposErrors + existing)
-            }
-
-            val anyposErrors = line.toSeq.sliding("// anypos-error".length).count(_.unwrap == "// anypos-error")
-            if (anyposErrors > 0) {
-              val anypos = errorMap.get("anypos")
-              val existing: Integer = if (anypos eq null) 0 else anypos
-              errorMap.put("anypos", anyposErrors + existing)
-            }
-
-            val possibleTypos = List("//error" -> "// error", "//nopos-error" -> "// nopos-error", "//anypos-error" -> "// anypos-error")
-            for ((possibleTypo, expected) <- possibleTypos) {
-              if (line.contains(possibleTypo))
-                echo(s"Warning: Possible typo in error tag in file ${file.getCanonicalPath}:$lineNbr: found `$possibleTypo` but expected `$expected`")
+            comment.findAllMatchIn(line).foreach { m =>
+              m.group(2) match
+                case prefix if m.group(1).isEmpty =>
+                  val what = Option(prefix).getOrElse("")
+                  echo(s"Warning: ${file.getCanonicalPath}:${lineNbr}: found `//${what}error` but expected `// ${what}error`, skipping comment")
+                case "nopos-" => bump("nopos")
+                case "anypos-" => bump("anypos")
+                case _ => bump(s"${file.getPath}:${lineNbr+1}")
             }
-
-            expectedErrors += anyposErrors + noposErrors + errors
           }
         }.get
       }
-
       (errorMap, expectedErrors)
-    }
-
-    def getMissingExpectedErrors(errorMap: HashMap[String, Integer], reporterErrors: Iterator[Diagnostic]) = !reporterErrors.forall { error =>
-      val pos1 = error.pos.nonInlined
-      val key = if (pos1.exists) {
-        def toRelative(path: String): String =  // For some reason, absolute paths leak from the compiler itself...
-          path.split(JFile.separatorChar).dropWhile(_ != "tests").mkString(JFile.separator)
-        val fileName = toRelative(pos1.source.file.toString)
-        s"$fileName:${pos1.line}"
-
-      } else "nopos"
-
-      val errors = errorMap.get(key)
-
-      def missing = { echo(s"Error reported in ${pos1.source}, but no annotation found") ; false }
-
-      if (errors ne null) {
-        if (errors == 1) errorMap.remove(key)
-        else errorMap.put(key, errors - 1)
-        true
-      }
-      else if key == "nopos" then
-        missing
-      else
-        errorMap.get("anypos") match
-          case null  => missing
-          case 1     => errorMap.remove("anypos") ; true
-          case slack => if slack < 1 then missing
-                        else errorMap.put("anypos", slack - 1) ; true
-    }
+    end getErrorMapAndExpectedCount
+
+    // return unfulfilled expected errors and unexpected diagnostics
+    def getMissingExpectedErrors(errorMap: HashMap[String, Integer], reporterErrors: Iterator[Diagnostic]): (List[String], List[String]) =
+      val unexpected, unpositioned = ListBuffer.empty[String]
+      // For some reason, absolute paths leak from the compiler itself...
+      def relativize(path: String): String = path.split(JFile.separatorChar).dropWhile(_ != "tests").mkString(JFile.separator)
+      def seenAt(key: String): Boolean =
+        errorMap.get(key) match
+          case null => false
+          case 1 => errorMap.remove(key) ; true
+          case n => errorMap.put(key, n - 1) ; true
+      def sawDiagnostic(d: Diagnostic): Unit =
+        d.pos.nonInlined match
+          case srcpos if srcpos.exists =>
+            val key = s"${relativize(srcpos.source.file.toString)}:${srcpos.line + 1}"
+            if !seenAt(key) then unexpected += key
+          case srcpos =>
+            if !seenAt("nopos") then unpositioned += relativize(srcpos.source.file.toString)
+
+      reporterErrors.foreach(sawDiagnostic)
+
+      errorMap.get("anypos") match
+        case n if n == unexpected.size => errorMap.remove("anypos") ; unexpected.clear()
+        case _ =>
+
+      (errorMap.asScala.keys.toList, (unexpected ++ unpositioned).toList)
+    end getMissingExpectedErrors
   }
 
   private final class NoCrashTest(testSources: List[TestSource], times: Int, threadLimit: Option[Int], suppressAllOutput: Boolean)(implicit summaryReport: SummaryReporting)
@@ -978,7 +1062,7 @@ trait ParallelTesting extends RunnerOrchestration { self =>
           target.copy(dir = copyToDir(outDir, dir))
       }
 
-      val test = new RewriteTest(copiedTargets, checkFileMap, times, threadLimit, shouldFail || shouldSuppressOutput).executeTestSuite()
+      new RewriteTest(copiedTargets, checkFileMap, times, threadLimit, shouldFail || shouldSuppressOutput).executeTestSuite()
       this
     }
 
@@ -1227,14 +1311,14 @@ trait ParallelTesting extends RunnerOrchestration { self =>
     val (dirs, files) = compilationTargets(sourceDir, fileFilter)
 
     val isPicklerTest = flags.options.contains("-Ytest-pickler")
-    def ignoreDir(dir: JFile): Boolean = {
+    def picklerDirFilter(source: SeparateCompilationSource): Boolean = {
       // Pickler tests stop after pickler not producing class/tasty files. The second part of the compilation
       // will not be able to compile due to the missing artifacts from the first part.
-      isPicklerTest && dir.listFiles().exists(file => file.getName.endsWith("_2.scala") || file.getName.endsWith("_2.java"))
+      !isPicklerTest || source.compilationGroups.length == 1
     }
     val targets =
       files.map(f => JointCompilationSource(testGroup.name, Array(f), flags, createOutputDirsForFile(f, sourceDir, outDir))) ++
-      dirs.collect { case dir if !ignoreDir(dir) => SeparateCompilationSource(testGroup.name, dir, flags, createOutputDirsForDir(dir, sourceDir, outDir)) }
+      dirs.map { dir => SeparateCompilationSource(testGroup.name, dir, flags, createOutputDirsForDir(dir, sourceDir, outDir)) }.filter(picklerDirFilter)
 
     // Create a CompilationTest and let the user decide whether to execute a pos or a neg test
     new CompilationTest(targets)
@@ -1269,10 +1353,9 @@ trait ParallelTesting extends RunnerOrchestration { self =>
 
     val (dirs, files) = compilationTargets(sourceDir, fromTastyFilter)
 
-    val filteredFiles = testFilter match {
-      case Some(str) => files.filter(_.getPath.contains(str))
-      case None => files
-    }
+    val filteredFiles = testFilter match
+      case _ :: _ => files.filter(f => testFilter.exists(f.getPath.contains))
+      case _      => Nil
 
     class JointCompilationSourceFromTasty(
        name: String,
@@ -1358,6 +1441,11 @@ trait ParallelTesting extends RunnerOrchestration { self =>
     // Create a CompilationTest and let the user decide whether to execute a pos or a neg test
     new CompilationTest(targets)
   }
+
+  private def getCharsetFromEncodingOpt(flags: TestFlags) =
+    flags.options.sliding(2).collectFirst {
+      case Array("-encoding", encoding) => Charset.forName(encoding)
+    }.getOrElse(StandardCharsets.UTF_8)
 }
 
 object ParallelTesting {
@@ -1371,4 +1459,5 @@ object ParallelTesting {
 
   def isTastyFile(f: JFile): Boolean =
     f.getName.endsWith(".tasty")
+
 }
diff --git a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala
index 8d1c9fa5cd86..b43dcbdd6046 100644
--- a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala
+++ b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala
@@ -6,8 +6,10 @@ import java.io.File
 
 object TestConfiguration {
 
+  val pageWidth = 120
+
   val noCheckOptions = Array(
-    "-pagewidth", "120",
+    "-pagewidth", pageWidth.toString,
     "-color:never",
     "-Xtarget", defaultTarget
   )
@@ -60,6 +62,7 @@ object TestConfiguration {
 
   val commonOptions = Array("-indent", "-language:postfixOps") ++ checkOptions ++ noCheckOptions ++ yCheckOptions
   val defaultOptions = TestFlags(basicClasspath, commonOptions)
+  val unindentOptions = TestFlags(basicClasspath, Array("-no-indent") ++ checkOptions ++ noCheckOptions ++ yCheckOptions)
   val withCompilerOptions =
     defaultOptions.withClasspath(withCompilerClasspath).withRunClasspath(withCompilerClasspath)
   lazy val withStagingOptions =
diff --git a/compiler/test/dotty/tools/vulpix/VulpixMetaTests.scala b/compiler/test/dotty/tools/vulpix/VulpixMetaTests.scala
index befb4701f371..75af0aa94893 100644
--- a/compiler/test/dotty/tools/vulpix/VulpixMetaTests.scala
+++ b/compiler/test/dotty/tools/vulpix/VulpixMetaTests.scala
@@ -28,7 +28,7 @@ object VulpixMetaTests extends ParallelTesting {
   def numberOfSlaves = 1
   def safeMode = false // Don't fork a new VM after each run test
   def isInteractive = false // Don't beautify output for interactive use.
-  def testFilter = None // Run all the tests.
+  def testFilter = Nil // Run all the tests.
   def updateCheckFiles: Boolean = false
 
   @AfterClass
diff --git a/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala b/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala
index cdbd55af61d4..432bb64bc1cf 100644
--- a/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala
+++ b/compiler/test/dotty/tools/vulpix/VulpixUnitTests.scala
@@ -104,7 +104,7 @@ object VulpixUnitTests extends ParallelTesting {
   def numberOfSlaves = 5
   def safeMode = sys.env.get("SAFEMODE").isDefined
   def isInteractive = !sys.env.contains("DRONE")
-  def testFilter = None
+  def testFilter = Nil
   def updateCheckFiles: Boolean = false
 
   @AfterClass
diff --git a/dist/bin/common b/dist/bin/common
index fd7b8be9149e..e3e4253938fb 100755
--- a/dist/bin/common
+++ b/dist/bin/common
@@ -54,15 +54,15 @@ case "`uname`" in
 esac
 
 unset CYGPATHCMD
-if [[ $cygwin || $mingw || $msys ]]; then
+if [[ ${cygwin-} || ${mingw-} || ${msys-} ]]; then
   # ConEmu terminal is incompatible with jna-5.*.jar
-  [[ ($CONEMUANSI || $ConEmuANSI) ]] && conemu=true
+  [[ (${CONEMUANSI-} || ${ConEmuANSI-}) ]] && conemu=true
   # cygpath is used by various windows shells: cygwin, git-sdk, gitbash, msys, etc.
   CYGPATHCMD=`which cygpath 2>/dev/null`
   case "$TERM" in
     rxvt* | xterm* | cygwin*)
       stty -icanon min 1 -echo
-      SCALA_OPTS="$SCALA_OPTS -Djline.terminal=unix"
+      JAVA_OPTS="$JAVA_OPTS -Djline.terminal=unix"
     ;;
   esac
 fi
@@ -111,14 +111,14 @@ CLASSPATH_SUFFIX=""
 PSEP=":"
 
 # translate paths to Windows-mixed format before running java
-if [ -n "$CYGPATHCMD" ]; then
-  [ -n "$PROG_HOME" ] &&
+if [ -n "${CYGPATHCMD-}" ]; then
+  [ -n "${PROG_HOME-}" ] &&
     PROG_HOME=`"$CYGPATHCMD" -am "$PROG_HOME"`
   [ -n "$JAVA_HOME" ] &&
     JAVA_HOME=`"$CYGPATHCMD" -am "$JAVA_HOME"`
   CLASSPATH_SUFFIX=";"
   PSEP=";"
-elif [[ $mingw || $msys ]]; then
+elif [[ ${mingw-} || ${msys-} ]]; then
   # For Mingw / Msys, convert paths from UNIX format before anything is touched
   [ -n "$PROG_HOME" ] &&
     PROG_HOME="`(cd "$PROG_HOME"; pwd -W | sed 's|/|\\\\|g')`"
@@ -133,14 +133,18 @@ fi
 # *-------------------------------------------------*/
 
 find_lib () {
-  local lib=$(find $PROG_HOME/lib/ -name "$1")
-  if [ -n "$CYGPATHCMD" ]; then
-    "$CYGPATHCMD" -am $lib
-  elif [[ $mingw ||  $msys ]]; then
-    echo $lib | sed 's|/|\\\\|g'
-  else
-    echo $lib
-  fi
+  for lib in "$PROG_HOME"/lib/$1 ; do
+    if [[ -f "$lib" ]]; then
+      if [ -n "$CYGPATHCMD" ]; then
+        "$CYGPATHCMD" -am "$lib"
+      elif [[ $mingw ||  $msys ]]; then
+        echo "$lib" | sed 's|/|\\\\|g'
+      else
+        echo "$lib"
+      fi
+      return
+    fi
+  done
 }
 
 DOTTY_COMP=$(find_lib "*scala3-compiler*")
@@ -155,8 +159,65 @@ SBT_INTF=$(find_lib "*compiler-interface*")
 JLINE_READER=$(find_lib "*jline-reader-3*")
 JLINE_TERMINAL=$(find_lib "*jline-terminal-3*")
 JLINE_TERMINAL_JNA=$(find_lib "*jline-terminal-jna-3*")
-[[ $conemu ]] || JNA=$(find_lib "*jna-5*")
 
-# debug
+# jna-5 only appropriate for some combinations
+[[ ${conemu-} && ${msys-} ]] || JNA=$(find_lib "*jna-5*")
+
+compilerJavaClasspathArgs () {
+  # echo "dotty-compiler: $DOTTY_COMP"
+  # echo "dotty-interface: $DOTTY_INTF"
+  # echo "dotty-library: $DOTTY_LIB"
+  # echo "tasty-core: $TASTY_CORE"
+  # echo "scala-asm: $SCALA_ASM"
+  # echo "scala-lib: $SCALA_LIB"
+  # echo "sbt-intface: $SBT_INTF"
+
+  toolchain=""
+  toolchain+="$SCALA_LIB$PSEP"
+  toolchain+="$DOTTY_LIB$PSEP"
+  toolchain+="$SCALA_ASM$PSEP"
+  toolchain+="$SBT_INTF$PSEP"
+  toolchain+="$DOTTY_INTF$PSEP"
+  toolchain+="$DOTTY_COMP$PSEP"
+  toolchain+="$TASTY_CORE$PSEP"
+  toolchain+="$DOTTY_STAGING$PSEP"
+  toolchain+="$DOTTY_TASTY_INSPECTOR$PSEP"
+
+  # jline
+  toolchain+="$JLINE_READER$PSEP"
+  toolchain+="$JLINE_TERMINAL$PSEP"
+  toolchain+="$JLINE_TERMINAL_JNA$PSEP"
+  [ -n "${JNA-}" ] && toolchain+="$JNA$PSEP"
+
+  if [ -n "${jvm_cp_args-}" ]; then
+    jvm_cp_args="$toolchain$jvm_cp_args"
+  else
+    jvm_cp_args="$toolchain$PSEP"
+  fi
+}
+
+default_java_opts="-Xmx768m -Xms768m"
+
+CompilerMain=dotty.tools.dotc.Main
+DecompilerMain=dotty.tools.dotc.decompiler.Main
+TastyPrinterMain=dotty.tools.dotc.core.tasty.TastyPrinter
+ReplMain=dotty.tools.repl.Main
+ScriptingMain=dotty.tools.scripting.Main
 
-DEBUG_STR=-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005
+declare -a java_args
+declare -a scala_args
+declare -a residual_args
+declare -a script_args
+
+addJava () {
+  java_args+=("'$1'")
+}
+addScala () {
+  scala_args+=("'$1'")
+}
+addResidual () {
+  residual_args+=("'$1'")
+}
+addScript () {
+  script_args+=("'$1'")
+}
diff --git a/dist/bin/common.bat b/dist/bin/common.bat
new file mode 100644
index 000000000000..0ffaed18652c
--- /dev/null
+++ b/dist/bin/common.bat
@@ -0,0 +1,54 @@
+@rem #########################################################################
+@rem ## Code common to scalac.bat, scaladoc.bat and scala.bat
+
+if defined JAVACMD (
+    set "_JAVACMD=%JAVACMD%"
+) else if defined JAVA_HOME (
+    set "_JAVACMD=%JAVA_HOME%\bin\java.exe"
+) else if defined JDK_HOME (
+    set "_JAVACMD=%JDK_HOME%\bin\java.exe"
+) else (
+    where /q java.exe
+    if !ERRORLEVEL!==0 (
+        set __JAVA_BIN_DIR=
+        for /f "delims=" %%i in ('where /f java.exe') do (
+            set "__PATH=%%~dpi"
+            @rem we take first occurrence and ignore Oracle path for java executable
+            if not defined __JAVA_BIN_DIR if "!__PATH!"=="!__PATH:javapath=!" set "__JAVA_BIN_DIR=!__PATH!"
+        )
+        if defined __JAVA_BIN_DIR set "_JAVACMD=!__JAVA_BIN_DIR!\java.exe"
+    )
+    if not defined _JAVACMD (
+        set "__PATH=%ProgramFiles%\Java"
+        for /f %%f in ('dir /ad /b "!__PATH!\jre*" 2^>NUL') do set "_JAVA_HOME=!__PATH!\%%f"
+        if not defined _JAVA_HOME (
+           set __PATH=C:\opt
+           for /f %%f in ('dir /ad /b "!__PATH!\jdk*" 2^>NUL') do set "_JAVA_HOME=!__PATH!\%%f\jre"
+        )
+        if defined _JAVA_HOME set "_JAVACMD=!_JAVA_HOME!\bin\java.exe"
+    )
+)
+if not exist "%_JAVACMD%" (
+   echo Error: Java executable not found ^(!_JAVACMD!^) 1>&2
+   set _EXITCODE=1
+   goto :eof
+)
+
+if not defined _PROG_HOME set "_PROG_HOME=%~dp0"
+for /f "delims=" %%f in ("%_PROG_HOME%\.") do set "_LIB_DIR=%%~dpflib"
+
+set _PSEP=;
+
+for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-compiler*"')        do set "_SCALA3_COMP=%_LIB_DIR%\%%f"
+for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-interfaces*"')      do set "_SCALA3_INTF=%_LIB_DIR%\%%f"
+for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-library*"')         do set "_SCALA3_LIB=%_LIB_DIR%\%%f"
+for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-staging*"')         do set "_SCALA3_STAGING=%_LIB_DIR%\%%f"
+for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-tasty-inspector*"') do set "_SCALA3_TASTY_INSPECTOR=%_LIB_DIR%\%%f"
+for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*tasty-core*"')             do set "_TASTY_CORE=%_LIB_DIR%\%%f"
+for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala-asm*"')              do set "_SCALA_ASM=%_LIB_DIR%\%%f"
+for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala-library*"')          do set "_SCALA_LIB=%_LIB_DIR%\%%f"
+for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*compiler-interface*"')     do set "_SBT_INTF=%_LIB_DIR%\%%f"
+for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*jline-reader-3*"')         do set "_JLINE_READER=%_LIB_DIR%\%%f"
+for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*jline-terminal-3*"')       do set "_JLINE_TERMINAL=%_LIB_DIR%\%%f"
+for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*jline-terminal-jna-3*"')   do set "_JLINE_TERMINAL_JNA=%_LIB_DIR%\%%f"
+for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*jna-5*"')                  do set "_JNA=%_LIB_DIR%\%%f"
diff --git a/dist/bin/scala b/dist/bin/scala
index bfcd30c14cd0..b3116b2706b3 100755
--- a/dist/bin/scala
+++ b/dist/bin/scala
@@ -1,8 +1,7 @@
 #!/usr/bin/env bash
 
 # Try to autodetect real location of the script
-
-if [ -z "$PROG_HOME" ] ; then
+if [ -z "${PROG_HOME-}" ] ; then
   ## resolve links - $0 may be a link to PROG_HOME
   PRG="$0"
 
@@ -27,141 +26,47 @@ if [ -z "$PROG_HOME" ] ; then
   cd "$saveddir"
 fi
 
-addJvmOptions () {
-  jvm_options+=("'$1'")
-}
-
-addScalacOptions () {
-  java_options+=("'$1'")
-}
-
 source "$PROG_HOME/bin/common"
 
-declare -a residual_args
-declare -a script_args
-execute_repl=false
-execute_run=false
-execute_script=false
-with_compiler=false
-class_path_count=0
-CLASS_PATH=""
-save_compiled=false
-
-# Little hack to check if all arguments are options
-all_params="$*"
-truncated_params="${*#-}"
-# options_indicator != 0 if at least one parameter is not an option
-options_indicator=$(( ${#all_params} - ${#truncated_params} - $# ))
-
-[ -n "$SCALA_OPTS" ] && set -- $SCALA_OPTS "$@"
-
 while [[ $# -gt 0 ]]; do
   case "$1" in
-    -repl)
-      execute_repl=true
-      shift
-      ;;
-    -run)
-      execute_run=true
-      shift
-      ;;
-    -cp | -classpath)
-      CLASS_PATH="$2"
-      class_path_count+=1
-      shift
-      shift
-      ;;
-    -with-compiler)
-      with_compiler=true
-      shift
-      ;;
-    @*|-color:*)
-      addScalacOptions "${1}"
+    -D*)
+      # pass to scala as well: otherwise we lose it sometimes when we
+      # need it, e.g. communicating with a server compiler.
+      addJava "$1"
+      addScala "$1"
+      # respect user-supplied -Dscala.usejavacp
       shift
       ;;
-    -save|-savecompiled)
-      save_compiled=true
-      scala_script_options+=("$1")
-      shift
-      ;;
-    -compile-only)
-      scala_script_options+=("$1")
-      shift
-      ;;
-    -d)
-      DEBUG="$DEBUG_STR"
-      shift
-      ;;
-    -version)
-      # defer to scalac, then exit
-      addScalacOptions "${1}"
+    -J*)
+      # as with -D, pass to scala even though it will almost
+      # never be used.
+      addJava "${1:2}"
+      addScala "$1"
       shift
-      eval "\"$PROG_HOME/bin/scalac\" ${cp_arg-} ${java_options[@]}"
-      scala_exit_status=$?
-      onExit
       ;;
-    -J*)
-      addJvmOptions "${1:2}"
-      addScalacOptions "${1}"
-      shift ;;
-    *)
-      if [ $execute_script == false ]; then
-        # is a script if extension .scala or .sc or if has scala hash bang
-        if [[ -e "$1" && ("$1" == *.scala || "$1" == *.sc || -f "$1" && `head -n 1 -- "$1" | grep '#!.*scala'`) ]]; then
-          execute_script=true
-          target_script="$1"
-        else
-          residual_args+=("$1")
-        fi
+    -classpath*)
+      if [ "$1" != "${1##* }" ]; then
+        # hashbang-combined args "-classpath 'lib/*'"
+        A=$1 ; shift # consume $1 before adding its substrings back
+        set -- $A "$@" # split $1 on whitespace and put it back
       else
-        script_args+=("$1")
+        addScala "$1"
+        shift
       fi
+      ;;
+    *)
+      addScala "$1"
       shift
       ;;
-
   esac
 done
 
-if [ $execute_script == true ]; then
-  [ -n "${script_trace-}" ] && set -x
-  if [ "$CLASS_PATH" ]; then
-    cp_arg="-classpath \"$CLASS_PATH\""
-  fi
-  java_options+=(${scala_script_options})
-  setScriptName="-Dscript.path=$target_script"
-  target_jar="${target_script%.*}.jar"
-  if [[ $save_compiled == true && "$target_jar" -nt "$target_script" ]]; then
-    eval "\"$JAVACMD\"" $setScriptName -jar "$target_jar" "${script_args[@]}"
-    scala_exit_status=$?
-  else
-    [[ $save_compiled == true ]] && rm -f $target_jar
-    residual_args+=($setScriptName)
-    eval "\"$PROG_HOME/bin/scalac\" ${cp_arg-} ${java_options[@]} ${residual_args[@]} -script $target_script ${script_args[@]}"
-    scala_exit_status=$?
-  fi
-elif [ $execute_repl == true ] || ([ $execute_run == false ] && [ $options_indicator == 0 ]); then
-  if [ "$CLASS_PATH" ]; then
-    cp_arg="-classpath \"$CLASS_PATH\""
-  fi
-  eval "\"$PROG_HOME/bin/scalac\" ${cp_arg-} ${java_options[@]} -repl ${residual_args[@]}"
-  scala_exit_status=$?
-elif [ $execute_repl == true ] || [ ${#residual_args[@]} -ne 0 ]; then
-  cp_arg="$DOTTY_LIB$PSEP$SCALA_LIB"
-  if [ -z "$CLASS_PATH" ]; then
-    cp_arg+="$PSEP."
-  else
-    cp_arg+="$PSEP$CLASS_PATH"
-  fi
-  if [ "$class_path_count" -gt 1 ]; then
-    echo "warning: multiple classpaths are found, scala only use the last one."
-  fi
-  if [ $with_compiler == true ]; then
-    cp_arg+="$PSEP$DOTTY_COMP$PSEP$TASTY_CORE$PSEP$DOTTY_INTF$PSEP$SCALA_ASM$PSEP$DOTTY_STAGING$PSEP$DOTTY_TASTY_INSPECTOR"
-  fi
-  # exec here would prevent onExit from being called, leaving terminal in unusable state
-  eval "\"$JAVACMD\"" "$DEBUG"  "-classpath \"$cp_arg\"" "${jvm_options[@]}" "${residual_args[@]}"
-  scala_exit_status=$?
-else
-  echo "warning: command option is not correct."
-fi
+# exec here would prevent onExit from being called, leaving terminal in unusable state
+compilerJavaClasspathArgs
+[ -z "${ConEmuPID-}" -o -n "${cygwin-}" ] && export MSYSTEM= PWD= # workaround for #12405
+eval "\"$JAVACMD\"" "${java_args[@]}" "-Dscala.home=$PROG_HOME" "-classpath \"$jvm_cp_args\"" "dotty.tools.MainGenericRunner" "-classpath \"$jvm_cp_args\"" "${scala_args[@]}"
+scala_exit_status=$?
+
+
 onExit
diff --git a/dist/bin/scala.bat b/dist/bin/scala.bat
new file mode 100644
index 000000000000..edf7bfc5a1c2
--- /dev/null
+++ b/dist/bin/scala.bat
@@ -0,0 +1,121 @@
+@echo off
+setlocal enabledelayedexpansion
+
+@rem #########################################################################
+@rem ## Environment setup
+
+set _EXITCODE=0
+
+set "_PROG_HOME=%~dp0"
+
+call "%_PROG_HOME%\common.bat"
+if not %_EXITCODE%==0 goto end
+
+call :args %*
+
+@rem #########################################################################
+@rem ## Main
+
+set _CASE_REPL=0
+if %_EXECUTE_REPL%==1 set _CASE_REPL=1
+if %_EXECUTE_RUN%==0 if %_OPTIONS_INDICATOR%==0 set _CASE_REPL=1
+
+set _CASE_EXEC=0
+if %_EXECUTE_REPL%==1 set _CASE_EXEC=1
+if defined _RESIDUAL_ARGS set _CASE_EXEC=1
+
+if %_EXECUTE_SCRIPT%==1 (
+    set _SCALAC_ARGS=
+    if defined _CLASS_PATH set _SCALAC_ARGS=-classpath "%_CLASS_PATH%"
+    set _RESIDUAL_ARGS=!_RESIDUAL_ARGS! "-Dscript.path=%_TARGET_SCRIPT%" 
+    set _SCALAC_ARGS=!_SCALAC_ARGS! %_JAVA_OPTS% !_RESIDUAL_ARGS! -script "%_TARGET_SCRIPT%" %_SCRIPT_ARGS%
+    call "%_PROG_HOME%\scalac.bat" !_SCALAC_ARGS!
+    if not !ERRORLEVEL!==0 ( set _EXITCODE=1& goto end )
+@rem if [ $execute_repl == true ] || ([ $execute_run == false ] && [ $options_indicator == 0 ]); then
+) else if %_CASE_REPL%==1 (
+    set _SCALAC_ARGS=
+    if defined _CLASS_PATH set _SCALAC_ARGS=-classpath "%_CLASS_PATH%"
+    set _SCALAC_ARGS=!_SCALAC_ARGS! %_JAVA_OPTS% -repl %_RESIDUAL_ARGS%
+    call "%_PROG_HOME%\scalac.bat" !_SCALAC_ARGS!
+    if not !ERRORLEVEL!==0 ( set _EXITCODE=1& goto end )
+@rem elif [ $execute_repl == true ] || [ ${#residual_args[@]} -ne 0 ]; then
+) else if %_CASE_EXEC%==1 (
+    set "_CP_ARG=%_SCALA3_LIB%%_PSEP%%_SCALA_LIB%"
+    if defined _CLASS_PATH ( set "_CP_ARG=!_CP_ARG!%_PSEP%%_CLASS_PATH%"
+    ) else ( set "_CP_ARG=!_CP_ARG!%_PSEP%."
+    )
+    if %_CLASS_PATH_COUNT% gtr 1 (
+        echo Warning: Multiple classpaths are found, scala only use the last one. 1>&2
+    )
+    if %_WITH_COMPILER%==1 (
+        set "_CP_ARG=!_CP_ARG!%_PSEP%%_SCALA3_COMP%%_PSEP%%_TASTY_CORE%%_PSEP%%_SCALA3_INTF%%_PSEP%%_SCALA_ASM%%_PSEP%%_SCALA3_STAGING%%_PSEP%%_SCALA3_TASTY_INSPECTOR%"
+    )
+    set _JAVA_ARGS=-classpath "!_CP_ARG!" %_JVM_OPTS% %_RESIDUAL_ARGS%
+    call "%_JAVACMD%" !_JAVA_ARGS!
+    if not !ERRORLEVEL!==0 ( set _EXITCODE=1& goto end )
+) else (
+    echo Warning: Command option is not correct. 1>&2
+)
+
+goto end
+
+@rem #########################################################################
+@rem ## Subroutines
+
+:args
+set _RESIDUAL_ARGS=
+set _SCRIPT_ARGS=
+set _EXECUTE_REPL=0
+set _EXECUTE_RUN=0
+set _EXECUTE_SCRIPT=0
+set _TARGET_SCRIPT=
+set _WITH_COMPILER=0
+set _CLASS_PATH_COUNT=0
+set _CLASS_PATH=
+set _JVM_OPTS=
+set _JAVA_OPTS=
+set _OPTIONS_INDICATOR=0
+
+:args_loop
+if "%~1"=="" goto args_done
+set "__ARG=%~1"
+if "%__ARG%"=="-repl" (
+    set _EXECUTE_REPL=1
+) else if "%__ARG%"=="-run" (
+    set _EXECUTE_RUN=1
+) else if "%__ARG%"=="-classpath" (
+    set "_CLASS_PATH=%~2"
+    set /a _CLASS_PATH_COUNT+=1
+    shift
+) else if "%__ARG%"=="-cp" (
+    set "_CLASS_PATH=%~2"
+    set /a _CLASS_PATH_COUNT+=1
+    shift
+) else if "%__ARG%"=="-with-compiler" (
+    set _WITH_COMPILER=1
+) else if "%__ARG:~0,2%"=="-J" (
+    set _JVM_OPTS=!_JVM_OPTS! %__ARG:~2%
+    set _JAVA_OPTS=!_JAVA_OPTS! %__ARG%
+) else (
+    @rem _OPTIONS_INDICATOR != 0 if at least one parameter is not an option
+    if not "%__ARG:~0,1%"=="-" set /a _OPTIONS_INDICATOR+=1
+    if %_EXECUTE_SCRIPT%==1 (
+        set _SCRIPT_ARGS=%_SCRIPT_ARGS% %__ARG%
+    ) else if "%__ARG:~-6%"==".scala" (
+        set _EXECUTE_SCRIPT=1
+        set "_TARGET_SCRIPT=%__ARG%"
+    ) else (
+        set _RESIDUAL_ARGS=%_RESIDUAL_ARGS% %__ARG%
+    )
+)
+shift
+goto args_loop
+:args_done
+goto :eof
+
+@rem #########################################################################
+@rem ## Cleanups
+
+:end
+exit /b %_EXITCODE%
+endlocal
diff --git a/dist/bin/scalac b/dist/bin/scalac
old mode 100755
new mode 100644
index 7d56ac197cc5..6ebf0fe70ea8
--- a/dist/bin/scalac
+++ b/dist/bin/scalac
@@ -1,6 +1,7 @@
 #!/usr/bin/env bash
 
-if [ -z "$PROG_HOME" ] ; then
+# Try to autodetect real location of the script
+if [ -z "${PROG_HOME-}" ] ; then
   ## resolve links - $0 may be a link to PROG_HOME
   PRG="$0"
 
@@ -27,73 +28,24 @@ fi
 
 source "$PROG_HOME/bin/common"
 
-default_java_opts="-Xmx768m -Xms768m"
-withCompiler=true
+[ -z "$PROG_NAME" ] && PROG_NAME=$CompilerMain
 
-CompilerMain=dotty.tools.dotc.Main
-DecompilerMain=dotty.tools.dotc.decompiler.Main
-ReplMain=dotty.tools.repl.Main
-ScriptingMain=dotty.tools.scripting.Main
-
-PROG_NAME=$CompilerMain
-
-addJava () {
-  java_args+=("'$1'")
-}
-addScala () {
-  scala_args+=("'$1'")
-}
-addResidual () {
-  residual_args+=("'$1'")
-}
-addScripting () {
-  scripting_args+=("'$1'")
-}
-
-classpathArgs () {
-  # echo "dotty-compiler: $DOTTY_COMP"
-  # echo "dotty-interface: $DOTTY_INTF"
-  # echo "dotty-library: $DOTTY_LIB"
-  # echo "tasty-core: $TASTY_CORE"
-  # echo "scala-asm: $SCALA_ASM"
-  # echo "scala-lib: $SCALA_LIB"
-  # echo "sbt-intface: $SBT_INTF"
-
-  toolchain=""
-  toolchain+="$SCALA_LIB$PSEP"
-  toolchain+="$DOTTY_LIB$PSEP"
-  toolchain+="$SCALA_ASM$PSEP"
-  toolchain+="$SBT_INTF$PSEP"
-  toolchain+="$DOTTY_INTF$PSEP"
-  toolchain+="$DOTTY_COMP$PSEP"
-  toolchain+="$TASTY_CORE$PSEP"
-  toolchain+="$DOTTY_STAGING$PSEP"
-  toolchain+="$DOTTY_TASTY_INSPECTOR$PSEP"
-
-  # jine
-  toolchain+="$JLINE_READER$PSEP"
-  toolchain+="$JLINE_TERMINAL$PSEP"
-  toolchain+="$JLINE_TERMINAL_JNA$PSEP"
-  toolchain+="$JNA$PSEP"
-
-  jvm_cp_args="-classpath \"$toolchain\""
-}
+withCompiler=true
 
 while [[ $# -gt 0 ]]; do
 case "$1" in
            --) shift; for arg; do addResidual "$arg"; done; set -- ;;
   -v|-verbose) verbose=true && addScala "-verbose" && shift ;;
-       -debug) DEBUG="$DEBUG_STR" && shift ;;
     -q|-quiet) quiet=true && shift ;;
 
     # Optimize for short-running applications, see https://github.com/lampepfl/dotty/issues/222
     -Oshort) addJava "-XX:+TieredCompilation -XX:TieredStopAtLevel=1" && shift ;;
         -repl) PROG_NAME="$ReplMain" && shift ;;
       -script) PROG_NAME="$ScriptingMain" && target_script="$2" && shift && shift
-               while [[ $# -gt 0 ]]; do addScripting "$1" && shift ; done ;;
+               while [[ $# -gt 0 ]]; do addScript "$1" && shift ; done ;;
      -compile) PROG_NAME="$CompilerMain" && shift ;;
    -decompile) PROG_NAME="$DecompilerMain" && shift ;;
- -print-tasty) PROG_NAME="$DecompilerMain" && addScala "-print-tasty" && shift ;;
+ -print-tasty) PROG_NAME="$TastyPrinterMain" && shift ;;
          -run) PROG_NAME="$ReplMain" && shift ;;
       -colors) colors=true && shift ;;
    -no-colors) unset colors && shift ;;
@@ -108,21 +60,26 @@ case "$1" in
   esac
 done
 
-classpathArgs
+compilerJavaClasspathArgs
 
 if [ "$PROG_NAME" == "$ScriptingMain" ]; then
-  scripting_string="-script $target_script ${scripting_args[@]}"
+  setScriptName="-Dscript.path=$target_script"
+  scripting_string="-script $target_script ${script_args[@]}"
 fi
 
+[ -n "$script_trace" ] && set -x
+[ -z "${ConEmuPID-}" -o -n "${cygwin-}" ] && export MSYSTEM= PWD= # workaround for #12405
+
+# exec here would prevent onExit from being called, leaving terminal in unusable state
 eval "\"$JAVACMD\"" \
-     ${JAVA_OPTS:-$default_java_opts} \
-     "${DEBUG-}" \
-     "${java_args[@]}" \
-     "$jvm_cp_args" \
-     -Dscala.usejavacp=true \
-     "$PROG_NAME"  \
-     "${scala_args[@]}" \
-     "${residual_args[@]}" \
-     "${scripting_string-}"
+   ${JAVA_OPTS:-$default_java_opts} \
+   "${java_args[@]}" \
+   "-classpath \"$jvm_cp_args\"" \
+   -Dscala.usejavacp=true \
+   "$setScriptName" \
+   "$PROG_NAME"  \
+   "${scala_args[@]}" \
+   "${residual_args[@]}" \
+   "${scripting_string-}"
 scala_exit_status=$?
-onExit
+
diff --git a/dist/bin/scalac.bat b/dist/bin/scalac.bat
new file mode 100644
index 000000000000..a94bdb5f88b8
--- /dev/null
+++ b/dist/bin/scalac.bat
@@ -0,0 +1,175 @@
+@echo off
+setlocal enabledelayedexpansion
+
+@rem #########################################################################
+@rem ## Environment setup
+
+set _EXITCODE=0
+
+set "_PROG_HOME=%~dp0"
+
+call "%_PROG_HOME%\common.bat"
+if not %_EXITCODE%==0 goto end
+
+set _DEFAULT_JAVA_OPTS=-Xmx768m -Xms768m
+@rem set _WITH_COMPILER=true
+
+set _COMPILER_MAIN=dotty.tools.dotc.Main
+set _DECOMPILER_MAIN=dotty.tools.dotc.decompiler.Main
+set _REPL_MAIN=dotty.tools.repl.Main
+set _SCRIPTING_MAIN=dotty.tools.scripting.Main
+
+call :args %*
+
+@rem #########################################################################
+@rem ## Main
+
+call :classpathArgs
+
+set _SCRIPTING_STRING=
+if "%_PROG_NAME%"=="%_SCRIPTING_MAIN%" (
+    if not defined _TARGET_SCRIPT (
+        echo Error: Missing Scala script file 1>&2
+        set _EXITCODE=1
+        goto end
+    )
+    set _SCRIPTING_STRING=-script %_TARGET_SCRIPT% %_SCRIPTING_ARGS%
+)
+
+if defined JAVA_OPTS ( set _JAVA_OPTS=%JAVA_OPTS%
+) else ( set _JAVA_OPTS=%_DEFAULT_JAVA_OPTS%
+)
+for %%i in ("%_PROG_HOME%\..") do set "_SCALA_HOME=%%~fi"
+
+call "%_JAVACMD%" %_JAVA_OPTS% %_JAVA_DEBUG% %_JAVA_ARGS% %_JVM_CP_ARGS% ^
+-Dscala.home="%_SCALA_HOME%" -Dscala.usejavacp=true ^
+%_PROG_NAME% %_SCALA_ARGS% %_RESIDUAL_ARGS% %_SCRIPTING_STRING%
+if not %ERRORLEVEL%==0 (
+    set _EXITCODE=1
+    goto end
+)
+goto end
+
+@rem #########################################################################
+@rem ## Subroutines
+
+:args
+set _JAVA_DEBUG=
+set _HELP=
+set _VERBOSE=
+set _QUIET=
+set _COLORS=
+set _PROG_NAME=%_COMPILER_MAIN%
+set _SCALA_ARGS=
+set _JAVA_ARGS=
+set _RESIDUAL_ARGS=
+set _SCRIPTING_ARGS=
+set _TARGET_SCRIPT=
+
+:args_loop
+if "%~1"=="" goto args_done
+set "__ARG=%~1"
+if defined _TARGET_SCRIPT (
+    call :addScripting "%__ARG%"
+) else if "%__ARG%"=="--" (
+    @rem for arg; do addResidual "$arg"; done; set -- ;;
+) else if "%__ARG%"=="-h" (
+    set _HELP=true
+    call :addScala "-help"
+) else if "%__ARG%"=="-help" (
+    set _HELP=true
+    call :addScala "-help"
+) else if "%__ARG%"=="-v" (
+    set _VERBOSE=true
+    call :addScala "-verbose"
+) else if "%__ARG%"=="-verbose" (
+    set _VERBOSE=true
+    call :addScala "-verbose"
+) else if "%__ARG%"=="-debug" ( set "_JAVA_DEBUG=%_DEBUG_STR%"
+) else if "%__ARG%"=="-q" ( set _QUIET=true
+) else if "%__ARG%"=="-quiet" ( set _QUIET=true
+@rem Optimize for short-running applications, see https://github.com/lampepfl/dotty/issues/222
+) else if "%__ARG%"=="-Oshort" (
+    call :addJava "-XX:+TieredCompilation -XX:TieredStopAtLevel=1"
+) else if "%__ARG%"=="-repl" ( set _PROG_NAME=%_REPL_MAIN%
+) else if "%__ARG%"=="-script" (
+    set _PROG_NAME=%_SCRIPTING_MAIN%
+    if "%~2"=="" goto args_done
+    set "_TARGET_SCRIPT=%~2"
+    shift
+) else if "%__ARG%"=="-compile" ( set _PROG_NAME=%_COMPILER_MAIN%
+) else if "%__ARG%"=="-decompile" ( set _PROG_NAME=%_DECOMPILER_MAIN%
+) else if "%__ARG%"=="-print-tasty" (
+    set _PROG_NAME=%_DECOMPILER_MAIN%
+    call :addScala "-print-tasty"
+) else if "%__ARG%"=="-run" ( set _PROG_NAME=%_REPL_MAIN%
+) else if "%__ARG%"=="-colors" ( set _COLORS=true
+) else if "%__ARG%"=="-no-colors" ( set _COLORS=
+) else if "%__ARG%"=="-with-compiler" ( set "_JVM_CP_ARGS=%_PSEP%%_SCALA3_COMP%%_PSEP%%_TASTY_CORE%"
+@rem break out -D and -J options and add them to JAVA_OPTS as well
+@rem so they reach the JVM in time to do some good. The -D options
+@rem will be available as system properties.
+) else if "%__ARG:~0,2%"=="-D" ( call :addJava "%__ARG%"
+) else if "%__ARG:~0,2%"=="-J" ( call :addJava "%__ARG:~2%"
+) else ( call :addResidual "%__ARG%"
+)
+shift
+goto args_loop
+:args_done
+goto :eof
+
+@rem output parameter: _SCALA_ARGS
+:addScala
+set _SCALA_ARGS=%_SCALA_ARGS% %~1
+goto :eof
+
+@rem output parameter: _JAVA_ARGS
+:addJava
+set _JAVA_ARGS=%_JAVA_ARGS% %~1
+goto :eof
+
+@rem output parameter: _RESIDUAL_ARGS
+:addResidual
+set _RESIDUAL_ARGS=%_RESIDUAL_ARGS% %~1
+goto :eof
+
+@rem output parameter: _SCRIPTING_ARGS
+:addScripting
+set _SCRIPTING_ARGS=%_SCRIPTING_ARGS% %~1
+goto :eof
+
+@rem output parameter: _JVM_CP_ARGS
+:classpathArgs
+@rem echo scala3-compiler: %_SCALA3_COMP%
+@rem echo scala3-interface: %_SCALA3_INTF%
+@rem echo scala3-library: %_SCALA3_LIB%
+@rem echo tasty-core: %_TASTY_CORE%
+@rem echo scala-asm: %_SCALA_ASM%
+@rem echo scala-lib: %_SCALA_LIB%
+@rem echo sbt-intface: %_SBT_INTF%
+
+set "__TOOLCHAIN=%_SCALA_LIB%%_PSEP%"
+set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_LIB%%_PSEP%"
+set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA_ASM%%_PSEP%"
+set "__TOOLCHAIN=%__TOOLCHAIN%%_SBT_INTF%%_PSEP%"
+set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_INTF%%_PSEP%"
+set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_COMP%%_PSEP%"
+set "__TOOLCHAIN=%__TOOLCHAIN%%_TASTY_CORE%%_PSEP%"
+set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_STAGING%%_PSEP%"
+set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_TASTY_INSPECTOR%%_PSEP%"
+
+@rem # jline
+set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_READER%%_PSEP%"
+set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_TERMINAL%%_PSEP%"
+set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_TERMINAL_JNA%%_PSEP%"
+set "__TOOLCHAIN=%__TOOLCHAIN%%_JNA%"
+
+set _JVM_CP_ARGS=-classpath "%__TOOLCHAIN%"
+goto :eof
+
+@rem #########################################################################
+@rem ## Cleanups
+
+:end
+exit /b %_EXITCODE%
+endlocal
diff --git a/dist/bin/scaladoc b/dist/bin/scaladoc
index 304da8f5045d..a3a23a1accea 100755
--- a/dist/bin/scaladoc
+++ b/dist/bin/scaladoc
@@ -1,6 +1,9 @@
 #!/usr/bin/env bash
 
-if [ -z "$PROG_HOME" ] ; then
+#set -o nounset ; set -o errexit
+
+# Try to autodetect real location of the script
+if [ -z "${PROG_HOME-}" ] ; then
   ## resolve links - $0 may be a link to PROG_HOME
   PRG="$0"
 
@@ -26,7 +29,6 @@ if [ -z "$PROG_HOME" ] ; then
 fi
 
 source "$PROG_HOME/bin/common"
-
 default_java_opts="-Xmx768m -Xms768m"
 withCompiler=true
 
@@ -46,8 +48,8 @@ addScala () {
 addResidual () {
   residual_args+=("'$1'")
 }
-addScripting () {
-  scripting_args+=("'$1'")
+addScrip() {
+  script_args+=("'$1'")
 }
 
 classpathArgs () {
@@ -71,6 +73,8 @@ classpathArgs () {
   CLASS_PATH+="$(find_lib "*liqp*")$PSEP"
   CLASS_PATH+="$(find_lib "*jsoup*")$PSEP"
   CLASS_PATH+="$(find_lib "*jackson-dataformat-yaml*")$PSEP"
+  CLASS_PATH+="$(find_lib "*jackson-datatype-jsr310*")$PSEP"
+  CLASS_PATH+="$(find_lib "*strftime4j*")$PSEP"
   CLASS_PATH+="$(find_lib "*scala-asm*")$PSEP"
   CLASS_PATH+="$(find_lib "*compiler-interface*")$PSEP"
   CLASS_PATH+="$(find_lib "*jline-reader*")$PSEP"
@@ -80,7 +84,7 @@ classpathArgs () {
   CLASS_PATH+="$(find_lib "*flexmark-formatter*")$PSEP"
   CLASS_PATH+="$(find_lib "*autolink-0.6*")$PSEP"
   CLASS_PATH+="$(find_lib "*flexmark-jira-converter*")$PSEP"
-  CLASS_PATH+="$(find_lib "*antlr-3*")$PSEP"
+  CLASS_PATH+="$(find_lib "*antlr4*")$PSEP"
   CLASS_PATH+="$(find_lib "*jackson-annotations*")$PSEP"
   CLASS_PATH+="$(find_lib "*jackson-core*")$PSEP"
   CLASS_PATH+="$(find_lib "*jackson-databind*")$PSEP"
@@ -92,18 +96,18 @@ classpathArgs () {
   CLASS_PATH+="$(find_lib "*flexmark-ext-tables*")$PSEP"
   CLASS_PATH+="$(find_lib "*flexmark-ext-ins*")$PSEP"
   CLASS_PATH+="$(find_lib "*flexmark-ext-superscript*")$PSEP"
-  CLASS_PATH+="$(find_lib "*antlr-runtime*")$PSEP"
+  CLASS_PATH+="$(find_lib "*antlr4-runtime*")$PSEP"
   CLASS_PATH+="$(find_lib "*ST4*")"
 
   jvm_cp_args="-classpath \"$CLASS_PATH\""
 }
 
-in_scripting_args=false
+#for A in "$@" ; do echo "A[$A]" ; done ; exit 2
+
 while [[ $# -gt 0 ]]; do
 case "$1" in
            --) shift; for arg; do addResidual "$arg"; done; set -- ;;
   -v|-verbose) verbose=true && addScala "-verbose" && shift ;;
-       -debug) DEBUG="$DEBUG_STR" && shift ;;
     -q|-quiet) quiet=true && shift ;;
 
       -colors) colors=true && shift ;;
@@ -113,11 +117,7 @@ case "$1" in
           # will be available as system properties.
           -D*) addJava "$1" && shift ;;
           -J*) addJava "${1:2}" && shift ;;
-            *) if [ $in_scripting_args == false ]; then
-                addResidual "$1"
-               else
-                addScripting "$1"
-               fi
+            *) addResidual "$1"
                shift
                ;;
   esac
@@ -127,12 +127,12 @@ classpathArgs
 
 eval "\"$JAVACMD\"" \
      ${JAVA_OPTS:-$default_java_opts} \
-     "$DEBUG" \
      "${java_args[@]}" \
-     "$jvm_cp_args" \
+     "${jvm_cp_args-}" \
+      -Dscala.usejavacp=true \
      "dotty.tools.scaladoc.Main"  \
      "${scala_args[@]}" \
      "${residual_args[@]}" \
-     "$scripting_string"
+     "${scripting_string-}"
 scala_exit_status=$?
 onExit
diff --git a/dist/bin/scaladoc.bat b/dist/bin/scaladoc.bat
new file mode 100644
index 000000000000..698f2a7c36f1
--- /dev/null
+++ b/dist/bin/scaladoc.bat
@@ -0,0 +1,164 @@
+@echo off
+setlocal enabledelayedexpansion
+
+@rem #########################################################################
+@rem ## Environment setup
+
+set _EXITCODE=0
+
+set "_PROG_HOME=%~dp0"
+
+call "%_PROG_HOME%\common.bat"
+if not %_EXITCODE%==0 goto end
+
+set _DEFAULT_JAVA_OPTS=-Xmx768m -Xms768m
+
+call :args %*
+
+@rem #########################################################################
+@rem ## Main
+
+call :classpathArgs
+
+if defined JAVA_OPTS ( set _JAVA_OPTS=%JAVA_OPTS%
+) else ( set _JAVA_OPTS=%_DEFAULT_JAVA_OPTS%
+)
+call "%_JAVACMD%" %_JAVA_OPTS% %_JAVA_DEBUG% %_JAVA_ARGS% ^
+-classpath "%_CLASS_PATH%" ^
+-Dscala.usejavacp=true ^
+dotty.tools.scaladoc.Main %_SCALA_ARGS% %_RESIDUAL_ARGS%
+if not %ERRORLEVEL%==0 (
+    @rem echo Error: Scaladoc execution failed 1>&2
+    set _EXITCODE=1
+    goto end
+)
+goto end
+
+@rem #########################################################################
+@rem ## Subroutines
+
+:args
+set _JAVA_DEBUG=
+set _HELP=
+set _VERBOSE=
+set _QUIET=
+set _COLORS=
+set _SCALA_ARGS=
+set _JAVA_ARGS=
+set _RESIDUAL_ARGS=
+
+:args_loop
+if "%~1"=="" goto args_done
+set "__ARG=%~1"
+if "%__ARG%"=="--" (
+    @rem for arg; do addResidual "$arg"; done; set -- ;;
+) else if "%__ARG%"=="-h" (
+    set _HELP=true
+    call :addScala "-help"
+) else if "%__ARG%"=="-help" (
+    set _HELP=true
+    call :addScala "-help"
+) else if "%__ARG%"=="-v" (
+    set _VERBOSE=true
+    call :addScala "-verbose"
+) else if "%__ARG%"=="-verbose" (
+    set _VERBOSE=true
+    call :addScala "-verbose"
+) else if "%__ARG%"=="-debug" ( set "_JAVA_DEBUG=%_DEBUG_STR%"
+) else if "%__ARG%"=="-q" ( set _QUIET=true
+) else if "%__ARG%"=="-quiet" ( set _QUIET=true
+) else if "%__ARG%"=="-colors" ( set _COLORS=true
+) else if "%__ARG%"=="-no-colors" ( set _COLORS=
+) else if "%__ARG:~0,2%"=="-D" ( call :addJava "%__ARG%"
+) else if "%__ARG:~0,2%"=="-J" ( call :addJava "%__ARG:~2%"
+) else (
+    if defined _IN_SCRIPTING_ARGS ( call :addScripting "%__ARG%"
+    ) else ( call :addResidual "%__ARG%"
+    )
+)
+shift
+goto args_loop
+:args_done
+goto :eof
+
+@rem output parameter: _SCALA_ARGS
+:addScala
+set _SCALA_ARGS=%_SCALA_ARGS% %~1
+goto :eof
+
+@rem output parameter: _JAVA_ARGS
+:addJava
+set _JAVA_ARGS=%_JAVA_ARGS% %~1
+goto :eof
+
+@rem output parameter: _RESIDUAL_ARGS
+:addResidual
+set _RESIDUAL_ARGS=%_RESIDUAL_ARGS% %~1
+goto :eof
+
+@rem output parameter: _CLASS_PATH
+:classpathArgs
+for /f "delims=" %%f in ("%_PROG_HOME%\.") do set "_LIB_DIR=%%~dpflib"
+set _CLASS_PATH=
+@rem keep list in sync with bash script `bin\scaladoc` !
+call :updateClasspath "scaladoc"
+call :updateClasspath "scala3-compiler"
+call :updateClasspath "scala3-interfaces"
+call :updateClasspath "scala3-library"
+call :updateClasspath "tasty-core"
+call :updateClasspath "scala3-tasty-inspector"
+call :updateClasspath "flexmark-0"
+call :updateClasspath "flexmark-html-parser"
+call :updateClasspath "flexmark-ext-anchorlink"
+call :updateClasspath "flexmark-ext-autolink"
+call :updateClasspath "flexmark-ext-emoji"
+call :updateClasspath "flexmark-ext-gfm-strikethrough"
+call :updateClasspath "flexmark-ext-gfm-tables"
+call :updateClasspath "flexmark-ext-gfm-tasklist"
+call :updateClasspath "flexmark-ext-wikilink"
+call :updateClasspath "flexmark-ext-yaml-front-matter"
+call :updateClasspath "liqp"
+call :updateClasspath "jsoup"
+call :updateClasspath "jackson-dataformat-yaml"
+call :updateClasspath "jackson-datatype-jsr310"
+call :updateClasspath "strftime4j"
+call :updateClasspath "scala-asm"
+call :updateClasspath "compiler-interface"
+call :updateClasspath "jline-reader"
+call :updateClasspath "jline-terminal-3"
+call :updateClasspath "jline-terminal-jna"
+call :updateClasspath "flexmark-util"
+call :updateClasspath "flexmark-formatter"
+call :updateClasspath "autolink-0.6"
+call :updateClasspath "flexmark-jira-converter"
+call :updateClasspath "antlr4"
+call :updateClasspath "jackson-annotations"
+call :updateClasspath "jackson-core"
+call :updateClasspath "jackson-databind"
+call :updateClasspath "snakeyaml"
+call :updateClasspath "scala-library"
+call :updateClasspath "protobuf-java"
+call :updateClasspath "util-interface"
+call :updateClasspath "jna-5"
+call :updateClasspath "flexmark-ext-tables"
+call :updateClasspath "flexmark-ext-ins"
+call :updateClasspath "flexmark-ext-superscript"
+call :updateClasspath "antlr4-runtime"
+call :updateClasspath "ST4"
+goto :eof
+
+@rem input parameter: %1=pattern for library file
+@rem output parameter: _CLASS_PATH
+:updateClasspath
+set "__PATTERN=%~1"
+for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*%__PATTERN%*"') do (
+    set "_CLASS_PATH=!_CLASS_PATH!%_LIB_DIR%\%%f%_PSEP%"
+)
+goto :eof
+
+@rem #########################################################################
+@rem ## Cleanups
+
+:end
+exit /b %_EXITCODE%
+endlocal
diff --git a/docs/css/bootstrap.min.css b/docs/_assets/css/bootstrap.min.css
similarity index 100%
rename from docs/css/bootstrap.min.css
rename to docs/_assets/css/bootstrap.min.css
diff --git a/docs/css/color-brewer.css b/docs/_assets/css/color-brewer.css
similarity index 100%
rename from docs/css/color-brewer.css
rename to docs/_assets/css/color-brewer.css
diff --git a/docs/css/dottydoc.css b/docs/_assets/css/dottydoc.css
similarity index 86%
rename from docs/css/dottydoc.css
rename to docs/_assets/css/dottydoc.css
index caeb967d6fd1..ca7613835ff0 100644
--- a/docs/css/dottydoc.css
+++ b/docs/_assets/css/dottydoc.css
@@ -25,6 +25,10 @@ main > h1 {
   margin-bottom: 20px;
 }
 
+.byline {
+  font-size: 14px;
+}
+
 .byline, .byline a {
   color: grey;
 }
@@ -43,7 +47,7 @@ ul.post-list {
 
 /* headings anchors */
 a.anchor {
-  color: white;
+  color: transparent;
   margin-left: -23px;
   padding-right: 3px;
   transition: color .4s ease-out;
@@ -180,18 +184,6 @@ h5:hover a.anchor:hover {
   font-family: var(--font-family-monospace);
 }
 
-/* code */
-pre, code {
-  font-variant-ligatures: none;
-}
-pre {
-  padding: 0;
-  font-size: 13px;
-  background: var(--pre-bg);
-  border-radius: 2px;
-  border: 1px solid rgba(0, 0, 0, 0.1);
-}
-
 /* admonitions */
 blockquote {
   padding: 0 1em;
@@ -205,8 +197,8 @@ aside {
 }
 
 aside.warning {
-  border-left: 3px solid #d62c2c;
-  background-color: #ffe4e4;
+  border-left: 3px solid var(--red500);
+  background-color: var(--aside-warning-bg);
 }
 
 aside.notice {
@@ -219,21 +211,6 @@ aside.success {
   background-color: #ebfddd;
 }
 
-/* gitter chat */
-.gitter-open-chat-button {
-  background-color: grey;
-}
-.gitter-open-chat-button:focus, .gitter-open-chat-button:hover {
-  background-color: var(--primary);
-}
-.gitter-open-chat-button:focus {
-  box-shadow: 0 0 8px var(--primary);
-}
-.gitter-chat-embed {
-  top: 40px; /* 50px (navbar) - 10px (aside's margin) */
-  bottom: -10px;
-}
-
 /* media queries for bigger screens (dottydoc is mobile-first) */
 @media (min-width: 576px) {
   .byline .author {
@@ -263,4 +240,4 @@ aside.success {
 header {
   position: static !important;
   width: 100% !important;
-}
\ No newline at end of file
+}
diff --git a/docs/css/frontpage.css b/docs/_assets/css/frontpage.css
similarity index 97%
rename from docs/css/frontpage.css
rename to docs/_assets/css/frontpage.css
index 722d4ba09802..a3a5c0d7dd8a 100644
--- a/docs/css/frontpage.css
+++ b/docs/_assets/css/frontpage.css
@@ -119,12 +119,6 @@ pre, code {
   line-height: 1.75em;
 }
 
-/* gitter chat */
-.gitter-chat-embed {
-  top: 0;
-  bottom: 0;
-}
-
 aside {
   margin: 0;
   padding: 0;
diff --git a/docs/css/search.css b/docs/_assets/css/search.css
similarity index 100%
rename from docs/css/search.css
rename to docs/_assets/css/search.css
diff --git a/docs/css/sidebar.css b/docs/_assets/css/sidebar.css
similarity index 100%
rename from docs/css/sidebar.css
rename to docs/_assets/css/sidebar.css
diff --git a/docs/css/toolbar.css b/docs/_assets/css/toolbar.css
similarity index 100%
rename from docs/css/toolbar.css
rename to docs/_assets/css/toolbar.css
diff --git a/docs/_assets/docsScalaLangResources/scaladoc-assets.html b/docs/_assets/docsScalaLangResources/scaladoc-assets.html
new file mode 100644
index 000000000000..504a93b25fa8
--- /dev/null
+++ b/docs/_assets/docsScalaLangResources/scaladoc-assets.html
@@ -0,0 +1,5 @@
+
+
+
+
+
diff --git a/docs/images/aggelos.jpg b/docs/_assets/images/aggelos.jpg
similarity index 100%
rename from docs/images/aggelos.jpg
rename to docs/_assets/images/aggelos.jpg
diff --git a/docs/images/allan.jpg b/docs/_assets/images/allan.jpg
similarity index 100%
rename from docs/images/allan.jpg
rename to docs/_assets/images/allan.jpg
diff --git a/docs/images/anatolii.png b/docs/_assets/images/anatolii.png
similarity index 100%
rename from docs/images/anatolii.png
rename to docs/_assets/images/anatolii.png
diff --git a/docs/images/dotty-ide/decompiler.png b/docs/_assets/images/dotty-ide/decompiler.png
similarity index 100%
rename from docs/images/dotty-ide/decompiler.png
rename to docs/_assets/images/dotty-ide/decompiler.png
diff --git a/docs/images/dotty-ide/documentation-hover.png b/docs/_assets/images/dotty-ide/documentation-hover.png
similarity index 100%
rename from docs/images/dotty-ide/documentation-hover.png
rename to docs/_assets/images/dotty-ide/documentation-hover.png
diff --git a/docs/images/dotty-ide/signature-help.png b/docs/_assets/images/dotty-ide/signature-help.png
similarity index 100%
rename from docs/images/dotty-ide/signature-help.png
rename to docs/_assets/images/dotty-ide/signature-help.png
diff --git a/docs/images/dotty-logo-white.svg b/docs/_assets/images/dotty-logo-white.svg
similarity index 100%
rename from docs/images/dotty-logo-white.svg
rename to docs/_assets/images/dotty-logo-white.svg
diff --git a/docs/images/explicit-nulls/explicit-nulls-type-hierarchy.png b/docs/_assets/images/explicit-nulls/explicit-nulls-type-hierarchy.png
similarity index 100%
rename from docs/images/explicit-nulls/explicit-nulls-type-hierarchy.png
rename to docs/_assets/images/explicit-nulls/explicit-nulls-type-hierarchy.png
diff --git a/docs/images/favicon.png b/docs/_assets/images/favicon.png
similarity index 100%
rename from docs/images/favicon.png
rename to docs/_assets/images/favicon.png
diff --git a/docs/images/felix.jpg b/docs/_assets/images/felix.jpg
similarity index 100%
rename from docs/images/felix.jpg
rename to docs/_assets/images/felix.jpg
diff --git a/docs/images/fengyun.jpg b/docs/_assets/images/fengyun.jpg
similarity index 100%
rename from docs/images/fengyun.jpg
rename to docs/_assets/images/fengyun.jpg
diff --git a/docs/images/github-logo.svg b/docs/_assets/images/github-logo.svg
similarity index 100%
rename from docs/images/github-logo.svg
rename to docs/_assets/images/github-logo.svg
diff --git a/docs/_assets/images/logo.svg b/docs/_assets/images/logo.svg
new file mode 100644
index 000000000000..472d0d6025fc
--- /dev/null
+++ b/docs/_assets/images/logo.svg
@@ -0,0 +1,16 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/docs/images/martin.jpg b/docs/_assets/images/martin.jpg
similarity index 100%
rename from docs/images/martin.jpg
rename to docs/_assets/images/martin.jpg
diff --git a/docs/images/nico.jpg b/docs/_assets/images/nico.jpg
similarity index 100%
rename from docs/images/nico.jpg
rename to docs/_assets/images/nico.jpg
diff --git a/docs/images/olivier.jpg b/docs/_assets/images/olivier.jpg
similarity index 100%
rename from docs/images/olivier.jpg
rename to docs/_assets/images/olivier.jpg
diff --git a/docs/images/others/scala-days-logo.png b/docs/_assets/images/others/scala-days-logo.png
similarity index 100%
rename from docs/images/others/scala-days-logo.png
rename to docs/_assets/images/others/scala-days-logo.png
diff --git a/docs/images/petrashko.jpg b/docs/_assets/images/petrashko.jpg
similarity index 100%
rename from docs/images/petrashko.jpg
rename to docs/_assets/images/petrashko.jpg
diff --git a/docs/images/preview.png b/docs/_assets/images/preview.png
similarity index 100%
rename from docs/images/preview.png
rename to docs/_assets/images/preview.png
diff --git a/docs/images/scala-logo.svg b/docs/_assets/images/scala-logo.svg
similarity index 100%
rename from docs/images/scala-logo.svg
rename to docs/_assets/images/scala-logo.svg
diff --git a/docs/images/scaladoc-logo.png b/docs/_assets/images/scaladoc-logo.png
similarity index 100%
rename from docs/images/scaladoc-logo.png
rename to docs/_assets/images/scaladoc-logo.png
diff --git a/docs/_assets/images/scaladoc/inkuire-1.0.0-M2_js_flatMap.gif b/docs/_assets/images/scaladoc/inkuire-1.0.0-M2_js_flatMap.gif
new file mode 100644
index 000000000000..1cc25b5683f7
Binary files /dev/null and b/docs/_assets/images/scaladoc/inkuire-1.0.0-M2_js_flatMap.gif differ
diff --git a/docs/_assets/images/scaladoc/nightly.gif b/docs/_assets/images/scaladoc/nightly.gif
new file mode 100644
index 000000000000..d6e764a032d5
Binary files /dev/null and b/docs/_assets/images/scaladoc/nightly.gif differ
diff --git a/docs/images/smarter.jpg b/docs/_assets/images/smarter.jpg
similarity index 100%
rename from docs/images/smarter.jpg
rename to docs/_assets/images/smarter.jpg
diff --git a/docs/images/worksheets/config-autorun.png b/docs/_assets/images/worksheets/config-autorun.png
similarity index 100%
rename from docs/images/worksheets/config-autorun.png
rename to docs/_assets/images/worksheets/config-autorun.png
diff --git a/docs/images/worksheets/worksheet-demo.gif b/docs/_assets/images/worksheets/worksheet-demo.gif
similarity index 100%
rename from docs/images/worksheets/worksheet-demo.gif
rename to docs/_assets/images/worksheets/worksheet-demo.gif
diff --git a/docs/images/worksheets/worksheet-help.png b/docs/_assets/images/worksheets/worksheet-help.png
similarity index 100%
rename from docs/images/worksheets/worksheet-help.png
rename to docs/_assets/images/worksheets/worksheet-help.png
diff --git a/docs/images/worksheets/worksheet-run.png b/docs/_assets/images/worksheets/worksheet-run.png
similarity index 100%
rename from docs/images/worksheets/worksheet-run.png
rename to docs/_assets/images/worksheets/worksheet-run.png
diff --git a/docs/js/api-search.js b/docs/_assets/js/api-search.js
similarity index 100%
rename from docs/js/api-search.js
rename to docs/_assets/js/api-search.js
diff --git a/docs/js/bootstrap.min.js b/docs/_assets/js/bootstrap.min.js
similarity index 100%
rename from docs/js/bootstrap.min.js
rename to docs/_assets/js/bootstrap.min.js
diff --git a/docs/js/highlight.pack.js b/docs/_assets/js/highlight.pack.js
similarity index 100%
rename from docs/js/highlight.pack.js
rename to docs/_assets/js/highlight.pack.js
diff --git a/docs/js/jquery.min.js b/docs/_assets/js/jquery.min.js
similarity index 100%
rename from docs/js/jquery.min.js
rename to docs/_assets/js/jquery.min.js
diff --git a/docs/js/sidebar.js b/docs/_assets/js/sidebar.js
similarity index 100%
rename from docs/js/sidebar.js
rename to docs/_assets/js/sidebar.js
diff --git a/docs/js/toolbar.js b/docs/_assets/js/toolbar.js
similarity index 100%
rename from docs/js/toolbar.js
rename to docs/_assets/js/toolbar.js
diff --git a/docs/_assets/resources/safer-exceptions.pdf b/docs/_assets/resources/safer-exceptions.pdf
new file mode 100644
index 000000000000..1a10ab6c0e46
Binary files /dev/null and b/docs/_assets/resources/safer-exceptions.pdf differ
diff --git a/docs/blog/_posts/2015-10-23-dotty-compiler-bootstraps.md b/docs/_blog/_posts/2015-10-23-dotty-compiler-bootstraps.md
similarity index 100%
rename from docs/blog/_posts/2015-10-23-dotty-compiler-bootstraps.md
rename to docs/_blog/_posts/2015-10-23-dotty-compiler-bootstraps.md
diff --git a/docs/blog/_posts/2016-01-02-new-year-resolutions.md b/docs/_blog/_posts/2016-01-02-new-year-resolutions.md
similarity index 100%
rename from docs/blog/_posts/2016-01-02-new-year-resolutions.md
rename to docs/_blog/_posts/2016-01-02-new-year-resolutions.md
diff --git a/docs/blog/_posts/2016-02-03-essence-of-scala.md b/docs/_blog/_posts/2016-02-03-essence-of-scala.md
similarity index 100%
rename from docs/blog/_posts/2016-02-03-essence-of-scala.md
rename to docs/_blog/_posts/2016-02-03-essence-of-scala.md
diff --git a/docs/blog/_posts/2016-02-17-scaling-dot-soundness.md b/docs/_blog/_posts/2016-02-17-scaling-dot-soundness.md
similarity index 100%
rename from docs/blog/_posts/2016-02-17-scaling-dot-soundness.md
rename to docs/_blog/_posts/2016-02-17-scaling-dot-soundness.md
diff --git a/docs/blog/_posts/2016-05-05-multiversal-equality.md b/docs/_blog/_posts/2016-05-05-multiversal-equality.md
similarity index 100%
rename from docs/blog/_posts/2016-05-05-multiversal-equality.md
rename to docs/_blog/_posts/2016-05-05-multiversal-equality.md
diff --git a/docs/blog/_posts/2016-12-05-implicit-function-types.md b/docs/_blog/_posts/2016-12-05-implicit-function-types.md
similarity index 100%
rename from docs/blog/_posts/2016-12-05-implicit-function-types.md
rename to docs/_blog/_posts/2016-12-05-implicit-function-types.md
diff --git a/docs/blog/_posts/2017-05-31-first-dotty-milestone-release.md b/docs/_blog/_posts/2017-05-31-first-dotty-milestone-release.md
similarity index 100%
rename from docs/blog/_posts/2017-05-31-first-dotty-milestone-release.md
rename to docs/_blog/_posts/2017-05-31-first-dotty-milestone-release.md
diff --git a/docs/blog/_posts/2017-07-12-second-dotty-milestone-release.md b/docs/_blog/_posts/2017-07-12-second-dotty-milestone-release.md
similarity index 98%
rename from docs/blog/_posts/2017-07-12-second-dotty-milestone-release.md
rename to docs/_blog/_posts/2017-07-12-second-dotty-milestone-release.md
index 19de33740776..aa71d62ffc97 100644
--- a/docs/blog/_posts/2017-07-12-second-dotty-milestone-release.md
+++ b/docs/_blog/_posts/2017-07-12-second-dotty-milestone-release.md
@@ -10,7 +10,7 @@ Today, we are excited to release Dotty version 0.2.0-RC1. This release
 serves as a technology preview that demonstrates new language features
 and the compiler supporting them.
 
-This release is based on the [previous milestone](/blog/_posts/2017-05-31-first-dotty-milestone-release.html).
+This release is based on the [previous milestone](/_blog/_posts/2017-05-31-first-dotty-milestone-release.html).
 The highlights of this release are:
  - substantial improvement of quality of generated code for pattern matching
  - improvements in VS Code IDE stability
@@ -24,7 +24,7 @@ The highlights of this release are:
 This is our second scheduled release according to our [6-week release schedule](https://dotty.epfl.ch/docs/usage/version-numbers.html).
 
 ## What’s in the 0.2.0-RC1 technology preview?
-The [previous technology preview](/blog/_posts/2017-05-31-first-dotty-milestone-release.html) has shipped new language features planned for Scala 3:
+The [previous technology preview](/_blog/_posts/2017-05-31-first-dotty-milestone-release.html) has shipped new language features planned for Scala 3:
 [Intersection Types](https://dotty.epfl.ch/docs/reference/new-types/intersection-types.html),
 [Union Types](https://dotty.epfl.ch/docs/reference/new-types/union-types.html),
 [Trait Parameters](https://dotty.epfl.ch/docs/reference/other-new-features/trait-parameters.html),
diff --git a/docs/blog/_posts/2017-09-07-third-dotty-milestone-release.md b/docs/_blog/_posts/2017-09-07-third-dotty-milestone-release.md
similarity index 98%
rename from docs/blog/_posts/2017-09-07-third-dotty-milestone-release.md
rename to docs/_blog/_posts/2017-09-07-third-dotty-milestone-release.md
index d07c5d518f19..eb20292884f1 100644
--- a/docs/blog/_posts/2017-09-07-third-dotty-milestone-release.md
+++ b/docs/_blog/_posts/2017-09-07-third-dotty-milestone-release.md
@@ -20,7 +20,7 @@ You can learn more about Dotty on our [website](http://dotty.epfl.ch).
 
 
 This is our third scheduled release according to our [6-week release schedule](https://dotty.epfl.ch/docs/usage/version-numbers.html).
-The [previous technology preview](/blog/_posts/2017-07-12-second-dotty-milestone-release.html) improved
+The [previous technology preview](/_blog/_posts/2017-07-12-second-dotty-milestone-release.html) improved
 stability and reliability:
 
  - substantial improvement of quality of generated code for pattern matching
diff --git a/docs/blog/_posts/2017-10-16-fourth-dotty-milestone-release.md b/docs/_blog/_posts/2017-10-16-fourth-dotty-milestone-release.md
similarity index 98%
rename from docs/blog/_posts/2017-10-16-fourth-dotty-milestone-release.md
rename to docs/_blog/_posts/2017-10-16-fourth-dotty-milestone-release.md
index 52a1f6914034..4990ec198aa0 100644
--- a/docs/blog/_posts/2017-10-16-fourth-dotty-milestone-release.md
+++ b/docs/_blog/_posts/2017-10-16-fourth-dotty-milestone-release.md
@@ -20,7 +20,7 @@ You can learn more about Dotty on our [website](http://dotty.epfl.ch).
 
 
 This is our fourth scheduled release according to our [6-week release schedule](https://dotty.epfl.ch/docs/usage/version-numbers.html).
-The [previous technology preview](/blog/_posts/2017-09-07-third-dotty-milestone-release.html) improved
+The [previous technology preview](/_blog/_posts/2017-09-07-third-dotty-milestone-release.html) improved
 stability and reliability.
 
 ## What’s new in the 0.4.0-RC1 technology preview?
diff --git a/docs/blog/_posts/2017-12-01-fifth-dotty-milestone-release.md b/docs/_blog/_posts/2017-12-01-fifth-dotty-milestone-release.md
similarity index 98%
rename from docs/blog/_posts/2017-12-01-fifth-dotty-milestone-release.md
rename to docs/_blog/_posts/2017-12-01-fifth-dotty-milestone-release.md
index 8204771b3134..98fa6e127088 100644
--- a/docs/blog/_posts/2017-12-01-fifth-dotty-milestone-release.md
+++ b/docs/_blog/_posts/2017-12-01-fifth-dotty-milestone-release.md
@@ -20,7 +20,7 @@ You can learn more about Dotty on our [website](http://dotty.epfl.ch).
 
 
 This is our fifth scheduled release according to our [6-week release schedule](https://dotty.epfl.ch/docs/usage/version-numbers.html).
-The [previous technology preview](/blog/_posts/2017-10-16-fourth-dotty-milestone-release.html) added
+The [previous technology preview](/_blog/_posts/2017-10-16-fourth-dotty-milestone-release.html) added
 support for Scala 2.12 and came with a brand new REPL.
 
 ## What’s new in the 0.5.0-RC1 technology preview?
diff --git a/docs/blog/_posts/2018-03-05-seventh-dotty-milestone-release.md b/docs/_blog/_posts/2018-03-05-seventh-dotty-milestone-release.md
similarity index 100%
rename from docs/blog/_posts/2018-03-05-seventh-dotty-milestone-release.md
rename to docs/_blog/_posts/2018-03-05-seventh-dotty-milestone-release.md
diff --git a/docs/blog/_posts/2018-04-27-eighth-dotty-milestone-release.md b/docs/_blog/_posts/2018-04-27-eighth-dotty-milestone-release.md
similarity index 100%
rename from docs/blog/_posts/2018-04-27-eighth-dotty-milestone-release.md
rename to docs/_blog/_posts/2018-04-27-eighth-dotty-milestone-release.md
diff --git a/docs/blog/_posts/2018-07-06-ninth-dotty-milestone-release.md b/docs/_blog/_posts/2018-07-06-ninth-dotty-milestone-release.md
similarity index 100%
rename from docs/blog/_posts/2018-07-06-ninth-dotty-milestone-release.md
rename to docs/_blog/_posts/2018-07-06-ninth-dotty-milestone-release.md
diff --git a/docs/blog/_posts/2018-10-10-10th-dotty-milestone-release.md b/docs/_blog/_posts/2018-10-10-10th-dotty-milestone-release.md
similarity index 100%
rename from docs/blog/_posts/2018-10-10-10th-dotty-milestone-release.md
rename to docs/_blog/_posts/2018-10-10-10th-dotty-milestone-release.md
diff --git a/docs/blog/_posts/2018-11-30-11th-dotty-milestone-release.md b/docs/_blog/_posts/2018-11-30-11th-dotty-milestone-release.md
similarity index 100%
rename from docs/blog/_posts/2018-11-30-11th-dotty-milestone-release.md
rename to docs/_blog/_posts/2018-11-30-11th-dotty-milestone-release.md
diff --git a/docs/blog/_posts/2019-01-21-12th-dotty-milestone-release.md b/docs/_blog/_posts/2019-01-21-12th-dotty-milestone-release.md
similarity index 100%
rename from docs/blog/_posts/2019-01-21-12th-dotty-milestone-release.md
rename to docs/_blog/_posts/2019-01-21-12th-dotty-milestone-release.md
diff --git a/docs/blog/_posts/2019-03-05-13th-dotty-milestone-release.md b/docs/_blog/_posts/2019-03-05-13th-dotty-milestone-release.md
similarity index 100%
rename from docs/blog/_posts/2019-03-05-13th-dotty-milestone-release.md
rename to docs/_blog/_posts/2019-03-05-13th-dotty-milestone-release.md
diff --git a/docs/blog/_posts/2019-04-15-14th-dotty-milestone-release.md b/docs/_blog/_posts/2019-04-15-14th-dotty-milestone-release.md
similarity index 100%
rename from docs/blog/_posts/2019-04-15-14th-dotty-milestone-release.md
rename to docs/_blog/_posts/2019-04-15-14th-dotty-milestone-release.md
diff --git a/docs/blog/_posts/2019-05-23-15th-dotty-milestone-release.md b/docs/_blog/_posts/2019-05-23-15th-dotty-milestone-release.md
similarity index 100%
rename from docs/blog/_posts/2019-05-23-15th-dotty-milestone-release.md
rename to docs/_blog/_posts/2019-05-23-15th-dotty-milestone-release.md
diff --git a/docs/blog/_posts/2019-06-11-16th-dotty-milestone-release.md b/docs/_blog/_posts/2019-06-11-16th-dotty-milestone-release.md
similarity index 99%
rename from docs/blog/_posts/2019-06-11-16th-dotty-milestone-release.md
rename to docs/_blog/_posts/2019-06-11-16th-dotty-milestone-release.md
index e3d81f6c3df5..41194df26625 100644
--- a/docs/blog/_posts/2019-06-11-16th-dotty-milestone-release.md
+++ b/docs/_blog/_posts/2019-06-11-16th-dotty-milestone-release.md
@@ -156,7 +156,7 @@ enum B(val gravity: Double) extends java.lang.Enum[B] {
 }
 ```
 
-For more information please check the [test case](https://github.com/lampepfl/dotty/tree/master/tests/run/enum-java) and
+For more information please check the [test case](https://github.com/lampepfl/dotty/tree/main/tests/run/enum-java) and
 also the relevant PRs [#6602](https://github.com/lampepfl/dotty/pull/6602) and
 [#6629](https://github.com/lampepfl/dotty/pull/6629).
 
diff --git a/docs/blog/_posts/2019-07-25-17th-dotty-milestone-release.md b/docs/_blog/_posts/2019-07-25-17th-dotty-milestone-release.md
similarity index 100%
rename from docs/blog/_posts/2019-07-25-17th-dotty-milestone-release.md
rename to docs/_blog/_posts/2019-07-25-17th-dotty-milestone-release.md
diff --git a/docs/blog/_posts/2019-08-30-18th-dotty-milestone-release.md b/docs/_blog/_posts/2019-08-30-18th-dotty-milestone-release.md
similarity index 100%
rename from docs/blog/_posts/2019-08-30-18th-dotty-milestone-release.md
rename to docs/_blog/_posts/2019-08-30-18th-dotty-milestone-release.md
diff --git a/docs/blog/_posts/2019-09-23-19th-dotty-milestone-release.md b/docs/_blog/_posts/2019-09-23-19th-dotty-milestone-release.md
similarity index 100%
rename from docs/blog/_posts/2019-09-23-19th-dotty-milestone-release.md
rename to docs/_blog/_posts/2019-09-23-19th-dotty-milestone-release.md
diff --git a/docs/blog/_posts/2019-11-04-20th-dotty-milestone-release.md b/docs/_blog/_posts/2019-11-04-20th-dotty-milestone-release.md
similarity index 100%
rename from docs/blog/_posts/2019-11-04-20th-dotty-milestone-release.md
rename to docs/_blog/_posts/2019-11-04-20th-dotty-milestone-release.md
diff --git a/docs/blog/_posts/2019-12-20-21th-dotty-milestone-release.md b/docs/_blog/_posts/2019-12-20-21th-dotty-milestone-release.md
similarity index 99%
rename from docs/blog/_posts/2019-12-20-21th-dotty-milestone-release.md
rename to docs/_blog/_posts/2019-12-20-21th-dotty-milestone-release.md
index 6b5d28c35254..94d8ee61bec9 100644
--- a/docs/blog/_posts/2019-12-20-21th-dotty-milestone-release.md
+++ b/docs/_blog/_posts/2019-12-20-21th-dotty-milestone-release.md
@@ -48,7 +48,7 @@ It means that we can now put the Scala 3 compiler under heavy load, getting it
 ready for industrial strength applications. At the moment we have 23
 projects on our community projects and we expect this number to go up!
 
-> https://github.com/lampepfl/dotty/tree/master/community-build/community-projects
+> https://github.com/lampepfl/dotty/tree/main/community-build/community-projects
 
 This project contains tests to build and test a corpus of open sources Scala 2.x
 projects against Scala 3.
diff --git a/docs/blog/_posts/2020-02-05-22nd-dotty-milestone-release.md b/docs/_blog/_posts/2020-02-05-22nd-dotty-milestone-release.md
similarity index 100%
rename from docs/blog/_posts/2020-02-05-22nd-dotty-milestone-release.md
rename to docs/_blog/_posts/2020-02-05-22nd-dotty-milestone-release.md
diff --git a/docs/blog/_posts/2020-03-18-23rd-dotty-milestone-release.md b/docs/_blog/_posts/2020-03-18-23rd-dotty-milestone-release.md
similarity index 100%
rename from docs/blog/_posts/2020-03-18-23rd-dotty-milestone-release.md
rename to docs/_blog/_posts/2020-03-18-23rd-dotty-milestone-release.md
diff --git a/docs/blog/_posts/2020-04-29-24th-dotty-milestone-release.md b/docs/_blog/_posts/2020-04-29-24th-dotty-milestone-release.md
similarity index 99%
rename from docs/blog/_posts/2020-04-29-24th-dotty-milestone-release.md
rename to docs/_blog/_posts/2020-04-29-24th-dotty-milestone-release.md
index d4f34446e5c4..e32df8cacc55 100644
--- a/docs/blog/_posts/2020-04-29-24th-dotty-milestone-release.md
+++ b/docs/_blog/_posts/2020-04-29-24th-dotty-milestone-release.md
@@ -134,7 +134,7 @@ We are looking forward to having you join the team of contributors.
 
 Dotty now has a set of widely-used community libraries that are built against every nightly Dotty
 snapshot. Currently, this includes shapeless, ScalaPB, algebra, scalatest, scopt and squants.
-Join our [community build](https://github.com/lampepfl/dotty/tree/master/community-build)
+Join our [community build](https://github.com/lampepfl/dotty/tree/main/community-build)
 to make sure that our regression suite includes your library.
 
 [Scastie]: https://scastie.scala-lang.org/?target=dotty
diff --git a/docs/blog/_posts/2020-06-22-25th-dotty-milestone-release.md b/docs/_blog/_posts/2020-06-22-25th-dotty-milestone-release.md
similarity index 99%
rename from docs/blog/_posts/2020-06-22-25th-dotty-milestone-release.md
rename to docs/_blog/_posts/2020-06-22-25th-dotty-milestone-release.md
index 8989a2f7b410..dd5def04bfe9 100644
--- a/docs/blog/_posts/2020-06-22-25th-dotty-milestone-release.md
+++ b/docs/_blog/_posts/2020-06-22-25th-dotty-milestone-release.md
@@ -98,7 +98,7 @@ We are looking forward to having you join the team of contributors.
 
 Dotty now has a set of widely-used community libraries that are built against every nightly Dotty
 snapshot. Currently, this includes shapeless, ScalaPB, algebra, scalatest, scopt and squants.
-Join our [community build](https://github.com/lampepfl/dotty/tree/master/community-build)
+Join our [community build](https://github.com/lampepfl/dotty/tree/main/community-build)
 to make sure that our regression suite includes your library.
 
 [Scastie]: https://scastie.scala-lang.org/?target=dotty
diff --git a/docs/blog/_posts/2020-07-27-26th-dotty-milestone-release.md b/docs/_blog/_posts/2020-07-27-26th-dotty-milestone-release.md
similarity index 99%
rename from docs/blog/_posts/2020-07-27-26th-dotty-milestone-release.md
rename to docs/_blog/_posts/2020-07-27-26th-dotty-milestone-release.md
index 06a76455e433..b0d153dded7e 100644
--- a/docs/blog/_posts/2020-07-27-26th-dotty-milestone-release.md
+++ b/docs/_blog/_posts/2020-07-27-26th-dotty-milestone-release.md
@@ -128,7 +128,7 @@ We are looking forward to having you join the team of contributors.
 
 Dotty now has a set of widely-used community libraries that are built against every nightly Dotty
 snapshot. Currently, this includes shapeless, ScalaPB, algebra, scalatest, scopt and squants.
-Join our [community build](https://github.com/lampepfl/dotty/tree/master/community-build)
+Join our [community build](https://github.com/lampepfl/dotty/tree/main/community-build)
 to make sure that our regression suite includes your library.
 
 [Scastie]: https://scastie.scala-lang.org/?target=dotty
diff --git a/docs/blog/_posts/2020-08-31-27th-dotty-milestone-release.md b/docs/_blog/_posts/2020-08-31-27th-dotty-milestone-release.md
similarity index 99%
rename from docs/blog/_posts/2020-08-31-27th-dotty-milestone-release.md
rename to docs/_blog/_posts/2020-08-31-27th-dotty-milestone-release.md
index 641db674abb6..e42e98a1385b 100644
--- a/docs/blog/_posts/2020-08-31-27th-dotty-milestone-release.md
+++ b/docs/_blog/_posts/2020-08-31-27th-dotty-milestone-release.md
@@ -109,7 +109,7 @@ We are looking forward to having you join the team of contributors.
 
 Dotty now has a set of widely-used community libraries that are built against every nightly Dotty
 snapshot. Currently, this includes shapeless, ScalaPB, algebra, scalatest, scopt and squants.
-Join our [community build](https://github.com/lampepfl/dotty/tree/master/community-build)
+Join our [community build](https://github.com/lampepfl/dotty/tree/main/community-build)
 to make sure that our regression suite includes your library.
 
 [Scastie]: https://scastie.scala-lang.org/?target=dotty
diff --git a/docs/blog/_posts/2020-09-21-naming-schema-change.md b/docs/_blog/_posts/2020-09-21-naming-schema-change.md
similarity index 100%
rename from docs/blog/_posts/2020-09-21-naming-schema-change.md
rename to docs/_blog/_posts/2020-09-21-naming-schema-change.md
diff --git a/docs/blog/_posts/2020-11-09-scala3-m1.md b/docs/_blog/_posts/2020-11-09-scala3-m1.md
similarity index 99%
rename from docs/blog/_posts/2020-11-09-scala3-m1.md
rename to docs/_blog/_posts/2020-11-09-scala3-m1.md
index 5292ad0e8344..ffef5618f9ff 100644
--- a/docs/blog/_posts/2020-11-09-scala3-m1.md
+++ b/docs/_blog/_posts/2020-11-09-scala3-m1.md
@@ -199,7 +199,7 @@ We are looking forward to having you join the team of contributors.
 
 Dotty now has a set of widely-used community libraries that are built against every nightly Dotty
 snapshot. Currently, this includes shapeless, ScalaPB, algebra, scalatest, scopt and squants.
-Join our [community build](https://github.com/lampepfl/dotty/tree/master/community-build)
+Join our [community build](https://github.com/lampepfl/dotty/tree/main/community-build)
 to make sure that our regression suite includes your library.
 
 [Scastie]: https://scastie.scala-lang.org/?target=dotty
diff --git a/docs/blog/_posts/2020-12-18-scala3-m3.md b/docs/_blog/_posts/2020-12-18-scala3-m3.md
similarity index 99%
rename from docs/blog/_posts/2020-12-18-scala3-m3.md
rename to docs/_blog/_posts/2020-12-18-scala3-m3.md
index e0cba2b144d8..41cfa76c0265 100644
--- a/docs/blog/_posts/2020-12-18-scala3-m3.md
+++ b/docs/_blog/_posts/2020-12-18-scala3-m3.md
@@ -231,7 +231,7 @@ We are looking forward to having you join the team of contributors.
 
 ## Library authors: Join our community build
 
-Scala 3 is regularly tested against a sample of libraries known as the "community build". You can add your library to the [community build](https://github.com/lampepfl/dotty/tree/master/community-build) by submitting a PR.
+Scala 3 is regularly tested against a sample of libraries known as the "community build". You can add your library to the [community build](https://github.com/lampepfl/dotty/tree/main/community-build) by submitting a PR.
 
 [Scastie]: https://scastie.scala-lang.org/?target=dotty
 
diff --git a/docs/blog/_posts/2021-02-17-scala3-rc1.md b/docs/_blog/_posts/2021-02-17-scala3-rc1.md
similarity index 100%
rename from docs/blog/_posts/2021-02-17-scala3-rc1.md
rename to docs/_blog/_posts/2021-02-17-scala3-rc1.md
diff --git a/docs/blog/_posts/2021-03-31-scala3-rc2.md b/docs/_blog/_posts/2021-03-31-scala3-rc2.md
similarity index 99%
rename from docs/blog/_posts/2021-03-31-scala3-rc2.md
rename to docs/_blog/_posts/2021-03-31-scala3-rc2.md
index 8e4439037be5..178dfabfbffc 100644
--- a/docs/blog/_posts/2021-03-31-scala3-rc2.md
+++ b/docs/_blog/_posts/2021-03-31-scala3-rc2.md
@@ -116,7 +116,7 @@ We are looking forward to having you join the team of contributors.
 
 Dotty now has a set of widely-used community libraries that are built against every nightly Dotty
 snapshot. Currently, this includes shapeless, ScalaPB, algebra, scalatest, scopt and squants.
-Join our [community build](https://github.com/lampepfl/dotty/tree/master/community-build)
+Join our [community build](https://github.com/lampepfl/dotty/tree/main/community-build)
 to make sure that our regression suite includes your library.
 
 [Scastie]: https://scastie.scala-lang.org/?target=dotty
diff --git a/docs/_blog/_posts/2021-04-21-scala3-rc3.md b/docs/_blog/_posts/2021-04-21-scala3-rc3.md
new file mode 100644
index 000000000000..8651730da93a
--- /dev/null
+++ b/docs/_blog/_posts/2021-04-21-scala3-rc3.md
@@ -0,0 +1,70 @@
+---
+layout: blog-page
+title: Scala 3.0.0-RC3 – bug fixes for 3.0.0 stable
+author: Anatolii Kmetiuk
+authorImg: /images/anatolii.png
+date: 2021-04-21
+---
+
+Hello! With this blog article, we would like to announce the release of Scala 3.0.0-RC3. Some critical bugs were discovered during the previous release cycle, whose fixes had to be included in 3.0.0 stable. Therefore, we are having RC3 to give the community time to test our fixes.
+
+This release also impacts the release date for 3.0.0 stable. 3.0.0 stable will get a green light if no critical bugs are discovered by May 12th. Otherwise we may need to have an RC4.
+
+
+
+# Bug fixes included
+- Fix type test for trait parameter arguments [#12066](https://github.com/lampepfl/dotty/pull/12066)
+- Set file filter correctly [#12119](https://github.com/lampepfl/dotty/pull/12119)
+- Provide mirror support after inlining [#12079](https://github.com/lampepfl/dotty/pull/12079)
+- Revert "Recursively check nonvariant arguments of base types for realizability" [#12067](https://github.com/lampepfl/dotty/pull/12067)
+- When simplifying match types, ensure fully defined before reducing [#12068](https://github.com/lampepfl/dotty/pull/12068)
+- sbt-dotty: the binary version is 3 for Scala >= 3.0.0 [#12084](https://github.com/lampepfl/dotty/pull/12084)
+- Fix isInstanceOf[Array[?]] returning true on non-Array [#12108](https://github.com/lampepfl/dotty/pull/12108)
+- Scala2Unpickler: don't unpickle the same type parameter twice [#12129](https://github.com/lampepfl/dotty/pull/12129)
+- Overloading resolution: Handle SAM types more like Java and Scala 2 [#12131](https://github.com/lampepfl/dotty/pull/12131)
+- Add TermParamClause.isGiven [#12042](https://github.com/lampepfl/dotty/pull/12042)
+
+## Contributors
+Thank you to all the contributors who made this release possible 🎉
+
+According to `git shortlog -sn --no-merges 3.0.0-RC2..3.0.0-RC3` these are:
+
+```
+     8  Martin Odersky
+     7  Adrien Piquerez
+     6  Guillaume Martres
+     4  Liu Fengyun
+     1  Anatolii Kmetiuk
+     1  Nicolas Stucki
+```
+
+If you want to get your hands dirty and contribute to Dotty, now is a good time to get involved!
+Head to our [Getting Started page for new contributors](https://dotty.epfl.ch/docs/contributing/getting-started.html),
+and have a look at some of the [good first issues](https://github.com/lampepfl/dotty/issues?q=is%3Aissue+is%3Aopen+label%3Aexp%3Anovice).
+They make perfect entry points into hacking on the compiler.
+
+We are looking forward to having you join the team of contributors.
+
+## Library authors: Join our community build
+
+Dotty now has a set of widely-used community libraries that are built against every nightly Dotty
+snapshot. Currently, this includes shapeless, ScalaPB, algebra, scalatest, scopt and squants.
+Join our [community build](https://github.com/lampepfl/dotty/tree/main/community-build)
+to make sure that our regression suite includes your library.
+
+[Scastie]: https://scastie.scala-lang.org/?target=dotty
+
+[@odersky]: https://github.com/odersky
+[@DarkDimius]: https://github.com/DarkDimius
+[@smarter]: https://github.com/smarter
+[@felixmulder]: https://github.com/felixmulder
+[@nicolasstucki]: https://github.com/nicolasstucki
+[@liufengyun]: https://github.com/liufengyun
+[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain
+[@biboudis]: https://github.com/biboudis
+[@allanrenucci]: https://github.com/allanrenucci
+[@Blaisorblade]: https://github.com/Blaisorblade
+[@Duhemm]: https://github.com/Duhemm
+[@AleksanderBG]: https://github.com/AleksanderBG
+[@milessabin]: https://github.com/milessabin
+[@anatoliykmetyuk]: https://github.com/anatoliykmetyuk
diff --git a/docs/_blog/_posts/2021-06-07-scala3.0.1-rc1-release.md b/docs/_blog/_posts/2021-06-07-scala3.0.1-rc1-release.md
new file mode 100644
index 000000000000..e9fac2d0447c
--- /dev/null
+++ b/docs/_blog/_posts/2021-06-07-scala3.0.1-rc1-release.md
@@ -0,0 +1,168 @@
+---
+layout: blog-page
+title: Scala 3.0.1-RC1 – further stabilising the compiler
+author: Anatolii Kmetiuk
+authorImg: /images/anatolii.png
+date: 2021-06-07
+---
+
+Hello! We are happy to announce Scala 3.0.1-RC1 – the first release candidate in the post-3.0.0 era. With this release, we continue the work on making the compiler even more stable.
+
+
+
+# Experimental language features policy
+Research and experimentation has always been an integral part of the Scala community's culture. That's what made Scala the language it is right now. Like many things in engineering, however, it's a part of a trade-off between experimentation and stability. Experimenting is fun and insightful, but when stakes on your project are high, stability becomes a priority.
+
+Therefore, to ensure wide adoption of the language and its reliability in a wide range of applications, we need to balance the two. We would like to have the room for trying new things out – but while doing so, we would like to keep in mind an ordinary Scala user who may not necessarily be interested in the bleeding edge and who would like their Scala dependencies and code to simply work. In the post-3.0.0 era, we must prioritize stability.
+
+With this release, we are introducing a restriction on [features marked as experimental](https://dotty.epfl.ch/api/scala/language$$experimental$.html). Now, it is not possible to use them from stable or `RC` releases. If you would like to experiment with them, you need to use a `NIGHTLY` version of the compiler. Nightlies are published every 24 hours, and you can find them on [Maven](https://repo1.maven.org/maven2/org/scala-lang/scala3-compiler_3/).
+
+The spirit of this policy is to make sure that effectively, no library published for Scala 3 contains experimental features. This way, what is experimental can be easily changed and is not subject to the guarantees of the wider language. And, most importantly, the changes to such features would not affect the community *in practice* – a guarantee not achievable if we just announced the policy without implementing a mechanism to enforce it.
+
+Having said that, we still encourage people to play with the experimental features from the `NIGHTLY` compiler versions and discuss their findings. Without the curious and adventurous part of the community playing with the new features, there is no way of knowing what they are good for, and no way to decide whether they should be dropped or promoted to a stable feature.
+
+You can read more about this change in PR [#12102](https://github.com/lampepfl/dotty/pull/12102).
+
+# Kind-projector work
+This release also brings extra features for the [Kind Projector](https://docs.scala-lang.org/scala3/guides/migration/plugin-kind-projector.html) migration support. First, PR [#12378](https://github.com/lampepfl/dotty/pull/12378) allows `_` as a type lambda placeholder. Second, PR [#12341](https://github.com/lampepfl/dotty/pull/12341) brings support for the variance annotations on the placeholder. This work enhances the ability to cross-compile Scala 2 code that uses the Kind Projector plugin to Scala 3.
+
+# Improved error reporting
+Down the error reporting lane, match type reduction errors were improved. When using a match type, it may or may not reduce to one of its cases. If it doesn't reduce, the match type is used as specified, e.g. if `M[T]` is a match type and it didn't reduce for `M[Int]`, `M[Int]` will be used. This behavior, however, is frequently not what you want: there are a lot of cases where you would expect a match type to reduce but it doesn't. In such cases, it would be nice to have some diagnostic regarding why it didn't reduce. PR [#12053](https://github.com/lampepfl/dotty/pull/12053/) adds just such a diagnostic. E.g. the following code:
+
+```scala
+trait A
+trait B
+type M[X] = X match
+  case A => Int
+  case B => String
+val x: String = ??? : M[B] // error
+```
+
+will report the following error:
+
+```
+6 |val x: String = ??? : M[B] // error
+  |                ^^^^^^^^^^
+  |                Found:    M[B]
+  |                Required: String
+  |
+  |                Note: a match type could not be fully reduced:
+  |
+  |                  trying to reduce  M[B]
+  |                  failed since selector  B
+  |                  does not match  case A => Int
+  |                  and cannot be shown to be disjoint from it either.
+  |                  Therefore, reduction cannot advance to the remaining case
+  |
+  |                    case B => String
+```
+
+# Scaladoc
+We have updated the [documentation](http://dotty.epfl.ch/docs/usage/scaladoc/index.html) for Scaladoc making it easier for you to get started. Also, PR [#11582](https://github.com/lampepfl/dotty/pull/11582) has added the snippet compiler to ensure the snippets in your scaladoc documentation comments aren't broken. You can read more about this feature on the [mailing list](https://contributors.scala-lang.org/t/snippet-validation-in-scaladoc-for-scala-3/4976).
+
+# Metaprogramming
+A lot of metaprogramming work was focused on improving the performance. Some of the notable PRs include:
+
+- Cache quote unpickling [#12242](https://github.com/lampepfl/dotty/pull/12242)
+- Avoid pickled tasty for some captured quote reference [#12248](https://github.com/lampepfl/dotty/pull/12248)
+- Improve quote matcher performance [#12418](https://github.com/lampepfl/dotty/pull/12418)
+- Port scala.quoted.runtime.impl.QuoteMatcher [#12402](https://github.com/lampepfl/dotty/pull/12402)
+
+
+# Issue fixing
+Otherwise, we are making an effort to reduce our issue tracker. Among others, the following are some of the PRs dedicated to issue fixing:
+
+- IArray.toArray: Deprecate broken method [#12598](https://github.com/lampepfl/dotty/pull/12598)
+- Fix comparison of dependent function types [#12214](https://github.com/lampepfl/dotty/pull/12214)
+- Make translucentSuperType handle match types [#12153](https://github.com/lampepfl/dotty/pull/12153)
+- Harden Type Inference [#12560](https://github.com/lampepfl/dotty/pull/12560)
+- Reject references to self in super constructor calls [#12567](https://github.com/lampepfl/dotty/pull/12567)
+- Provide mirror support after inlining [#12062](https://github.com/lampepfl/dotty/pull/12062)
+- Allow export paths to see imports [#12134](https://github.com/lampepfl/dotty/pull/12134)
+- Streamline given syntax [#12107](https://github.com/lampepfl/dotty/pull/12107)
+- Export constructor proxies [#12311](https://github.com/lampepfl/dotty/pull/12311)
+- Identify package and nested package object in isSubPrefix [#12297](https://github.com/lampepfl/dotty/pull/12297)
+- Treat Refinements more like AndTypes [#12317](https://github.com/lampepfl/dotty/pull/12317)
+- Fix [#9871](https://github.com/lampepfl/dotty/pull/9871): use toNestedPairs in provablyDisjoint [#10560](https://github.com/lampepfl/dotty/pull/10560)
+
+
+# Contributors
+Thank you to all the contributors who made this release possible 🎉
+
+According to `git shortlog -sn --no-merges 3.0.0-RC2..3.0.1-RC1`† these are:
+
+```
+   121  Martin Odersky
+   111  Liu Fengyun
+    98  Nicolas Stucki
+    29  Guillaume Martres
+    24  Phil
+    20  Olivier Blanvillain
+    14  Tom Grigg
+    14  Adrien Piquerez
+    13  Natsu Kagami
+    12  Andrzej Ratajczak
+    10  odersky
+    10  Aleksander Boruch-Gruszecki
+     9  Anatolii Kmetiuk
+     8  Jamie Thompson
+     6  Maxime Kjaer
+     5  Som Snytt
+     3  Filip Zybała
+     3  Krzysztof Romanowski
+     3  Kai
+     3  Fengyun Liu
+     3  noti0na1
+     3  Phil Walker
+     2  Johannes Rudolph
+     2  soronpo
+     2  tanishiking
+     2  Adam Warski
+     2  Kacper Korban
+     2  Raphael Jolly
+     2  Sébastien Doeraene
+     1  xuwei-k
+     1  Alexander Ioffe
+     1  David Barri
+     1  Devon Stewart
+     1  Dmitrii Naumenko
+     1  Ivan Kurchenko
+     1  Jakub Kozłowski
+     1  Jonas Ackermann
+     1  Kevin Lee
+     1  Martin
+     1  Michał Pałka
+     1  Miles Sabin
+     1  Oron Port
+     1  Paweł Marks
+     1  Ruslan Shevchenko
+     1  Seth Tisue
+     1  Vadim Chelyshov
+     1  nogurenn
+     1  nurekata
+```
+
+†: Note that we measure against `3.0.0-RC2` and not `3.0.0` because we stabilized on `3.0.0-RC2`. Only critical bug fixes found their way into `3.0.0-RC3` and further, while the majority of changes ended up in `3.0.1-RC1`.
+
+## Library authors: Join our community build
+
+Scala 3 now has a set of widely-used community libraries that are built against every nightly Scala 3 snapshot.
+Join our [community build](https://github.com/lampepfl/dotty/tree/main/community-build)
+to make sure that our regression suite includes your library.
+
+[Scastie]: https://scastie.scala-lang.org/?target=dotty
+
+[@odersky]: https://github.com/odersky
+[@DarkDimius]: https://github.com/DarkDimius
+[@smarter]: https://github.com/smarter
+[@felixmulder]: https://github.com/felixmulder
+[@nicolasstucki]: https://github.com/nicolasstucki
+[@liufengyun]: https://github.com/liufengyun
+[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain
+[@biboudis]: https://github.com/biboudis
+[@allanrenucci]: https://github.com/allanrenucci
+[@Blaisorblade]: https://github.com/Blaisorblade
+[@Duhemm]: https://github.com/Duhemm
+[@AleksanderBG]: https://github.com/AleksanderBG
+[@milessabin]: https://github.com/milessabin
+[@anatoliykmetyuk]: https://github.com/anatoliykmetyuk
diff --git a/docs/_blog/_posts/2021-06-25-scala301-rc2.md b/docs/_blog/_posts/2021-06-25-scala301-rc2.md
new file mode 100644
index 000000000000..76257d1a8664
--- /dev/null
+++ b/docs/_blog/_posts/2021-06-25-scala301-rc2.md
@@ -0,0 +1,50 @@
+---
+layout: blog-page
+title: Scala 3.0.1-RC2 – backports of critical bugfixes
+author: Anatolii Kmetiuk
+authorImg: /images/anatolii.png
+date: 2021-06-25
+---
+
+This post is a quick announcement of Scala 3.0.1-RC2. This is the second release candidate for 3.0.1. The reason for this release is that a regression with respect to 3.0.0 was introduced by PR [#12519](https://github.com/lampepfl/dotty/pull/12519) which caused the compiler to fail where it shouldn't. We have fixed this regression in PR [#12827](https://github.com/lampepfl/dotty/pull/12827) and backported it to 3.0.1. This is the main reason for having 3.0.1-RC2 before 3.0.1 which is due in one week.
+
+Besides this main change, taking advantage of the fact that RC2 is happening, we have also included various SBT reporting improvements (PR [#12845](https://github.com/lampepfl/dotty/pull/12845)) which should improve interaction with [Metals](https://scalameta.org/metals/). Also we've backported a few infrastructural fixes even though they aren't a regression from 3.0.0.
+
+
+
+# Contributors
+Thank you to all the contributors who made this release possible 🎉
+
+According to `git shortlog -sn --no-merges 3.0.1-RC1..3.0.1-RC2` these are:
+
+```
+     3  Guillaume Martres
+     2  Krzysztof Romanowski
+     1  Anatolii Kmetiuk
+     1  Markus Sutter
+     1  Martin Odersky
+     1  Vadim Chelyshov
+```
+
+## Library authors: Join our community build
+
+Scala 3 now has a set of widely-used community libraries that are built against every nightly Scala 3 snapshot.
+Join our [community build](https://github.com/lampepfl/dotty/tree/main/community-build)
+to make sure that our regression suite includes your library.
+
+[Scastie]: https://scastie.scala-lang.org/?target=dotty
+
+[@odersky]: https://github.com/odersky
+[@DarkDimius]: https://github.com/DarkDimius
+[@smarter]: https://github.com/smarter
+[@felixmulder]: https://github.com/felixmulder
+[@nicolasstucki]: https://github.com/nicolasstucki
+[@liufengyun]: https://github.com/liufengyun
+[@OlivierBlanvillain]: https://github.com/OlivierBlanvillain
+[@biboudis]: https://github.com/biboudis
+[@allanrenucci]: https://github.com/allanrenucci
+[@Blaisorblade]: https://github.com/Blaisorblade
+[@Duhemm]: https://github.com/Duhemm
+[@AleksanderBG]: https://github.com/AleksanderBG
+[@milessabin]: https://github.com/milessabin
+[@anatoliykmetyuk]: https://github.com/anatoliykmetyuk
diff --git a/docs/_blog/index.html b/docs/_blog/index.html
new file mode 100644
index 000000000000..b1ad4787b41b
--- /dev/null
+++ b/docs/_blog/index.html
@@ -0,0 +1,32 @@
+---
+layout: static-site-main
+title: Blog
+---
+
+

{{ page.title }}

+ +
    + {% for post in site.posts %} +
  • +

    + {{ post.title }} +

    + +
    + {{ post.excerpt }} +
    +
  • + {% endfor %} +
+
diff --git a/docs/docs/contributing/checklist.sh b/docs/_docs/contributing/checklist.sh similarity index 96% rename from docs/docs/contributing/checklist.sh rename to docs/_docs/contributing/checklist.sh index 827f0911d84d..d3cfe70b4e21 100755 --- a/docs/docs/contributing/checklist.sh +++ b/docs/_docs/contributing/checklist.sh @@ -48,6 +48,7 @@ LIST='- [ ] Publish artifacts to Maven via CI - [ ] Publish Blog Post on dotty.epfl.ch - [ ] Make an announcement thread on https://contributors.scala-lang.org - [ ] Tweet the announcement blog post on https://twitter.com/scala_lang + - [ ] Run workflow releases CI to publish scala on SDKMAN - https://github.com/lampepfl/dotty/actions/workflows/releases.yml [Instructions on how to release](https://dotty.epfl.ch/docs/contributing/release.html)' diff --git a/docs/docs/contributing/contribute-knowledge.md b/docs/_docs/contributing/contribute-knowledge.md similarity index 100% rename from docs/docs/contributing/contribute-knowledge.md rename to docs/_docs/contributing/contribute-knowledge.md diff --git a/docs/docs/contributing/debug-tests.md b/docs/_docs/contributing/debug-tests.md similarity index 100% rename from docs/docs/contributing/debug-tests.md rename to docs/_docs/contributing/debug-tests.md diff --git a/docs/docs/contributing/debugging.md b/docs/_docs/contributing/debugging.md similarity index 99% rename from docs/docs/contributing/debugging.md rename to docs/_docs/contributing/debugging.md index 84f6937f628f..959ad6706290 100644 --- a/docs/docs/contributing/debugging.md +++ b/docs/_docs/contributing/debugging.md @@ -164,7 +164,7 @@ val YshowVarBounds = BooleanSetting("-Yshow-var-bounds" , "Print type varia val YtestPickler = BooleanSetting("-Ytest-pickler" , "self-test for pickling functionality; should be used with -Ystop-after:pickler") ``` -They are defined in [ScalaSettings.scala](https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala). E.g. 
`YprintPos` is defined as: +They are defined in [ScalaSettings.scala](https://github.com/lampepfl/dotty/blob/main/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala). E.g. `YprintPos` is defined as: ```scala val YprintPos: Setting[Boolean] = BooleanSetting("-Yprint-pos", "show tree positions.") diff --git a/docs/_docs/contributing/getting-started.md b/docs/_docs/contributing/getting-started.md new file mode 100644 index 000000000000..c842fd0a49d0 --- /dev/null +++ b/docs/_docs/contributing/getting-started.md @@ -0,0 +1,100 @@ +--- +layout: doc-page +title: Getting Started +--- + + + +Requirements +------------ +Make sure that you are using macOS or Linux (or WSL on Windows) with Java 8 or newer. You can determine which version of the JDK is the +default by typing `java -version` in a Terminal window. + +Compiling and Running +--------------------- +Start by cloning the repository: + +```bash +$ git clone https://github.com/lampepfl/dotty.git +$ cd dotty +$ sbt managedSources # Needed for IDE import to succeed +``` + +Dotty provides a standard sbt build: compiling, running and starting a repl can +all be done from within sbt: + +```bash +$ sbt +> scalac tests/pos/HelloWorld.scala +> scala HelloWorld +hello world +``` + +There are also bash scripts that can be used in the same way. Assuming that you have cloned the Dotty repo locally, append +the following line on your `.bash_profile`: + +```shell +$ export PATH=$HOME/dotty/bin:$PATH +``` + +and you will be able to run the corresponding commands directly from your console: + +```shell +# Compile code using Dotty +$ scalac tests/pos/HelloWorld.scala + +# Run it with the proper classpath +$ scala HelloWorld +``` + + +Starting a REPL +--------------- +```bash +$ sbt +> repl +Welcome to Scala.next (pre-alpha) (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_101). +Type in expressions to have them evaluated. +Type :help for more information. 
+scala> +``` + +or via bash: + +```bash +$ scala +``` +Publish to local repository +--------------------------------- +To test our cloned compiler on local projects: + +```bash +$ sbt publishLocal +``` +Then in the `build.sbt` file of a test project: + +```bash +ThisBuild / scalaVersion := "-bin-SNAPSHOT" +``` +where `dotty-version` can be found in the file `project/Build.scala`, like `3.0.0-M2` + + +Generating Documentation +------------------------- +To generate this page and other static page docs, run +```bash +$ sbt +> scaladoc/generateScalaDocumentation +``` +For more information, see `scaladoc/README.md`. + +Before contributing to Dotty, we invite you to consult the +[Dotty Developer Guidelines](https://github.com/lampepfl/dotty/blob/main/CONTRIBUTING.md). + + +Community +------------- +The main development discussion channels are: +- [github.com/lampepfl/dotty/discussions](https://github.com/lampepfl/dotty/discussions) +- [contributors.scala-lang.org](https://contributors.scala-lang.org) +- [gitter.im/scala/contributors](https://gitter.im/scala/contributors) diff --git a/docs/docs/contributing/procedures/release.md b/docs/_docs/contributing/procedures/release.md similarity index 100% rename from docs/docs/contributing/procedures/release.md rename to docs/_docs/contributing/procedures/release.md diff --git a/docs/docs/contributing/procedures/vulpix.md b/docs/_docs/contributing/procedures/vulpix.md similarity index 100% rename from docs/docs/contributing/procedures/vulpix.md rename to docs/_docs/contributing/procedures/vulpix.md diff --git a/docs/docs/contributing/scala2-vs-scala3.md b/docs/_docs/contributing/scala2-vs-scala3.md similarity index 100% rename from docs/docs/contributing/scala2-vs-scala3.md rename to docs/_docs/contributing/scala2-vs-scala3.md diff --git a/docs/docs/contributing/testing.md b/docs/_docs/contributing/testing.md similarity index 75% rename from docs/docs/contributing/testing.md rename to 
docs/_docs/contributing/testing.md index c0fdca5f10dc..41e8b0b9b85e 100644 --- a/docs/docs/contributing/testing.md +++ b/docs/_docs/contributing/testing.md @@ -17,7 +17,7 @@ The entire suite of tests can be run using the bootstrapped compiler as follows: ```bash $ sbt -> dotty-bootstrapped/test +> scala3-bootstrapped/test ``` There are currently several forms of tests in Dotty. These can be split into @@ -32,7 +32,7 @@ To run all tests in e.g., for the compiler test-suite you can write: ```bash $ sbt -> dotty-compiler/test +> scala3-compiler/test ``` To run a single test class you use `testOnly` and the fully qualified class name. @@ -127,10 +127,21 @@ $ sbt > testCompilation --help ``` +### Joint and separate sources compilation + +When the sources of a test consist of multiple source files places in a single directory they are passed to the compiler in a single run and the compiler decides in which order to compile them. In some cases, however, to reproduce a specific test scenario it might be necessary to compile the source files in several steps in a specified order. To achieve that one can add a `_${step_index}` suffix to a file name (before the `.scala` or `.java` extension) indicating the order of compilation. E.g. if the test directory contains files named `Foo_1.scala`, `Bar_2.scala` and `Baz_2.scala` then `Foo_1.scala` will be compiled first and after that `Bar_2.scala` together with `Baz_2.scala`. + +There are also other suffixes indicating how some particular files are compiled: +* `_c${compilerVersion}` - compile a file with a specific version of the compiler instead of the one developed on the current branch + (e.g. `Foo_c3.0.2.scala`) +* `_r${release}` - compile a file with a given value of `-Yscala-release` flag (e.g. `Foo_r3.0.scala`) + +Different suffixes can be mixed together (their order is not important although consistency is advised), e.g. `Foo_1_r3.0`, `Bar_2_c3.0.2`. 
+ ### Bootstrapped-only tests To run `testCompilation` on a bootstrapped Dotty compiler, use -`dotty-compiler-bootstrapped/testCompilation` (with the same syntax as above). +`scala3-compiler-bootstrapped/testCompilation` (with the same syntax as above). Some tests can only be run in bootstrapped compilers; that includes all tests with `with-compiler` in their name. @@ -144,10 +155,15 @@ with `with-compiler` in their name. > testCompilation --from-tasty ``` - This mode can be run under `dotty-compiler-bootstrapped/testCompilation` to test on a bootstrapped Dotty compiler. + This mode can be run under `scala3-compiler-bootstrapped/testCompilation` to test on a bootstrapped Dotty compiler. ### SemanticDB tests +```bash +$ sbt +> scala3-compiler-bootstrapped/testOnly dotty.tools.dotc.semanticdb.SemanticdbTests +``` + The output of the `extractSemanticDB` phase, enabled with `-Xsemanticdb` is tested with the bootstrapped JUnit test `dotty.tools.dotc.semanticdb.SemanticdbTests`. It uses source files in `tests/semanticdb/expect` to generate two kinds of output file that are compared with "expect files": placement of semanticdb symbol occurrences inline in @@ -157,5 +173,12 @@ Expect files are used as regression tests to detect changes in the compiler. The test suite will create a new file if it detects any difference, which can be compared with the original expect file, or if the user wants to globally replace all expect files for semanticdb they can use -`dotty-compiler-bootstrapped/test:runMain dotty.tools.dotc.semanticdb.updateExpect`, and compare the changes via version +`scala3-compiler-bootstrapped/test:runMain dotty.tools.dotc.semanticdb.updateExpect`, and compare the changes via version control. + +## Troubleshooting + +Some of the tests depend on temporary state stored in the `out` directory. In rare cases, that directory +can enter an inconsistent state and cause spurious test failures. 
If you suspect a spurious test failure, +you can run `rm -rf out/*` from the root of the repository and run your tests again. If that fails, you +can try `git clean -xfd`. diff --git a/docs/docs/contributing/tools/ide.md b/docs/_docs/contributing/tools/ide.md similarity index 100% rename from docs/docs/contributing/tools/ide.md rename to docs/_docs/contributing/tools/ide.md diff --git a/docs/docs/contributing/tools/mill.md b/docs/_docs/contributing/tools/mill.md similarity index 100% rename from docs/docs/contributing/tools/mill.md rename to docs/_docs/contributing/tools/mill.md diff --git a/docs/docs/contributing/tools/scalafix.md b/docs/_docs/contributing/tools/scalafix.md similarity index 100% rename from docs/docs/contributing/tools/scalafix.md rename to docs/_docs/contributing/tools/scalafix.md diff --git a/docs/_docs/contributing/workflow.md b/docs/_docs/contributing/workflow.md new file mode 100644 index 000000000000..956ce2998c75 --- /dev/null +++ b/docs/_docs/contributing/workflow.md @@ -0,0 +1,110 @@ +--- +layout: doc-page +title: Workflow +--- + +Check [Getting Started](getting-started.md) for instructions on how to obtain the source code of dotty. +This document details common workflow patterns when working with Dotty. + +## Compiling files with scalac ## + +As we have seen you can compile a test file either from sbt: + +```bash +$ sbt +> scalac +``` + +or from terminal: + +```bash +$ scalac +``` + +Here are some useful debugging ``: + +* `-Xprint:PHASE1,PHASE2,...` or `-Xprint:all`: prints the `AST` after each + specified phase. Phase names can be found by examining the + `dotty.tools.dotc.transform.*` classes for their `phaseName` field e.g., `-Xprint:erasure`. + You can discover all phases in the `dotty.tools.dotc.Compiler` class +* `-Ylog:PHASE1,PHASE2,...` or `-Ylog:all`: enables `ctx.log("")` logging for + the specified phase. +* `-Ycheck:all` verifies the consistency of `AST` nodes between phases, in + particular checks that types do not change. 
Some phases currently can't be + `Ycheck`ed, therefore in the tests we run: + `-Ycheck:tailrec,resolveSuper,mixin,restoreScopes,labelDef`. +* the last frontier of debugging (before actual debugging) is the range of logging capabilities that +can be enabled through the `dotty.tools.dotc.config.Printers` object. Change any of the desired printers from `noPrinter` to +`default` and this will give you the full logging capability of the compiler. + +## Inspecting Types with Type Stealer ## + +You can inspect types with the main method `dotty.tools.printTypes` from the sbt shell, +passing at least three arguments: +- The first argument is a string that introduces some +Scala definitions +- The second argument introduces how the remaining arguments should be interpreted, +comprising + - `rhs` - the return type of a definition + - `class` - the signature of a class, after its name + - `method` - the signature of a method, after its name + - `type` - the signature of a type, after its name +- The remaining arguments are type signatures; these may reference definitions introduced by the first argument. + +Each type signature is then printed, displaying its internal structure, alongside its class, using +the same representation that can later be used in pattern matching to decompose the type. + +Here, we inspect a refinement of a class `Box`: +```bash +$ sbt +> scala3-compiler-bootstrapped/Test/runMain dotty.tools.printTypes "class Box { def x: Any }" "rhs" "Box { def x: Int }" +RefinedType(TypeRef(ThisType(TypeRef(NoPrefix, module class )),class Box), x, ExprType(TypeRef(TermRef(ThisType(TypeRef(NoPrefix, module class )), object scala), class Int))) [class dotty.tools.dotc.core.Types$CachedRefinedType] +``` + +You can also pass the empty string as the second +argument, e.g.
to inspect a standard library type: +```bash +$ sbt +> scala3-compiler-bootstrapped/Test/runMain dotty.tools.printTypes "" "rhs" "1 *: EmptyTuple" +AppliedType(TypeRef(TermRef(ThisType(TypeRef(NoPrefix, module class )), object scala), class *:), List(ConstantType(Constant(1)), TypeRef(TermRef(ThisType(TypeRef(NoPrefix, module class scala)), object Tuple$package), type EmptyTuple))) +``` + +Here are some other examples you can follow: +- `...printTypes "" class "[T] extends Foo[T] {}"` +- `...printTypes "" method "(x: Int): x.type"` +- `...printTypes "" type "<: Int" "= [T] =>> List[T]"` + +If you want to further inspect the types, and not just print them, the object `dotty.tools.DottyTypeStealer` has a +method `stealType`. It takes the same arguments as `printTypes`, but returns both a `Context` containing the +definitions passed, along with the list of types: +```scala +// compiler/test/dotty/tools/DottyTypeStealer.scala +object DottyTypeStealer extends DottyTest { + + enum Kind: + case `rhs`, `method`, `class`, `type` + ... + + def stealType(kind: Kind, source: String, typeStrings: String*): (Context, List[Type]) = { + ... + } +} +``` +Any test source within `compiler/test` can then call `stealType` for custom purposes. + +## Pretty-printing ## +Many objects in the scalac compiler implement a `Showable` trait (e.g. `Tree`, +`Symbol`, `Type`). These objects may be prettyprinted using the `.show` +method + +## SBT Commands Cheat Sheet ## +The basics of working with Dotty codebase are documented [here](https://dotty.epfl.ch/docs/contributing/getting-started.html) and [here](https://dotty.epfl.ch/docs/contributing/workflow.html). Below is a cheat sheet of some frequently used commands (to be used from SBT console – `sbt`). 
+ + +| Command | Description | +|------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------| +| `scalac ../issues/Playground.scala` | Compile the given file – path relative to the Dotty directory. Output the compiled class files to the Dotty directory itself. | +| `scala Playground` | Run the compiled class `Playground`. Dotty directory is on classpath by default. | +| `repl` | Start REPL | +| `testOnly dotty.tools.dotc.CompilationTests -- *pos` | Run test (method) `pos` from `CompilationTests` suite. | +| `testCompilation sample` | In all test suites, run test files containing the word `sample` in their title. | diff --git a/docs/_docs/index.md b/docs/_docs/index.md new file mode 100644 index 000000000000..839d14d16308 --- /dev/null +++ b/docs/_docs/index.md @@ -0,0 +1,22 @@ +--- +layout: doc-page +redirectFrom: docs/index.html +--- + +Dotty is the project name for technologies that are considered for inclusion in Scala 3. Scala has +pioneered the fusion of object-oriented and functional programming in a typed setting. Scala 3 will +be a big step towards realizing the full potential of these ideas. Its main objectives are to + +- become more opinionated by promoting programming idioms we found to work well, +- simplify where possible, +- eliminate inconsistencies and surprising behaviors, +- build on strong foundations to ensure the design hangs well together, +- consolidate language constructs to improve the language’s consistency, safety, ergonomics, and performance. + +In this documentation you will find information on how to use the Dotty compiler on your machine, +navigate through the code, setup Dotty with your favorite IDE and more! 
+ +Table of Contents +================= +{% assign titles = sidebar.titles %} +{% include "table-of-contents" %} diff --git a/docs/docs/internals/backend.md b/docs/_docs/internals/backend.md similarity index 100% rename from docs/docs/internals/backend.md rename to docs/_docs/internals/backend.md diff --git a/docs/docs/internals/classpaths.md b/docs/_docs/internals/classpaths.md similarity index 100% rename from docs/docs/internals/classpaths.md rename to docs/_docs/internals/classpaths.md diff --git a/docs/docs/internals/contexts.md b/docs/_docs/internals/contexts.md similarity index 100% rename from docs/docs/internals/contexts.md rename to docs/_docs/internals/contexts.md diff --git a/docs/docs/internals/core-data-structures.md b/docs/_docs/internals/core-data-structures.md similarity index 100% rename from docs/docs/internals/core-data-structures.md rename to docs/_docs/internals/core-data-structures.md diff --git a/docs/docs/internals/debug-macros.md b/docs/_docs/internals/debug-macros.md similarity index 100% rename from docs/docs/internals/debug-macros.md rename to docs/_docs/internals/debug-macros.md diff --git a/docs/docs/internals/dotc-scalac.md b/docs/_docs/internals/dotc-scalac.md similarity index 100% rename from docs/docs/internals/dotc-scalac.md rename to docs/_docs/internals/dotc-scalac.md diff --git a/docs/docs/internals/dotty-internals-1-notes.md b/docs/_docs/internals/dotty-internals-1-notes.md similarity index 99% rename from docs/docs/internals/dotty-internals-1-notes.md rename to docs/_docs/internals/dotty-internals-1-notes.md index 8f3110bd91fb..899fb49438b3 100644 --- a/docs/docs/internals/dotty-internals-1-notes.md +++ b/docs/_docs/internals/dotty-internals-1-notes.md @@ -88,7 +88,7 @@ on the structure of a tree is error-prone. 
## Errors `dotc/typer/ErrorReporting.scala` -Sometimes there's an error during compilation, but we want to continue compilling (as opposed to failing outright), to +Sometimes there's an error during compilation, but we want to continue compiling (as opposed to failing outright), to uncover additional errors. In cases where a tree is expected but there's an error, we can use the `errorTree` methods in `ErrorReporting` to create diff --git a/docs/docs/internals/explicit-nulls.md b/docs/_docs/internals/explicit-nulls.md similarity index 100% rename from docs/docs/internals/explicit-nulls.md rename to docs/_docs/internals/explicit-nulls.md diff --git a/docs/docs/internals/higher-kinded-v2.md b/docs/_docs/internals/higher-kinded-v2.md similarity index 100% rename from docs/docs/internals/higher-kinded-v2.md rename to docs/_docs/internals/higher-kinded-v2.md diff --git a/docs/docs/internals/overall-structure.md b/docs/_docs/internals/overall-structure.md similarity index 94% rename from docs/docs/internals/overall-structure.md rename to docs/_docs/internals/overall-structure.md index 874155589116..f50ab6bf03a7 100644 --- a/docs/docs/internals/overall-structure.md +++ b/docs/_docs/internals/overall-structure.md @@ -97,7 +97,8 @@ phases. The current list of phases is specified in class [Compiler] as follows: /** Phases dealing with the frontend up to trees ready for TASTY pickling */ protected def frontendPhases: List[List[Phase]] = - List(new FrontEnd) :: // Compiler frontend: scanner, parser, namer, typer + List(new Parser) :: // scanner, parser + List(new TyperPhase) :: // namer, typer List(new YCheckPositions) :: // YCheck positions List(new sbt.ExtractDependencies) :: // Sends information on classes' dependencies to sbt via callbacks List(new semanticdb.ExtractSemanticDB) :: // Extract info into .semanticdb files @@ -232,10 +233,10 @@ Phases fall into four categories: * Code generators: These map the transformed trees to Java classfiles or .sjsir files. 
-[dotty.tools]: https://github.com/lampepfl/dotty/tree/master/compiler/src/dotty/tools -[dotc]: https://github.com/lampepfl/dotty/tree/master/compiler/src/dotty/tools/dotc -[Main]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Main.scala -[Driver]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Driver.scala -[Compiler]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Compiler.scala -[Run]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/Run.scala -[Context]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Contexts.scala +[dotty.tools]: https://github.com/lampepfl/dotty/tree/main/compiler/src/dotty/tools +[dotc]: https://github.com/lampepfl/dotty/tree/main/compiler/src/dotty/tools/dotc +[Main]: https://github.com/lampepfl/dotty/blob/main/compiler/src/dotty/tools/dotc/Main.scala +[Driver]: https://github.com/lampepfl/dotty/blob/main/compiler/src/dotty/tools/dotc/Driver.scala +[Compiler]: https://github.com/lampepfl/dotty/blob/main/compiler/src/dotty/tools/dotc/Compiler.scala +[Run]: https://github.com/lampepfl/dotty/blob/main/compiler/src/dotty/tools/dotc/Run.scala +[Context]: https://github.com/lampepfl/dotty/blob/main/compiler/src/dotty/tools/dotc/core/Contexts.scala diff --git a/docs/docs/internals/periods.md b/docs/_docs/internals/periods.md similarity index 100% rename from docs/docs/internals/periods.md rename to docs/_docs/internals/periods.md diff --git a/docs/docs/internals/syntax-3.1.md b/docs/_docs/internals/syntax-3.1.md similarity index 97% rename from docs/docs/internals/syntax-3.1.md rename to docs/_docs/internals/syntax-3.1.md index 8a7eea52a09f..ef18bd1486b1 100644 --- a/docs/docs/internals/syntax-3.1.md +++ b/docs/_docs/internals/syntax-3.1.md @@ -24,14 +24,14 @@ form. 
whiteSpace ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’ upper ::= ‘A’ | … | ‘Z’ | ‘\$’ | ‘_’ “… and Unicode category Lu” lower ::= ‘a’ | … | ‘z’ “… and Unicode category Ll” -letter ::= upper | lower “… and Unicode categories Lo, Lt, Nl” +letter ::= upper | lower “… and Unicode categories Lo, Lt, Lm, Nl” digit ::= ‘0’ | … | ‘9’ paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ | ‘'(’ | ‘'[’ | ‘'{’ delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ -opchar ::= “printableChar not matched by (whiteSpace | upper | lower | - letter | digit | paren | delim | opchar | Unicode_Sm | - Unicode_So)” -printableChar ::= “all characters in [\u0020, \u007F] inclusive” +opchar ::= ‘!’ | ‘#’ | ‘%’ | ‘&’ | ‘*’ | ‘+’ | ‘-’ | ‘/’ | ‘:’ | + ‘<’ | ‘=’ | ‘>’ | ‘?’ | ‘@’ | ‘\’ | ‘^’ | ‘|’ | ‘~’ + “… and Unicode categories Sm, So” +printableChar ::= “all characters in [\u0020, \u007E] inclusive” charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’) op ::= opchar {opchar} @@ -96,7 +96,7 @@ erased extends false final finally for given if implied import lazy match new null object package private protected override return super sealed then throw trait true try type val var while yield -: = <- => <: :> # @ +: = <- => <: >: # @ ``` ### Soft keywords diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md new file mode 100644 index 000000000000..c484c42864e5 --- /dev/null +++ b/docs/_docs/internals/syntax.md @@ -0,0 +1,463 @@ +--- +layout: doc-page +title: "Scala 3 Syntax Summary" +--- + +The following description of Scala tokens uses literal characters `‘c’` when +referring to the ASCII fragment `\u0000` – `\u007F`. 
+ +_Unicode escapes_ are used to represent the [Unicode character](https://www.w3.org/International/articles/definitions-characters/) with the given +hexadecimal code: + +```ebnf +UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit +hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’ +``` + +Informal descriptions are typeset as `“some comment”`. + +### Lexical Syntax + +The lexical syntax of Scala is given by the following grammar in EBNF +form. + +```ebnf +whiteSpace ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’ +upper ::= ‘A’ | … | ‘Z’ | ‘\$’ | ‘_’ “… and Unicode category Lu” +lower ::= ‘a’ | … | ‘z’ “… and Unicode category Ll” +letter ::= upper | lower “… and Unicode categories Lo, Lt, Lm, Nl” +digit ::= ‘0’ | … | ‘9’ +paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ | ‘'(’ | ‘'[’ | ‘'{’ +delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ +opchar ::= ‘!’ | ‘#’ | ‘%’ | ‘&’ | ‘*’ | ‘+’ | ‘-’ | ‘/’ | ‘:’ | + ‘<’ | ‘=’ | ‘>’ | ‘?’ | ‘@’ | ‘\’ | ‘^’ | ‘|’ | ‘~’ + “… and Unicode categories Sm, So” +printableChar ::= “all characters in [\u0020, \u007E] inclusive” +charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’) + +op ::= opchar {opchar} +varid ::= lower idrest +alphaid ::= upper idrest + | varid +plainid ::= alphaid + | op +id ::= plainid + | ‘`’ { charNoBackQuoteOrNewline | UnicodeEscape | charEscapeSeq } ‘`’ +idrest ::= {letter | digit} [‘_’ op] +quoteId ::= ‘'’ alphaid + +integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] +decimalNumeral ::= ‘0’ | nonZeroDigit [{digit | ‘_’} digit] +hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] +nonZeroDigit ::= ‘1’ | … | ‘9’ + +floatingPointLiteral + ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] + | decimalNumeral exponentPart [floatType] + | decimalNumeral floatType +exponentPart ::= (‘E’ | ‘e’) [‘+’ | ‘-’] digit [{digit | ‘_’} digit] +floatType ::= ‘F’ | ‘f’ | ‘D’ | ‘d’ + +booleanLiteral ::= ‘true’ | ‘false’ + +characterLiteral ::= 
‘'’ (printableChar | charEscapeSeq) ‘'’ + +stringLiteral ::= ‘"’ {stringElement} ‘"’ + | ‘"""’ multiLineChars ‘"""’ +stringElement ::= printableChar \ (‘"’ | ‘\’) + | UnicodeEscape + | charEscapeSeq +multiLineChars ::= {[‘"’] [‘"’] char \ ‘"’} {‘"’} +processedStringLiteral + ::= alphaid ‘"’ {[‘\’] processedStringPart | ‘\\’ | ‘\"’} ‘"’ + | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘$’) | escape} {‘"’} ‘"""’ +processedStringPart + ::= printableChar \ (‘"’ | ‘$’ | ‘\’) | escape +escape ::= ‘$$’ + | ‘$’ letter { letter | digit } + | ‘{’ Block [‘;’ whiteSpace stringFormat whiteSpace] ‘}’ +stringFormat ::= {printableChar \ (‘"’ | ‘}’ | ‘ ’ | ‘\t’ | ‘\n’)} + +symbolLiteral ::= ‘'’ plainid // until 2.13 + +comment ::= ‘/*’ “any sequence of characters; nested comments are allowed” ‘*/’ + | ‘//’ “any sequence of characters up to end of line” + +nl ::= “new line character” +semi ::= ‘;’ | nl {nl} +``` + + +## Optional Braces + +The lexical analyzer also inserts `indent` and `outdent` tokens that represent regions of indented code [at certain points](../reference/other-new-features/indentation.md) + +In the context-free productions below we use the notation `<<< ts >>>` +to indicate a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent`. Analogously, the +notation `:<<< ts >>>` indicates a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent` that follows +a `:` at the end of a line. 
+ + +``` + <<< ts >>> ::= ‘{’ ts ‘}’ + | indent ts outdent +:<<< ts >>> ::= [nl] ‘{’ ts ‘}’ + | `:` indent ts outdent +``` + +## Keywords + +### Regular keywords + +``` +abstract case catch class def do else +enum export extends false final finally for +given if implicit import lazy match new +null object override package private protected return +sealed super then throw trait true try +type val var while with yield +: = <- => <: >: # +@ =>> ?=> +``` + +### Soft keywords + +``` +as derives end extension infix inline opaque open transparent using | * + - +``` + +See the [separate section on soft keywords](../reference/soft-modifier.md) for additional +details on where a soft keyword is recognized. + +## Context-free Syntax + +The context-free syntax of Scala is given by the following EBNF +grammar: + +### Literals and Paths +```ebnf +SimpleLiteral ::= [‘-’] integerLiteral + | [‘-’] floatingPointLiteral + | booleanLiteral + | characterLiteral + | stringLiteral +Literal ::= SimpleLiteral + | processedStringLiteral + | symbolLiteral + | ‘null’ + +QualId ::= id {‘.’ id} +ids ::= id {‘,’ id} + +SimpleRef ::= id + | [id ‘.’] ‘this’ + | [id ‘.’] ‘super’ [ClassQualifier] ‘.’ id + +ClassQualifier ::= ‘[’ id ‘]’ +``` + +### Types +```ebnf +Type ::= FunType + | HkTypeParamClause ‘=>>’ Type LambdaTypeTree(ps, t) + | FunParamClause ‘=>>’ Type TermLambdaTypeTree(ps, t) + | MatchType + | InfixType +FunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type Function(ts, t) + | HKTypeParamClause '=>' Type PolyFunction(ps, t) +FunTypeArgs ::= InfixType + | ‘(’ [ FunArgTypes ] ‘)’ + | FunParamClause +FunParamClause ::= ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’ +TypedFunParam ::= id ‘:’ Type +MatchType ::= InfixType `match` <<< TypeCaseClauses >>> +InfixType ::= RefinedType {id [nl] RefinedType} InfixOp(t1, op, t2) +RefinedType ::= AnnotType {[nl] Refinement} RefinedTypeTree(t, ds) +AnnotType ::= SimpleType {Annotation} Annotated(t, annot) + +SimpleType ::= SimpleLiteral SingletonTypeTree(l) + | ‘?’ 
TypeBounds + | SimpleType1 +SimpleType1 ::= id Ident(name) + | Singleton ‘.’ id Select(t, name) + | Singleton ‘.’ ‘type’ SingletonTypeTree(p) + | ‘(’ Types ‘)’ Tuple(ts) + | Refinement RefinedTypeTree(EmptyTree, refinement) + | ‘$’ ‘{’ Block ‘}’ -- unless inside quoted pattern + | ‘$’ ‘{’ Pattern ‘}’ -- only inside quoted pattern + | SimpleType1 TypeArgs AppliedTypeTree(t, args) + | SimpleType1 ‘#’ id Select(t, name) +Singleton ::= SimpleRef + | SimpleLiteral + | Singleton ‘.’ id +Singletons ::= Singleton { ‘,’ Singleton } +FunArgType ::= Type + | ‘=>’ Type PrefixOp(=>, t) +FunArgTypes ::= FunArgType { ‘,’ FunArgType } +ParamType ::= [‘=>’] ParamValueType +ParamValueType ::= Type [‘*’] PostfixOp(t, "*") +TypeArgs ::= ‘[’ Types ‘]’ ts +Refinement ::= ‘{’ [RefineDcl] {semi [RefineDcl]} ‘}’ ds +TypeBounds ::= [‘>:’ Type] [‘<:’ Type] TypeBoundsTree(lo, hi) +TypeParamBounds ::= TypeBounds {‘:’ Type} ContextBounds(typeBounds, tps) +Types ::= Type {‘,’ Type} +``` + +### Expressions +```ebnf +Expr ::= FunParams (‘=>’ | ‘?=>’) Expr Function(args, expr), Function(ValDef([implicit], id, TypeTree(), EmptyTree), expr) + | HkTypeParamClause ‘=>’ Expr PolyFunction(ts, expr) + | Expr1 +BlockResult ::= FunParams (‘=>’ | ‘?=>’) Block + | HkTypeParamClause ‘=>’ Block + | Expr1 +FunParams ::= Bindings + | id + | ‘_’ +Expr1 ::= [‘inline’] ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr] If(Parens(cond), thenp, elsep?) + | [‘inline’] ‘if’ Expr ‘then’ Expr [[semi] ‘else’ Expr] If(cond, thenp, elsep?) + | ‘while’ ‘(’ Expr ‘)’ {nl} Expr WhileDo(Parens(cond), body) + | ‘while’ Expr ‘do’ Expr WhileDo(cond, body) + | ‘try’ Expr Catches [‘finally’ Expr] Try(expr, catches, expr?) + | ‘try’ Expr [‘finally’ Expr] Try(expr, Nil, expr?) + | ‘throw’ Expr Throw(expr) + | ‘return’ [Expr] Return(expr?) 
+                     |  ForExpr
+                     |  [SimpleExpr ‘.’] id ‘=’ Expr                Assign(expr, expr)
+                     |  PrefixOperator SimpleExpr ‘=’ Expr          Assign(expr, expr)
+                     |  SimpleExpr ArgumentExprs ‘=’ Expr           Assign(expr, expr)
+                     |  PostfixExpr [Ascription]
+                     |  ‘inline’ InfixExpr MatchClause
+Ascription        ::=  ‘:’ InfixType                                Typed(expr, tp)
+                     |  ‘:’ Annotation {Annotation}                 Typed(expr, Annotated(EmptyTree, annot)*)
+Catches           ::=  ‘catch’ (Expr | ExprCaseClause)
+PostfixExpr       ::=  InfixExpr [id]                               PostfixOp(expr, op)
+InfixExpr         ::=  PrefixExpr
+                     |  InfixExpr id [nl] InfixExpr                 InfixOp(expr, op, expr)
+                     |  InfixExpr id ‘:’ IndentedExpr
+                     |  InfixExpr MatchClause
+MatchClause       ::=  ‘match’ <<< CaseClauses >>>                  Match(expr, cases)
+PrefixExpr        ::=  [PrefixOperator] SimpleExpr                  PrefixOp(expr, op)
+PrefixOperator    ::=  ‘-’ | ‘+’ | ‘~’ | ‘!’
+SimpleExpr        ::=  SimpleRef
+                     |  Literal
+                     |  ‘_’
+                     |  BlockExpr
+                     |  ‘$’ ‘{’ Block ‘}’                           -- unless inside quoted pattern
+                     |  ‘$’ ‘{’ Pattern ‘}’                         -- only inside quoted pattern
+                     |  Quoted
+                     |  quoteId                                     -- only inside splices
+                     |  ‘new’ ConstrApp {‘with’ ConstrApp} [TemplateBody]   New(constr | templ)
+                     |  ‘new’ TemplateBody
+                     |  ‘(’ ExprsInParens ‘)’                       Parens(exprs)
+                     |  SimpleExpr ‘.’ id                           Select(expr, id)
+                     |  SimpleExpr ‘.’ MatchClause
+                     |  SimpleExpr TypeArgs                         TypeApply(expr, args)
+                     |  SimpleExpr ArgumentExprs                    Apply(expr, args)
+                     |  SimpleExpr ‘:’ IndentedExpr                 -- under language.experimental.fewerBraces
+                     |  SimpleExpr FunParams (‘=>’ | ‘?=>’) IndentedExpr -- under language.experimental.fewerBraces
+                     |  SimpleExpr ‘_’                              PostfixOp(expr, _) (to be dropped)
+                     |  XmlExpr                                     -- to be dropped
+IndentedExpr      ::=  indent (CaseClauses | Block) outdent
+Quoted            ::=  ‘'’ ‘{’ Block ‘}’
+                     |  ‘'’ ‘[’ Type ‘]’
+ExprsInParens     ::=  ExprInParens {‘,’ ExprInParens}
+ExprInParens      ::=  PostfixExpr ‘:’ Type                         -- normal Expr allows only RefinedType here
+                     |  Expr
+ParArgumentExprs  ::=  ‘(’ [‘using’] ExprsInParens ‘)’              exprs
+                     |  ‘(’ [ExprsInParens ‘,’] PostfixExpr ‘*’ ‘)’ exprs :+ Typed(expr, Ident(wildcardStar))
+ArgumentExprs     ::=  ParArgumentExprs
+                     |  BlockExpr
+BlockExpr
::= <<< CaseClauses | Block >>> +Block ::= {BlockStat semi} [BlockResult] Block(stats, expr?) +BlockStat ::= Import + | {Annotation {nl}} {LocalModifier} Def + | Extension + | Expr1 + | EndMarker + +ForExpr ::= ‘for’ ‘(’ Enumerators0 ‘)’ {nl} [‘do‘ | ‘yield’] Expr ForYield(enums, expr) / ForDo(enums, expr) + | ‘for’ ‘{’ Enumerators0 ‘}’ {nl} [‘do‘ | ‘yield’] Expr + | ‘for’ Enumerators0 (‘do‘ | ‘yield’) Expr +Enumerators0 ::= {nl} Enumerators [semi] +Enumerators ::= Generator {semi Enumerator | Guard} +Enumerator ::= Generator + | Guard {Guard} + | Pattern1 ‘=’ Expr GenAlias(pat, expr) +Generator ::= [‘case’] Pattern1 ‘<-’ Expr GenFrom(pat, expr) +Guard ::= ‘if’ PostfixExpr + +CaseClauses ::= CaseClause { CaseClause } Match(EmptyTree, cases) +CaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Block CaseDef(pat, guard?, block) // block starts at => +ExprCaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Expr +TypeCaseClauses ::= TypeCaseClause { TypeCaseClause } +TypeCaseClause ::= ‘case’ (InfixType | ‘_’) ‘=>’ Type [semi] + +Pattern ::= Pattern1 { ‘|’ Pattern1 } Alternative(pats) +Pattern1 ::= Pattern2 [‘:’ RefinedType] Bind(name, Typed(Ident(wildcard), tpe)) +Pattern2 ::= [id ‘@’] InfixPattern Bind(name, pat) +InfixPattern ::= SimplePattern { id [nl] SimplePattern } InfixOp(pat, op, pat) +SimplePattern ::= PatVar Ident(wildcard) + | Literal Bind(name, Ident(wildcard)) + | ‘(’ [Patterns] ‘)’ Parens(pats) Tuple(pats) + | Quoted + | XmlPattern (to be dropped) + | SimplePattern1 [TypeArgs] [ArgumentPatterns] + | ‘given’ RefinedType +SimplePattern1 ::= SimpleRef + | SimplePattern1 ‘.’ id +PatVar ::= varid + | ‘_’ +Patterns ::= Pattern {‘,’ Pattern} +ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ Apply(fn, pats) + | ‘(’ [Patterns ‘,’] PatVar ‘*’ ‘)’ +``` + +### Type and Value Parameters +```ebnf +ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ +ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeDef(Modifiers, name, tparams, bounds) + id [HkTypeParamClause] TypeParamBounds Bound(below, 
above, context) + +DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds + +TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ +TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds + +HkTypeParamClause ::= ‘[’ HkTypeParam {‘,’ HkTypeParam} ‘]’ +HkTypeParam ::= {Annotation} [‘+’ | ‘-’] (id [HkTypeParamClause] | ‘_’) + TypeBounds + +ClsParamClauses ::= {ClsParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ‘)’] +ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ + | [nl] ‘(’ ‘using’ (ClsParams | FunArgTypes) ‘)’ +ClsParams ::= ClsParam {‘,’ ClsParam} +ClsParam ::= {Annotation} ValDef(mods, id, tpe, expr) -- point of mods on val/var + [{Modifier} (‘val’ | ‘var’) | ‘inline’] Param +Param ::= id ‘:’ ParamType [‘=’ Expr] + +DefParamClauses ::= {DefParamClause} [[nl] ‘(’ [‘implicit’] DefParams ‘)’] +DefParamClause ::= [nl] ‘(’ DefParams ‘)’ | UsingParamClause +UsingParamClause ::= [nl] ‘(’ ‘using’ (DefParams | FunArgTypes) ‘)’ +DefParams ::= DefParam {‘,’ DefParam} +DefParam ::= {Annotation} [‘inline’] Param ValDef(mods, id, tpe, expr) -- point of mods at id. 
+```
+
+### Bindings and Imports
+```ebnf
+Bindings          ::=  ‘(’ [Binding {‘,’ Binding}] ‘)’
+Binding           ::=  (id | ‘_’) [‘:’ Type]                        ValDef(_, id, tpe, EmptyTree)
+
+Modifier          ::=  LocalModifier
+                    |  AccessModifier
+                    |  ‘override’
+                    |  ‘opaque’
+LocalModifier     ::=  ‘abstract’
+                    |  ‘final’
+                    |  ‘sealed’
+                    |  ‘open’
+                    |  ‘implicit’
+                    |  ‘lazy’
+                    |  ‘inline’
+AccessModifier    ::=  (‘private’ | ‘protected’) [AccessQualifier]
+AccessQualifier   ::=  ‘[’ id ‘]’
+
+Annotation        ::=  ‘@’ SimpleType1 {ParArgumentExprs}           Apply(tpe, args)
+
+Import            ::=  ‘import’ ImportExpr {‘,’ ImportExpr}
+Export            ::=  ‘export’ ImportExpr {‘,’ ImportExpr}
+ImportExpr        ::=  SimpleRef {‘.’ id} ‘.’ ImportSpec            Import(expr, sels)
+                    |  SimpleRef ‘as’ id                            Import(EmptyTree, ImportSelector(ref, id))
+ImportSpec        ::=  NamedSelector
+                    |  WildCardSelector
+                    |  ‘{’ ImportSelectors ‘}’
+NamedSelector     ::=  id [‘as’ (id | ‘_’)]
+WildCardSelector  ::=  ‘*’ | ‘given’ [InfixType]
+ImportSelectors   ::=  NamedSelector [‘,’ ImportSelectors]
+                    |  WildCardSelector {‘,’ WildCardSelector}
+
+EndMarker         ::=  ‘end’ EndMarkerTag                           -- when followed by EOL
+EndMarkerTag      ::=  id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | ‘try’
+                    |  ‘new’ | ‘this’ | ‘given’ | ‘extension’ | ‘val’
+```
+
+### Declarations and Definitions
+```ebnf
+RefineDcl         ::=  ‘val’ ValDcl
+                    |  ‘def’ DefDcl
+                    |  ‘type’ {nl} TypeDcl
+Dcl               ::=  RefineDcl
+                    |  ‘var’ VarDcl
+ValDcl            ::=  ids ‘:’ Type                                 PatDef(_, ids, tpe, EmptyTree)
+VarDcl            ::=  ids ‘:’ Type                                 PatDef(_, ids, tpe, EmptyTree)
+DefDcl            ::=  DefSig ‘:’ Type                              DefDef(_, name, tparams, vparamss, tpe, EmptyTree)
+DefSig            ::=  id [DefTypeParamClause] DefParamClauses
+TypeDcl           ::=  id [TypeParamClause] {FunParamClause} TypeBounds   TypeDefTree(_, name, tparams, bound)
+                       [‘=’ Type]
+
+Def               ::=  ‘val’ PatDef
+                    |  ‘var’ PatDef
+                    |  ‘def’ DefDef
+                    |  ‘type’ {nl} TypeDcl
+                    |  TmplDef
+PatDef            ::=  ids [‘:’ Type] ‘=’ Expr
+                    |  Pattern2 [‘:’ Type] ‘=’ Expr                 PatDef(_, pats, tpe?, expr)
+DefDef            ::=  DefSig [‘:’ Type] ‘=’ Expr                   DefDef(_, name, tparams, vparamss, tpe, expr)
+                    |  ‘this’ DefParamClause
DefParamClauses ‘=’ ConstrExpr                                      DefDef(_, <init>, Nil, vparamss, EmptyTree, expr | Block)
+
+TmplDef           ::=  ([‘case’] ‘class’ | ‘trait’) ClassDef
+                    |  [‘case’] ‘object’ ObjectDef
+                    |  ‘enum’ EnumDef
+                    |  ‘given’ GivenDef
+ClassDef          ::=  id ClassConstr [Template]                    ClassDef(mods, name, tparams, templ)
+ClassConstr       ::=  [ClsTypeParamClause] [ConstrMods] ClsParamClauses  with DefDef(_, <init>, Nil, vparamss, EmptyTree, EmptyTree) as first stat
+ConstrMods        ::=  {Annotation} [AccessModifier]
+ObjectDef         ::=  id [Template]                                ModuleDef(mods, name, template)  // no constructor
+EnumDef           ::=  id ClassConstr InheritClauses EnumBody
+GivenDef          ::=  [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance)
+GivenSig          ::=  [id] [DefTypeParamClause] {UsingParamClause} ‘:’  -- one of `id`, `DefParamClause`, `UsingParamClause` must be present
+StructuralInstance ::=  ConstrApp {‘with’ ConstrApp} [‘with’ TemplateBody]
+Extension         ::=  ‘extension’ [DefTypeParamClause] {UsingParamClause}
+                       ‘(’ DefParam ‘)’ {UsingParamClause} ExtMethods
+ExtMethods        ::=  ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>>
+ExtMethod         ::=  {Annotation [nl]} {Modifier} ‘def’ DefDef
+Template          ::=  InheritClauses [TemplateBody]
+InheritClauses    ::=  [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}]
+ConstrApps        ::=  ConstrApp ({‘,’ ConstrApp} | {‘with’ ConstrApp})
+ConstrApp         ::=  SimpleType1 {Annotation} {ParArgumentExprs}
+ConstrExpr        ::=  SelfInvocation
+                    |  <<< SelfInvocation {semi BlockStat} >>>
+SelfInvocation    ::=  ‘this’ ArgumentExprs {ArgumentExprs}
+
+TemplateBody      ::=  :<<< [SelfType] TemplateStat {semi TemplateStat} >>>
+TemplateStat      ::=  Import
+                    |  Export
+                    |  {Annotation [nl]} {Modifier} Def
+                    |  {Annotation [nl]} {Modifier} Dcl
+                    |  Extension
+                    |  Expr1
+                    |  EndMarker
+                    |
+SelfType          ::=  id [‘:’ InfixType] ‘=>’                      ValDef(_, name, tpt, _)
+                    |  ‘this’ ‘:’ InfixType ‘=>’
+
+EnumBody          ::=  :<<< [SelfType] EnumStat {semi EnumStat} >>>
+EnumStat          ::=  TemplateStat
+                    |  {Annotation [nl]} {Modifier} EnumCase
+EnumCase          ::=  ‘case’ (id ClassConstr [‘extends’ ConstrApps] |
ids) + +TopStats ::= TopStat {semi TopStat} +TopStat ::= Import + | Export + | {Annotation [nl]} {Modifier} Def + | Extension + | Packaging + | PackageObject + | EndMarker + | +Packaging ::= ‘package’ QualId :<<< TopStats >>> +PackageObject ::= ‘package’ ‘object’ ObjectDef + +CompilationUnit ::= {‘package’ QualId semi} TopStats +``` diff --git a/docs/docs/internals/type-system.md b/docs/_docs/internals/type-system.md similarity index 87% rename from docs/docs/internals/type-system.md rename to docs/_docs/internals/type-system.md index 30aa907f6773..8fa8912a7118 100644 --- a/docs/docs/internals/type-system.md +++ b/docs/_docs/internals/type-system.md @@ -23,23 +23,24 @@ Type -+- ProxyType --+- NamedType ----+--- TypeRef | | +--- ThisType | | +--- SuperType | | +--- ConstantType - | | +--- MethodParam + | | +--- TermParamRef | | +----RecThis | | +--- SkolemType - | +- PolyParam + | +- TypeParamRef | +- RefinedOrRecType -+-- RefinedType | | -+-- RecType - | +- HKApply + | +- AppliedType | +- TypeBounds | +- ExprType | +- AnnotatedType | +- TypeVar - | +- PolyType + | +- HKTypeLambda + | +- MatchType | +- GroundType -+- AndType +- OrType - +- MethodType -----+- ImplicitMethodType - | +- JavaMethodType + +- MethodOrPoly ---+-- PolyType + | +-- MethodType +- ClassInfo | +- NoType @@ -131,8 +132,8 @@ TODO ## Type inference via constraint solving ## TODO -[1]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/Types.scala +[1]: https://github.com/lampepfl/dotty/blob/main/compiler/src/dotty/tools/dotc/core/Types.scala [2]: https://github.com/samuelgruetter/dotty/blob/classdiagrampdf/dotty-types.pdf [3]: https://github.com/samuelgruetter/scaladiagrams/tree/print-descendants -[4]: https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/TypeComparer.scala -[5]: https://github.com/lampepfl/dotty/blob/master/tests/pos/refinedSubtyping.scala +[4]: 
https://github.com/lampepfl/dotty/blob/main/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +[5]: https://github.com/lampepfl/dotty/blob/main/tests/pos/refinedSubtyping.scala diff --git a/docs/_docs/reference/changed-features/changed-features.md b/docs/_docs/reference/changed-features/changed-features.md new file mode 100644 index 000000000000..05a138b396fa --- /dev/null +++ b/docs/_docs/reference/changed-features/changed-features.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Other Changed Features" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features.html +--- + +The following pages document the features that have changed in Scala 3, compared to Scala 2. diff --git a/docs/docs/reference/changed-features/compiler-plugins.md b/docs/_docs/reference/changed-features/compiler-plugins.md similarity index 80% rename from docs/docs/reference/changed-features/compiler-plugins.md rename to docs/_docs/reference/changed-features/compiler-plugins.md index debf369cc0c1..2a446e9cfb84 100644 --- a/docs/docs/reference/changed-features/compiler-plugins.md +++ b/docs/_docs/reference/changed-features/compiler-plugins.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Changes in Compiler Plugins" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/compiler-plugins.html --- Compiler plugins are supported by Dotty (and Scala 3) since 0.9. 
There are two notable changes @@ -63,28 +64,28 @@ import dotty.tools.dotc.plugins.{PluginPhase, StandardPlugin} import dotty.tools.dotc.transform.{Pickler, Staging} class DivideZero extends StandardPlugin: - val name: String = "divideZero" - override val description: String = "divide zero check" + val name: String = "divideZero" + override val description: String = "divide zero check" - def init(options: List[String]): List[PluginPhase] = - (new DivideZeroPhase) :: Nil + def init(options: List[String]): List[PluginPhase] = + (new DivideZeroPhase) :: Nil class DivideZeroPhase extends PluginPhase: - import tpd.* + import tpd.* - val phaseName = "divideZero" + val phaseName = "divideZero" - override val runsAfter = Set(Pickler.name) - override val runsBefore = Set(Staging.name) + override val runsAfter = Set(Pickler.name) + override val runsBefore = Set(Staging.name) - override def transformApply(tree: Apply)(implicit ctx: Context): Tree = - tree match - case Apply(Select(rcvr, nme.DIV), List(Literal(Constant(0)))) - if rcvr.tpe <:< defn.IntType => - report.error("dividing by zero", tree.pos) - case _ => - () - tree + override def transformApply(tree: Apply)(implicit ctx: Context): Tree = + tree match + case Apply(Select(rcvr, nme.DIV), List(Literal(Constant(0)))) + if rcvr.tpe <:< defn.IntType => + report.error("dividing by zero", tree.pos) + case _ => + () + tree end DivideZeroPhase ``` @@ -109,11 +110,11 @@ import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.plugins.ResearchPlugin class DummyResearchPlugin extends ResearchPlugin: - val name: String = "dummy" - override val description: String = "dummy research plugin" + val name: String = "dummy" + override val description: String = "dummy research plugin" - def init(options: List[String], phases: List[List[Phase]])(implicit ctx: Context): List[List[Phase]] = - phases + def init(options: List[String], phases: List[List[Phase]])(implicit ctx: Context): List[List[Phase]] = + phases end 
DummyResearchPlugin ``` diff --git a/docs/docs/reference/changed-features/eta-expansion-spec.md b/docs/_docs/reference/changed-features/eta-expansion-spec.md similarity index 97% rename from docs/docs/reference/changed-features/eta-expansion-spec.md rename to docs/_docs/reference/changed-features/eta-expansion-spec.md index 932d7e9cd236..67f7606d5b15 100644 --- a/docs/docs/reference/changed-features/eta-expansion-spec.md +++ b/docs/_docs/reference/changed-features/eta-expansion-spec.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Automatic Eta Expansion - More Details" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/eta-expansion-spec.html --- ## Motivation diff --git a/docs/docs/reference/changed-features/eta-expansion.md b/docs/_docs/reference/changed-features/eta-expansion.md similarity index 93% rename from docs/docs/reference/changed-features/eta-expansion.md rename to docs/_docs/reference/changed-features/eta-expansion.md index adfaa96f531a..2ae9af870a55 100644 --- a/docs/docs/reference/changed-features/eta-expansion.md +++ b/docs/_docs/reference/changed-features/eta-expansion.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Automatic Eta Expansion" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/eta-expansion.html --- The conversion of _methods_ into _functions_ has been improved and happens automatically for methods with one or more parameters. 
diff --git a/docs/docs/reference/changed-features/implicit-conversions-spec.md b/docs/_docs/reference/changed-features/implicit-conversions-spec.md similarity index 91% rename from docs/docs/reference/changed-features/implicit-conversions-spec.md rename to docs/_docs/reference/changed-features/implicit-conversions-spec.md index 33557ecef72c..b4ab97355036 100644 --- a/docs/docs/reference/changed-features/implicit-conversions-spec.md +++ b/docs/_docs/reference/changed-features/implicit-conversions-spec.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Implicit Conversions - More Details" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/implicit-conversions-spec.html --- ## Implementation @@ -11,13 +12,13 @@ defined by either: - An `implicit def` which has type `S => T` or `(=> S) => T` - An implicit value which has type `Conversion[S, T]` -The standard library defines an abstract class `Conversion`: +The standard library defines an abstract class [`Conversion`](https://scala-lang.org/api/3.x/scala/Conversion.html): ```scala package scala @java.lang.FunctionalInterface abstract class Conversion[-T, +U] extends Function1[T, U]: - def apply(x: T): U + def apply(x: T): U ``` Function literals are automatically converted to `Conversion` values. @@ -85,7 +86,7 @@ Note that implicit conversions are also affected by the [changes to implicit res ## Motivation for the changes -The introduction of [`scala.Conversion`](https://github.com/lampepfl/dotty/blob/master/library/src/scala/Conversion.scala) +The introduction of [`scala.Conversion`](https://scala-lang.org/api/3.x/scala/Conversion.html) in Scala 3 and the decision to restrict implicit values of this type to be considered as potential views comes from the desire to remove surprising behavior from the language: @@ -101,7 +102,7 @@ This snippet contains a type error. The right-hand side of `val x` does not conform to type `String`. 
In Scala 2, the compiler will use `m` as an implicit conversion from `Int` to `String`, whereas Scala 3 will report a type error, because `Map` isn't an instance of -`Conversion`. +[`Conversion`](https://scala-lang.org/api/3.x/scala/Conversion.html). ## Migration path diff --git a/docs/_docs/reference/changed-features/implicit-conversions.md b/docs/_docs/reference/changed-features/implicit-conversions.md new file mode 100644 index 000000000000..69737d5b6bf9 --- /dev/null +++ b/docs/_docs/reference/changed-features/implicit-conversions.md @@ -0,0 +1,65 @@ +--- +layout: doc-page +title: "Implicit Conversions" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/implicit-conversions.html +--- + +An _implicit conversion_, also called _view_, is a conversion that +is applied by the compiler in several situations: + +1. When an expression `e` of type `T` is encountered, but the compiler + needs an expression of type `S`. +1. When an expression `e.m` where `e` has type `T` but `T` defines no + member `m` is encountered. + +In those cases, the compiler looks in the implicit scope for a +conversion that can convert an expression of type `T` to an expression +of type `S` (or to a type that defines a member `m` in the second +case). + +This conversion can be either: + +1. An `implicit def` of type `T => S` or `(=> T) => S` +1. An implicit value of type `scala.Conversion[T, S]` + +Defining an implicit conversion will emit a warning unless the import +`scala.language.implicitConversions` is in scope, or the flag +`-language:implicitConversions` is given to the compiler. + +## Examples + +The first example is taken from [`scala.Predef`](https://scala-lang.org/api/3.x/scala/Predef$.html). 
+Thanks to this implicit conversion, it is possible to pass a +[`scala.Int`](https://scala-lang.org/api/3.x/scala/Int.html) +to a Java method that expects a `java.lang.Integer` + +```scala +import scala.language.implicitConversions +implicit def int2Integer(x: Int): java.lang.Integer = + x.asInstanceOf[java.lang.Integer] +``` + +The second example shows how to use `Conversion` to define an +`Ordering` for an arbitrary type, given existing `Ordering`s for other +types: + +```scala +import scala.language.implicitConversions +implicit def ordT[T, S]( + implicit conv: Conversion[T, S], + ordS: Ordering[S] + ): Ordering[T] = + // `ordS` compares values of type `S`, but we can convert from `T` to `S` + (x: T, y: T) => ordS.compare(x, y) + +class A(val x: Int) // The type for which we want an `Ordering` + +// Convert `A` to a type for which an `Ordering` is available: +implicit val AToInt: Conversion[A, Int] = _.x + +implicitly[Ordering[Int]] // Ok, exists in the standard library +implicitly[Ordering[A]] // Ok, will use the implicit conversion from + // `A` to `Int` and the `Ordering` for `Int`. +``` + +[More details](implicit-conversions-spec.md) diff --git a/docs/docs/reference/changed-features/implicit-resolution.md b/docs/_docs/reference/changed-features/implicit-resolution.md similarity index 93% rename from docs/docs/reference/changed-features/implicit-resolution.md rename to docs/_docs/reference/changed-features/implicit-resolution.md index 90b6c80cf3bf..cf8af361b787 100644 --- a/docs/docs/reference/changed-features/implicit-resolution.md +++ b/docs/_docs/reference/changed-features/implicit-resolution.md @@ -1,7 +1,9 @@ --- layout: doc-page title: "Changes in Implicit Resolution" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/implicit-resolution.html --- + This section describes changes to the implicit resolution that apply both to the new `given`s and to the old-style `implicit`s in Scala 3. 
Implicit resolution uses a new algorithm which caches implicit results more aggressively for performance. There are also some changes that @@ -13,23 +15,23 @@ where the type may still be inferred: ```scala class C { - val ctx: Context = ... // ok + val ctx: Context = ... // ok - /*!*/ implicit val x = ... // error: type must be given explicitly + /*!*/ implicit val x = ... // error: type must be given explicitly - /*!*/ implicit def y = ... // error: type must be given explicitly + /*!*/ implicit def y = ... // error: type must be given explicitly } val y = { - implicit val ctx = this.ctx // ok - ... + implicit val ctx = this.ctx // ok + ... } ``` **2.** Nesting is now taken into account for selecting an implicit. Consider for instance the following scenario: ```scala def f(implicit i: C) = { - def g(implicit j: C) = { - implicitly[C] - } + def g(implicit j: C) = { + implicitly[C] + } } ``` This will now resolve the `implicitly` call to `j`, because `j` is nested @@ -45,8 +47,8 @@ no longer applies. given a: A = A() object o: - given b: B = B() - type C + given b: B = B() + type C ``` Both `a` and `b` are visible as implicits at the point of the definition of `type C`. However, a reference to `p.o.C` outside of package `p` will @@ -56,6 +58,7 @@ In more detail, here are the rules for what constitutes the implicit scope of a type: **Definition:** A reference is an _anchor_ if it refers to an object, a class, a trait, an abstract type, an opaque type alias, or a match type alias. References to packages and package objects are anchors only under `-source:3.0-migration`. +Opaque type aliases count as anchors only outside the scope where their alias is visible. **Definition:** The _anchors_ of a type _T_ is a set of references defined as follows: @@ -107,7 +110,7 @@ which means that the alternative `c` would be chosen as solution! 
Scala 2's somewhat puzzling behavior with respect to ambiguity has been exploited to implement the analogue of a "negated" search in implicit resolution, where a query `Q1` fails if some other query `Q2` succeeds and `Q1` succeeds if `Q2` fails. With the new cleaned up behavior -these techniques no longer work. But there is now a new special type `scala.util.NotGiven` +these techniques no longer work. But there is now a new special type [`scala.util.NotGiven`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) which implements negation directly. For any query type `Q`, `NotGiven[Q]` succeeds if and only if the implicit search for `Q` fails. diff --git a/docs/docs/reference/changed-features/imports.md b/docs/_docs/reference/changed-features/imports.md similarity index 93% rename from docs/docs/reference/changed-features/imports.md rename to docs/_docs/reference/changed-features/imports.md index cdd24ae6382d..7d364ce68b92 100644 --- a/docs/docs/reference/changed-features/imports.md +++ b/docs/_docs/reference/changed-features/imports.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Imports" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/imports.html --- The syntax of wildcard and renaming imports (and exports) has changed. @@ -16,8 +17,8 @@ If you want to import a member named `*` specifically, you can use backticks aro ```scala object A: - def * = ... - def min = ... + def * = ... + def min = ... 
object B: import A.`*` // imports just `*` diff --git a/docs/docs/reference/changed-features/interpolation-escapes.md b/docs/_docs/reference/changed-features/interpolation-escapes.md similarity index 79% rename from docs/docs/reference/changed-features/interpolation-escapes.md rename to docs/_docs/reference/changed-features/interpolation-escapes.md index 9a96f2a6d285..6c9afa9d85cb 100644 --- a/docs/docs/reference/changed-features/interpolation-escapes.md +++ b/docs/_docs/reference/changed-features/interpolation-escapes.md @@ -1,9 +1,10 @@ --- layout: doc-page title: "Escapes in interpolations" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/interpolation-escapes.html --- -In Scala 2 there is no straightforward way to represent a single quote character `"` in a single quoted interpolation. A `\` character can't be used for that because interpolators themselves decide how to handle escaping, so the parser doesn't know whether the `"` should be escaped or used as a terminator. +In Scala 2 there is no straightforward way to represent a single quote character `"` in a single quoted interpolation. A `\` character can't be used for that because interpolators themselves decide how to handle escaping, so the parser doesn't know whether the `"` character should be escaped or used as a terminator. In Scala 3, we can use the `$` meta character of interpolations to escape a `"` character. 
Example: diff --git a/docs/docs/reference/changed-features/lazy-vals-init.md b/docs/_docs/reference/changed-features/lazy-vals-init.md similarity index 95% rename from docs/docs/reference/changed-features/lazy-vals-init.md rename to docs/_docs/reference/changed-features/lazy-vals-init.md index 5fbb126e5ddf..c3b25cdc5ab3 100644 --- a/docs/docs/reference/changed-features/lazy-vals-init.md +++ b/docs/_docs/reference/changed-features/lazy-vals-init.md @@ -1,6 +1,7 @@ --- layout: doc-page -title: Lazy Vals initialization +title: Lazy Vals Initialization +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/lazy-vals-init.html --- Scala 3 implements [Version 6](https://docs.scala-lang.org/sips/improved-lazy-val-initialization.html#version-6---no-synchronization-on-this-and-concurrent-initialization-of-fields) diff --git a/docs/_docs/reference/changed-features/main-functions.md b/docs/_docs/reference/changed-features/main-functions.md new file mode 100644 index 000000000000..7485dc942d99 --- /dev/null +++ b/docs/_docs/reference/changed-features/main-functions.md @@ -0,0 +1,87 @@ +--- +layout: doc-page +title: "Main Methods" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/main-functions.html +--- + +Scala 3 offers a new way to define programs that can be invoked from the command line: +A [`@main`](https://scala-lang.org/api/3.x/scala/main.html) annotation on a method turns this method into an executable program. 
+Example: + +```scala +@main def happyBirthday(age: Int, name: String, others: String*) = + val suffix = + age % 100 match + case 11 | 12 | 13 => "th" + case _ => + age % 10 match + case 1 => "st" + case 2 => "nd" + case 3 => "rd" + case _ => "th" + val bldr = new StringBuilder(s"Happy $age$suffix birthday, $name") + for other <- others do bldr.append(" and ").append(other) + bldr.toString +``` + +This would generate a main program `happyBirthday` that could be called like this + +``` +> scala happyBirthday 23 Lisa Peter +Happy 23rd birthday, Lisa and Peter +``` + +A [`@main`](https://scala-lang.org/api/3.x/scala/main.html) annotated method can be written either at the top-level or in a statically accessible object. The name of the program is in each case the name of the method, without any object prefixes. The [`@main`](https://scala-lang.org/api/3.x/scala/main.html) method can have an arbitrary number of parameters. +For each parameter type there must be an instance of the [`scala.util.CommandLineParser.FromString[T]`](https://scala-lang.org/api/3.x/scala/util/CommandLineParser$$FromString.html) type class that is used to convert an argument string to the required parameter type `T`. +The parameter list of a main method can end in a repeated parameter that then takes all remaining arguments given on the command line. + +The program implemented from a [`@main`](https://scala-lang.org/api/3.x/scala/main.html) method checks that there are enough arguments on +the command line to fill in all parameters, and that argument strings are convertible to +the required types. If a check fails, the program is terminated with an error message. 
+
+Examples:
+
+```
+> scala happyBirthday 22
+Illegal command line after first argument: more arguments expected
+
+> scala happyBirthday sixty Fred
+Illegal command line: java.lang.NumberFormatException: For input string: "sixty"
+```
+
+The Scala compiler generates a program from a [`@main`](https://scala-lang.org/api/3.x/scala/main.html) method `f` as follows:
+
+ - It creates a class named `f` in the package where the [`@main`](https://scala-lang.org/api/3.x/scala/main.html) method was found
+ - The class has a static method `main` with the usual signature. It takes an `Array[String]`
+   as argument and returns [`Unit`](https://scala-lang.org/api/3.x/scala/Unit.html).
+ - The generated `main` method calls method `f` with arguments converted using
+   methods in the [`scala.util.CommandLineParser`](https://scala-lang.org/api/3.x/scala/util/CommandLineParser$.html) object.
+
+For instance, the `happyBirthDay` method above would generate additional code equivalent to the following class:
+
+```scala
+final class happyBirthday:
+  import scala.util.CommandLineParser as CLP
+  <static> def main(args: Array[String]): Unit =
+    try
+      happyBirthday(
+        CLP.parseArgument[Int](args, 0),
+        CLP.parseArgument[String](args, 1),
+        CLP.parseRemainingArguments[String](args, 2))
+    catch
+      case error: CLP.ParseError => CLP.showError(error)
+```
+
+**Note**: The `<static>` modifier above expresses that the `main` method is generated
+as a static method of class `happyBirthDay`. It is not available for user programs in Scala. Regular "static" members are generated in Scala using objects instead.
+
+[`@main`](https://scala-lang.org/api/3.x/scala/main.html) methods are the recommended scheme to generate programs that can be invoked from the command line in Scala 3. They replace the previous scheme to write program as objects with a special `App` parent class.
In Scala 2, `happyBirthday` could be written also like this: + +```scala +object happyBirthday extends App: + // needs by-hand parsing of arguments vector + ... +``` + +The previous functionality of [`App`](https://www.scala-lang.org/api/3.x/scala/App.html), which relied on the "magic" [`DelayedInit`](../dropped-features/delayed-init.md) trait, is no longer available. [`App`](https://scala-lang.org/api/3.x/scala/App.html) still exists in limited form for now, but it does not support command line arguments and will be deprecated in the future. If programs need to cross-build +between Scala 2 and Scala 3, it is recommended to use an explicit `main` method with an `Array[String]` argument instead. diff --git a/docs/_docs/reference/changed-features/match-syntax.md b/docs/_docs/reference/changed-features/match-syntax.md new file mode 100644 index 000000000000..cff77262611b --- /dev/null +++ b/docs/_docs/reference/changed-features/match-syntax.md @@ -0,0 +1,56 @@ +--- +layout: doc-page +title: "Match Expressions" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/match-syntax.html +--- + +The syntactical precedence of match expressions has been changed. +`match` is still a keyword, but it is used like an alphabetical operator. This has several consequences: + + 1. `match` expressions can be chained: + + ```scala + xs match { + case Nil => "empty" + case _ => "nonempty" + } match { + case "empty" => 0 + case "nonempty" => 1 + } + ``` + + (or, dropping the optional braces) + + ```scala + xs match + case Nil => "empty" + case _ => "nonempty" + match + case "empty" => 0 + case "nonempty" => 1 + ``` + + 2. `match` may follow a period: + + ```scala + if xs.match + case Nil => false + case _ => true + then "nonempty" + else "empty" + ``` + + 3. The scrutinee of a match expression must be an `InfixExpr`. Previously the scrutinee could be followed by a type ascription `: T`, but this is no longer supported. So `x : T match { ... 
}` now has to be + written `(x: T) match { ... }`. + +## Syntax + +The new syntax of match expressions is as follows. + +``` +InfixExpr ::= ... + | InfixExpr MatchClause +SimpleExpr ::= ... + | SimpleExpr ‘.’ MatchClause +MatchClause ::= ‘match’ ‘{’ CaseClauses ‘}’ +``` diff --git a/docs/_docs/reference/changed-features/numeric-literals.md b/docs/_docs/reference/changed-features/numeric-literals.md new file mode 100644 index 000000000000..758cc2e21373 --- /dev/null +++ b/docs/_docs/reference/changed-features/numeric-literals.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Numeric Literals" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/numeric-literals.html +--- + +[Document was moved](../experimental/numeric-literals.md) diff --git a/docs/docs/reference/changed-features/operators.md b/docs/_docs/reference/changed-features/operators.md similarity index 87% rename from docs/docs/reference/changed-features/operators.md rename to docs/_docs/reference/changed-features/operators.md index ff30c6c74bd9..495eb8c5ede9 100644 --- a/docs/docs/reference/changed-features/operators.md +++ b/docs/_docs/reference/changed-features/operators.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Rules for Operators" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/operators.html --- The rules for infix operators have changed in some parts: @@ -18,12 +19,12 @@ import scala.annotation.targetName trait MultiSet[T]: - infix def union(other: MultiSet[T]): MultiSet[T] + infix def union(other: MultiSet[T]): MultiSet[T] - def difference(other: MultiSet[T]): MultiSet[T] + def difference(other: MultiSet[T]): MultiSet[T] - @targetName("intersection") - def *(other: MultiSet[T]): MultiSet[T] + @targetName("intersection") + def *(other: MultiSet[T]): MultiSet[T] end MultiSet @@ -79,8 +80,8 @@ The purpose of the `infix` modifier is to achieve consistency across a code base infix def op3[T](x: T, y: S): R // error: two parameters extension (x: A) - infix 
def op4(y: B): R // ok - infix def op5(y1: B, y2: B): R // error: two parameters + infix def op4(y: B): R // ok + infix def op5(y1: B, y2: B): R // error: two parameters ``` 4. `infix` modifiers can also be given to type, trait or class definitions that have exactly two type parameters. An infix type like @@ -113,14 +114,14 @@ Infix operators can now appear at the start of lines in a multi-line expression. ```scala val str = "hello" - ++ " world" - ++ "!" + ++ " world" + ++ "!" def condition = - x > 0 - || - xs.exists(_ > 0) - || xs.isEmpty + x > 0 + || + xs.exists(_ > 0) + || xs.isEmpty ``` Previously, those expressions would have been rejected, since the compiler's semicolon inference @@ -128,10 +129,12 @@ would have treated the continuations `++ " world"` or `|| xs.isEmpty` as separat To make this syntax work, the rules are modified to not infer semicolons in front of leading infix operators. A _leading infix operator_ is - - a symbolic identifier such as `+`, or `approx_==`, or an identifier in backticks, - - that starts a new line, - - that precedes a token on the same or the next line that can start an expression, - - and that is immediately followed by at least one whitespace character. + - a symbolic identifier such as `+`, or `approx_==`, or an identifier in backticks that + - starts a new line, and + - is not following a blank line, and + - is followed by at least one whitespace character and a token that can start an expression. + - Furthermore, if the operator appears on its own line, the next line must have at least + the same indentation width as the operator. 
Example: diff --git a/docs/docs/reference/changed-features/overload-resolution.md b/docs/_docs/reference/changed-features/overload-resolution.md similarity index 97% rename from docs/docs/reference/changed-features/overload-resolution.md rename to docs/_docs/reference/changed-features/overload-resolution.md index 63eb3b318371..e6c929f7a2ef 100644 --- a/docs/docs/reference/changed-features/overload-resolution.md +++ b/docs/_docs/reference/changed-features/overload-resolution.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Changes in Overload Resolution" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/overload-resolution.html --- Overload resolution in Scala 3 improves on Scala 2 in two ways. diff --git a/docs/docs/reference/changed-features/pattern-bindings.md b/docs/_docs/reference/changed-features/pattern-bindings.md similarity index 90% rename from docs/docs/reference/changed-features/pattern-bindings.md rename to docs/_docs/reference/changed-features/pattern-bindings.md index 0c2871ae06b9..2c8d1c10ceae 100644 --- a/docs/docs/reference/changed-features/pattern-bindings.md +++ b/docs/_docs/reference/changed-features/pattern-bindings.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Pattern Bindings" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/pattern-bindings.html --- In Scala 2, pattern bindings in `val` definitions and `for` expressions are @@ -20,12 +21,11 @@ This code gives a compile-time warning in Scala 3.1 (and also in Scala 3.0 under val pair = (1, true) val (x, y) = pair ``` -Sometimes one wants to decompose data anyway, even though the pattern is refutable. For instance, if at some point one knows that a list `elems` is non-empty one might -want to decompose it like this: +Sometimes one wants to decompose data anyway, even though the pattern is refutable. 
For instance, if at some point one knows that a list `elems` is non-empty one might want to decompose it like this: ```scala val first :: rest = elems // error ``` -This works in Scala 2. In fact it is a typical use case for Scala 2's rules. But in Scala 3.1 it will give a warning. One can avoid the warning by marking the right-hand side with an `@unchecked` annotation: +This works in Scala 2. In fact it is a typical use case for Scala 2's rules. But in Scala 3.1 it will give a warning. One can avoid the warning by marking the right-hand side with an [`@unchecked`](https://scala-lang.org/api/3.x/scala/unchecked.html) annotation: ```scala val first :: rest = elems: @unchecked // OK ``` diff --git a/docs/docs/reference/changed-features/pattern-matching.md b/docs/_docs/reference/changed-features/pattern-matching.md similarity index 78% rename from docs/docs/reference/changed-features/pattern-matching.md rename to docs/_docs/reference/changed-features/pattern-matching.md index b61ce8fa886e..b9115fe93233 100644 --- a/docs/docs/reference/changed-features/pattern-matching.md +++ b/docs/_docs/reference/changed-features/pattern-matching.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Option-less pattern matching" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/pattern-matching.html --- The implementation of pattern matching in Scala 3 was greatly simplified compared to Scala 2. From a user perspective, this means that Scala 3 generated patterns are a *lot* easier to debug, as variables all show up in debug modes and positions are correctly preserved. 
@@ -101,11 +102,11 @@ For example: ```scala object Even: - def unapply(s: String): Boolean = s.size % 2 == 0 + def unapply(s: String): Boolean = s.size % 2 == 0 "even" match - case s @ Even() => println(s"$s has an even number of characters") - case s => println(s"$s has an odd number of characters") + case s @ Even() => println(s"$s has an even number of characters") + case s => println(s"$s has an odd number of characters") // even has an even number of characters ``` @@ -122,20 +123,20 @@ For example: ```scala class FirstChars(s: String) extends Product: - def _1 = s.charAt(0) - def _2 = s.charAt(1) + def _1 = s.charAt(0) + def _2 = s.charAt(1) // Not used by pattern matching: Product is only used as a marker trait. - def canEqual(that: Any): Boolean = ??? - def productArity: Int = ??? - def productElement(n: Int): Any = ??? + def canEqual(that: Any): Boolean = ??? + def productArity: Int = ??? + def productElement(n: Int): Any = ??? object FirstChars: - def unapply(s: String): FirstChars = new FirstChars(s) + def unapply(s: String): FirstChars = new FirstChars(s) "Hi!" match - case FirstChars(char1, char2) => - println(s"First: $char1; Second: $char2") + case FirstChars(char1, char2) => + println(s"First: $char1; Second: $char2") // First: H; Second: i ``` @@ -148,15 +149,15 @@ object FirstChars: ```scala class Nat(val x: Int): - def get: Int = x - def isEmpty = x < 0 + def get: Int = x + def isEmpty = x < 0 object Nat: - def unapply(x: Int): Nat = new Nat(x) + def unapply(x: Int): Nat = new Nat(x) 5 match - case Nat(n) => println(s"$n is a natural number") - case _ => () + case Nat(n) => println(s"$n is a natural number") + case _ => () // 5 is a natural number ``` @@ -168,15 +169,15 @@ object Nat: ```Scala object ProdEmpty: - def _1: Int = ??? - def _2: String = ??? - def isEmpty = true - def unapply(s: String): this.type = this - def get = this + def _1: Int = ??? + def _2: String = ??? 
+ def isEmpty = true + def unapply(s: String): this.type = this + def get = this "" match - case ProdEmpty(_, _) => ??? - case _ => () + case ProdEmpty(_, _) => ??? + case _ => () ``` @@ -186,10 +187,10 @@ object ProdEmpty: ```Scala type X = { - def lengthCompare(len: Int): Int // or, `def length: Int` - def apply(i: Int): T1 - def drop(n: Int): scala.Seq[T2] - def toSeq: scala.Seq[T3] + def lengthCompare(len: Int): Int // or, `def length: Int` + def apply(i: Int): T1 + def drop(n: Int): scala.Seq[T2] + def toSeq: scala.Seq[T3] } ``` @@ -200,13 +201,13 @@ type X = { ```scala object CharList: - def unapplySeq(s: String): Option[Seq[Char]] = Some(s.toList) + def unapplySeq(s: String): Option[Seq[Char]] = Some(s.toList) "example" match - case CharList(c1, c2, c3, c4, _, _, _) => - println(s"$c1,$c2,$c3,$c4") - case _ => - println("Expected *exactly* 7 characters!") + case CharList(c1, c2, c3, c4, _, _, _) => + println(s"$c1,$c2,$c3,$c4") + case _ => + println("Expected *exactly* 7 characters!") // e,x,a,m ``` @@ -222,12 +223,12 @@ object CharList: ```Scala class Foo(val name: String, val children: Int *) object Foo: - def unapplySeq(f: Foo): Option[(String, Seq[Int])] = - Some((f.name, f.children)) + def unapplySeq(f: Foo): Option[(String, Seq[Int])] = + Some((f.name, f.children)) def foo(f: Foo) = f match - case Foo(name, ns : _*) => - case Foo(name, x, y, ns : _*) => + case Foo(name, ns : _*) => + case Foo(name, x, y, ns : _*) => ``` There are plans for further simplification, in particular to factor out *product diff --git a/docs/_docs/reference/changed-features/structural-types-spec.md b/docs/_docs/reference/changed-features/structural-types-spec.md new file mode 100644 index 000000000000..290189a67eda --- /dev/null +++ b/docs/_docs/reference/changed-features/structural-types-spec.md @@ -0,0 +1,153 @@ +--- +layout: doc-page +title: "Programmatic Structural Types - More Details" +movedTo: 
https://docs.scala-lang.org/scala3/reference/changed-features/structural-types-spec.html +--- + +## Syntax + +``` +SimpleType ::= ... | Refinement +Refinement ::= ‘{’ RefineStatSeq ‘}’ +RefineStatSeq ::= RefineStat {semi RefineStat} +RefineStat ::= ‘val’ VarDcl | ‘def’ DefDcl | ‘type’ {nl} TypeDcl +``` + +## Implementation of Structural Types + +The standard library defines a universal marker trait +[`scala.Selectable`](https://github.com/lampepfl/dotty/blob/main/library/src/scala/Selectable.scala): + +```scala +trait Selectable extends Any +``` + +An implementation of `Selectable` that relies on [Java reflection](https://www.oracle.com/technical-resources/articles/java/javareflection.html) is +available in the standard library: `scala.reflect.Selectable`. Other +implementations can be envisioned for platforms where Java reflection +is not available. + +Implementations of `Selectable` have to make available one or both of +the methods `selectDynamic` and `applyDynamic`. The methods could be members of the `Selectable` implementation or they could be extension methods. + +The `selectDynamic` method takes a field name and returns the value associated with that name in the `Selectable`. +It should have a signature of the form: + +```scala +def selectDynamic(name: String): T +``` + +Often, the return type `T` is `Any`. + +Unlike `scala.Dynamic`, there is no special meaning for an `updateDynamic` method. +However, we reserve the right to give it meaning in the future. +Consequently, it is recommended not to define any member called `updateDynamic` in `Selectable`s. + +The `applyDynamic` method is used for selections that are applied to arguments. It takes a method name and possibly `Class`es representing its parameter types as well as the arguments to pass to the function. 
+Its signature should be of one of the two following forms: + +```scala +def applyDynamic(name: String)(args: Any*): T +def applyDynamic(name: String, ctags: Class[?]*)(args: Any*): T +``` + +Both versions are passed the actual arguments in the `args` parameter. The second version takes in addition a vararg argument of `java.lang.Class`es that identify the method's parameter classes. Such an argument is needed +if `applyDynamic` is implemented using Java reflection, but it could be +useful in other cases as well. `selectDynamic` and `applyDynamic` can also take additional context parameters in using clauses. These are resolved in the normal way at the callsite. + +Given a value `v` of type `C { Rs }`, where `C` is a class reference +and `Rs` are structural refinement declarations, and given `v.a` of type `U`, we consider three distinct cases: + +- If `U` is a value type, we map `v.a` to: + ```scala + v.selectDynamic("a").asInstanceOf[U] + ``` + +- If `U` is a method type `(T11, ..., T1n)...(TN1, ..., TNn): R` and it is not a dependent method type, we map `v.a(a11, ..., a1n)...(aN1, ..., aNn)` to: + ```scala + v.applyDynamic("a")(a11, ..., a1n, ..., aN1, ..., aNn) + .asInstanceOf[R] + ``` + If this call resolves to an `applyDynamic` method of the second form that takes a `Class[?]*` argument, we further rewrite this call to + ```scala + v.applyDynamic("a", c11, ..., c1n, ..., cN1, ... cNn)( + a11, ..., a1n, ..., aN1, ..., aNn) + .asInstanceOf[R] + ``` + where each `c_ij` is the literal `java.lang.Class[?]` of the type of the formal parameter `Tij`, i.e., `classOf[Tij]`. + +- If `U` is neither a value nor a method type, or a dependent method + type, an error is emitted. + +Note that `v`'s static type does not necessarily have to conform to `Selectable`, nor does it need to have `selectDynamic` and `applyDynamic` as members. 
It suffices that there is an implicit +conversion that can turn `v` into a `Selectable`, and the selection methods could also be available as +[extension methods](../contextual/extension-methods.md). + +## Limitations of Structural Types + +- Dependent methods cannot be called via structural call. + +- Refinements may not introduce overloads: If a refinement specifies the signature + of a method `m`, and `m` is also defined in the parent type of the refinement, then + the new signature must properly override the existing one. + +- Subtyping of structural refinements must preserve erased parameter types: Assume + we want to prove `S <: T { def m(x: A): B }`. Then, as usual, `S` must have a member method `m` that can take an argument of type `A`. Furthermore, if `m` is not a member of `T` (i.e. the refinement is structural), an additional condition applies. In this case, the member _definition_ `m` of `S` will have a parameter + with type `A'` say. The additional condition is that the erasure of `A'` and `A` is the same. Here is an example: + + ```scala + class Sink[A] { def put(x: A): Unit = {} } + val a = Sink[String]() + val b: { def put(x: String): Unit } = a // error + b.put("abc") // looks for a method with a `String` parameter + ``` + The second to last line is not well-typed, + since the erasure of the parameter type of `put` in class `Sink` is `Object`, + but the erasure of `put`'s parameter in the type of `b` is `String`. + This additional condition is necessary, since we will have to resort + to some (as yet unknown) form of reflection to call a structural member + like `put` in the type of `b` above. The condition ensures that the statically + known parameter types of the refinement correspond up to erasure to the + parameter types of the selected call target at runtime. + + Most reflection dispatch algorithms need to know exact erased parameter types. 
For instance, if the example above were to typecheck, the call + `b.put("abc")` on the last line would look for a method `put` in the runtime type of `b` that takes a `String` parameter. But the `put` method is the one from class `Sink`, which takes an `Object` parameter. Hence the call would fail at runtime with a `NoSuchMethodException`. + + One might hope for a "more intelligent" reflective dispatch algorithm that does not require exact parameter type matching. Unfortunately, this can always run into ambiguities, as long as overloading is a possibility. For instance, continuing the example above, we might introduce a new subclass `Sink1` of `Sink` and change the definition of `a` as follows: + + ```scala + class Sink1[A] extends Sink[A] { def put(x: "123") = ??? } + val a: Sink[String] = Sink1[String]() + ``` + + Now there are two `put` methods in the runtime type of `b` with erased parameter + types `Object` and `String`, respectively. Yet dynamic dispatch still needs to go + to the first `put` method, even though the second looks like a better match. + + For the cases where we can in fact implement reflection without knowing precise parameter types (for instance if static overloading is replaced by dynamically dispatched multi-methods), there is an escape hatch. For types that extend `scala.Selectable.WithoutPreciseParameterTypes` the signature check is omitted. Example: + + ```scala + trait MultiMethodSelectable extends Selectable.WithoutPreciseParameterTypes: + // Assume this version of `applyDynamic` can be implemented without knowing + // precise parameter types `paramTypes`: + def applyDynamic(name: String, paramTypes: Class[_]*)(args: Any*): Any = ??? + + class Sink[A] extends MultiMethodSelectable: + def put(x: A): Unit = {} + + val a = new Sink[String] + val b: MultiMethodSelectable { def put(x: String): Unit } = a // OK + ``` +## Differences with Scala 2 Structural Types + +- Scala 2 supports structural types by means of Java reflection. 
Unlike + Scala 3, structural calls do not rely on a mechanism such as + `Selectable`, and reflection cannot be avoided. +- In Scala 2, refinements can introduce overloads. +- In Scala 2, mutable `var`s are allowed in refinements. In Scala 3, + they are no longer allowed. +- Scala 2 does not impose the "same-erasure" restriction on subtyping of structural types. It allows some calls to fail at runtime instead. + +## Context + +For more information, see [Rethink Structural Types](https://github.com/lampepfl/dotty/issues/1886). diff --git a/docs/docs/reference/changed-features/structural-types.md b/docs/_docs/reference/changed-features/structural-types.md similarity index 78% rename from docs/docs/reference/changed-features/structural-types.md rename to docs/_docs/reference/changed-features/structural-types.md index 9a056fb2beee..cc07487feb4d 100644 --- a/docs/docs/reference/changed-features/structural-types.md +++ b/docs/_docs/reference/changed-features/structural-types.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Programmatic Structural Types" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/structural-types.html --- ## Motivation @@ -35,12 +36,12 @@ Here's an example of a structural type `Person`: ```scala class Record(elems: (String, Any)*) extends Selectable: - private val fields = elems.toMap - def selectDynamic(name: String): Any = fields(name) + private val fields = elems.toMap + def selectDynamic(name: String): Any = fields(name) type Person = Record { val name: String; val age: Int } ``` - + The type `Person` adds a _refinement_ to its parent type `Record` that defines the two fields `name` and `age`. We say the refinement is _structural_ since `name` and `age` are not defined in the parent type. But they exist nevertheless as members of class `Person`. For instance, the following program would print "Emma is 42 years old.": @@ -58,7 +59,7 @@ help from the user. 
In practice, the connection between a structural type and its underlying generic representation would most likely be done by a database layer, and therefore would not be a concern of the end user. -`Record` extends the marker trait `scala.Selectable` and defines +`Record` extends the marker trait [`scala.Selectable`](https://scala-lang.org/api/3.x/scala/Selectable.html) and defines a method `selectDynamic`, which maps a field name to its value. Selecting a structural type member is done by calling this method. The `person.name` and `person.age` selections are translated by @@ -89,7 +90,7 @@ Structural types can also be accessed using [Java reflection](https://www.oracle def close(): Unit ``` -Here, we define a structural type `Closeable` that defines a `close` method. There are various classes that have `close` methods, we just list `FileInputStream` and `Channel` as two examples. It would be easiest if the two classes shared a common interface that factors out the `close` method. But such factorings are often not possible if different libraries are combined in one application. Yet, we can still have methods that work on +Here, we define a structural type `Closeable` that defines a `close` method. There are various classes that have `close` methods, we just list [`FileInputStream`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/io/FileInputStream.html#%3Cinit%3E(java.io.File)) and [`Channel`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/nio/channels/Channel.html) as two examples. It would be easiest if the two classes shared a common interface that factors out the `close` method. But such factorings are often not possible if different libraries are combined in one application. Yet, we can still have methods that work on all classes with a `close` method by using the `Closeable` type. For instance, ```scala @@ -135,29 +136,28 @@ than other classes. 
Here is an example: ```scala trait Vehicle extends reflect.Selectable: - val wheels: Int + val wheels: Int val i3 = new Vehicle: // i3: Vehicle { val range: Int } - val wheels = 4 - val range = 240 + val wheels = 4 + val range = 240 i3.range ``` The type of `i3` in this example is `Vehicle { val range: Int }`. Hence, `i3.range` is well-formed. Since the base class `Vehicle` does not define a `range` field or method, we need structural dispatch to access the `range` field of the anonymous class that initializes `id3`. Structural dispatch -is implemented by the base trait `reflect.Selectable` of `Vehicle`, which -defines the necessary `selectDynamic` member. +is implemented by the base trait [`reflect.Selectable`](https://scala-lang.org/api/3.x/scala/reflect/Selectable.html) of `Vehicle`, which defines the necessary `selectDynamic` member. -`Vehicle` could also extend some other subclass of `scala.Selectable` that implements `selectDynamic` and `applyDynamic` differently. But if it does not extend a `Selectable` at all, the code would no longer typecheck: +`Vehicle` could also extend some other subclass of [`scala.Selectable`](https://scala-lang.org/api/3.x/scala/Selectable.html) that implements `selectDynamic` and `applyDynamic` differently. But if it does not extend a `Selectable` at all, the code would no longer typecheck: ```scala trait Vehicle: - val wheels: Int + val wheels: Int val i3 = new Vehicle: // i3: Vehicle - val wheels = 4 - val range = 240 + val wheels = 4 + val range = 240 i3.range // error: range is not a member of `Vehicle` ``` @@ -167,11 +167,11 @@ adding any refinements. Hence, `i3` now has just type `Vehicle` and the selectio Note that in Scala 2 all local and anonymous classes could produce values with refined types. But members defined by such refinements could be selected only with the language import -`reflectiveCalls`. +[`reflectiveCalls`](https://scala-lang.org/api/3.x/scala/languageFeature$$reflectiveCalls$.html). 
## Relation with `scala.Dynamic` -There are clearly some connections with `scala.Dynamic` here, since +There are clearly some connections with [`scala.Dynamic`](https://scala-lang.org/api/3.x/scala/Dynamic.html) here, since both select members programmatically. But there are also some differences. @@ -179,13 +179,13 @@ differences. is, as long as the correspondence of the structural type with the underlying value is as stated. -- `Dynamic` is just a marker trait, which gives more leeway where and +- [`Dynamic`](https://scala-lang.org/api/3.x/scala/Dynamic.html) is just a marker trait, which gives more leeway where and how to define reflective access operations. By contrast `Selectable` is a trait which declares the access operations. - Two access operations, `selectDynamic` and `applyDynamic` are shared between both approaches. In `Selectable`, `applyDynamic` also may also take - `java.lang.Class` arguments indicating the method's formal parameter types. - `Dynamic` comes with `updateDynamic`. + [`java.lang.Class`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Class.html) arguments indicating the method's formal parameter types. + [`Dynamic`](https://scala-lang.org/api/3.x/scala/Dynamic.html) comes with `updateDynamic`. 
[More details](structural-types-spec.md) diff --git a/docs/_docs/reference/changed-features/type-checking.md b/docs/_docs/reference/changed-features/type-checking.md new file mode 100644 index 000000000000..09a62698d4c7 --- /dev/null +++ b/docs/_docs/reference/changed-features/type-checking.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Changes in Type Checking" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/type-checking.html +--- + +*** **TO BE FILLED IN** *** diff --git a/docs/docs/reference/changed-features/type-inference.md b/docs/_docs/reference/changed-features/type-inference.md similarity index 78% rename from docs/docs/reference/changed-features/type-inference.md rename to docs/_docs/reference/changed-features/type-inference.md index b1cd806c0999..020878444914 100644 --- a/docs/docs/reference/changed-features/type-inference.md +++ b/docs/_docs/reference/changed-features/type-inference.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Changes in Type Inference" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/type-inference.html --- For more information, see the two presentations diff --git a/docs/docs/reference/changed-features/vararg-splices.md b/docs/_docs/reference/changed-features/vararg-splices.md similarity index 75% rename from docs/docs/reference/changed-features/vararg-splices.md rename to docs/_docs/reference/changed-features/vararg-splices.md index a787edd04225..1d8b61408176 100644 --- a/docs/docs/reference/changed-features/vararg-splices.md +++ b/docs/_docs/reference/changed-features/vararg-splices.md @@ -1,16 +1,17 @@ --- layout: doc-page title: "Vararg Splices" +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/vararg-splices.html --- The syntax of vararg splices in patterns and function arguments has changed. The new syntax uses a postfix `*`, analogously to how a vararg parameter is declared. 
```scala val arr = Array(0, 1, 2, 3) -val lst = List(arr*) // vararg splice argument +val lst = List(arr*) // vararg splice argument lst match - case List(0, 1, xs*) => println(xs) // binds xs to Seq(2, 3) - case List(1, _*) => // wildcard pattern + case List(0, 1, xs*) => println(xs) // binds xs to Seq(2, 3) + case List(1, _*) => // wildcard pattern ``` The old syntax for splice arguments will be phased out. @@ -18,12 +19,12 @@ The old syntax for splice arguments will be phased out. ```scala /*!*/ val lst = List(arr: _*) // syntax error lst match - case List(0, 1, xs @ _*) // ok, equivalent to `xs*` + case List(0, 1, xs @ _*) // ok, equivalent to `xs*` ``` ## Syntax -```ebnf +``` ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ | ‘(’ [Patterns ‘,’] Pattern2 ‘*’ ‘)’ diff --git a/docs/docs/reference/changed-features/wildcards.md b/docs/_docs/reference/changed-features/wildcards.md similarity index 78% rename from docs/docs/reference/changed-features/wildcards.md rename to docs/_docs/reference/changed-features/wildcards.md index 45d436e2cb19..3336f43c3795 100644 --- a/docs/docs/reference/changed-features/wildcards.md +++ b/docs/_docs/reference/changed-features/wildcards.md @@ -1,6 +1,7 @@ --- layout: doc-page title: Wildcard Arguments in Types +movedTo: https://docs.scala-lang.org/scala3/reference/changed-features/wildcards.html --- The syntax of wildcard arguments in types has changed from `_` to `?`. Example: @@ -42,3 +43,8 @@ option `-Ykind-projector`: 3. In Scala 3.3, `*` is removed again, and all type parameter placeholders will be expressed with `_`. These rules make it possible to cross build between Scala 2 using the kind projector plugin and Scala 3.0 - 3.2 using the compiler option `-Ykind-projector`. + +There is also a migration path for users that want a one-time transition to syntax with `_` as a type parameter placeholder. 
+With option `-Ykind-projector:underscores` Scala 3 will regard `_` as a type parameter placeholder, leaving `?` as the only syntax for wildcards. + +To cross-compile with old Scala 2 sources, while using `_` as a placeholder, you must use options `-Xsource:3 -P:kind-projector:underscore-placeholders` together with a recent version of kind-projector (`0.13` and higher) and most recent versions of Scala 2 (`2.13.5` and higher and `2.12.14` and higher). diff --git a/docs/docs/reference/contextual/by-name-context-parameters.md b/docs/_docs/reference/contextual/by-name-context-parameters.md similarity index 88% rename from docs/docs/reference/contextual/by-name-context-parameters.md rename to docs/_docs/reference/contextual/by-name-context-parameters.md index 549cdb4e08b7..8e8427f7457f 100644 --- a/docs/docs/reference/contextual/by-name-context-parameters.md +++ b/docs/_docs/reference/contextual/by-name-context-parameters.md @@ -1,20 +1,21 @@ --- layout: doc-page title: "By-Name Context Parameters" +movedTo: https://docs.scala-lang.org/scala3/reference/contextual/by-name-context-parameters.html --- Context parameters can be declared by-name to avoid a divergent inferred expansion. Example: ```scala trait Codec[T]: - def write(x: T): Unit + def write(x: T): Unit given intCodec: Codec[Int] = ??? given optionCodec[T](using ev: => Codec[T]): Codec[Option[T]] with - def write(xo: Option[T]) = xo match - case Some(x) => ev.write(x) - case None => + def write(xo: Option[T]) = xo match + case Some(x) => ev.write(x) + case None => val s = summon[Codec[Option[Int]]] @@ -52,7 +53,7 @@ In the example above, the definition of `s` would be expanded as follows. 
```scala val s = summon[Test.Codec[Option[Int]]]( - optionCodec[Int](using intCodec) + optionCodec[Int](using intCodec) ) ``` diff --git a/docs/docs/reference/contextual/context-bounds.md b/docs/_docs/reference/contextual/context-bounds.md similarity index 84% rename from docs/docs/reference/contextual/context-bounds.md rename to docs/_docs/reference/contextual/context-bounds.md index e227ad8b0b8a..e336f00cc463 100644 --- a/docs/docs/reference/contextual/context-bounds.md +++ b/docs/_docs/reference/contextual/context-bounds.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Context Bounds" +movedTo: https://docs.scala-lang.org/scala3/reference/contextual/context-bounds.html --- A context bound is a shorthand for expressing the common pattern of a context parameter that depends on a type parameter. Using a context bound, the `maximum` function of the last section can be written like this: @@ -9,7 +10,7 @@ A context bound is a shorthand for expressing the common pattern of a context pa def maximum[T: Ord](xs: List[T]): T = xs.reduceLeft(max) ``` -A bound like `: Ord` on a type parameter `T` of a method or class indicates a context parameter `with Ord[T]`. The context parameter(s) generated from context bounds come last in the definition of the containing method or class. For instance, +A bound like `: Ord` on a type parameter `T` of a method or class indicates a context parameter `using Ord[T]`. The context parameter(s) generated from context bounds come last in the definition of the containing method or class. 
For instance, ```scala def f[T: C1 : C2, U: C3](x: T)(using y: U, z: V): R diff --git a/docs/docs/reference/contextual/context-functions-spec.md b/docs/_docs/reference/contextual/context-functions-spec.md similarity index 90% rename from docs/docs/reference/contextual/context-functions-spec.md rename to docs/_docs/reference/contextual/context-functions-spec.md index 981bc208bf79..3c61e0e0c6ca 100644 --- a/docs/docs/reference/contextual/context-functions-spec.md +++ b/docs/_docs/reference/contextual/context-functions-spec.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Context Functions - More Details" +movedTo: https://docs.scala-lang.org/scala3/reference/contextual/context-functions-spec.html --- ## Syntax @@ -26,7 +27,7 @@ methods with context parameters. Specifically, the `N`-ary function type ```scala package scala trait ContextFunctionN[-T1, ..., -TN, +R]: - def apply(using x1: T1, ..., xN: TN): R + def apply(using x1: T1, ..., xN: TN): R ``` Context function types erase to normal function types, so these classes are @@ -49,7 +50,7 @@ The context function literal is evaluated as the instance creation expression ```scala new scala.ContextFunctionN[T1, ..., Tn, T]: - def apply(using x1: T1, ..., xn: Tn): T = e + def apply(using x1: T1, ..., xn: Tn): T = e ``` A context parameter may also be a wildcard represented by an underscore `_`. In that case, a fresh name for the parameter is chosen arbitrarily. @@ -62,7 +63,7 @@ Context function literals `(x1: T1, ..., xn: Tn) ?=> e` are automatically created for any expression `e` whose expected type is `scala.ContextFunctionN[T1, ..., Tn, R]`, unless `e` is itself a context function literal. This is analogous to the automatic -insertion of `scala.Function0` around expressions in by-name argument position. +insertion of [`scala.Function0`](https://scala-lang.org/api/3.x/scala/Function0.html) around expressions in by-name argument position. 
Context function types generalize to `N > 22` in the same way that function types do, see [the corresponding documentation](../dropped-features/limit22.md). diff --git a/docs/docs/reference/contextual/context-functions.md b/docs/_docs/reference/contextual/context-functions.md similarity index 82% rename from docs/docs/reference/contextual/context-functions.md rename to docs/_docs/reference/contextual/context-functions.md index 46b7ffa539ae..c556a0cadf8f 100644 --- a/docs/docs/reference/contextual/context-functions.md +++ b/docs/_docs/reference/contextual/context-functions.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Context Functions" +movedTo: https://docs.scala-lang.org/scala3/reference/contextual/context-functions.html --- _Context functions_ are functions with (only) context parameters. @@ -54,28 +55,28 @@ instance, here is how they can support the "builder pattern", where the aim is to construct tables like this: ```scala table { - row { - cell("top left") - cell("top right") - } - row { - cell("bottom left") - cell("bottom right") - } + row { + cell("top left") + cell("top right") + } + row { + cell("bottom left") + cell("bottom right") + } } ``` The idea is to define classes for `Table` and `Row` that allow the addition of elements via `add`: ```scala class Table: - val rows = new ArrayBuffer[Row] - def add(r: Row): Unit = rows += r - override def toString = rows.mkString("Table(", ", ", ")") + val rows = new ArrayBuffer[Row] + def add(r: Row): Unit = rows += r + override def toString = rows.mkString("Table(", ", ", ")") class Row: - val cells = new ArrayBuffer[Cell] - def add(c: Cell): Unit = cells += c - override def toString = cells.mkString("Row(", ", ", ")") + val cells = new ArrayBuffer[Cell] + def add(c: Cell): Unit = cells += c + override def toString = cells.mkString("Row(", ", ", ")") case class Cell(elem: String) ``` @@ -84,9 +85,9 @@ with context function types as parameters to avoid the plumbing boilerplate that would otherwise be necessary. 
```scala def table(init: Table ?=> Unit) = - given t: Table = Table() - init - t + given t: Table = Table() + init + t def row(init: Row ?=> Unit)(using t: Table) = given r: Row = Row() @@ -117,14 +118,14 @@ As a larger example, here is a way to define constructs for checking arbitrary p ```scala object PostConditions: - opaque type WrappedResult[T] = T + opaque type WrappedResult[T] = T - def result[T](using r: WrappedResult[T]): T = r + def result[T](using r: WrappedResult[T]): T = r - extension [T](x: T) - def ensuring(condition: WrappedResult[T] ?=> Boolean): T = - assert(condition(using x)) - x + extension [T](x: T) + def ensuring(condition: WrappedResult[T] ?=> Boolean): T = + assert(condition(using x)) + x end PostConditions import PostConditions.{ensuring, result} @@ -137,14 +138,13 @@ scope to pass along to the `result` method. `WrappedResult` is a fresh type, to that we do not get unwanted givens in scope (this is good practice in all cases where context parameters are involved). Since `WrappedResult` is an opaque type alias, its values need not be boxed, and since `ensuring` is added as an extension method, its argument -does not need boxing either. Hence, the implementation of `ensuring` is as about as efficient -as the best possible code one could write by hand: +does not need boxing either. 
Hence, the implementation of `ensuring` is close in efficiency to the best possible code one could write by hand: ```scala val s = - val result = List(1, 2, 3).sum - assert(result == 6) - result + val result = List(1, 2, 3).sum + assert(result == 6) + result ``` ### Reference diff --git a/docs/_docs/reference/contextual/contextual.md b/docs/_docs/reference/contextual/contextual.md new file mode 100644 index 000000000000..a2fe095c44c0 --- /dev/null +++ b/docs/_docs/reference/contextual/contextual.md @@ -0,0 +1,83 @@ +--- +layout: doc-page +title: "Contextual Abstractions" +movedTo: https://docs.scala-lang.org/scala3/reference/contextual.html +--- + +### Critique of the Status Quo + +Scala's implicits are its most distinguished feature. They are _the_ fundamental way to abstract over context. They represent a unified paradigm with a great variety of use cases, among them: implementing type classes, establishing context, dependency injection, expressing capabilities, computing new types and proving relationships between them. + +Following Haskell, Scala was the second popular language to have some form of implicits. Other languages have followed suit. E.g [Rust's traits](https://doc.rust-lang.org/rust-by-example/trait.html) or [Swift's protocol extensions](https://docs.swift.org/swift-book/LanguageGuide/Protocols.html#ID521). Design proposals are also on the table for Kotlin as [compile time dependency resolution](https://github.com/Kotlin/KEEP/blob/e863b25f8b3f2e9b9aaac361c6ee52be31453ee0/proposals/compile-time-dependency-resolution.md), for C# as [Shapes and Extensions](https://github.com/dotnet/csharplang/issues/164) +or for F# as [Traits](https://github.com/MattWindsor91/visualfsharp/blob/hackathon-vs/examples/fsconcepts.md). Implicits are also a common feature of theorem provers such as [Coq](https://coq.inria.fr/refman/language/extensions/implicit-arguments.html) or [Agda](https://agda.readthedocs.io/en/latest/language/implicit-arguments.html). 
+ +Even though these designs use widely different terminology, they are all variants of the core idea of _term inference_. Given a type, the compiler synthesizes a "canonical" term that has that type. Scala embodies the idea in a purer form than most other languages: An implicit parameter directly leads to an inferred argument term that could also be written down explicitly. By contrast, type class based designs are less direct since they hide term inference behind some form of type classification and do not offer the option of writing the inferred quantities (typically, dictionaries) explicitly. + +Given that term inference is where the industry is heading, and given that Scala has it in a very pure form, how come implicits are not more popular? In fact, it's fair to say that implicits are at the same time Scala's most distinguished and most controversial feature. I believe this is due to a number of aspects that together make implicits harder to learn than necessary and also make it harder to prevent abuses. + +Particular criticisms are: + +1. Being very powerful, implicits are easily over-used and mis-used. This observation holds in almost all cases when we talk about _implicit conversions_, which, even though conceptually different, share the same syntax with other implicit definitions. For instance, regarding the two definitions + + ```scala + implicit def i1(implicit x: T): C[T] = ... + implicit def i2(x: T): C[T] = ... + ``` + + the first of these is a conditional implicit _value_, the second an implicit _conversion_. Conditional implicit values are a cornerstone for expressing type classes, whereas most applications of implicit conversions have turned out to be of dubious value. The problem is that many newcomers to the language start with defining implicit conversions since they are easy to understand and seem powerful and convenient. 
Scala 3 will put under a language flag both definitions and applications of "undisciplined" implicit conversions between types defined elsewhere. This is a useful step to push back against overuse of implicit conversions. But the problem remains that syntactically, conversions and values just look too similar for comfort. + +2. Another widespread abuse is over-reliance on implicit imports. This often leads to inscrutable type errors that go away with the right import incantation, leaving a feeling of frustration. Conversely, it is hard to see what implicits a program uses since implicits can hide anywhere in a long list of imports. + +3. The syntax of implicit definitions is too minimal. It consists of a single modifier, `implicit`, that can be attached to a large number of language constructs. A problem with this for newcomers is that it conveys mechanism instead of intent. For instance, a type class instance is an implicit object or val if unconditional and an implicit def with implicit parameters referring to some class if conditional. This describes precisely what the implicit definitions translate to -- just drop the `implicit` modifier, and that's it! But the cues that define intent are rather indirect and can be easily misread, as demonstrated by the definitions of `i1` and `i2` above. + +4. The syntax of implicit parameters also has shortcomings. While implicit _parameters_ are designated specifically, arguments are not. Passing an argument to an implicit parameter looks like a regular application `f(arg)`. This is problematic because it means there can be confusion regarding what parameter gets instantiated in a call. For instance, in + + ```scala + def currentMap(implicit ctx: Context): Map[String, Int] + ``` + + one cannot write `currentMap("abc")` since the string `"abc"` is taken as explicit argument to the implicit `ctx` parameter. One has to write `currentMap.apply("abc")` instead, which is awkward and irregular. 
For the same reason, a method definition can only have one implicit parameter section and it must always come last. This restriction not only reduces orthogonality, but also prevents some useful program constructs, such as a method with a regular parameter whose type depends on an implicit value. Finally, it's also a bit annoying that implicit parameters must have a name, even though in many cases that name is never referenced. + +5. Implicits pose challenges for tooling. The set of available implicits depends on context, so command completion has to take context into account. This is feasible in an IDE but tools like [Scaladoc](https://docs.scala-lang.org/overviews/scaladoc/overview.html) that are based on static web pages can only provide an approximation. Another problem is that failed implicit searches often give very unspecific error messages, in particular if some deeply recursive implicit search has failed. Note that the Scala 3 compiler has already made a lot of progress in the error diagnostics area. If a recursive search fails some levels down, it shows what was constructed and what is missing. Also, it suggests imports that can bring missing implicits in scope. + +None of the shortcomings is fatal, after all implicits are very widely used, and many libraries and applications rely on them. But together, they make code using implicits a lot more cumbersome and less clear than it could be. + +Historically, many of these shortcomings come from the way implicits were gradually "discovered" in Scala. Scala originally had only implicit conversions with the intended use case of "extending" a class or trait after it was defined, i.e. what is expressed by implicit classes in later versions of Scala. Implicit parameters and instance definitions came later in 2006 and we picked similar syntax since it seemed convenient. For the same reason, no effort was made to distinguish implicit imports or arguments from normal ones. 
+ +Existing Scala programmers by and large have gotten used to the status quo and see little need for change. But for newcomers this status quo presents a big hurdle. I believe if we want to overcome that hurdle, we should take a step back and allow ourselves to consider a radically new design. + +### The New Design + +The following pages introduce a redesign of contextual abstractions in Scala. They introduce four fundamental changes: + +1. [Given Instances](./givens.md) are a new way to define basic terms that can be synthesized. They replace implicit definitions. The core principle of the proposal is that, rather than mixing the `implicit` modifier with a large number of features, we have a single way to define terms that can be synthesized for types. + +2. [Using Clauses](./using-clauses.md) are a new syntax for implicit _parameters_ and their _arguments_. It unambiguously aligns parameters and arguments, solving a number of language warts. It also allows us to have several `using` clauses in a definition. + +3. ["Given" Imports](./given-imports.md) are a new class of import selectors that specifically import + givens and nothing else. + +4. [Implicit Conversions](./conversions.md) are now expressed as given instances of a standard `Conversion` class. All other forms of implicit conversions will be phased out. + +This section also contains pages describing other language features that are related to context abstraction. These are: + +- [Context Bounds](./context-bounds.md), which carry over unchanged. +- [Extension Methods](./extension-methods.md) replace implicit classes in a way that integrates better with type classes. +- [Implementing Type Classes](./type-classes.md) demonstrates how some common type classes can be implemented using the new constructs. +- [Type Class Derivation](./derivation.md) introduces constructs to automatically derive type class instances for ADTs. 
+- [Multiversal Equality](./multiversal-equality.md) introduces a special type class to support type safe equality. +- [Context Functions](./context-functions.md) provide a way to abstract over context parameters. +- [By-Name Context Parameters](./by-name-context-parameters.md) are an essential tool to define recursive synthesized values without looping. +- [Relationship with Scala 2 Implicits](./relationship-implicits.md) discusses the relationship between old-style implicits and new-style givens and how to migrate from one to the other. + +Overall, the new design achieves a better separation of term inference from the rest of the language: There is a single way to define givens instead of a multitude of forms all taking an `implicit` modifier. There is a single way to introduce implicit parameters and arguments instead of conflating implicit with normal arguments. There is a separate way to import givens that does not allow them to hide in a sea of normal imports. And there is a single way to define an implicit conversion which is clearly marked as such and does not require special syntax. + +This design thus avoids feature interactions and makes the language more consistent and orthogonal. It will make implicits easier to learn and harder to abuse. It will greatly improve the clarity of the 95% of Scala programs that use implicits. It has thus the potential to fulfil the promise of term inference in a principled way that is also accessible and friendly. + +Could we achieve the same goals by tweaking existing implicits? After having tried for a long time, I believe now that this is impossible. + +- First, some of the problems are clearly syntactic and require different syntax to solve them. +- Second, there is the problem of how to migrate. We cannot change the rules in mid-flight. At some stage of language evolution we need to accommodate both the new and the old rules. 
With a syntax change, this is easy: Introduce the new syntax with new rules, support the old syntax for a while to facilitate cross compilation, deprecate and phase out the old syntax at some later time. Keeping the same syntax does not offer this path, and in fact does not seem to offer any viable path for evolution. +- Third, even if we would somehow succeed with migration, we still have the problem of + how to teach this. We cannot make existing tutorials go away. Almost all existing tutorials start with implicit conversions, which will go away; they use normal imports, which will go away, and they explain calls to methods with implicit parameters by expanding them to plain applications, which will also go away. This means that we'd have + to add modifications and qualifications to all existing literature and courseware, likely causing more confusion with beginners instead of less. By contrast, with a new syntax there is a clear criterion: Any book or courseware that mentions `implicit` is outdated and should be updated. diff --git a/docs/_docs/reference/contextual/conversions.md b/docs/_docs/reference/contextual/conversions.md new file mode 100644 index 000000000000..9928719663e2 --- /dev/null +++ b/docs/_docs/reference/contextual/conversions.md @@ -0,0 +1,76 @@ +--- +layout: doc-page +title: "Implicit Conversions" +movedTo: https://docs.scala-lang.org/scala3/reference/contextual/conversions.html +--- + +Implicit conversions are defined by given instances of the `scala.Conversion` class. 
+This class is defined in package `scala` as follows: +```scala +abstract class Conversion[-T, +U] extends (T => U): + def apply (x: T): U +``` +For example, here is an implicit conversion from `String` to `Token`: +```scala +given Conversion[String, Token] with + def apply(str: String): Token = new KeyWord(str) +``` +Using an alias this can be expressed more concisely as: +```scala +given Conversion[String, Token] = new KeyWord(_) +``` +An implicit conversion is applied automatically by the compiler in three situations: + +1. If an expression `e` has type `T`, and `T` does not conform to the expression's expected type `S`. +2. In a selection `e.m` with `e` of type `T`, but `T` defines no member `m`. +3. In an application `e.m(args)` with `e` of type `T`, if `T` does define + some member(s) named `m`, but none of these members can be applied to the arguments `args`. + +In the first case, the compiler looks for a given `scala.Conversion` instance that maps +an argument of type `T` to type `S`. In the second and third +case, it looks for a given `scala.Conversion` instance that maps an argument of type `T` +to a type that defines a member `m` which can be applied to `args` if present. +If such an instance `C` is found, the expression `e` is replaced by `C.apply(e)`. + +## Examples + +1. The `Predef` package contains "auto-boxing" conversions that map +primitive number types to subclasses of `java.lang.Number`. For instance, the +conversion from `Int` to `java.lang.Integer` can be defined as follows: + ```scala + given int2Integer: Conversion[Int, java.lang.Integer] = + java.lang.Integer.valueOf(_) + ``` + +2. The "magnet" pattern is sometimes used to express many variants of a method. Instead of defining overloaded versions of the method, one can also let the method take one or more arguments of specially defined "magnet" types, into which various argument types can be converted. 
Example: + ```scala + object Completions: + + // The argument "magnet" type + enum CompletionArg: + case Error(s: String) + case Response(f: Future[HttpResponse]) + case Status(code: Future[StatusCode]) + + object CompletionArg: + + // conversions defining the possible arguments to pass to `complete` + // these always come with CompletionArg + // They can be invoked explicitly, e.g. + // + // CompletionArg.fromStatusCode(statusCode) + + given fromString : Conversion[String, CompletionArg] = Error(_) + given fromFuture : Conversion[Future[HttpResponse], CompletionArg] = Response(_) + given fromStatusCode: Conversion[Future[StatusCode], CompletionArg] = Status(_) + end CompletionArg + import CompletionArg.* + + def complete[T](arg: CompletionArg) = arg match + case Error(s) => ... + case Response(f) => ... + case Status(code) => ... + + end Completions + ``` +This setup is more complicated than simple overloading of `complete`, but it can still be useful if normal overloading is not available (as in the case above, since we cannot have two overloaded methods that take `Future[...]` arguments), or if normal overloading would lead to a combinatorial explosion of variants. 
diff --git a/docs/docs/reference/contextual/derivation-macro.md b/docs/_docs/reference/contextual/derivation-macro.md similarity index 85% rename from docs/docs/reference/contextual/derivation-macro.md rename to docs/_docs/reference/contextual/derivation-macro.md index 22b4d0663c7e..5ff0007268dd 100644 --- a/docs/docs/reference/contextual/derivation-macro.md +++ b/docs/_docs/reference/contextual/derivation-macro.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "How to write a type class `derived` method using macros" +movedTo: https://docs.scala-lang.org/scala3/reference/contextual/derivation-macro.html --- In the main [derivation](./derivation.md) documentation page, we explained the @@ -17,7 +18,7 @@ As in the original code, the type class definition is the same: ```scala trait Eq[T]: - def eqv(x: T, y: T): Boolean + def eqv(x: T, y: T): Boolean ``` we need to implement a method `Eq.derived` on the companion object of `Eq` that @@ -41,25 +42,25 @@ from the signature. The body of the `derived` method is shown below: ```scala given derived[T: Type](using Quotes): Expr[Eq[T]] = - import quotes.reflect.* + import quotes.reflect.* - val ev: Expr[Mirror.Of[T]] = Expr.summon[Mirror.Of[T]].get + val ev: Expr[Mirror.Of[T]] = Expr.summon[Mirror.Of[T]].get - ev match - case '{ $m: Mirror.ProductOf[T] { type MirroredElemTypes = elementTypes }} => - val elemInstances = summonAll[elementTypes] - val eqProductBody: (Expr[T], Expr[T]) => Expr[Boolean] = (x, y) => - elemInstances.zipWithIndex.foldLeft(Expr(true: Boolean)) { - case (acc, (elem, index)) => - val e1 = '{$x.asInstanceOf[Product].productElement(${Expr(index)})} - val e2 = '{$y.asInstanceOf[Product].productElement(${Expr(index)})} - '{ $acc && $elem.asInstanceOf[Eq[Any]].eqv($e1, $e2) } - } + ev match + case '{ $m: Mirror.ProductOf[T] { type MirroredElemTypes = elementTypes }} => + val elemInstances = summonAll[elementTypes] + val eqProductBody: (Expr[T], Expr[T]) => Expr[Boolean] = (x, y) => + 
elemInstances.zipWithIndex.foldLeft(Expr(true: Boolean)) { + case (acc, (elem, index)) => + val e1 = '{$x.asInstanceOf[Product].productElement(${Expr(index)})} + val e2 = '{$y.asInstanceOf[Product].productElement(${Expr(index)})} + '{ $acc && $elem.asInstanceOf[Eq[Any]].eqv($e1, $e2) } + } - '{ eqProduct((x: T, y: T) => ${eqProductBody('x, 'y)}) } + '{ eqProduct((x: T, y: T) => ${eqProductBody('x, 'y)}) } - // case for Mirror.ProductOf[T] - // ... + // case for Mirror.ProductOf[T] + // ... ``` Note, that in the `inline` case we can merely write @@ -97,7 +98,7 @@ One additional difference with the body of `derived` here as opposed to the one with `inline` is that with macros we need to synthesize the body of the code during the macro-expansion time. That is the rationale behind the `eqProductBody` function. Assuming that we calculate the equality of two `Person`s defined with a case -class that holds a name of type [`String`](https://dotty.epfl.ch/api/scala/Predef$.html#String) +class that holds a name of type [`String`](https://scala-lang.org/api/3.x/scala/Predef$.html#String-0) and an age of type `Int`, the equality check we want to generate is the following: ```scala @@ -108,7 +109,7 @@ and an age of type `Int`, the equality check we want to generate is the followin ## Calling the derived method inside the macro -Following the rules in [Macros](../metaprogramming/toc.md) we create two methods. +Following the rules in [Macros](../metaprogramming/metaprogramming.md) we create two methods. One that hosts the top-level splice `eqv` and one that is the implementation. Alternatively and what is shown below is that we can call the `eqv` method directly. The `eqGen` can trigger the derivation. 
diff --git a/docs/docs/reference/contextual/derivation.md b/docs/_docs/reference/contextual/derivation.md similarity index 75% rename from docs/docs/reference/contextual/derivation.md rename to docs/_docs/reference/contextual/derivation.md index 9a89fad25324..843116217d56 100644 --- a/docs/docs/reference/contextual/derivation.md +++ b/docs/_docs/reference/contextual/derivation.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Type Class Derivation" +movedTo: https://docs.scala-lang.org/scala3/reference/contextual/derivation.html --- Type class derivation is a way to automatically generate given instances for type classes which satisfy some simple @@ -10,8 +11,8 @@ on. Common examples are `Eq`, `Ordering`, or `Show`. For example, given the foll ```scala enum Tree[T] derives Eq, Ordering, Show: - case Branch(left: Tree[T], right: Tree[T]) - case Leaf(elem: T) + case Branch(left: Tree[T], right: Tree[T]) + case Leaf(elem: T) ``` The `derives` clause generates the following given instances for the `Eq`, `Ordering` and `Show` type classes in the @@ -42,37 +43,37 @@ derivation support. 
```scala sealed trait Mirror: - /** the type being mirrored */ - type MirroredType + /** the type being mirrored */ + type MirroredType - /** the type of the elements of the mirrored type */ - type MirroredElemTypes + /** the type of the elements of the mirrored type */ + type MirroredElemTypes - /** The mirrored *-type */ - type MirroredMonoType + /** The mirrored *-type */ + type MirroredMonoType - /** The name of the type */ - type MirroredLabel <: String + /** The name of the type */ + type MirroredLabel <: String - /** The names of the elements of the type */ - type MirroredElemLabels <: Tuple + /** The names of the elements of the type */ + type MirroredElemLabels <: Tuple object Mirror: - /** The Mirror for a product type */ - trait Product extends Mirror: + /** The Mirror for a product type */ + trait Product extends Mirror: - /** Create a new instance of type `T` with elements - * taken from product `p`. - */ - def fromProduct(p: scala.Product): MirroredMonoType + /** Create a new instance of type `T` with elements + * taken from product `p`. + */ + def fromProduct(p: scala.Product): MirroredMonoType - trait Sum extends Mirror: + trait Sum extends Mirror: - /** The ordinal number of the case class of `x`. - * For enums, `ordinal(x) == x.ordinal` - */ - def ordinal(x: MirroredMonoType): Int + /** The ordinal number of the case class of `x`. 
+ * For enums, `ordinal(x) == x.ordinal` + */ + def ordinal(x: MirroredMonoType): Int end Mirror ``` @@ -85,37 +86,37 @@ For the `Tree` ADT from above the following `Mirror` instances will be automatic ```scala // Mirror for Tree new Mirror.Sum: - type MirroredType = Tree - type MirroredElemTypes[T] = (Branch[T], Leaf[T]) - type MirroredMonoType = Tree[_] - type MirroredLabels = "Tree" - type MirroredElemLabels = ("Branch", "Leaf") + type MirroredType = Tree + type MirroredElemTypes[T] = (Branch[T], Leaf[T]) + type MirroredMonoType = Tree[_] + type MirroredLabel = "Tree" + type MirroredElemLabels = ("Branch", "Leaf") - def ordinal(x: MirroredMonoType): Int = x match - case _: Branch[_] => 0 - case _: Leaf[_] => 1 + def ordinal(x: MirroredMonoType): Int = x match + case _: Branch[_] => 0 + case _: Leaf[_] => 1 // Mirror for Branch new Mirror.Product: - type MirroredType = Branch - type MirroredElemTypes[T] = (Tree[T], Tree[T]) - type MirroredMonoType = Branch[_] - type MirroredLabels = "Branch" - type MirroredElemLabels = ("left", "right") + type MirroredType = Branch + type MirroredElemTypes[T] = (Tree[T], Tree[T]) + type MirroredMonoType = Branch[_] + type MirroredLabel = "Branch" + type MirroredElemLabels = ("left", "right") - def fromProduct(p: Product): MirroredMonoType = - new Branch(...) + def fromProduct(p: Product): MirroredMonoType = + new Branch(...) // Mirror for Leaf new Mirror.Product: - type MirroredType = Leaf - type MirroredElemTypes[T] = Tuple1[T] - type MirroredMonoType = Leaf[_] - type MirroredLabels = "Leaf" - type MirroredElemLabels = Tuple1["elem"] - - def fromProduct(p: Product): MirroredMonoType = - new Leaf(...) + type MirroredType = Leaf + type MirroredElemTypes[T] = Tuple1[T] + type MirroredMonoType = Leaf[_] + type MirroredLabel = "Leaf" + type MirroredElemLabels = Tuple1["elem"] + + def fromProduct(p: Product): MirroredMonoType = + new Leaf(...) 
``` Note the following properties of `Mirror` types, @@ -142,6 +143,8 @@ signature and implementation of a `derived` method for a type class `TC[_]` are following form, ```scala +import scala.deriving.Mirror + def derived[T](using Mirror.Of[T]): TC[T] = ... ``` @@ -169,18 +172,20 @@ type-level constructs in Scala 3: inline methods, inline matches, and implicit s ```scala trait Eq[T]: - def eqv(x: T, y: T): Boolean + def eqv(x: T, y: T): Boolean ``` we need to implement a method `Eq.derived` on the companion object of `Eq` that produces a given instance for `Eq[T]` given a `Mirror[T]`. Here is a possible implementation, ```scala +import scala.deriving.Mirror + inline given derived[T](using m: Mirror.Of[T]): Eq[T] = - val elemInstances = summonAll[m.MirroredElemTypes] // (1) - inline m match // (2) - case s: Mirror.SumOf[T] => eqSum(s, elemInstances) - case p: Mirror.ProductOf[T] => eqProduct(p, elemInstances) + val elemInstances = summonAll[m.MirroredElemTypes] // (1) + inline m match // (2) + case s: Mirror.SumOf[T] => eqSum(s, elemInstances) + case p: Mirror.ProductOf[T] => eqProduct(p, elemInstances) ``` Note that `derived` is defined as an `inline` given. This means that the method will be expanded at @@ -194,9 +199,9 @@ implementation of `summonAll` is `inline` and uses Scala 3's `summonInline` cons ```scala inline def summonAll[T <: Tuple]: List[Eq[_]] = - inline erasedValue[T] match - case _: EmptyTuple => Nil - case _: (t *: ts) => summonInline[Eq[t]] :: summonAll[ts] + inline erasedValue[T] match + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[Eq[t]] :: summonAll[ts] ``` with the instances for children in hand the `derived` method uses an `inline match` to dispatch to methods which can @@ -209,23 +214,27 @@ values are of the same subtype of the ADT (3) and then, if they are, to further instance for the appropriate ADT subtype using the auxiliary method `check` (4). 
```scala +import scala.deriving.Mirror + def eqSum[T](s: Mirror.SumOf[T], elems: List[Eq[_]]): Eq[T] = - new Eq[T]: - def eqv(x: T, y: T): Boolean = - val ordx = s.ordinal(x) // (3) - (s.ordinal(y) == ordx) && check(elems(ordx))(x, y) // (4) + new Eq[T]: + def eqv(x: T, y: T): Boolean = + val ordx = s.ordinal(x) // (3) + (s.ordinal(y) == ordx) && check(elems(ordx))(x, y) // (4) ``` In the product case, `eqProduct` we test the runtime values of the arguments to `eqv` for equality as products based on the `Eq` instances for the fields of the data type (5), ```scala +import scala.deriving.Mirror + def eqProduct[T](p: Mirror.ProductOf[T], elems: List[Eq[_]]): Eq[T] = - new Eq[T]: - def eqv(x: T, y: T): Boolean = - iterator(x).zip(iterator(y)).zip(elems.iterator).forall { // (5) - case ((x, y), elem) => check(elem)(x, y) - } + new Eq[T]: + def eqv(x: T, y: T): Boolean = + iterator(x).zip(iterator(y)).zip(elems.iterator).forall { // (5) + case ((x, y), elem) => check(elem)(x, y) + } ``` Pulling this all together we have the following complete implementation, @@ -235,40 +244,40 @@ import scala.deriving.* import scala.compiletime.{erasedValue, summonInline} inline def summonAll[T <: Tuple]: List[Eq[_]] = - inline erasedValue[T] match - case _: EmptyTuple => Nil - case _: (t *: ts) => summonInline[Eq[t]] :: summonAll[ts] + inline erasedValue[T] match + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[Eq[t]] :: summonAll[ts] trait Eq[T]: - def eqv(x: T, y: T): Boolean + def eqv(x: T, y: T): Boolean object Eq: - given Eq[Int] with - def eqv(x: Int, y: Int) = x == y - - def check(elem: Eq[_])(x: Any, y: Any): Boolean = - elem.asInstanceOf[Eq[Any]].eqv(x, y) - - def iterator[T](p: T) = p.asInstanceOf[Product].productIterator - - def eqSum[T](s: Mirror.SumOf[T], elems: => List[Eq[_]]): Eq[T] = - new Eq[T]: - def eqv(x: T, y: T): Boolean = - val ordx = s.ordinal(x) - (s.ordinal(y) == ordx) && check(elems(ordx))(x, y) - - def eqProduct[T](p: Mirror.ProductOf[T], 
elems: => List[Eq[_]]): Eq[T] = - new Eq[T]: - def eqv(x: T, y: T): Boolean = - iterator(x).zip(iterator(y)).zip(elems.iterator).forall { - case ((x, y), elem) => check(elem)(x, y) - } - - inline given derived[T](using m: Mirror.Of[T]): Eq[T] = - lazy val elemInstances = summonAll[m.MirroredElemTypes] - inline m match - case s: Mirror.SumOf[T] => eqSum(s, elemInstances) - case p: Mirror.ProductOf[T] => eqProduct(p, elemInstances) + given Eq[Int] with + def eqv(x: Int, y: Int) = x == y + + def check(elem: Eq[_])(x: Any, y: Any): Boolean = + elem.asInstanceOf[Eq[Any]].eqv(x, y) + + def iterator[T](p: T) = p.asInstanceOf[Product].productIterator + + def eqSum[T](s: Mirror.SumOf[T], elems: => List[Eq[_]]): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = + val ordx = s.ordinal(x) + (s.ordinal(y) == ordx) && check(elems(ordx))(x, y) + + def eqProduct[T](p: Mirror.ProductOf[T], elems: => List[Eq[_]]): Eq[T] = + new Eq[T]: + def eqv(x: T, y: T): Boolean = + iterator(x).zip(iterator(y)).zip(elems.iterator).forall { + case ((x, y), elem) => check(elem)(x, y) + } + + inline given derived[T](using m: Mirror.Of[T]): Eq[T] = + lazy val elemInstances = summonAll[m.MirroredElemTypes] + inline m match + case s: Mirror.SumOf[T] => eqSum(s, elemInstances) + case p: Mirror.ProductOf[T] => eqProduct(p, elemInstances) end Eq ``` @@ -276,15 +285,15 @@ we can test this relative to a simple ADT like so, ```scala enum Opt[+T] derives Eq: - case Sm(t: T) - case Nn + case Sm(t: T) + case Nn @main def test(): Unit = - import Opt.* - val eqoi = summon[Eq[Opt[Int]]] - assert(eqoi.eqv(Sm(23), Sm(23))) - assert(!eqoi.eqv(Sm(23), Sm(13))) - assert(!eqoi.eqv(Sm(23), Nn)) + import Opt.* + val eqoi = summon[Eq[Opt[Int]]] + assert(eqoi.eqv(Sm(23), Sm(23))) + assert(!eqoi.eqv(Sm(23), Sm(13))) + assert(!eqoi.eqv(Sm(23), Nn)) ``` In this case the code that is generated by the inline expansion for the derived `Eq` instance for `Opt` looks like the @@ -292,13 +301,13 @@ following, after a little 
polishing, ```scala given derived$Eq[T](using eqT: Eq[T]): Eq[Opt[T]] = - eqSum( - summon[Mirror[Opt[T]]], - List( - eqProduct(summon[Mirror[Sm[T]]], List(summon[Eq[T]])), - eqProduct(summon[Mirror[Nn.type]], Nil) - ) - ) + eqSum( + summon[Mirror[Opt[T]]], + List( + eqProduct(summon[Mirror[Sm[T]]], List(summon[Eq[T]])), + eqProduct(summon[Mirror[Nn.type]], Nil) + ) + ) ``` Alternative approaches can be taken to the way that `derived` methods can be defined. For example, more aggressively @@ -310,18 +319,18 @@ As a third example, using a higher level library such as Shapeless the type clas ```scala given eqSum[A](using inst: => K0.CoproductInstances[Eq, A]): Eq[A] with - def eqv(x: A, y: A): Boolean = inst.fold2(x, y)(false)( - [t] => (eqt: Eq[t], t0: t, t1: t) => eqt.eqv(t0, t1) - ) + def eqv(x: A, y: A): Boolean = inst.fold2(x, y)(false)( + [t] => (eqt: Eq[t], t0: t, t1: t) => eqt.eqv(t0, t1) + ) given eqProduct[A](using inst: K0.ProductInstances[Eq, A]): Eq[A] with - def eqv(x: A, y: A): Boolean = inst.foldLeft2(x, y)(true: Boolean)( - [t] => (acc: Boolean, eqt: Eq[t], t0: t, t1: t) => - Complete(!eqt.eqv(t0, t1))(false)(true) - ) + def eqv(x: A, y: A): Boolean = inst.foldLeft2(x, y)(true: Boolean)( + [t] => (acc: Boolean, eqt: Eq[t], t0: t, t1: t) => + Complete(!eqt.eqv(t0, t1))(false)(true) + ) -inline def derived[A](using gen: K0.Generic[A]) as Eq[A] = - gen.derive(eqSum, eqProduct) +inline def derived[A](using gen: K0.Generic[A]): Eq[A] = + gen.derive(eqSum, eqProduct) ``` The framework described here enables all three of these approaches without mandating any of them. 
@@ -345,7 +354,7 @@ hand side of this definition in the same way as an instance defined in ADT compa ### Syntax -```ebnf +``` Template ::= InheritClauses [TemplateBody] EnumDef ::= id ClassConstr InheritClauses EnumBody InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] diff --git a/docs/docs/reference/contextual/extension-methods.md b/docs/_docs/reference/contextual/extension-methods.md similarity index 76% rename from docs/docs/reference/contextual/extension-methods.md rename to docs/_docs/reference/contextual/extension-methods.md index 0052ebb06dc9..c2a12081cf99 100644 --- a/docs/docs/reference/contextual/extension-methods.md +++ b/docs/_docs/reference/contextual/extension-methods.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Extension Methods" +movedTo: https://docs.scala-lang.org/scala3/reference/contextual/extension-methods.html --- Extension methods allow one to add methods to a type after the type is defined. Example: @@ -9,7 +10,7 @@ Extension methods allow one to add methods to a type after the type is defined. case class Circle(x: Double, y: Double, radius: Double) extension (c: Circle) - def circumference: Double = c.radius * math.Pi * 2 + def circumference: Double = c.radius * math.Pi * 2 ``` Like regular methods, extension methods can be invoked with infix `.`: @@ -24,7 +25,7 @@ circle.circumference An extension method translates to a specially labelled method that takes the leading parameter section as its first argument list. The label, expressed as `` here, is compiler-internal. So, the definition of `circumference` above translates to the following method, and can also be invoked as such: -```scala +``` def circumference(c: Circle): Double = c.radius * math.Pi * 2 assert(circle.circumference == circumference(circle)) @@ -36,11 +37,11 @@ The extension method syntax can also be used to define operators. Examples: ```scala extension (x: String) - def < (y: String): Boolean = ... + def < (y: String): Boolean = ... 
extension (x: Elem) - def +: (xs: Seq[Elem]): Seq[Elem] = ... + def +: (xs: Seq[Elem]): Seq[Elem] = ... extension (x: Number) - infix def min (y: Number): Number = ... + infix def min (y: Number): Number = ... "ab" < "c" 1 +: List(2, 3) @@ -49,7 +50,7 @@ x min 3 The three definitions above translate to -```scala +``` def < (x: String)(y: String): Boolean = ... def +: (xs: Seq[Elem])(x: Elem): Seq[Elem] = ... infix def min(x: Number)(y: Number): Number = ... @@ -68,10 +69,10 @@ It is also possible to extend generic types by adding type parameters to an exte ```scala extension [T](xs: List[T]) - def second = xs.tail.head + def second = xs.tail.head extension [T: Numeric](x: T) - def + (y: T): T = summon[Numeric[T]].plus(x, y) + def + (y: T): T = summon[Numeric[T]].plus(x, y) ``` Type parameters on extensions can also be combined with type parameters on the methods @@ -79,7 +80,7 @@ themselves: ```scala extension [T](xs: List[T]) - def sumBy[U: Numeric](f: T => U): U = ... + def sumBy[U: Numeric](f: T => U): U = ... ``` Type arguments matching method type parameters are passed as usual: @@ -105,7 +106,7 @@ Extensions can also take using clauses. 
For instance, the `+` extension above co ```scala extension [T](x: T)(using n: Numeric[T]) - def + (y: T): T = n.plus(x, y) + def + (y: T): T = n.plus(x, y) ``` ### Collective Extensions @@ -118,11 +119,11 @@ Example: ```scala extension (ss: Seq[String]) - def longestStrings: Seq[String] = - val maxLength = ss.map(_.length).max - ss.filter(_.length == maxLength) + def longestStrings: Seq[String] = + val maxLength = ss.map(_.length).max + ss.filter(_.length == maxLength) - def longestString: String = longestStrings.head + def longestString: String = longestStrings.head ``` The same can be written with braces as follows (note that indented regions can still be used inside braces): @@ -130,10 +131,10 @@ The same can be written with braces as follows (note that indented regions can s ```scala extension (ss: Seq[String]) { - def longestStrings: Seq[String] = { - val maxLength = ss.map(_.length).max - ss.filter(_.length == maxLength) - } + def longestStrings: Seq[String] = { + val maxLength = ss.map(_.length).max + ss.filter(_.length == maxLength) + } def longestString: String = longestStrings.head } @@ -147,22 +148,22 @@ where each method is defined separately. For instance, the first extension above ```scala extension (ss: Seq[String]) - def longestStrings: Seq[String] = - val maxLength = ss.map(_.length).max - ss.filter(_.length == maxLength) + def longestStrings: Seq[String] = + val maxLength = ss.map(_.length).max + ss.filter(_.length == maxLength) extension (ss: Seq[String]) - def longestString: String = ss.longestStrings.head + def longestString: String = ss.longestStrings.head ``` Collective extensions also can take type parameters and have using clauses. 
Example: ```scala extension [T](xs: List[T])(using Ordering[T]) - def smallest(n: Int): List[T] = xs.sorted.take(n) - def smallestIndices(n: Int): List[Int] = - val limit = smallest(n).max - xs.zipWithIndex.collect { case (x, i) if x <= limit => i } + def smallest(n: Int): List[T] = xs.sorted.take(n) + def smallestIndices(n: Int): List[Int] = + val limit = smallest(n).max + xs.zipWithIndex.collect { case (x, i) if x <= limit => i } ``` ### Translation of Calls to Extension Methods @@ -183,33 +184,33 @@ Here is an example for the first rule: ```scala trait IntOps: - extension (i: Int) def isZero: Boolean = i == 0 + extension (i: Int) def isZero: Boolean = i == 0 - extension (i: Int) def safeMod(x: Int): Option[Int] = - // extension method defined in same scope IntOps - if x.isZero then None - else Some(i % x) + extension (i: Int) def safeMod(x: Int): Option[Int] = + // extension method defined in same scope IntOps + if x.isZero then None + else Some(i % x) object IntOpsEx extends IntOps: - extension (i: Int) def safeDiv(x: Int): Option[Int] = - // extension method brought into scope via inheritance from IntOps - if x.isZero then None - else Some(i / x) + extension (i: Int) def safeDiv(x: Int): Option[Int] = + // extension method brought into scope via inheritance from IntOps + if x.isZero then None + else Some(i / x) trait SafeDiv: - import IntOpsEx.* // brings safeDiv and safeMod into scope + import IntOpsEx.* // brings safeDiv and safeMod into scope - extension (i: Int) def divide(d: Int): Option[(Int, Int)] = - // extension methods imported and thus in scope - (i.safeDiv(d), i.safeMod(d)) match - case (Some(d), Some(r)) => Some((d, r)) - case _ => None + extension (i: Int) def divide(d: Int): Option[(Int, Int)] = + // extension methods imported and thus in scope + (i.safeDiv(d), i.safeMod(d)) match + case (Some(d), Some(r)) => Some((d, r)) + case _ => None ``` By the second rule, an extension method can be made available by defining a given instance containing 
it, like this: ```scala -given ops1: IntOps with {} // brings safeMod into scope +given ops1: IntOps() // brings safeMod into scope 1.safeMod(2) ``` @@ -218,15 +219,15 @@ By the third and fourth rule, an extension method is available if it is in the i ```scala class List[T]: - ... + ... object List: - ... - extension [T](xs: List[List[T]]) - def flatten: List[T] = xs.foldLeft(List.empty[T])(_ ++ _) + ... + extension [T](xs: List[List[T]]) + def flatten: List[T] = xs.foldLeft(List.empty[T])(_ ++ _) - given [T: Ordering]: Ordering[List[T]] with - extension (xs: List[T]) - def < (ys: List[T]): Boolean = ... + given [T: Ordering]: Ordering[List[T]] with + extension (xs: List[T]) + def < (ys: List[T]): Boolean = ... end List // extension method available since it is in the implicit scope @@ -258,25 +259,25 @@ An extension method can also be referenced using a simple identifier without a p ```scala extension (x: T) - def f ... = ... g ... - def g ... + def f ... = ... g ... + def g ... ``` the identifier is rewritten to `x.g`. This is also the case if `f` and `g` are the same method. Example: ```scala extension (s: String) - def position(ch: Char, n: Int): Int = - if n < s.length && s(n) != ch then position(ch, n + 1) - else n + def position(ch: Char, n: Int): Int = + if n < s.length && s(n) != ch then position(ch, n + 1) + else n ``` The recursive call `position(ch, n + 1)` expands to `s.position(ch, n + 1)` in this case. The whole extension method rewrites to ```scala def position(s: String)(ch: Char, n: Int): Int = - if n < s.length && s(n) != ch then position(s)(ch, n + 1) - else n + if n < s.length && s(n) != ch then position(s)(ch, n + 1) + else n ``` ### Syntax @@ -284,12 +285,12 @@ def position(s: String)(ch: Char, n: Int): Int = Here are the syntax changes for extension methods and collective extensions relative to the [current syntax](../syntax.md). -```ebnf +``` BlockStat ::= ... | Extension TemplateStat ::= ... | Extension TopStat ::= ... 
| Extension -Extension ::= ‘extension’ [DefTypeParamClause] ‘(’ DefParam ‘)’ - {UsingParamClause} ExtMethods +Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} + ‘(’ DefParam ‘)’ {UsingParamClause} ExtMethods ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef ``` diff --git a/docs/docs/reference/contextual/given-imports.md b/docs/_docs/reference/contextual/given-imports.md similarity index 86% rename from docs/docs/reference/contextual/given-imports.md rename to docs/_docs/reference/contextual/given-imports.md index 0ffbb44d643d..3e3bc3d275cc 100644 --- a/docs/docs/reference/contextual/given-imports.md +++ b/docs/_docs/reference/contextual/given-imports.md @@ -1,20 +1,21 @@ --- layout: doc-page title: "Importing Givens" +movedTo: https://docs.scala-lang.org/scala3/reference/contextual/given-imports.html --- A special form of import wildcard selector is used to import given instances. Example: ```scala object A: - class TC - given tc: TC = ??? - def f(using TC) = ??? + class TC + given tc: TC = ??? + def f(using TC) = ??? object B: - import A.* - import A.given - ... + import A.* + import A.given + ... ``` In the code above, the `import A.*` clause in object `B` imports all members @@ -23,11 +24,11 @@ The two import clauses can also be merged into one: ```scala object B: - import A.{given, *} + import A.{given, *} ... ``` -Generally, a normal wildcard selector `_` brings all definitions other than givens or extensions into scope +Generally, a normal wildcard selector `*` brings all definitions other than givens or extensions into scope whereas a `given` selector brings all givens (including those resulting from extensions) into scope. There are two main benefits arising from these rules: @@ -59,10 +60,10 @@ For instance, assuming the object ```scala object Instances: - given intOrd: Ordering[Int] = ... - given listOrd[T: Ordering]: Ordering[List[T]] = ... 
- given ec: ExecutionContext = ... - given im: Monoid[Int] = ... + given intOrd: Ordering[Int] = ... + given listOrd[T: Ordering]: Ordering[List[T]] = ... + given ec: ExecutionContext = ... + given im: Monoid[Int] = ... ``` the import clause @@ -90,11 +91,11 @@ normal imports to givens and given imports. The following modifications avoid this hurdle to migration. 1. A `given` import selector also brings old style implicits into scope. So, in Scala 3.0 - an old-style implicit definition can be brought into scope either by a `_` or a `given` wildcard selector. + an old-style implicit definition can be brought into scope either by a `*` or a `given` wildcard selector. - 2. In Scala 3.1, old-style implicits accessed through a `_` wildcard import will give a deprecation warning. + 2. In Scala 3.1, old-style implicits accessed through a `*` wildcard import will give a deprecation warning. - 3. In some version after 3.1, old-style implicits accessed through a `_` wildcard import will give a compiler error. + 3. In some version after 3.1, old-style implicits accessed through a `*` wildcard import will give a compiler error. These rules mean that library users can use `given` selectors to access old-style implicits in Scala 3.0, and will be gently nudged and then forced to do so in later versions. Libraries can then switch to diff --git a/docs/_docs/reference/contextual/givens.md b/docs/_docs/reference/contextual/givens.md new file mode 100644 index 000000000000..2af94c0a8aea --- /dev/null +++ b/docs/_docs/reference/contextual/givens.md @@ -0,0 +1,193 @@ +--- +layout: doc-page +title: "Given Instances" +movedTo: https://docs.scala-lang.org/scala3/reference/contextual/givens.html +--- + +Given instances (or, simply, "givens") define "canonical" values of certain types +that serve for synthesizing arguments to [context parameters](./using-clauses.md). 
Example: + +```scala +trait Ord[T]: + def compare(x: T, y: T): Int + extension (x: T) def < (y: T) = compare(x, y) < 0 + extension (x: T) def > (y: T) = compare(x, y) > 0 + +given intOrd: Ord[Int] with + def compare(x: Int, y: Int) = + if x < y then -1 else if x > y then +1 else 0 + +given listOrd[T](using ord: Ord[T]): Ord[List[T]] with + + def compare(xs: List[T], ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = ord.compare(x, y) + if fst != 0 then fst else compare(xs1, ys1) + +``` + +This code defines a trait `Ord` with two given instances. `intOrd` defines +a given for the type `Ord[Int]` whereas `listOrd[T]` defines givens +for `Ord[List[T]]` for all types `T` that come with a given instance for `Ord[T]` +themselves. The `using` clause in `listOrd` defines a condition: There must be a +given of type `Ord[T]` for a given of type `Ord[List[T]]` to exist. +Such conditions are expanded by the compiler to [context parameters](./using-clauses.md). + +## Anonymous Givens + +The name of a given can be left out. So the definitions +of the last section can also be expressed like this: + +```scala +given Ord[Int] with + ... +given [T](using Ord[T]): Ord[List[T]] with + ... +``` + +If the name of a given is missing, the compiler will synthesize a name from +the implemented type(s). + +**Note** The name synthesized by the compiler is chosen to be readable and reasonably concise. For instance, the two instances above would get the names: + +```scala +given_Ord_Int +given_Ord_List_T +``` + +The precise rules for synthesizing names are found [here](./relationship-implicits.html#anonymous-given-instances). These rules do not guarantee absence of name conflicts between +given instances of types that are "too similar". To avoid conflicts one can +use named instances. + +**Note** To ensure robust binary compatibility, publicly available libraries should prefer named instances. 
+ +## Alias Givens + +An alias can be used to define a given instance that is equal to some expression. Example: + +```scala +given global: ExecutionContext = ForkJoinPool() +``` + +This creates a given `global` of type `ExecutionContext` that resolves to the right +hand side `ForkJoinPool()`. +The first time `global` is accessed, a new `ForkJoinPool` is created, which is then +returned for this and all subsequent accesses to `global`. This operation is thread-safe. + +Alias givens can be anonymous as well, e.g. + +```scala +given Position = enclosingTree.position +given (using config: Config): Factory = MemoizingFactory(config) +``` + +An alias given can have type parameters and context parameters just like any other given, +but it can only implement a single type. + +## Given Macros + +Given aliases can have the `inline` and `transparent` modifiers. +Example: + +```scala +transparent inline given mkAnnotations[A, T]: Annotations[A, T] = ${ + // code producing a value of a subtype of Annotations +} +``` + +Since `mkAnnotations` is `transparent`, the type of an application is the type of its right-hand side, which can be a proper subtype of the declared result type `Annotations[A, T]`. + +Given instances can have the `inline` but not `transparent` modifiers as their type is already known from the signature. +Example: + +```scala +trait Show[T] { + inline def show(x: T): String +} + +inline given Show[Foo] with { + /*transparent*/ inline def show(x: Foo): String = ${ ... } +} + +def app = + // inlines `show` method call and removes the call to `given Show[Foo]` + summon[Show[Foo]].show(foo) +``` +Note that the inline methods within the given instances may be `transparent`. + +The inlining of given instances will not inline/duplicate the implementation of the given, it will just inline the instantiation of that instance. +This is used to help dead code elimination of the given instances that are not used after inlining. 
+ + +## Pattern-Bound Given Instances + +Given instances can also appear in patterns. Example: + +```scala +for given Context <- applicationContexts do + +pair match + case (ctx @ given Context, y) => ... +``` + +In the first fragment above, anonymous given instances for class `Context` are established by enumerating over `applicationContexts`. In the second fragment, a given `Context` +instance named `ctx` is established by matching against the first half of the `pair` selector. + +In each case, a pattern-bound given instance consists of `given` and a type `T`. The pattern matches exactly the same selectors as the type ascription pattern `_: T`. + +## Negated Givens + +Scala 2's somewhat puzzling behavior with respect to ambiguity has been exploited to implement the analogue of a "negated" search in implicit resolution, +where a query Q1 fails if some other query Q2 succeeds and Q1 succeeds if Q2 fails. With the new cleaned up behavior these techniques no longer work. +But the new special type [`scala.util.NotGiven`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) now implements negation directly. + +For any query type `Q`, [`NotGiven[Q]`](https://scala-lang.org/api/3.x/scala/util/NotGiven.html) succeeds if and only if the implicit +search for `Q` fails, for example: + +```scala +import scala.util.NotGiven + +trait Tagged[A] + +case class Foo[A](value: Boolean) +object Foo: + given fooTagged[A](using Tagged[A]): Foo[A] = Foo(true) + given fooNotTagged[A](using NotGiven[Tagged[A]]): Foo[A] = Foo(false) + +@main def test(): Unit = + given Tagged[Int]() + assert(summon[Foo[Int]].value) // fooTagged is found + assert(!summon[Foo[String]].value) // fooNotTagged is found +``` + +## Given Instance Initialization + +A given instance without type or context parameters is initialized on-demand, the first +time it is accessed. If a given has type or context parameters, a fresh instance +is created for each reference. 
+ +## Syntax + +Here is the syntax for given instances: + +``` +TmplDef ::= ... + | ‘given’ GivenDef +GivenDef ::= [GivenSig] StructuralInstance + | [GivenSig] AnnotType ‘=’ Expr + | [GivenSig] AnnotType +GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ +StructuralInstance ::= ConstrApp {‘with’ ConstrApp} ‘with’ TemplateBody +``` + +A given instance starts with the reserved word `given` and an optional _signature_. The signature +defines a name and/or parameters for the instance. It is followed by `:`. There are three kinds +of given instances: + +- A _structural instance_ contains one or more types or constructor applications, + followed by `with` and a template body that contains member definitions of the instance. +- An _alias instance_ contains a type, followed by `=` and a right-hand side expression. +- An _abstract instance_ contains just the type, which is not followed by anything. diff --git a/docs/docs/reference/contextual/multiversal-equality.md b/docs/_docs/reference/contextual/multiversal-equality.md similarity index 95% rename from docs/docs/reference/contextual/multiversal-equality.md rename to docs/_docs/reference/contextual/multiversal-equality.md index 35a232903576..f01b64d2e444 100644 --- a/docs/docs/reference/contextual/multiversal-equality.md +++ b/docs/_docs/reference/contextual/multiversal-equality.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Multiversal Equality" +movedTo: https://docs.scala-lang.org/scala3/reference/contextual/multiversal-equality.html --- Previously, Scala had universal equality: Two values of any types @@ -24,7 +25,7 @@ the program will still typecheck, since values of all types can be compared with But it will probably give unexpected results and fail at runtime. Multiversal equality is an opt-in way to make universal equality safer. 
-It uses a binary type class [`scala.CanEqual`](https://github.com/lampepfl/dotty/blob/master/library/src/scala/CanEqual.scala) +It uses a binary type class [`scala.CanEqual`](https://github.com/lampepfl/dotty/blob/main/library/src/scala/CanEqual.scala) to indicate that values of two given types can be compared with each other. The example above would not typecheck if `S` or `T` was a class that derives `CanEqual`, e.g. @@ -55,7 +56,7 @@ import annotation.implicitNotFound sealed trait CanEqual[-L, -R] object CanEqual: - object derived extends CanEqual[Any, Any] + object derived extends CanEqual[Any, Any] ``` One can have several `CanEqual` given instances for a type. For example, the four @@ -69,7 +70,7 @@ given CanEqual[A, B] = CanEqual.derived given CanEqual[B, A] = CanEqual.derived ``` -The [`scala.CanEqual`](https://github.com/lampepfl/dotty/blob/master/library/src/scala/CanEqual.scala) +The [`scala.CanEqual`](https://github.com/lampepfl/dotty/blob/main/library/src/scala/CanEqual.scala) object defines a number of `CanEqual` given instances that together define a rule book for what standard types can be compared (more details below). @@ -108,7 +109,7 @@ this generates the following `CanEqual` instance in the companion object of `Box ```scala given [T, U](using CanEqual[T, U]): CanEqual[Box[T], Box[U]] = - CanEqual.derived + CanEqual.derived ``` That is, two boxes are comparable with `==` or `!=` if their elements are. Examples: @@ -173,22 +174,22 @@ we are dealing with a refinement of pre-existing, universal equality. It is best Say you want to come up with a safe version of the `contains` method on `List[T]`. The original definition of `contains` in the standard library was: ```scala class List[+T]: - ... - def contains(x: Any): Boolean + ... + def contains(x: Any): Boolean ``` That uses universal equality in an unsafe way since it permits arguments of any type to be compared with the list's elements. 
The "obvious" alternative definition ```scala - def contains(x: T): Boolean + def contains(x: T): Boolean ``` does not work, since it refers to the covariant parameter `T` in a nonvariant context. The only variance-correct way to use the type parameter `T` in `contains` is as a lower bound: ```scala - def contains[U >: T](x: U): Boolean + def contains[U >: T](x: U): Boolean ``` This generic version of `contains` is the one used in the current (Scala 2.13) version of `List`. It looks different but it admits exactly the same applications as the `contains(x: Any)` definition we started with. However, we can make it more useful (i.e. restrictive) by adding a `CanEqual` parameter: ```scala - def contains[U >: T](x: U)(using CanEqual[T, U]): Boolean // (1) + def contains[U >: T](x: U)(using CanEqual[T, U]): Boolean // (1) ``` This version of `contains` is equality-safe! More precisely, given `x: T`, `xs: List[T]` and `y: U`, then `xs.contains(y)` is type-correct if and only if @@ -196,7 +197,7 @@ This version of `contains` is equality-safe! More precisely, given Unfortunately, the crucial ability to "lift" equality type checking from simple equality and pattern matching to arbitrary user-defined operations gets lost if we restrict ourselves to an equality class with a single type parameter. Consider the following signature of `contains` with a hypothetical `CanEqual1[T]` type class: ```scala - def contains[U >: T](x: U)(using CanEqual1[U]): Boolean // (2) + def contains[U >: T](x: U)(using CanEqual1[U]): Boolean // (2) ``` This version could be applied just as widely as the original `contains(x: Any)` method, since the `CanEqual1[Any]` fallback is always available! So we have gained nothing. What got lost in the transition to a single parameter type class was the original rule that `CanEqual[A, B]` is available only if neither `A` nor `B` have a reflexive `CanEqual` instance. That rule simply cannot be expressed if there is a single type parameter for `CanEqual`. 
diff --git a/docs/docs/reference/contextual/relationship-implicits.md b/docs/_docs/reference/contextual/relationship-implicits.md similarity index 96% rename from docs/docs/reference/contextual/relationship-implicits.md rename to docs/_docs/reference/contextual/relationship-implicits.md index 784f01dcbf48..4a23e781bd70 100644 --- a/docs/docs/reference/contextual/relationship-implicits.md +++ b/docs/_docs/reference/contextual/relationship-implicits.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Relationship with Scala 2 Implicits" +movedTo: https://docs.scala-lang.org/scala3/reference/contextual/relationship-implicits.html --- Many, but not all, of the new contextual abstraction features in Scala 3 can be mapped to Scala 2's implicits. This page gives a rundown on the relationships between new and old features. @@ -34,7 +35,7 @@ Given instances can be mapped to combinations of implicit objects, classes and i ```scala class listOrd[T](implicit ord: Ord[T]) extends Ord[List[T]] { ... } final implicit def listOrd[T](implicit ord: Ord[T]): listOrd[T] = - new listOrd[T] + new listOrd[T] ``` 3. Alias givens map to implicit methods or implicit lazy vals. 
If an alias has neither type nor context parameters, @@ -114,14 +115,14 @@ Extension methods have no direct counterpart in Scala 2, but they can be simulat ```scala extension (c: Circle) - def circumference: Double = c.radius * math.Pi * 2 + def circumference: Double = c.radius * math.Pi * 2 ``` could be simulated to some degree by ```scala implicit class CircleDecorator(c: Circle) extends AnyVal { - def circumference: Double = c.radius * math.Pi * 2 + def circumference: Double = c.radius * math.Pi * 2 } ``` @@ -153,7 +154,7 @@ one can write ```scala given stringToToken: Conversion[String, Token] with - def apply(str: String): Token = KeyWord(str) + def apply(str: String): Token = KeyWord(str) ``` or diff --git a/docs/docs/reference/contextual/right-associative-extension-methods.md b/docs/_docs/reference/contextual/right-associative-extension-methods.md similarity index 95% rename from docs/docs/reference/contextual/right-associative-extension-methods.md rename to docs/_docs/reference/contextual/right-associative-extension-methods.md index 798370c4c373..7c2cf6b02675 100644 --- a/docs/docs/reference/contextual/right-associative-extension-methods.md +++ b/docs/_docs/reference/contextual/right-associative-extension-methods.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Right-Associative Extension Methods: Details" +movedTo: https://docs.scala-lang.org/scala3/reference/contextual/right-associative-extension-methods.html --- The most general form of leading parameters of an extension method is as follows: diff --git a/docs/docs/reference/contextual/type-classes.md b/docs/_docs/reference/contextual/type-classes.md similarity index 79% rename from docs/docs/reference/contextual/type-classes.md rename to docs/_docs/reference/contextual/type-classes.md index 5911eff48eed..c4648b3baf28 100644 --- a/docs/docs/reference/contextual/type-classes.md +++ b/docs/_docs/reference/contextual/type-classes.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Implementing Type classes" 
+movedTo: https://docs.scala-lang.org/scala3/reference/contextual/type-classes.html --- A _type class_ is an abstract, parameterized type that lets you add new behavior to any closed data type without using sub-typing. This can be useful in multiple use-cases, for example: @@ -17,47 +18,47 @@ Here's the `Monoid` type class definition: ```scala trait SemiGroup[T]: - extension (x: T) def combine (y: T): T + extension (x: T) def combine (y: T): T trait Monoid[T] extends SemiGroup[T]: - def unit: T + def unit: T ``` An implementation of this `Monoid` type class for the type `String` can be the following: ```scala given Monoid[String] with - extension (x: String) def combine (y: String): String = x.concat(y) - def unit: String = "" + extension (x: String) def combine (y: String): String = x.concat(y) + def unit: String = "" ``` Whereas for the type `Int` one could write the following: ```scala given Monoid[Int] with - extension (x: Int) def combine (y: Int): Int = x + y - def unit: Int = 0 + extension (x: Int) def combine (y: Int): Int = x + y + def unit: Int = 0 ``` This monoid can now be used as _context bound_ in the following `combineAll` method: ```scala def combineAll[T: Monoid](xs: List[T]): T = - xs.foldLeft(summon[Monoid[T]].unit)(_.combine(_)) + xs.foldLeft(summon[Monoid[T]].unit)(_.combine(_)) ``` To get rid of the `summon[...]` we can define a `Monoid` object as follows: ```scala object Monoid: - def apply[T](using m: Monoid[T]) = m + def apply[T](using m: Monoid[T]) = m ``` Which would allow to re-write the `combineAll` method this way: ```scala def combineAll[T: Monoid](xs: List[T]): T = - xs.foldLeft(Monoid[T].unit)(_.combine(_)) + xs.foldLeft(Monoid[T].unit)(_.combine(_)) ``` ### Functors @@ -69,7 +70,7 @@ The definition of a generic `Functor` would thus be written as: ```scala trait Functor[F[_]]: - def map[A, B](x: F[A], f: A => B): F[B] + def map[A, B](x: F[A], f: A => B): F[B] ``` Which could read as follows: "A `Functor` for the type constructor 
`F[_]` represents the ability to transform `F[A]` to `F[B]` through the application of function `f` with type `A => B`". We call the `Functor` definition here a _type class_. @@ -77,8 +78,8 @@ This way, we could define an instance of `Functor` for the `List` type: ```scala given Functor[List] with - def map[A, B](x: List[A], f: A => B): List[B] = - x.map(f) // List already has a `map` method + def map[A, B](x: List[A], f: A => B): List[B] = + x.map(f) // List already has a `map` method ``` With this `given` instance in scope, everywhere a `Functor` is expected, the compiler will accept a `List` to be used. @@ -87,7 +88,7 @@ For instance, we may write such a testing method: ```scala def assertTransformation[F[_]: Functor, A, B](expected: F[B], original: F[A], mapping: A => B): Unit = - assert(expected == summon[Functor[F]].map(original, mapping)) + assert(expected == summon[Functor[F]].map(original, mapping)) ``` And use it this way, for example: @@ -101,17 +102,17 @@ As in the previous example of Monoids, [`extension` methods](extension-methods.m ```scala trait Functor[F[_]]: - extension [A](x: F[A]) - def map[B](f: A => B): F[B] + extension [A](x: F[A]) + def map[B](f: A => B): F[B] ``` The instance of `Functor` for `List` now becomes: ```scala given Functor[List] with - extension [A](xs: List[A]) - def map[B](f: A => B): List[B] = - xs.map(f) // List already has a `map` method + extension [A](xs: List[A]) + def map[B](f: A => B): List[B] = + xs.map(f) // List already has a `map` method ``` @@ -119,7 +120,7 @@ It simplifies the `assertTransformation` method: ```scala def assertTransformation[F[_]: Functor, A, B](expected: F[B], original: F[A], mapping: A => B): Unit = - assert(expected == original.map(mapping)) + assert(expected == original.map(mapping)) ``` The `map` method is now directly used on `original`. 
It is available as an extension method @@ -140,15 +141,15 @@ Here is the translation of this definition in Scala 3: ```scala trait Monad[F[_]] extends Functor[F]: - /** The unit value for a monad */ - def pure[A](x: A): F[A] + /** The unit value for a monad */ + def pure[A](x: A): F[A] - extension [A](x: F[A]) - /** The fundamental composition operation */ - def flatMap[B](f: A => F[B]): F[B] + extension [A](x: F[A]) + /** The fundamental composition operation */ + def flatMap[B](f: A => F[B]): F[B] - /** The `map` operation can now be defined in terms of `flatMap` */ - def map[B](f: A => B) = x.flatMap(f.andThen(pure)) + /** The `map` operation can now be defined in terms of `flatMap` */ + def map[B](f: A => B) = x.flatMap(f.andThen(pure)) end Monad ``` @@ -159,11 +160,11 @@ A `List` can be turned into a monad via this `given` instance: ```scala given listMonad: Monad[List] with - def pure[A](x: A): List[A] = - List(x) - extension [A](xs: List[A]) - def flatMap[B](f: A => List[B]): List[B] = - xs.flatMap(f) // rely on the existing `flatMap` method of `List` + def pure[A](x: A): List[A] = + List(x) + extension [A](xs: List[A]) + def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) // rely on the existing `flatMap` method of `List` ``` Since `Monad` is a subtype of `Functor`, `List` is also a functor. The Functor's `map` @@ -176,12 +177,12 @@ it explicitly. 
```scala given optionMonad: Monad[Option] with - def pure[A](x: A): Option[A] = - Option(x) - extension [A](xo: Option[A]) - def flatMap[B](f: A => Option[B]): Option[B] = xo match - case Some(x) => f(x) - case None => None + def pure[A](x: A): Option[A] = + Option(x) + extension [A](xo: Option[A]) + def flatMap[B](f: A => Option[B]): Option[B] = xo match + case Some(x) => f(x) + case None => None ``` #### Reader @@ -224,12 +225,12 @@ The monad instance will look like this: ```scala given configDependentMonad: Monad[ConfigDependent] with - def pure[A](x: A): ConfigDependent[A] = - config => x + def pure[A](x: A): ConfigDependent[A] = + config => x - extension [A](x: ConfigDependent[A]) - def flatMap[B](f: A => ConfigDependent[B]): ConfigDependent[B] = - config => f(x(config))(config) + extension [A](x: ConfigDependent[A]) + def flatMap[B](f: A => ConfigDependent[B]): ConfigDependent[B] = + config => f(x(config))(config) end configDependentMonad ``` @@ -245,12 +246,12 @@ Using this syntax would turn the previous `configDependentMonad` into: ```scala given configDependentMonad: Monad[[Result] =>> Config => Result] with - def pure[A](x: A): Config => A = - config => x + def pure[A](x: A): Config => A = + config => x - extension [A](x: Config => A) - def flatMap[B](f: A => Config => B): Config => B = - config => f(x(config))(config) + extension [A](x: Config => A) + def flatMap[B](f: A => Config => B): Config => B = + config => f(x(config))(config) end configDependentMonad ``` @@ -260,12 +261,12 @@ It is likely that we would like to use this pattern with other kinds of environm ```scala given readerMonad[Ctx]: Monad[[X] =>> Ctx => X] with - def pure[A](x: A): Ctx => A = - ctx => x + def pure[A](x: A): Ctx => A = + ctx => x - extension [A](x: Ctx => A) - def flatMap[B](f: A => Ctx => B): Ctx => B = - ctx => f(x(ctx))(ctx) + extension [A](x: Ctx => A) + def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(x(ctx))(ctx) end readerMonad ``` diff --git 
a/docs/docs/reference/contextual/using-clauses.md b/docs/_docs/reference/contextual/using-clauses.md similarity index 82% rename from docs/docs/reference/contextual/using-clauses.md rename to docs/_docs/reference/contextual/using-clauses.md index 42178a75a64c..e8a077ccdc0a 100644 --- a/docs/docs/reference/contextual/using-clauses.md +++ b/docs/_docs/reference/contextual/using-clauses.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Using Clauses" +movedTo: https://docs.scala-lang.org/scala3/reference/contextual/using-clauses.html --- Functional programming tends to express most dependencies as simple function parameterization. @@ -13,7 +14,7 @@ a `max` function that works for any arguments for which an ordering exists can b ```scala def max[T](x: T, y: T)(using ord: Ord[T]): T = - if ord.compare(x, y) < 0 then y else x + if ord.compare(x, y) < 0 then y else x ``` Here, `ord` is a _context parameter_ introduced with a `using` clause. @@ -39,7 +40,7 @@ and just provide its type. Example: ```scala def maximum[T](xs: List[T])(using Ord[T]): T = - xs.reduceLeft(max) + xs.reduceLeft(max) ``` `maximum` takes a context parameter of type `Ord` only to pass it on as an @@ -47,16 +48,43 @@ inferred argument to `max`. The name of the parameter is left out. Generally, context parameters may be defined either as a full parameter list `(p_1: T_1, ..., p_n: T_n)` or just as a sequence of types `T_1, ..., T_n`. Vararg parameters are not supported in `using` clauses. +## Class Context Parameters + +If a class context parameter is made a member by adding a `val` or `var` modifier, +then that member is available as a given instance. 
+ +Compare the following examples, where the attempt to supply an explicit `given` member induces an ambiguity: + +```scala +class GivenIntBox(using val givenInt: Int): + def n = summon[Int] + +class GivenIntBox2(using givenInt: Int): + given Int = givenInt + //def n = summon[Int] // ambiguous +``` + +The `given` member is importable as explained in the section on [importing `given`s](./given-imports.md): + +```scala +val b = GivenIntBox(using 23) +import b.given +summon[Int] // 23 + +import b.* +//givenInt // Not found +``` + ## Inferring Complex Arguments Here are two other methods that have a context parameter of type `Ord[T]`: ```scala def descending[T](using asc: Ord[T]): Ord[T] = new Ord[T]: - def compare(x: T, y: T) = asc.compare(y, x) + def compare(x: T, y: T) = asc.compare(y, x) def minimum[T](xs: List[T])(using Ord[T]) = - maximum(xs)(using descending) + maximum(xs)(using descending) ``` The `minimum` method's right-hand side passes `descending` as an explicit argument to `maximum(xs)`. diff --git a/docs/docs/reference/dropped-features/auto-apply.md b/docs/_docs/reference/dropped-features/auto-apply.md similarity index 95% rename from docs/docs/reference/dropped-features/auto-apply.md rename to docs/_docs/reference/dropped-features/auto-apply.md index 9ff9aa648502..c5424ffff22d 100644 --- a/docs/docs/reference/dropped-features/auto-apply.md +++ b/docs/_docs/reference/dropped-features/auto-apply.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Dropped: Auto-Application" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/auto-apply.html --- Previously an empty argument list `()` was implicitly inserted when @@ -74,10 +75,10 @@ exactly in their parameter lists. 
```scala class A: - def next(): Int + def next(): Int class B extends A: - def next: Int // overriding error: incompatible type + def next: Int // overriding error: incompatible type ``` Methods overriding Java or Scala 2 methods are again exempted from this diff --git a/docs/docs/reference/dropped-features/class-shadowing-spec.md b/docs/_docs/reference/dropped-features/class-shadowing-spec.md similarity index 84% rename from docs/docs/reference/dropped-features/class-shadowing-spec.md rename to docs/_docs/reference/dropped-features/class-shadowing-spec.md index 427337a63df8..98acff828cd3 100644 --- a/docs/docs/reference/dropped-features/class-shadowing-spec.md +++ b/docs/_docs/reference/dropped-features/class-shadowing-spec.md @@ -1,13 +1,14 @@ --- layout: doc-page title: "Dropped: Class Shadowing - More Details" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/class-shadowing-spec.html --- Spec diff: in section [5.1.4 Overriding](https://www.scala-lang.org/files/archive/spec/2.13/05-classes-and-objects.html#Overriding), add *M' must not be a class*. > Why do we want to make this change to the language? -Class shadowing is irregular compared to other types of overrides. Indeed, inner classes are not actually overriden but simply shadowed. +Class shadowing is irregular compared to other types of overrides. Indeed, inner classes are not actually overridden but simply shadowed. > How much existing code is going to be affected? 
diff --git a/docs/docs/reference/dropped-features/class-shadowing.md b/docs/_docs/reference/dropped-features/class-shadowing.md similarity index 88% rename from docs/docs/reference/dropped-features/class-shadowing.md rename to docs/_docs/reference/dropped-features/class-shadowing.md index d1d92a47bcbc..839b40253169 100644 --- a/docs/docs/reference/dropped-features/class-shadowing.md +++ b/docs/_docs/reference/dropped-features/class-shadowing.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Dropped: Class Shadowing" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/class-shadowing.html --- Scala 2 so far allowed patterns like this: diff --git a/docs/_docs/reference/dropped-features/delayed-init.md b/docs/_docs/reference/dropped-features/delayed-init.md new file mode 100644 index 000000000000..ab48de388569 --- /dev/null +++ b/docs/_docs/reference/dropped-features/delayed-init.md @@ -0,0 +1,32 @@ +--- +layout: doc-page +title: "Dropped: DelayedInit" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/delayed-init.html +--- + +The special handling of the [`DelayedInit`](https://scala-lang.org/api/3.x/scala/DelayedInit.html) +trait is no longer supported. + +One consequence is that the [`App`](https://scala-lang.org/api/3.x/scala/App.html) class, +which used [`DelayedInit`](https://scala-lang.org/api/3.x/scala/DelayedInit.html), is +now partially broken. You can still use `App` as a simple way to set up a main program. Example: + +```scala +object HelloWorld extends App { + println("Hello, world!") +} +``` + +However, the code is now run in the initializer of the object, which on +some JVMs means that it will only be interpreted. So, better not use it +for benchmarking! Also, if you want to access the command line arguments, +you need to use an explicit `main` method for that.
+ +```scala +object Hello: + def main(args: Array[String]) = + println(s"Hello, ${args(0)}") +``` + +On the other hand, Scala 3 offers a convenient alternative to such "program" objects +with [`@main` methods](../changed-features/main-functions.md). diff --git a/docs/docs/reference/dropped-features/do-while.md b/docs/_docs/reference/dropped-features/do-while.md similarity index 85% rename from docs/docs/reference/dropped-features/do-while.md rename to docs/_docs/reference/dropped-features/do-while.md index b2ef652f4898..5f5ca5feeea2 100644 --- a/docs/docs/reference/dropped-features/do-while.md +++ b/docs/_docs/reference/dropped-features/do-while.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Dropped: Do-While" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/do-while.html --- The syntax construct @@ -15,22 +16,22 @@ while ({ ; }) () For instance, instead of ```scala do - i += 1 + i += 1 while (f(i) == 0) ``` one writes ```scala while - i += 1 - f(i) == 0 + i += 1 + f(i) == 0 do () ``` The idea to use a block as the condition of a while also gives a solution to the "loop-and-a-half" problem. Here is another example: ```scala while - val x: Int = iterator.next - x >= 0 + val x: Int = iterator.next + x >= 0 do print(".") ``` diff --git a/docs/_docs/reference/dropped-features/dropped-features.md b/docs/_docs/reference/dropped-features/dropped-features.md new file mode 100644 index 000000000000..f4cd72942949 --- /dev/null +++ b/docs/_docs/reference/dropped-features/dropped-features.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Dropped Features" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features.html +--- + +The following pages document the features of Scala 2 that have been dropped in Scala 3. 
diff --git a/docs/docs/reference/dropped-features/early-initializers.md b/docs/_docs/reference/dropped-features/early-initializers.md similarity index 83% rename from docs/docs/reference/dropped-features/early-initializers.md rename to docs/_docs/reference/dropped-features/early-initializers.md index 0f7b5717b197..cb2ffea94376 100644 --- a/docs/docs/reference/dropped-features/early-initializers.md +++ b/docs/_docs/reference/dropped-features/early-initializers.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Dropped: Early Initializers" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/early-initializers.html --- Early initializers of the form diff --git a/docs/docs/reference/dropped-features/existential-types.md b/docs/_docs/reference/dropped-features/existential-types.md similarity index 93% rename from docs/docs/reference/dropped-features/existential-types.md rename to docs/_docs/reference/dropped-features/existential-types.md index 6969feac33dc..4d7d9e2f8c10 100644 --- a/docs/docs/reference/dropped-features/existential-types.md +++ b/docs/_docs/reference/dropped-features/existential-types.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Dropped: Existential Types" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/existential-types.html --- Existential types using `forSome` (as in diff --git a/docs/_docs/reference/dropped-features/limit22.md b/docs/_docs/reference/dropped-features/limit22.md new file mode 100644 index 000000000000..514120fd2ce9 --- /dev/null +++ b/docs/_docs/reference/dropped-features/limit22.md @@ -0,0 +1,17 @@ +--- +layout: doc-page +title: "Dropped: Limit 22" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/limit22.html +--- + +The limits of 22 for the maximal number of parameters of function types and the +maximal number of fields in tuple types have been dropped. + +* Functions can now have an arbitrary number of parameters. 
Functions beyond + [`scala.Function22`](https://www.scala-lang.org/api/current/scala/Function22.html) are erased to a new trait [`scala.runtime.FunctionXXL`](https://scala-lang.org/api/3.x/scala/runtime/FunctionXXL.html). + +* Tuples can also have an arbitrary number of fields. Tuples beyond [`scala.Tuple22`](https://www.scala-lang.org/api/current/scala/Tuple22.html) + are erased to a new class [`scala.runtime.TupleXXL`](https://scala-lang.org/api/3.x/scala/runtime/TupleXXL.html) (which extends the trait [`scala.Product`](https://scala-lang.org/api/3.x/scala/Product.html)). Furthermore, they support generic + operations such as concatenation and indexing. + +Both of these are implemented using arrays. diff --git a/docs/_docs/reference/dropped-features/macros.md b/docs/_docs/reference/dropped-features/macros.md new file mode 100644 index 000000000000..e71bf120fe13 --- /dev/null +++ b/docs/_docs/reference/dropped-features/macros.md @@ -0,0 +1,16 @@ +--- +layout: doc-page +title: "Dropped: Scala 2 Macros" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/macros.html +--- + +The previous, experimental macro system has been dropped. + +Instead, there is a cleaner, more restricted system based on two complementary concepts: `inline` and `'{ ... }`/`${ ... }` code generation. +`'{ ... }` delays the compilation of the code and produces an object containing the code, dually `${ ... }` evaluates an expression which produces code and inserts it in the surrounding `'{ ... }`. +In this setting, a definition marked as inlined containing a `${ ... }` is a macro, the code inside the `${ ... }` is executed at compile-time and produces code in the form of `'{ ... }`. +Additionally, the contents of code can be inspected and created with a more complex reflection API as an extension of the `'{ ... }`/`${ ... }` framework. + +* `inline` has been [implemented](../metaprogramming/inline.md) in Scala 3. +* Quotes `'{ ... }` and splices `${ ... 
}` have been [implemented](../metaprogramming/macros.md) in Scala 3. +* [TASTy reflect](../metaprogramming/reflection.md) provides more complex tree-based APIs to inspect or create quoted code. diff --git a/docs/_docs/reference/dropped-features/nonlocal-returns.md b/docs/_docs/reference/dropped-features/nonlocal-returns.md new file mode 100644 index 000000000000..99cdcdf0cd03 --- /dev/null +++ b/docs/_docs/reference/dropped-features/nonlocal-returns.md @@ -0,0 +1,27 @@ +--- +layout: doc-page +title: "Deprecated: Nonlocal Returns" + +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/nonlocal-returns.html +--- + +Returning from nested anonymous functions has been deprecated. + +Nonlocal returns are implemented by throwing and catching `scala.runtime.NonLocalReturnControl`-s. This is rarely what is intended by the programmer. It can be problematic because of the hidden performance cost of throwing and catching exceptions. Furthermore, it is a leaky implementation: a catch-all exception handler can intercept a `NonLocalReturnControl`. + +A drop-in library replacement is provided in [`scala.util.control.NonLocalReturns`](https://scala-lang.org/api/3.x/scala/util/control/NonLocalReturns$.html). 
Example: + +```scala +import scala.util.control.NonLocalReturns.* + +extension [T](xs: List[T]) + def has(elem: T): Boolean = returning { + for x <- xs do + if x == elem then throwReturn(true) + false + } + +@main def test(): Unit = + val xs = List(1, 2, 3, 4, 5) + assert(xs.has(2) == xs.contains(2)) +``` diff --git a/docs/docs/reference/dropped-features/package-objects.md b/docs/_docs/reference/dropped-features/package-objects.md similarity index 83% rename from docs/docs/reference/dropped-features/package-objects.md rename to docs/_docs/reference/dropped-features/package-objects.md index 6b5b71f1c6da..5e58b5479fd8 100644 --- a/docs/docs/reference/dropped-features/package-objects.md +++ b/docs/_docs/reference/dropped-features/package-objects.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Dropped: Package Objects" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/package-objects.html --- Package objects @@ -10,7 +11,7 @@ package object p { def b = ... } ``` -will be dropped. They are still available in Scala 3.0, but will be deprecated and removed afterwards. +will be dropped. They are still available in Scala 3.0 and 3.1, but will be deprecated and removed afterwards. Package objects are no longer needed since all kinds of definitions can now be written at the top-level. Example: ```scala @@ -31,7 +32,8 @@ The compiler generates synthetic objects that wrap top-level definitions falling - implicit classes and objects, - companion objects of opaque type aliases. -If a source file `src.scala` contains such top-level definitions, they will be put in a synthetic object named `src$package`. The wrapping is transparent, however. The definitions in `src` can still be accessed as members of the enclosing package. +If a source file `src.scala` contains such top-level definitions, they will be put in a synthetic object named `src$package`. The wrapping is transparent, however. 
The definitions in `src` can still be accessed as members of the enclosing package. The synthetic object will be placed last in the file, +after any other package clauses, imports, or object and class definitions. **Note:** This means that 1. The name of a source file containing wrapped top-level definitions is relevant for binary compatibility. If the name changes, so does the name of the generated object and its class. diff --git a/docs/docs/reference/dropped-features/procedure-syntax.md b/docs/_docs/reference/dropped-features/procedure-syntax.md similarity index 84% rename from docs/docs/reference/dropped-features/procedure-syntax.md rename to docs/_docs/reference/dropped-features/procedure-syntax.md index 374b06502b93..cc82636d370d 100644 --- a/docs/docs/reference/dropped-features/procedure-syntax.md +++ b/docs/_docs/reference/dropped-features/procedure-syntax.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Dropped: Procedure Syntax" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/procedure-syntax.html --- Procedure syntax diff --git a/docs/_docs/reference/dropped-features/symlits.md b/docs/_docs/reference/dropped-features/symlits.md new file mode 100644 index 000000000000..cec98030a831 --- /dev/null +++ b/docs/_docs/reference/dropped-features/symlits.md @@ -0,0 +1,24 @@ +--- +layout: doc-page +title: "Dropped: Symbol Literals" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/symlits.html +--- + +Symbol literals are no longer supported. + +The [`scala.Symbol`](https://scala-lang.org/api/3.x/scala/Symbol.html) class still exists, so a literal translation of the symbol literal `'xyz` is `Symbol("xyz")`. However, it is recommended to use a plain string literal `"xyz"` instead. (The `Symbol` class will be deprecated and removed in the future). 
Example: + + +``` +scalac Test.scala +-- Error: Test.scala:1:25 ------------------------------------------------------------------------------------------------ + +1 |@main def test = println('abc) + | ^ + | symbol literal 'abc is no longer supported, + | use a string literal "abc" or an application Symbol("abc") instead, + | or enclose in braces '{abc} if you want a quoted expression. + | For now, you can also `import language.deprecated.symbolLiterals` to accept + | the idiom, but this possibility might no longer be available in the future. +1 error found +``` diff --git a/docs/_docs/reference/dropped-features/this-qualifier.md b/docs/_docs/reference/dropped-features/this-qualifier.md new file mode 100644 index 000000000000..e1814e1d194e --- /dev/null +++ b/docs/_docs/reference/dropped-features/this-qualifier.md @@ -0,0 +1,33 @@ +--- +layout: doc-page +title: "Dropped: private[this] and protected[this]" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/this-qualifier.html +--- + +The `private[this]` and `protected[this]` access modifiers are deprecated and will be phased out. + +Previously, these modifiers were needed for + + - avoiding the generation of getters and setters + - excluding code under a `private[this]` from variance checks. (Scala 2 also excludes `protected[this]` but this was found to be unsound and was therefore removed). + - avoiding the generation of fields, if a `private[this] val` is not accessed + by a class method. + +The compiler now infers for `private` members the fact that they are only accessed via `this`. Such members are treated as if they had been declared `private[this]`. `protected[this]` is dropped without a replacement. + +This change can in some cases change the semantics of a Scala program, since a +`private` val is no longer guaranteed to generate a field. The field +is omitted if + + - the `val` is only accessed via `this`, and + - the `val` is not accessed from a method in the current class. 
+ +This can cause problems if a program tries to access the missing private field via reflection. The recommended fix is to declare the field instead to be qualified private with the enclosing class as qualifier. Example: +```scala + class C(x: Int): + private[C] val field = x + 1 + // [C] needed if `field` is to be accessed through reflection + val retained = field * field +``` + + diff --git a/docs/docs/reference/dropped-features/type-projection.md b/docs/_docs/reference/dropped-features/type-projection.md similarity index 88% rename from docs/docs/reference/dropped-features/type-projection.md rename to docs/_docs/reference/dropped-features/type-projection.md index 73b7a358cb38..f45eff5c423f 100644 --- a/docs/docs/reference/dropped-features/type-projection.md +++ b/docs/_docs/reference/dropped-features/type-projection.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Dropped: General Type Projection" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/type-projection.html --- Scala so far allowed general type projection `T#A` where `T` is an arbitrary type diff --git a/docs/docs/reference/dropped-features/weak-conformance-spec.md b/docs/_docs/reference/dropped-features/weak-conformance-spec.md similarity index 95% rename from docs/docs/reference/dropped-features/weak-conformance-spec.md rename to docs/_docs/reference/dropped-features/weak-conformance-spec.md index a02c2ee781ba..1432782a8e38 100644 --- a/docs/docs/reference/dropped-features/weak-conformance-spec.md +++ b/docs/_docs/reference/dropped-features/weak-conformance-spec.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Dropped: Weak Conformance - More Details" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/weak-conformance-spec.html --- To simplify the underlying type theory, Scala 3 drops the notion of diff --git a/docs/docs/reference/dropped-features/weak-conformance.md b/docs/_docs/reference/dropped-features/weak-conformance.md similarity index 94% rename from 
docs/docs/reference/dropped-features/weak-conformance.md rename to docs/_docs/reference/dropped-features/weak-conformance.md index c925249db9e7..c2dabe39e7c7 100644 --- a/docs/docs/reference/dropped-features/weak-conformance.md +++ b/docs/_docs/reference/dropped-features/weak-conformance.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Dropped: Weak Conformance" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/weak-conformance.html --- In some situations, Scala used a _weak conformance_ relation when diff --git a/docs/docs/reference/dropped-features/wildcard-init.md b/docs/_docs/reference/dropped-features/wildcard-init.md similarity index 84% rename from docs/docs/reference/dropped-features/wildcard-init.md rename to docs/_docs/reference/dropped-features/wildcard-init.md index bf8e60976057..c4b89674b11d 100644 --- a/docs/docs/reference/dropped-features/wildcard-init.md +++ b/docs/_docs/reference/dropped-features/wildcard-init.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Dropped: Wildcard Initializer" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/wildcard-init.html --- The syntax diff --git a/docs/docs/reference/dropped-features/xml.md b/docs/_docs/reference/dropped-features/xml.md similarity index 93% rename from docs/docs/reference/dropped-features/xml.md rename to docs/_docs/reference/dropped-features/xml.md index 602c0f08a12a..9de06ca30f2e 100644 --- a/docs/docs/reference/dropped-features/xml.md +++ b/docs/_docs/reference/dropped-features/xml.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Dropped: XML Literals" +movedTo: https://docs.scala-lang.org/scala3/reference/dropped-features/xml.html --- XML Literals are still supported, but will be dropped in the near future, to diff --git a/docs/docs/reference/enums/adts.md b/docs/_docs/reference/enums/adts.md similarity index 86% rename from docs/docs/reference/enums/adts.md rename to docs/_docs/reference/enums/adts.md index 00f33c887998..6fc5be78d7b8 100644 --- 
a/docs/docs/reference/enums/adts.md +++ b/docs/_docs/reference/enums/adts.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Algebraic Data Types" +movedTo: https://docs.scala-lang.org/scala3/reference/enums/adts.html --- The [`enum` concept](./enums.md) is general enough to also support algebraic data @@ -9,8 +10,8 @@ how an `Option` type can be represented as an ADT: ```scala enum Option[+T]: - case Some(x: T) - case None + case Some(x: T) + case None ``` This example introduces an `Option` enum with a covariant type @@ -24,8 +25,8 @@ be given explicitly: ```scala enum Option[+T]: - case Some(x: T) extends Option[T] - case None extends Option[Nothing] + case Some(x: T) extends Option[T] + case None extends Option[Nothing] ``` Note that the parent type of the `None` value is inferred as @@ -60,17 +61,17 @@ As all other enums, ADTs can define methods. For instance, here is `Option` agai ```scala enum Option[+T]: - case Some(x: T) - case None + case Some(x: T) + case None - def isDefined: Boolean = this match - case None => false - case _ => true + def isDefined: Boolean = this match + case None => false + case _ => true object Option: - def apply[T >: Null](x: T): Option[T] = - if x == null then None else Some(x) + def apply[T >: Null](x: T): Option[T] = + if x == null then None else Some(x) end Option ``` @@ -84,10 +85,10 @@ parameterized case that takes an RGB value. 
```scala enum Color(val rgb: Int): - case Red extends Color(0xFF0000) - case Green extends Color(0x00FF00) - case Blue extends Color(0x0000FF) - case Mix(mix: Int) extends Color(mix) + case Red extends Color(0xFF0000) + case Green extends Color(0x00FF00) + case Blue extends Color(0x0000FF) + case Mix(mix: Int) extends Color(mix) ``` ### Parameter Variance of Enums @@ -101,7 +102,7 @@ mapping a type `T` to itself: ```scala enum View[-T]: - case Refl(f: T => T) + case Refl(f: T => T) ``` The definition of `Refl` is incorrect, as it uses contravariant type `T` in the covariant result position of a @@ -119,7 +120,7 @@ Because `Refl` does not declare explicit parameters, it looks to the compiler li ```scala enum View[-T]: - case Refl[/*synthetic*/-T1](f: T1 => T1) extends View[T1] + case Refl[/*synthetic*/-T1](f: T1 => T1) extends View[T1] ``` The compiler has inferred for `Refl` the contravariant type parameter `T1`, following `T` in `View`. @@ -128,8 +129,8 @@ and can remedy the error by the following change to `Refl`: ```diff enum View[-T]: -- case Refl(f: T => T) -+ case Refl[R](f: R => R) extends View[R] +- case Refl(f: T => T) ++ case Refl[R](f: R => R) extends View[R] ``` Above, type `R` is chosen as the parameter for `Refl` to highlight that it has a different meaning to @@ -140,10 +141,10 @@ as the function type `T => U`: ```scala enum View[-T, +U] extends (T => U): - case Refl[R](f: R => R) extends View[R, R] + case Refl[R](f: R => R) extends View[R, R] - final def apply(t: T): U = this match - case refl: Refl[r] => refl.f(t) + final def apply(t: T): U = this match + case refl: Refl[r] => refl.f(t) ``` ### Syntax of Enums @@ -153,7 +154,7 @@ The changes are specified below as deltas with respect to the Scala syntax given 1. 
Enum definitions are defined as follows: - ```ebnf + ``` TmplDef ::= `enum' EnumDef EnumDef ::= id ClassConstr [`extends' [ConstrApps]] EnumBody EnumBody ::= [nl] ‘{’ [SelfType] EnumStat {semi EnumStat} ‘}’ @@ -163,7 +164,7 @@ The changes are specified below as deltas with respect to the Scala syntax given 2. Cases of enums are defined as follows: - ```ebnf + ``` EnumCase ::= `case' (id ClassConstr [`extends' ConstrApps]] | ids) ``` diff --git a/docs/docs/reference/enums/desugarEnums.md b/docs/_docs/reference/enums/desugarEnums.md similarity index 96% rename from docs/docs/reference/enums/desugarEnums.md rename to docs/_docs/reference/enums/desugarEnums.md index 6430724e8572..95df1a7513ce 100644 --- a/docs/docs/reference/enums/desugarEnums.md +++ b/docs/_docs/reference/enums/desugarEnums.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Translation of Enums and ADTs" +movedTo: https://docs.scala-lang.org/scala3/reference/enums/desugarEnums.html --- The compiler expands enums and their cases to code that only uses @@ -41,7 +42,7 @@ map into `case class`es or `val`s. ```scala sealed abstract class E ... extends with scala.reflect.Enum { import E.{ } - + } object E { } ``` @@ -176,10 +177,10 @@ If `E` contains at least one simple case, its companion object will define in ad ```scala private def $new(_$ordinal: Int, $name: String) = - new E with runtime.EnumValue: - def ordinal = _$ordinal - override def productPrefix = $name // if not overridden in `E` - override def toString = $name // if not overridden in `E` + new E with runtime.EnumValue: + def ordinal = _$ordinal + override def productPrefix = $name // if not overridden in `E` + override def toString = $name // if not overridden in `E` ``` The anonymous class also implements the abstract `Product` methods that it inherits from `Enum`. 
diff --git a/docs/_docs/reference/enums/enums-index.md b/docs/_docs/reference/enums/enums-index.md new file mode 100644 index 000000000000..794c0130c9ec --- /dev/null +++ b/docs/_docs/reference/enums/enums-index.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Enums" +movedTo: https://docs.scala-lang.org/scala3/reference/enums.html +--- + +This chapter documents enums in Scala 3. diff --git a/docs/_docs/reference/enums/enums.md b/docs/_docs/reference/enums/enums.md new file mode 100644 index 000000000000..fda03d2dee81 --- /dev/null +++ b/docs/_docs/reference/enums/enums.md @@ -0,0 +1,194 @@ +--- +layout: doc-page +title: "Enumerations" +movedTo: https://docs.scala-lang.org/scala3/reference/enums/enums.html +--- + +An enumeration is used to define a type consisting of a set of named values. + +```scala +enum Color: + case Red, Green, Blue +``` + +This defines a new `sealed` class, `Color`, with three values, `Color.Red`, +`Color.Green`, `Color.Blue`. The color values are members of `Color`s +companion object. + +### Parameterized enums + +Enums can be parameterized. + +```scala +enum Color(val rgb: Int): + case Red extends Color(0xFF0000) + case Green extends Color(0x00FF00) + case Blue extends Color(0x0000FF) +``` + +As the example shows, you can define the parameter value by using an +explicit extends clause. + +### Methods defined for enums + +The values of an enum correspond to unique integers. The integer +associated with an enum value is returned by its `ordinal` method: + +```scala +scala> val red = Color.Red +val red: Color = Red +scala> red.ordinal +val res0: Int = 0 +``` + +The companion object of an enum also defines three utility methods. +The `valueOf` method obtains an enum value +by its name. The `values` method returns all enum values +defined in an enumeration in an `Array`. The `fromOrdinal` +method obtains an enum value from its ordinal (`Int`) value. 
+ +```scala +scala> Color.valueOf("Blue") +val res0: Color = Blue +scala> Color.values +val res1: Array[Color] = Array(Red, Green, Blue) +scala> Color.fromOrdinal(0) +val res2: Color = Red +``` + +### User-defined members of enums + +It is possible to add your own definitions to an enum. Example: + +```scala +enum Planet(mass: Double, radius: Double): + private final val G = 6.67300E-11 + def surfaceGravity = G * mass / (radius * radius) + def surfaceWeight(otherMass: Double) = otherMass * surfaceGravity + + case Mercury extends Planet(3.303e+23, 2.4397e6) + case Venus extends Planet(4.869e+24, 6.0518e6) + case Earth extends Planet(5.976e+24, 6.37814e6) + case Mars extends Planet(6.421e+23, 3.3972e6) + case Jupiter extends Planet(1.9e+27, 7.1492e7) + case Saturn extends Planet(5.688e+26, 6.0268e7) + case Uranus extends Planet(8.686e+25, 2.5559e7) + case Neptune extends Planet(1.024e+26, 2.4746e7) +end Planet +``` + +It is also possible to define an explicit companion object for an enum: + +```scala +object Planet: + def main(args: Array[String]) = + val earthWeight = args(0).toDouble + val mass = earthWeight / Earth.surfaceGravity + for p <- values do + println(s"Your weight on $p is ${p.surfaceWeight(mass)}") +end Planet +``` + +### Deprecation of Enum Cases + +As a library author, you may want to signal that an enum case is no longer intended for use. However you could still want to gracefully handle the removal of a case from your public API, such as special casing deprecated cases. + +To illustrate, say that the `Planet` enum originally had an additional case: + +```diff + enum Planet(mass: Double, radius: Double): + ... + case Neptune extends Planet(1.024e+26, 2.4746e7) ++ case Pluto extends Planet(1.309e+22, 1.1883e3) + end Planet +``` + +We now want to deprecate the `Pluto` case. First we add the `scala.deprecated` annotation to `Pluto`: + +```diff + enum Planet(mass: Double, radius: Double): + ... 
+ case Neptune extends Planet(1.024e+26, 2.4746e7) +- case Pluto extends Planet(1.309e+22, 1.1883e3) ++ ++ @deprecated("refer to IAU definition of planet") ++ case Pluto extends Planet(1.309e+22, 1.1883e3) + end Planet +``` + +Outside the lexical scopes of `enum Planet` or `object Planet`, references to `Planet.Pluto` will produce a deprecation warning, but within those scopes we can still reference it to implement introspection over the deprecated cases: + +```scala +trait Deprecations[T <: reflect.Enum] { + extension (t: T) def isDeprecatedCase: Boolean +} + +object Planet { + given Deprecations[Planet] with { + extension (p: Planet) + def isDeprecatedCase = p == Pluto + } +} +``` + +We could imagine that a library may use [type class derivation](../contextual/derivation.md) to automatically provide an instance for `Deprecations`. + +### Compatibility with Java Enums + +If you want to use the Scala-defined enums as [Java enums](https://docs.oracle.com/javase/tutorial/java/javaOO/enum.html), you can do so by extending +the class `java.lang.Enum`, which is imported by default, as follows: + +```scala +enum Color extends Enum[Color] { case Red, Green, Blue } +``` + +The type parameter comes from the Java enum [definition](https://docs.oracle.com/javase/8/docs/api/index.html?java/lang/Enum.html) and should be the same as the type of the enum. +There is no need to provide constructor arguments (as defined in the Java API docs) to `java.lang.Enum` when extending it – the compiler will generate them automatically. + +After defining `Color` like that, you can use it like you would a Java enum: + +```scala +scala> Color.Red.compareTo(Color.Green) +val res15: Int = -1 +``` + +For a more in-depth example of using Scala 3 enums from Java, see [this test](https://github.com/lampepfl/dotty/tree/main/tests/run/enum-java). In the test, the enums are defined in the `MainScala.scala` file and used from a Java source, `Test.java`. 
+ +### Implementation + +Enums are represented as `sealed` classes that extend the `scala.reflect.Enum` trait. +This trait defines a single public method, `ordinal`: + +```scala +package scala.reflect + +/** A base trait of all Scala enum definitions */ +transparent trait Enum extends Any, Product, Serializable: + + /** A number uniquely identifying a case of an enum */ + def ordinal: Int +``` + +Enum values with `extends` clauses get expanded to anonymous class instances. +For instance, the `Venus` value above would be defined like this: + +```scala +val Venus: Planet = new Planet(4.869E24, 6051800.0): + def ordinal: Int = 1 + override def productPrefix: String = "Venus" + override def toString: String = "Venus" +``` + +Enum values without `extends` clauses all share a single implementation +that can be instantiated using a private method that takes a tag and a name as arguments. +For instance, the first +definition of value `Color.Red` above would expand to: + +```scala +val Red: Color = $new(0, "Red") +``` + +### Reference + +For more information, see [Issue #1970](https://github.com/lampepfl/dotty/issues/1970) and +[PR #4003](https://github.com/lampepfl/dotty/pull/4003). diff --git a/docs/_docs/reference/experimental/canthrow.md b/docs/_docs/reference/experimental/canthrow.md new file mode 100644 index 000000000000..222bc63b6739 --- /dev/null +++ b/docs/_docs/reference/experimental/canthrow.md @@ -0,0 +1,281 @@ +--- +layout: doc-page +title: "CanThrow Capabilities" +movedTo: https://docs.scala-lang.org/scala3/reference/experimental/canthrow.html +--- + +This page describes experimental support for exception checking in Scala 3. It is enabled by the language import +```scala +import language.experimental.saferExceptions +``` +The reason for publishing this extension now is to get feedback on its usability. We are working on more advanced type systems that build on the general ideas put forward in the extension. 
Those type systems have application areas beyond checked exceptions. Exception checking is a useful starting point since exceptions are familiar to all Scala programmers and their current treatment leaves room for improvement. + +## Why Exceptions? + +Exceptions are an ideal mechanism for error handling in many situations. They serve the intended purpose of propagating error conditions with a minimum of boilerplate. They cause zero overhead for the "happy path", which means they are very efficient as long as errors arise infrequently. Exceptions are also debug friendly, since they produce stack traces that can be inspected at the handler site. So one never has to guess where an erroneous condition originated. + +## Why Not Exceptions? + +However, exceptions in current Scala and many other languages are not reflected in the type system. This means that an essential part of the contract of a function - i.e. what exceptions can it produce? - is not statically checked. Most people acknowledge that this is a problem, but that so far the alternative of checked exceptions was just too painful to be considered. A good example are Java checked exceptions, which do the right thing in principle, but are widely regarded as a mistake since they are so difficult to deal with. So far, none of the successor languages that are modeled after Java or that build on the JVM has copied this feature. See for example Anders Hejlsberg's [statement on why C# does not have checked exceptions](https://www.artima.com/articles/the-trouble-with-checked-exceptions). + +## The Problem With Java's Checked Exceptions + +The main problem with [Java's checked exception model](https://docs.oracle.com/javase/specs/jls/se8/html/jls-11.html#jls-11.2) is its inflexibility, which is due to lack of polymorphism. 
Consider for instance the `map` function which is declared on `List[A]` like this: +```scala + def map[B](f: A => B): List[B] +``` +In the Java model, function `f` is not allowed to throw a checked exception. So the following call would be invalid: +```scala + xs.map(x => if x < limit then x * x else throw LimitExceeded()) +``` +The only way around this would be to wrap the checked exception `LimitExceeded` in an unchecked [`java.lang.RuntimeException`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/RuntimeException.html) that is caught at the callsite and unwrapped again. Something like this: +```scala + try + xs.map(x => if x < limit then x * x else throw Wrapper(LimitExceeded())) + catch case Wrapper(ex) => throw ex +``` +Ugh! No wonder checked exceptions in Java are not very popular. + +## Monadic Effects + +So the dilemma is that exceptions are easy to use only as long as we forget static type checking. This has caused many people working with Scala to abandon exceptions altogether and to use an error monad like [`Either`](https://scala-lang.org/api/3.x/scala/util/Either.html) instead. This can work in many situations but is not without its downsides either. It makes code a lot more complicated and harder to refactor. It means one is quickly confronted with the problem how to work with several monads. In general, dealing with one monad at a time in Scala is straightforward but dealing with several monads together is much less pleasant since monads don't compose. A great number of techniques have been proposed, implemented, and promoted to deal with this, from monad transformers, to free monads, to tagless final. But none of these techniques is universally liked; each introduces a complicated DSL that's hard to understand for non-experts, introduces runtime overheads, and makes debugging difficult. 
In the end, quite a few developers prefer to work instead with a single "super-monad" like [`ZIO`](https://zio.dev/version-1.x/datatypes/core/zio) that has error propagation built in alongside other aspects. This one-size fits all approach can work very nicely, even though (or is it because?) it represents an all-encompassing framework. + +However, a programming language is not a framework; it has to cater also for those applications that do not fit the framework's use cases. So there's still a strong motivation for getting exception checking right. + +## From Effects To Capabilities + +Why does `map` work so poorly with Java's checked exception model? It's because +`map`'s signature limits function arguments to not throw checked exceptions. We could try to come up with a more polymorphic formulation of `map`. For instance, it could look like this: +```scala + def map[B, E](f: A => B throws E): List[B] throws E +``` +This assumes a type `A throws E` to indicate computations of type `A` that can throw an exception of type `E`. But in practice the overhead of the additional type parameters makes this approach unappealing as well. Note in particular that we'd have to parameterize _every method_ that takes a function argument that way, so the added overhead of declaring all these exception types looks just like a sort of ceremony we would like to avoid. + +But there is a way to avoid the ceremony. Instead of concentrating on possible _effects_ such as "this code might throw an exception", concentrate on _capabilities_ such as "this code needs the capability to throw an exception". From a standpoint of expressiveness this is quite similar. But capabilities can be expressed as parameters whereas traditionally effects are expressed as some addition to result values. It turns out that this can make a big difference! + +## The `CanThrow` Capability + +In the _effects as capabilities_ model, an effect is expressed as an (implicit) parameter of a certain type. 
For exceptions we would expect parameters of type +[`CanThrow[E]`](https://scala-lang.org/api/3.x/scala/CanThrow.html) where `E` stands for the exception that can be thrown. Here is the definition of `CanThrow`: +```scala +erased class CanThrow[-E <: Exception] +``` +This shows another experimental Scala feature: [erased definitions](./erased-defs.md). Roughly speaking, values of an erased class do not generate runtime code; they are erased before code generation. This means that all `CanThrow` capabilities are compile-time only artifacts; they do not have a runtime footprint. + +Now, if the compiler sees a `throw Exc()` construct where `Exc` is a checked exception, it will check that there is a capability of type `CanThrow[Exc]` that can be summoned as a given. It's a compile-time error if that's not the case. + +How can the capability be produced? There are several possibilities: + +Most often, the capability is produced by having a using clause `(using CanThrow[Exc])` in some enclosing scope. This roughly corresponds to a [`throws`](https://docs.oracle.com/javase/specs/jls/se7/html/jls-8.html#jls-8.4.6) clause in Java. The analogy is even stronger since alongside [`CanThrow`](https://scala-lang.org/api/3.x/scala/CanThrow.html) there is also the following type alias defined in the [`scala`](https://scala-lang.org/api/3.x/scala.html) package: +```scala +infix type A = Int +``` +```scala +infix type $throws[R, +E <: Exception] = CanThrow[E] ?=> R +``` +That is, [`R $throws E`](https://scala-lang.org/api/3.x/scala/runtime.html#$throws-0) is a context function type that takes an implicit `CanThrow[E]` parameter and that returns a value of type `R`. What's more, the compiler will translate an infix types with `throws` as the operator to `$throws` applications according to the rules +``` + A throws E --> A $throws E + A throws E₁ | ... | Eᵢ --> A $throws E₁ ... 
$throws Eᵢ +``` +Therefore, a method written like this: +```scala +def m(x: T)(using CanThrow[E]): U +``` +can alternatively be expressed like this: +```scala +def m(x: T): U throws E +``` +Also the capability to throw multiple types of exceptions can be expressed in a few ways as shown in the examples below: +```scala +def m(x: T): U throws E1 | E2 +def m(x: T): U throws E1 throws E2 +def m(x: T)(using CanThrow[E1], CanThrow[E2]): U +def m(x: T)(using CanThrow[E1])(using CanThrow[E2]): U +def m(x: T)(using CanThrow[E1]): U throws E2 +``` + +**Note 1:** A signature like +```scala +def m(x: T)(using CanThrow[E1 | E2]): U +``` +would also allow throwing `E1` or `E2` inside the method's body but might cause problems when someone tried to call this method +from another method declaring its `CanThrow` capabilities like in the earlier examples. +This is because `CanThrow` has a contravariant type parameter so `CanThrow[E1 | E2]` is a subtype of both `CanThrow[E1]` and `CanThrow[E2]`. +Hence the presence of a given instance of `CanThrow[E1 | E2]` in scope satisfies the requirement for `CanThrow[E1]` and `CanThrow[E2]` +but given instances of `CanThrow[E1]` and `CanThrow[E2]` cannot be combined to provide an instance of `CanThrow[E1 | E2]`. + +**Note 2:** One should keep in mind that `|` binds its left and right arguments more tightly than `throws` so `A | B throws E1 | E2` means `(A | B) throws (E1 | E2)`, not `A | (B throws E1) | E2`. + +The `CanThrow`/`throws` combo essentially propagates the `CanThrow` requirement outwards. But where are these capabilities created in the first place? That's in the `try` expression. Given a `try` like this: + +```scala +try + body +catch + case ex1: Ex1 => handler1 + ... + case exN: ExN => handlerN +``` +the compiler generates an accumulated capability of type `CanThrow[Ex1 | ... | ExN]` that is available as a given in the scope of `body`.
It does this by augmenting the `try` roughly as follows: +```scala +try + erased given CanThrow[Ex1 | ... | ExN] = compiletime.erasedValue + body +catch ... +``` +Note that the right-hand side of the synthesized given is `compiletime.erasedValue`. This is OK since +this given is erased; it will not be executed at runtime. + +**Note 1:** The [`saferExceptions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$experimental$$saferExceptions$.html) feature is designed to work only with checked exceptions. An exception type is _checked_ if it is a subtype of +`Exception` but not of `RuntimeException`. The signature of `CanThrow` still admits `RuntimeException`s since `RuntimeException` is a proper subtype of its bound, `Exception`. But no capabilities will be generated for `RuntimeException`s. Furthermore, `throws` clauses +also may not refer to `RuntimeException`s. + +**Note 2:** To keep things simple, the compiler will currently only generate capabilities +for catch clauses of the form +```scala + case ex: Ex => +``` +where `ex` is an arbitrary variable name (`_` is also allowed), and `Ex` is an arbitrary +checked exception type. Constructor patterns such as `Ex(...)` or patterns with guards +are not allowed. The compiler will issue an error if one of these is used to catch +a checked exception and `saferExceptions` is enabled. + +## Example + +That's it. Let's see it in action in an example. First, add an import +```scala +import language.experimental.saferExceptions +``` +to enable exception checking. Now, define an exception `LimitExceeded` and +a function `f` like this: +```scala +val limit = 10e9 +class LimitExceeded extends Exception +def f(x: Double): Double = + if x < limit then x * x else throw LimitExceeded() +``` +You'll get this error message: +``` + if x < limit then x * x else throw LimitExceeded() + ^^^^^^^^^^^^^^^^^^^^^ +The capability to throw exception LimitExceeded is missing.
+``` +The capability can be provided by one of the following: + + - Adding a using clause `(using CanThrow[LimitExceeded])` to the definition of the enclosing method + - Adding `throws LimitExceeded` clause after the result type of the enclosing method + - Wrapping this piece of code with a `try` block that catches `LimitExceeded` + +The following import might fix the problem: +```scala + import unsafeExceptions.canThrowAny +``` +As the error message implies, you have to declare that `f` needs the capability to throw a `LimitExceeded` exception. The most concise way to do so is to add a `throws` clause: +```scala +def f(x: Double): Double throws LimitExceeded = + if x < limit then x * x else throw LimitExceeded() +``` +Now put a call to `f` in a `try` that catches `LimitExceeded`: +```scala +@main def test(xs: Double*) = + try println(xs.map(f).sum) + catch case ex: LimitExceeded => println("too large") +``` +Run the program with some inputs: +``` +> scala test 1 2 3 +14.0 +> scala test +0.0 +> scala test 1 2 3 100000000000 +too large +``` +Everything typechecks and works as expected. But wait - we have called `map` without any ceremony! How did that work? Here's how the compiler expands the `test` function: +```scala +// compiler-generated code +@main def test(xs: Double*) = + try + erased given ctl: CanThrow[LimitExceeded] = compiletime.erasedValue + println(xs.map(x => f(x)(using ctl)).sum) + catch case ex: LimitExceeded => println("too large") +``` +The `CanThrow[LimitExceeded]` capability is passed in a synthesized `using` clause to `f`, since `f` requires it. Then the resulting closure is passed to `map`. The signature of `map` does not have to account for effects. It takes a closure as always, but that +closure may refer to capabilities in its free variables. This means that `map` is +already effect polymorphic even though we did not change its signature at all. 
+So the takeaway is that the effects as capabilities model naturally provides for effect polymorphism whereas this is something that other approaches struggle with. + +## Gradual Typing Via Imports + +Another advantage is that the model allows a gradual migration from current unchecked exceptions to safer exceptions. Imagine for a moment that [`experimental.saferExceptions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$experimental$$saferExceptions$.html) is turned on everywhere. There would be lots of code that breaks since functions have not yet been properly annotated with `throws`. But it's easy to create an escape hatch that lets us ignore the breakages for a while: simply add the import +```scala +import scala.unsafeExceptions.canThrowAny +``` +This will provide the [`CanThrow`](https://scala-lang.org/api/3.x/scala/CanThrow.html) capability for any exception, and thereby allow +all throws and all other calls, no matter what the current state of `throws` declarations is. Here's the +definition of [`canThrowAny`](https://scala-lang.org/api/3.x/scala/unsafeExceptions$.html#canThrowAny-0): +```scala +package scala +object unsafeExceptions: + given canThrowAny: CanThrow[Exception] = ??? +``` +Of course, defining a global capability like this amounts to cheating. But the cheating is useful for gradual typing. The import could be used to migrate existing code, or to +enable more fluid explorations of code without regard for complete exception safety. At the end of these migrations or explorations the import should be removed. + +## Scope Of the Extension + +To summarize, the extension for safer exception checking consists of the following elements: + + - It adds to the standard library the class `scala.CanThrow`, the type `scala.$throws`, and the [`scala.unsafeExceptions`](https://scala-lang.org/api/3.x/scala/unsafeExceptions$.html) object, as they were described above. 
+ - It adds some desugaring rules to rewrite `throws` types to cascaded `$throws` types. + - It augments the type checking of `throw` by _demanding_ a `CanThrow` capability for the thrown exception. + - It augments the type checking of `try` by _providing_ `CanThrow` capabilities for every caught exception. + +That's all. It's quite remarkable that one can do exception checking in this way without any special additions to the type system. We just need regular givens and context functions. Any runtime overhead is eliminated using `erased`. + +## Caveats + +Our capability model allows us to declare and check the thrown exceptions of first-order code. But as it stands, it does not give us enough mechanism to enforce the _absence_ of +capabilities for arguments to higher-order functions. Consider a variant `pureMap` +of `map` that should enforce that its argument does not throw exceptions or have any other effects (maybe because it wants to reorder computations transparently). Right now +we cannot enforce that since the function argument to `pureMap` can capture arbitrary +capabilities in its free variables without them showing up in its type. One possible way to +address this would be to introduce a pure function type (maybe written `A -> B`). Pure functions are not allowed to close over capabilities. Then `pureMap` could be written +like this: +```scala + def pureMap(f: A -> B): List[B] +``` +Another area where the lack of purity requirements shows up is when capabilities escape from bounded scopes. Consider the following function +```scala +def escaped(xs: Double*): () => Int = + try () => xs.map(f).sum + catch case ex: LimitExceeded => -1 +``` +With the system presented here, this function typechecks, with expansion +```scala +// compiler-generated code +def escaped(xs: Double*): () => Int = + try + given ctl: CanThrow[LimitExceeded] = ???
+ () => xs.map(x => f(x)(using ctl)).sum + catch case ex: LimitExceeded => -1 +``` +But if you try to call `escaped` like this +```scala +val g = escaped(1, 2, 1000000000) +g() +``` +the result will be a `LimitExceeded` exception thrown at the second line where `g` is called. What's missing is that `try` should enforce that the capabilities it generates do not escape as free variables in the result of its body. It makes sense to describe such scoped effects as _ephemeral capabilities_ - they have lifetimes that cannot be extended to delayed code in a lambda. + + +## Outlook + +We are working on a new class of type system that supports ephemeral capabilities by tracking the free variables of values. Once that research matures, it will hopefully be possible to augment the Scala language so that we can enforce the missing properties. + +And it would have many other applications besides: Exceptions are a special case of _algebraic effects_, which has been a very active research area over the last 20 years and is finding its way into programming languages (e.g. [Koka](https://koka-lang.github.io/koka/doc/book.html#why-handlers), [Eff](https://www.eff-lang.org/learn/), [Multicore OCaml](https://discuss.ocaml.org/t/multicore-ocaml-september-2021-effect-handlers-will-be-in-ocaml-5-0/8554), [Unison](https://www.unisonweb.org/docs/language-reference/#abilities-and-ability-handlers)). In fact, algebraic effects have been characterized as being equivalent to exceptions with an additional _resume_ operation. The techniques developed here for exceptions can probably be generalized to other classes of algebraic effects. + +But even without these additional mechanisms, exception checking is already useful as it is. It gives a clear path forward to make code that uses exceptions safer, better documented, and easier to refactor. The only loophole arises for scoped capabilities - here we have to verify manually that these capabilities do not escape. 
Specifically, a `try` always has to be placed in the same computation stage as the throws that it enables. + +Put another way: If the status quo is 0% static checking since 100% is too painful, then an alternative that gives you 95% static checking with great ergonomics looks like a win. And we might still get to 100% in the future. + +For more info, see also our [paper at the ACM Scala Symposium 2021](https://infoscience.epfl.ch/record/290885). diff --git a/docs/_docs/reference/experimental/cc.md b/docs/_docs/reference/experimental/cc.md new file mode 100644 index 000000000000..592d410a4502 --- /dev/null +++ b/docs/_docs/reference/experimental/cc.md @@ -0,0 +1,739 @@ +--- +layout: doc-page +title: "Capture Checking" +--- + +Capture checking is a research project that modifies the Scala type system to track references to capabilities in values. It is currently +implemented in an experimental branch [cc-experiment](https://github.com/lampepfl/dotty/tree/cc-experiment) in the dotty +repo and can be enabled on this branch with a `-Ycc` compiler option. + +To get an idea what capture checking can do, let's start with a small example: +```scala +def usingLogFile[T](op: FileOutputStream => T): T = + val logFile = FileOutputStream("log") + val result = op(logFile) + logFile.close() + result +``` +The `usingLogFile` method invokes a given operation with a fresh log file as parameter. Once the operation has ended, the log file is closed and the +operation's result is returned. This is a typical _try-with-resources_ pattern, similar to many other such patterns which are often supported by special language constructs in other languages. + +The problem is that `usingLogFile`'s implementation is not entirely safe. One can +undermine it by passing an operation that performs the logging at some later point +after it has terminated. 
For instance: +```scala +val later = usingLogFile { file => () => file.write(0) } +later() // crash +``` +When `later` is executed it tries to write to a file that is already closed, which +results in an uncaught `IOException`. + +Capture checking gives us the mechanism to prevent such errors _statically_. To +prevent unsafe usages of `usingLogFile`, we can declare it like this: +```scala +def usingLogFile[T](op: ({*} FileOutputStream) => T): T = + // same body as before +``` +The only thing that's changed is that the `FileOutputStream` parameter of `op` is now +tagged with `{*}`. We'll see that this turns the parameter into a _capability_ whose lifetime is tracked. + +If we now try to define the problematic value `later`, we get a static error: +``` + | val later = usingLogFile { f => () => f.write(0) } + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |The expression's type {*} () -> Unit is not allowed to capture the root capability `*`. + |This usually means that a capability persists longer than its allowed lifetime. +``` +In this case, it was easy to see that the `logFile` capability escapes in the closure passed to `usingLogFile`. But capture checking also works for more complex cases. +For instance, capture checking is able to distinguish between the following safe code: +```scala +val xs = usingLogFile { f => + List(1, 2, 3).map { x => f.write(x); x * x } +} +``` +and the following unsafe one: +```scala +val xs = usingLogFile { f => + LazyList(1, 2, 3).map { x => f.write(x); x * x } +} +``` +An error would be issued in the second case, but not the first one (this assumes a capture-aware +formulation of `LazyList` which we will present later in this page). + +It turns out that capture checking has very broad applications. Besides the various +try-with-resources patterns, it can also be a key part to the solutions of many other long standing problems in programming languages. Among them: + + - How to have a simple and flexible system for checked exceptions. 
We show later + how capture checking enables a clean and fully safe system for checked exceptions in Scala. + - How to address the problem of effect polymorphism in general. + - How to solve the "what color is your function?" problem of mixing synchronous + and asynchronous computations. + - How to do region-based allocation, safely, + - How to reason about capabilities associated with memory locations. + +The following sections explain in detail how capture checking works in Scala 3. + + +## Overview + +The capture checker extension introduces a new kind of types and it enforces some rules for working with these types. + +Capture checking is enabled by the compiler option `-Ycc`. If the option is not given, the new +type forms can still be written but they are not checked for consistency, because they are +treated simply as certain uninterpreted annotated types. + +## Capabilities and Capturing Types + +Capture checking is done in terms of _capturing types_ of the form +`{c₁, ..., cᵢ} T`. Here `T` is a type, and `{c₁, ..., cᵢ}` is a _capture set_ consisting of references to capabilities `c₁, ..., cᵢ`. + +A _capability_ is syntactically a method- or class-parameter, a local variable, or the `this` of an enclosing class. The type of a capability +must be a capturing type with a non-empty capture set. We also say that +variables that are capabilities are _tracked_. + +In a sense, every +capability gets its authority from some other, more sweeping capability which it captures. The most sweeping capability, from which ultimately all others are derived is written `*`. We call it the _universal capability_. + +Here is an example: +```scala +class FileSystem + +class Logger(fs: {*} FileSystem): + def log(s: String): Unit = ... 
// Write to a log file, using `fs` + +def test(fs: {*} FileSystem) = + val l: {fs} Logger = Logger(fs) + l.log("hello world!") + val xs: {l} LazyList[Int] = + LazyList.from(1) + .map { i => + l.log(s"computing elem # $i") + i * i + } + xs +``` +Here, the `test` method takes a `FileSystem` as a parameter. `fs` is a capability since its type has a non-empty capture set. The capability is passed to the `Logger` constructor +and retained as a field in class `Logger`. Hence, the local variable `l` has type +`{fs} Logger`: it is a `Logger` which retains the `fs` capability. + +The second variable defined in `test` is `xs`, a lazy list that is obtained from +`LazyList.from(1)` by logging and mapping consecutive numbers. Since the list is lazy, +it needs to retain the reference to the logger `l` for its computations. Hence, the +type of the list is `{l} LazyList[Int]`. On the other hand, since `xs` only logs but does +not do other file operations, it retains the `fs` capability only indirectly. That's why +`fs` does not show up in the capture set of `xs`. + +Capturing types come with a subtype relation where types with "smaller" capture sets are subtypes of types with larger sets (the _subcapturing_ relation is defined in more detail below). If a type `T` does not have a capture set, it is called _pure_, and is a subtype of +any capturing type that adds a capture set to `T`. + +## Function Types + +The usual function type `A => B` now stands for a function that can capture arbitrary capabilities. We call such functions +_impure_. By contrast, the new single arrow function type `A -> B` stands for a function that cannot capture any capabilities, or otherwise said, is _pure_. One can add a capture set in front of an otherwise pure function. +For instance, `{c, d} A -> B` would be a function that can capture capabilities `c` and `d`, but no others. + +The impure function type `A => B` is treated as an alias for `{*} A -> B`. 
That is, impure functions are functions that can capture anything. + +Function types and captures both associate to the right, so +```scala +{c} A -> {d} B -> C +``` +is the same as +```scala +{c} (A -> {d} (B -> C)) +``` +Contrast with +```scala +({c} A) -> ({d} B) -> C +``` +which is a curried pure function over argument types that can capture `c` and `d`, respectively. + +Analogous conventions apply to context function types. `A ?=> B` is an impure context function, with `A ?-> B` as its pure complement. + +**Note 1:** The identifiers `->` and `?->` are now treated as soft keywords when used as infix type operators. They are +still available as regular identifiers for terms. For instance, the mapping syntax `Map("x" -> 1, "y" -> 2)` is still supported since it only applies to terms. + +**Note 2:** The distinctions between pure vs impure function types do not apply to methods. In fact, since methods are not values they never capture anything directly. References to +capabilities in a method are instead counted in the capture set of the enclosing object. + +## By-Name Parameter Types + +A convention analogous to function types also extends to by-name parameters. In +```scala +def f(x: => Int): Int +``` +the actual argument can refer to arbitrary capabilities. So the following would be OK: +```scala +f(if p(y) then throw Ex() else 1) +``` +On the other hand, if `f` was defined like this +```scala +def f(x: -> Int): Int +``` +the actual argument to `f` could not refer to any capabilities, so the call above would be rejected. +One can also allow specific capabilities like this: +```scala +def f(x: {c}-> Int): Int +``` +Here, the actual argument to `f` is allowed to use the `c` capability but no others. + +**Note**: It is strongly recommended to write the capability set and the arrow `->` without intervening spaces, +as otherwise the notation would look confusingly like a function type. + +## Subtyping and Subcapturing + +Capturing influences subtyping. 
As usual we write `T₁ <: T₂` to express that the type +`T₁` is a subtype of the type `T₂`, or equivalently, that `T₁` conforms to `T₂`. An +analogous _subcapturing_ relation applies to capture sets. If `C₁` and `C₂` are capture sets, we write `C₁ <: C₂` to express that `C₁` _is covered by_ `C₂`, or, swapping the operands, that `C₂` _covers_ `C₁`. + +Subtyping extends as follows to capturing types: + + - Pure types are subtypes of capturing types. That is, `T <: C T`, for any type `T`, capturing set `C`. + - For capturing types, smaller capturing sets produce subtypes: `C₁ T₁ <: C₂ T₂` if + `C₁ <: C₂` and `T₁ <: T₂`. + +A subcapturing relation `C₁ <: C₂` holds if `C₂` _accounts for_ every element `c` in `C₁`. This means one of the following three conditions must be true: + + - `c ∈ C₂`, + - `c` refers to a parameter of some class `Cls` and `C₂` contains `Cls.this`, + - `c`'s type has capturing set `C` and `C₂` accounts for every element of `C` (that is, `C <: C₂`). + + +**Example 1.** Given +```scala +fs: {*} FileSystem +ct: {*} CanThrow[Exception] +l : {fs} Logger +``` +we have +``` +{l} <: {fs} <: {*} +{fs} <: {fs, ct} <: {*} +{ct} <: {fs, ct} <: {*} +``` +The set consisting of the root capability `{*}` covers every other capture set. This is +a consequence of the fact that, ultimately, every capability is created from `*`. + +**Example 2.** Consider again the FileSystem/Logger example from before. `LazyList[Int]` is a proper subtype of `{l} LazyList[Int]`. So if the `test` method in that example +was declared with a result type `LazyList[Int]`, we'd get a type error. Here is the error message: +``` +11 |def test(using fs: {*} FileSystem): LazyList[Int] = { + | ^ + | Found: {fs} LazyList[Int] + | Required: LazyList[Int] +``` +Why does it say `{fs} LazyList[Int]` and not `{l} LazyList[Int]`, which is, after all, the type of the returned value `xs`? 
The reason is that `l` is a local variable in the body of `test`, so it cannot be referred to in a type outside that body. What happens instead is that the type is _widened_ to the smallest supertype that does not mention `l`. Since `l` has capture set `fs`, we have that `{fs}` covers `{l}`, and `{fs}` is acceptable in a result type of `test`, so `{fs}` is the result of that widening. +This widening is called _avoidance_; it is not specific to capture checking but applies to all variable references in Scala types. + +## Capability Classes + +Classes like `CanThrow` or `FileSystem` have the property that their values are always intended to be capabilities. We can make this intention explicit and save boilerplate by declaring these classes with a `@capability` annotation. + +The capture set of a capability class type is always `{*}`. This means we could equivalently express the `FileSystem` and `Logger` classes as follows: +```scala +import annotation.capability + +@capability class FileSystem + +class Logger(using FileSystem): + def log(s: String): Unit = ??? + +def test(using fs: FileSystem) = + val l: {fs} Logger = Logger() + ... +``` +In this version, `FileSystem` is a capability class, which means that the `{*}` capture set is implied on the parameters of `Logger` and `test`. Writing the capture set explicitly produces a warning: +```scala +class Logger(using {*} FileSystem): + ^^^^^^^^^^^^^^ + redundant capture: FileSystem already accounts for * +``` +Another, unrelated change in the version of the last example here is that the `FileSystem` capability is now passed as an implicit parameter. It is quite natural to model capabilities with implicit parameters since it greatly reduces the wiring overhead once multiple capabilities are in play. + +## Capture Checking of Closures + +If a closure refers to capabilities in its body, it captures these capabilities in its type. 
For instance, consider: +```scala +def test(fs: FileSystem): {fs} String -> Unit = + (x: String) => Logger(fs).log(x) +``` +Here, the body of `test` is a lambda that refers to the capability `fs`, which means that `fs` is retained in the lambda. +Consequently, the type of the lambda is `{fs} String -> Unit`. + +**Note:** Function values are always written with `=>` (or `?=>` for context functions). There is no syntactic +distinction for pure _vs_ impure function values. The distinction is only made in their types. + +A closure also captures all capabilities that are captured by the functions +it calls. For instance, in +```scala +def test(fs: FileSystem) = + def f() = g() + def g() = (x: String) => Logger(fs).log(x) + f +``` +the result of `test` has type `{fs} String -> Unit` even though function `f` itself does not refer to `fs`. + +## Capture Checking of Classes + +The principles for capture checking closures also apply to classes. For instance, consider: +```scala +class Logger(using fs: FileSystem): + def log(s: String): Unit = ... summon[FileSystem] ... + +def test(xfs: FileSystem): {xfs} Logger = + Logger(xfs) +``` +Here, class `Logger` retains the capability `fs` as a (private) field. Hence, the result +of `test` is of type `{xfs} Logger` + +Sometimes, a tracked capability is meant to be used only in the constructor of a class, but +is not intended to be retained as a field. This fact can be communicated to the capture +checker by declaring the parameter as `@constructorOnly`. Example: +```scala +import annotation.constructorOnly + +class NullLogger(using @constructorOnly fs: FileSystem): + ... +def test2(using fs: FileSystem): NullLogger = NullLogger() // OK +``` + +The captured references of a class include _local capabilities_ and _argument capabilities_. Local capabilities are capabilities defined outside the class and referenced from its body. Argument capabilities are passed as parameters to the primary constructor of the class. 
Local capabilities are inherited: +the local capabilities of a superclass are also local capabilities of its subclasses. Example: + +```scala +@capability class Cap + +def test(a: Cap, b: Cap, c: Cap) = + class Super(y: Cap): + def f = a + class Sub(x: Cap) extends Super(x) + def g = b + Sub(c) +``` +Here class `Super` has local capability `a`, which gets inherited by class +`Sub` and is combined with `Sub`'s own local capability `b`. Class `Sub` also has an argument capability corresponding to its parameter `x`. This capability gets instantiated to `c` in the final constructor call `Sub(c)`. Hence, +the capture set of that call is `{a, b, c}`. + +The capture set of the type of `this` of a class is inferred by the capture checker, unless the type is explicitly declared with a self type annotation like this one: +```scala +class C: + self: {a, b} D => ... +``` +The inference observes the following constraints: + + - The type of `this` of a class `C` includes all captured references of `C`. + - The type of `this` of a class `C` is a subtype of the type of `this` + of each parent class of `C`. + - The type of `this` must observe all constraints where `this` is used. + +For instance, in +```scala +@capability class Cap +def test(c: Cap) = + class A: + val x: A = this + def f = println(c) // error +``` +we know that the type of `this` must be pure, since `this` is the right hand side of a `val` with type `A`. However, in the last line we find that the capture set of the class, and with it the capture set of `this`, would include `c`. 
This leads to a contradiction, and hence to a checking error: +``` +16 | def f = println(c) // error + | ^ + |(c : Cap) cannot be referenced here; it is not included in the allowed capture set {} +``` + +## Capture Tunnelling + +Consider the following simple definition of a `Pair` class: +```scala +class Pair[+A, +B](x: A, y: B): + def fst: A = x + def snd: B = y +``` +What happens if `Pair` is instantiated like this (assuming `ct` and `fs` are two capabilities in scope)? +```scala +def x: {ct} Int -> String +def y: {fs} Logger +def p = Pair(x, y) +``` +The last line will be typed as follows: +```scala +def p: Pair[{ct} Int -> String, {fs} Logger] = Pair(x, y) +``` +This might seem surprising. The `Pair(x, y)` value does capture capabilities `ct` and `fs`. Why don't they show up in its type at the outside? + +The answer is capture tunnelling. Once a type variable is instantiated to a capturing type, the +capture is not propagated beyond this point. On the other hand, if the type variable is instantiated +again on access, the capture information "pops out" again. For instance, even though `p` is technically pure because its capture set is empty, writing `p.fst` would record a reference to the captured capability `ct`. So if this access was put in a closure, the capability would again form part of the outer capture set. E.g. +```scala +() => p.fst : {ct} () -> {ct} Int -> String +``` +In other words, references to capabilities "tunnel through" in generic instantiations from creation to access; they do not affect the capture set of the enclosing generic data constructor applications. +This principle may seem surprising at first, but it is the key to make capture checking concise and practical. + +## Escape Checking + +The universal capability `*` should be conceptually available only as a parameter to the main program. Indeed, if it was available everywhere, capability checking would be undermined since one could mint new capabilities +at will. 
In line with this reasoning, some capture sets are restricted so that +they are not allowed to contain the universal capability. + +Specifically, if a capturing type is an instance of a type variable, that capturing type +is not allowed to carry the universal capability `{*}`. There's a connection to tunnelling here. +The capture set of a type has to be present in the environment when a type is instantiated from +a type variable. But `*` is not itself available as a global entity in the environment. Hence, +an error should result. + +We can now reconstruct how this principle produced the error in the introductory example, where +`usingLogFile` was declared like this: +```scala +def usingLogFile[T](op: ({*} FileOutputStream) => T): T = ... +``` +The error message was: +``` + | val later = usingLogFile { f => () => f.write(0) } + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |The expression's type {*} () -> Unit is not allowed to capture the root capability `*`. + |This usually means that a capability persists longer than its allowed lifetime. +``` +This error message was produced by the following logic: + + - The `f` parameter has type `{*} FileOutputStream`, which makes it a capability. + - Therefore, the type of the expression `() => f.write(0)` is `{f} () -> Unit`. + - This makes the whole type of the closure passed to `usingLogFile` the dependent function type + `(f: {*} FileOutputStream) -> {f} () -> Unit`. + - The expected type of the closure is a simple, parametric, impure function type `({*} FileOutputStream) => T`, + for some instantiation of the type variable `T`. + - The smallest supertype of the closure's dependent function type that is a parametric function type is + `({*} FileOutputStream) => {*} () -> Unit` + - Hence, the type variable `T` is instantiated to `{*} () -> Unit`, which causes the error. + +An analogous restriction applies to the type of a mutable variable.
+Another way one could try to undermine capture checking would be to +assign a closure with a local capability to a global variable. Maybe +like this: +```scala +var loophole: {*} () -> Unit = () => () +usingLogFile { f => + loophole = () => f.write(0) +} +loophole() +``` +But this will not compile either, since mutable variables cannot have universal capture sets. + +One also needs to prevent returning or assigning a closure with a local capability in an argument of a parametric type. For instance, here is a +slightly more refined attack: +```scala +class Cell[+A](x: A) +val sneaky = usingLogFile { f => Cell(() => f.write(0)) } +sneaky.x() +``` +At the point where the `Cell` is created, the capture set of the argument is `f`, which +is OK. But at the point of use, it is `*` (because `f` is no longer in scope), which causes again an error: +``` + | sneaky.x() + | ^^^^^^^^ + |The expression's type {*} () -> Unit is not allowed to capture the root capability `*`. + |This usually means that a capability persists longer than its allowed lifetime. +``` + +Looking at object graphs, we observe a monotonicity property: The capture set of an object `x` covers the capture sets of all objects reachable through `x`. This property is reflected in the type system by the following _monotonicity rule_: + + - In a class `C` with a field `f`, the capture set `{this}` covers the capture set `{this.f}` as well as the capture set of any application of `this.f` to pure arguments. + +## Checked Exceptions + +Scala enables checked exceptions through a language import. Here is an example, +taken from the [safer exceptions page](./canthrow.md), and also described in a +[paper](https://infoscience.epfl.ch/record/290885) presented at the + 2021 Scala Symposium. 
+```scala +import language.experimental.saferExceptions + +class LimitExceeded extends Exception + +val limit = 10e+10 +def f(x: Double): Double throws LimitExceeded = + if x < limit then x * x else throw LimitExceeded() +``` +The new `throws` clause expands into an implicit parameter that provides +a `CanThrow` capability. Hence, function `f` could equivalently be written +like this: +```scala +def f(x: Double)(using CanThrow[LimitExceeded]): Double = ... +``` +If the implicit parameter is missing, an error is reported. For instance, the function definition +```scala +def g(x: Double): Double = + if x < limit then x * x else throw LimitExceeded() +``` +is rejected with this error message: +``` + | if x < limit then x * x else throw LimitExceeded() + | ^^^^^^^^^^^^^^^^^^^^^ + |The capability to throw exception LimitExceeded is missing. + |The capability can be provided by one of the following: + | - Adding a using clause `(using CanThrow[LimitExceeded])` to the definition of the enclosing method + | - Adding `throws LimitExceeded` clause after the result type of the enclosing method + | - Wrapping this piece of code with a `try` block that catches LimitExceeded +``` +`CanThrow` capabilities are required by `throw` expressions and are created +by `try` expressions. For instance, the expression +```scala +try xs.map(f).sum +catch case ex: LimitExceeded => -1 +``` +would be expanded by the compiler to something like the following: +```scala +try + erased given ctl: CanThrow[LimitExceeded] = compiletime.erasedValue + xs.map(f).sum +catch case ex: LimitExceeded => -1 +``` +(The `ctl` capability is only used for type checking but need not show up in the generated code, so it can be declared as +erased.) + +As with other capability based schemes, one needs to guard against capabilities +that are captured in results. 
For instance, here is a problematic use case: +```scala +def escaped(xs: Double*): (() => Double) throws LimitExceeded = + try () => xs.map(f).sum + catch case ex: LimitExceeded => () => -1 +val crasher = escaped(1, 2, 10e+11) +crasher() +``` +This code needs to be rejected since otherwise the call to `crasher()` would cause +an unhandled `LimitExceeded` exception to be thrown. + +Under `-Ycc`, the code is indeed rejected +``` +14 | try () => xs.map(f).sum + | ^ + |The expression's type {*} () -> Double is not allowed to capture the root capability `*`. + |This usually means that a capability persists longer than its allowed lifetime. +15 | catch case ex: LimitExceeded => () => -1 +``` +To integrate exception and capture checking, only two changes are needed: + + - `CanThrow` is declared as a `@capability` class, so all references to `CanThrow` instances are tracked. + - Escape checking is extended to `try` expressions. The result type of a `try` is not allowed to + capture the universal capability. + +## A Larger Example + +As a larger example, we present an implementation of lazy lists and some use cases. For simplicity, +our lists are lazy only in their tail part. This corresponds to what the Scala-2 type `Stream` did, whereas Scala 3's `LazyList` type computes strictly less since it is also lazy in the first argument. + +Here is the base trait `LzyList` for our version of lazy lists: +```scala +trait LzyList[+A]: + def isEmpty: Boolean + def head: A + def tail: {this} LzyList[A] +``` +Note that `tail` carries a capture annotation. It says that the tail of a lazy list can +potentially capture the same references as the lazy list as a whole. + +The empty case of a `LzyList` is written as usual: +```scala +object LzyNil extends LzyList[Nothing]: + def isEmpty = true + def head = ??? + def tail = ??? 
+``` +Here is a formulation of the class for lazy cons nodes: +```scala +import scala.compiletime.uninitialized + +final class LzyCons[+A](hd: A, tl: () => {*} LzyList[A]) extends LzyList[A]: + private var forced = false + private var cache: {this} LzyList[A] = uninitialized + private def force = + if !forced then { cache = tl(); forced = true } + cache + + def isEmpty = false + def head = hd + def tail: {this} LzyList[A] = force +end LzyCons +``` +The `LzyCons` class takes two parameters: A head `hd` and a tail `tl`, which is a function +returning a `LzyList`. Both the function and its result can capture arbitrary capabilities. +The result of applying the function is memoized after the first dereference of `tail` in +the private mutable field `cache`. Note that the typing of the assignment `cache = tl()` relies on the monotonicity rule for `{this}` capture sets. + +Here is an extension method to define an infix cons operator `#:` for lazy lists. It is analogous +to `::` but instead of a strict list it produces a lazy list without evaluating its right operand. +```scala +extension [A](x: A) + def #:(xs1: => {*} LzyList[A]): {xs1} LzyList[A] = + LzyCons(x, () => xs1) +``` +Note that `#:` takes an impure call-by-name parameter `xs1` as its right argument. The result +of `#:` is a lazy list that captures that argument. + +As an example usage of `#:`, here is a method `tabulate` that creates a lazy list +of given length with a generator function `gen`. The generator function is allowed +to have side effects. 
+```scala +def tabulate[A](n: Int)(gen: Int => A) = + def recur(i: Int): {gen} LzyList[A] = + if i == n then LzyNil + else gen(i) #: recur(i + 1) + recur(0) +``` +Here is a use of `tabulate`: +```scala +class LimitExceeded extends Exception +def squares(n: Int)(using ct: CanThrow[LimitExceeded]) = + tabulate(10) { i => + if i > 9 then throw LimitExceeded() + i * i + } +``` +The inferred result type of `squares` is `{ct} LzyList[Int]`, i.e it is a lazy list of +`Int`s that can throw the `LimitExceeded` exception when it is elaborated by calling `tail` +one or more times. + +Here are some further extension methods for mapping, filtering, and concatenating lazy lists: +```scala +extension [A](xs: {*} LzyList[A]) + def map[B](f: A => B): {xs, f} LzyList[B] = + if xs.isEmpty then LzyNil + else f(xs.head) #: xs.tail.map(f) + + def filter(p: A => Boolean): {xs, p} LzyList[A] = + if xs.isEmpty then LzyNil + else if p(xs.head) then xs.head #: xs.tail.filter(p) + else xs.tail.filter(p) + + def concat(ys: {*} LzyList[A]): {xs, ys} LzyList[A] = + if xs.isEmpty then ys + else xs.head #: xs.tail.concat(ys) + + def drop(n: Int): {xs} LzyList[A] = + if n == 0 then xs else xs.tail.drop(n - 1) +``` +Their capture annotations are all as one would expect: + + - Mapping a lazy list produces a lazy list that captures the original list as well + as the (possibly impure) mapping function. + - Filtering a lazy list produces a lazy list that captures the original list as well + as the (possibly impure) filtering predicate. + - Concatenating two lazy lists produces a lazy list that captures both arguments. + - Dropping elements from a lazy list gives a safe approximation where the original list is captured in the result. In fact, it's only some suffix of the list that is retained at run time, but our modelling identifies lazy lists and their suffixes, so this additional knowledge would not be useful. + +Of course the function passed to `map` or `filter` could also be pure. 
After all, `A -> B` is a subtype of `{*} A -> B` which is the same as `A => B`. In that case, the pure function +argument will _not_ show up in the result type of `map` or `filter`. For instance: +```scala +val xs = squares(10) +val ys: {xs} LzyList[Int] = xs.map(_ + 1) +``` +The type of the mapped list `ys` has only `xs` in its capture set. The actual function +argument does not show up since it is pure. Likewise, if the lazy list +`xs` was pure, it would not show up in any of the method results. +This demonstrates that capability-based +effect systems with capture checking are naturally _effect polymorphic_. + +This concludes our example. It's worth mentioning that an equivalent program defining and using standard, strict lists would require no capture annotations whatsoever. It would compile exactly as written now in standard Scala 3, yet one gets the capture checking for free. Essentially, `=>` already means "can capture anything" and since in a strict list side effecting operations are not retained in the result, there are no additional captures to record. A strict list could of course capture side-effecting closures in its elements but then tunnelling applies, since +these elements are represented by a type variable. This means we don't need to annotate anything there either. + +Another possibility would be a variant of lazy lists that requires all functions passed to `map`, `filter` and other operations like it to be pure. E.g. `map` on such a list would be defined like this: +```scala +extension [A](xs: LzyList[A]) + def map[B](f: A -> B): LzyList[B] = ... +``` +That variant would not require any capture annotations either. + +To summarize, there are two "sweet spots" of data structure design: strict lists in +side-effecting or resource-aware code and lazy lists in purely functional code. +Both are already correctly capture-typed without requiring any explicit annotations. 
Capture annotations only come into play where the semantics gets more complicated because we deal with delayed effects such as in impure lazy lists or side-effecting iterators over strict lists. This property is probably one of the greatest plus points of our approach to capture checking compared to previous techniques which tend to be more noisy. + +## Function Type Shorthands + +TBD + +## Compilation Options + +The following options are relevant for capture checking. + + - **-Ycc** Enables capture checking. + - **-Xprint:cc** Prints the program with capturing types as inferred by capture checking. + - **-Ycc-debug** Gives more detailed, implementation-oriented information about capture checking, as described in the next section. + + The implementation supporting capture checking with these options is currently in branch `cc-experiment` on dotty.epfl.ch. + +## Capture Checking Internals + +The capture checker is architected as a propagation constraint solver, which runs as a separate phase after type-checking and some initial transformations. + +Constraint variables stand for unknown capture sets. A constraint variable is introduced + + - for every part of a previously inferred type, + - for the accessed references of every method, class, anonymous function, or by-name argument, + - for the parameters passed in a class constructor call. + +Capture sets in explicitly written types are treated as constants (before capture checking, such sets are simply ignored). + +The capture checker essentially rechecks the program with the usual typing rules. Every time a subtype requirement between capturing types is checked, this translates to a subcapturing test on capture sets. If the two sets are constant, this is simply a yes/no question, where a no will produce an error message. + +If the lower set `C₁` of a comparison `C₁ <: C₂` is a variable, the set `C₂` is recorded +as a _superset_ of `C₁`. 
If the upper set `C₂` is a variable, the elements of `C₁` are _propagated_ to `C₂`. Propagation of an element `x` to a set `C` means that `x` is included as an element in `C`, and it is also propagated +to all known supersets of `C`. If such a superset is a constant, it is checked that `x` is included in it. If that's not the case, the original comparison `C₁ <: C₂` has no solution and an error is reported. + +The type checker also performs various maps on types, for instance when substituting actual argument types for formal parameter types in dependent functions, or mapping +member types with "as-seen-from" in a selection. Maps keep track of the variance +of positions in a type. The variance is initially covariant, it flips to +contravariant in function parameter positions, and can be either covariant, +contravariant, or nonvariant in type arguments, depending on the variance of +the type parameter. + +When capture checking, the same maps are also performed on capture sets. If a capture set is a constant, its elements (which are capabilities) are mapped as regular types. If the result of such a map is not a capability, the result is approximated according to the variance of the type. A covariant approximation replaces a type by its capture set. +A contravariant approximation replaces it with the empty capture set. A nonvariant +approximation replaces the enclosing capturing type with a range of possible types +that gets propagated and resolved further out. + +When a mapping `m` is performed on a capture set variable `C`, a new variable `Cm` is created that contains the mapped elements and that is linked with `C`. If `C` subsequently acquires further elements through propagation, these are also propagated to `Cm` after being transformed by the `m` mapping. `Cm` also gets the same supersets as `C`, mapped again using `m`. + +One interesting aspect of the capture checker concerns the implementation of capture tunnelling. 
The [foundational theory](https://infoscience.epfl.ch/record/290885) on which capture checking is based makes tunnelling explicit through so-called _box_ and +_unbox_ operations. Boxing hides a capture set and unboxing recovers it. The capture checker inserts virtual box and unbox operations based on actual and expected types similar to the way the type checker inserts implicit conversions. When capture set variables are first introduced, any capture set in a capturing type that is an instance of a type parameter is marked as "boxed". A boxing operation is +inserted if the expected type of an expression is a capturing type with +a boxed capture set variable. The effect of the insertion is that any references +to capabilities in the boxed expression are forgotten, which means that capture +propagation is stopped. Dually, if the actual type of an expression has +a boxed variable as capture set, an unbox operation is inserted, which adds all +elements of the capture set to the environment. + +Boxing and unboxing have no runtime effect, so the insertion of these operations is only simulated; the only visible effect is the retraction and insertion +of variables in the capture sets representing the environment of the currently checked expression. + +The `-Ycc-debug` option provides some insight into the workings of the capture checker. +When it is turned on, boxed sets are marked explicitly and capture set variables are printed with an ID and some information about their provenance. For instance, the string `{f, xs}33M5V` indicates a capture set +variable that is known to hold elements `f` and `xs`. The variable's ID is `33`. The `M` +indicates that the variable was created through a mapping from a variable with ID `5`. The latter is a regular variable, as indicated + by `V`. + +Generally, the string following the capture set consists of alternating numbers and letters where each number gives a variable ID and each letter gives the provenance of the variable.
Possible letters are + + - `V` : a regular variable, + - `M` : a variable resulting from a _mapping_ of the variable indicated by the string to the right, + - `B` : similar to `M` but where the mapping is a _bijection_, + - `F` : a variable resulting from _filtering_ the elements of the variable indicated by the string to the right, + - `I` : a variable resulting from an _intersection_ of two capture sets, + - `D` : a variable resulting from the set _difference_ of two capture sets. + +At the end of a compilation run, `-Ycc-debug` will print all variable dependencies of variables referred to in previous output. Here is an example: +``` +Capture set dependencies: + {}2V :: + {}3V :: + {}4V :: + {f, xs}5V :: {f, xs}31M5V, {f, xs}32M5V + {f, xs}31M5V :: {xs, f} + {f, xs}32M5V :: +``` +This section lists all variables that appeared in previous diagnostics and their dependencies, recursively. For instance, we learn that + + - variables 2, 3, 4 are empty and have no dependencies, + - variable `5` has two dependencies: variables `31` and `32` which both result from mapping variable `5`, + - variable `31` has a constant fixed superset `{xs, f}` + - variable `32` has no dependencies. + diff --git a/docs/docs/reference/experimental/erased-defs-spec.md b/docs/_docs/reference/experimental/erased-defs-spec.md similarity index 93% rename from docs/docs/reference/experimental/erased-defs-spec.md rename to docs/_docs/reference/experimental/erased-defs-spec.md index ed8490f7e30e..7ca815878b55 100644 --- a/docs/docs/reference/experimental/erased-defs-spec.md +++ b/docs/_docs/reference/experimental/erased-defs-spec.md @@ -1,6 +1,7 @@ --- layout: doc-page -title: "Erased Definitions: More Details" +title: "Erased Definitions - More Details" +movedTo: https://docs.scala-lang.org/scala3/reference/experimental/erased-defs-spec.html --- TODO: complete @@ -61,4 +62,3 @@ TODO: complete 7. 
Overriding * Member definitions overriding each other must both be `erased` or not be `erased` * `def foo(x: T): U` cannot be overridden by `def foo(erased x: T): U` and vice-versa - diff --git a/docs/docs/reference/experimental/erased-defs.md b/docs/_docs/reference/experimental/erased-defs.md similarity index 77% rename from docs/docs/reference/experimental/erased-defs.md rename to docs/_docs/reference/experimental/erased-defs.md index bd22a5149614..d8562d20b420 100644 --- a/docs/docs/reference/experimental/erased-defs.md +++ b/docs/_docs/reference/experimental/erased-defs.md @@ -1,14 +1,17 @@ --- layout: doc-page title: "Erased Definitions" +movedTo: https://docs.scala-lang.org/scala3/reference/experimental/erased-defs.html --- `erased` is a modifier that expresses that some definition or expression is erased by the compiler instead of being represented in the compiled output. It is not yet part of the Scala language standard. To enable `erased`, turn on the language feature -`experimental.erasedDefinitions`. This can be done with a language import +[`experimental.erasedDefinitions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$experimental$$erasedDefinitions$.html). This can be done with a language import ```scala import scala.language.experimental.erasedDefinitions ``` or by setting the command line option `-language:experimental.erasedDefinitions`. +Erased definitions must be in an experimental scope (see [Experimental definitions](../other-new-features/experimental-defs.md)). + ## Why erased terms? Let's describe the motivation behind erased terms with an example. 
In the @@ -27,10 +30,10 @@ final class Off extends State @implicitNotFound("State must be Off") class IsOff[S <: State] object IsOff: - given isOff: IsOff[Off] = new IsOff[Off] + given isOff: IsOff[Off] = new IsOff[Off] class Machine[S <: State]: - def turnedOn(using IsOff[S]): Machine[On] = new Machine[On] + def turnedOn(using IsOff[S]): Machine[On] = new Machine[On] val m = new Machine[Off] m.turnedOn @@ -57,7 +60,7 @@ in front of a parameter list (like `given`). def methodWithErasedEv(erased ev: Ev): Int = 42 val lambdaWithErasedEv: erased Ev => Int = - (erased ev: Ev) => 42 + (erased ev: Ev) => 42 ``` `erased` parameters will not be usable for computations, though they can be used @@ -65,10 +68,10 @@ as arguments to other `erased` parameters. ```scala def methodWithErasedInt1(erased i: Int): Int = - i + 42 // ERROR: can not use i + i + 42 // ERROR: can not use i def methodWithErasedInt2(erased i: Int): Int = - methodWithErasedInt1(i) // OK + methodWithErasedInt1(i) // OK ``` Not only parameters can be marked as erased, `val` and `def` can also be marked @@ -125,37 +128,37 @@ final class Off extends State @implicitNotFound("State must be Off") class IsOff[S <: State] object IsOff: - // will not be called at runtime for turnedOn, the - // compiler will only require that this evidence exists - given IsOff[Off] = new IsOff[Off] + // will not be called at runtime for turnedOn, the + // compiler will only require that this evidence exists + given IsOff[Off] = new IsOff[Off] @implicitNotFound("State must be On") class IsOn[S <: State] object IsOn: - // will not exist at runtime, the compiler will only - // require that this evidence exists at compile time - erased given IsOn[On] = new IsOn[On] + // will not exist at runtime, the compiler will only + // require that this evidence exists at compile time + erased given IsOn[On] = new IsOn[On] class Machine[S <: State] private (): - // ev will disappear from both functions - def turnedOn(using erased ev: IsOff[S]): 
Machine[On] = new Machine[On] - def turnedOff(using erased ev: IsOn[S]): Machine[Off] = new Machine[Off] + // ev will disappear from both functions + def turnedOn(using erased ev: IsOff[S]): Machine[On] = new Machine[On] + def turnedOff(using erased ev: IsOn[S]): Machine[Off] = new Machine[Off] object Machine: - def newMachine(): Machine[Off] = new Machine[Off] + def newMachine(): Machine[Off] = new Machine[Off] @main def test = - val m = Machine.newMachine() - m.turnedOn - m.turnedOn.turnedOff + val m = Machine.newMachine() + m.turnedOn + m.turnedOn.turnedOff - // m.turnedOff - // ^ - // State must be On + // m.turnedOff + // ^ + // State must be On - // m.turnedOn.turnedOn - // ^ - // State must be Off + // m.turnedOn.turnedOn + // ^ + // State must be Off ``` Note that in [Inline](../metaprogramming/inline.md) we discussed `erasedValue` and inline @@ -170,27 +173,27 @@ final class On extends State final class Off extends State class Machine[S <: State]: - transparent inline def turnOn(): Machine[On] = - inline erasedValue[S] match - case _: Off => new Machine[On] - case _: On => error("Turning on an already turned on machine") + transparent inline def turnOn(): Machine[On] = + inline erasedValue[S] match + case _: Off => new Machine[On] + case _: On => error("Turning on an already turned on machine") - transparent inline def turnOff(): Machine[Off] = - inline erasedValue[S] match - case _: On => new Machine[Off] - case _: Off => error("Turning off an already turned off machine") + transparent inline def turnOff(): Machine[Off] = + inline erasedValue[S] match + case _: On => new Machine[Off] + case _: Off => error("Turning off an already turned off machine") object Machine: - def newMachine(): Machine[Off] = - println("newMachine") - new Machine[Off] + def newMachine(): Machine[Off] = + println("newMachine") + new Machine[Off] end Machine @main def test = - val m = Machine.newMachine() - m.turnOn() - m.turnOn().turnOff() - m.turnOn().turnOn() // error: Turning on 
an already turned on machine + val m = Machine.newMachine() + m.turnOn() + m.turnOn().turnOff() + m.turnOn().turnOn() // error: Turning on an already turned on machine ``` ## Erased Classes diff --git a/docs/docs/reference/other-new-features/explicit-nulls.md b/docs/_docs/reference/experimental/explicit-nulls.md similarity index 78% rename from docs/docs/reference/other-new-features/explicit-nulls.md rename to docs/_docs/reference/experimental/explicit-nulls.md index 1eb245fd1cec..ebdb3356d99e 100644 --- a/docs/docs/reference/other-new-features/explicit-nulls.md +++ b/docs/_docs/reference/experimental/explicit-nulls.md @@ -1,10 +1,11 @@ --- layout: doc-page title: "Explicit Nulls" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/explicit-nulls.html --- Explicit nulls is an opt-in feature that modifies the Scala type system, which makes reference types -(anything that extends `AnyRef`) _non-nullable_. +(anything that extends [`AnyRef`](https://scala-lang.org/api/3.x/scala/AnyRef.html)) _non-nullable_. This means the following code will no longer typecheck: @@ -35,7 +36,7 @@ When explicit nulls are enabled, the type hierarchy changes so that `Null` is on This is the new type hierarchy: -!["Type Hierarchy for Explicit Nulls"](/images/explicit-nulls/explicit-nulls-type-hierarchy.png) +!["Type Hierarchy for Explicit Nulls"](images/explicit-nulls/explicit-nulls-type-hierarchy.png) After erasure, `Null` remains a subtype of all reference types (as forced by the JVM). @@ -58,7 +59,7 @@ So far, we have found the following useful: Don't use `.nn` on mutable variables directly, because it may introduce an unknown type into the type of the variable. -- An `unsafeNulls` language feature. +- An [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) language feature. When imported, `T | Null` can be used as `T`, similar to regular Scala (without explicit nulls). 
@@ -72,8 +73,8 @@ The unsoundness happens because uninitialized fields in a class start out as `nu ```scala class C: - val f: String = foo(f) - def foo(f2: String): String = f2 + val f: String = foo(f) + def foo(f2: String): String = f2 val c = new C() // c.f == "field is null" @@ -123,8 +124,8 @@ We illustrate the rules with following examples: ```java class C { - String s; - int x; + String s; + int x; } ``` @@ -132,8 +133,8 @@ We illustrate the rules with following examples: ```scala class C: - val s: String | Null - val x: Int + val s: String | Null + val x: Int ``` - We nullify type parameters because in Java a type parameter is always nullable, so the following code compiles. @@ -152,8 +153,8 @@ We illustrate the rules with following examples: ```scala class InScala: - val c: C[Bool] = ??? // C as above - val b: Bool = c.foo() // no longer typechecks, since foo now returns Bool | Null + val c: C[Bool] = ??? // C as above + val b: Bool = c.foo() // no longer typechecks, since foo now returns Bool | Null ``` - We can reduce the number of redundant nullable types we need to add. Consider @@ -193,8 +194,8 @@ We illustrate the rules with following examples: ```scala class BoxFactory[T]: - def makeBox(): Box[T | Null] | Null - def makeCrazyBoxes(): java.util.List[Box[java.util.List[T] | Null]] | Null + def makeBox(): Box[T | Null] | Null + def makeCrazyBoxes(): java.util.List[Box[java.util.List[T] | Null]] | Null ``` In this case, since `Box` is Scala-defined, we will get `Box[T | Null] | Null`. 
@@ -209,11 +210,11 @@ We illustrate the rules with following examples: ```java class Constants { - final String NAME = "name"; - final int AGE = 0; - final char CHAR = 'a'; + final String NAME = "name"; + final int AGE = 0; + final char CHAR = 'a'; - final String NAME_GENERATED = getNewName(); + final String NAME_GENERATED = getNewName(); } ``` @@ -221,11 +222,11 @@ We illustrate the rules with following examples: ```scala class Constants: - val NAME: String("name") = "name" - val AGE: Int(0) = 0 - val CHAR: Char('a') = 'a' + val NAME: String("name") = "name" + val AGE: Int(0) = 0 + val CHAR: Char('a') = 'a' - val NAME_GENERATED: String | Null = getNewName() + val NAME_GENERATED: String | Null = getNewName() ``` - We don't append `Null` to a field nor to a return type of a method which is annotated with a @@ -233,9 +234,9 @@ We illustrate the rules with following examples: ```java class C { - @NotNull String name; - @NotNull List getNames(String prefix); // List is Java-defined - @NotNull Box getBoxedName(); // Box is Scala-defined + @NotNull String name; + @NotNull List getNames(String prefix); // List is Java-defined + @NotNull Box getBoxedName(); // Box is Scala-defined } ``` @@ -243,9 +244,9 @@ We illustrate the rules with following examples: ```scala class C: - val name: String - def getNames(prefix: String | Null): java.util.List[String] // we still need to nullify the paramter types - def getBoxedName(): Box[String | Null] // we don't append `Null` to the outmost level, but we still need to nullify inside + val name: String + def getNames(prefix: String | Null): java.util.List[String] // we still need to nullify the parameter types + def getBoxedName(): Box[String | Null] // we don't append `Null` to the outermost level, but we still need to nullify inside ``` The annotation must be from the list below to be recognized as `NotNull` by the compiler.
@@ -276,7 +277,7 @@ We illustrate the rules with following examples: ### Override check -When we check overriding between Scala classes and Java classes, the rules are relaxed for `Null` type with this feature, in order to help users to working with Java libraries. +When we check overriding between Scala classes and Java classes, the rules are relaxed for [`Null`](https://scala-lang.org/api/3.x/scala/Null.html) type with this feature, in order to help users to working with Java libraries. Suppose we have Java method `String f(String x)`, we can override this method in Scala in any of the following forms: @@ -304,7 +305,7 @@ Example: ```scala val s: String | Null = ??? if s != null then - // s: String + // s: String // s: String | Null @@ -316,9 +317,9 @@ A similar inference can be made for the `else` case if the test is `p == null` ```scala if s == null then - // s: String | Null + // s: String | Null else - // s: String + // s: String ``` `==` and `!=` is considered a comparison for the purposes of the flow inference. @@ -331,15 +332,15 @@ We also support logical operators (`&&`, `||`, and `!`): val s: String | Null = ??? val s2: String | Null = ??? if s != null && s2 != null then - // s: String - // s2: String + // s: String + // s2: String if s == null || s2 == null then - // s: String | Null - // s2: String | Null + // s: String | Null + // s2: String | Null else - // s: String - // s2: String + // s: String + // s2: String ``` ### Inside Conditions @@ -350,12 +351,12 @@ We also support type specialization _within_ the condition, taking into account val s: String | Null = ??? if s != null && s.length > 0 then // s: String in `s.length > 0` - // s: String + // s: String if s == null || s.length > 0 then // s: String in `s.length > 0` - // s: String | Null + // s: String | Null else - // s: String + // s: String ``` ### Match Case @@ -366,8 +367,8 @@ The non-null cases can be detected in match statements. val s: String | Null = ??? 
s match - case _: String => // s: String - case _ => + case _: String => // s: String + case _ => ``` ### Mutable Variable @@ -380,13 +381,13 @@ class C(val x: Int, val next: C | Null) var xs: C | Null = C(1, C(2, null)) // xs is trackable, since all assignments are in the same method while xs != null do - // xs: C - val xsx: Int = xs.x - val xscpy: C = xs - xs = xscpy // since xscpy is non-null, xs still has type C after this line - // xs: C - xs = xs.next // after this assignment, xs can be null again - // xs: C | Null + // xs: C + val xsx: Int = xs.x + val xscpy: C = xs + xs = xscpy // since xscpy is non-null, xs still has type C after this line + // xs: C + xs = xs.next // after this assignment, xs can be null again + // xs: C | Null ``` When dealing with local mutable variables, there are two questions: @@ -399,7 +400,7 @@ When dealing with local mutable variables, there are two questions: ```scala var x: String | Null = ??? def y = - x = null + x = null if x != null then // y can be called here, which would break the fact @@ -416,16 +417,16 @@ When dealing with local mutable variables, there are two questions: ```scala var x: String | Null = ??? def y = - if x != null then - // not safe to use the fact (x != null) here - // since y can be executed at the same time as the outer block - val _: String = x + if x != null then + // not safe to use the fact (x != null) here + // since y can be executed at the same time as the outer block + val _: String = x if x != null then - val a: String = x // ok to use the fact here - x = null + val a: String = x // ok to use the fact here + x = null ``` -See [more examples](https://github.com/lampepfl/dotty/blob/master/tests/explicit-nulls/neg/flow-varref-in-closure.scala). +See [more examples](https://github.com/lampepfl/dotty/blob/main/tests/explicit-nulls/neg/flow-varref-in-closure.scala). Currently, we are unable to track paths with a mutable variable prefix. For example, `x.a` if `x` is mutable. 
@@ -441,16 +442,16 @@ We don't support: val s: String | Null = ??? val s2: String | Null = ??? if s != null && s == s2 then - // s: String inferred - // s2: String not inferred + // s: String inferred + // s2: String not inferred ``` ### UnsafeNulls -It is difficult to work with many nullable values, we introduce a language feature `unsafeNulls`. +It is difficult to work with many nullable values, so we introduce a language feature [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html). Inside this "unsafe" scope, all `T | Null` values can be used as `T`. -Users can import `scala.language.unsafeNulls` to create such scopes, or use `-language:unsafeNulls` to enable this feature globally (for migration purpose only). +Users can import [`scala.language.unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) to create such scopes, or use `-language:unsafeNulls` to enable this feature globally (for migration purpose only). Assume `T` is a reference type (a subtype of `AnyRef`), the following unsafe operation rules are applied in this unsafe-nulls scope: @@ -469,7 +470,7 @@ can be used as `T2` if `T1` is a subtype of `T2` using regular subtyping rules Addtionally, `null` can be used as `AnyRef` (`Object`), which means you can select `.eq` or `.toString` on it. -The program in `unsafeNulls` will have a **similar** semantic as regular Scala, but not **equivalent**. +The program in [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) will have a **similar** semantic as regular Scala, but not **equivalent**. For example, the following code cannot be compiled even using unsafe nulls. Because of the Java interoperation, the type of the get method becomes `T | Null`. @@ -482,7 +483,7 @@ Since the compiler doesn’t know whether `T` is a reference type, it is unable to `T`.
A `.nn` need to be inserted after `xs.get(0)` by user manually to fix the error, which strips the `Null` from its type. -The intention of this `unsafeNulls` is to give users a better migration path for explicit nulls. +The intention of this [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) is to give users a better migration path for explicit nulls. Projects for Scala 2 or regular Scala 3 can try this by adding `-Yexplicit-nulls -language:unsafeNulls` to the compile options. A small number of manual modifications are expected. To migrate to the full explicit nulls feature in the future, `-language:unsafeNulls` can be dropped and add @@ -513,9 +514,9 @@ class C[T >: Null <: String] // define a type bound with unsafe conflict bound val n = nullOf[String] // apply a type bound unsafely ``` -Without the `unsafeNulls`, all these unsafe operations will not be type-checked. +Without the [`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html), all these unsafe operations will not be type-checked. -`unsafeNulls` also works for extension methods and implicit search. +[`unsafeNulls`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$unsafeNulls$.html) also works for extension methods and implicit search. ```scala import scala.language.unsafeNulls @@ -534,4 +535,4 @@ Our strategy for binary compatibility with Scala binaries that predate explicit and new libraries compiled without `-Yexplicit-nulls` is to leave the types unchanged and be compatible but unsound. 
-[More details](../../internals/explicit-nulls.md) +[More details](https://dotty.epfl.ch/docs/internals/explicit-nulls.html) diff --git a/docs/docs/reference/other-new-features/named-typeargs-spec.md b/docs/_docs/reference/experimental/named-typeargs-spec.md similarity index 82% rename from docs/docs/reference/other-new-features/named-typeargs-spec.md rename to docs/_docs/reference/experimental/named-typeargs-spec.md index f56f97d89100..7bf5c2f43ccf 100644 --- a/docs/docs/reference/other-new-features/named-typeargs-spec.md +++ b/docs/_docs/reference/experimental/named-typeargs-spec.md @@ -1,8 +1,11 @@ --- layout: doc-page title: "Named Type Arguments - More Details" +movedTo: https://docs.scala-lang.org/scala3/reference/experimental/named-typeargs-spec.html --- +In this section we give more details about the [named type arguments](named-typeargs.md) (*experimental*). + ## Syntax The addition to the grammar is: @@ -20,7 +23,7 @@ Note in particular that named arguments cannot be passed to type constructors: class C[T] val x: C[T = Int] = // error - new C[T = Int] // error + new C[T = Int] // error class E extends C[T = Int] // error ``` diff --git a/docs/docs/reference/other-new-features/named-typeargs.md b/docs/_docs/reference/experimental/named-typeargs.md similarity index 88% rename from docs/docs/reference/other-new-features/named-typeargs.md rename to docs/_docs/reference/experimental/named-typeargs.md index e9a67b0368dd..b9b4f7f33d69 100644 --- a/docs/docs/reference/other-new-features/named-typeargs.md +++ b/docs/_docs/reference/experimental/named-typeargs.md @@ -1,6 +1,8 @@ --- layout: doc-page title: "Named Type Arguments" +redirectFrom: reference/other-new-features/named-typeargs.html +movedTo: https://docs.scala-lang.org/scala3/reference/experimental/named-typeargs.html --- **Note:** This feature is implemented in Scala 3, but is not expected to be part of Scala 3.0. 
diff --git a/docs/_docs/reference/experimental/numeric-literals.md b/docs/_docs/reference/experimental/numeric-literals.md new file mode 100644 index 000000000000..56684d2722d5 --- /dev/null +++ b/docs/_docs/reference/experimental/numeric-literals.md @@ -0,0 +1,257 @@ +--- +layout: doc-page +title: "Numeric Literals" +movedTo: https://docs.scala-lang.org/scala3/reference/experimental/numeric-literals.html +--- + +**Note**: This feature is not yet part of the Scala 3 language definition. It can be made available by a language import: + +```scala +import scala.language.experimental.genericNumberLiterals +``` + +In Scala 2, numeric literals were confined to the primitive numeric types `Int`, `Long`, `Float`, and `Double`. Scala 3 allows to write numeric literals also for user-defined types. Example: + +```scala +val x: Long = -10_000_000_000 +val y: BigInt = 0x123_abc_789_def_345_678_901 +val z: BigDecimal = 110_222_799_799.99 + +(y: BigInt) match + case 123_456_789_012_345_678_901 => +``` + +The syntax of numeric literals is the same as before, except there are no pre-set limits +how large they can be. + +## Meaning of Numeric Literals + +The meaning of a numeric literal is determined as follows: + +- If the literal ends with `l` or `L`, it is a `Long` integer (and must fit in its legal range). +- If the literal ends with `f` or `F`, it is a single precision floating point number of type `Float`. +- If the literal ends with `d` or `D`, it is a double precision floating point number of type `Double`. + +In each of these cases the conversion to a number is exactly as in Scala 2 or in Java. If a numeric literal does _not_ end in one of these suffixes, its meaning is determined by the expected type: + +1. If the expected type is `Int`, `Long`, `Float`, or `Double`, the literal is + treated as a standard literal of that type. +2. 
If the expected type is a fully defined type `T` that has a given instance of type + [`scala.util.FromDigits[T]`](https://scala-lang.org/api/3.x/scala/util/FromDigits.html), the literal is converted to a value of type `T` by passing it as an argument to + the `fromDigits` method of that instance (more details below). +3. Otherwise, the literal is treated as a `Double` literal (if it has a decimal point or an + exponent), or as an `Int` literal (if not). (This last possibility is again as in Scala 2 or Java.) + +With these rules, the definition + +```scala +val x: Long = -10_000_000_000 +``` + +is legal by rule (1), since the expected type is `Long`. The definitions + +```scala +val y: BigInt = 0x123_abc_789_def_345_678_901 +val z: BigDecimal = 111222333444.55 +``` + +are legal by rule (2), since both `BigInt` and `BigDecimal` have [`FromDigits`](https://scala-lang.org/api/3.x/scala/util/FromDigits.html) instances (which implement the `FromDigits` subclasses [`FromDigits.WithRadix`](https://scala-lang.org/api/3.x/scala/util/FromDigits$$WithRadix.html) and [`FromDigits.Decimal`](https://scala-lang.org/api/3.x/scala/util/FromDigits$$Decimal.html), respectively). On the other hand, + +```scala +val x = -10_000_000_000 +``` + +gives a type error, since without an expected type `-10_000_000_000` is treated by rule (3) as an `Int` literal, but it is too large for that type. + +## The `FromDigits` Trait + +To allow numeric literals, a type simply has to define a `given` instance of the +[`scala.util.FromDigits`](https://scala-lang.org/api/3.x/scala/util/FromDigits.html) type class, or one of its subclasses. `FromDigits` is defined as follows: + +```scala +trait FromDigits[T]: + def fromDigits(digits: String): T +``` + +Implementations of the `fromDigits` convert strings of digits to the values of the +implementation type `T`. +The `digits` string consists of digits between `0` and `9`, possibly preceded by a +sign ("+" or "-"). 
Number separator characters `_` are filtered out before +the string is passed to `fromDigits`. + +The companion object [`FromDigits`](https://scala-lang.org/api/3.x/scala/util/FromDigits$.html) also defines subclasses of `FromDigits` for whole numbers with a given radix, for numbers with a decimal point, and for numbers that can have both a decimal point and an exponent: + +```scala +object FromDigits: + + /** A subclass of `FromDigits` that also allows to convert whole + * number literals with a radix other than 10 + */ + trait WithRadix[T] extends FromDigits[T]: + def fromDigits(digits: String): T = fromDigits(digits, 10) + def fromDigits(digits: String, radix: Int): T + + /** A subclass of `FromDigits` that also allows to convert number + * literals containing a decimal point ".". + */ + trait Decimal[T] extends FromDigits[T] + + /** A subclass of `FromDigits`that allows also to convert number + * literals containing a decimal point "." or an + * exponent `('e' | 'E')['+' | '-']digit digit*`. + */ + trait Floating[T] extends Decimal[T] +``` + +A user-defined number type can implement one of those, which signals to the compiler +that hexadecimal numbers, decimal points, or exponents are also accepted in literals +for this type. + +## Error Handling + +`FromDigits` implementations can signal errors by throwing exceptions of some subtype +of [`FromDigitsException`](https://scala-lang.org/api/3.x/scala/util/FromDigits$$FromDigitsException.html). 
`FromDigitsException` is defined with three subclasses in the +`FromDigits` object as follows: + +```scala +abstract class FromDigitsException(msg: String) extends NumberFormatException(msg) + +class NumberTooLarge (msg: String = "number too large") extends FromDigitsException(msg) +class NumberTooSmall (msg: String = "number too small") extends FromDigitsException(msg) +class MalformedNumber(msg: String = "malformed number literal") extends FromDigitsException(msg) +``` + +## Example + +As a fully worked out example, here is an implementation of a new numeric class, `BigFloat`, that accepts numeric literals. `BigFloat` is defined in terms of a `BigInt` mantissa and an `Int` exponent: + +```scala +case class BigFloat(mantissa: BigInt, exponent: Int): + override def toString = s"${mantissa}e${exponent}" +``` + +`BigFloat` literals can have a decimal point as well as an exponent. E.g. the following expression +should produce the `BigFloat` number `BigFloat(-123, 997)`: + +```scala +-0.123E+1000: BigFloat +``` + +The companion object of `BigFloat` defines an `apply` constructor method to construct a `BigFloat` +from a `digits` string. 
Here is a possible implementation: + +```scala +object BigFloat: + import scala.util.FromDigits + + def apply(digits: String): BigFloat = + val (mantissaDigits, givenExponent) = + digits.toUpperCase.split('E') match + case Array(mantissaDigits, edigits) => + val expo = + try FromDigits.intFromDigits(edigits) + catch case ex: FromDigits.NumberTooLarge => + throw FromDigits.NumberTooLarge(s"exponent too large: $edigits") + (mantissaDigits, expo) + case Array(mantissaDigits) => + (mantissaDigits, 0) + val (intPart, exponent) = + mantissaDigits.split('.') match + case Array(intPart, decimalPart) => + (intPart ++ decimalPart, givenExponent - decimalPart.length) + case Array(intPart) => + (intPart, givenExponent) + BigFloat(BigInt(intPart), exponent) +``` + +To accept `BigFloat` literals, all that's needed in addition is a `given` instance of type +`FromDigits.Floating[BigFloat]`: + +```scala + given FromDigits: FromDigits.Floating[BigFloat] with + def fromDigits(digits: String) = apply(digits) +end BigFloat +``` + +Note that the `apply` method does not check the format of the `digits` argument. It is +assumed that only valid arguments are passed. For calls coming from the compiler +that assumption is valid, since the compiler will first check whether a numeric +literal has the correct format before it gets passed on to a conversion method. + +## Compile-Time Errors + +With the setup of the previous section, a literal like + +```scala +1e10_0000_000_000: BigFloat +``` + +would be expanded by the compiler to + +```scala +BigFloat.FromDigits.fromDigits("1e100000000000") +``` + +Evaluating this expression throws a [`NumberTooLarge`](https://scala-lang.org/api/3.x/scala/util/FromDigits$$NumberTooLarge.html) exception at run time. We would like it to +produce a compile-time error instead. We can achieve this by tweaking the `BigFloat` class +with a small dose of metaprogramming. The idea is to turn the `fromDigits` method +into a macro, i.e. 
make it an inline method with a splice as right-hand side. +To do this, replace the `FromDigits` instance in the `BigFloat` object by the following two definitions: + +```scala +object BigFloat: + ... + + class FromDigits extends FromDigits.Floating[BigFloat]: + def fromDigits(digits: String) = apply(digits) + + given FromDigits with + override inline def fromDigits(digits: String) = ${ + fromDigitsImpl('digits) + } +``` + +Note that an inline method cannot directly fill in for an abstract method, since it produces +no code that can be executed at runtime. That is why we define an intermediary class +`FromDigits` that contains a fallback implementation which is then overridden by the inline +method in the `FromDigits` given instance. That method is defined in terms of a macro +implementation method `fromDigitsImpl`. Here is its definition: + +```scala + private def fromDigitsImpl(digits: Expr[String])(using ctx: Quotes): Expr[BigFloat] = + digits.value match + case Some(ds) => + try + val BigFloat(m, e) = apply(ds) + '{BigFloat(${Expr(m)}, ${Expr(e)})} + catch case ex: FromDigits.FromDigitsException => + ctx.error(ex.getMessage) + '{BigFloat(0, 0)} + case None => + '{apply($digits)} +end BigFloat +``` + +The macro implementation takes an argument of type `Expr[String]` and yields +a result of type `Expr[BigFloat]`. It tests whether its argument is a constant +string. If that is the case, it converts the string using the `apply` method +and lifts the resulting `BigFloat` back to `Expr` level. For non-constant +strings `fromDigitsImpl(digits)` is simply `apply(digits)`, i.e. everything is +evaluated at runtime in this case. + +The interesting part is the `catch` part of the case where `digits` is constant. +If the `apply` method throws a `FromDigitsException`, the exception's message is issued as a compile time error in the `ctx.error(ex.getMessage)` call. 
+ +With this new implementation, a definition like + +```scala +val x: BigFloat = 1234.45e3333333333 +``` + +would give a compile time error message: + +```scala +3 | val x: BigFloat = 1234.45e3333333333 + | ^^^^^^^^^^^^^^^^^^ + | exponent too large: 3333333333 +``` diff --git a/docs/_docs/reference/experimental/overview.md b/docs/_docs/reference/experimental/overview.md new file mode 100644 index 000000000000..2644de0d864e --- /dev/null +++ b/docs/_docs/reference/experimental/overview.md @@ -0,0 +1,28 @@ +--- +layout: doc-page +title: "Overview" +movedTo: https://docs.scala-lang.org/scala3/reference/experimental/overview.html +--- + +### Experimental language features + +All experimental language features can be found under the `scala.language.experimental` package. +They are enabled by importing the feature or using the `-language` compiler flag. + +* [`erasedDefinitions`](./erased-defs.md): Enable support for `erased` modifier. +* `fewerBraces`: Enable support for using indentation for arguments. +* [`genericNumberLiterals`](./numeric-literals.md): Enable support for generic number literals. +* [`namedTypeArguments`](./named-typeargs.md): Enable support for named type arguments. +* [`saferExceptions`](./canthrow.md): Enable support for checked exceptions. + +### Experimental language imports + +In general, experimental language features can be imported in an experimental scope (see [experimental definitions](../other-new-features/experimental-defs.md)). +They can be imported at the top-level if all top-level definitions are `@experimental`. + +### Experimental language features supported by special compiler options + +Some experimental language features that are still in research and development can be enabled with special compiler options. These include + +* [`-Yexplicit-nulls`](./explicit-nulls.md). Enable support for tracking null references in the type system.
+ diff --git a/docs/docs/reference/features-classification.md b/docs/_docs/reference/features-classification.md similarity index 99% rename from docs/docs/reference/features-classification.md rename to docs/_docs/reference/features-classification.md index 6196394b572a..21da1dfc065a 100644 --- a/docs/docs/reference/features-classification.md +++ b/docs/_docs/reference/features-classification.md @@ -1,8 +1,7 @@ --- layout: doc-page title: "A Classification of Proposed Language Features" -date: April 6, 2019 -author: Martin Odersky +movedTo: https://docs.scala-lang.org/scala3/reference/features-classification.html --- This document provides an overview of the constructs proposed for Scala 3 with the aim to facilitate the discussion what to include and when to include it. It classifies features into eight groups: (1) essential foundations, (2) simplifications, (3) restrictions, (4) dropped features, (5) changed features, (6) new features, (7) features oriented towards metaprogramming with the aim to replace existing macros, and (8) changes to type checking and inference. diff --git a/docs/_docs/reference/language-versions/binary-compatibility.md b/docs/_docs/reference/language-versions/binary-compatibility.md new file mode 100644 index 000000000000..a25a4203fd6b --- /dev/null +++ b/docs/_docs/reference/language-versions/binary-compatibility.md @@ -0,0 +1,36 @@ +--- +layout: doc-page +title: "Binary Compatibility" +--- + +In Scala 2 different minor versions of the compiler were free to change the way how they encode different language features in JVM bytecode so each bump of the compiler's minor version resulted in breaking binary compatibility and if a project had any Scala dependencies they all needed to be (cross-)compiled to the same minor Scala version that was used in that project itself. On the contrary, Scala 3 has a stable encoding into JVM bytecode. + +In addition to classfiles the compilation process in Scala 3 also produces files with `.tasty` extension. 
The [TASTy](https://docs.scala-lang.org/scala3/guides/tasty-overview.html) format is an intermediate representation of Scala code containing full information about sources together with information provided by the typer. Some of this information is lost during generation of bytecode so Scala 3 compilers read TASTy files during compilation in addition to classfiles to know the exact types of values, methods, etc. in already compiled classes (although compilation from TASTy files only is also possible). TASTy files are also typically distributed together with classfiles in published artifacts. + +TASTy format is extensible but it preserves backward compatibility and the evolution happens between minor releases of the language. This means a Scala compiler in version `3.x1.y1` is able to read TASTy files produced by another compiler in version `3.x2.y2` if `x1 >= x2` (assuming two stable versions of the compiler are considered - `SNAPSHOT` or `NIGHTLY` compiler versions can read TASTy in an older stable format but their TASTY versions are not compatible between each other even if the compilers have the same minor version; also compilers in stable versions cannot read TASTy generated by an unstable version). + +TASTy version number has the format of `.-` and the numbering changes in parallel to language releases in such a way that a bump in language minor version corresponds to a bump in TASTy minor version (e.g. for Scala `3.0.0` the TASTy version is `28.0-0`). Experimental version set to 0 signifies a stable version while others are considered unstable/experimental. TASTy version is not strictly bound to the data format itself - any changes to the API of the standard library also require a change in TASTy minor version. + +Being able to bump the compiler version in a project without having to wait for all of its dependencies to do the same is already a big leap forward when compared to Scala 2. 
However, we might still try to do better, especially from the perspective of authors of libraries. +If you maintain a library and you would like it to be usable as a dependency for all Scala 3 projects, you would have to always emit TASTy in a version that would be readable by everyone, which would normally mean getting stuck at 3.0.x forever. + +To solve this problem a new experimental compiler flag `-Yscala-release <version>` (available since 3.1.2-RC1) has been added. Setting this flag makes the compiler produce TASTy files that should be possible to use by all Scala 3 compilers in version `<version>` or newer (this flag was inspired by how `-release` works for specifying the target version of JDK). More specifically this enforces emitting TASTy files in an older format ensuring that: +* the code contains no references to parts of the standard library which were added to the API after `<version>` and would crash at runtime when a program is executed with the older version of the standard library on the classpath +* no dependency found on the classpath during compilation (except for the standard library itself) contains TASTy files produced by a compiler newer than `<version>` (otherwise they could potentially leak such disallowed references to the standard library). + +If any of the checks above is not fulfilled or for any other reason older TASTy cannot be emitted (e.g. the code uses some new language features which cannot be expressed in the older format) the entire compilation fails (with errors reported for each of such issues). + +As this feature is experimental it does not have any special support in build tools yet (at least not in sbt 1.6.1 or lower). +E.g. when a project gets compiled with Scala compiler `3.x1.y1` and `-Yscala-release 3.x2` option and then published using sbt +then the standard library in version `3.x1.y1` gets added to the project's dependencies instead of `3.x2.y2`.
+When the dependencies are added to the classpath during compilation with Scala `3.x2.y2` the compiler will crash while trying to read TASTy files in the newer format. +A currently known workaround is to modify the build definition of the dependent project by explicitly overriding the version of Scala standard library in dependencies, e.g. + +```scala +dependencyOverrides ++= Seq( + scalaOrganization.value %% "scala3-library" % scalaVersion.value, + scalaOrganization.value %% "scala3-library_sjs1" % scalaVersion.value // for Scala.js projects +) +``` + +The behaviour of `-Yscala-release` flag might still change in the future, especially it's not guaranteed that every new version of the compiler would be able to generate TASTy in all older formats going back to the one produced by `3.0.x` compiler. diff --git a/docs/_docs/reference/language-versions/language-versions.md b/docs/_docs/reference/language-versions/language-versions.md new file mode 100644 index 000000000000..e98aba32fd34 --- /dev/null +++ b/docs/_docs/reference/language-versions/language-versions.md @@ -0,0 +1,6 @@ +--- +layout: doc-page +title: "Language Versions" +--- + +Additional information on interoperability and migration between Scala 2 and 3 can be found [here](https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html). diff --git a/docs/_docs/reference/language-versions/source-compatibility.md b/docs/_docs/reference/language-versions/source-compatibility.md new file mode 100644 index 000000000000..029a3674ba73 --- /dev/null +++ b/docs/_docs/reference/language-versions/source-compatibility.md @@ -0,0 +1,38 @@ +--- +layout: doc-page +title: "Source Compatibility" +movedTo: https://docs.scala-lang.org/scala3/reference/language-versions.html +--- + +Scala 3 does NOT guarantee source compatibility between different minor language versions (e.g. some syntax valid in 3.x might get deprecated and then phased out in 3.y for y > x). 
There are also some syntax structures that were valid in Scala 2 but are not anymore in Scala 3. However the compiler provides a possibility to specify the desired version of syntax used in a particular file or globally for a run of the compiler to make migration between versions easier. + +The default Scala language syntax version currently supported by the Dotty compiler is [`3.0`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/0$.html). There are also other language versions that can be specified instead: + +- [`3.0-migration`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$3/0-migration$.html): Same as `3.0` but with a Scala 2 compatibility mode that helps moving Scala 2.13 sources over to Scala 3. In particular, it + + - flags some Scala 2 constructs that are disallowed in Scala 3 as migration warnings instead of hard errors, + - changes some rules to be more lenient and backwards compatible with Scala 2.13 + - gives some additional warnings where the semantics has changed between Scala 2.13 and 3.0 + - in conjunction with `-rewrite`, offer code rewrites from Scala 2.13 to 3.0. + +- [`future`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$future$.html): A preview of changes introduced in the next versions after 3.0. In the doc pages here we refer to the language version with these changes as `3.1`, but it might be that some of these changes will be rolled out in later `3.x` versions. + +Some Scala 2 specific idioms will be dropped in this version. The feature set supported by this version will be refined over time as we approach its release. + +- [`future-migration`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$future-migration$.html): Same as `future` but with additional helpers to migrate from `3.0`. Similarly to the helpers available under `3.0-migration`, these include migration warnings and optional rewrites. 
+ +There are two ways to specify a language version : + +- with a `-source` command line setting, e.g. `-source 3.0-migration`. +- with a `scala.language` import at the top of a source file, e.g: + +```scala +package p +import scala.language.`future-migration` + +class C { ... } +``` + +Language imports supersede command-line settings in the source files where they are specified. Only one language import specifying a source version is allowed in a source file, and it must come before any definitions in that file. + +**Note**: The [Scala 3 Migration Guide](https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html) gives further information to help the Scala programmer moving from Scala 2.13 to Scala 3. diff --git a/docs/_docs/reference/metaprogramming/compiletime-ops.md b/docs/_docs/reference/metaprogramming/compiletime-ops.md new file mode 100644 index 000000000000..944cdac5389a --- /dev/null +++ b/docs/_docs/reference/metaprogramming/compiletime-ops.md @@ -0,0 +1,294 @@ +--- +layout: doc-page +title: "Compile-time operations" +movedTo: https://docs.scala-lang.org/scala3/reference/metaprogramming/compiletime-ops.html +--- + +## The `scala.compiletime` Package + +The [`scala.compiletime`](https://scala-lang.org/api/3.x/scala/compiletime.html) package contains helper definitions that provide support for compile-time operations over values. They are described in the following. + +### `constValue` and `constValueOpt` + +`constValue` is a function that produces the constant value represented by a +type. + +```scala +import scala.compiletime.constValue +import scala.compiletime.ops.int.S + +transparent inline def toIntC[N]: Int = + inline constValue[N] match + case 0 => 0 + case _: S[n1] => 1 + toIntC[n1] + +inline val ctwo = toIntC[2] +``` + +`constValueOpt` is the same as `constValue`, however returning an `Option[T]` +enabling us to handle situations where a value is not present. Note that `S` is +the type of the successor of some singleton type. 
For example the type `S[1]` is +the singleton type `2`. + +### `erasedValue` + +So far we have seen inline methods that take terms (tuples and integers) as +parameters. What if we want to base case distinctions on types instead? For +instance, one would like to be able to write a function `defaultValue`, that, +given a type `T`, returns optionally the default value of `T`, if it exists. +We can already express this using rewrite match expressions and a simple +helper function, `scala.compiletime.erasedValue`, which is defined as follows: + +```scala +def erasedValue[T]: T +``` + +The `erasedValue` function _pretends_ to return a value of its type argument `T`. +Calling this function will always result in a compile-time error unless the call +is removed from the code while inlining. + +Using `erasedValue`, we can then define `defaultValue` as follows: + +```scala +import scala.compiletime.erasedValue + +transparent inline def defaultValue[T] = + inline erasedValue[T] match + case _: Byte => Some(0: Byte) + case _: Char => Some(0: Char) + case _: Short => Some(0: Short) + case _: Int => Some(0) + case _: Long => Some(0L) + case _: Float => Some(0.0f) + case _: Double => Some(0.0d) + case _: Boolean => Some(false) + case _: Unit => Some(()) + case _ => None +``` + +Then: + +```scala +val dInt: Some[Int] = defaultValue[Int] +val dDouble: Some[Double] = defaultValue[Double] +val dBoolean: Some[Boolean] = defaultValue[Boolean] +val dAny: None.type = defaultValue[Any] +``` + +As another example, consider the type-level version of `toInt` below: +given a _type_ representing a Peano number, +return the integer _value_ corresponding to it. +Consider the definitions of numbers as in the _Inline +Match_ section above. 
Here is how `toIntT` can be defined: + +```scala +transparent inline def toIntT[N <: Nat]: Int = + inline scala.compiletime.erasedValue[N] match + case _: Zero.type => 0 + case _: Succ[n] => toIntT[n] + 1 + +inline val two = toIntT[Succ[Succ[Zero.type]]] +``` + +`erasedValue` is an `erased` method so it cannot be used and has no runtime +behavior. Since `toIntT` performs static checks over the static type of `N` we +can safely use it to scrutinize its return type (`S[S[Z]]` in this case). + +### `error` + +The `error` method is used to produce user-defined compile errors during inline expansion. +It has the following signature: + +```scala +inline def error(inline msg: String): Nothing +``` + +If an inline expansion results in a call `error(msgStr)` the compiler +produces an error message containing the given `msgStr`. + +```scala +import scala.compiletime.{error, code} + +inline def fail() = + error("failed for a reason") + +fail() // error: failed for a reason +``` + +or + +```scala +inline def fail(p1: => Any) = + error(code"failed on: $p1") + +fail(identity("foo")) // error: failed on: identity("foo") +``` + +### The `scala.compiletime.ops` package + +The [`scala.compiletime.ops`](https://scala-lang.org/api/3.x/scala/compiletime/ops.html) package contains types that provide support for +primitive operations on singleton types. For example, +`scala.compiletime.ops.int.*` provides support for multiplying two singleton +`Int` types, and `scala.compiletime.ops.boolean.&&` for the conjunction of two +`Boolean` types. When all arguments to a type in `scala.compiletime.ops` are +singleton types, the compiler can evaluate the result of the operation. 
+ +```scala +import scala.compiletime.ops.int.* +import scala.compiletime.ops.boolean.* + +val conjunction: true && true = true +val multiplication: 3 * 5 = 15 +``` + +Many of these singleton operation types are meant to be used infix (as in [SLS §3.2.10](https://www.scala-lang.org/files/archive/spec/2.13/03-types.html#infix-types)). + +Since type aliases have the same precedence rules as their term-level +equivalents, the operations compose with the expected precedence rules: + +```scala +import scala.compiletime.ops.int.* +val x: 1 + 2 * 3 = 7 +``` + +The operation types are located in packages named after the type of the +left-hand side parameter: for instance, `scala.compiletime.ops.int.+` represents +addition of two numbers, while `scala.compiletime.ops.string.+` represents string +concatenation. To use both and distinguish the two types from each other, a +match type can dispatch to the correct implementation: + +```scala +import scala.compiletime.ops.* + +import scala.annotation.infix + +type +[X <: Int | String, Y <: Int | String] = (X, Y) match + case (Int, Int) => int.+[X, Y] + case (String, String) => string.+[X, Y] + +val concat: "a" + "b" = "ab" +val addition: 1 + 1 = 2 +``` + +## Summoning Implicits Selectively + +It is foreseen that many areas of typelevel programming can be done with inline +methods instead of implicits. But sometimes implicits are unavoidable. The +problem so far was that the Prolog-like programming style of implicit search +becomes viral: Once some construct depends on implicit search it has to be +written as a logic program itself. Consider for instance the problem of creating +a `TreeSet[T]` or a `HashSet[T]` depending on whether `T` has an `Ordering` or +not. We can create a set of implicit definitions like this: + +```scala +trait SetFor[T, S <: Set[T]] + +class LowPriority: + implicit def hashSetFor[T]: SetFor[T, HashSet[T]] = ... 
+ +object SetsFor extends LowPriority: + implicit def treeSetFor[T: Ordering]: SetFor[T, TreeSet[T]] = ... +``` + +Clearly, this is not pretty. Besides all the usual indirection of implicit +search, we face the problem of rule prioritization where we have to ensure that +`treeSetFor` takes priority over `hashSetFor` if the element type has an +ordering. This is solved (clumsily) by putting `hashSetFor` in a superclass +`LowPriority` of the object `SetsFor` where `treeSetFor` is defined. Maybe the +boilerplate would still be acceptable if the crufty code could be contained. +However, this is not the case. Every user of the abstraction has to be +parameterized itself with a `SetFor` implicit. Considering the simple task _"I +want a `TreeSet[T]` if `T` has an ordering and a `HashSet[T]` otherwise"_, this +seems like a lot of ceremony. + +There are some proposals to improve the situation in specific areas, for +instance by allowing more elaborate schemes to specify priorities. But they all +keep the viral nature of implicit search programs based on logic programming. + +By contrast, the new `summonFrom` construct makes implicit search available +in a functional context. To solve the problem of creating the right set, one +would use it as follows: + +```scala +import scala.compiletime.summonFrom + +inline def setFor[T]: Set[T] = summonFrom { + case ord: Ordering[T] => new TreeSet[T]()(using ord) + case _ => new HashSet[T] +} +``` + +A `summonFrom` call takes a pattern matching closure as argument. All patterns +in the closure are type ascriptions of the form `identifier : Type`. + +Patterns are tried in sequence. The first case with a pattern `x: T` such that an implicit value of type `T` can be summoned is chosen. + +Alternatively, one can also use a pattern-bound given instance, which avoids the explicit using clause. 
For instance, `setFor` could also be formulated as follows: + +```scala +import scala.compiletime.summonFrom + +inline def setFor[T]: Set[T] = summonFrom { + case given Ordering[T] => new TreeSet[T] + case _ => new HashSet[T] +} +``` + +`summonFrom` applications must be reduced at compile time. + +Consequently, if we summon an `Ordering[String]` the code above will return a +new instance of `TreeSet[String]`. + +```scala +summon[Ordering[String]] + +println(setFor[String].getClass) // prints class scala.collection.immutable.TreeSet +``` + +**Note** `summonFrom` applications can raise ambiguity errors. Consider the following +code with two givens in scope of type `A`. The pattern match in `f` will raise +an ambiguity error if `f` is applied. + +```scala +class A +given a1: A = new A +given a2: A = new A + +inline def f: Any = summonFrom { + case given _: A => ??? // error: ambiguous givens +} +``` + +## `summonInline` + +The shorthand `summonInline` provides a simple way to write a `summon` that is delayed until the call is inlined. +Unlike `summonFrom`, `summonInline` also yields the implicit-not-found error, if a given instance of the summoned +type is not found. +```scala +import scala.compiletime.summonInline +import scala.annotation.implicitNotFound + +@implicitNotFound("Missing One") +trait Missing1 + +@implicitNotFound("Missing Two") +trait Missing2 + +trait NotMissing +given NotMissing = ??? 
+ +transparent inline def summonInlineCheck[T <: Int](inline t : T) : Any = + inline t match + case 1 => summonInline[Missing1] + case 2 => summonInline[Missing2] + case _ => summonInline[NotMissing] + +val missing1 = summonInlineCheck(1) // error: Missing One +val missing2 = summonInlineCheck(2) // error: Missing Two +val notMissing : NotMissing = summonInlineCheck(3) +``` + +## Reference + +For more information about compile-time operations, see [PR #4768](https://github.com/lampepfl/dotty/pull/4768), +which explains how `summonFrom`'s predecessor (implicit matches) can be used for typelevel programming and code specialization and [PR #7201](https://github.com/lampepfl/dotty/pull/7201) which explains the new `summonFrom` syntax. diff --git a/docs/_docs/reference/metaprogramming/inline.md b/docs/_docs/reference/metaprogramming/inline.md new file mode 100644 index 000000000000..c625f4774a18 --- /dev/null +++ b/docs/_docs/reference/metaprogramming/inline.md @@ -0,0 +1,389 @@ +--- +layout: doc-page +title: Inline +movedTo: https://docs.scala-lang.org/scala3/reference/metaprogramming/inline.html +--- + +## Inline Definitions + +`inline` is a new [soft modifier](../soft-modifier.md) that guarantees that a +definition will be inlined at the point of use. Example: + +```scala +object Config: + inline val logging = false + +object Logger: + + private var indent = 0 + + inline def log[T](msg: String, indentMargin: =>Int)(op: => T): T = + if Config.logging then + println(s"${" " * indent}start $msg") + indent += indentMargin + val result = op + indent -= indentMargin + println(s"${" " * indent}$msg = $result") + result + else op +end Logger +``` + +The `Config` object contains a definition of the **inline value** `logging`. +This means that `logging` is treated as a _constant value_, equivalent to its +right-hand side `false`. 
The right-hand side of such an `inline val` must itself +be a [constant expression](https://scala-lang.org/files/archive/spec/2.13/06-expressions.html#constant-expressions). +Used in this way, `inline` is equivalent to Java and Scala 2's `final`. Note that `final`, meaning +_inlined constant_, is still supported in Scala 3, but will be phased out. + +The `Logger` object contains a definition of the **inline method** `log`. This +method will always be inlined at the point of call. + +In the inlined code, an `if-then-else` with a constant condition will be rewritten +to its `then`- or `else`-part. Consequently, in the `log` method above the +`if Config.logging` with `Config.logging == true` will get rewritten into its +`then`-part. + +Here's an example: + +```scala +var indentSetting = 2 + +def factorial(n: BigInt): BigInt = + log(s"factorial($n)", indentSetting) { + if n == 0 then 1 + else n * factorial(n - 1) + } +``` + +If `Config.logging == false`, this will be rewritten (simplified) to: + +```scala +def factorial(n: BigInt): BigInt = + if n == 0 then 1 + else n * factorial(n - 1) +``` + +As you notice, since neither `msg` nor `indentMargin` was used, they do not +appear in the generated code for `factorial`. Also note the body of our `log` +method: the `else-` part reduces to just an `op`. In the generated code we do +not generate any closures because we only refer to a by-name parameter *once*. +Consequently, the code was inlined directly and the call was beta-reduced. 
+ +In the `true` case the code will be rewritten to: + +```scala +def factorial(n: BigInt): BigInt = + val msg = s"factorial($n)" + println(s"${" " * indent}start $msg") + Logger.inline$indent_=(indent.+(indentSetting)) + val result = + if n == 0 then 1 + else n * factorial(n - 1) + Logger.inline$indent_=(indent.-(indentSetting)) + println(s"${" " * indent}$msg = $result") + result +``` + +Note that the by-value parameter `msg` is evaluated only once, per the usual Scala +semantics, by binding the value and reusing the `msg` through the body of +`factorial`. Also, note the special handling of the assignment to the private var +`indent`. It is achieved by generating a setter method `def inline$indent_=` and calling it instead. + +Inline methods always have to be fully applied. For instance, a call to +```scala +Logger.log[String]("some op", indentSetting) +``` +would be ill-formed and the compiler would complain that arguments are missing. +However, it is possible to pass wildcard arguments instead. For instance, +```scala +Logger.log[String]("some op", indentSetting)(_) +``` +would typecheck. + +### Recursive Inline Methods + +Inline methods can be recursive. For instance, when called with a constant +exponent `n`, the following method for `power` will be implemented by +straight inline code without any loop or recursion. + +```scala +inline def power(x: Double, n: Int): Double = + if n == 0 then 1.0 + else if n == 1 then x + else + val y = power(x, n / 2) + if n % 2 == 0 then y * y else y * y * x + +power(expr, 10) +// translates to +// +// val x = expr +// val y1 = x * x // ^2 +// val y2 = y1 * y1 // ^4 +// val y3 = y2 * x // ^5 +// y3 * y3 // ^10 +``` + +Parameters of inline methods can have an `inline` modifier as well. This means +that actual arguments to these parameters will be inlined in the body of the +`inline def`. `inline` parameters have call semantics equivalent to by-name parameters +but allow for duplication of the code in the argument. 
It is usually useful when constant +values need to be propagated to allow further optimizations/reductions. + +The following example shows the difference in translation between by-value, by-name and `inline` +parameters: + +```scala +inline def funkyAssertEquals(actual: Double, expected: =>Double, inline delta: Double): Unit = + if (actual - expected).abs > delta then + throw new AssertionError(s"difference between ${expected} and ${actual} was larger than ${delta}") + +funkyAssertEquals(computeActual(), computeExpected(), computeDelta()) +// translates to +// +// val actual = computeActual() +// def expected = computeExpected() +// if (actual - expected).abs > computeDelta() then +// throw new AssertionError(s"difference between ${expected} and ${actual} was larger than ${computeDelta()}") +``` + +### Rules for Overriding + +Inline methods can override other non-inline methods. The rules are as follows: + +1. If an inline method `f` implements or overrides another, non-inline method, the inline method can also be invoked at runtime. For instance, consider the scenario: + + ```scala + abstract class A: + def f: Int + def g: Int = f + + class B extends A: + inline def f = 22 + override inline def g = f + 11 + + val b = new B + val a: A = b + // inlined invocatons + assert(b.f == 22) + assert(b.g == 33) + // dynamic invocations + assert(a.f == 22) + assert(a.g == 33) + ``` + + The inlined invocations and the dynamically dispatched invocations give the same results. + +2. Inline methods are effectively final. + +3. Inline methods can also be abstract. An abstract inline method can be implemented only by other inline methods. It cannot be invoked directly: + + ```scala + abstract class A: + inline def f: Int + + object B extends A: + inline def f: Int = 22 + + B.f // OK + val a: A = B + a.f // error: cannot inline f in A. + ``` + +### Relationship to `@inline` + +Scala 2 also defines a `@inline` annotation which is used as a hint for the +backend to inline code. 
The `inline` modifier is a more powerful option: + +- expansion is guaranteed instead of best effort, +- expansion happens in the frontend instead of in the backend and +- expansion also applies to recursive methods. + + + +#### The definition of constant expression + +Right-hand sides of inline values and of arguments for inline parameters must be +constant expressions in the sense defined by the [SLS §6.24](https://www.scala-lang.org/files/archive/spec/2.13/06-expressions.html#constant-expressions), +including _platform-specific_ extensions such as constant folding of pure +numeric computations. + +An inline value must have a literal type such as `1` or `true`. + +```scala +inline val four = 4 +// equivalent to +inline val four: 4 = 4 +``` + +It is also possible to have inline vals of types that do not have a syntax, such as `Short(4)`. + +```scala +trait InlineConstants: + inline val myShort: Short + +object Constants extends InlineConstants: + inline val myShort/*: Short(4)*/ = 4 +``` + +## Transparent Inline Methods + +Inline methods can additionally be declared `transparent`. +This means that the return type of the inline method can be +specialized to a more precise type upon expansion. Example: + +```scala +class A +class B extends A: + def m = true + +transparent inline def choose(b: Boolean): A = + if b then new A else new B + +val obj1 = choose(true) // static type is A +val obj2 = choose(false) // static type is B + +// obj1.m // compile-time error: `m` is not defined on `A` +obj2.m // OK +``` + +Here, the inline method `choose` returns an instance of either of the two types `A` or `B`. +If `choose` had not been declared to be `transparent`, the result +of its expansion would always be of type `A`, even though the computed value might be of the subtype `B`. +The inline method is a "blackbox" in the sense that details of its implementation do not leak out. +But if a `transparent` modifier is given, the expansion is the type of the expanded body. 
If the argument `b` +is `true`, that type is `A`, otherwise it is `B`. Consequently, calling `m` on `obj2` +type-checks since `obj2` has the same type as the expansion of `choose(false)`, which is `B`. +Transparent inline methods are "whitebox" in the sense that the type +of an application of such a method can be more specialized than its declared +return type, depending on how the method expands. + +In the following example, we see how the return type of `zero` is specialized to +the singleton type `0` permitting the addition to be ascribed with the correct +type `1`. + +```scala +transparent inline def zero: Int = 0 + +val one: 1 = zero + 1 +``` + +### Transparent vs. non-transparent inline + +As we already discussed, transparent inline methods may influence type checking at call site. +Technically this implies that transparent inline methods must be expanded during type checking of the program. +Other inline methods are inlined later after the program is fully typed. + +For example, the following two functions will be typed the same way but will be inlined at different times. + +```scala +inline def f1: T = ... +transparent inline def f2: T = (...): T +``` + +A noteworthy difference is the behavior of `transparent inline given`. +If there is an error reported when inlining that definition, it will be considered as an implicit search mismatch and the search will continue. +A `transparent inline given` can add a type ascription in its RHS (as in `f2` from the previous example) to avoid the precise type but keep the search behavior. +On the other hand, an `inline given` is taken as an implicit and then inlined after typing. +Any error will be emitted as usual. + +## Inline Conditionals + +An if-then-else expression whose condition is a constant expression can be simplified to +the selected branch. 
Prefixing an if-then-else expression with `inline` enforces that +the condition has to be a constant expression, and thus guarantees that the conditional will always +simplify. + +Example: + +```scala +inline def update(delta: Int) = + inline if delta >= 0 then increaseBy(delta) + else decreaseBy(-delta) +``` + +A call `update(22)` would rewrite to `increaseBy(22)`. But if `update` was called with +a value that was not a compile-time constant, we would get a compile time error like the one +below: + +```scala + | inline if delta >= 0 then ??? + | ^ + | cannot reduce inline if + | its condition + | delta >= 0 + | is not a constant value + | This location is in code that was inlined at ... +``` + +In a transparent inline, an `inline if` will force the inlining of any inline definition in its condition during type checking. + +## Inline Matches + +A `match` expression in the body of an `inline` method definition may be +prefixed by the `inline` modifier. If there is enough static information to +unambiguously take a branch, the expression is reduced to that branch and the +type of the result is taken. If not, a compile-time error is raised that +reports that the match cannot be reduced. + +The example below defines an inline method with a +single inline match expression that picks a case based on its static type: + +```scala +transparent inline def g(x: Any): Any = + inline x match + case x: String => (x, x) // Tuple2[String, String](x, x) + case x: Double => x + +g(1.0d) // Has type 1.0d which is a subtype of Double +g("test") // Has type (String, String) +``` + +The scrutinee `x` is examined statically and the inline match is reduced +accordingly returning the corresponding value (with the type specialized because `g` is declared `transparent`). This example performs a simple type test over the +scrutinee. The type can have a richer structure like the simple ADT below. 
+`toInt` matches the structure of a number in [Church-encoding](https://en.wikipedia.org/wiki/Church_encoding) +and _computes_ the corresponding integer. + +```scala +trait Nat +case object Zero extends Nat +case class Succ[N <: Nat](n: N) extends Nat + +transparent inline def toInt(n: Nat): Int = + inline n match + case Zero => 0 + case Succ(n1) => toInt(n1) + 1 + +inline val natTwo = toInt(Succ(Succ(Zero))) +val intTwo: 2 = natTwo +``` + +`natTwo` is inferred to have the singleton type 2. + +### Reference + +For more information about the semantics of `inline`, see the [Scala 2020: Semantics-preserving inlining for metaprogramming](https://dl.acm.org/doi/10.1145/3426426.3428486) paper. diff --git a/docs/docs/reference/metaprogramming/macros-spec.md b/docs/_docs/reference/metaprogramming/macros-spec.md similarity index 92% rename from docs/docs/reference/metaprogramming/macros-spec.md rename to docs/_docs/reference/metaprogramming/macros-spec.md index e03bb2fc2d20..d152377e6d62 100644 --- a/docs/docs/reference/metaprogramming/macros-spec.md +++ b/docs/_docs/reference/metaprogramming/macros-spec.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Macros Spec" +movedTo: https://docs.scala-lang.org/scala3/reference/metaprogramming/macros-spec.html --- ## Implementation @@ -176,25 +177,25 @@ implementation of `power` otherwise. 
import scala.quoted.* inline def power(x: Double, n: Int): Double = - ${ powerExpr('x, 'n) } + ${ powerExpr('x, 'n) } private def powerExpr(x: Expr[Double], n: Expr[Int]) (using Quotes): Expr[Double] = - n.value match - case Some(m) => powerExpr(x, m) - case _ => '{ dynamicPower($x, $n) } + n.value match + case Some(m) => powerExpr(x, m) + case _ => '{ dynamicPower($x, $n) } private def powerExpr(x: Expr[Double], n: Int) (using Quotes): Expr[Double] = - if n == 0 then '{ 1.0 } - else if n == 1 then x - else if n % 2 == 0 then '{ val y = $x * $x; ${ powerExpr('y, n / 2) } } - else '{ $x * ${ powerExpr(x, n - 1) } } + if n == 0 then '{ 1.0 } + else if n == 1 then x + else if n % 2 == 0 then '{ val y = $x * $x; ${ powerExpr('y, n / 2) } } + else '{ $x * ${ powerExpr(x, n - 1) } } private def dynamicPower(x: Double, n: Int): Double = - if n == 0 then 1.0 - else if n % 2 == 0 then dynamicPower(x * x, n / 2) - else x * dynamicPower(x, n - 1) + if n == 0 then 1.0 + else if n % 2 == 0 then dynamicPower(x * x, n / 2) + else x * dynamicPower(x, n - 1) ``` In the above, the method `.value` maps a constant expression of the type @@ -205,15 +206,15 @@ that maps expressions over functions to functions over expressions can be implemented in user code: ```scala given AsFunction1[T, U]: Conversion[Expr[T => U], Expr[T] => Expr[U]] with - def apply(f: Expr[T => U]): Expr[T] => Expr[U] = - (x: Expr[T]) => f match - case Lambda(g) => g(x) - case _ => '{ ($f)($x) } + def apply(f: Expr[T => U]): Expr[T] => Expr[U] = + (x: Expr[T]) => f match + case Lambda(g) => g(x) + case _ => '{ ($f)($x) } ``` This assumes an extractor ```scala object Lambda: - def unapply[T, U](x: Expr[T => U]): Option[Expr[T] => Expr[U]] + def unapply[T, U](x: Expr[T => U]): Option[Expr[T] => Expr[U]] ``` Once we allow inspection of code via extractors, it’s tempting to also add constructors that create typed trees directly without going diff --git a/docs/_docs/reference/metaprogramming/macros.md 
b/docs/_docs/reference/metaprogramming/macros.md new file mode 100644 index 000000000000..e1267ea82a59 --- /dev/null +++ b/docs/_docs/reference/metaprogramming/macros.md @@ -0,0 +1,823 @@ +--- +layout: doc-page +title: "Macros" +movedTo: https://docs.scala-lang.org/scala3/reference/metaprogramming/macros.html +--- + +> When developing macros enable `-Xcheck-macros` scalac option flag to have extra runtime checks. + +## Macros: Quotes and Splices + +Macros are built on two well-known fundamental operations: quotation and splicing. +Quotation is expressed as `'{...}` for expressions and splicing is expressed as `${ ... }`. +Additionally, within a quote or a splice we can quote or splice identifiers directly (i.e. `'e` and `$e`). +Readers may notice the resemblance of the two aforementioned syntactic +schemes with the familiar string interpolation syntax. + +```scala +println(s"Hello, $name, here is the result of 1 + 1 = ${1 + 1}") +``` + +In string interpolation we _quoted_ a string and then we _spliced_ into it, two others. The first, `name`, is a reference to a value of type [`String`](https://scala-lang.org/api/3.x/scala/Predef$.html#String-0), and the second is an arithmetic expression that will be _evaluated_ followed by the splicing of its string representation. + +Quotes and splices in this section allow us to treat code in a similar way, +effectively supporting macros. The entry point for macros is an inline method +with a top-level splice. We call it a top-level because it is the only occasion +where we encounter a splice outside a quote (consider as a quote the +compilation-unit at the call-site). For example, the code below presents an +`inline` method `assert` which calls at compile-time a method `assertImpl` with +a boolean expression tree as argument. `assertImpl` evaluates the expression and +prints it again in an error message if it evaluates to `false`. 
+
+```scala
+import scala.quoted.*
+
+inline def assert(inline expr: Boolean): Unit =
+  ${ assertImpl('expr) }
+
+def assertImpl(expr: Expr[Boolean])(using Quotes) = '{
+  if !$expr then
+    throw AssertionError(s"failed assertion: ${${ showExpr(expr) }}")
+}
+
+def showExpr(expr: Expr[Boolean])(using Quotes): Expr[String] =
+  '{ [actual implementation later in this document] }
+```
+
+If `e` is an expression, then `'{e}` represents the typed
+abstract syntax tree representing `e`. If `T` is a type, then `Type.of[T]`
+represents the type structure representing `T`. The precise
+definitions of "typed abstract syntax tree" or "type-structure" do not
+matter for now, the terms are used only to give some
+intuition. Conversely, `${e}` evaluates the expression `e`, which must
+yield a typed abstract syntax tree or type structure, and embeds the
+result as an expression (respectively, type) in the enclosing program.
+
+Quotations can have spliced parts in them; in this case the embedded
+splices are evaluated and embedded as part of the formation of the
+quotation.
+
+Quotes and splices can also be applied directly to identifiers. An identifier
+`$x` starting with a `$` that appears inside a quoted expression or type is treated as a
+splice `${x}`. Analogously, a quoted identifier `'x` that appears inside a splice
+is treated as a quote `'{x}`. See the Syntax section below for details.
+
+Quotes and splices are duals of each other.
+For arbitrary expressions `e` we have:
+
+```scala
+${'{e}} = e
+'{${e}} = e
+```
+
+## Types for Quotations
+
+The type signatures of quotes and splices can be described using
+two fundamental types:
+
+- `Expr[T]`: abstract syntax trees representing expressions of type `T`
+- `Type[T]`: non erased representation of type `T`.
+
+Quoting takes expressions of type `T` to expressions of type `Expr[T]`
+and it takes types `T` to expressions of type `Type[T]`. 
Splicing +takes expressions of type `Expr[T]` to expressions of type `T` and it +takes expressions of type `Type[T]` to types `T`. + +The two types can be defined in package [`scala.quoted`](https://scala-lang.org/api/3.x/scala/quoted.html) as follows: + +```scala +package scala.quoted + +sealed trait Expr[+T] +sealed trait Type[T] +``` + +Both `Expr` and `Type` are abstract and sealed, so all constructors for +these types are provided by the system. One way to construct values of +these types is by quoting, the other is by type-specific lifting +operations that will be discussed later on. + +## The Phase Consistency Principle + +A fundamental *phase consistency principle* (PCP) regulates accesses +to free variables in quoted and spliced code: + +- _For any free variable reference `x`, the number of quoted scopes and the number of spliced scopes between the reference to `x` and the definition of `x` must be equal_. + +Here, `this`-references count as free variables. On the other +hand, we assume that all imports are fully expanded and that `_root_` is +not a free variable. So references to global definitions are +allowed everywhere. + +The phase consistency principle can be motivated as follows: First, +suppose the result of a program `P` is some quoted text `'{ ... x +... }` that refers to a free variable `x` in `P`. This can be +represented only by referring to the original variable `x`. Hence, the +result of the program will need to persist the program state itself as +one of its parts. We don’t want to do this, hence this situation +should be made illegal. Dually, suppose a top-level part of a program +is a spliced text `${ ... x ... }` that refers to a free variable `x` +in `P`. This would mean that we refer during _construction_ of `P` to +a value that is available only during _execution_ of `P`. This is of +course impossible and therefore needs to be ruled out. 
Now, the +small-step evaluation of a program will reduce quotes and splices in +equal measure using the cancellation rules above. But it will neither +create nor remove quotes or splices individually. So the PCP ensures +that program elaboration will lead to neither of the two unwanted +situations described above. + +In what concerns the range of features it covers, this form of macros introduces +a principled metaprogramming framework that is quite close to the MetaML family of +languages. One difference is that MetaML does not have an equivalent of the PCP - +quoted code in MetaML _can_ access variables in its immediately enclosing +environment, with some restrictions and caveats since such accesses involve +serialization. However, this does not constitute a fundamental gain in +expressiveness. + +## From `Expr`s to Functions and Back + +It is possible to convert any `Expr[T => R]` into `Expr[T] => Expr[R]` and back. +These conversions can be implemented as follows: + +```scala +def to[T: Type, R: Type](f: Expr[T] => Expr[R])(using Quotes): Expr[T => R] = + '{ (x: T) => ${ f('x) } } + +def from[T: Type, R: Type](f: Expr[T => R])(using Quotes): Expr[T] => Expr[R] = + (x: Expr[T]) => '{ $f($x) } +``` + +Note how the fundamental phase consistency principle works in two +different directions here for `f` and `x`. In the method `to`, the reference to `f` is +legal because it is quoted, then spliced, whereas the reference to `x` +is legal because it is spliced, then quoted. + +They can be used as follows: + +```scala +val f1: Expr[Int => String] = + to((x: Expr[Int]) => '{ $x.toString }) // '{ (x: Int) => x.toString } + +val f2: Expr[Int] => Expr[String] = + from('{ (x: Int) => x.toString }) // (x: Expr[Int]) => '{ ((x: Int) => x.toString)($x) } +f2('{2}) // '{ ((x: Int) => x.toString)(2) } +``` + +One limitation of `from` is that it does not β-reduce when a lambda is called immediately, as evidenced in the code `{ ((x: Int) => x.toString)(2) }`. 
+In some cases we want to remove the lambda from the code, for this we provide the method `Expr.betaReduce` that turns a tree +describing a function into a function mapping trees to trees. + +```scala +object Expr: + ... + def betaReduce[...](...)(...): ... = ... +``` + +The definition of `Expr.betaReduce(f)(x)` is assumed to be functionally the same as +`'{($f)($x)}`, however it should optimize this call by returning the +result of beta-reducing `f(x)` if `f` is a known lambda expression. +`Expr.betaReduce` distributes applications of `Expr` over function arrows: + +```scala +Expr.betaReduce(_): Expr[(T1, ..., Tn) => R] => ((Expr[T1], ..., Expr[Tn]) => Expr[R]) +``` + +## Lifting Types + +Types are not directly affected by the phase consistency principle. +It is possible to use types defined at any level in any other level. +But, if a type is used in a subsequent stage it will need to be lifted to a `Type`. +Indeed, the definition of `to` above uses `T` in the next stage, there is a +quote but no splice between the parameter binding of `T` and its +usage. But the code can be rewritten by adding an explicit binding of a `Type[T]`: + +```scala +def to[T, R](f: Expr[T] => Expr[R])(using t: Type[T])(using Type[R], Quotes): Expr[T => R] = + '{ (x: t.Underlying) => ${ f('x) } } +``` + +In this version of `to`, the type of `x` is now the result of +inserting the type `Type[T]` and selecting its `Underlying`. + +To avoid clutter, the compiler converts any type reference to +a type `T` in subsequent phases to `summon[Type[T]].Underlying`. + +And to avoid duplication it does it once per type, and creates +an alias for that type at the start of the quote. 
+ +For instance, the user-level definition of `to`: + +```scala +def to[T, R](f: Expr[T] => Expr[R])(using t: Type[T], r: Type[R])(using Quotes): Expr[T => R] = + '{ (x: T) => ${ f('x) } } +``` + +would be rewritten to + +```scala +def to[T, R](f: Expr[T] => Expr[R])(using t: Type[T], r: Type[R])(using Quotes): Expr[T => R] = + '{ + type T = t.Underlying + (x: T) => ${ f('x) } + } +``` + +The `summon` query succeeds because there is a given instance of +type `Type[T]` available (namely the given parameter corresponding +to the context bound `: Type`), and the reference to that value is +phase-correct. If that was not the case, the phase inconsistency for +`T` would be reported as an error. + +## Lifting Expressions + +Consider the following implementation of a staged interpreter that implements +a compiler through staging. + +```scala +import scala.quoted.* + +enum Exp: + case Num(n: Int) + case Plus(e1: Exp, e2: Exp) + case Var(x: String) + case Let(x: String, e: Exp, in: Exp) + +import Exp.* +``` + +The interpreted language consists of numbers `Num`, addition `Plus`, and variables +`Var` which are bound by `Let`. Here are two sample expressions in the language: + +```scala +val exp = Plus(Plus(Num(2), Var("x")), Num(4)) +val letExp = Let("x", Num(3), exp) +``` + +Here’s a compiler that maps an expression given in the interpreted +language to quoted Scala code of type `Expr[Int]`. +The compiler takes an environment that maps variable names to Scala `Expr`s. 
+ +```scala +import scala.quoted.* + +def compile(e: Exp, env: Map[String, Expr[Int]])(using Quotes): Expr[Int] = + e match + case Num(n) => + Expr(n) + case Plus(e1, e2) => + '{ ${ compile(e1, env) } + ${ compile(e2, env) } } + case Var(x) => + env(x) + case Let(x, e, body) => + '{ val y = ${ compile(e, env) }; ${ compile(body, env + (x -> 'y)) } } +``` + +Running `compile(letExp, Map())` would yield the following Scala code: + +```scala +'{ val y = 3; (2 + y) + 4 } +``` + +The body of the first clause, `case Num(n) => Expr(n)`, looks suspicious. `n` +is declared as an `Int`, yet it is converted to an `Expr[Int]` with `Expr()`. +Shouldn’t `n` be quoted? In fact this would not +work since replacing `n` by `'n` in the clause would not be phase +correct. + +The `Expr.apply` method is defined in package `quoted`: + +```scala +package quoted + +object Expr: + ... + def apply[T: ToExpr](x: T)(using Quotes): Expr[T] = + summon[ToExpr[T]].toExpr(x) +``` + +This method says that values of types implementing the `ToExpr` type class can be +converted to `Expr` values using `Expr.apply`. + +Scala 3 comes with given instances of `ToExpr` for +several types including `Boolean`, `String`, and all primitive number +types. For example, `Int` values can be converted to `Expr[Int]` +values by wrapping the value in a `Literal` tree node. This makes use +of the underlying tree representation in the compiler for +efficiency. But the `ToExpr` instances are nevertheless not _magic_ +in the sense that they could all be defined in a user program without +knowing anything about the representation of `Expr` trees. For +instance, here is a possible instance of `ToExpr[Boolean]`: + +```scala +given ToExpr[Boolean] with + def toExpr(b: Boolean) = + if b then '{ true } else '{ false } +``` + +Once we can lift bits, we can work our way up. 
For instance, here is a +possible implementation of `ToExpr[Int]` that does not use the underlying +tree machinery: + +```scala +given ToExpr[Int] with + def toExpr(n: Int) = n match + case Int.MinValue => '{ Int.MinValue } + case _ if n < 0 => '{ - ${ toExpr(-n) } } + case 0 => '{ 0 } + case _ if n % 2 == 0 => '{ ${ toExpr(n / 2) } * 2 } + case _ => '{ ${ toExpr(n / 2) } * 2 + 1 } +``` + +Since `ToExpr` is a type class, its instances can be conditional. For example, +a `List` is liftable if its element type is: + +```scala +given [T: ToExpr : Type]: ToExpr[List[T]] with + def toExpr(xs: List[T]) = xs match + case head :: tail => '{ ${ Expr(head) } :: ${ toExpr(tail) } } + case Nil => '{ Nil: List[T] } +``` + +In the end, `ToExpr` resembles very much a serialization +framework. Like the latter it can be derived systematically for all +collections, case classes and enums. Note also that the synthesis +of _type-tag_ values of type `Type[T]` is essentially the type-level +analogue of lifting. + +Using lifting, we can now give the missing definition of `showExpr` in the introductory example: + +```scala +def showExpr[T](expr: Expr[T])(using Quotes): Expr[String] = + val code: String = expr.show + Expr(code) +``` + +That is, the `showExpr` method converts its `Expr` argument to a string (`code`), and lifts +the result back to an `Expr[String]` using `Expr.apply`. + +## Lifting Types + +The previous section has shown that the metaprogramming framework has +to be able to take a type `T` and convert it to a type tree of type +`Type[T]` that can be reified. This means that all free variables of +the type tree refer to types and values defined in the current stage. + +For a reference to a global class, this is easy: Just issue the fully +qualified name of the class. Members of reifiable types are handled by +just reifying the containing type together with the member name. 
But +what to do for references to type parameters or local type definitions +that are not defined in the current stage? Here, we cannot construct +the `Type[T]` tree directly, so we need to get it from a recursive +implicit search. For instance, to implement + +```scala +summon[Type[List[T]]] +``` + +where `T` is not defined in the current stage, we construct the type constructor +of `List` applied to the splice of the result of searching for a given instance for `Type[T]`: + +```scala +Type.of[ List[ summon[Type[T]].Underlying ] ] +``` + +This is exactly the algorithm that Scala 2 uses to search for type tags. +In fact Scala 2's type tag feature can be understood as a more ad-hoc version of +`quoted.Type`. As was the case for type tags, the implicit search for a `quoted.Type` +is handled by the compiler, using the algorithm sketched above. + +## Relationship with `inline` + +Seen by itself, principled metaprogramming looks more like a framework for +runtime metaprogramming than one for compile-time metaprogramming with macros. +But combined with Scala 3’s `inline` feature it can be turned into a compile-time +system. The idea is that macro elaboration can be understood as a combination of +a macro library and a quoted program. For instance, here’s the `assert` macro +again together with a program that calls `assert`. + +```scala +object Macros: + + inline def assert(inline expr: Boolean): Unit = + ${ assertImpl('expr) } + + def assertImpl(expr: Expr[Boolean])(using Quotes) = + val failMsg: Expr[String] = Expr("failed assertion: " + expr.show) + '{ if !($expr) then throw new AssertionError($failMsg) } + +@main def program = + val x = 1 + Macros.assert(x != 0) +``` + +Inlining the `assert` function would give the following program: + +```scala +@main def program = + val x = 1 + ${ Macros.assertImpl('{ x != 0}) } +``` + +The example is only phase correct because `Macros` is a global value and +as such not subject to phase consistency checking. 
Conceptually that’s +a bit unsatisfactory. If the PCP is so fundamental, it should be +applicable without the global value exception. But in the example as +given this does not hold since both `assert` and `program` call +`assertImpl` with a splice but no quote. + +However, one could argue that the example is really missing +an important aspect: The macro library has to be compiled in a phase +prior to the program using it, but in the code above, macro +and program are defined together. A more accurate view of +macros would be to have the user program be in a phase after the macro +definitions, reflecting the fact that macros have to be defined and +compiled before they are used. Hence, conceptually the program part +should be treated by the compiler as if it was quoted: + +```scala +@main def program = '{ + val x = 1 + ${ Macros.assertImpl('{ x != 0 }) } +} +``` + +If `program` is treated as a quoted expression, the call to +`Macro.assertImpl` becomes phase correct even if macro library and +program are conceptualized as local definitions. + +But what about the call from `assert` to `assertImpl`? Here, we need a +tweak of the typing rules. An inline function such as `assert` that +contains a splice operation outside an enclosing quote is called a +_macro_. Macros are supposed to be expanded in a subsequent phase, +i.e. in a quoted context. Therefore, they are also type checked as if +they were in a quoted context. For instance, the definition of +`assert` is typechecked as if it appeared inside quotes. This makes +the call from `assert` to `assertImpl` phase-correct, even if we +assume that both definitions are local. + +The `inline` modifier is used to declare a `val` that is +either a constant or is a parameter that will be a constant when instantiated. This +aspect is also important for macro expansion. + +To get values out of expressions containing constants `Expr` provides the method +`value` (or `valueOrError`). 
This will convert the `Expr[T]` into a `Some[T]` (or `T`) when the
+expression contains a value. Otherwise it will return `None` (or emit an error).
+To avoid having incidental val bindings generated by the inlining of the `def`
+it is recommended to use an inline parameter. To illustrate this, consider an
+implementation of the `power` function that makes use of a statically known exponent:
+
+```scala
+inline def power(x: Double, inline n: Int) = ${ powerCode('x, 'n) }
+
+private def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] =
+  n.value match
+    case Some(m) => powerCode(x, m)
+    case None => '{ Math.pow($x, $n.toDouble) }
+
+private def powerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] =
+  if n == 0 then '{ 1.0 }
+  else if n == 1 then x
+  else if n % 2 == 0 then '{ val y = $x * $x; ${ powerCode('y, n / 2) } }
+  else '{ $x * ${ powerCode(x, n - 1) } }
+```
+
+## Scope Extrusion
+
+Quotes and splices are duals as far as the PCP is concerned. But there is an
+additional restriction that needs to be imposed on splices to guarantee
+soundness: code in splices must be free of side effects. The restriction
+prevents code like this:
+
+```scala
+var x: Expr[T] = ...
+'{ (y: T) => ${ x = 'y; 1 } }
+```
+
+This code, if it was accepted, would _extrude_ a reference to a quoted variable
+`y` from its scope. This would subsequently allow access to a variable outside the
+scope where it is defined, which is likely problematic. The code is clearly
+phase consistent, so we cannot use PCP to rule it out. Instead, we postulate a
+future effect system that can guarantee that splices are pure. In the absence of
+such a system we simply demand that spliced expressions are pure by convention,
+and allow for undefined compiler behavior if they are not. This is analogous to
+the status of pattern guards in Scala, which are also required, but not
+verified, to be pure. 
+ +[Multi-Stage Programming](./staging.md) introduces one additional method where +you can expand code at runtime with a method `run`. There is also a problem with +that invocation of `run` in splices. Consider the following expression: + +```scala +'{ (x: Int) => ${ run('x); 1 } } +``` + +This is again phase correct, but will lead us into trouble. Indeed, evaluating +the splice will reduce the expression `run('x)` to `x`. But then the result + +```scala +'{ (x: Int) => ${ x; 1 } } +``` + +is no longer phase correct. To prevent this soundness hole it seems easiest to +classify `run` as a side-effecting operation. It would thus be prevented from +appearing in splices. In a base language with side effects we would have to do this +anyway: Since `run` runs arbitrary code it can always produce a side effect if +the code it runs produces one. + +## Example Expansion + +Assume we have two methods, one `map` that takes an `Expr[Array[T]]` and a +function `f` and one `sum` that performs a sum by delegating to `map`. 
+ +```scala +object Macros: + + def map[T](arr: Expr[Array[T]], f: Expr[T] => Expr[Unit]) + (using Type[T], Quotes): Expr[Unit] = '{ + var i: Int = 0 + while i < ($arr).length do + val element: T = ($arr)(i) + ${f('element)} + i += 1 + } + + def sum(arr: Expr[Array[Int]])(using Quotes): Expr[Int] = '{ + var sum = 0 + ${ map(arr, x => '{sum += $x}) } + sum + } + + inline def sum_m(arr: Array[Int]): Int = ${sum('arr)} + +end Macros +``` + +A call to `sum_m(Array(1,2,3))` will first inline `sum_m`: + +```scala +val arr: Array[Int] = Array.apply(1, [2,3 : Int]:Int*) +${_root_.Macros.sum('arr)} +``` + +then it will splice `sum`: + +```scala +val arr: Array[Int] = Array.apply(1, [2,3 : Int]:Int*) + +var sum = 0 +${ map('arr, x => '{sum += $x}) } +sum +``` + +then it will inline `map`: + +```scala +val arr: Array[Int] = Array.apply(1, [2,3 : Int]:Int*) + +var sum = 0 +val f = x => '{sum += $x} +${ _root_.Macros.map('arr, 'f)(Type.of[Int])} +sum +``` + +then it will expand and splice inside quotes `map`: + +```scala +val arr: Array[Int] = Array.apply(1, [2,3 : Int]:Int*) + +var sum = 0 +val f = x => '{sum += $x} +var i: Int = 0 +while i < arr.length do + val element: Int = (arr)(i) + sum += element + i += 1 +sum +``` + +Finally cleanups and dead code elimination: + +```scala +val arr: Array[Int] = Array.apply(1, [2,3 : Int]:Int*) +var sum = 0 +var i: Int = 0 +while i < arr.length do + val element: Int = arr(i) + sum += element + i += 1 +sum +``` + +## Find implicits within a macro + +Similarly to the `summonFrom` construct, it is possible to make implicit search available +in a quote context. 
For this we simply provide `scala.quoted.Expr.summon`: + +```scala +import scala.collection.immutable.{ TreeSet, HashSet } +inline def setFor[T]: Set[T] = ${ setForExpr[T] } + +def setForExpr[T: Type](using Quotes): Expr[Set[T]] = + Expr.summon[Ordering[T]] match + case Some(ord) => '{ new TreeSet[T]()($ord) } + case _ => '{ new HashSet[T] } +``` + +## Relationship with Transparent Inline + +[Inline](./inline.md) documents inlining. The code below introduces a transparent +inline method that can calculate either a value of type `Int` or a value of type +`String`. + +```scala +transparent inline def defaultOf(inline str: String) = + ${ defaultOfImpl('str) } + +def defaultOfImpl(strExpr: Expr[String])(using Quotes): Expr[Any] = + strExpr.valueOrError match + case "int" => '{1} + case "string" => '{"a"} + +// in a separate file +val a: Int = defaultOf("int") +val b: String = defaultOf("string") + +``` + +## Defining a macro and using it in a single project + +It is possible to define macros and use them in the same project as long as the implementation +of the macros does not have run-time dependencies on code in the file where it is used. +It might still have compile-time dependencies on types and quoted code that refers to the use-site file. + +To provide this functionality Scala 3 provides a transparent compilation mode where files that +try to expand a macro but fail because the macro has not been compiled yet are suspended. +If there are any suspended files when the compilation ends, the compiler will automatically restart +compilation of the suspended files using the output of the previous (partial) compilation as macro classpath. +In case all files are suspended due to cyclic dependencies the compilation will fail with an error. + +## Pattern matching on quoted expressions + +It is possible to deconstruct or extract values out of `Expr` using pattern matching. + +`scala.quoted` contains objects that can help extracting values from `Expr`. 
+
+- `scala.quoted.Expr`/`scala.quoted.Exprs`: matches an expression of a value (or list of values) and returns the value (or list of values).
+- `scala.quoted.Const`/`scala.quoted.Consts`: Same as `Expr`/`Exprs` but only works on primitive values.
+- `scala.quoted.Varargs`: matches an explicit sequence of expressions and returns them. These sequences are useful to get individual `Expr[T]` out of a varargs expression of type `Expr[Seq[T]]`.
+
+These could be used in the following way to optimize any call to `sum` that has statically known values.
+
+```scala
+inline def sum(inline args: Int*): Int = ${ sumExpr('args) }
+private def sumExpr(argsExpr: Expr[Seq[Int]])(using Quotes): Expr[Int] =
+  argsExpr match
+    case Varargs(args @ Exprs(argValues)) =>
+      // args is of type Seq[Expr[Int]]
+      // argValues is of type Seq[Int]
+      Expr(argValues.sum) // precompute result of sum
+    case Varargs(argExprs) => // argExprs is of type Seq[Expr[Int]]
+      val staticSum: Int = argExprs.map(_.value.getOrElse(0)).sum
+      val dynamicSum: Seq[Expr[Int]] = argExprs.filter(_.value.isEmpty)
+      dynamicSum.foldLeft(Expr(staticSum))((acc, arg) => '{ $acc + $arg })
+    case _ =>
+      '{ $argsExpr.sum }
+```
+
+### Quoted patterns
+
+Quoted patterns allow deconstructing complex code that contains a precise structure, types or methods.
+Patterns `'{ ... }` can be placed in any location where Scala expects a pattern.
+
+For example
+
+```scala
+optimize {
+  sum(sum(1, a, 2), 3, b)
+} // should be optimized to 6 + a + b
+```
+
+```scala
+def sum(args: Int*): Int = args.sum
+inline def optimize(inline arg: Int): Int = ${ optimizeExpr('arg) }
+private def optimizeExpr(body: Expr[Int])(using Quotes): Expr[Int] =
+  body match
+    // Match a call to sum without any arguments
+    case '{ sum() } => Expr(0)
+    // Match a call to sum with an argument $n of type Int.
+    // n will be the Expr[Int] representing the argument. 
+ case '{ sum($n) } => n + // Match a call to sum and extracts all its args in an `Expr[Seq[Int]]` + case '{ sum(${Varargs(args)}: _*) } => sumExpr(args) + case body => body + +private def sumExpr(args1: Seq[Expr[Int]])(using Quotes): Expr[Int] = + def flatSumArgs(arg: Expr[Int]): Seq[Expr[Int]] = arg match + case '{ sum(${Varargs(subArgs)}: _*) } => subArgs.flatMap(flatSumArgs) + case arg => Seq(arg) + val args2 = args1.flatMap(flatSumArgs) + val staticSum: Int = args2.map(_.value.getOrElse(0)).sum + val dynamicSum: Seq[Expr[Int]] = args2.filter(_.value.isEmpty) + dynamicSum.foldLeft(Expr(staticSum))((acc, arg) => '{ $acc + $arg }) +``` + +### Recovering precise types using patterns + +Sometimes it is necessary to get a more precise type for an expression. This can be achieved using the following pattern match. + +```scala +def f(expr: Expr[Any])(using Quotes) = expr match + case '{ $x: t } => + // If the pattern match succeeds, then there is + // some type `t` such that + // - `x` is bound to a variable of type `Expr[t]` + // - `t` is bound to a new type `t` and a given + // instance `Type[t]` is provided for it + // That is, we have `x: Expr[t]` and `given Type[t]`, + // for some (unknown) type `t`. 
+``` + +This might be used to then perform an implicit search as in: + +```scala +extension (inline sc: StringContext) + inline def showMe(inline args: Any*): String = ${ showMeExpr('sc, 'args) } + +private def showMeExpr(sc: Expr[StringContext], argsExpr: Expr[Seq[Any]])(using Quotes): Expr[String] = + import quotes.reflect.report + argsExpr match + case Varargs(argExprs) => + val argShowedExprs = argExprs.map { + case '{ $arg: tp } => + Expr.summon[Show[tp]] match + case Some(showExpr) => + '{ $showExpr.show($arg) } + case None => + report.error(s"could not find implicit for ${Type.show[Show[tp]]}", arg); '{???} + } + val newArgsExpr = Varargs(argShowedExprs) + '{ $sc.s($newArgsExpr: _*) } + case _ => + // `new StringContext(...).showMeExpr(args: _*)` not an explicit `showMeExpr"..."` + report.error(s"Args must be explicit", argsExpr) + '{???} + +trait Show[-T]: + def show(x: T): String + +// in a different file +given Show[Boolean] with + def show(b: Boolean) = "boolean!" + +println(showMe"${true}") +``` + +### Open code patterns + +Quoted pattern matching also provides higher-order patterns to match open terms. If a quoted term contains a definition, +then the rest of the quote can refer to this definition. + +```scala +'{ + val x: Int = 4 + x * x +} +``` + +To match such a term we need to match the definition and the rest of the code, but we need to explicitly state that the rest of the code may refer to this definition. + +```scala +case '{ val y: Int = $x; $body(y): Int } => +``` + +Here `$x` will match any closed expression while `$body(y)` will match an expression that is closed under `y`. Then +the subexpression of type `Expr[Int]` is bound to `body` as an `Expr[Int => Int]`. The extra argument represents the references to `y`. Usually this expression is used in combination with `Expr.betaReduce` to replace the extra argument. 
+ +```scala +inline def eval(inline e: Int): Int = ${ evalExpr('e) } + +private def evalExpr(e: Expr[Int])(using Quotes): Expr[Int] = e match + case '{ val y: Int = $x; $body(y): Int } => + // body: Expr[Int => Int] where the argument represents + // references to y + evalExpr(Expr.betaReduce('{$body(${evalExpr(x)})})) + case '{ ($x: Int) * ($y: Int) } => + (x.value, y.value) match + case (Some(a), Some(b)) => Expr(a * b) + case _ => e + case _ => e +``` + +```scala +eval { // expands to the code: (16: Int) + val x: Int = 4 + x * x +} +``` + +We can also close over several bindings using `$b(a1, a2, ..., an)`. +To match an actual application we can use braces on the function part `${b}(a1, a2, ..., an)`. + +## More details + +[More details](./macros-spec.md) diff --git a/docs/_docs/reference/metaprogramming/metaprogramming.md b/docs/_docs/reference/metaprogramming/metaprogramming.md new file mode 100644 index 000000000000..d2f210187337 --- /dev/null +++ b/docs/_docs/reference/metaprogramming/metaprogramming.md @@ -0,0 +1,47 @@ +--- +layout: doc-page +title: "Metaprogramming" +movedTo: https://docs.scala-lang.org/scala3/reference/metaprogramming.html +--- + +The following pages introduce the redesign of metaprogramming in Scala. They +introduce the following fundamental facilities: + +1. [`inline`](./inline.md) is a new modifier that guarantees that + a definition will be inlined at the point of use. The primary motivation + behind inline is to reduce the overhead behind function calls and access to + values. The expansion will be performed by the Scala compiler during the + `Typer` compiler phase. As opposed to inlining in some other ecosystems, + inlining in Scala is not merely a request to the compiler but is a + _command_. 
The reason is that inlining in Scala can drive other compile-time + operations, like inline pattern matching (enabling type-level + programming), macros (enabling compile-time, generative, metaprogramming) and + runtime code generation (multi-stage programming). + +2. [Compile-time ops](./compiletime-ops.md) are helper definitions in the + standard library that provide support for compile-time operations over values and types. + +3. [Macros](./macros.md) are built on two well-known fundamental + operations: quotation and splicing. Quotation converts program code to + data, specifically, a (tree-like) representation of this code. It is + expressed as `'{...}` for expressions and as `'[...]` for types. Splicing, + expressed as `${ ... }`, goes the other way: it converts a program's representation + to program code. Together with `inline`, these two abstractions allow + to construct program code programmatically. + +4. [Runtime Staging](./staging.md) Where macros construct code at _compile-time_, + staging lets programs construct new code at _runtime_. That way, + code generation can depend not only on static data but also on data available at runtime. This splits the evaluation of the program in two or more phases or ... + stages. Consequently, this method of generative programming is called "Multi-Stage Programming". Staging is built on the same foundations as macros. It uses + quotes and splices, but leaves out `inline`. + +5. [Reflection](./reflection.md) Quotations are a "black-box" + representation of code. They can be parameterized and composed using + splices, but their structure cannot be analyzed from the outside. TASTy + reflection gives a way to analyze code structure by partly revealing the representation type of a piece of code in a standard API. The representation + type is a form of typed abstract syntax tree, which gives rise to the `TASTy` + moniker. + +6. 
[TASTy Inspection](./tasty-inspect.md) Typed abstract syntax trees are serialized + in a custom compressed binary format stored in `.tasty` files. TASTy inspection allows + to load these files and analyze their content's tree structure. diff --git a/docs/docs/reference/metaprogramming/reflection.md b/docs/_docs/reference/metaprogramming/reflection.md similarity index 76% rename from docs/docs/reference/metaprogramming/reflection.md rename to docs/_docs/reference/metaprogramming/reflection.md index a6c50a6c6038..74bb4f693e1b 100644 --- a/docs/docs/reference/metaprogramming/reflection.md +++ b/docs/_docs/reference/metaprogramming/reflection.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Reflection" +movedTo: https://docs.scala-lang.org/scala3/reference/metaprogramming/reflection.html --- Reflection enables inspection and construction of Typed Abstract Syntax Trees @@ -28,8 +29,8 @@ import scala.quoted.* inline def natConst(inline x: Int): Int = ${natConstImpl('{x})} def natConstImpl(x: Expr[Int])(using Quotes): Expr[Int] = - import quotes.reflect.* - ... + import quotes.reflect.* + ... ``` ### Extractors @@ -39,18 +40,18 @@ For example the `Literal(_)` extractor used below. ```scala def natConstImpl(x: Expr[Int])(using Quotes): Expr[Int] = - import quotes.reflect.* - val tree: Term = x.asTerm - tree match - case Inlined(_, _, Literal(IntConstant(n))) => - if n <= 0 then - report.error("Parameter must be natural number") - '{0} - else - tree.asExprOf[Int] - case _ => - report.error("Parameter must be a known constant") - '{0} + import quotes.reflect.* + val tree: Term = x.asTerm + tree match + case Inlined(_, _, Literal(IntConstant(n))) => + if n <= 0 then + report.error("Parameter must be natural number") + '{0} + else + tree.asExprOf[Int] + case _ => + report.error("Parameter must be a known constant") + '{0} ``` We can easily know which extractors are needed using `Printer.TreeStructure.show`, @@ -78,18 +79,18 @@ expansion point. 
```scala def macroImpl()(quotes: Quotes): Expr[Unit] = - import quotes.reflect.* - val pos = Position.ofMacroExpansion - - val path = pos.sourceFile.jpath.toString - val start = pos.start - val end = pos.end - val startLine = pos.startLine - val endLine = pos.endLine - val startColumn = pos.startColumn - val endColumn = pos.endColumn - val sourceCode = pos.sourceCode - ... + import quotes.reflect.* + val pos = Position.ofMacroExpansion + + val path = pos.sourceFile.jpath.toString + val start = pos.start + val end = pos.end + val startLine = pos.startLine + val endLine = pos.endLine + val startColumn = pos.startColumn + val endColumn = pos.endColumn + val sourceCode = pos.sourceCode + ... ``` ### Tree Utilities @@ -104,14 +105,14 @@ example, collects the `val` definitions in the tree. ```scala def collectPatternVariables(tree: Tree)(using ctx: Context): List[Symbol] = - val acc = new TreeAccumulator[List[Symbol]]: - def foldTree(syms: List[Symbol], tree: Tree)(owner: Symbol): List[Symbol] = tree match - case ValDef(_, _, rhs) => - val newSyms = tree.symbol :: syms - foldTree(newSyms, body)(tree.symbol) - case _ => - foldOverTree(syms, tree)(owner) - acc(Nil, tree) + val acc = new TreeAccumulator[List[Symbol]]: + def foldTree(syms: List[Symbol], tree: Tree)(owner: Symbol): List[Symbol] = tree match + case ValDef(_, _, rhs) => + val newSyms = tree.symbol :: syms + foldTree(newSyms, body)(tree.symbol) + case _ => + foldOverTree(syms, tree)(owner) + acc(Nil, tree) ``` A `TreeTraverser` extends a `TreeAccumulator` and performs the same traversal diff --git a/docs/docs/reference/metaprogramming/simple-smp.md b/docs/_docs/reference/metaprogramming/simple-smp.md similarity index 98% rename from docs/docs/reference/metaprogramming/simple-smp.md rename to docs/_docs/reference/metaprogramming/simple-smp.md index 7335135869c7..78277987c874 100644 --- a/docs/docs/reference/metaprogramming/simple-smp.md +++ b/docs/_docs/reference/metaprogramming/simple-smp.md @@ -1,6 +1,7 @@ --- 
layout: doc-page title: "The Meta-theory of Symmetric Metaprogramming" +movedTo: https://docs.scala-lang.org/scala3/reference/metaprogramming/simple-smp.html --- This note presents a simplified variant of diff --git a/docs/docs/reference/metaprogramming/staging.md b/docs/_docs/reference/metaprogramming/staging.md similarity index 95% rename from docs/docs/reference/metaprogramming/staging.md rename to docs/_docs/reference/metaprogramming/staging.md index 633f7e2984b4..2b76df3f5fd5 100644 --- a/docs/docs/reference/metaprogramming/staging.md +++ b/docs/_docs/reference/metaprogramming/staging.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Runtime Multi-Stage Programming" +movedTo: https://docs.scala-lang.org/scala3/reference/metaprogramming/staging.html --- The framework expresses at the same time compile-time metaprogramming and @@ -110,10 +111,10 @@ import scala.quoted.* given staging.Compiler = staging.Compiler.make(getClass.getClassLoader) val f: Array[Int] => Int = staging.run { - val stagedSum: Expr[Array[Int] => Int] = - '{ (arr: Array[Int]) => ${sum('arr)}} - println(stagedSum.show) // Prints "(arr: Array[Int]) => { var sum = 0; ... }" - stagedSum + val stagedSum: Expr[Array[Int] => Int] = + '{ (arr: Array[Int]) => ${sum('arr)}} + println(stagedSum.show) // Prints "(arr: Array[Int]) => { var sum = 0; ... 
}" + stagedSum } f.apply(Array(1, 2, 3)) // Returns 6 diff --git a/docs/docs/reference/metaprogramming/tasty-inspect.md b/docs/_docs/reference/metaprogramming/tasty-inspect.md similarity index 94% rename from docs/docs/reference/metaprogramming/tasty-inspect.md rename to docs/_docs/reference/metaprogramming/tasty-inspect.md index a08e15009af1..2ab2ce84c354 100644 --- a/docs/docs/reference/metaprogramming/tasty-inspect.md +++ b/docs/_docs/reference/metaprogramming/tasty-inspect.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "TASTy Inspection" +movedTo: https://docs.scala-lang.org/scala3/reference/metaprogramming/tasty-inspect.html --- ```scala diff --git a/docs/_docs/reference/new-types/dependent-function-types-spec.md b/docs/_docs/reference/new-types/dependent-function-types-spec.md new file mode 100644 index 000000000000..c3915ba9b8ac --- /dev/null +++ b/docs/_docs/reference/new-types/dependent-function-types-spec.md @@ -0,0 +1,125 @@ +--- +layout: doc-page +title: "Dependent Function Types - More Details" +movedTo: https://docs.scala-lang.org/scala3/reference/new-types/dependent-function-types-spec.html +--- + +Initial implementation in [PR #3464](https://github.com/lampepfl/dotty/pull/3464). + +## Syntax + +``` +FunArgTypes ::= InfixType + | ‘(’ [ FunArgType {',' FunArgType } ] ‘)’ + | ‘(’ TypedFunParam {',' TypedFunParam } ‘)’ +TypedFunParam ::= id ‘:’ Type +``` + +Dependent function types associate to the right, e.g. +`(s: S) => (t: T) => U` is the same as `(s: S) => ((t: T) => U)`. + +## Implementation + +Dependent function types are shorthands for class types that define `apply` +methods with a dependent result type. Dependent function types desugar to +refinement types of `scala.FunctionN`. 
A dependent function type +`(x1: K1, ..., xN: KN) => R` of arity `N` translates to: + +```scala +FunctionN[K1, ..., Kn, R']: + def apply(x1: K1, ..., xN: KN): R +``` + +where the result type parameter `R'` is the least upper approximation of the +precise result type `R` without any reference to value parameters `x1, ..., xN`. + +The syntax and semantics of anonymous dependent functions is identical to the +one of regular functions. Eta expansion is naturally generalized to produce +dependent function types for methods with dependent result types. + +Dependent functions can be implicit, and generalize to arity `N > 22` in the +same way that other functions do, see +[the corresponding documentation](../dropped-features/limit22.md). + +## Examples + +The example below defines a trait `C` and the two dependent function types +`DF` and `IDF` and prints the results of the respective function applications: + +[depfuntype.scala]: https://github.com/lampepfl/dotty/blob/main/tests/pos/depfuntype.scala + +```scala +trait C { type M; val m: M } + +type DF = (x: C) => x.M + +type IDF = (x: C) ?=> x.M + +@main def test = + val c = new C { type M = Int; val m = 3 } + + val depfun: DF = (x: C) => x.m + val t = depfun(c) + println(s"t=$t") // prints "t=3" + + val idepfun: IDF = summon[C].m + val u = idepfun(using c) + println(s"u=$u") // prints "u=3" + +``` + +In the following example the depend type `f.Eff` refers to the effect type `CanThrow`: + +[eff-dependent.scala]: https://github.com/lampepfl/dotty/blob/main/tests/run/eff-dependent.scala + +```scala +trait Effect + +// Type X => Y +abstract class Fun[-X, +Y]: + type Eff <: Effect + def apply(x: X): Eff ?=> Y + +class CanThrow extends Effect +class CanIO extends Effect + +given ct: CanThrow = new CanThrow +given ci: CanIO = new CanIO + +class I2S extends Fun[Int, String]: + type Eff = CanThrow + def apply(x: Int) = x.toString + +class S2I extends Fun[String, Int]: + type Eff = CanIO + def apply(x: String) = x.length 
+ +// def map(f: A => B)(xs: List[A]): List[B] +def map[A, B](f: Fun[A, B])(xs: List[A]): f.Eff ?=> List[B] = + xs.map(f.apply) + +// def mapFn[A, B]: (A => B) -> List[A] -> List[B] +def mapFn[A, B]: (f: Fun[A, B]) => List[A] => f.Eff ?=> List[B] = + f => xs => map(f)(xs) + +// def compose(f: A => B)(g: B => C)(x: A): C +def compose[A, B, C](f: Fun[A, B])(g: Fun[B, C])(x: A): + f.Eff ?=> g.Eff ?=> C = + g(f(x)) + +// def composeFn: (A => B) -> (B => C) -> A -> C +def composeFn[A, B, C]: + (f: Fun[A, B]) => (g: Fun[B, C]) => A => f.Eff ?=> g.Eff ?=> C = + f => g => x => compose(f)(g)(x) + +@main def test = + val i2s = new I2S + val s2i = new S2I + + assert(mapFn(i2s)(List(1, 2, 3)).mkString == "123") + assert(composeFn(i2s)(s2i)(22) == 2) +``` + +### Type Checking + +After desugaring no additional typing rules are required for dependent function types. diff --git a/docs/docs/reference/new-types/dependent-function-types.md b/docs/_docs/reference/new-types/dependent-function-types.md similarity index 88% rename from docs/docs/reference/new-types/dependent-function-types.md rename to docs/_docs/reference/new-types/dependent-function-types.md index 07becacc82ba..33f677276aac 100644 --- a/docs/docs/reference/new-types/dependent-function-types.md +++ b/docs/_docs/reference/new-types/dependent-function-types.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Dependent Function Types" +movedTo: https://docs.scala-lang.org/scala3/reference/new-types/dependent-function-types.html --- A dependent function type is a function type whose result depends @@ -35,14 +36,14 @@ This type describes function values that take any argument `e` of type `Entry` and return a result of type `e.Key`. Recall that a normal function type `A => B` is represented as an -instance of the [`Function1` trait](https://dotty.epfl.ch/api/scala/Function1.html) +instance of the [`Function1` trait](https://scala-lang.org/api/3.x/scala/Function1.html) (i.e. 
`Function1[A, B]`) and analogously for functions with more parameters. Dependent functions are also represented as instances of these traits, but they get an additional refinement. In fact, the dependent function type above is just syntactic sugar for ```scala Function1[Entry, Entry#Key]: - def apply(e: Entry): e.Key + def apply(e: Entry): e.Key ``` [More details](./dependent-function-types-spec.md) diff --git a/docs/docs/reference/new-types/intersection-types-spec.md b/docs/_docs/reference/new-types/intersection-types-spec.md similarity index 95% rename from docs/docs/reference/new-types/intersection-types-spec.md rename to docs/_docs/reference/new-types/intersection-types-spec.md index 1ad0ed62601d..8ed9305e1486 100644 --- a/docs/docs/reference/new-types/intersection-types-spec.md +++ b/docs/_docs/reference/new-types/intersection-types-spec.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Intersection Types - More Details" +movedTo: https://docs.scala-lang.org/scala3/reference/new-types/intersection-types-spec.html --- ## Syntax @@ -97,7 +98,7 @@ glb(A, _) = A // use first In the above, `|T|` means the erased type of `T`, `JArray` refers to the type of Java Array. -See also: [`TypeErasure#erasedGlb`](https://github.com/lampepfl/dotty/blob/master/compiler/src/dotty/tools/dotc/core/TypeErasure.scala#L289). +See also: [`TypeErasure#erasedGlb`](https://github.com/lampepfl/dotty/blob/main/compiler/src/dotty/tools/dotc/core/TypeErasure.scala#L289). 
## Relationship with Compound Type (`with`) diff --git a/docs/docs/reference/new-types/intersection-types.md b/docs/_docs/reference/new-types/intersection-types.md similarity index 87% rename from docs/docs/reference/new-types/intersection-types.md rename to docs/_docs/reference/new-types/intersection-types.md index a2cfc1f380c8..d498e664d150 100644 --- a/docs/docs/reference/new-types/intersection-types.md +++ b/docs/_docs/reference/new-types/intersection-types.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Intersection Types" +movedTo: https://docs.scala-lang.org/scala3/reference/new-types/intersection-types.html --- Used on types, the `&` operator creates an intersection type. @@ -11,14 +12,14 @@ The type `S & T` represents values that are of the type `S` and `T` at the same ```scala trait Resettable: - def reset(): Unit + def reset(): Unit trait Growable[T]: - def add(t: T): Unit + def add(t: T): Unit def f(x: Resettable & Growable[String]) = - x.reset() - x.add("first") + x.reset() + x.add("first") ``` The parameter `x` is required to be _both_ a `Resettable` and a @@ -35,10 +36,10 @@ of its type in `A` and its type in `B`. For instance, assume the definitions: ```scala trait A: - def children: List[A] + def children: List[A] trait B: - def children: List[B] + def children: List[B] val x: A & B = new C val ys: List[A & B] = x.children @@ -60,7 +61,7 @@ to give at that point a definition of a `children` method with the required type ```scala class C extends A, B: - def children: List[A & B] = ??? + def children: List[A & B] = ??? 
``` diff --git a/docs/docs/reference/new-types/match-types.md b/docs/_docs/reference/new-types/match-types.md similarity index 90% rename from docs/docs/reference/new-types/match-types.md rename to docs/_docs/reference/new-types/match-types.md index 7be51e6a09bd..a2a086f3819e 100644 --- a/docs/docs/reference/new-types/match-types.md +++ b/docs/_docs/reference/new-types/match-types.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Match Types" +movedTo: https://docs.scala-lang.org/scala3/reference/new-types/match-types.html --- A match type reduces to one of its right-hand sides, depending on the type of @@ -8,9 +9,9 @@ its scrutinee. For example: ```scala type Elem[X] = X match - case String => Char - case Array[t] => t - case Iterable[t] => t + case String => Char + case Array[t] => t + case Iterable[t] => t ``` This defines a type that reduces as follows: @@ -38,18 +39,18 @@ Match types can form part of recursive type definitions. Example: ```scala type LeafElem[X] = X match - case String => Char - case Array[t] => LeafElem[t] - case Iterable[t] => LeafElem[t] - case AnyVal => X + case String => Char + case Array[t] => LeafElem[t] + case Iterable[t] => LeafElem[t] + case AnyVal => X ``` Recursive match type definitions can also be given an upper bound, like this: ```scala type Concat[Xs <: Tuple, +Ys <: Tuple] <: Tuple = Xs match - case EmptyTuple => Ys - case x *: xs => x *: Concat[xs, Ys] + case EmptyTuple => Ys + case x *: xs => x *: Concat[xs, Ys] ``` In this definition, every instance of `Concat[A, B]`, whether reducible or not, @@ -65,10 +66,10 @@ use of the match type as the return type): ```scala def leafElem[X](x: X): LeafElem[X] = x match - case x: String => x.charAt(0) - case x: Array[t] => leafElem(x(9)) - case x: Iterable[t] => leafElem(x.head) - case x: AnyVal => x + case x: String => x.charAt(0) + case x: Array[t] => leafElem(x(9)) + case x: Iterable[t] => leafElem(x.head) + case x: AnyVal => x ``` This special mode of typing for match expressions is 
only used when the @@ -191,7 +192,7 @@ error message: ```scala type L[X] = X match - case Int => L[X] + case Int => L[X] def g[X]: L[X] = ??? ``` @@ -213,14 +214,10 @@ type errors. If there is a stack overflow during subtyping, the exception will be caught and turned into a compile-time error that indicates a trace of the subtype tests that caused the overflow without showing a full stack trace. -## Variance Laws for Match Types -**Note:** This section does not reflect the current implementation. +## Match Types Variance -Within a match type `Match(S, Cs) <: B`, all occurrences of type variables count -as covariant. By the nature of the cases `Ci` this means that occurrences in -pattern position are contravariant (since patterns are represented as function -type arguments). +All type positions in a match type (scrutinee, patterns, bodies) are considered invariant. ## Related Work diff --git a/docs/_docs/reference/new-types/new-types.md b/docs/_docs/reference/new-types/new-types.md new file mode 100644 index 000000000000..dd2d1036d2e6 --- /dev/null +++ b/docs/_docs/reference/new-types/new-types.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "New Types" +movedTo: https://docs.scala-lang.org/scala3/reference/new-types.html +--- + +This chapter documents the new types introduced in Scala 3. 
diff --git a/docs/docs/reference/new-types/polymorphic-function-types.md b/docs/_docs/reference/new-types/polymorphic-function-types.md similarity index 89% rename from docs/docs/reference/new-types/polymorphic-function-types.md rename to docs/_docs/reference/new-types/polymorphic-function-types.md index 1be8baca17ad..2a803f5e65db 100644 --- a/docs/docs/reference/new-types/polymorphic-function-types.md +++ b/docs/_docs/reference/new-types/polymorphic-function-types.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Polymorphic Function Types" +movedTo: https://docs.scala-lang.org/scala3/reference/new-types/polymorphic-function-types.html --- A polymorphic function type is a function type which accepts type parameters. @@ -49,8 +50,8 @@ in a strongly-typed way: ```scala enum Expr[A]: - case Var(name: String) - case Apply[A, B](fun: Expr[B => A], arg: Expr[B]) extends Expr[A] + case Var(name: String) + case Apply[A, B](fun: Expr[B => A], arg: Expr[B]) extends Expr[A] ``` We would like to provide a way for users to map a function @@ -61,9 +62,9 @@ Here is how to implement this using polymorphic function types: ```scala def mapSubexpressions[A](e: Expr[A])(f: [B] => Expr[B] => Expr[B]): Expr[A] = - e match - case Apply(fun, arg) => Apply(f(fun), f(arg)) - case Var(n) => Var(n) + e match + case Apply(fun, arg) => Apply(f(fun), f(arg)) + case Var(n) => Var(n) ``` And here is how to use this function to _wrap_ each subexpression @@ -73,7 +74,7 @@ defined as a variable: ```scala val e0 = Apply(Var("f"), Var("a")) val e1 = mapSubexpressions(e0)( - [B] => (se: Expr[B]) => Apply(Var[B => B]("wrap"), se)) + [B] => (se: Expr[B]) => Apply(Var[B => B]("wrap"), se)) println(e1) // Apply(Apply(Var(wrap),Var(f)),Apply(Var(wrap),Var(a))) ``` diff --git a/docs/docs/reference/new-types/type-lambdas-spec.md b/docs/_docs/reference/new-types/type-lambdas-spec.md similarity index 97% rename from docs/docs/reference/new-types/type-lambdas-spec.md rename to 
docs/_docs/reference/new-types/type-lambdas-spec.md index 98362c004466..5c791ba40272 100644 --- a/docs/docs/reference/new-types/type-lambdas-spec.md +++ b/docs/_docs/reference/new-types/type-lambdas-spec.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Type Lambdas - More Details" +movedTo: https://docs.scala-lang.org/scala3/reference/new-types/type-lambdas-spec.html --- ## Syntax @@ -113,6 +114,3 @@ The body of a type lambda can again be a type lambda. Example: type TL = [X] =>> [Y] =>> (X, Y) ``` Currently, no special provision is made to infer type arguments to such curried type lambdas. This is left for future work. - - - diff --git a/docs/docs/reference/new-types/type-lambdas.md b/docs/_docs/reference/new-types/type-lambdas.md similarity index 84% rename from docs/docs/reference/new-types/type-lambdas.md rename to docs/_docs/reference/new-types/type-lambdas.md index 0b69b40a4af6..8ede3f9305c7 100644 --- a/docs/docs/reference/new-types/type-lambdas.md +++ b/docs/_docs/reference/new-types/type-lambdas.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Type Lambdas" +movedTo: https://docs.scala-lang.org/scala3/reference/new-types/type-lambdas.html --- A _type lambda_ lets one express a higher-kinded type directly, without diff --git a/docs/docs/reference/new-types/union-types-spec.md b/docs/_docs/reference/new-types/union-types-spec.md similarity index 98% rename from docs/docs/reference/new-types/union-types-spec.md rename to docs/_docs/reference/new-types/union-types-spec.md index 760bce4ec3e6..042dedea6954 100644 --- a/docs/docs/reference/new-types/union-types-spec.md +++ b/docs/_docs/reference/new-types/union-types-spec.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Union Types - More Details" +movedTo: https://docs.scala-lang.org/scala3/reference/new-types/union-types-spec.html --- ## Syntax @@ -139,7 +140,7 @@ On the other hand, the following would be allowed ```scala trait C { def hello: String } -trait A extends C with D +trait A extends C with D trait B 
extends C with E def test(x: A | B) = x.hello // ok as `hello` is a member of the join of A | B which is C diff --git a/docs/docs/reference/new-types/union-types.md b/docs/_docs/reference/new-types/union-types.md similarity index 85% rename from docs/docs/reference/new-types/union-types.md rename to docs/_docs/reference/new-types/union-types.md index 4de0fd842421..9c52cff3864d 100644 --- a/docs/docs/reference/new-types/union-types.md +++ b/docs/_docs/reference/new-types/union-types.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Union Types" +movedTo: https://docs.scala-lang.org/scala3/reference/new-types/union-types.html --- A union type `A | B` has as values all values of type `A` and also all values of type `B`. @@ -11,10 +12,10 @@ case class UserName(name: String) case class Password(hash: Hash) def help(id: UserName | Password) = - val user = id match - case UserName(name) => lookupName(name) - case Password(hash) => lookupPassword(hash) - ... + val user = id match + case UserName(name) => lookupName(name) + case Password(hash) => lookupPassword(hash) + ... ``` Union types are duals of intersection types. `|` is _commutative_: diff --git a/docs/docs/reference/other-new-features/control-syntax.md b/docs/_docs/reference/other-new-features/control-syntax.md similarity index 89% rename from docs/docs/reference/other-new-features/control-syntax.md rename to docs/_docs/reference/other-new-features/control-syntax.md index 544eb5ebd584..2f0bbde41331 100644 --- a/docs/docs/reference/other-new-features/control-syntax.md +++ b/docs/_docs/reference/other-new-features/control-syntax.md @@ -1,6 +1,7 @@ --- layout: doc-page title: New Control Syntax +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/control-syntax.html --- Scala 3 has a new "quiet" syntax for control expressions that does not rely on @@ -8,11 +9,11 @@ enclosing the condition in parentheses, and also allows to drop parentheses or b around the generators of a `for`-expression. 
Examples: ```scala if x < 0 then - "negative" + "negative" else if x == 0 then - "zero" + "zero" else - "positive" + "positive" if x < 0 then -x else x @@ -22,10 +23,10 @@ for x <- xs if x > 0 yield x * x for - x <- xs - y <- ys + x <- xs + y <- ys do - println(x + y) + println(x + y) try body catch case ex: IOException => handle diff --git a/docs/docs/reference/other-new-features/creator-applications.md b/docs/_docs/reference/other-new-features/creator-applications.md similarity index 89% rename from docs/docs/reference/other-new-features/creator-applications.md rename to docs/_docs/reference/other-new-features/creator-applications.md index 219fb16d2ea0..17bc1574f763 100644 --- a/docs/docs/reference/other-new-features/creator-applications.md +++ b/docs/_docs/reference/other-new-features/creator-applications.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Universal Apply Methods" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/creator-applications.html --- Scala case classes generate apply methods, so that values of case classes can be created using simple @@ -10,7 +11,7 @@ Scala 3 generalizes this scheme to all concrete classes. Example: ```scala class StringBuilder(s: String): - def this() = this("") + def this() = this("") StringBuilder("abc") // old: new StringBuilder("abc") StringBuilder() // old: new StringBuilder() @@ -21,8 +22,8 @@ is generated together with the class. The object looks like this: ```scala object StringBuilder: - inline def apply(s: String): StringBuilder = new StringBuilder(s) - inline def apply(): StringBuilder = new StringBuilder() + inline def apply(s: String): StringBuilder = new StringBuilder(s) + inline def apply(): StringBuilder = new StringBuilder() ``` The synthetic object `StringBuilder` and its `apply` methods are called _constructor proxies_. 
diff --git a/docs/_docs/reference/other-new-features/experimental-defs.md b/docs/_docs/reference/other-new-features/experimental-defs.md new file mode 100644 index 000000000000..ef9eca1ea7f5 --- /dev/null +++ b/docs/_docs/reference/other-new-features/experimental-defs.md @@ -0,0 +1,316 @@ +--- +layout: doc-page +title: "Experimental Definitions" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/experimental-defs.html +--- + +The [`@experimental`](https://scala-lang.org/api/3.x/scala/annotation/experimental.html) annotation allows the definition of an API that is not guaranteed backward binary or source compatibility. +This annotation can be placed on term or type definitions. + +## References to experimental definitions + +Experimental definitions can only be referenced in an experimental scope. Experimental scopes are defined as follows: + +1. The RHS of an experimental `def`, `val`, `var`, `given` or `type` is an experimental scope. Examples: + +
+ Example 1 + + ```scala + import scala.annotation.experimental + + @experimental + def x = () + + def d1 = x // error: value x is marked @experimental and therefore ... + @experimental def d2 = x + + val v1 = x // error: value x is marked @experimental and therefore ... + @experimental val v2 = x + + var vr1 = x // error: value x is marked @experimental and therefore ... + @experimental var vr2 = x + + lazy val lv1 = x // error: value x is marked @experimental and therefore ... + @experimental lazy val lv2 = x + ``` +
+ +
+ Example 2 + + ```scala + import scala.annotation.experimental + + @experimental + val x = () + + @experimental + def f() = () + + @experimental + object X: + def fx() = 1 + + def test1: Unit = + f() // error: def f is marked @experimental and therefore ... + x // error: value x is marked @experimental and therefore ... + X.fx() // error: object X is marked @experimental and therefore ... + import X.fx + fx() // error: object X is marked @experimental and therefore ... + + @experimental + def test2: Unit = + // references to f, x and X are ok because `test2` is experimental + f() + x + X.fx() + import X.fx + fx() + ``` +
+ +
+ Example 3 + + ```scala + import scala.annotation.experimental + + @experimental type E + + type A = E // error type E is marked @experimental and therefore ... + @experimental type B = E + ``` +
+ +
+  Example 4
+
+  ```scala
+  import scala.annotation.experimental
+
+  @experimental class A
+  @experimental type X
+  @experimental type Y = Int
+  @experimental opaque type Z = Int
+
+  def test: Unit =
+    new A // error: class A is marked @experimental and therefore ...
+    val i0: A = ??? // error: class A is marked @experimental and therefore ...
+    val i1: X = ??? // error: type X is marked @experimental and therefore ...
+    val i2: Y = ??? // error: type Y is marked @experimental and therefore ...
+    val i3: Z = ??? // error: type Z is marked @experimental and therefore ...
+    ()
+  ```
+
+ +
+  Example 5
+
+  ```scala
+  @experimental
+  trait ExpSAM {
+    def foo(x: Int): Int
+  }
+  def bar(f: ExpSAM): Unit = {} // error: error from rule 2
+
+  def test: Unit =
+    bar(x => x) // error: reference to experimental SAM
+    ()
+  ```
+
+ +2. The signatures of an experimental `def`, `val`, `var`, `given` and `type`, or constructors of `class` and `trait` are experimental scopes. Examples: + +
+ Example 1 + + ```scala + import scala.annotation.experimental + + @experimental def x = 2 + @experimental class A + @experimental type X + @experimental type Y = Int + @experimental opaque type Z = Int + + def test1( + p1: A, // error: class A is marked @experimental and therefore ... + p2: List[A], // error: class A is marked @experimental and therefore ... + p3: X, // error: type X is marked @experimental and therefore ... + p4: Y, // error: type Y is marked @experimental and therefore ... + p5: Z, // error: type Z is marked @experimental and therefore ... + p6: Any = x // error: def x is marked @experimental and therefore ... + ): A = ??? // error: class A is marked @experimental and therefore ... + + @experimental def test2( + p1: A, + p2: List[A], + p3: X, + p4: Y, + p5: Z, + p6: Any = x + ): A = ??? + + class Test1( + p1: A, // error + p2: List[A], // error + p3: X, // error + p4: Y, // error + p5: Z, // error + p6: Any = x // error + ) {} + + @experimental class Test2( + p1: A, + p2: List[A], + p3: X, + p4: Y, + p5: Z, + p6: Any = x + ) {} + + trait Test3( + p1: A, // error + p2: List[A], // error + p3: X, // error + p4: Y, // error + p5: Z, // error + p6: Any = x // error + ) {} + + @experimental trait Test4( + p1: A, + p2: List[A], + p3: X, + p4: Y, + p5: Z, + p6: Any = x + ) {} + ``` +
+ +3. The `extends` clause of an experimental `class`, `trait` or `object` is an experimental scope. Examples: + +
+ Example 1 + + ```scala + import scala.annotation.experimental + + @experimental def x = 2 + + @experimental class A1(x: Any) + class A2(x: Any) + + + @experimental class B1 extends A1(1) + class B2 extends A1(1) // error: class A1 is marked @experimental and therefore ... + + @experimental class C1 extends A2(x) + class C2 extends A2(x) // error: def x is marked @experimental and therefore ... + ``` +
+ +4. The body of an experimental `class`, `trait` or `object` is an experimental scope. Examples: + +
+ Example 1 + ```scala + import scala.annotation.experimental + + @experimental def x = 2 + + @experimental class A { + def f = x // ok because A is experimental + } + + @experimental class B { + def f = x // ok because B is experimental + } + + @experimental object C { + def f = x // ok because C is experimental + } + + @experimental class D { + def f = { + object B { + x // ok because D is experimental + } + } + } + ``` +
+ +5. Annotations of an experimental definition are in experimental scopes. Examples: + +
+ Example 1 + + ```scala + import scala.annotation.experimental + + @experimental class myExperimentalAnnot extends scala.annotation.Annotation + + @myExperimentalAnnot // error + def test1: Unit = () + + @experimental + @myExperimentalAnnot + def test2: Unit = () + ``` +
+ +
6. Any code compiled using a [_Nightly_](https://search.maven.org/artifact/org.scala-lang/scala3-compiler_3) or _Snapshot_ version of the compiler is considered to be in an experimental scope. +You can use the `-Yno-experimental` compiler flag to disable it and run as a proper release. + +In any other situation, a reference to an experimental definition will cause a compilation error. + +## Experimental inheritance + +All subclasses of an experimental `class` or `trait` must be marked as [`@experimental`](https://scala-lang.org/api/3.x/scala/annotation/experimental.html) even if they are in an experimental scope. +Anonymous classes and SAMs of experimental classes are considered experimental. + +We require explicit annotations to make sure we do not have completion or cycles issues with nested classes. This restriction could be relaxed in the future. + +## Experimental overriding + +For an overriding member `M` and overridden member `O`, if `O` is non-experimental then `M` must be non-experimental. + +This makes sure that we cannot have accidental binary incompatibilities such as the following change. +```diff +class A: + def f: Any = 1 +class B extends A: +- @experimental def f: Int = 2 +``` + +## Test frameworks + +Tests can be defined as experimental. Test frameworks can execute tests using reflection even if they are in an experimental class, object or method. Examples: +
+Example 1 + +Tests that touch experimental APIs can be written as follows + +```scala +import scala.annotation.experimental + +@experimental def x = 2 + +class MyTests { + /*@Test*/ def test1 = x // error + @experimental /*@Test*/ def test2 = x +} + +@experimental +class MyExperimentalTests { + /*@Test*/ def test1 = x + /*@Test*/ def test2 = x +} +``` + +
diff --git a/docs/docs/reference/other-new-features/export.md b/docs/_docs/reference/other-new-features/export.md similarity index 84% rename from docs/docs/reference/other-new-features/export.md rename to docs/_docs/reference/other-new-features/export.md index 5ded49aebc75..ce1d536a1432 100644 --- a/docs/docs/reference/other-new-features/export.md +++ b/docs/_docs/reference/other-new-features/export.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Export Clauses" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/export.html --- An export clause defines aliases for selected members of an object. Example: @@ -10,22 +11,22 @@ class BitMap class InkJet class Printer: - type PrinterType - def print(bits: BitMap): Unit = ??? - def status: List[String] = ??? + type PrinterType + def print(bits: BitMap): Unit = ??? + def status: List[String] = ??? class Scanner: - def scan(): BitMap = ??? - def status: List[String] = ??? + def scan(): BitMap = ??? + def status: List[String] = ??? class Copier: - private val printUnit = new Printer { type PrinterType = InkJet } - private val scanUnit = new Scanner + private val printUnit = new Printer { type PrinterType = InkJet } + private val scanUnit = new Scanner - export scanUnit.scan - export printUnit.{status => _, _} + export scanUnit.scan + export printUnit.{status => _, *} - def status: List[String] = printUnit.status ++ scanUnit.status + def status: List[String] = printUnit.status ++ scanUnit.status ``` The two `export` clauses define the following _export aliases_ in class `Copier`: @@ -43,7 +44,7 @@ val copier = new Copier copier.print(copier.scan()) ``` -An export clause has the same format as an import clause. Its general form is: +An `export` clause has the same format as an import clause. Its general form is: ```scala export path . 
{ sel_1, ..., sel_n } @@ -58,8 +59,12 @@ of one of the following forms: - An _omitting selector_ `x => _` prevents `x` from being aliased by a subsequent wildcard selector. - A _given selector_ `given x` has an optional type bound `x`. It creates aliases for all eligible given instances that conform to either `x`, or `Any` if `x` is omitted, except for members that are named by a previous simple, renaming, or omitting selector. - - A _wildcard selector_ `_` creates aliases for all eligible members of `path` except for given instances, + - A _wildcard selector_ `*` creates aliases for all eligible members of `path` except for given instances, synthetic members generated by the compiler and those members that are named by a previous simple, renaming, or omitting selector. + \ + Notes: + - eligible construtor proxies are also included, even though they are synthetic members. + - members created by an export are also included. They are created by the compiler, but are not considered synthetic. A member is _eligible_ if all of the following holds: @@ -73,7 +78,7 @@ A member is _eligible_ if all of the following holds: It is a compile-time error if a simple or renaming selector does not identify any eligible members. Type members are aliased by type definitions, and term members are aliased by method definitions. Export aliases copy the type and value parameters of the members they refer to. -Export aliases are always `final`. Aliases of given instances are again defined as givens (and aliases of old-style implicits are `implicit`). Aliases of inline methods or values are again defined `inline`. There are no other modifiers that can be given to an alias. This has the following consequences for overriding: +Export aliases are always `final`. Aliases of given instances are again defined as givens (and aliases of old-style implicits are `implicit`). Aliases of extensions are again defined as extensions. Aliases of inline methods or values are again defined `inline`. 
There are no other modifiers that can be given to an alias. This has the following consequences for overriding: - Export aliases cannot be overridden, since they are final. - Export aliases cannot override concrete members in base classes, since they are @@ -131,7 +136,7 @@ ImportSpec ::= NamedSelector | WildcardSelector | ‘{’ ImportSelectors) ‘}’ NamedSelector ::= id [‘as’ (id | ‘_’)] -WildCardSelector ::= ‘*' | ‘given’ [InfixType] +WildCardSelector ::= ‘*’ | ‘given’ [InfixType] ImportSelectors ::= NamedSelector [‘,’ ImportSelectors] | WildCardSelector {‘,’ WildCardSelector} ``` @@ -144,15 +149,15 @@ Consider the following example: ```scala class B { val c: Int } object a { val b = new B } -export a._ -export b._ +export a.* +export b.* ``` -Is the `export b._` clause legal? If yes, what does it export? Is it equivalent to `export a.b._`? What about if we swap the last two clauses? +Is the `export b.*` clause legal? If yes, what does it export? Is it equivalent to `export a.b.*`? What about if we swap the last two clauses? ``` -export b._ -export a._ +export b.* +export a.* ``` To avoid tricky questions like these, we fix the elaboration order of exports as follows. @@ -168,8 +173,7 @@ Export clauses are processed when the type information of the enclosing object o With export clauses, the following steps are added: - 6. Compute the types of all paths in export clauses in a context logically - inside the class but not considering any imports or exports in that class. + 6. Compute the types of all paths in export clauses. 7. Enter export aliases for the eligible members of all paths in export clauses. 
It is important that steps 6 and 7 are done in sequence: We first compute the types of _all_ diff --git a/docs/docs/reference/other-new-features/indentation-experimental.md b/docs/_docs/reference/other-new-features/indentation-experimental.md similarity index 83% rename from docs/docs/reference/other-new-features/indentation-experimental.md rename to docs/_docs/reference/other-new-features/indentation-experimental.md index 48bc271df67f..cbdd41eeaf12 100644 --- a/docs/docs/reference/other-new-features/indentation-experimental.md +++ b/docs/_docs/reference/other-new-features/indentation-experimental.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Fewer Braces" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/indentation-experimental.html --- By and large, the possible indentation regions coincide with those regions where braces `{...}` are also legal, no matter whether the braces enclose an expression or a set of definitions. There is one exception, though: Arguments to function can be enclosed in braces but they cannot be simply indented instead. Making indentation always significant for function arguments would be too restrictive and fragile. @@ -19,31 +20,31 @@ This variant is more contentious and less stable than the rest of the significan Similar to what is done for classes and objects, a `:` that follows a function reference at the end of a line means braces can be omitted for function arguments. 
Example: ```scala times(10): - println("ah") - println("ha") + println("ah") + println("ha") ``` The colon can also follow an infix operator: ```scala credentials ++ : - val file = Path.userHome / ".credentials" - if file.exists - then Seq(Credentials(file)) - else Seq() + val file = Path.userHome / ".credentials" + if file.exists + then Seq(Credentials(file)) + else Seq() ``` Function calls that take multiple argument lists can also be handled this way: ```scala val firstLine = files.get(fileName).fold: - val fileNames = files.values - s"""no file named $fileName found among - |${values.mkString(\n)}""".stripMargin - : - f => - val lines = f.iterator.map(_.readLine) - lines.mkString("\n) + val fileNames = files.values + s"""no file named $fileName found among + |${values.mkString(\n)}""".stripMargin + : + f => + val lines = f.iterator.map(_.readLine) + lines.mkString("\n) ``` @@ -52,10 +53,10 @@ val firstLine = files.get(fileName).fold: Braces can also be omitted around multiple line function value arguments: ```scala val xs = elems.map x => - val y = x - 1 - y * y + val y = x - 1 + y * y xs.foldLeft (x, y) => - x + y + x + y ``` Braces can be omitted if the lambda starts with a parameter list and `=>` or `=>?` at the end of one line and it has an indented body on the following lines. 
diff --git a/docs/docs/reference/other-new-features/indentation.md b/docs/_docs/reference/other-new-features/indentation.md similarity index 84% rename from docs/docs/reference/other-new-features/indentation.md rename to docs/_docs/reference/other-new-features/indentation.md index 4b1b78a9bf4e..c7a6fdbb7453 100644 --- a/docs/docs/reference/other-new-features/indentation.md +++ b/docs/_docs/reference/other-new-features/indentation.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Optional Braces" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/indentation.html --- Scala 3 enforces some rules on indentation and allows some occurrences of braces `{...}` to be optional: @@ -68,6 +69,9 @@ There are two rules: if match return then throw try while yield ``` + - after the closing `)` of a condition in an old-style `if` or `while`. + - after the closing `)` or `}` of the enumerations of an old-style `for` loop without a `do`. + If an `` is inserted, the indentation width of the token on the next line is pushed onto `IW`, which makes it the new current indentation width. @@ -100,9 +104,9 @@ It is an error if the indentation width of the token following an `` do ```scala if x < 0 then - -x + -x else // error: `else` does not align correctly - x + x ``` Indentation tokens are only inserted in regions where newline statement separators are also inferred: @@ -141,25 +145,25 @@ With these new rules, the following constructs are all valid: ```scala trait A: - def f: Int + def f: Int class C(x: Int) extends A: - def f = x + def f = x object O: - def f = 3 + def f = 3 enum Color: - case Red, Green, Blue + case Red, Green, Blue new A: - def f = 3 + def f = 3 package p: - def a = 1 + def a = 1 package q: - def b = 2 + def b = 2 ``` In each case, the `:` at the end of line can be replaced without change of meaning by a pair of braces that enclose the following indented definition(s). 
@@ -199,13 +203,13 @@ Indentation can be mixed freely with braces `{...}`, as well as brackets `[...]` For instance, consider: ```scala { - val x = f(x: Int, y => - x * ( - y + 1 - ) + - (x + - x) - ) + val x = f(x: Int, y => + x * ( + y + 1 + ) + + (x + + x) + ) } ``` - Here, the indentation width of the region enclosed by the braces is 3 (i.e. the indentation width of the @@ -247,12 +251,12 @@ To solve this problem, Scala 3 offers an optional `end` marker. Example: ```scala def largeMethod(...) = - ... - if ... then ... - else - ... // a large block - end if - ... // more code + ... + if ... then ... + else + ... // a large block + end if + ... // more code end largeMethod ``` @@ -279,47 +283,47 @@ For instance, the following end markers are all legal: ```scala package p1.p2: - abstract class C(): - - def this(x: Int) = - this() - if x > 0 then - val a :: b = - x :: Nil - end val - var y = - x - end y - while y > 0 do - println(y) - y -= 1 - end while - try - x match - case 0 => println("0") - case _ => - end match - finally - println("done") - end try - end if - end this - - def f: String - end C - - object C: - given C = - new C: - def f = "!" - end f - end new - end given - end C - - extension (x: C) - def ff: String = x.f ++ x.f - end extension + abstract class C(): + + def this(x: Int) = + this() + if x > 0 then + val a :: b = + x :: Nil + end val + var y = + x + end y + while y > 0 do + println(y) + y -= 1 + end while + try + x match + case 0 => println("0") + case _ => + end match + finally + println("done") + end try + end if + end this + + def f: String + end C + + object C: + given C = + new C: + def f = "!" + end f + end new + end given + end C + + extension (x: C) + def ff: String = x.f ++ x.f + end extension end p2 ``` @@ -351,56 +355,56 @@ Here is a (somewhat meta-circular) example of code using indentation. 
It provide ```scala enum IndentWidth: - case Run(ch: Char, n: Int) - case Conc(l: IndentWidth, r: Run) - - def <= (that: IndentWidth): Boolean = this match - case Run(ch1, n1) => - that match - case Run(ch2, n2) => n1 <= n2 && (ch1 == ch2 || n1 == 0) - case Conc(l, r) => this <= l - case Conc(l1, r1) => - that match - case Conc(l2, r2) => l1 == l2 && r1 <= r2 - case _ => false - - def < (that: IndentWidth): Boolean = - this <= that && !(that <= this) - - override def toString: String = - this match + case Run(ch: Char, n: Int) + case Conc(l: IndentWidth, r: Run) + + def <= (that: IndentWidth): Boolean = this match + case Run(ch1, n1) => + that match + case Run(ch2, n2) => n1 <= n2 && (ch1 == ch2 || n1 == 0) + case Conc(l, r) => this <= l + case Conc(l1, r1) => + that match + case Conc(l2, r2) => l1 == l2 && r1 <= r2 + case _ => false + + def < (that: IndentWidth): Boolean = + this <= that && !(that <= this) + + override def toString: String = + this match case Run(ch, n) => - val kind = ch match - case ' ' => "space" - case '\t' => "tab" - case _ => s"'$ch'-character" - val suffix = if n == 1 then "" else "s" - s"$n $kind$suffix" + val kind = ch match + case ' ' => "space" + case '\t' => "tab" + case _ => s"'$ch'-character" + val suffix = if n == 1 then "" else "s" + s"$n $kind$suffix" case Conc(l, r) => - s"$l, $r" + s"$l, $r" object IndentWidth: - private inline val MaxCached = 40 + private inline val MaxCached = 40 - private val spaces = IArray.tabulate(MaxCached + 1)(new Run(' ', _)) - private val tabs = IArray.tabulate(MaxCached + 1)(new Run('\t', _)) + private val spaces = IArray.tabulate(MaxCached + 1)(new Run(' ', _)) + private val tabs = IArray.tabulate(MaxCached + 1)(new Run('\t', _)) - def Run(ch: Char, n: Int): Run = - if n <= MaxCached && ch == ' ' then - spaces(n) - else if n <= MaxCached && ch == '\t' then - tabs(n) - else - new Run(ch, n) - end Run + def Run(ch: Char, n: Int): Run = + if n <= MaxCached && ch == ' ' then + spaces(n) + else if n <= 
MaxCached && ch == '\t' then + tabs(n) + else + new Run(ch, n) + end Run - val Zero = Run(' ', 0) + val Zero = Run(' ', 0) end IndentWidth ``` ### Settings and Rewrites -Significant indentation is enabled by default. It can be turned off by giving any of the options `-no-indent`, `old-syntax` and `language:Scala2`. If indentation is turned off, it is nevertheless checked that indentation conforms to the logical program structure as defined by braces. If that is not the case, the compiler issues a warning. +Significant indentation is enabled by default. It can be turned off by giving any of the options `-no-indent`, `-old-syntax` and `-language:Scala2`. If indentation is turned off, it is nevertheless checked that indentation conforms to the logical program structure as defined by braces. If that is not the case, the compiler issues a warning. The Scala 3 compiler can rewrite source code to indented code and back. When invoked with options `-rewrite -indent` it will rewrite braces to @@ -420,17 +424,17 @@ This variant is more contentious and less stable than the rest of the significan ```scala times(10): - println("ah") - println("ha") + println("ah") + println("ha") ``` or ```scala xs.map: - x => - val y = x - 1 - y * y + x => + val y = x - 1 + y * y ``` The colon is usable not only for lambdas and by-name parameters, but @@ -438,10 +442,10 @@ also even for ordinary parameters: ```scala credentials ++ : - val file = Path.userHome / ".credentials" - if file.exists - then Seq(Credentials(file)) - else Seq() + val file = Path.userHome / ".credentials" + if file.exists + then Seq(Credentials(file)) + else Seq() ``` How does this syntax variant work? Colons at the end of lines are their own token, distinct from normal `:`. 
diff --git a/docs/docs/reference/other-new-features/kind-polymorphism.md b/docs/_docs/reference/other-new-features/kind-polymorphism.md similarity index 84% rename from docs/docs/reference/other-new-features/kind-polymorphism.md rename to docs/_docs/reference/other-new-features/kind-polymorphism.md index 0f3899a42a46..057e9de9d55d 100644 --- a/docs/docs/reference/other-new-features/kind-polymorphism.md +++ b/docs/_docs/reference/other-new-features/kind-polymorphism.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Kind Polymorphism" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/kind-polymorphism.html --- Normally type parameters in Scala are partitioned into _kinds_. First-level types are types of values. Higher-kinded types are type constructors @@ -13,7 +14,7 @@ _must_ be applied to a type argument, unless they are passed to type parameters Sometimes we would like to have type parameters that can have more than one kind, for instance to define an implicit value that works for parameters of any kind. This is now possible through a form of (_subtype_) kind polymorphism. -Kind polymorphism relies on the special type `scala.AnyKind` that can be used as an upper bound of a type. +Kind polymorphism relies on the special type [`scala.AnyKind`](https://scala-lang.org/api/3.x/scala/AnyKind.html) that can be used as an upper bound of a type. ```scala def f[T <: AnyKind] = ... @@ -37,7 +38,7 @@ through advanced uses of implicits. (todo: insert good concise example) -Some technical details: `AnyKind` is a synthesized class just like `Any`, but without any members. It extends no other class. +Some technical details: [`AnyKind`](https://scala-lang.org/api/3.x/scala/AnyKind.html) is a synthesized class just like `Any`, but without any members. It extends no other class. It is declared `abstract` and `final`, so it can be neither instantiated nor extended. 
`AnyKind` plays a special role in Scala's subtype system: It is a supertype of all other types no matter what their kind is. It is also assumed to be kind-compatible with all other types. Furthermore, `AnyKind` is treated as a higher-kinded type (so it cannot be used as a type of values), but at the same time it has no type parameters (so it cannot be instantiated). diff --git a/docs/_docs/reference/other-new-features/matchable.md b/docs/_docs/reference/other-new-features/matchable.md new file mode 100644 index 000000000000..b1bb16697d62 --- /dev/null +++ b/docs/_docs/reference/other-new-features/matchable.md @@ -0,0 +1,140 @@ +--- +layout: doc-page +title: "The Matchable Trait" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/matchable.html +--- + +A new trait [`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) controls the ability to pattern match. + +### The Problem + +The Scala 3 standard library has a type [`IArray`](https://scala-lang.org/api/3.x/scala.html#IArray-0) for immutable +arrays that is defined like this: + +```scala + opaque type IArray[+T] = Array[_ <: T] +``` + +The `IArray` type offers extension methods for `length` and `apply`, but not for `update`; hence it seems values of type `IArray` cannot be updated. + +However, there is a potential hole due to pattern matching. Consider: + +```scala +val imm: IArray[Int] = ... +imm match + case a: Array[Int] => a(0) = 1 +``` + +The test will succeed at runtime since [`IArray`](https://scala-lang.org/api/3.x/scala.html#IArray-0)s _are_ represented as +`Array`s at runtime. But if we allowed it, it would break the fundamental abstraction of immutable arrays. + +__Aside:__ One could also achieve the same by casting: + +```scala +imm.asInstanceOf[Array[Int]](0) = 1 +``` + +But that is not as much of a problem since in Scala `asInstanceOf` is understood to be low-level and unsafe. 
By contrast, a pattern match that compiles without warning or error should not break abstractions. + +Note also that the problem is not tied to [opaque types](opaques.md) as match selectors. The following slight variant with a value of parametric +type `T` as match selector leads to the same problem: + +```scala +def f[T](x: T) = x match + case a: Array[Int] => a(0) = 0 +f(imm) +``` + +Finally, note that the problem is not linked to just [opaque types](opaques.md). No unbounded type parameter or abstract type should be decomposable with a pattern match. + +### The Solution + +There is a new type [`scala.Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) that controls pattern matching. When typing a pattern match of a constructor pattern `C(...)` or +a type pattern `_: C` it is required that the selector type conforms +to `Matchable`. If that's not the case a warning is issued. For instance when compiling the example at the start of this section we get: + +``` +> sc ../new/test.scala -source future +-- Warning: ../new/test.scala:4:12 --------------------------------------------- +4 | case a: Array[Int] => a(0) = 0 + | ^^^^^^^^^^ + | pattern selector should be an instance of Matchable, + | but it has unmatchable type IArray[Int] instead +``` + +To allow migration from Scala 2 and cross-compiling +between Scala 2 and 3 the warning is turned on only for `-source future-migration` or higher. + +[`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) is a universal trait with `Any` as its parent class. It is extended by both [`AnyVal`](https://scala-lang.org/api/3.x/scala/AnyVal.html) and [`AnyRef`](https://scala-lang.org/api/3.x/scala/AnyRef.html). Since `Matchable` is a supertype of every concrete value or reference class it means that instances of such classes can be matched as before. However, match selectors of the following types will produce a warning: + +- Type `Any`: if pattern matching is required one should use `Matchable` instead. 
+- Unbounded type parameters and abstract types: If pattern matching is required they should have an upper bound `Matchable`. +- Type parameters and abstract types that are only bounded by some + universal trait: Again, `Matchable` should be added as a bound. + +Here is the hierarchy of top-level classes and traits with their defined methods: + +```scala +abstract class Any: + def getClass + def isInstanceOf + def asInstanceOf + def == + def != + def ## + def equals + def hashCode + def toString + +trait Matchable extends Any + +class AnyVal extends Any, Matchable +class Object extends Any, Matchable +``` + +[`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) is currently a marker trait without any methods. Over time +we might migrate methods `getClass` and `isInstanceOf` to it, since these are closely related to pattern-matching. + +### `Matchable` and Universal Equality + +Methods that pattern-match on selectors of type `Any` will need a cast once the +Matchable warning is turned on. The most common such method is the universal +`equals` method. It will have to be written as in the following example: + +```scala +class C(val x: String): + + override def equals(that: Any): Boolean = + that.asInstanceOf[Matchable] match + case that: C => this.x == that.x + case _ => false +``` + +The cast of `that` to [`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) serves as an indication that universal equality +is unsafe in the presence of abstract types and opaque types since it cannot properly distinguish the meaning of a type from its representation. The cast +is guaranteed to succeed at run-time since `Any` and [`Matchable`](https://scala-lang.org/api/3.x/scala/Matchable.html) both erase to +`Object`. 
+ +For instance, consider the definitions + +```scala +opaque type Meter = Double +def Meter(x: Double) = x + +opaque type Second = Double +def Second(x: Double) = x +``` + +Here, universal `equals` will return true for + +```scala + Meter(10).equals(Second(10)) +``` + +even though this is clearly false mathematically. With [multiversal equality](../contextual/multiversal-equality.md) one can mitigate that problem somewhat by turning + +```scala + Meter(10) == Second(10) +``` + +into a type error. diff --git a/docs/docs/reference/other-new-features/opaques-details.md b/docs/_docs/reference/other-new-features/opaques-details.md similarity index 80% rename from docs/docs/reference/other-new-features/opaques-details.md rename to docs/_docs/reference/other-new-features/opaques-details.md index ef8d6ab0492f..0a992ff5f6be 100644 --- a/docs/docs/reference/other-new-features/opaques-details.md +++ b/docs/_docs/reference/other-new-features/opaques-details.md @@ -1,9 +1,10 @@ --- layout: doc-page title: "Opaque Type Aliases: More Details" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/opaques-details.html --- -### Syntax +## Syntax ``` Modifier ::= ... @@ -15,7 +16,7 @@ Modifier ::= ... Opaque type aliases must be members of classes, traits, or objects, or they are defined at the top-level. They cannot be defined in local blocks. -### Type Checking +## Type Checking The general form of a (monomorphic) opaque type alias is @@ -23,7 +24,7 @@ The general form of a (monomorphic) opaque type alias is opaque type T >: L <: U = R ``` -where the lower bound `L` and the upper bound `U` may be missing, in which case they are assumed to be `scala.Nothing` and `scala.Any`, respectively. If bounds are given, it is checked that the right-hand side `R` conforms to them, i.e. `L <: R` and `R <: U`. F-bounds are not supported for opaque type aliases: `T` is not allowed to appear in `L` or `U`. 
+where the lower bound `L` and the upper bound `U` may be missing, in which case they are assumed to be [`scala.Nothing`](https://scala-lang.org/api/3.x/scala/Nothing.html) and [`scala.Any`](https://scala-lang.org/api/3.x/scala/Any.html), respectively. If bounds are given, it is checked that the right-hand side `R` conforms to them, i.e. `L <: R` and `R <: U`. F-bounds are not supported for opaque type aliases: `T` is not allowed to appear in `L` or `U`. Inside the scope of the alias definition, the alias is transparent: `T` is treated as a normal alias of `R`. Outside its scope, the alias is treated as the abstract type @@ -34,7 +35,7 @@ A special case arises if the opaque type alias is defined in an object. Example: ```scala object o: - opaque type T = R + opaque type T = R ``` In this case we have inside the object (also for non-opaque types) that `o.T` is equal to @@ -45,12 +46,12 @@ also known that `o.T` is equal to `R`. This means the following code type-checks ```scala object o: - opaque type T = Int - val x: Int = id(2) + opaque type T = Int + val x: Int = id(2) def id(x: o.T): o.T = x ``` -### Type Parameters of Opaque Types +## Type Parameters of Opaque Types Opaque type aliases can have a single type parameter list. The following aliases are well-formed @@ -64,7 +65,7 @@ opaque type BadF[T] = [U] =>> (T, U) opaque type BadG = [T] =>> [U] => (T, U) ``` -### Translation of Equality +## Translation of Equality Comparing two values of opaque type with `==` or `!=` normally uses universal equality, unless another overloaded `==` or `!=` operator is defined for the type. To avoid @@ -79,7 +80,7 @@ defined on the underlying type. For instance, x == y // uses Int equality for the comparison. ``` -### Top-level Opaque Types +## Top-level Opaque Types An opaque type alias on the top-level is transparent in all other top-level definitions in the sourcefile where it appears, but is opaque in nested objects and classes and in all other source files. 
Example: @@ -89,7 +90,7 @@ opaque type A = String val x: A = "abc" object obj: - val y: A = "abc" // error: found: "abc", required: A + val y: A = "abc" // error: found: "abc", required: A // in test2.scala def z: String = x // error: found: A, required: String @@ -97,16 +98,16 @@ def z: String = x // error: found: A, required: String This behavior becomes clear if one recalls that top-level definitions are placed in their own synthetic object. For instance, the code in `test1.scala` would expand to ```scala object test1$package: - opaque type A = String - val x: A = "abc" + opaque type A = String + val x: A = "abc" object obj: - val y: A = "abc" // error: cannot assign "abc" to opaque type alias A + val y: A = "abc" // error: cannot assign "abc" to opaque type alias A ``` The opaque type alias `A` is transparent in its scope, which includes the definition of `x`, but not the definitions of `obj` and `y`. -### Relationship to SIP 35 +## Relationship to SIP 35 Opaque types in Scala 3 are an evolution from what is described in [Scala SIP 35](https://docs.scala-lang.org/sips/opaque-types.html). diff --git a/docs/_docs/reference/other-new-features/opaques.md b/docs/_docs/reference/other-new-features/opaques.md new file mode 100644 index 000000000000..402440bad90c --- /dev/null +++ b/docs/_docs/reference/other-new-features/opaques.md @@ -0,0 +1,167 @@ +--- +layout: doc-page +title: "Opaque Type Aliases" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/opaques.html +--- + +Opaque type aliases provide type abstraction without any overhead. 
Example: + +```scala +object MyMath: + + opaque type Logarithm = Double + + object Logarithm: + + // These are the two ways to lift to the Logarithm type + + def apply(d: Double): Logarithm = math.log(d) + + def safe(d: Double): Option[Logarithm] = + if d > 0.0 then Some(math.log(d)) else None + + end Logarithm + + // Extension methods define opaque types' public APIs + extension (x: Logarithm) + def toDouble: Double = math.exp(x) + def + (y: Logarithm): Logarithm = Logarithm(math.exp(x) + math.exp(y)) + def * (y: Logarithm): Logarithm = x + y + +end MyMath +``` + +This introduces `Logarithm` as a new abstract type, which is implemented as `Double`. +The fact that `Logarithm` is the same as `Double` is only known in the scope where +`Logarithm` is defined which in the above example corresponds to the object `MyMath`. +Or in other words, within the scope it is treated as type alias, but this is opaque to the outside world +where in consequence `Logarithm` is seen as an abstract type and has nothing to do with `Double`. + +The public API of `Logarithm` consists of the `apply` and `safe` methods defined in the companion object. +They convert from `Double`s to `Logarithm` values. Moreover, an operation `toDouble` that converts the other way, and operations `+` and `*` are defined as extension methods on `Logarithm` values. +The following operations would be valid because they use functionality implemented in the `MyMath` object. + +```scala +import MyMath.Logarithm + +val l = Logarithm(1.0) +val l2 = Logarithm(2.0) +val l3 = l * l2 +val l4 = l + l2 +``` + +But the following operations would lead to type errors: + +```scala +val d: Double = l // error: found: Logarithm, required: Double +val l2: Logarithm = 1.0 // error: found: Double, required: Logarithm +l * 2 // error: found: Int(2), required: Logarithm +l / l2 // error: `/` is not a member of Logarithm +``` + +### Bounds For Opaque Type Aliases + +Opaque type aliases can also come with bounds. 
Example: + +```scala +object Access: + + opaque type Permissions = Int + opaque type PermissionChoice = Int + opaque type Permission <: Permissions & PermissionChoice = Int + + extension (x: Permissions) + def & (y: Permissions): Permissions = x | y + extension (x: PermissionChoice) + def | (y: PermissionChoice): PermissionChoice = x | y + extension (granted: Permissions) + def is(required: Permissions) = (granted & required) == required + extension (granted: Permissions) + def isOneOf(required: PermissionChoice) = (granted & required) != 0 + + val NoPermission: Permission = 0 + val Read: Permission = 1 + val Write: Permission = 2 + val ReadWrite: Permissions = Read | Write + val ReadOrWrite: PermissionChoice = Read | Write + +end Access +``` + +The `Access` object defines three opaque type aliases: + +- `Permission`, representing a single permission, +- `Permissions`, representing a set of permissions with the meaning "all of these permissions granted", +- `PermissionChoice`, representing a set of permissions with the meaning "at least one of these permissions granted". + +Outside the `Access` object, values of type `Permissions` may be combined using the `&` operator, +where `x & y` means "all permissions in `x` *and* in `y` granted". +Values of type `PermissionChoice` may be combined using the `|` operator, +where `x | y` means "a permission in `x` *or* in `y` granted". + +Note that inside the `Access` object, the `&` and `|` operators always resolve to the corresponding methods of `Int`, +because members always take precedence over extension methods. +Because of that, the `|` extension method in `Access` does not cause infinite recursion. +Also, the definition of `ReadWrite` must use `|`, +even though an equivalent definition outside `Access` would use `&`. + +All three opaque type aliases have the same underlying representation type `Int`. The +`Permission` type has an upper bound `Permissions & PermissionChoice`. 
This makes +it known outside the `Access` object that `Permission` is a subtype of the other +two types. Hence, the following usage scenario type-checks. + +```scala +object User: + import Access.* + + case class Item(rights: Permissions) + + val roItem = Item(Read) // OK, since Permission <: Permissions + val rwItem = Item(ReadWrite) + val noItem = Item(NoPermission) + + assert(!roItem.rights.is(ReadWrite)) + assert(roItem.rights.isOneOf(ReadOrWrite)) + + assert(rwItem.rights.is(ReadWrite)) + assert(rwItem.rights.isOneOf(ReadOrWrite)) + + assert(!noItem.rights.is(ReadWrite)) + assert(!noItem.rights.isOneOf(ReadOrWrite)) +end User +``` + +On the other hand, the call `roItem.rights.isOneOf(ReadWrite)` would give a type error +since `Permissions` and `PermissionChoice` are different, unrelated types outside `Access`. + + +### Opaque Type Members on Classes +While typically, opaque types are used together with objects to hide implementation details of a module, they can also be used with classes. + +For example, we can redefine the above example of Logarithms as a class. +```scala +class Logarithms: + + opaque type Logarithm = Double + + def apply(d: Double): Logarithm = math.log(d) + + def safe(d: Double): Option[Logarithm] = + if d > 0.0 then Some(math.log(d)) else None + + def mul(x: Logarithm, y: Logarithm) = x + y +``` + +Opaque type members of different instances are treated as different: +```scala +val l1 = new Logarithms +val l2 = new Logarithms +val x = l1(1.5) +val y = l1(2.6) +val z = l2(3.1) +l1.mul(x, y) // type checks +l1.mul(x, z) // error: found l2.Logarithm, required l1.Logarithm +``` +In general, one can think of an opaque type as being only transparent in the scope of `private[this]`. 
+ +[More details](opaques-details.md) diff --git a/docs/docs/reference/other-new-features/open-classes.md b/docs/_docs/reference/other-new-features/open-classes.md similarity index 76% rename from docs/docs/reference/other-new-features/open-classes.md rename to docs/_docs/reference/other-new-features/open-classes.md index 7164476d0ad4..6762b0eac14b 100644 --- a/docs/docs/reference/other-new-features/open-classes.md +++ b/docs/_docs/reference/other-new-features/open-classes.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Open Classes" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/open-classes.html --- An `open` modifier on a class signals that the class is planned for extensions. Example: @@ -10,18 +11,18 @@ package p open class Writer[T]: - /** Sends to stdout, can be overridden */ - def send(x: T) = println(x) + /** Sends to stdout, can be overridden */ + def send(x: T) = println(x) - /** Sends all arguments using `send` */ - def sendAll(xs: T*) = xs.foreach(send) + /** Sends all arguments using `send` */ + def sendAll(xs: T*) = xs.foreach(send) end Writer // File EncryptedWriter.scala package p class EncryptedWriter[T: Encryptable] extends Writer[T]: - override def send(x: T) = super.send(encrypt(x)) + override def send(x: T) = super.send(encrypt(x)) ``` An open class typically comes with some documentation that describes the internal calling patterns between methods of the class as well as hooks that can be overridden. We call this the _extension contract_ of the class. It is different from the _external contract_ between a class and its users. @@ -30,7 +31,7 @@ Classes that are not open can still be extended, but only if at least one of two - The extending class is in the same source file as the extended class. In this case, the extension is usually an internal implementation matter. - - The language feature `adhocExtensions` is enabled for the extending class. 
This is typically enabled by an import clause in the source file of the extension: + - The language feature [`adhocExtensions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$adhocExtensions$.html) is enabled for the extending class. This is typically enabled by an import clause in the source file of the extension: ```scala import scala.language.adhocExtensions ``` @@ -46,7 +47,7 @@ Classes that are not open can still be extended, but only if at least one of two |or by setting the compiler option -language:adhocExtensions. ``` -### Motivation +## Motivation When writing a class, there are three possible expectations of extensibility: @@ -63,17 +64,17 @@ It is good practice to avoid _ad-hoc_ extensions in a code base, since they tend are still some situations where these extensions are useful: for instance, to mock classes in tests, or to apply temporary patches that add features or fix bugs in library classes. That's why _ad-hoc_ extensions are permitted, but only if there is an explicit opt-in via a language feature import. -### Details +## Details - `open` is a soft modifier. It is treated as a normal identifier unless it is in modifier position. - An `open` class cannot be `final` or `sealed`. - Traits or `abstract` classes are always `open`, so `open` is redundant for them. -### Relationship with `sealed` +## Relationship with `sealed` -A class that is neither `abstract` nor `open` is similar to a `sealed` class: it can still be extended, but only in the same compilation unit. The difference is what happens if an extension of the class is attempted in another compilation unit. For a `sealed` class, this is an error, whereas for a simple non-open class, this is still permitted provided the `adhocExtensions` feature is enabled, and it gives a warning otherwise. +A class that is neither `abstract` nor `open` is similar to a `sealed` class: it can still be extended, but only in the same source file. 
The difference is what happens if an extension of the class is attempted in another source file. For a `sealed` class, this is an error, whereas for a simple non-open class, this is still permitted provided the [`adhocExtensions`](https://scala-lang.org/api/3.x/scala/runtime/stdLibPatches/language$$adhocExtensions$.html) feature is enabled, and it gives a warning otherwise. -### Migration +## Migration `open` is a new modifier in Scala 3. To allow cross compilation between Scala 2.13 and Scala 3.0 without warnings, the feature warning for ad-hoc extensions is produced only under `-source future`. It will be produced by default from Scala 3.1 on. diff --git a/docs/_docs/reference/other-new-features/other-new-types.md b/docs/_docs/reference/other-new-features/other-new-types.md new file mode 100644 index 000000000000..85e768833a71 --- /dev/null +++ b/docs/_docs/reference/other-new-features/other-new-types.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "Other New Features" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features.html +--- + +The following pages document new features of Scala 3. 
diff --git a/docs/docs/reference/other-new-features/parameter-untupling-spec.md b/docs/_docs/reference/other-new-features/parameter-untupling-spec.md similarity index 76% rename from docs/docs/reference/other-new-features/parameter-untupling-spec.md rename to docs/_docs/reference/other-new-features/parameter-untupling-spec.md index 513538c39074..e01e91059a27 100644 --- a/docs/docs/reference/other-new-features/parameter-untupling-spec.md +++ b/docs/_docs/reference/other-new-features/parameter-untupling-spec.md @@ -1,6 +1,7 @@ --- layout: doc-page title: "Parameter Untupling - More Details" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/parameter-untupling-spec.html --- ## Motivation @@ -16,14 +17,14 @@ Previously, the best way to do this was with a pattern-matching decomposition: ```scala xs.map { - case (x, y) => x + y + case (x, y) => x + y } ``` While correct, this is inconvenient. Instead, we propose to write it the following way: ```scala xs.map { - (x, y) => x + y + (x, y) => x + y } ``` @@ -67,20 +68,21 @@ is feasible for parameter untupling with the expected type `TupleN[T1, ..., Tn] ```scala (x: TupleN[T1, ..., Tn]) => - def p1: T1 = x._1 - ... - def pn: Tn = x._n - e + def p1: T1 = x._1 + ... + def pn: Tn = x._n + e ``` with the same expected type. ## Migration -Code like this could not be written before, hence the new notation would not be ambiguous after adoption. +Code like this could not be written before, hence the new notation is not ambiguous after adoption. -Though it is possible that someone has written an implicit conversion form `(T1, ..., Tn) => R` to `TupleN[T1, ..., Tn] => R` -for some `n`. This change could be detected and fixed by [`Scalafix`](https://scalacenter.github.io/scalafix/). Furthermore, such conversion would probably -be doing the same translation (semantically) but in a less efficient way. 
+It is possible that someone has written an implicit conversion from `(T1, ..., Tn) => R` to `TupleN[T1, ..., Tn] => R` for some `n`. +Such a conversion is now only useful for general conversions of function values, when parameter untupling is not applicable. +Some care is required to implement the conversion efficiently. +Obsolete conversions could be detected and fixed by [`Scalafix`](https://scalacenter.github.io/scalafix/). ## Reference diff --git a/docs/_docs/reference/other-new-features/parameter-untupling.md b/docs/_docs/reference/other-new-features/parameter-untupling.md new file mode 100644 index 000000000000..84bbe52bddc1 --- /dev/null +++ b/docs/_docs/reference/other-new-features/parameter-untupling.md @@ -0,0 +1,77 @@ +--- +layout: doc-page +title: "Parameter Untupling" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/parameter-untupling.html +--- + +Say you have a list of pairs + +```scala +val xs: List[(Int, Int)] +``` + +and you want to map `xs` to a list of `Int`s so that each pair of numbers is mapped to +their sum. Previously, the best way to do this was with a pattern-matching decomposition: + +```scala +xs map { + case (x, y) => x + y +} +``` + +While correct, this is also inconvenient and confusing, since the `case` +suggests that the pattern match could fail. As a shorter and clearer alternative Scala 3 now allows + +```scala +xs.map { + (x, y) => x + y +} +``` + +or, equivalently: + +```scala +xs.map(_ + _) +``` +and +```scala +def combine(i: Int, j: Int) = i + j +xs.map(combine) +``` + +Generally, a function value with `n > 1` parameters is converted to a +pattern-matching closure using `case` if the expected type is a unary +function type of the form `((T_1, ..., T_n)) => U`. + +More specifically, the adaptation is applied to the mismatching formal +parameter list. In particular, the adaptation is not a conversion +between function types. 
That is why the following is not accepted: + +```scala +val combiner: (Int, Int) => Int = _ + _ +xs.map(combiner) // Type Mismatch +``` + +The function value must be explicitly tupled, rather than the parameters untupled: +```scala +xs.map(combiner.tupled) +``` + +A conversion may be provided in user code: + +```scala +import scala.language.implicitConversions +transparent inline implicit def `fallback untupling`(f: (Int, Int) => Int): ((Int, Int)) => Int = + p => f(p._1, p._2) // use specialized apply instead of unspecialized `tupled` +xs.map(combiner) +``` + +Parameter untupling is attempted before conversions are applied, so that a conversion in scope +cannot subvert untupling. + +## Reference + +For more information see: + +* [More details](./parameter-untupling-spec.md) +* [Issue #897](https://github.com/lampepfl/dotty/issues/897). diff --git a/docs/_docs/reference/other-new-features/safe-initialization.md b/docs/_docs/reference/other-new-features/safe-initialization.md new file mode 100644 index 000000000000..57c962c9f5e8 --- /dev/null +++ b/docs/_docs/reference/other-new-features/safe-initialization.md @@ -0,0 +1,236 @@ +--- +layout: doc-page +title: "Safe Initialization" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/safe-initialization.html +--- + +Scala 3 implements experimental safe initialization check, which can be enabled by the compiler option `-Ysafe-init`. + +## A Quick Glance + +To get a feel of how it works, we first show several examples below. 
+ +### Parent-Child Interaction + +Given the following code snippet: + +``` scala +abstract class AbstractFile: + def name: String + val extension: String = name.substring(4) + +class RemoteFile(url: String) extends AbstractFile: + val localFile: String = s"${url.##}.tmp" // error: usage of `localFile` before it's initialized + def name: String = localFile +``` + +The checker will report: + +``` scala +-- Warning: tests/init/neg/AbstractFile.scala:7:4 ------------------------------ +7 | val localFile: String = s"${url.##}.tmp" // error: usage of `localFile` before it's initialized + | ^ + | Access non-initialized field value localFile. Calling trace: + | -> val extension: String = name.substring(4) [ AbstractFile.scala:3 ] + | -> def name: String = localFile [ AbstractFile.scala:8 ] +``` + +### Inner-Outer Interaction + +Given the code below: + +``` scala +object Trees: + class ValDef { counter += 1 } + class EmptyValDef extends ValDef + val theEmptyValDef = new EmptyValDef + private var counter = 0 // error +``` + +The checker will report: + +``` scala +-- Warning: tests/init/neg/trees.scala:5:14 ------------------------------------ +5 | private var counter = 0 // error + | ^ + | Access non-initialized field variable counter. Calling trace: + | -> val theEmptyValDef = new EmptyValDef [ trees.scala:4 ] + | -> class EmptyValDef extends ValDef [ trees.scala:3 ] + | -> class ValDef { counter += 1 } [ trees.scala:2 ] +``` + +### Functions + +Given the code below: + +``` scala +abstract class Parent: + val f: () => String = () => this.message + def message: String + +class Child extends Parent: + val a = f() + val b = "hello" // error + def message: String = b +``` + +The checker reports: + +``` scala +-- Warning: tests/init/neg/features-high-order.scala:7:6 ----------------------- +7 | val b = "hello" // error + | ^ + |Access non-initialized field value b. 
Calling trace: + | -> val a = f() [ features-high-order.scala:6 ] + | -> val f: () => String = () => this.message [ features-high-order.scala:2 ] + | -> def message: String = b [ features-high-order.scala:8 ] +``` +## Design Goals + +We establish the following design goals: + +- __Sound__: checking always terminates, and is sound for common and reasonable usage (over-approximation) +- __Expressive__: support common and reasonable initialization patterns +- __Friendly__: simple rules, minimal syntactic overhead, informative error messages +- __Modular__: modular checking, no analysis beyond project boundary +- __Fast__: instant feedback +- __Simple__: no changes to core type system, explainable by a simple theory + +By _reasonable usage_, we include the following use cases (but not restricted to them): + +- Access fields on `this` and outer `this` during initialization +- Call methods on `this` and outer `this` during initialization +- Instantiate inner class and call methods on such instances during initialization +- Capture fields in functions + +## Principles + +To achieve the goals, we uphold three fundamental principles: +_stackability_, _monotonicity_ and _scopability_. + +Stackability means that all fields of a class are initialized at the end of the +class body. Scala enforces this property in syntax by demanding that all fields +are initialized at the end of the primary constructor, except for the language +feature below: + +``` scala +var x: T = _ +``` + +Control effects such as exceptions may break this property, as the +following example shows: + +``` scala +class MyException(val b: B) extends Exception("") +class A: + val b = try { new B } catch { case myEx: MyException => myEx.b } + println(b.a) + +class B: + throw new MyException(this) + val a: Int = 1 +``` + +In the code above, the control effect teleport the uninitialized value +wrapped in an exception. 
In the implementation, we avoid the problem +by ensuring that the values that are thrown must be transitively initialized. + +Monotonicity means that the initialization status of an object should +not go backward: initialized fields continue to be initialized, a +field points to an initialized object may not later point to an +object under initialization. As an example, the following code will be rejected: + +``` scala +trait Reporter: + def report(msg: String): Unit + +class FileReporter(ctx: Context) extends Reporter: + ctx.typer.reporter = this // ctx now reaches an uninitialized object + val file: File = new File("report.txt") + def report(msg: String) = file.write(msg) +``` + +In the code above, suppose `ctx` points to a transitively initialized +object. Now the assignment at line 3 makes `this`, which is not fully +initialized, reachable from `ctx`. This makes field usage dangerous, +as it may indirectly reach uninitialized fields. + +Monotonicity is based on a well-known technique called _heap monotonic +typestate_ to ensure soundness in the presence of aliasing +[1]. Roughly speaking, it means initialization state should not go backwards. + +Scopability means that there are no side channels to access to partially constructed objects. Control effects like coroutines, delimited +control, resumable exceptions may break the property, as they can transport a +value upper in the stack (not in scope) to be reachable from the current scope. +Static fields can also serve as a teleport thus breaks this property. In the +implementation, we need to enforce that teleported values are transitively +initialized. + +The principles enable _local reasoning_ of initialization, which means: + +> An initialized environment can only produce initialized values. + +For example, if the arguments to an `new`-expression are transitively +initialized, so is the result. If the receiver and arguments in a method call +are transitively initialized, so is the result. 
+
+## Rules
+
+With the established principles and design goals, the following rules are imposed:
+
+1. In an assignment `o.x = e`, the expression `e` may only point to transitively initialized objects.
+
+   This is how monotonicity is enforced in the system. Note that in an
+   initialization `val f: T = e`, the expression `e` may point to an object
+   under initialization. This requires a distinction between mutation and
+   initialization in order to enforce different rules. Scala
+   has different syntax for them, so this is not an issue.
+
+2. Objects under initialization may not be passed as arguments to method calls.
+
+   Escape of `this` in the constructor is commonly regarded as an anti-pattern.
+   However, escape of `this` as a constructor argument is allowed, to support
+   creation of cyclic data structures. The checker will ensure that the escaped
+   non-initialized object is not used, i.e. calling methods or accessing fields
+   on the escaped object is not allowed.
+
+## Modularity
+
+The analysis takes the primary constructor of concrete classes as entry points.
+It follows the constructors of super classes, which might be defined in another project.
+The analysis takes advantage of TASTy for analyzing super classes defined in another project.
+
+The crossing of project boundary raises a concern about modularity. It is
+well-known in object-oriented programming that superclass and subclass are
+tightly coupled. For example, adding a method in the superclass requires
+recompiling the child class for checking safe overriding.
+
+Initialization is no exception in this respect. The initialization of an object
+essentially involves close interaction between subclass and superclass. If the
+superclass is defined in another project, the crossing of project boundary
+cannot be avoided for soundness of the analysis.
+
+Meanwhile, inheritance across project boundary has been under scrutiny and the
+introduction of [open classes](./open-classes.md) mitigates the concern here. 
+For example, the initialization check could enforce that the constructors of +open classes may not contain method calls on `this` or introduce annotations as +a contract. + +The feedback from the community on the topic is welcome. + +## Back Doors + +Occasionally you may want to suppress warnings reported by the +checker. You can either write `e: @unchecked` to tell the checker to +skip checking for the expression `e`, or you may use the old trick: +mark some fields as lazy. + +## Caveats + +- The system cannot provide safety guarantee when extending Java or Scala 2 classes. +- Safe initialization of global objects is only partially checked. + +## References + +1. Fähndrich, M. and Leino, K.R.M., 2003, July. [_Heap monotonic typestates_](https://www.microsoft.com/en-us/research/publication/heap-monotonic-typestate/). In International Workshop on Aliasing, Confinement and Ownership in object-oriented programming (IWACO). +2. Fengyun Liu, Ondřej Lhoták, Aggelos Biboudis, Paolo G. Giarrusso, and Martin Odersky. 2020. [_A type-and-effect system for object initialization_](https://dl.acm.org/doi/10.1145/3428243). OOPSLA, 2020. diff --git a/docs/_docs/reference/other-new-features/targetName.md b/docs/_docs/reference/other-new-features/targetName.md new file mode 100644 index 000000000000..d2a654697d15 --- /dev/null +++ b/docs/_docs/reference/other-new-features/targetName.md @@ -0,0 +1,118 @@ +--- +layout: doc-page +title: "The @targetName annotation" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/targetName.html +--- + +A [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation on a definition defines an alternate name for the implementation of that definition. Example: + +```scala +import scala.annotation.targetName + +object VecOps: + extension [T](xs: Vec[T]) + @targetName("append") + def ++= [T] (ys: Vec[T]): Vec[T] = ... 
+``` + +Here, the `++=` operation is implemented (in Byte code or native code) under the name `append`. The implementation name affects the code that is generated, and is the name under which code from other languages can call the method. For instance, `++=` could be invoked from Java like this: + +```java +VecOps.append(vec1, vec2) +``` + +The [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation has no bearing on Scala usages. Any application of that method in Scala has to use `++=`, not `append`. + +### Details + + 1. `@targetName` is defined in package `scala.annotation`. It takes a single argument + of type `String`. That string is called the _external name_ of the definition + that's annotated. + + 2. A `@targetName` annotation can be given for all kinds of definitions. + + 3. The name given in a [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation must be a legal name + for the defined entities on the host platform. + + 4. It is recommended that definitions with symbolic names have a [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation. This will establish an alternate name that is easier to search for and + will avoid cryptic encodings in runtime diagnostics. + + 5. Definitions with names in backticks that are not legal host platform names + should also have a [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation. + +### Relationship with Overriding + +[`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotations are significant for matching two method definitions to decide whether they conflict or override each other. Two method definitions match if they have the same name, signature, and erased name. Here, + +- The _signature_ of a definition consists of the names of the erased types of all (value-) parameters and the method's result type. 
+- The _erased name_ of a method definition is its target name if a [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation is given and its defined name otherwise. + +This means that `@targetName` annotations can be used to disambiguate two method definitions that would otherwise clash. For instance. + +```scala +def f(x: => String): Int = x.length +def f(x: => Int): Int = x + 1 // error: double definition +``` + +The two definitions above clash since their erased parameter types are both [`Function0`](https://scala-lang.org/api/3.x/scala/Function0.html), which is the type of the translation of a by-name-parameter. Hence they have the same names and signatures. But we can avoid the clash by adding a [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotation to either method or to both of them. Example: + +```scala +@targetName("f_string") +def f(x: => String): Int = x.length +def f(x: => Int): Int = x + 1 // OK +``` + +This will produce methods `f_string` and `f` in the generated code. + +However, [`@targetName`](https://scala-lang.org/api/3.x/scala/annotation/targetName.html) annotations are not allowed to break overriding relationships +between two definitions that have otherwise the same names and types. 
So the following would be in error: + +```scala +import annotation.targetName +class A: + def f(): Int = 1 +class B extends A: + @targetName("g") def f(): Int = 2 +``` + +The compiler reports here: + +``` +-- Error: test.scala:6:23 ------------------------------------------------------ +6 | @targetName("g") def f(): Int = 2 + | ^ + |error overriding method f in class A of type (): Int; + | method f of type (): Int should not have a @targetName + | annotation since the overridden member hasn't one either +``` + +The relevant overriding rules can be summarized as follows: + +- Two members can override each other if their names and signatures are the same, + and they either have the same erased names or the same types. +- If two members override, then both their erased names and their types must be the same. + +As usual, any overriding relationship in the generated code must also +be present in the original code. So the following example would also be in error: + +```scala +import annotation.targetName +class A: + def f(): Int = 1 +class B extends A: + @targetName("f") def g(): Int = 2 +``` + +Here, the original methods `g` and `f` do not override each other since they have +different names. But once we switch to target names, there is a clash that is reported by the compiler: + +``` +-- [E120] Naming Error: test.scala:4:6 ----------------------------------------- +4 |class B extends A: + | ^ + | Name clash between defined and inherited member: + | def f(): Int in class A at line 3 and + | def g(): Int in class B at line 5 + | have the same name and type after erasure. 
+1 error found +``` diff --git a/docs/_docs/reference/other-new-features/threadUnsafe-annotation.md b/docs/_docs/reference/other-new-features/threadUnsafe-annotation.md new file mode 100644 index 000000000000..cd0d45e2246a --- /dev/null +++ b/docs/_docs/reference/other-new-features/threadUnsafe-annotation.md @@ -0,0 +1,18 @@ +--- +layout: doc-page +title: "The @threadUnsafe annotation" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/threadUnsafe-annotation.html +--- + +A new annotation [`@threadUnsafe`](https://scala-lang.org/api/3.x/scala/annotation/threadUnsafe.html) can be used on a field which defines +a `lazy val`. When this annotation is used, the initialization of the +[`lazy val`](../changed-features/lazy-vals-init.md) will use a faster mechanism which is not thread-safe. + +### Example + +```scala +import scala.annotation.threadUnsafe + +class Hello: + @threadUnsafe lazy val x: Int = 1 +``` diff --git a/docs/docs/reference/other-new-features/trait-parameters.md b/docs/_docs/reference/other-new-features/trait-parameters.md similarity index 84% rename from docs/docs/reference/other-new-features/trait-parameters.md rename to docs/_docs/reference/other-new-features/trait-parameters.md index 5ccde10b05c1..6c833a097b21 100644 --- a/docs/docs/reference/other-new-features/trait-parameters.md +++ b/docs/_docs/reference/other-new-features/trait-parameters.md @@ -1,16 +1,17 @@ --- layout: doc-page title: "Trait Parameters" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/trait-parameters.html --- Scala 3 allows traits to have parameters, just like classes have parameters. ```scala trait Greeting(val name: String): - def msg = s"How are you, $name" + def msg = s"How are you, $name" class C extends Greeting("Bob"): - println(msg) + println(msg) ``` Arguments to a trait are evaluated immediately before the trait is initialized. @@ -36,7 +37,7 @@ Here's a trait extending the parameterized trait `Greeting`. 
```scala trait FormalGreeting extends Greeting: - override def msg = s"How do you do, $name" + override def msg = s"How do you do, $name" ``` As is required, no arguments are passed to `Greeting`. However, this poses an issue when defining a class that extends `FormalGreeting`: @@ -55,7 +56,7 @@ class E extends Greeting("Bob"), FormalGreeting ### Traits With Context Parameters This "explicit extension required" rule is relaxed if the missing trait contains only -[context parameters](../contextual/using-clauses). In that case the trait reference is +[context parameters](../contextual/using-clauses.md). In that case the trait reference is implicitly inserted as an additional parent with inferred arguments. For instance, here's a variant of greetings where the addressee is a context parameter of type `ImpliedName`: @@ -65,10 +66,10 @@ case class ImpliedName(name: String): override def toString = name trait ImpliedGreeting(using val iname: ImpliedName): - def msg = s"How are you, $iname" + def msg = s"How are you, $iname" trait ImpliedFormalGreeting extends ImpliedGreeting: - override def msg = s"How do you do, $iname" + override def msg = s"How do you do, $iname" class F(using iname: ImpliedName) extends ImpliedFormalGreeting ``` @@ -76,9 +77,9 @@ class F(using iname: ImpliedName) extends ImpliedFormalGreeting The definition of `F` in the last line is implicitly expanded to ```scala class F(using iname: ImpliedName) extends - Object, - ImpliedGreeting(using iname), - ImpliedFormalGreeting(using iname) + Object, + ImpliedGreeting(using iname), + ImpliedFormalGreeting(using iname) ``` Note the inserted reference to the super trait `ImpliedGreeting`, which was not mentioned explicitly. 
diff --git a/docs/_docs/reference/other-new-features/transparent-traits.md b/docs/_docs/reference/other-new-features/transparent-traits.md new file mode 100644 index 000000000000..3236bf71252d --- /dev/null +++ b/docs/_docs/reference/other-new-features/transparent-traits.md @@ -0,0 +1,70 @@ +--- +layout: doc-page +title: "Transparent Traits" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/transparent-traits.html +--- + +Traits are used in two roles: + + 1. As mixins for other classes and traits + 2. As types of vals, defs, or parameters + +Some traits are used primarily in the first role, and we usually do not want to see them in inferred types. An example is the [`Product`](https://scala-lang.org/api/3.x/scala/Product.html) trait that the compiler adds as a mixin trait to every case class or case object. In Scala 2, this parent trait sometimes makes inferred types more complicated than they should be. Example: + +```scala +trait Kind +case object Var extends Kind +case object Val extends Kind +val x = Set(if condition then Val else Var) +``` + +Here, the inferred type of `x` is `Set[Kind & Product & Serializable]` whereas one would have hoped it to be `Set[Kind]`. The reasoning for this particular type to be inferred is as follows: + +- The type of the conditional above is the [union type](../new-types/union-types.md) `Val | Var`. +- A union type is widened in type inference to the least supertype that is not a union type. + In the example, this type is `Kind & Product & Serializable` since all three traits are traits of both `Val` and `Var`. + So that type becomes the inferred element type of the set. + +Scala 3 allows one to mark a mixin trait as `transparent`, which means that it can be suppressed in type inference. 
Here's an example that follows the lines of the code above, but now with a new transparent trait `S` instead of `Product`:
+
+```scala
+transparent trait S
+trait Kind
+object Var extends Kind, S
+object Val extends Kind, S
+val x = Set(if condition then Val else Var)
+```
+
+Now `x` has inferred type `Set[Kind]`. The common transparent trait `S` does not
+appear in the inferred type.
+
+## Transparent Traits
+
+The traits [`scala.Product`](https://scala-lang.org/api/3.x/scala/Product.html), [`java.io.Serializable`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/io/Serializable.html) and [`java.lang.Comparable`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/Comparable.html)
+are treated automatically as transparent. Other traits are turned into transparent traits using the modifier `transparent`. Scala 2 traits can also be made transparent
+by adding a [`@transparentTrait`](https://scala-lang.org/api/3.x/scala/annotation/transparentTrait.html) annotation. This annotation is defined in [`scala.annotation`](https://scala-lang.org/api/3.x/scala/annotation.html). It will be deprecated and phased out once Scala 2/3 interoperability is no longer needed.
+
+Typically, transparent traits are traits
+that influence the implementation of inheriting classes and traits that are not usually used as types by themselves. Two examples from the standard collection library are:
+
+- [`IterableOps`](https://scala-lang.org/api/3.x/scala/collection/IterableOps.html), which provides method implementations for an [`Iterable`](https://scala-lang.org/api/3.x/scala/collection/Iterable.html).
+- [`StrictOptimizedSeqOps`](https://scala-lang.org/api/3.x/scala/collection/StrictOptimizedSeqOps.html), which optimises some of these implementations for sequences with efficient indexing.
+
+Generally, any trait that is extended recursively is a good candidate to be
+declared transparent. 
+ +## Rules for Inference + +Transparent traits can be given as explicit types as usual. But they are often elided when types are inferred. Roughly, the rules for type inference say that transparent traits are dropped from intersections where possible. + +The precise rules are as follows: + +- When inferring a type of a type variable, or the type of a val, or the return type of a def, +- where that type is not higher-kinded, +- and where `B` is its known upper bound or `Any` if none exists: +- If the type inferred so far is of the form `T1 & ... & Tn` where + `n >= 1`, replace the maximal number of transparent `Ti`s by `Any`, while ensuring that + the resulting type is still a subtype of the bound `B`. +- However, do not perform this widening if all transparent traits `Ti` can get replaced in that way. + +The last clause ensures that a single transparent trait instance such as [`Product`](https://scala-lang.org/api/3.x/scala/Product.html) is not widened to [`Any`](https://scala-lang.org/api/3.x/scala/Any.html). Transparent trait instances are only dropped when they appear in conjunction with some other type. diff --git a/docs/_docs/reference/other-new-features/type-test.md b/docs/_docs/reference/other-new-features/type-test.md new file mode 100644 index 000000000000..ee4e612e9a37 --- /dev/null +++ b/docs/_docs/reference/other-new-features/type-test.md @@ -0,0 +1,181 @@ +--- +layout: doc-page +title: "TypeTest" +movedTo: https://docs.scala-lang.org/scala3/reference/other-new-features/type-test.html +--- + +## TypeTest + +When pattern matching there are two situations where a runtime type test must be performed. +The first case is an explicit type test using the ascription pattern notation. + +```scala +(x: X) match + case y: Y => +``` + +The second case is when an extractor takes an argument that is not a subtype of the scrutinee type. + +```scala +(x: X) match + case y @ Y(n) => + +object Y: + def unapply(x: Y): Some[Int] = ... 
+``` + +In both cases, a class test will be performed at runtime. +But when the type test is on an abstract type (type parameter or type member), the test cannot be performed because the type is erased at runtime. + +A [`TypeTest`](https://scala-lang.org/api/3.x/scala/reflect/TypeTest.html) can be provided to make this test possible. + +```scala +package scala.reflect + +trait TypeTest[-S, T]: + def unapply(s: S): Option[s.type & T] +``` + +It provides an extractor that returns its argument typed as a `T` if the argument is a `T`. +It can be used to encode a type test. + +```scala +def f[X, Y](x: X)(using tt: TypeTest[X, Y]): Option[Y] = x match + case tt(x @ Y(1)) => Some(x) + case tt(x) => Some(x) + case _ => None +``` + +To avoid the syntactic overhead the compiler will look for a type test automatically if it detects that the type test is on abstract types. +This means that `x: Y` is transformed to `tt(x)` and `x @ Y(_)` to `tt(x @ Y(_))` if there is a contextual `TypeTest[X, Y]` in scope. +The previous code is equivalent to + +```scala +def f[X, Y](x: X)(using TypeTest[X, Y]): Option[Y] = x match + case x @ Y(1) => Some(x) + case x: Y => Some(x) + case _ => None +``` + +We could create a type test at call site where the type test can be performed with runtime class tests directly as follows + +```scala +val tt: TypeTest[Any, String] = + new TypeTest[Any, String]: + def unapply(s: Any): Option[s.type & String] = s match + case s: String => Some(s) + case _ => None + +f[AnyRef, String]("acb")(using tt) +``` + +The compiler will synthesize a new instance of a type test if none is found in scope as: + +```scala +new TypeTest[A, B]: + def unapply(s: A): Option[s.type & B] = s match + case s: B => Some(s) + case _ => None +``` + +If the type tests cannot be done there will be an unchecked warning that will be raised on the `case s: B =>` test. 
+
+The most common [`TypeTest`](https://scala-lang.org/api/3.x/scala/reflect/TypeTest.html) instances are the ones that take `Any` as the scrutinee type (i.e. `TypeTest[Any, T]`).
+To make it possible to use such instances directly in context bounds we provide the alias
+
+```scala
+package scala.reflect
+
+type Typeable[T] = TypeTest[Any, T]
+```
+
+This alias can be used as
+
+```scala
+def f[T: Typeable]: Boolean =
+  "abc" match
+    case x: T => true
+    case _ => false
+
+f[String] // true
+f[Int] // false
+```
+
+## TypeTest and ClassTag
+
+[`TypeTest`](https://scala-lang.org/api/3.x/scala/reflect/TypeTest.html) is a replacement for functionality provided previously by `ClassTag.unapply`.
+Using [`ClassTag`](https://scala-lang.org/api/3.x/scala/reflect/ClassTag.html) instances was unsound since class tags can check only the class component of a type.
+[`TypeTest`](https://scala-lang.org/api/3.x/scala/reflect/TypeTest.html) fixes that unsoundness.
+[`ClassTag`](https://scala-lang.org/api/3.x/scala/reflect/ClassTag.html) type tests are still supported but a warning will be emitted after 3.0. 
+ + +## Example + +Given the following abstract definition of Peano numbers that provides two given instances of types `TypeTest[Nat, Zero]` and `TypeTest[Nat, Succ]` + +```scala +import scala.reflect.* + +trait Peano: + type Nat + type Zero <: Nat + type Succ <: Nat + + def safeDiv(m: Nat, n: Succ): (Nat, Nat) + + val Zero: Zero + + val Succ: SuccExtractor + trait SuccExtractor: + def apply(nat: Nat): Succ + def unapply(succ: Succ): Some[Nat] + + given typeTestOfZero: TypeTest[Nat, Zero] + given typeTestOfSucc: TypeTest[Nat, Succ] +``` + +together with an implementation of Peano numbers based on type `Int` + +```scala +object PeanoInt extends Peano: + type Nat = Int + type Zero = Int + type Succ = Int + + def safeDiv(m: Nat, n: Succ): (Nat, Nat) = (m / n, m % n) + + val Zero: Zero = 0 + + val Succ: SuccExtractor = new: + def apply(nat: Nat): Succ = nat + 1 + def unapply(succ: Succ) = Some(succ - 1) + + def typeTestOfZero: TypeTest[Nat, Zero] = new: + def unapply(x: Nat): Option[x.type & Zero] = + if x == 0 then Some(x) else None + + def typeTestOfSucc: TypeTest[Nat, Succ] = new: + def unapply(x: Nat): Option[x.type & Succ] = + if x > 0 then Some(x) else None +``` + +it is possible to write the following program + +```scala +@main def test = + import PeanoInt.* + + def divOpt(m: Nat, n: Nat): Option[(Nat, Nat)] = + n match + case Zero => None + case s @ Succ(_) => Some(safeDiv(m, s)) + + val two = Succ(Succ(Zero)) + val five = Succ(Succ(Succ(two))) + + println(divOpt(five, two)) // prints "Some((2,1))" + println(divOpt(two, five)) // prints "Some((0,2))" + println(divOpt(two, Zero)) // prints "None" +``` + +Note that without the `TypeTest[Nat, Succ]` the pattern `Succ.unapply(nat: Succ)` would be unchecked. 
diff --git a/docs/_docs/reference/overview.md b/docs/_docs/reference/overview.md new file mode 100644 index 000000000000..051ba4b124cc --- /dev/null +++ b/docs/_docs/reference/overview.md @@ -0,0 +1,154 @@ +--- +layout: doc-page +title: "Overview" +movedTo: https://docs.scala-lang.org/scala3/reference/overview.html +--- + +Scala 3 implements many language changes and improvements over Scala 2. +In this reference, we discuss design decisions and present important differences compared to Scala 2. + +## Goals + +The language redesign was guided by three main goals: + +- Strengthen Scala's foundations. + Make the full programming language compatible with the foundational work on the + [DOT calculus](https://infoscience.epfl.ch/record/227176/files/soundness_oopsla16.pdf) + and apply the lessons learned from that work. +- Make Scala easier and safer to use. + Tame powerful constructs such as implicits to provide a gentler learning curve. Remove warts and puzzlers. +- Further improve the consistency and expressiveness of Scala's language constructs. + +Corresponding to these goals, the language changes fall into seven categories: +(1) Core constructs to strengthen foundations, (2) simplifications and (3) [restrictions](#restrictions), to make the language easier and safer to use, (4) [dropped constructs](#dropped-constructs) to make the language smaller and more regular, (5) [changed constructs](#changes) to remove warts, and increase consistency and usability, (6) [new constructs](#new-constructs) to fill gaps and increase expressiveness, (7) a new, principled approach to metaprogramming that replaces [Scala 2 experimental macros](https://docs.scala-lang.org/overviews/macros/overview.md). + +## Essential Foundations + +These new constructs directly model core features of DOT, higher-kinded types, and the [SI calculus for implicit resolution](https://infoscience.epfl.ch/record/229878/files/simplicitly_1.pdf). 
+ +- [Intersection types](new-types/intersection-types.md), replacing compound types, +- [Union types](new-types/union-types.md), +- [Type lambdas](new-types/type-lambdas.md), replacing encodings using structural types and type projection. +- [Context functions](contextual/context-functions.md), offering abstraction over given parameters. + +## Simplifications + +These constructs replace existing constructs with the aim of making the language safer and simpler to use, and to promote uniformity in code style. + +- [Trait parameters](other-new-features/trait-parameters.md) + replace [early initializers](dropped-features/early-initializers.md) with a more generally useful construct. +- [Given instances](contextual/givens.md) + replace implicit objects and defs, focussing on intent over mechanism. +- [Using clauses](contextual/using-clauses.md) + replace implicit parameters, avoiding their ambiguities. +- [Extension methods](contextual/extension-methods.md) + replace implicit classes with a clearer and simpler mechanism. +- [Opaque type aliases](other-new-features/opaques.md) + replace most uses of value classes while guaranteeing absence of boxing. +- [Top-level definitions](dropped-features/package-objects.md) + replace package objects, dropping syntactic boilerplate. +- [Export clauses](other-new-features/export.md) + provide a simple and general way to express aggregation, which can replace + the previous facade pattern of package objects inheriting from classes. +- [Vararg splices](changed-features/vararg-splices.md) + now use the form `xs*` in function arguments and patterns instead of `xs: _*` and `xs @ _*`, +- [Universal apply methods](other-new-features/creator-applications.md) + allow using simple function call syntax instead of `new` expressions. `new` expressions stay around + as a fallback for the cases where creator applications cannot be used. 
+ +With the exception of [early initializers](dropped-features/early-initializers.md) and old-style vararg patterns, all superseded constructs continue to be available in Scala 3.0. The plan is to deprecate and phase them out later. + +Value classes (superseded by opaque type aliases) are a special case. There are currently no deprecation plans for value classes, since we might bring them back in a more general form if they are supported natively by the JVM as is planned by [project Valhalla](https://openjdk.java.net/projects/valhalla/). + +## Restrictions + +These constructs are restricted to make the language safer. + +- [Implicit Conversions](contextual/conversions.md): + there is only one way to define implicit conversions instead of many, and potentially surprising implicit conversions require a language import. +- [Given Imports](contextual/given-imports.md): + implicits now require a special form of import, to make the import clearly visible. +- [Type Projection](dropped-features/type-projection.md): + only classes can be used as prefix `C` of a type projection `C#A`. Type projection on abstract types is no longer supported since it is unsound. +- [Multiversal Equality](contextual/multiversal-equality.md): + implement an "opt-in" scheme to rule out nonsensical comparisons with `==` and `!=`. +- [infix](changed-features/operators.md): + make method application syntax uniform across code bases. + +Unrestricted implicit conversions continue to be available in Scala 3.0, but will be deprecated and removed later. Unrestricted versions of the other constructs in the list above are available only under `-source 3.0-migration`. + +## Dropped Constructs + +These constructs are proposed to be dropped without a new construct replacing them. The motivation for dropping these constructs is to simplify the language and its implementation. 
+ +- [DelayedInit](dropped-features/delayed-init.md), +- [Existential types](dropped-features/existential-types.md), +- [Procedure syntax](dropped-features/procedure-syntax.md), +- [Class shadowing](dropped-features/class-shadowing.md), +- [XML literals](dropped-features/xml.md), +- [Symbol literals](dropped-features/symlits.md), +- [Auto application](dropped-features/auto-apply.md), +- [Weak conformance](dropped-features/weak-conformance.md), +- Compound types (replaced by [Intersection types](new-types/intersection-types.md)), +- [Auto tupling](https://github.com/lampepfl/dotty/pull/4311) (implemented, but not merged). + +The date when these constructs are dropped varies. The current status is: + +- Not implemented at all: + - DelayedInit, existential types, weak conformance. +- Supported under `-source 3.0-migration`: + - procedure syntax, class shadowing, symbol literals, auto application, auto tupling in a restricted form. +- Supported in 3.0, to be deprecated and phased out later: + - [XML literals](dropped-features/xml.md), compound types. + +## Changes + +These constructs have undergone changes to make them more regular and useful. + +- [Structural Types](changed-features/structural-types.md): + They now allow pluggable implementations, which greatly increases their usefulness. Some usage patterns are restricted compared to the status quo. +- [Name-based pattern matching](changed-features/pattern-matching.md): + The existing undocumented Scala 2 implementation has been codified in a slightly simplified form. +- [Automatic Eta expansion](changed-features/eta-expansion.md): + Eta expansion is now performed universally also in the absence of an expected type. The postfix `_` operator is thus made redundant. It will be deprecated and dropped after Scala 3.0. +- [Implicit Resolution](changed-features/implicit-resolution.md): + The implicit resolution rules have been cleaned up to make them more useful and less surprising. 
Implicit scope is restricted to no longer include package prefixes.
+
+Most aspects of old-style implicit resolution are still available under `-source 3.0-migration`. The other changes in this list are applied unconditionally.
+
+## New Constructs
+
+These are additions to the language that make it more powerful or pleasant to use.
+
+- [Enums](enums/enums.md) provide concise syntax for enumerations and [algebraic data types](enums/adts.md).
+- [Parameter untupling](other-new-features/parameter-untupling.md) avoids having to use `case` for tupled parameter destructuring.
+- [Dependent function types](new-types/dependent-function-types.md) generalize dependent methods to dependent function values and types.
+- [Polymorphic function types](new-types/polymorphic-function-types.md) generalize polymorphic methods to polymorphic function values and types.
+ _Current status_: There is a proposal and a merged prototype implementation, but the implementation has not been finalized (it is notably missing type inference support).
+- [Kind polymorphism](other-new-features/kind-polymorphism.md) allows the definition of operators working equally on types and type constructors.
+- [`@targetName` annotations](other-new-features/targetName.md) make it easier to interoperate with code written in other languages and give more flexibility for avoiding name clashes.
+
+## Metaprogramming
+
+The following constructs together aim to put metaprogramming in Scala on a new basis. So far, metaprogramming was achieved by a combination of macros and libraries such as [Shapeless](https://github.com/milessabin/shapeless) that were in turn based on some key macros. Current Scala 2 macro mechanisms are a thin veneer on top of the current Scala 2 compiler, which makes them fragile and in many cases impossible to port to Scala 3.
+ +It's worth noting that macros were never included in the [Scala 2 language specification](https://scala-lang.org/files/archive/spec/2.13/) and were so far made available only under an `-experimental` flag. This has not prevented their widespread usage. + +To enable porting most uses of macros, we are experimenting with the advanced language constructs listed below. These designs are more provisional than the rest of the proposed language constructs for Scala 3.0. There might still be some changes until the final release. Stabilizing the feature set needed for metaprogramming is our first priority. + +- [Match Types](new-types/match-types.md) + allow computation on types. +- [Inline](metaprogramming/inline.md) + provides by itself a straightforward implementation of some simple macros and is at the same time an essential building block for the implementation of complex macros. +- [Quotes and Splices](metaprogramming/macros.md) + provide a principled way to express macros and staging with a unified set of abstractions. +- [Type class derivation](contextual/derivation.md) + provides an in-language implementation of the `Gen` macro in Shapeless and other foundational libraries. The new implementation is more robust, efficient and easier to use than the macro. +- [By-name context parameters](contextual/by-name-context-parameters.md) + provide a more robust in-language implementation of the `Lazy` macro in [Shapeless](https://github.com/milessabin/shapeless). + +## See Also + +[A classification of proposed language features](./features-classification.md) is +an expanded version of this page that adds the status (i.e. relative importance to be a part of Scala 3, and relative urgency when to decide this) and expected migration cost +of each language construct. 
diff --git a/docs/docs/reference/soft-modifier.md b/docs/_docs/reference/soft-modifier.md similarity index 80% rename from docs/docs/reference/soft-modifier.md rename to docs/_docs/reference/soft-modifier.md index 1097eccec9ff..6421c38f00dd 100644 --- a/docs/docs/reference/soft-modifier.md +++ b/docs/_docs/reference/soft-modifier.md @@ -1,11 +1,12 @@ --- layout: doc-page -title: Soft Keywords +title: "Soft Keywords" +movedTo: https://docs.scala-lang.org/scala3/reference/soft-modifier.html --- -A soft modifier is one of the identifiers `opaque`, `inline`, `open`, `transparent`, and `infix`. +A soft modifier is one of the identifiers `infix`, `inline`, `opaque`, `open` and `transparent`. -A soft keyword is a soft modifier, or one of `derives`, `end`, `extension`, `using`, `|`, `+`, `-`, `*` +A soft keyword is a soft modifier, or one of `as`, `derives`, `end`, `extension`, `throws`, `using`, `|`, `+`, `-`, `*` A soft modifier is treated as potential modifier of a definition if it is followed by a hard modifier or a keyword combination starting a definition (`def`, `val`, `var`, `type`, `given`, `class`, `trait`, `object`, `enum`, `case class`, `case object`). Between the two words there may be a sequence of newline tokens and soft modifiers. diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md new file mode 100644 index 000000000000..ff219a46081c --- /dev/null +++ b/docs/_docs/reference/syntax.md @@ -0,0 +1,451 @@ +--- +layout: doc-page +title: "Scala 3 Syntax Summary" +movedTo: https://docs.scala-lang.org/scala3/reference/syntax.html +--- + +The following description of Scala tokens uses literal characters `‘c’` when +referring to the ASCII fragment `\u0000` – `\u007F`. 
+ +_Unicode escapes_ are used to represent the [Unicode character](https://www.w3.org/International/articles/definitions-characters/) with the given +hexadecimal code: + +``` +UnicodeEscape ::= ‘\’ ‘u’ {‘u’} hexDigit hexDigit hexDigit hexDigit +hexDigit ::= ‘0’ | … | ‘9’ | ‘A’ | … | ‘F’ | ‘a’ | … | ‘f’ +``` + +Informal descriptions are typeset as `“some comment”`. + +### Lexical Syntax + +The lexical syntax of Scala is given by the following grammar in EBNF +form. + +``` +whiteSpace ::= ‘\u0020’ | ‘\u0009’ | ‘\u000D’ | ‘\u000A’ +upper ::= ‘A’ | … | ‘Z’ | ‘\$’ | ‘_’ “… and Unicode category Lu” +lower ::= ‘a’ | … | ‘z’ “… and Unicode category Ll” +letter ::= upper | lower “… and Unicode categories Lo, Lt, Nl” +digit ::= ‘0’ | … | ‘9’ +paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ | ‘'(’ | ‘'[’ | ‘'{’ +delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ +opchar ::= “printableChar not matched by (whiteSpace | upper | + lower | letter | digit | paren | delim | opchar | + Unicode_Sm | Unicode_So)” +printableChar ::= “all characters in [\u0020, \u007F] inclusive” +charEscapeSeq ::= ‘\’ (‘b’ | ‘t’ | ‘n’ | ‘f’ | ‘r’ | ‘"’ | ‘'’ | ‘\’) + +op ::= opchar {opchar} +varid ::= lower idrest +alphaid ::= upper idrest + | varid +plainid ::= alphaid + | op +id ::= plainid + | ‘`’ { charNoBackQuoteOrNewline | UnicodeEscape | charEscapeSeq } ‘`’ +idrest ::= {letter | digit} [‘_’ op] +quoteId ::= ‘'’ alphaid + +integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] +decimalNumeral ::= ‘0’ | nonZeroDigit [{digit | ‘_’} digit] +hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] +nonZeroDigit ::= ‘1’ | … | ‘9’ + +floatingPointLiteral + ::= [decimalNumeral] ‘.’ digit [{digit | ‘_’} digit] [exponentPart] [floatType] + | decimalNumeral exponentPart [floatType] + | decimalNumeral floatType +exponentPart ::= (‘E’ | ‘e’) [‘+’ | ‘-’] digit [{digit | ‘_’} digit] +floatType ::= ‘F’ | ‘f’ | ‘D’ | ‘d’ + +booleanLiteral ::= ‘true’ | ‘false’ + +characterLiteral ::= ‘'’ (printableChar | 
charEscapeSeq) ‘'’ + +stringLiteral ::= ‘"’ {stringElement} ‘"’ + | ‘"""’ multiLineChars ‘"""’ +stringElement ::= printableChar \ (‘"’ | ‘\’) + | UnicodeEscape + | charEscapeSeq +multiLineChars ::= {[‘"’] [‘"’] char \ ‘"’} {‘"’} +processedStringLiteral + ::= alphaid ‘"’ {[‘\’] processedStringPart | ‘\\’ | ‘\"’} ‘"’ + | alphaid ‘"""’ {[‘"’] [‘"’] char \ (‘"’ | ‘$’) | escape} {‘"’} ‘"""’ +processedStringPart + ::= printableChar \ (‘"’ | ‘$’ | ‘\’) | escape +escape ::= ‘$$’ + | ‘$’ letter { letter | digit } + | ‘{’ Block [‘;’ whiteSpace stringFormat whiteSpace] ‘}’ +stringFormat ::= {printableChar \ (‘"’ | ‘}’ | ‘ ’ | ‘\t’ | ‘\n’)} + +symbolLiteral ::= ‘'’ plainid // until 2.13 + +comment ::= ‘/*’ “any sequence of characters; nested comments are allowed” ‘*/’ + | ‘//’ “any sequence of characters up to end of line” + +nl ::= “new line character” +semi ::= ‘;’ | nl {nl} +``` + +## Optional Braces + +The lexical analyzer also inserts `indent` and `outdent` tokens that represent regions of indented code [at certain points](./other-new-features/indentation.md). + +In the context-free productions below we use the notation `<<< ts >>>` +to indicate a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent`. Analogously, the +notation `:<<< ts >>>` indicates a token sequence `ts` that is either enclosed in a pair of braces `{ ts }` or that constitutes an indented region `indent ts outdent` that follows +a `:` at the end of a line. 
+ +``` + <<< ts >>> ::= ‘{’ ts ‘}’ + | indent ts outdent +:<<< ts >>> ::= [nl] ‘{’ ts ‘}’ + | `:` indent ts outdent +``` + +## Keywords + +### Regular keywords + +``` +abstract case catch class def do else +enum export extends false final finally for +given if implicit import lazy match new +null object override package private protected return +sealed super then throw trait true try +type val var while with yield +: = <- => <: :> # +@ =>> ?=> +``` + +### Soft keywords + +``` +as derives end extension infix inline opaque open throws +transparent using | * + - +``` + +See the [separate section on soft keywords](./soft-modifier.md) for additional +details on where a soft keyword is recognized. + +## Context-free Syntax + +The context-free syntax of Scala is given by the following EBNF +grammar: + +### Literals and Paths +``` +SimpleLiteral ::= [‘-’] integerLiteral + | [‘-’] floatingPointLiteral + | booleanLiteral + | characterLiteral + | stringLiteral +Literal ::= SimpleLiteral + | processedStringLiteral + | symbolLiteral + | ‘null’ + +QualId ::= id {‘.’ id} +ids ::= id {‘,’ id} + +SimpleRef ::= id + | [id ‘.’] ‘this’ + | [id ‘.’] ‘super’ [ClassQualifier] ‘.’ id + +ClassQualifier ::= ‘[’ id ‘]’ +``` + +### Types +``` +Type ::= FunType + | HkTypeParamClause ‘=>>’ Type + | FunParamClause ‘=>>’ Type + | MatchType + | InfixType +FunType ::= FunTypeArgs (‘=>’ | ‘?=>’) Type + | HKTypeParamClause '=>' Type +FunTypeArgs ::= InfixType + | ‘(’ [ FunArgTypes ] ‘)’ + | FunParamClause +FunParamClause ::= ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’ +TypedFunParam ::= id ‘:’ Type +MatchType ::= InfixType `match` <<< TypeCaseClauses >>> +InfixType ::= RefinedType {id [nl] RefinedType} +RefinedType ::= AnnotType {[nl] Refinement} +AnnotType ::= SimpleType {Annotation} + +SimpleType ::= SimpleLiteral + | ‘?’ TypeBounds + | id + | Singleton ‘.’ id + | Singleton ‘.’ ‘type’ + | ‘(’ Types ‘)’ + | Refinement + | ‘$’ ‘{’ Block ‘}’ -- unless inside quoted pattern + | ‘$’ ‘{’ Pattern ‘}’ -- 
only inside quoted pattern + | SimpleType1 TypeArgs + | SimpleType1 ‘#’ id +Singleton ::= SimpleRef + | SimpleLiteral + | Singleton ‘.’ id + +FunArgType ::= Type + | ‘=>’ Type +FunArgTypes ::= FunArgType { ‘,’ FunArgType } +ParamType ::= [‘=>’] ParamValueType +ParamValueType ::= Type [‘*’] +TypeArgs ::= ‘[’ Types ‘]’ +Refinement ::= ‘{’ [RefineDcl] {semi [RefineDcl]} ‘}’ +TypeBounds ::= [‘>:’ Type] [‘<:’ Type] +TypeParamBounds ::= TypeBounds {‘:’ Type} +Types ::= Type {‘,’ Type} +``` + +### Expressions +``` +Expr ::= FunParams (‘=>’ | ‘?=>’) Expr + | HkTypeParamClause ‘=>’ Expr + | Expr1 +BlockResult ::= FunParams (‘=>’ | ‘?=>’) Block + | HkTypeParamClause ‘=>’ Block + | Expr1 +FunParams ::= Bindings + | id + | ‘_’ +Expr1 ::= [‘inline’] ‘if’ ‘(’ Expr ‘)’ {nl} Expr [[semi] ‘else’ Expr] + | [‘inline’] ‘if’ Expr ‘then’ Expr [[semi] ‘else’ Expr] + | ‘while’ ‘(’ Expr ‘)’ {nl} Expr + | ‘while’ Expr ‘do’ Expr + | ‘try’ Expr Catches [‘finally’ Expr] + | ‘try’ Expr [‘finally’ Expr] + | ‘throw’ Expr + | ‘return’ [Expr] + | ForExpr + | [SimpleExpr ‘.’] id ‘=’ Expr + | PrefixOperator SimpleExpr ‘=’ Expr + | SimpleExpr ArgumentExprs ‘=’ Expr + | PostfixExpr [Ascription] + | ‘inline’ InfixExpr MatchClause +Ascription ::= ‘:’ InfixType + | ‘:’ Annotation {Annotation} +Catches ::= ‘catch’ (Expr | ExprCaseClause) +PostfixExpr ::= InfixExpr [id] -- only if language.postfixOperators is enabled +InfixExpr ::= PrefixExpr + | InfixExpr id [nl] InfixExpr + | InfixExpr MatchClause +MatchClause ::= ‘match’ <<< CaseClauses >>> +PrefixExpr ::= [PrefixOperator] SimpleExpr +PrefixOperator ::= ‘-’ | ‘+’ | ‘~’ | ‘!’ +SimpleExpr ::= SimpleRef + | Literal + | ‘_’ + | BlockExpr + | ‘$’ ‘{’ Block ‘}’ -- unless inside quoted pattern + | ‘$’ ‘{’ Pattern ‘}’ -- only inside quoted pattern + | Quoted + | quoteId -- only inside splices + | ‘new’ ConstrApp {‘with’ ConstrApp} [TemplateBody] + | ‘new’ TemplateBody + | ‘(’ ExprsInParens ‘)’ + | SimpleExpr ‘.’ id + | SimpleExpr ‘.’ MatchClause + | SimpleExpr 
TypeArgs + | SimpleExpr ArgumentExprs +Quoted ::= ‘'’ ‘{’ Block ‘}’ + | ‘'’ ‘[’ Type ‘]’ +ExprsInParens ::= ExprInParens {‘,’ ExprInParens} +ExprInParens ::= PostfixExpr ‘:’ Type + | Expr +ParArgumentExprs ::= ‘(’ [‘using’] ExprsInParens ‘)’ + | ‘(’ [ExprsInParens ‘,’] PostfixExpr ‘*’ ‘)’ +ArgumentExprs ::= ParArgumentExprs + | BlockExpr +BlockExpr ::= <<< (CaseClauses | Block) >>> +Block ::= {BlockStat semi} [BlockResult] +BlockStat ::= Import + | {Annotation {nl}} {LocalModifier} Def + | Extension + | Expr1 + | EndMarker + +ForExpr ::= ‘for’ ‘(’ Enumerators0 ‘)’ {nl} [‘do‘ | ‘yield’] Expr + | ‘for’ ‘{’ Enumerators0 ‘}’ {nl} [‘do‘ | ‘yield’] Expr + | ‘for’ Enumerators0 (‘do‘ | ‘yield’) Expr +Enumerators0 ::= {nl} Enumerators [semi] +Enumerators ::= Generator {semi Enumerator | Guard} +Enumerator ::= Generator + | Guard {Guard} + | Pattern1 ‘=’ Expr +Generator ::= [‘case’] Pattern1 ‘<-’ Expr +Guard ::= ‘if’ PostfixExpr + +CaseClauses ::= CaseClause { CaseClause } +CaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Block +ExprCaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Expr +TypeCaseClauses ::= TypeCaseClause { TypeCaseClause } +TypeCaseClause ::= ‘case’ InfixType ‘=>’ Type [semi] + +Pattern ::= Pattern1 { ‘|’ Pattern1 } +Pattern1 ::= Pattern2 [‘:’ RefinedType] +Pattern2 ::= [id ‘@’] InfixPattern [‘*’] +InfixPattern ::= SimplePattern { id [nl] SimplePattern } +SimplePattern ::= PatVar + | Literal + | ‘(’ [Patterns] ‘)’ + | Quoted + | SimplePattern1 [TypeArgs] [ArgumentPatterns] + | ‘given’ RefinedType +SimplePattern1 ::= SimpleRef + | SimplePattern1 ‘.’ id +PatVar ::= varid + | ‘_’ +Patterns ::= Pattern {‘,’ Pattern} +ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ + | ‘(’ [Patterns ‘,’] PatVar ‘*’ ‘)’ +``` + +### Type and Value Parameters +``` +ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ +ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] id [HkTypeParamClause] TypeParamBounds + +DefTypeParamClause::= ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ +DefTypeParam ::= {Annotation} 
id [HkTypeParamClause] TypeParamBounds
+
+TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’
+TypTypeParam      ::= {Annotation} id [HkTypeParamClause] TypeBounds
+
+HkTypeParamClause ::= ‘[’ HkTypeParam {‘,’ HkTypeParam} ‘]’
+HkTypeParam       ::= {Annotation} [‘+’ | ‘-’] (id [HkTypeParamClause] | ‘_’) TypeBounds
+
+ClsParamClauses   ::= {ClsParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ‘)’]
+ClsParamClause    ::= [nl] ‘(’ ClsParams ‘)’
+                    | [nl] ‘(’ ‘using’ (ClsParams | FunArgTypes) ‘)’
+ClsParams         ::= ClsParam {‘,’ ClsParam}
+ClsParam          ::= {Annotation} [{Modifier} (‘val’ | ‘var’) | ‘inline’] Param
+Param             ::= id ‘:’ ParamType [‘=’ Expr]
+
+DefParamClauses   ::= {DefParamClause} [[nl] ‘(’ [‘implicit’] DefParams ‘)’]
+DefParamClause    ::= [nl] ‘(’ DefParams ‘)’ | UsingParamClause
+UsingParamClause  ::= [nl] ‘(’ ‘using’ (DefParams | FunArgTypes) ‘)’
+DefParams         ::= DefParam {‘,’ DefParam}
+DefParam          ::= {Annotation} [‘inline’] Param
+```
+
+### Bindings and Imports
+```
+Bindings          ::= ‘(’ [Binding {‘,’ Binding}] ‘)’
+Binding           ::= (id | ‘_’) [‘:’ Type]
+
+Modifier          ::= LocalModifier
+                    | AccessModifier
+                    | ‘override’
+                    | ‘opaque’
+LocalModifier     ::= ‘abstract’
+                    | ‘final’
+                    | ‘sealed’
+                    | ‘open’
+                    | ‘implicit’
+                    | ‘lazy’
+                    | ‘inline’
+AccessModifier    ::= (‘private’ | ‘protected’) [AccessQualifier]
+AccessQualifier   ::= ‘[’ id ‘]’
+
+Annotation        ::= ‘@’ SimpleType1 {ParArgumentExprs}
+
+Import            ::= ‘import’ ImportExpr {‘,’ ImportExpr}
+Export            ::= ‘export’ ImportExpr {‘,’ ImportExpr}
+ImportExpr        ::= SimpleRef {‘.’ id} ‘.’ ImportSpec
+                    | SimpleRef ‘as’ id
+ImportSpec        ::= NamedSelector
+                    | WildCardSelector
+                    | ‘{’ ImportSelectors ‘}’
+NamedSelector     ::= id [‘as’ (id | ‘_’)]
+WildCardSelector  ::= ‘*’ | ‘given’ [InfixType]
+ImportSelectors   ::= NamedSelector [‘,’ ImportSelectors]
+                    | WildCardSelector {‘,’ WildCardSelector}
+
+EndMarker         ::= ‘end’ EndMarkerTag    -- when followed by EOL
+EndMarkerTag      ::= id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | ‘try’
+                    | ‘new’ | ‘this’ | ‘given’ | ‘extension’ | ‘val’
+``` + +### Declarations and Definitions +``` +RefineDcl ::= ‘val’ ValDcl + | ‘def’ DefDcl + | ‘type’ {nl} TypeDcl +Dcl ::= RefineDcl + | ‘var’ VarDcl +ValDcl ::= ids ‘:’ Type +VarDcl ::= ids ‘:’ Type +DefDcl ::= DefSig ‘:’ Type +DefSig ::= id [DefTypeParamClause] DefParamClauses +TypeDcl ::= id [TypeParamClause] {FunParamClause} TypeBounds [‘=’ Type] + +Def ::= ‘val’ PatDef + | ‘var’ PatDef + | ‘def’ DefDef + | ‘type’ {nl} TypeDcl + | TmplDef +PatDef ::= ids [‘:’ Type] ‘=’ Expr + | Pattern2 [‘:’ Type] ‘=’ Expr +DefDef ::= DefSig [‘:’ Type] ‘=’ Expr + | ‘this’ DefParamClause DefParamClauses ‘=’ ConstrExpr + +TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef + | [‘case’] ‘object’ ObjectDef + | ‘enum’ EnumDef + | ‘given’ GivenDef +ClassDef ::= id ClassConstr [Template] +ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses +ConstrMods ::= {Annotation} [AccessModifier] +ObjectDef ::= id [Template] +EnumDef ::= id ClassConstr InheritClauses EnumBody +GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) +GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefParamClause`, `UsingParamClause` must be present +StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ TemplateBody] +Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} + ‘(’ DefParam ‘)’ {UsingParamClause} ExtMethods +ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> +ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef +Template ::= InheritClauses [TemplateBody] +InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] +ConstrApps ::= ConstrApp ({‘,’ ConstrApp} | {‘with’ ConstrApp}) +ConstrApp ::= SimpleType1 {Annotation} {ParArgumentExprs} +ConstrExpr ::= SelfInvocation + | <<< SelfInvocation {semi BlockStat} >>> +SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs} + +TemplateBody ::= :<<< [SelfType] TemplateStat {semi TemplateStat} >>> +TemplateStat ::= Import + | Export + | {Annotation [nl]} 
{Modifier} Def
+                    | {Annotation [nl]} {Modifier} Dcl
+                    | Extension
+                    | Expr1
+                    | EndMarker
+                    |
+SelfType          ::= id [‘:’ InfixType] ‘=>’
+                    | ‘this’ ‘:’ InfixType ‘=>’
+
+EnumBody          ::= :<<< [SelfType] EnumStat {semi EnumStat} >>>
+EnumStat          ::= TemplateStat
+                    | {Annotation [nl]} {Modifier} EnumCase
+EnumCase          ::= ‘case’ (id ClassConstr [‘extends’ ConstrApps] | ids)
+
+TopStats          ::= TopStat {semi TopStat}
+TopStat           ::= Import
+                    | Export
+                    | {Annotation [nl]} {Modifier} Def
+                    | Extension
+                    | Packaging
+                    | PackageObject
+                    | EndMarker
+                    |
+Packaging         ::= ‘package’ QualId :<<< TopStats >>>
+PackageObject     ::= ‘package’ ‘object’ ObjectDef
+
+CompilationUnit   ::= {‘package’ QualId semi} TopStats
+```
diff --git a/docs/docs/release-notes/0.1.2.md b/docs/_docs/release-notes/0.1.2.md
similarity index 100%
rename from docs/docs/release-notes/0.1.2.md
rename to docs/_docs/release-notes/0.1.2.md
diff --git a/docs/docs/release-notes/syntax-changes-0.22.md b/docs/_docs/release-notes/syntax-changes-0.22.md
similarity index 100%
rename from docs/docs/release-notes/syntax-changes-0.22.md
rename to docs/_docs/release-notes/syntax-changes-0.22.md
diff --git a/docs/docs/usage/cbt-projects.md b/docs/_docs/usage/cbt-projects.md
similarity index 100%
rename from docs/docs/usage/cbt-projects.md
rename to docs/_docs/usage/cbt-projects.md
diff --git a/docs/docs/usage/dottydoc.md b/docs/_docs/usage/dottydoc.md
similarity index 97%
rename from docs/docs/usage/dottydoc.md
rename to docs/_docs/usage/dottydoc.md
index f7e15b692d99..28c29606a301 100644
--- a/docs/docs/usage/dottydoc.md
+++ b/docs/_docs/usage/dottydoc.md
@@ -92,7 +92,7 @@ It is also possible to use one of the [default layouts](#default-layouts) that s
 Blog
 ====
 Dottydoc also allows for a simple blogging platform in the same vein as Jekyll.
-Blog posts are placed within the `./blog/_posts` directory and have to be in
+Blog posts are placed within the `./_blog/_posts` directory and have to be in
 the form `year-month-day-title.{md,html}`.
An example of this would be: @@ -252,7 +252,7 @@ to alter the way Dottydoc renders API documentation. blog-page.html -------------- -A blog page uses files placed in `./blog/_posts/` as input to render a blog. +A blog page uses files placed in `./_blog/_posts/` as input to render a blog. Default Includes ================ diff --git a/docs/_docs/usage/ide-support.md b/docs/_docs/usage/ide-support.md new file mode 100644 index 000000000000..25739b021b78 --- /dev/null +++ b/docs/_docs/usage/ide-support.md @@ -0,0 +1,7 @@ +--- +layout: doc-page +title: "IDE support for Scala 3" +movedTo: https://docs.scala-lang.org/scala3/getting-started.html +--- + +This page is deprecated. Please go to the [getting-started](https://docs.scala-lang.org/scala3/getting-started.html) diff --git a/docs/_docs/usage/sbt-projects.md b/docs/_docs/usage/sbt-projects.md new file mode 100644 index 000000000000..13a61eff27db --- /dev/null +++ b/docs/_docs/usage/sbt-projects.md @@ -0,0 +1,6 @@ +--- +layout: doc-page +title: "Using Dotty with sbt" +--- + +To try it in your project see the [Getting Started User Guide](https://docs.scala-lang.org/scala3/getting-started.html). diff --git a/docs/_docs/usage/scaladoc/blog.md b/docs/_docs/usage/scaladoc/blog.md new file mode 100644 index 000000000000..85c2d9fa044a --- /dev/null +++ b/docs/_docs/usage/scaladoc/blog.md @@ -0,0 +1,40 @@ +--- +layout: doc-page +title: "Built-in blog" + + + +--- + + +Scaladoc allows you to include a simple blog in your documentation. For now, it +provides only basic features. In the future, we plan to include more advanced +features like tagging or author pages. + +Blog is treated a little differently than regular static sites. This article will help you set up your own blog. + +## Proper directory setup + +All your blogposts must be put under `blog/_posts` directory. 
+ + +``` +├── blog +│ ├── _posts +│ │ └── 2016-12-05-implicit-function-types.md +│ └── index.html +``` + +If you are using yaml [sidebar](./static-site.md#sidebar) don't forget to place + +``` +sidebar: + - title: Blog +``` + +somewhere inside the `yaml` tree representing the sidebar sections. Scaladoc will attach under that section all of your blogposts. + +## Naming convention + +All the blogpost filenames should start with date in numeric format matching `YYYY-MM-DD`. +Example name is `2015-10-23-dotty-compiler-bootstraps.md`. diff --git a/docs/_docs/usage/scaladoc/docstrings.md b/docs/_docs/usage/scaladoc/docstrings.md new file mode 100644 index 000000000000..9935702e10bd --- /dev/null +++ b/docs/_docs/usage/scaladoc/docstrings.md @@ -0,0 +1,194 @@ +--- +layout: doc-page +title: "Docstrings - specific Tags and Features" +--- + +This chapter describes how to correctly write docstrings and how to use all the available features of scaladoc. +Since many things are the same as in the old scaladoc, some parts are reused from this [article](https://docs.scala-lang.org/overviews/scaladoc/for-library-authors.html) + + +Scaladoc extends Markdown with additional features, such as linking +to API definitions. This can be used from within static documentation and blog +posts to provide blend-in content. + + +## Where to put docstrings + +Scaladoc comments go before the items they pertain to in a special comment block that starts with a /** and ends with a */, like this: + +```scala +/** Start the comment here + * and use the left star followed by a + * white space on every line. + * + * Even on empty paragraph-break lines. + * + * Note that the * on each line is aligned + * with the second * in /** so that the + * left margin is on the same column on the + * first line and on subsequent ones. + * + * Close the comment with *\/ + * + * If you use Scaladoc tags (@param, @group, etc.), + * remember to put them at separate lines with nothing preceding. 
+ *
+ * For example:
+ *
+ * Calculate the square of the given number
+ *
+ * @param d the Double to square
+ * @return the result of squaring d
+ */
+ def square(d: Double): Double = d * d
+```
+
+In the example above, this Scaladoc comment is associated with the method square since it is right before it in the source code.
+
+Scaladoc comments can go before fields, methods, classes, traits and objects.
+For now, scaladoc doesn't support a straightforward solution to document packages. There is a dedicated GitHub
+[issue](https://github.com/lampepfl/dotty/issues/11284), where you can check the current status of the problem.
+
+For class primary constructors which in Scala coincide with the definition of the class itself, a @constructor tag is used to target a comment to be put on the primary constructor documentation rather than the class overview.
+
+## Tags
+
+Scaladoc uses `@` tags to provide specific detail fields in the comments. These include:
+
+### Class specific tags
+
+- `@constructor` placed in the class comment will describe the primary constructor.
+
+### Method specific tags
+
+- `@return` detail the return value from a method (one per method).
+
+### Method, Constructor and/or Class tags
+
+- `@throws` what exceptions (if any) the method or constructor may throw.
+- `@param` detail a value parameter for a method or constructor, provide one per parameter to the method/constructor.
+- `@tparam` detail a type parameter for a method, constructor or class. Provide one per type parameter.
+
+### Usage tags
+
+- `@see` reference other sources of information like external document links or related entities in the documentation.
+- `@note` add a note for pre or post conditions, or any other notable restrictions or expectations.
+- `@example` for providing example code or related example documentation.
+
+
+### Member grouping tags
+
+These tags are well-suited to larger types or packages, with many members.
They allow you to organize the Scaladoc page into distinct sections, with each one shown separately, in the order that you choose. + +These tags are not enabled by default! You must pass the -groups flag to Scaladoc in order to turn them on. Typically the sbt for this will look something like: + +```scala +Compile / doc / scalacOptions ++= Seq( + "-groups" +) +``` +Each section should have a single-word identifier that is used in all of these tags, shown as `group` below. By default, that identifier is shown as the title of that documentation section, but you can use @groupname to provide a longer title. + +Typically, you should put @groupprio (and optionally @groupname and @groupdesc) in the Scaladoc for the package/trait/class/object itself, describing what all the groups are, and their order. Then put @group in the Scaladoc for each member, saying which group it is in. + +Members that do not have a `@group` tag will be listed as “Ungrouped” in the resulting documentation. + +- `@group ` - mark the entity as a member of the `` group. +- `@groupname ` - provide an optional name for the group. `` is displayed as the group header before the group description. +- `@groupdesc ` - add optional descriptive text to display under the group name. Supports multiline formatted text. +- `@groupprio ` - control the order of the group on the page. Defaults to 0. Ungrouped elements have an implicit priority of 1000. Use a value between 0 and 999 to set a relative position to other groups. Low values will appear before high values. + +### Other tags + +- `@author` provide author information for the following entity +- `@version` the version of the system or API that this entity is a part of. +- `@since` like `@version` but defines the system or API that this entity was first defined in. +- `@deprecated` marks the entity as deprecated, providing both the replacement implementation that should be used and the version/date at which this entity was deprecated. 
+- `@syntax <name>` lets you change the parser for the docstring. The default syntax is markdown, however you can change it using this directive. Currently available syntaxes are `markdown` or `wiki`, e.g. `@syntax wiki`
+
+### Macros
+
+- `@define <name> <definition>` allows use of $name in other Scaladoc comments within the same source file, which will be expanded to the contents of `<definition>`.
+
+If a comment is not provided for an entity at the current inheritance level, but is supplied for the overridden entity at a higher level in the inheritance hierarchy, the comment from the super-class will be used.
+
+Likewise if `@param`, `@tparam`, `@return` and other entity tags are omitted but available from a superclass, those comments will be used.
+
+### Explicit
+
+For explicit comment inheritance, use the @inheritdoc tag.
+
+### Markup
+
+Scaladoc provides two syntax parsers: `markdown` (default) or `wikidoc`.
+It is still possible to embed HTML tags in Scaladoc (like with Javadoc), but not necessary most of the time as markup may be used instead.
+
+#### Markdown
+
+Markdown uses [commonmark flavour](https://spec.commonmark.org/current/) with two custom extensions:
+- `wikidoc` links for referencing convenience
+- `wikidoc` codeblocks with curly braces syntax
+
+
+#### Wikidoc
+
+Wikidoc is the syntax that was used by scaladoc for Scala 2. It is supported because of the large amount of existing source code that uses it; however, it is **not** recommended to use it in new projects.
+Wiki syntax can be toggled on globally with the flag `-comment-syntax wiki`, or with the `@syntax wiki` directive in a docstring.
+
+Some of the standard markup available:
+
+```
+`monospace`
+''italic text''
+'''bold text'''
+__underline__
+^superscript^
+,,subscript,,
+[[entity link]], e.g. [[scala.collection.Seq]]
+[[https://external.link External Link]], e.g. [[https://scala-lang.org Scala Language Site]]
+```
+
+For more info about wiki links, look at this [chapter](#linking-to-api)
+
+Other formatting notes
+
+- Paragraphs are started with one (or more) blank lines.
`*` in the margin for the comment is valid (and should be included) but the line should be blank otherwise. +- Headings are defined with surrounding `=` characters, with more `=` denoting subheadings. E.g. `=Heading=`, `==Sub-Heading==`, etc. +- List blocks are a sequence of list items with the same style and level, with no interruptions from other block styles. Unordered lists can be bulleted using `-`, numbered lists can be denoted using `1.`, `i.`, `I.`, or `a.` for the various numbering styles. In both cases, you must have extra space in front, and more space makes a sub-level. + +The markup for list blocks looks like: + +``` +/** Here is an unordered list: + * + * - First item + * - Second item + * - Sub-item to the second + * - Another sub-item + * - Third item + * + * Here is an ordered list: + * + * 1. First numbered item + * 1. Second numbered item + * i. Sub-item to the second + * i. Another sub-item + * 1. Third item + */ +``` + +### General Notes for Writing Scaladoc Comments + +Concise is nice! Get to the point quickly, people have limited time to spend on your documentation, use it wisely. +Omit unnecessary words. Prefer returns X rather than this method returns X, and does X,Y & Z rather than this method does X, Y and Z. +DRY - don’t repeat yourself. Resist duplicating the method description in the @return tag and other forms of repetitive commenting. + +More details on writing Scaladoc + +Further information on the formatting and style recommendations can be found in Scala-lang scaladoc style guide. + +## Linking to API + +Scaladoc allows linking to API documentation with Wiki-style links. Linking to +`scala.collection.immutable.List` is as simple as +`[[scala.collection.immutable.List]]`. For more information on the exact syntax, see [doc comment documentation](./linking.md#definition-links). 
diff --git a/docs/_docs/usage/scaladoc/index.md b/docs/_docs/usage/scaladoc/index.md new file mode 100644 index 000000000000..dc53c01db666 --- /dev/null +++ b/docs/_docs/usage/scaladoc/index.md @@ -0,0 +1,8 @@ +--- +layout: doc-page +title: "Scaladoc" +--- + +![scaladoc logo](images/scaladoc-logo.png) + +scaladoc is a tool to generate the API documentation of your Scala 3 projects. It provides similar features to `javadoc` as well as `jekyll` or `docusaurus`. diff --git a/docs/_docs/usage/scaladoc/linking.md b/docs/_docs/usage/scaladoc/linking.md new file mode 100644 index 000000000000..daef312700fc --- /dev/null +++ b/docs/_docs/usage/scaladoc/linking.md @@ -0,0 +1,95 @@ +--- +layout: doc-page +title: "Linking documentation" +--- + +Scaladoc's main feature is creating API documentation from code comments. + +By default, the code comments are understood as Markdown, though we also support +Scaladoc's old [Wiki syntax](https://docs.scala-lang.org/style/scaladoc.html). + +## Syntax + +### Definition links + +Our definition link syntax is quite close to Scaladoc's syntax, though we have made some +quality-of-life improvements. + +#### Basic syntax + +A definition link looks as follows: `[[scala.collection.immutable.List]]`. + +Which is to say, a definition link is a sequence of identifiers separated by +`.`. The identifiers can be separated with `#` as well for Scaladoc compatibility. + +By default, an identifier `id` references the first (in source order) entity +named `id`. An identifier can end with `$`, which forces it to refer to a value +(an object, a value, a given); an identifier can also end with `!`, which forces +it to refer to a type (a class, a type alias, a type member). + +The links are resolved relative to the current location in source. That is, when +documenting a class, the links are relative to the entity enclosing the class (a +package, a class, an object); the same applies to documenting definitions. 
+ +Special characters in links can be backslash-escaped, which makes them part of +identifiers instead. For example, `` [[scala.collection.immutable\.List]] `` +references the class named `` `immutable.List` `` in package `scala.collection`. + +#### New syntax + +We have extended Scaladoc definition links to make them a bit more pleasant to +write and read in source. The aim was also to bring the link and Scala syntax +closer together. The new features are: + +1. `package` can be used as a prefix to reference the enclosing package + Example: + ``` + package utils + class C { + def foo = "foo". + } + /** See also [[package.C]]. */ + class D { + def bar = "bar". + } + ``` + The `package` keyword helps make links to the enclosing package shorter + and a bit more resistant to name refactorings. +1. `this` can be used as a prefix to reference the enclosing classlike + Example: + ``` + class C { + def foo = "foo" + /** This is not [[this.foo]], this is bar. */ + def bar = "bar" + } + ``` + Using a Scala keyword here helps make the links more familiar, as well as + helps the links survive class name changes. +1. Backticks can be used to escape identifiers + Example: + ``` + def `([.abusive.])` = ??? + /** TODO: Figure out what [[`([.abusive.])`]] is. */ + def foo = `([.abusive.])` + ``` + Previously (versions 2.x), Scaladoc required backslash-escaping to reference such identifiers. Now (3.x versions), + Scaladoc allows using the familiar Scala backtick quotation. + +#### Why keep the Wiki syntax for links? + +There are a few reasons why we've kept the Wiki syntax for documentation links +instead of reusing the Markdown syntax. Those are: + +1. Nameless links in Markdown are ugly: `[](definition)` vs `[[definition]]` + By far, most links in documentation are nameless. It should be obvious how to + write them. +2. Local member lookup collides with URL fragments: `[](#field)` vs `[[#field]]` +3. 
Overload resolution collides with MD syntax: `[](meth(Int))` vs `[[meth(Int)]]` +4. Now that we have a parser for the link syntax, we can allow spaces inside (in + Scaladoc one needed to slash-escape those), but that doesn't get recognized + as a link in Markdown: `[](meth(Int, Float))` vs `[[meth(Int, Float)]]` + +None of these make it completely impossible to use the standard Markdown link +syntax, but they make it much more awkward and ugly than it needs to be. On top +of that, Markdown link syntax doesn't even save any characters. diff --git a/docs/_docs/usage/scaladoc/search-engine.md b/docs/_docs/usage/scaladoc/search-engine.md new file mode 100644 index 000000000000..a586c25c0a21 --- /dev/null +++ b/docs/_docs/usage/scaladoc/search-engine.md @@ -0,0 +1,82 @@ +--- +layout: doc-page +title: "Type-based search" +--- + +Searching for functions by their symbolic names can be time-consuming. +That is why the new scaladoc allows searching for methods and fields by their types. + + +Consider the following extension method definition: +``` +extension [T](arr: IArray[T]) def span(p: T => Boolean): (IArray[T], IArray[T]) = ... +``` +Instead of searching for `span` we can also search for `IArray[A] => (A => Boolean) => (IArray[A], IArray[A])`. + +To use this feature, type the signature of the member you are looking for in the scaladoc searchbar. This is how it works: + +![]({{ site.baseurl }}images/scaladoc/inkuire-1.0.0-M2_js_flatMap.gif) + +This feature is provided by the [Inkuire](https://github.com/VirtusLab/Inkuire) search engine, which works for Scala 3 and Kotlin. To be up-to-date with the development of this feature, follow the [Inkuire repository](https://github.com/VirtusLab/Inkuire). 
+ +## Examples of queries + +Some examples of queries with intended results: +- `List[Int] => (Int => Long) => List[Long]` -> `map` +- `Seq[A] => (A => B) => Seq[B]` -> `map` +- `(A, B) => A` -> `_1` +- `Set[Long] => Long => Boolean` -> `contains` +- `Int => Long => Int` -> `const` +- `String => Int => Char` -> `apply` +- `(Int & Float) => (String | Double)` -> `toDouble`, `toString` +- `F[A] => Int` -> `length` + +## Query syntax + +In order for a scaladoc searchbar query to be searched using Inkuire instead of the default search engine, the query has to contain the `=>` character sequence. + +Accepted input is similar to a curried function signature in Scala 3. With some differences: +- AndTypes, OrTypes and Functions have to be enclosed in parentheses e.g. `(Int & Any) => String` +- fields and parameterless methods can be found by preceding their type with `=>`, e.g., `=> Int` +- A wildcard `_` can be used to indicate that we want to match any type in a given place e.g. `Long => Double => _` +- Types in the form of single letter e.g. `A` or a letter with a digit `X1` are automatically assumed to be type variables +- Other type variables can be declared just like in polymorphic functions e.g. `[AVariable, AlsoAVariable] => AVariable => AlsoAVariable => AVariable` + +### Working with type aliases and method receivers + +When it comes to how the code is mapped to InkuireDb entries, there are some transformations to make the engine more opinionated (though open to suggestions and changes). Firstly, the receiver (non-module owner) of a function can be treated as a first argument. Automatic currying is also applied, so that the results don't depend on argument lists. When finding matches, `val`s and `def`s are not distinguished. 
+ +So the following declarations should be found by query `Num => Int => Int => Int`: +``` +class Num(): + def a(i: Int, j: Int): Int + def b(i: Int)(j: Int): Int + def c(i: Int): (Int => Int) + val d: Int => Int => Int + val e: Int => Int => Int + val f: (Int, Int) => Int +end Num + +def g(i: Num, j: Int, k: Int): Int +extension (i: Num) def h(j: Int, k: Int): Int +def i(i: Num, j: Int)(k: Int): Int +extension (i: Num) def j(j: Int)(k: Int): Int +... +``` + +When it comes to type aliases, they are desugared on both the declaration and the query signature. This means that for declarations: +``` +type Name = String + +def fromName(name: Name): String +def fromString(str: String): Name +``` +both methods, `fromName` and `fromString`, should be found for queries `Name => Name`, `String => String`, `Name => String` and `String => Name`. + +## How it works + +Inkuire works as a JavaScript worker in the browser thanks to the power of [ScalaJS](https://www.scala-js.org/). + +To enable Inkuire when running scaladoc, add the flag `-Ygenerate-inkuire`. By adding this flag two files are generated: +- `inkuire-db.json` - this is the file containing all the searchable declarations from the currently documented project in a format readable to the Inkuire search engine. +- `inkuire-config.json` - this file contains the locations of the database files that should be searchable from the documentation of the current project. By default, it will be generated with the location of the local db file as well as the default implied locations of database files in [External mappings](https://docs.scala-lang.org/scala3/guides/scaladoc/settings.html#-external-mappings). 
diff --git a/docs/_docs/usage/scaladoc/settings.md b/docs/_docs/usage/scaladoc/settings.md
new file mode 100644
index 000000000000..067967ee58cc
--- /dev/null
+++ b/docs/_docs/usage/scaladoc/settings.md
@@ -0,0 +1,179 @@
+---
+layout: doc-page
+title: "Settings"
+---
+
+This chapter lists the configuration options that can be used when calling scaladoc. Some of the information shown here can be found by calling scaladoc with the `-help` flag.
+
+## Parity with scaladoc for Scala 2
+
+Scaladoc has been rewritten from scratch and some of the features turned out to be useless in the new context.
+If you want to know the current state of compatibility with the old scaladoc flags, you can track the [progress](https://github.com/lampepfl/dotty/issues/11907) in this dedicated issue.
+
+## Providing settings
+
+Supply scaladoc settings as command-line arguments, e.g., `scaladoc -d output -project my-project target/scala-3.0.0-RC2/classes`. If called from sbt, update the value of `Compile / doc / scalacOptions` and `Compile / doc / target` respectively, e.g.
+
+```
+Compile / doc / target ++= Seq("-d", "output")
+Compile / doc / scalacOptions ++= Seq("-project", "my-project")
+```
+
+## Overview of all available settings
+
+##### -project
+The name of the project. For compatibility with Scala 2, this setting is aliased with `-doc-title`.
+
+##### -project-version
+The current version of your project that appears in the top left corner. For compatibility with Scala 2, this setting is aliased with `-doc-version`.
+
+##### -project-logo
+The logo of your project that appears in the top left corner. For compatibility with Scala 2, this setting is aliased with `-doc-logo`.
+
+##### -project-footer
+The string message that appears in the footer section. For compatibility with Scala 2, this setting is aliased with `-doc-footer`.
+
+##### -comment-syntax
+The styling language used for parsing comments.
+Currently we support two syntaxes: `markdown` or `wiki`.
+If this setting is not present, scaladoc defaults to `markdown`.
+
+##### -revision
+Revision (branch or ref) used to build the project. Useful with source links to prevent them from always pointing to the newest master, which is subject to changes.
+
+##### -source-links
+Source links provide a mapping between files in the documentation and the code repository.
+
+An example source link is:
+`-source-links:docs=github://lampepfl/dotty/master#docs`
+
+Accepted formats:
+
+\<sub-path>=\<source-link>
+\<source-link>
+
+where \<source-link> is one of the following:
+ - `github://<organization>/<repository>[/revision][#subpath]`
+   will match https://github.com/$organization/$repository/\[blob|edit]/$revision\[/$subpath]/$filePath\[$lineNumber]
+   when revision is not provided, then the revision is required to be specified as an argument for scaladoc
+ - `gitlab://<organization>/<repository>`
+   will match https://gitlab.com/$organization/$repository/-/\[blob|edit]/$revision\[/$subpath]/$filePath\[$lineNumber]
+   when revision is not provided, then the revision is required to be specified as an argument for scaladoc
+ - \<scaladoc-template>
+
+\<scaladoc-template> is a format for the `doc-source-url` parameter from the old scaladoc.
+NOTE: We only support `€{FILE_PATH_EXT}`, `€{TPL_NAME}`, `€{FILE_EXT}`,
+ `€{FILE_PATH}`, and `€{FILE_LINE}` patterns.
+
+
+A template can be defined only for a subset of sources, defined by the path prefix represented by `<sub-path>`.
+In such a case, paths used in templates will be relativized against `<sub-path>`.
+
+
+
+##### -external-mappings
+
+Mapping between regexes matching classpath entries and external documentation.
+
+An example external mapping is:
+`-external-mappings:.*scala.*::scaladoc3::https://scala-lang.org/api/3.x/,.*java.*::javadoc::https://docs.oracle.com/javase/8/docs/api/`
+
+A mapping is of the form '\<regex>::\[scaladoc3|scaladoc|javadoc]::\<path>'. You can supply several mappings, separated by commas, as shown in the example.
+
+##### -social-links
+
+Links to social sites.
For example: + +`-social-links:github::https://github.com/lampepfl/dotty,gitter::https://gitter.im/scala/scala,twitter::https://twitter.com/scala_lang` + +Valid values are of the form: '\[github|twitter|gitter|discord]::link'. Scaladoc also supports 'custom::link::white_icon_name::black_icon_name'. In this case icons must be present in 'images/' directory. + +##### -skip-by-id + +Identifiers of packages or top-level classes to skip when generating documentation. + +##### -skip-by-regex + +Regexes that match fully qualified names of packages or top-level classes to skip when generating documentation. + +##### -doc-root-content + +The file from which the root package documentation should be imported. + +##### -author + +Adding authors in docstring with `@author Name Surname` by default won't be included in generated html documentation. +If you would like to label classes with authors explicitly, run scaladoc with this flag. + +##### -groups + +Group similar functions together (based on the @group annotation) + +##### -private + +Show all types and members. Unless specified, show only public and protected types and members. + +##### -doc-canonical-base-url + +A base URL to use as prefix and add `canonical` URLs to all pages. The canonical URL may be used by search engines to choose the URL that you want people to see in search results. If unset no canonical URLs are generated. + +##### -siteroot + +A directory containing static files from which to generate documentation. Default directory is `./docs` + +##### -no-link-warnings + +Suppress warnings for ambiguous or incorrect links in members’ lookup. Doesn't affect warnings for incorrect links of assets etc. + +##### -versions-dictionary-url + +A URL pointing to a JSON document containing a dictionary: `version label -> documentation location`. 
+The JSON file has single property `versions` that holds the dictionary associating the labels of specific versions of the documentation to the URLs pointing to their index.html +Useful for libraries that maintain different versions of their documentation. + +Example JSON file: +``` +{ + "versions": { + "3.0.x": "https://dotty.epfl.ch/3.0.x/docs/index.html", + "Nightly": "https://dotty.epfl.ch/docs/index.html" + } +} +``` + +##### -snippet-compiler + +Snippet compiler arguments provide a way to configure snippet type checking. + +This setting accepts a list of arguments in the format: +args := arg{,arg} +arg := [path=]flag +where `path` is a prefix of the path to source files where snippets are located and `flag` is the mode in which snippets will be type checked. + +If the path is not present, the argument will be used as the default for all unmatched paths. + +Available flags: +compile - Enables snippet checking. +nocompile - Disables snippet checking. +fail - Enables snippet checking, asserts that snippet doesn't compile. + +The fail flag comes in handy for snippets that present that some action would eventually fail during compilation, e. g. [Opaques page](../../reference/other-new-features/opaques.md) + +Example usage: + +`-snippet-compiler:my/path/nc=nocompile,my/path/f=fail,compile` + +Which means: + +all snippets in files under directory `my/path/nc` should not be compiled at all +all snippets in files under directory `my/path/f` should fail during compilation +all other snippets should compile successfully + +##### -Ysnippet-compiler-debug + +Setting this option makes snippet compiler print the snippet as it is compiled (after wrapping). + +##### -Ydocument-synthetic-types + +Include pages providing documentation for the intrinsic types (e. g. Any, Nothing) to the docs. The setting is useful only for stdlib because scaladoc for Scala 3 relies on TASTy files, but we cannot provide them for intrinsic types since they are embedded in the compiler. 
+All other users need not concern themselves with this setting.
diff --git a/docs/_docs/usage/scaladoc/site-versioning.md b/docs/_docs/usage/scaladoc/site-versioning.md
new file mode 100644
index 000000000000..8ce52d21b067
--- /dev/null
+++ b/docs/_docs/usage/scaladoc/site-versioning.md
@@ -0,0 +1,35 @@
+---
+layout: doc-page
+title: "Site versioning"
+---
+
+Scaladoc provides a convenient way to switch between different versions of the documentation. The feature is useful if we want to expose older docs for users that didn't migrate to the new version of our library.
+
+### How to set it up
+
+The feature was designed for easy scalability with no need to regenerate all scaladocs after adding a new version. To do so, a new setting is introduced: `-versions-dictionary-url`. Its argument must be a URL to a JSON document holding information about the locations of specific versions. The JSON file has a single property `versions` that holds the dictionary associating the labels of specific versions of the documentation to the URLs pointing to their index.html
+
+Example JSON file:
+```
+{
+  "versions": {
+    "3.0.x": "https://dotty.epfl.ch/3.0.x/docs/index.html",
+    "Nightly": "https://dotty.epfl.ch/docs/index.html"
+  }
+}
+```
+
+This requires us to provide the setting while generating docs for each of the versions; however, it gives us more flexibility later. If you want to add a version of the API docs next to the previous 5 versions that you have already published, then you only need to upload the new docs to a web server and add a new entry to the JSON file. All versions of the site will now become aware of the new site version.
+
+The important thing to note is that there is only one JSON file to avoid redundancy and each scaladoc must set up its URL location beforehand, for example, in sbt:
+
+```
+doc / scalacOptions ++= Seq("-versions-dictionary-url", "https://dotty.epfl.ch/versions.json")
+```
+
+
+### How it looks from the user's perspective
+
+Providing a JSON file via `-versions-dictionary-url` enables scaladoc to link between versions. It is also convenient to be able to change the revision label in the drop-down menu. Everything will change automatically.
+
+![]({{ site.baseurl }}images/scaladoc/nightly.gif)
diff --git a/docs/_docs/usage/scaladoc/static-site.md b/docs/_docs/usage/scaladoc/static-site.md
new file mode 100644
index 000000000000..6a1f8db0824c
--- /dev/null
+++ b/docs/_docs/usage/scaladoc/static-site.md
@@ -0,0 +1,199 @@
+---
+layout: doc-page
+title: "Static documentation"
+---
+
+Scaladoc can generate static sites, as known from [Jekyll](http://jekyllrb.com/) or [Docusaurus](https://docusaurus.io/).
+Having a combined tool allows providing interaction between static documentation and API, thus allowing the two to blend naturally.
+
+Creating a site is just as simple as in Jekyll. The site root contains the
+layout of the site and all files placed there will be either considered static,
+or processed for template expansion.
+
+The files that are considered for template expansion must end in `*.{html,md}`
+and will from here on be referred to as "template files" or "templates".
+
+A simple "hello world" site could look something like this:
+
+```
+├── docs
+│ └── getting-started.md
+└── index.html
+```
+
+This will give you a site with the following files in generated documentation:
+
+```
+index.html
+docs/getting-started.html
+```
+
+Scaladoc can transform both files and directories (to organize your documentation into a tree-like structure). By default, directories have a title based on the file name and have empty content.
It is possible to provide index pages for each section by creating `index.html` or `index.md` (not both) in the dedicated directory. + +## Properties + +Scaladoc uses the [Liquid](https://shopify.github.io/liquid/) templating engine +and provides several custom filters and tags specific to Scala +documentation. + +In Scaladoc, all templates can contain YAML front-matter. The front-matter +is parsed and put into the `page` variable available in templates via Liquid. + +Example front-matter + +``` +--- +title: My custom title +--- +``` + +Scaladoc uses some predefined properties to controls some aspects of page. + +Predefined properties: + +- **title** provide page title that will be used in navigation and HTML metadata. +- **extraCss** additional `.css` files that will be included in this page. Paths should be relative to the documentation root. **This setting is not exported to the template engine.** +- **extraJs** additional `.js` files that will be included in this page. Paths should be relative to the documentation root. **This setting is not exported to the template engine.** +- **hasFrame** when set to `false` page will not include default layout (navigation, breadcrumbs, etc.) but only token HTML wrapper to provide metadata and resources (js and css files). **This setting is not exported to the template engine.** +- **layout** - predefined layout to use, see below. **This setting is not exported to the template engine.** + + +## Using existing Templates and Layouts + +To perform template expansion, Dottydoc looks at the `layout` field in the front-matter. +Here's a simple example of the templating system in action, `index.html`: + +```html +--- +layout: main +--- + +

Hello world!

+``` + +With a simple main template like this: + +{% raw %} +```html + + + Hello, world! + + + {{ content }} + + +``` + +Would result in `{{ content }}` being replaced by `

Hello world!

` from +the `index.html` file. +{% endraw %} + +Layouts must be placed in a `_layouts` directory in the site root: + +``` +├── _layouts +│ └── main.html +├── docs +│ └── getting-started.md +└── index.html +``` + +## Sidebar + +Scaladoc by default uses layout of files in `docs` directory to create table of content. There is also ability to override it by providing a `sidebar.yml` file in the site root: + +```yaml +sidebar: + - title: Blog + - title: My title + page: my-page1.md + - page: my-page2.md + - page: my-page3/subsection + - title: Reference + subsection: + - page: my-page3.md + - index: my-page4/index.md + subsection: + - page: my-page4/my-page4.md + - title: My subsection + index: my-page5/index.md + subsection: + - page: my-page5/my-page5.md + - index: my-page6/index.md + subsection: + - index: my-page6/my-page6/index.md + subsection: + - page: my-page6/my-page6/my-page6.md +``` + +The `sidebar` key is mandatory. +On each level, you can have three different types of entries: `page`, `blog` or `subsection`. + +`page` is a leaf of the structure and accepts the following attributes: +- `title` (optional) - title of the page +- `page` (mandatory) - path to the file that will represent the page, it can be either html or markdown file to be rendered, there is also the possibility to pass the `directory` path. If so, the scaladoc will render the directory and all its content as if there were no `sidebar.yml` basing on its tree structure and index files. + +The `page` property `subsection` accepts nested nodes, these can be either pages or subsections, which allow you to create tree-like navigation. 
The attributes are: +- `title` (optional) - title of the page +- `index` (optional) - path to the file that will represent the index file of the subsection, it can be either html or markdown file to be rendered +- `subsection` (mandatory) - nested nodes, can be either pages or subsections + +In `subsection`s, you can omit `title` or `index`, however not specifying any of these properties prevents you from specifying the title of the section. + +`blog` is a special node represented by simple entry `- title: Blog` with no other attributes. All your blog posts will be automatically linked under this section. You can read more about the blog [here](./blog.md). + +``` +├── blog +│ ├── _posts +│ │ └── 2016-12-05-implicit-function-types.md +│ └── index.html +├── index.html +└── sidebar.yml +``` + +## Hierarchy of title + +If the title is specified multiple times, the priority is as follows (from highest to lowest priority): + +#### Page + +1. `title` from the `front-matter` of the markdown/html file +2. `title` property from the `sidebar.yml` property +3. filename + +#### Subsection + +1. `title` from the `front-matter` of the markdown/html index file +2. `title` property from the `sidebar.yml` property +3. filename + +Note that if you skip the `index` file in your tree structure or you don't specify the `title` in the frontmatter, there will be given a generic name `index`. The same applies when using `sidebar.yml` but not specifying `title` nor `index`, just a subsection. Again, a generic `index` name will appear. + + +## Static resources + +You can attach static resources (pdf, images) to your documentation by using two dedicated directories: +`resources` and `images`. After placing your assets under any of these directories, you can reference them in markdown +as if they were relatively at the same level. 
+ +For example, consider the following situation: + +``` +├── blog +│ ├── _posts +│ │ └── 2016-12-05-implicit-function-types.md +│ └── index.html +├── index.html +├── resources +│ └── my_file.pdf +├── images +│ └── my_image.png +└── sidebar.yml + +``` + +You can refer to the assets from within any of the files using markdown links: + +``` +This is my blog post. Here is the image ![](my_image.png) and here is my [pdf](my_file.pdf)``` diff --git a/docs/docs/usage/version-numbers.md b/docs/_docs/usage/version-numbers.md similarity index 100% rename from docs/docs/usage/version-numbers.md rename to docs/_docs/usage/version-numbers.md diff --git a/docs/docs/usage/worksheet-mode-implementation-details.md b/docs/_docs/usage/worksheet-mode-implementation-details.md similarity index 99% rename from docs/docs/usage/worksheet-mode-implementation-details.md rename to docs/_docs/usage/worksheet-mode-implementation-details.md index d06939d810ae..17c1cc3a2430 100644 --- a/docs/docs/usage/worksheet-mode-implementation-details.md +++ b/docs/_docs/usage/worksheet-mode-implementation-details.md @@ -1,8 +1,13 @@ --- layout: doc-page title: "Worksheet Mode - Implementation details" + + + + --- + In brief, the worksheets extend the Language Server Protocol and rely on the Dotty REPL to evaluate code. diff --git a/docs/_layouts/blog-page.html b/docs/_layouts/blog-page.html index 6baad3d40ed8..c5d0fe8875e7 100644 --- a/docs/_layouts/blog-page.html +++ b/docs/_layouts/blog-page.html @@ -1,7 +1,7 @@ --- -layout: main +layout: static-site-main --- -
+

{{ page.title }}

\u000E))(.*)""") + new Regex("""(.*?)((?:\}\}\})|(?:```)|(?:\u000E\u000E))(.*)""") } diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Comments.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Comments.scala index c60187c1b8b9..261de910d4f6 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Comments.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Comments.scala @@ -4,16 +4,18 @@ package tasty.comments import scala.collection.immutable.SortedMap import scala.util.Try -import com.vladsch.flexmark.util.{ast => mdu} +import com.vladsch.flexmark.util.{ast => mdu, sequence} import com.vladsch.flexmark.{ast => mda} import com.vladsch.flexmark.formatter.Formatter import com.vladsch.flexmark.util.options.MutableDataSet import scala.quoted._ +import dotty.tools.scaladoc.tasty.comments.markdown.ExtendedFencedCodeBlock import dotty.tools.scaladoc.tasty.comments.wiki.Paragraph import dotty.tools.scaladoc.DocPart -import dotty.tools.scaladoc.tasty.SymOps +import dotty.tools.scaladoc.tasty.{ SymOpsWithLinkCache, SymOps } import collection.JavaConverters._ +import dotty.tools.scaladoc.snippets._ class Repr(val qctx: Quotes)(val sym: qctx.reflect.Symbol) @@ -65,32 +67,41 @@ case class PreparsedComment( hideImplicitConversions: List[String], shortDescription: List[String], syntax: List[String], + strippedLinesBeforeNo: Int, ) case class DokkaCommentBody(summary: Option[DocPart], body: DocPart) -abstract class MarkupConversion[T](val repr: Repr)(using DocContext) { +abstract class MarkupConversion[T](val repr: Repr)(using dctx: DocContext) { protected def stringToMarkup(str: String): T protected def markupToDokka(t: T): DocPart protected def markupToString(t: T): String protected def markupToDokkaCommentBody(t: T): DokkaCommentBody protected def filterEmpty(xs: List[String]): List[T] protected def filterEmpty(xs: SortedMap[String, String]): SortedMap[String, T] + protected def processSnippets(t: T, preparsed: PreparsedComment): T + + lazy 
val snippetChecker = dctx.snippetChecker val qctx: repr.qctx.type = if repr == null then null else repr.qctx // TODO why we do need null? val owner: qctx.reflect.Symbol = if repr == null then null.asInstanceOf[qctx.reflect.Symbol] else repr.sym + private given qctx.type = qctx + + lazy val srcPos = if owner == qctx.reflect.defn.RootClass then { + val sourceFile = dctx.args.rootDocPath.map(p => dotty.tools.dotc.util.SourceFile(dotty.tools.io.AbstractFile.getFile(p), scala.io.Codec.UTF8)) + sourceFile.fold(dotty.tools.dotc.util.NoSourcePosition)(sf => dotty.tools.dotc.util.SourcePosition(sf, dotty.tools.dotc.util.Spans.NoSpan)) + } else owner.pos.get.asInstanceOf[dotty.tools.dotc.util.SrcPos] - object SymOps extends SymOps[qctx.type](qctx) - export SymOps.dri - export SymOps.driInContextOfInheritingParent + object SymOpsWithLinkCache extends SymOpsWithLinkCache + export SymOpsWithLinkCache._ + import SymOps._ def resolveLink(queryStr: String): DocLink = if SchemeUri.matches(queryStr) then DocLink.ToURL(queryStr) else QueryParser(queryStr).tryReadQuery() match case Left(err) => - // TODO convert owner.pos to get to the comment, add stack trace - report.warning(s"Unable to parse query `$queryStr`: ${err.getMessage}") + report.warning(s"Unable to parse query `$queryStr`: ${err.getMessage}", srcPos) val msg = s"Unable to parse query: ${err.getMessage}" DocLink.UnresolvedDRI(queryStr, msg) case Right(query) => @@ -103,12 +114,11 @@ abstract class MarkupConversion[T](val repr: Repr)(using DocContext) { case None => val txt = s"No DRI found for query" val msg = s"$txt: $queryStr" - // TODO change to the commented-out version when we'll get rid of the warnings in stdlib - // report.warning( - // msg, - // owner.pos.get.asInstanceOf[dotty.tools.dotc.util.SrcPos], - // ) - report.inform(msg) + + if (!summon[DocContext].args.noLinkWarnings) then + + report.warning(msg, srcPos) + DocLink.UnresolvedDRI(queryStr, txt) private val SchemeUri = """[a-z]+:.*""".r @@ -120,8 +130,24 @@ 
abstract class MarkupConversion[T](val repr: Repr)(using DocContext) { case _ => None } + def snippetCheckingFunc: qctx.reflect.Symbol => SnippetChecker.SnippetCheckingFunc = + (s: qctx.reflect.Symbol) => { + val path = s.source.map(_.path) + val pathBasedArg = dctx.snippetCompilerArgs.get(path) + val scDataCollector = SnippetCompilerDataCollector[qctx.type](qctx) + val data = scDataCollector.getSnippetCompilerData(s, s) + val sourceFile = scDataCollector.getSourceFile(s) + (str: String, lineOffset: SnippetChecker.LineOffset, argOverride: Option[SCFlags]) => { + val arg = argOverride.fold(pathBasedArg)(pathBasedArg.overrideFlag(_)) + val res = snippetChecker.checkSnippet(str, Some(data), arg, lineOffset, sourceFile) + res.filter(r => !r.isSuccessful).foreach(_.reportMessages()(using compilerContext)) + res + } + } + final def parse(preparsed: PreparsedComment): Comment = - val body = markupToDokkaCommentBody(stringToMarkup(preparsed.body)) + val markup = stringToMarkup(preparsed.body) + val body = markupToDokkaCommentBody(processSnippets(markup, preparsed)) Comment( body = body.body, short = body.summary, @@ -146,7 +172,7 @@ abstract class MarkupConversion[T](val repr: Repr)(using DocContext) { ) } -class MarkdownCommentParser(repr: Repr)(using DocContext) +class MarkdownCommentParser(repr: Repr)(using dctx: DocContext) extends MarkupConversion[mdu.Node](repr) { def stringToMarkup(str: String) = @@ -172,6 +198,9 @@ class MarkdownCommentParser(repr: Repr)(using DocContext) xs.view.mapValues(_.trim) .filterNot { case (_, v) => v.isEmpty } .mapValues(stringToMarkup).to(SortedMap) + + def processSnippets(root: mdu.Node, preparsed: PreparsedComment): mdu.Node = + FlexmarkSnippetProcessor.processSnippets(root, Some(preparsed), snippetCheckingFunc(owner), withContext = true) } class WikiCommentParser(repr: Repr)(using DocContext) @@ -187,7 +216,7 @@ class WikiCommentParser(repr: Repr)(using DocContext) private def flatten(b: wiki.Inline): String = b match case 
wiki.Text(t) => t case wiki.Italic(t) => flatten(t) - case wiki.Bold(t) =>flatten(t) + case wiki.Bold(t) => flatten(t) case wiki.Underline(t) => flatten(t) case wiki.Superscript(t) => flatten(t) case wiki.Subscript(t) => flatten(t) @@ -206,6 +235,7 @@ class WikiCommentParser(repr: Repr)(using DocContext) case wiki.OrderedList(elems, _) => elems.headOption.fold("")(flatten) case wiki.DefinitionList(items) => items.headOption.fold("")(e => flatten(e._1)) case wiki.HorizontalRule => "" + case wiki.Table(header, columns, rows) => (header +: rows).flatMap(_.cells).flatMap(_.blocks).map(flatten).mkString def markupToString(str: wiki.Body) = str.blocks.headOption.fold("")(flatten) @@ -225,3 +255,7 @@ class WikiCommentParser(repr: Repr)(using DocContext) def filterEmpty(xs: SortedMap[String,String]) = xs.view.mapValues(stringToMarkup).to(SortedMap) .filterNot { case (_, v) => v.blocks.isEmpty } + + def processSnippets(root: wiki.Body, preparsed: PreparsedComment): wiki.Body = + // Currently not supported + root diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MemberLookup.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MemberLookup.scala index 93aafa7cb80a..b2c4e1bdcac4 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MemberLookup.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/MemberLookup.scala @@ -1,28 +1,29 @@ package dotty.tools.scaladoc -package tasty.comments +package tasty +package comments import scala.quoted._ trait MemberLookup { def memberLookupResult(using Quotes)( - symbol: quotes.reflect.Symbol, + symbol: reflect.Symbol, label: String, - inheritingParent: Option[quotes.reflect.Symbol] = None - ): (quotes.reflect.Symbol, String, Option[quotes.reflect.Symbol]) = + inheritingParent: Option[reflect.Symbol] = None + ): (reflect.Symbol, String, Option[reflect.Symbol]) = (symbol, label, inheritingParent) def lookup(using Quotes, DocContext)( query: Query, - owner: quotes.reflect.Symbol, - ): 
Option[(quotes.reflect.Symbol, String, Option[quotes.reflect.Symbol])] = lookupOpt(query, Some(owner)) + owner: reflect.Symbol, + ): Option[(reflect.Symbol, String, Option[reflect.Symbol])] = lookupOpt(query, Some(owner)) def lookupOpt(using Quotes, DocContext)( query: Query, - ownerOpt: Option[quotes.reflect.Symbol], - ): Option[(quotes.reflect.Symbol, String, Option[quotes.reflect.Symbol])] = + ownerOpt: Option[reflect.Symbol], + ): Option[(reflect.Symbol, String, Option[reflect.Symbol])] = try - import quotes.reflect._ + import reflect._ def nearestClass(sym: Symbol): Symbol = if sym.isClassDef then sym else nearestClass(sym.owner) @@ -85,13 +86,13 @@ trait MemberLookup { res catch case e: Exception => - // TODO (https://github.com/lampepfl/scala3doc/issues/238): proper reporting - val msg = s"Unable to find a link for ${query} ${ownerOpt.fold("")(o => "in " + o.name)}" - report.warn(msg, e) + if (!summon[DocContext].args.noLinkWarnings) then + val msg = s"Unable to find a link for ${query} ${ownerOpt.fold("")(o => "in " + o.name)}" + report.warn(msg, e) None - private def hackMembersOf(using Quotes)(rsym: quotes.reflect.Symbol) = { - import quotes.reflect._ + private def hackMembersOf(using Quotes)(rsym: reflect.Symbol) = { + import reflect._ import dotty.tools.dotc given dotc.core.Contexts.Context = quotes.asInstanceOf[scala.quoted.runtime.impl.QuotesImpl].ctx val sym = rsym.asInstanceOf[dotc.core.Symbols.Symbol] @@ -103,19 +104,21 @@ trait MemberLookup { members.asInstanceOf[Iterator[Symbol]] } - private def hackIsNotAbsent(using Quotes)(rsym: quotes.reflect.Symbol) = { + private def hackIsNotAbsent(using Quotes)(rsym: reflect.Symbol) = import dotty.tools.dotc given dotc.core.Contexts.Context = quotes.asInstanceOf[scala.quoted.runtime.impl.QuotesImpl].ctx val sym = rsym.asInstanceOf[dotc.core.Symbols.Symbol] // note: Predef has .info = NoType for some reason - sym.isCompleted && sym.info.exists - } + val iorc = sym.infoOrCompleter + iorc match + case _: 
dotc.core.SymDenotations.ModuleCompleter | dotc.core.SymDenotations.NoCompleter | dotc.core.Types.NoType | _: dotc.core.NoLoader => false + case _ => true private def localLookup(using Quotes)( sel: MemberLookup.Selector, - owner: quotes.reflect.Symbol - ): Iterator[quotes.reflect.Symbol] = { - import quotes.reflect._ + owner: reflect.Symbol + ): Iterator[reflect.Symbol] = { + import reflect._ def findMatch(syms: Iterator[Symbol]): Iterator[Symbol] = { def matches(s: Symbol): Boolean = @@ -148,10 +151,7 @@ trait MemberLookup { } if owner.isPackageDef then - findMatch(hackMembersOf(owner).flatMap { - s => - (if s.name.endsWith("package$") then hackMembersOf(s) else Iterator.empty) ++ Iterator(s) - }) + findMatch(hackMembersOf(owner)) else owner.tree match { case tree: TypeDef => @@ -171,9 +171,9 @@ trait MemberLookup { } private def downwardLookup(using Quotes)( - query: List[String], owner: quotes.reflect.Symbol - ): Option[(quotes.reflect.Symbol, Option[quotes.reflect.Symbol])] = { - import quotes.reflect._ + query: List[String], owner: reflect.Symbol + ): Option[(reflect.Symbol, Option[reflect.Symbol])] = { + import reflect._ query match { case Nil => None case q :: Nil => @@ -191,9 +191,9 @@ trait MemberLookup { res match { case None => None case Some(sym) => - val externalOwner: Option[quotes.reflect.Symbol] = + val externalOwner: Option[reflect.Symbol] = if owner eq sym.owner then None - else if owner.flags.is(Flags.Module) then Some(owner.moduleClass) + else if owner.flags.is(Flags.Module) && !owner.flags.is(Flags.Package) then Some(owner.moduleClass) else if owner.isClassDef then Some(owner) else None Some(sym -> externalOwner) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Preparser.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Preparser.scala index f62b9983584a..89c538d8d32d 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Preparser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/Preparser.scala @@ -32,14 
+32,16 @@ object Preparser { tags: Map[TagKey, List[String]], lastTagKey: Option[TagKey], remaining: List[String], - inCodeBlock: Boolean - ): PreparsedComment = remaining match { + inCodeBlock: Boolean, + )(using strippedLinesBeforeNo: Int = 0): PreparsedComment = remaining match { case CodeBlockStartRegex(before, marker, after) :: ls if !inCodeBlock => - if (!before.trim.isEmpty && !after.trim.isEmpty) + if (!before.trim.isEmpty && !after.trim.isEmpty && marker == "```") + go(docBody, tags, lastTagKey, before :: (marker + after) :: ls, inCodeBlock = false) + else if (!before.trim.isEmpty && !after.trim.isEmpty) go(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = false) else if (!before.trim.isEmpty) go(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = false) - else if (!after.trim.isEmpty) + else if (!after.trim.isEmpty && marker != "```") go(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = true) else lastTagKey match { case Some(key) => @@ -50,7 +52,7 @@ object Preparser { } go(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = true) case None => - go(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = true) + go(docBody append endOfLine append (marker + after), tags, lastTagKey, ls, inCodeBlock = true) } case CodeBlockEndRegex(before, marker, after) :: ls => @@ -106,7 +108,7 @@ object Preparser { case line :: ls => if docBody.length > 0 then docBody.append(endOfLine) docBody.append(line) - go(docBody, tags, lastTagKey, ls, inCodeBlock) + go(docBody, tags, lastTagKey, ls, inCodeBlock)(using strippedLinesBeforeNo + (if line.isEmpty && docBody.length == 0 then 1 else 0)) case Nil => @@ -138,7 +140,7 @@ object Preparser { bodyTags.keys.toSeq flatMap { case stk: SymbolTagKey if (stk.name == key.name) => Some(stk) case stk: SimpleTagKey if (stk.name == key.name) => - // dottydoc.println(s"$span: tag '@${stk.name}' must be followed by a symbol name") + // 
scaladoc.println(s"$span: tag '@${stk.name}' must be followed by a symbol name") None case _ => None } @@ -146,7 +148,7 @@ object Preparser { for (key <- keys) yield { val bs = (bodyTags remove key).get // if (bs.length > 1) - // dottydoc.println(s"$span: only one '@${key.name}' tag for symbol ${key.symbol} is allowed") + // scaladoc.println(s"$span: only one '@${key.name}' tag for symbol ${key.symbol} is allowed") (key.symbol, bs.head) } SortedMap.empty[String, String] ++ pairs @@ -175,6 +177,7 @@ object Preparser { hideImplicitConversions = allTags(SimpleTagKey("hideImplicitConversion")), shortDescription = allTags(SimpleTagKey("shortDescription")), syntax = allTags(SimpleTagKey("syntax")), + strippedLinesBeforeNo = strippedLinesBeforeNo ) cmt diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/DocFlexmarkExtension.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/DocFlexmarkExtension.scala index 9cd0e0653898..b849da014780 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/DocFlexmarkExtension.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/DocFlexmarkExtension.scala @@ -10,13 +10,23 @@ import com.vladsch.flexmark.ext.wikilink.internal.WikiLinkLinkRefProcessor import com.vladsch.flexmark.util.ast._ import com.vladsch.flexmark.util.options._ import com.vladsch.flexmark.util.sequence.BasedSequence +import com.vladsch.flexmark._ +import dotty.tools.scaladoc.snippets._ +import scala.collection.JavaConverters._ class DocLinkNode( val target: DocLink, val body: String, seq: BasedSequence - ) extends WikiNode(seq, false, false, false, false) +) extends WikiNode(seq, false, false, false, false) + +case class ExtendedFencedCodeBlock( + name: Option[String], + codeBlock: ast.FencedCodeBlock, + compilationResult: Option[SnippetCompilationResult], + hasContext: Boolean +) extends BlankLine(codeBlock.getContentChars()) class DocFlexmarkParser(resolveLink: String => DocLink) extends 
Parser.ParserExtension: @@ -55,7 +65,9 @@ case class DocFlexmarkRenderer(renderLink: (DocLink, String) => String) object Render extends NodeRenderer: override def getNodeRenderingHandlers: JSet[NodeRenderingHandler[_]] = - JSet(new NodeRenderingHandler(classOf[DocLinkNode], Handler)) + JSet( + new NodeRenderingHandler(classOf[DocLinkNode], Handler), + ) object Factory extends NodeRendererFactory: override def create(options: DataHolder): NodeRenderer = Render @@ -65,5 +77,10 @@ case class DocFlexmarkRenderer(renderLink: (DocLink, String) => String) object DocFlexmarkRenderer: def render(node: Node)(renderLink: (DocLink, String) => String) = - val opts = MarkdownParser.mkMarkdownOptions(Seq(DocFlexmarkRenderer(renderLink))) + val opts = MarkdownParser.mkMarkdownOptions( + Seq( + DocFlexmarkRenderer(renderLink), + SnippetRenderingExtension + ) + ) HtmlRenderer.builder(opts).build().render(node) \ No newline at end of file diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderer.scala new file mode 100644 index 000000000000..9c95ae06d9ee --- /dev/null +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderer.scala @@ -0,0 +1,148 @@ +package dotty.tools.scaladoc +package tasty.comments.markdown + +import com.vladsch.flexmark.html._ +import util.HTML._ + +import dotty.tools.scaladoc.snippets._ +import dotty.tools.scaladoc.util.HTML._ + +case class SnippetLine(content: String, lineNo: Int, classes: Set[String] = Set.empty, messages: Seq[String] = Seq.empty, attributes: Map[String, String] = Map.empty): + def withClass(cls: String) = this.copy(classes = classes + cls) + def withAttribute(name: String, value: String) = this.copy(attributes = attributes.updated(name, value)) + private def attributesToString: String = attributes.updated("id", lineNo).map((key, value) => s"""$key="$value"""").mkString(" ") + def toHTML = + val label = if 
messages.nonEmpty then s"""label="${messages.map(_.escapeReservedTokens).mkString("\n")}"""" else "" + s"""$content""" + +object SnippetRenderer: + val hiddenStartSymbol = "//{" + val hiddenEndSymbol = "//}" + + val importedStartSymbol = "//{i" + val importedEndSymbol = "//i}" + val importedRegex = """\/\/\{i:(.*)""".r + + private def compileMessageCSSClass(msg: SnippetCompilerMessage) = msg.level match + case MessageLevel.Info => "snippet-info" + case MessageLevel.Warning => "snippet-warn" + case MessageLevel.Error => "snippet-error" + case MessageLevel.Debug => "snippet-debug" + + private def cutBetweenSymbols[A]( + startSymbol: String, + endSymbol: String, + snippetLines: Seq[SnippetLine] + )( + f: (Seq[SnippetLine], Seq[SnippetLine], Seq[SnippetLine]) => A + ): Option[A] = + for { + startIdx <- snippetLines.zipWithIndex.find(_._1.content.contains(startSymbol)).map(_._2) + endIdx <- snippetLines.zipWithIndex.find(_._1.content.contains(endSymbol)).map(_._2) + (tmp, end) = snippetLines.splitAt(endIdx+1) + (begin, mid) = tmp.splitAt(startIdx) + } yield f(begin, mid, end) + + private def wrapImportedSection(snippetLines: Seq[SnippetLine]): Seq[SnippetLine] = + val mRes = cutBetweenSymbols(importedStartSymbol, importedEndSymbol, snippetLines) { + case (begin, mid, end) => + val name = importedRegex.findFirstMatchIn(mid.head.content).fold("")(_.group(1)) + begin ++ mid.drop(1).dropRight(1).map(_.withClass("hideable").withClass("include").withAttribute("name", name)) ++ wrapImportedSection(end) + } + mRes.getOrElse(snippetLines) + + private def wrapHiddenSymbols(snippetLines: Seq[SnippetLine]): Seq[SnippetLine] = + val mRes = cutBetweenSymbols(hiddenStartSymbol, hiddenEndSymbol, snippetLines) { + case (begin, mid, end) => + begin ++ mid.drop(1).dropRight(1).map(_.withClass("hideable")) ++ wrapHiddenSymbols(end) + } + mRes.getOrElse(snippetLines) + + private def wrapCommonIndent(snippetLines: Seq[SnippetLine]): Seq[SnippetLine] = + val nonHiddenSnippetLines = 
snippetLines.filter(l => !l.classes.contains("hideable")) + nonHiddenSnippetLines.headOption.map(_.content.takeWhile(_ == ' ')).map { prefix => + val maxCommonIndent = nonHiddenSnippetLines.foldLeft(prefix) { (currPrefix, elem) => + if elem.content.startsWith(currPrefix) then currPrefix else elem.content.takeWhile(_ == ' ') + } + snippetLines.map { line => + if line.classes.contains("hideable") || maxCommonIndent.size == 0 then line + else line.copy(content = span(cls := "hideable")(maxCommonIndent).toString + line.content.stripPrefix(maxCommonIndent)) + } + }.getOrElse(snippetLines) + + private def wrapLineInBetween(startSymbol: Option[String], endSymbol: Option[String], line: SnippetLine): SnippetLine = + val startIdx = startSymbol.map(s => line.content.indexOf(s)) + val endIdx = endSymbol.map(s => line.content.indexOf(s)) + (startIdx, endIdx) match + case (Some(idx), None) => + val (code, comment) = line.content.splitAt(idx) + comment match + case _ if code.forall(_.isWhitespace) => + line.withClass("hideable") + case _ if comment.last == '\n' => + line.copy(content = code + s"""${comment.dropRight(1)}${"\n"}""") + case _ => + line.copy(content = code + s"""$comment""") + case (None, Some(idx)) => + val (comment, code) = line.content.splitAt(idx+endSymbol.get.size) + comment match + case _ if code.forall(_.isWhitespace) => + line.withClass("hideable") + case _ => + line.copy(content = s"""$comment""" + code) + case (Some(startIdx), Some(endIdx)) => + val (tmp, end) = line.content.splitAt(endIdx+endSymbol.get.size) + val (begin, comment) = tmp.splitAt(startIdx) + line.copy(content = begin + s"""$comment""" + end) + case _ => line + + private def wrapCodeLines(codeLines: Seq[String]): Seq[SnippetLine] = + val snippetLines = codeLines.zipWithIndex.map { + case (content, idx) => SnippetLine(content.escapeReservedTokens, idx) + } + wrapImportedSection + .andThen(wrapHiddenSymbols) + .andThen(wrapCommonIndent) + .apply(snippetLines) + + private def 
addCompileMessages(messages: Seq[SnippetCompilerMessage])(codeLines: Seq[SnippetLine]): Seq[SnippetLine] = + val messagesDict = messages.filter(_.position.nonEmpty).groupBy(_.position.get.relativeLine).toMap[Int, Seq[SnippetCompilerMessage]] + codeLines.map { line => + messagesDict.get(line.lineNo) match + case None => line + case Some(messages) => + val classes = List( + messages.find(_.level == MessageLevel.Error).map(compileMessageCSSClass), + messages.find(_.level == MessageLevel.Warning).map(compileMessageCSSClass), + messages.find(_.level == MessageLevel.Info).map(compileMessageCSSClass) + ).flatten + line.copy(classes = line.classes ++ classes.toSet ++ Set("tooltip"), messages = messages.map(_.message)) + } + + private def messagesHTML(messages: Seq[SnippetCompilerMessage]): String = + if messages.isEmpty + then "" + else + val content = messages + .map { msg => + s"""${msg.message}""" + } + .mkString("
") + s"""
$content""" + + private def snippetLabel(name: String): String = div(cls := "snippet-meta")( + div(cls := "snippet-label")(name) + ).toString + + def renderSnippetWithMessages(snippetName: Option[String], codeLines: Seq[String], messages: Seq[SnippetCompilerMessage], hasContext: Boolean): String = + val transformedLines = wrapCodeLines.andThen(addCompileMessages(messages)).apply(codeLines).map(_.toHTML) + val codeHTML = s"""${transformedLines.mkString("")}""" + s"""
$codeHTML
${snippetName.fold("")(snippetLabel(_))}
""" + + def renderSnippetWithMessages(node: ExtendedFencedCodeBlock): String = + renderSnippetWithMessages( + node.name, + node.codeBlock.getContentChars.toString.split("\n").map(_ + "\n").toSeq, + node.compilationResult.toSeq.flatMap(_.messages), + node.hasContext + ) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderingExtension.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderingExtension.scala new file mode 100644 index 000000000000..74d4f461dc7b --- /dev/null +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/markdown/SnippetRenderingExtension.scala @@ -0,0 +1,47 @@ +package dotty.tools.scaladoc +package tasty.comments.markdown + +import dotty.tools.scaladoc.snippets._ + +import com.vladsch.flexmark.html._ +import com.vladsch.flexmark.html.renderer._ +import com.vladsch.flexmark.parser._ +import com.vladsch.flexmark.ext.wikilink._ +import com.vladsch.flexmark.ext.wikilink.internal.WikiLinkLinkRefProcessor +import com.vladsch.flexmark.util.ast._ +import com.vladsch.flexmark.util.options._ +import com.vladsch.flexmark.util.sequence.BasedSequence +import com.vladsch.flexmark._ +import com.vladsch.flexmark.ast.FencedCodeBlock + +/** + * SnippetRenderingExtension is responsible for running an analysis for scala codeblocks in the static documentation/scaladoc comments. + * For each codeblock we run compiler to check whether snippet works in the newest scala version and to produce rich html codeblocks with + * compiler warnings/errors for IDE-like live experience. 
+ */ +object SnippetRenderingExtension extends HtmlRenderer.HtmlRendererExtension: + def rendererOptions(opt: MutableDataHolder): Unit = () + object ExtendedFencedCodeBlockHandler extends CustomNodeRenderer[ExtendedFencedCodeBlock]: + override def render(node: ExtendedFencedCodeBlock, c: NodeRendererContext, html: HtmlWriter): Unit = + html.raw( + SnippetRenderer.renderSnippetWithMessages(node) + ) + + object FencedCodeBlockHandler extends CustomNodeRenderer[FencedCodeBlock]: + override def render(node: FencedCodeBlock, c: NodeRendererContext, html: HtmlWriter): Unit = + html.raw("""
""") + c.delegateRender() + html.raw("""
""") + + object Render extends NodeRenderer: + override def getNodeRenderingHandlers: JSet[NodeRenderingHandler[_]] = + JSet( + new NodeRenderingHandler(classOf[ExtendedFencedCodeBlock], ExtendedFencedCodeBlockHandler), + new NodeRenderingHandler(classOf[FencedCodeBlock], FencedCodeBlockHandler) + ) + + object Factory extends NodeRendererFactory: + override def create(options: DataHolder): NodeRenderer = Render + + def extend(htmlRendererBuilder: HtmlRenderer.Builder, tpe: String): Unit = + htmlRendererBuilder.nodeRendererFactory(Factory) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/wiki/Entities.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/wiki/Entities.scala index 5cec60977813..f11e8095afe7 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/wiki/Entities.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/wiki/Entities.scala @@ -50,6 +50,15 @@ final case class UnorderedList(items: Seq[Block]) extends Block final case class OrderedList(items: Seq[Block], style: String) extends Block final case class DefinitionList(items: SortedMap[Inline, Block]) extends Block object HorizontalRule extends Block +final case class Table(header: Row, columnOptions: Seq[ColumnOption], rows: Seq[Row]) extends Block +final case class ColumnOption(option: 'L' | 'C' | 'R') +object ColumnOption { + val ColumnOptionLeft = ColumnOption('L') + val ColumnOptionCenter = ColumnOption('C') + val ColumnOptionRight = ColumnOption('R') +} +final case class Row(cells: Seq[Cell]) +final case class Cell(blocks: Seq[Block]) /** An section of text inside a block, possibly with formatting. 
*/ sealed abstract class Inline extends WikiDocElement: diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/wiki/Parser.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/wiki/Parser.scala index 2436ca4e5239..a844f3384793 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/comments/wiki/Parser.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/comments/wiki/Parser.scala @@ -2,6 +2,7 @@ package dotty.tools.scaladoc package tasty.comments.wiki import scala.collection.mutable +import scala.annotation.tailrec import dotty.tools.scaladoc.tasty.comments.Regexes._ @@ -35,6 +36,8 @@ final class Parser( hrule() else if (checkList) listBlock() + else if (checkTableRow) + table() else { para() } @@ -127,6 +130,282 @@ final class Parser( HorizontalRule } + + /** Starts and end with a cell separator matching the minimal row || and all other possible rows */ + private val TableRow = """^\|.*\|$""".r + private val TableCellStart = "|" + + /* Checks for a well-formed table row */ + private def checkTableRow = { + check(TableCellStart) && { + val newlineIdx = buffer.indexOf('\n', offset) + newlineIdx != -1 && + TableRow.findFirstIn(buffer.substring(offset, newlineIdx)).isDefined + } + } + + /** {{{ + * table ::= headerRow '\n' delimiterRow '\n' dataRows '\n' + * content ::= inline-content + * row ::= '|' { content '|' }+ + * headerRow ::= row + * dataRows ::= row* + * align ::= ':' '-'+ | '-'+ | '-'+ ':' | ':' '-'+ ':' + * delimiterRow :: = '|' { align '|' }+ + * }}} + */ + def table(): Block = { + /* Helpers */ + + def peek(tag: String): Unit = { + val peek: String = buffer.substring(offset) + val limit = 60 + val limitedPeek = peek.substring(0, limit min peek.length) + println(s"peek: $tag: '$limitedPeek'") + } + + /* Accumulated state */ + + var header: Option[Row] = None + + val rows = mutable.ListBuffer.empty[Row] + + val cells = mutable.ListBuffer.empty[Cell] + + def finalizeCells(): Unit = { + if (cells.nonEmpty) { + rows += Row(cells.toList) + } + 
cells.clear() + } + + def finalizeHeaderCells(): Unit = { + if (cells.nonEmpty) { + if (header.isDefined) { + reportError("more than one table header") + } else { + header = Some(Row(cells.toList)) + } + } + cells.clear() + } + + val escapeChar = "\\" + + /* Poor man's negative lookbehind */ + def checkInlineEnd = + (check(TableCellStart) && !check(escapeChar, -1)) || check("\n") + + def decodeEscapedCellMark(text: String) = text.replace(escapeChar + TableCellStart, TableCellStart) + + def isEndOfText = char == endOfText + + //def isNewline = char == endOfLine + + //def skipNewline() = jump(endOfLine) + + def isStartMarkNewline = check(TableCellStart + endOfLine) + + def skipStartMarkNewline() = jump(TableCellStart + endOfLine) + + def isStartMark = check(TableCellStart) + + def skipStartMark() = jump(TableCellStart) + + def contentNonEmpty(content: Inline) = content != Text("") + + /* Parse cells of a table. + * @param cellStartMark The char indicating the start or end of a cell + * @param finalizeRow Function to invoke when the row has been fully parsed + */ + def parseCells(cellStartMark: String, finalizeRow: () => Unit): Unit = { + def jumpCellStartMark() = { + if (!jump(cellStartMark)) { + peek(s"Expected $cellStartMark") + sys.error(s"Precondition violated: Expected $cellStartMark.") + } + } + + val startPos = offset + + jumpCellStartMark() + + val content = Paragraph(getInline(isInlineEnd = checkInlineEnd, textTransform = decodeEscapedCellMark)) + + parseCells0(content :: Nil, finalizeRow, startPos, offset) + } + + // Continue parsing a table row. + // + // After reading inline content the following conditions will be encountered, + // + // Case : Next Chars + // .................. + // 1 : end-of-text + // 2 : '|' '\n' + // 3 : '|' + // 4 : '\n' + // + // Case 1. + // State : End of text + // Action: Store the current contents, close the row, report warning, stop parsing. + // + // Case 2. 
+ // State : The cell separator followed by a newline + // Action: Store the current contents, skip the cell separator and newline, close the row, stop parsing. + // + // Case 3. + // State : The cell separator not followed by a newline + // Action: Store the current contents, skip the cell separator, continue parsing the row. + // + @tailrec def parseCells0( + contents: List[Block], + finalizeRow: () => Unit, + progressPreParse: Int, + progressPostParse: Int + ): Unit = { + + def storeContents() = cells += Cell(contents.reverse) + + val startPos = offset + + // The ordering of the checks ensures the state checks are correct. + if (progressPreParse == progressPostParse) { + peek("no-progress-table-row-parsing") + sys.error("No progress while parsing table row") + } else if (isEndOfText) { + // peek("1: end-of-text") + // Case 1 + storeContents() + finalizeRow() + reportError("unclosed table row") + } else if (isStartMarkNewline) { + // peek("2: start-mark-new-line/before") + // Case 2 + storeContents() + finalizeRow() + skipStartMarkNewline() + // peek("2: start-mark-new-line/after") + } else if (isStartMark) { + // peek("3: start-mark") + // Case 3 + storeContents() + skipStartMark() + val content = getInline(isInlineEnd = checkInlineEnd, textTransform = decodeEscapedCellMark) + // TrailingCellsEmpty produces empty content + val accContents = if (contentNonEmpty(content)) Paragraph(content) :: Nil else Nil + parseCells0(accContents, finalizeRow, startPos, offset) + } else { + // Case π√ⅈ + // When the impossible happens leave some clues. 
+ reportError("unexpected table row markdown") + peek("parseCell0") + storeContents() + finalizeRow() + } + } + + /* Parsing */ + + jumpWhitespace() + + parseCells(TableCellStart, () => finalizeHeaderCells()) + + while (checkTableRow) { + val initialOffset = offset + + parseCells(TableCellStart, () => finalizeCells()) + + /* Progress should always be made */ + if (offset == initialOffset) { + peek("no-progress-table-parsing") + sys.error("No progress while parsing table") + } + } + + /* Finalize */ + + /* Structural consistency checks and coercion */ + + // https://github.github.com/gfm/#tables-extension- + // TODO: The header row must match the delimiter row in the number of cells. If not, a table will not be recognized: + // TODO: Break at following block level element: The table is broken at the first empty line, or beginning of another block-level structure: + // TODO: Do not return a table when: The header row must match the delimiter row in the number of cells. If not, a table will not be recognized + + if (cells.nonEmpty) { + reportError(s"Parsed and unused content: $cells") + } + assert(header.isDefined, "table header was not parsed") + val enforcedCellCount = header.get.cells.size + + def applyColumnCountConstraint(row: Row, defaultCell: Cell, rowType: String): Row = { + if (row.cells.size == enforcedCellCount) + row + else if (row.cells.size > enforcedCellCount) { + val excess = row.cells.size - enforcedCellCount + reportError(s"Dropping $excess excess table $rowType cells from row.") + Row(row.cells.take(enforcedCellCount)) + } else { + val missing = enforcedCellCount - row.cells.size + Row(row.cells ++ List.fill(missing)(defaultCell)) + } + } + + // TODO: Abandon table parsing when the delimiter is missing instead of fixing and continuing. 
+ val delimiterRow :: dataRows = { + if (rows.nonEmpty) rows.toList + else { + reportError("Fixing missing delimiter row") + Row(Cell(Paragraph(Text("-")) :: Nil) :: Nil) :: Nil + } + } + + if (delimiterRow.cells.isEmpty) sys.error("TODO: Handle table with empty delimiter row") + + val constrainedDelimiterRow = applyColumnCountConstraint(delimiterRow, delimiterRow.cells(0), "delimiter") + + val constrainedDataRows = dataRows.map(applyColumnCountConstraint(_, Cell(Nil), "data")) + + /* Convert the row following the header row to column options */ + + val leftAlignmentPattern = "^:?-++$".r + val centerAlignmentPattern = "^:-++:$".r + val rightAlignmentPattern = "^-++:$".r + + import ColumnOption._ + /* Encourage user to fix by defaulting to least ignorable fix. */ + val defaultColumnOption = ColumnOptionRight + val columnOptions = constrainedDelimiterRow.cells.map { + alignmentSpecifier => + alignmentSpecifier.blocks match { + // TODO: Parse the second row without parsing inline markdown + // TODO: Save pos when delimiter row is parsed and use here in reported errors + case Paragraph(Text(as)) :: Nil => + as.trim match { + case leftAlignmentPattern(_*) => ColumnOptionLeft + case centerAlignmentPattern(_*) => ColumnOptionCenter + case rightAlignmentPattern(_*) => ColumnOptionRight + case x => + reportError(s"Fixing invalid column alignment: $x") + defaultColumnOption + } + case x => + reportError(s"Fixing invalid column alignment: $x") + defaultColumnOption + } + } + + if (check("\n", -1)) { + prevChar() + } else { + peek("expected-newline-missing") + sys.error("table parsing left buffer in unexpected state") + } + + blockEnded("table") + Table(header.get, columnOptions, constrainedDataRows) + } + /** {{{ para ::= inline '\n' }}} */ def para(): Block = { val p = @@ -185,67 +464,67 @@ final class Parser( list mkString } - def getInline(isInlineEnd: => Boolean): Inline = { + def getInline(isInlineEnd: => Boolean, textTransform: String => String = identity): Inline = { - 
def inline0(): Inline = { - if (char == safeTagMarker) { - val tag = htmlTag() - HtmlTag(tag.data + readHTMLFrom(tag)) - } - else if (check("'''")) bold() - else if (check("''")) italic() - else if (check("`")) monospace() - else if (check("__")) underline() - else if (check("^")) superscript() - else if (check(",,")) subscript() - else if (check("[[")) link() - else { - val str = readUntil { - char == safeTagMarker || - check("''") || - char == '`' || - check("__") || - char == '^' || - check(",,") || - check("[[") || - isInlineEnd || - checkParaEnded() || - char == endOfLine + def inline0(): Inline = { + if (char == safeTagMarker) { + val tag = htmlTag() + HtmlTag(tag.data + readHTMLFrom(tag)) + } + else if (check("'''")) bold() + else if (check("''")) italic() + else if (check("`")) monospace() + else if (check("__")) underline() + else if (check("^")) superscript() + else if (check(",,")) subscript() + else if (check("[[")) link() + else { + val str = readUntil { + char == safeTagMarker || + check("''") || + char == '`' || + check("__") || + char == '^' || + check(",,") || + check("[[") || + isInlineEnd || + checkParaEnded() || + char == endOfLine + } + Text(textTransform(str)) } - Text(str) } - } - val inlines: List[Inline] = { - val iss = mutable.ListBuffer.empty[Inline] - iss += inline0() - while (!isInlineEnd && !checkParaEnded()) { - val skipEndOfLine = if (char == endOfLine) { - nextChar() - true - } else { - false - } + val inlines: List[Inline] = { + val iss = mutable.ListBuffer.empty[Inline] + iss += inline0() + while (!isInlineEnd && !checkParaEnded()) { + val skipEndOfLine = if (char == endOfLine) { + nextChar() + true + } else { + false + } - val current = inline0() - (iss.last, current) match { - case (Text(t1), Text(t2)) if skipEndOfLine => - iss.update(iss.length - 1, Text(t1 + endOfLine + t2)) - case (i1, i2) if skipEndOfLine => - iss ++= List(Text(endOfLine.toString), i2) - case _ => iss += current + val current = inline0() + (iss.last, 
current) match { + case (Text(t1), Text(t2)) if skipEndOfLine => + iss.update(iss.length - 1, Text(t1 + endOfLine + t2)) + case (i1, i2) if skipEndOfLine => + iss ++= List(Text(endOfLine.toString), i2) + case _ => iss += current + } } + iss.toList } - iss.toList - } - inlines match { - case Nil => Text("") - case i :: Nil => i - case is => Chain(is) - } + inlines match { + case Nil => Text("") + case i :: Nil => i + case is => Chain(is) + } - } + } def htmlTag(): HtmlTag = { jump(safeTagMarker) @@ -371,6 +650,7 @@ final class Parser( checkSkipInitWhitespace('=') || checkSkipInitWhitespace("{{{") || checkList || + check(TableCellStart) || checkSkipInitWhitespace('\u003D') } offset = poff @@ -400,6 +680,10 @@ sealed class CharReader(buffer: String) { reader => final def nextChar() = offset += 1 + final def prevChar(): Unit = { + offset -= 1 + } + final def check(chars: String): Boolean = { val poff = offset val ok = jump(chars) @@ -407,6 +691,14 @@ sealed class CharReader(buffer: String) { reader => ok } + final def check(chars: String, checkOffset: Int): Boolean = { + val poff = offset + offset += checkOffset + val ok = jump(chars) + offset = poff + ok + } + def checkSkipInitWhitespace(c: Char): Boolean = { val poff = offset jumpWhitespace() diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/reflect.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/reflect.scala new file mode 100644 index 000000000000..b48519e29d28 --- /dev/null +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/reflect.scala @@ -0,0 +1,7 @@ +package dotty.tools.scaladoc +package tasty + +import scala.quoted._ + +/** Shorthand for `quotes.reflect` */ +transparent inline def reflect(using inline q: Quotes): q.reflect.type = q.reflect diff --git a/scaladoc/src/dotty/tools/scaladoc/transformers/ImplicitMembersExtensionTransformer.scala b/scaladoc/src/dotty/tools/scaladoc/transformers/ImplicitMembersExtensionTransformer.scala index 9d95f4462b78..0fa6b9c5d3cf 100644 --- 
a/scaladoc/src/dotty/tools/scaladoc/transformers/ImplicitMembersExtensionTransformer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/transformers/ImplicitMembersExtensionTransformer.scala @@ -26,7 +26,7 @@ class ImplicitMembersExtensionTransformer(using DocContext) extends(Module => Mo val MyDri = c.dri def collectApplicableMembers(source: Member): Seq[Member] = source.members.flatMap { - case m @ Member(_, _, _, Kind.Extension(ExtensionTarget(_, _, MyDri, _), _), Origin.RegularlyDefined) => + case m @ Member(_, _, _, Kind.Extension(ExtensionTarget(_, _, _, _, MyDri, _), _), Origin.RegularlyDefined) => val kind = m.kind match case d: Kind.Def => d case _ => Kind.Def(Nil, Nil) diff --git a/scaladoc/src/dotty/tools/scaladoc/transformers/InheritanceInformationTransformer.scala b/scaladoc/src/dotty/tools/scaladoc/transformers/InheritanceInformationTransformer.scala index 63774d27c1be..02e224f10cf0 100644 --- a/scaladoc/src/dotty/tools/scaladoc/transformers/InheritanceInformationTransformer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/transformers/InheritanceInformationTransformer.scala @@ -3,7 +3,7 @@ package transformers class InheritanceInformationTransformer(using DocContext) extends (Module => Module): override def apply(original: Module): Module = - val subtypes = getSupertypes(original.rootPackage).groupBy(_._1).transform((k, v) => v.map(_._2)) + val subtypes = getSupertypes(original.rootPackage).groupMap(_(0))(_(1)) original.updateMembers { m => val edges = getEdges(m.asLink.copy(kind = bareClasslikeKind(m.kind)), subtypes) val st: Seq[LinkToType] = edges.map(_._1).distinct diff --git a/scaladoc/src/dotty/tools/scaladoc/translators/FilterAttributes.scala b/scaladoc/src/dotty/tools/scaladoc/translators/FilterAttributes.scala index 0a1b62dcb4d6..1202e76a548c 100644 --- a/scaladoc/src/dotty/tools/scaladoc/translators/FilterAttributes.scala +++ b/scaladoc/src/dotty/tools/scaladoc/translators/FilterAttributes.scala @@ -20,7 +20,7 @@ object FilterAttributes: private 
def origin(m: Member): Map[String, String] = m.origin match case Origin.ImplicitlyAddedBy(name, _) => Map("implicitly" -> s"by $name") case Origin.ExtensionFrom(name, _) => Map("extension" -> s"from $name") - case Origin.ExportedFrom(name, _) => Map("export" -> s"from $name") + case Origin.ExportedFrom(Some(link)) => Map("export" -> s"from ${link.name}}") case _ => Map.empty diff --git a/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureProvider.scala b/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureProvider.scala index a2ade47bbbb2..0639b5af09bd 100644 --- a/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureProvider.scala +++ b/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureProvider.scala @@ -2,12 +2,12 @@ package dotty.tools.scaladoc package translators object ScalaSignatureProvider: - def rawSignature(documentable: Member, builder: SignatureBuilder): SignatureBuilder = - documentable.kind match + def rawSignature(documentable: Member, builder: SignatureBuilder)(kind: Kind = documentable.kind): SignatureBuilder = + kind match case Kind.Extension(_, m) => extensionSignature(documentable, m, builder) case Kind.Exported(d) => - methodSignature(documentable, d, builder) + rawSignature(documentable, builder)(d) case d: Kind.Def => methodSignature(documentable, d, builder) case Kind.Constructor(d) => @@ -22,6 +22,8 @@ object ScalaSignatureProvider: givenClassSignature(documentable, cls, builder) case Kind.Given(d: Kind.Def, _, _) => givenMethodSignature(documentable, d, builder) + case Kind.Given(Kind.Val, _, _) => + givenValSignature(documentable, builder) case cls: Kind.Class => classSignature(documentable, cls, builder) case enm: Kind.Enum => @@ -31,11 +33,11 @@ object ScalaSignatureProvider: case trt: Kind.Trait => traitSignature(documentable, trt, builder) case Kind.Val | Kind.Var | Kind.Implicit(Kind.Val, _) => - fieldSignature(documentable, documentable.kind.name, builder) + fieldSignature(documentable, kind.name, 
builder) case tpe: Kind.Type => typeSignature(tpe, documentable, builder) case Kind.Package => - builder.text("package ").name(documentable.name, documentable.dri) + builder.keyword("package ").name(documentable.name, documentable.dri) case Kind.RootPackage => builder case Kind.Unknown => @@ -43,7 +45,7 @@ object ScalaSignatureProvider: private def enumEntrySignature(member: Member, cls: Kind.Class, bdr: SignatureBuilder): SignatureBuilder = val withPrefixes: SignatureBuilder = bdr - .text("case ") + .keyword("case ") .name(member.name, member.dri) .generics(cls.typeParams) @@ -52,22 +54,22 @@ object ScalaSignatureProvider: private def enumPropertySignature(entry: Member, builder: SignatureBuilder): SignatureBuilder = val modifiedType = entry.signature.map { - case " & " => " with " + case Keyword(" & ") => Keyword(" with ") case o => o } builder - .text("case ") + .keyword("case ") .name(entry.name, entry.dri) - .text(" extends ") + .keyword(" extends ") .signature(modifiedType) private def parentsSignature(member: Member, builder: SignatureBuilder): SignatureBuilder = member.directParents match case Nil => builder case extendType :: withTypes => - val extendPart = builder.text(" extends ").signature(extendType.signature) - withTypes.foldLeft(extendPart)((bdr, tpe) => bdr.text(" with ").signature(tpe.signature)) + val extendPart = builder.keyword(" extends ").signature(extendType.signature) + withTypes.foldLeft(extendPart)((bdr, tpe) => bdr.keyword(" with ").signature(tpe.signature)) private def givenClassSignature(member: Member, cls: Kind.Class, builder: SignatureBuilder): SignatureBuilder = val prefixes = builder @@ -78,7 +80,7 @@ object ScalaSignatureProvider: member.kind match case Kind.Given(_, Some(instance), _) => prefixes - .text(": ") + .plain(": ") .signature(instance) case _ => prefixes @@ -123,16 +125,30 @@ object ScalaSignatureProvider: .generics(fun.typeParams) .functionParameters(fun.argsLists) - withSignature.text(":").text(" 
").signature(extension.signature) + withSignature.plain(":").plain(" ").signature(extension.signature) private def givenMethodSignature(method: Member, body: Kind.Def, builder: SignatureBuilder): SignatureBuilder = method.kind match case Kind.Given(_, Some(instance), _) => - builder.text("given ") + builder.keyword("given ") .name(method.name, method.dri) - .text(": ") + .generics(body.typeParams) + .functionParameters(body.argsLists) + .plain(": ") .signature(instance) case _ => - builder.text("given ").name(method.name, method.dri) + builder.keyword("given ") + .name(method.name, method.dri) + .generics(body.typeParams) + .functionParameters(body.argsLists) + + private def givenValSignature(field: Member, builder: SignatureBuilder): SignatureBuilder = field.kind match + case Kind.Given(_, Some(instance), _) => + builder.keyword("given ") + .name(field.name, field.dri) + .plain(": ") + .signature(instance) + case _ => + builder.keyword("given ").name(field.name, field.dri) private def methodSignature(method: Member, cls: Kind.Def, builder: SignatureBuilder): SignatureBuilder = val bdr = builder @@ -141,7 +157,7 @@ object ScalaSignatureProvider: .generics(cls.typeParams) .functionParameters(cls.argsLists) if !method.kind.isInstanceOf[Kind.Constructor] then - bdr.text(": ").signature(method.signature) + bdr.plain(": ").signature(method.signature) else bdr private def typeSignature(tpe: Kind.Type, typeDef: Member, builder: SignatureBuilder): SignatureBuilder = @@ -150,25 +166,25 @@ object ScalaSignatureProvider: .name(typeDef.name, typeDef.dri) .generics(tpe.typeParams) if(!tpe.opaque){ - (if tpe.concreate then bdr.text(" = ") else bdr) + (if tpe.concreate then bdr.plain(" = ") else bdr) .signature(typeDef.signature) } else bdr private def givenPropertySignature(property: Member, builder: SignatureBuilder): SignatureBuilder = val bdr = builder - .text("given ") + .keyword("given ") .name(property.name, property.dri) property.kind match case Kind.Given(_, 
Some(instance), _) => - bdr.text(" as ").signature(instance) + bdr.keyword(" as ").signature(instance) case _ => bdr private def fieldSignature(member: Member, kind: String, builder: SignatureBuilder): SignatureBuilder = builder .modifiersAndVisibility(member, kind) .name(member.name, member.dri) - .text(":") - .text(" ") + .plain(":") + .plain(" ") .signature(member.signature) diff --git a/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureUtils.scala b/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureUtils.scala index 3817c70eef5a..7a5dc2310c0e 100644 --- a/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureUtils.scala +++ b/scaladoc/src/dotty/tools/scaladoc/translators/ScalaSignatureUtils.scala @@ -2,9 +2,11 @@ package dotty.tools.scaladoc package translators case class InlineSignatureBuilder(names: Signature = Nil, preName: Signature = Nil) extends SignatureBuilder: - override def text(str: String): SignatureBuilder = copy(names = str +: names) + override def plain(str: String): SignatureBuilder = copy(names = Plain(str) +: names) override def name(str: String, dri: DRI): SignatureBuilder = copy(names = Nil, preName = names) - override def driLink(text: String, dri: DRI): SignatureBuilder = copy(names = Link(text, dri) +: names) + override def tpe(text: String, dri: Option[DRI]): SignatureBuilder = copy(names = Type(text, dri) +: names) + override def keyword(str: String): SignatureBuilder = copy(names = Keyword(str) +: names) + def tpe(text: String, dri: DRI): SignatureBuilder = copy(names = Type(text, Some(dri)) +: names) override def signature(s: Signature): SignatureBuilder = copy(names = s.reverse ++ names) object InlineSignatureBuilder: @@ -12,86 +14,90 @@ object InlineSignatureBuilder: ScalaSignatureProvider.rawSignature(d, InlineSignatureBuilder()).asInstanceOf[InlineSignatureBuilder].names.reverse trait SignatureBuilder extends ScalaSignatureUtils { - def text(str: String): SignatureBuilder - def name(str: String, dri: DRI) 
= driLink(str, dri) - def driLink(text: String, dri: DRI): SignatureBuilder + def keyword(str: String): SignatureBuilder + def plain(str: String): SignatureBuilder + def name (str: String, dri: DRI): SignatureBuilder + def tpe(str: String, dri: Option[DRI]): SignatureBuilder def signature(s: Signature): SignatureBuilder = s.foldLeft(this){ - case (bld, s: String) => bld.text(s) - case (bld, Link(text: String, dri: DRI)) => bld.driLink(text, dri) + case (bld, Type(a, b)) => bld.tpe(a, b) + case (bld, Keyword(a)) => bld.keyword(a) + case (bld, Plain(a)) => bld.plain(a) } // Support properly once we rewrite signature builder - def memberName(name: String, dri: DRI) = text(name) + def memberName(name: String, dri: DRI) = plain(name) def list[E]( - elements: Seq[E], - prefix: String = "", - suffix: String = "", - separator: String = ", ", - )( - elemOp: (SignatureBuilder, E) => SignatureBuilder - ): SignatureBuilder = elements match { - case Nil => this - case head :: tail => - tail.foldLeft(elemOp(text(prefix), head))((b, e) => elemOp(b.text(separator), e)).text(suffix) - } + elements: Seq[E], + prefix: Signature = List(Plain("")), + suffix: Signature = List(Plain("")), + separator: Signature = List(Plain(", ")), + forcePrefixAndSuffix: Boolean = false + )( + elemOp: (SignatureBuilder, E) => SignatureBuilder + ): SignatureBuilder = elements match { + case Nil => if forcePrefixAndSuffix then signature(prefix).signature(suffix) else this + case head :: tail => + tail.foldLeft(elemOp(signature(prefix), head))((b, e) => elemOp(b.signature(separator), e)).signature(suffix) + } def annotationsBlock(d: Member): SignatureBuilder = - d.annotations.foldLeft(this){ (bdr, annotation) => bdr.buildAnnotation(annotation)} - - def annotationsInline(d: Parameter): SignatureBuilder = - d.annotations.foldLeft(this){ (bdr, annotation) => bdr.buildAnnotation(annotation) } + d.annotations.foldLeft(this){ (bdr, annotation) => bdr.buildAnnotation(annotation)} - def annotationsInline(t: 
TypeParameter): SignatureBuilder = - t.annotations.foldLeft(this){ (bdr, annotation) => bdr.buildAnnotation(annotation) } + def annotationsInline(d: Parameter): SignatureBuilder = + d.annotations.foldLeft(this){ (bdr, annotation) => bdr.buildAnnotation(annotation) } - private def buildAnnotation(a: Annotation): SignatureBuilder = - text("@").driLink(a.dri.location.split('.').last, a.dri).buildAnnotationParams(a).text(" ") + def annotationsInline(t: TypeParameter): SignatureBuilder = + t.annotations.foldLeft(this){ (bdr, annotation) => bdr.buildAnnotation(annotation) } - private def buildAnnotationParams(a: Annotation): SignatureBuilder = - if !a.params.isEmpty then - val params = a.params.filterNot { - case Annotation.LinkParameter(_, _, text) => text == "_" - case _ => false - } - list(params, "(", ")", ", "){ (bdr, param) => bdr.buildAnnotationParameter(param)} - else this + private def buildAnnotation(a: Annotation): SignatureBuilder = + tpe(s"@${a.dri.location.split('.').last}", Some(a.dri)).buildAnnotationParams(a).plain(" ") - private def addParameterName(txt: Option[String]): SignatureBuilder = txt match { - case Some(name) => this.text(s"$name = ") - case _ => this + private def buildAnnotationParams(a: Annotation): SignatureBuilder = + if !a.params.isEmpty then + val params = a.params.filterNot { + case Annotation.LinkParameter(_, _, text) => text == "_" + case _ => false } + list(params, List(Plain("(")), List(Plain(")")), List(Plain(", "))){ (bdr, param) => bdr.buildAnnotationParameter(param)} + else this - private def buildAnnotationParameter(a: Annotation.AnnotationParameter): SignatureBuilder = a match { - case Annotation.PrimitiveParameter(name, value) => - addParameterName(name).text(value) - case Annotation.LinkParameter(name, dri, text) => - addParameterName(name).driLink(text, dri) - case Annotation.UnresolvedParameter(name, value) => - addParameterName(name).text(value) - } + private def addParameterName(txt: Option[String]): SignatureBuilder = 
txt match { + case Some(name) => this.plain(s"$name = ") + case _ => this + } - def modifiersAndVisibility(t: Member, kind: String) = - val (prefixMods, suffixMods) = t.modifiers.partition(_.prefix) - val all = prefixMods.map(_.name) ++ Seq(t.visibility.asSignature) ++ suffixMods.map(_.name) + private def buildAnnotationParameter(a: Annotation.AnnotationParameter): SignatureBuilder = a match { + case Annotation.PrimitiveParameter(name, value) => + addParameterName(name).plain(value) + case Annotation.LinkParameter(name, dri, text) => + addParameterName(name).tpe(text, Some(dri)) + case Annotation.UnresolvedParameter(name, value) => + addParameterName(name).plain(value) + } - text(all.toSignatureString()).text(kind + " ") + def modifiersAndVisibility(t: Member, kind: String) = + val (prefixMods, suffixMods) = t.modifiers.partition(_.prefix) + val all = prefixMods.map(_.name) ++ Seq(t.visibility.asSignature) ++ suffixMods.map(_.name) + val filtered = all.filter(_.trim.nonEmpty) + val intermediate = if filtered.nonEmpty then keyword(filtered.toSignatureString()) else this + intermediate.keyword(kind + " ") - def generics(on: Seq[TypeParameter]) = list(on.toList, "[", "]"){ (bdr, e) => - bdr.annotationsInline(e).text(e.variance).memberName(e.name, e.dri).signature(e.signature) - } + def generics(on: Seq[TypeParameter]) = list(on.toList, List(Plain("[")), List(Plain("]"))){ (bdr, e) => + bdr.annotationsInline(e).keyword(e.variance).tpe(e.name, Some(e.dri)).signature(e.signature) + } - def functionParameters(params: Seq[ParametersList]) = - if params.isEmpty then this.text("") - else if params.size == 1 && params(0).parameters == Nil then this.text("()") - else this.list(params, separator = ""){ (bld, pList) => - bld.list(pList.parameters, s"(${pList.modifiers}", ")"){ (bld, p) => - val annotationsAndModifiers = bld.annotationsInline(p) - .text(p.modifiers) - val name = p.name.fold(annotationsAndModifiers)(annotationsAndModifiers.memberName(_, p.dri).text(": ")) - 
name.signature(p.signature) - } + def functionParameters(params: Seq[ParametersList]) = + if params.isEmpty then this.plain("") + else if params.size == 1 && params(0).parameters == Nil then this.plain("()") + else this.list(params, separator = List(Plain(""))) { (bld, pList) => + bld.list(pList.parameters, prefix = List(Plain("("), Keyword(pList.modifiers)), suffix = List(Plain(")")), forcePrefixAndSuffix = true) { (bld, p) => + val annotationsAndModifiers = bld.annotationsInline(p) + .keyword(p.modifiers) + val name = p.name.fold(annotationsAndModifiers)(annotationsAndModifiers.memberName(_, p.dri).plain(": ")) + name.signature(p.signature) } + } } trait ScalaSignatureUtils: diff --git a/scaladoc/src/dotty/tools/scaladoc/util/JSON.scala b/scaladoc/src/dotty/tools/scaladoc/util/JSON.scala index d822bf4cf898..468300db6616 100644 --- a/scaladoc/src/dotty/tools/scaladoc/util/JSON.scala +++ b/scaladoc/src/dotty/tools/scaladoc/util/JSON.scala @@ -5,6 +5,8 @@ import scala.annotation.tailrec opaque type JSON = String +def rawJSON(s: String): JSON = s + def jsonList(elems: Seq[JSON]): JSON = elems.mkString("[", ",\n", "]") def jsonObject(fields: (String, JSON)*): JSON = diff --git a/scaladoc/src/dotty/tools/scaladoc/util/escape.scala b/scaladoc/src/dotty/tools/scaladoc/util/escape.scala index 66035e76ca98..686d384337c1 100644 --- a/scaladoc/src/dotty/tools/scaladoc/util/escape.scala +++ b/scaladoc/src/dotty/tools/scaladoc/util/escape.scala @@ -1,4 +1,11 @@ package dotty.tools.scaladoc.util object Escape: - def escapeUrl(url: String) = url.replace("#","%23") \ No newline at end of file + def escapeUrl(url: String) = url + .replace("#","%23") + + def escapeFilename(filename: String) = + val escaped = filename + .replace("/", "$div") + .replace("\\", "$bslash") + if escaped != filename then escaped + "$" else escaped diff --git a/scaladoc/src/dotty/tools/scaladoc/util/html.scala b/scaladoc/src/dotty/tools/scaladoc/util/html.scala index 3c07222341b6..790891b95d99 100644 --- 
a/scaladoc/src/dotty/tools/scaladoc/util/html.scala +++ b/scaladoc/src/dotty/tools/scaladoc/util/html.scala @@ -11,7 +11,17 @@ object HTML: case class Tag(name: String): def apply(tags: TagArg*): AppliedTag = apply()(tags:_*) def apply(first: AttrArg, rest: AttrArg*): AppliedTag = apply((first +: rest):_*)() - def apply(attrs: AttrArg*)(tags: TagArg*): AppliedTag = { + def apply(attrs: AttrArg*)(tags: TagArg*): AppliedTag = + def unpackTags(tags: TagArg*)(using sb: StringBuilder): StringBuilder = + tags.foreach { + case t: AppliedTag => + sb.append(t) + case s: String => + sb.append(s.escapeReservedTokens) + case s: Seq[AppliedTag | String] => + unpackTags(s:_*) + } + sb val sb = StringBuilder() sb.append(s"<$name") attrs.filter(_ != Nil).foreach{ @@ -21,24 +31,11 @@ object HTML: sb.append(" ").append(e) } sb.append(">") - tags.foreach{ - case t: AppliedTag => - sb.append(t) - case s: String => - sb.append(s.escapeReservedTokens) - case s: Seq[AppliedTag | String] => - s.foreach{ - case a: AppliedTag => - sb.append(a) - case s: String => - sb.append(s.escapeReservedTokens) - } - } + unpackTags(tags:_*)(using sb) sb.append(s"") sb - } - extension (s: String) private def escapeReservedTokens: String = + extension (s: String) def escapeReservedTokens: String = s.replace("&", "&") .replace("<", "<") .replace(">", ">") @@ -68,6 +65,7 @@ object HTML: val svg = Tag("svg") val button = Tag("button") val input = Tag("input") + val label = Tag("label") val script = Tag("script") val link = Tag("link") val footer = Tag("footer") @@ -84,6 +82,12 @@ object HTML: val li = Tag("li") val code = Tag("code") val pre = Tag("pre") + val table = Tag("table") + val thead = Tag("thead") + val tbody = Tag("tbody") + val th = Tag("th") + val tr = Tag("tr") + val td = Tag("td") val cls = Attr("class") val href = Attr("href") @@ -102,6 +106,7 @@ object HTML: val value = Attr("value") val onclick=Attr("onclick") val titleAttr =Attr("title") + val onkeyup = Attr("onkeyup") def raw(content: 
String): AppliedTag = new AppliedTag(content) def raw(content: StringBuilder): AppliedTag = content diff --git a/scaladoc/src/scala/tasty/inspector/OldTastyInspector.scala b/scaladoc/src/scala/tasty/inspector/OldTastyInspector.scala index 91311b1ff9bb..16f9b0fdca1d 100644 --- a/scaladoc/src/scala/tasty/inspector/OldTastyInspector.scala +++ b/scaladoc/src/scala/tasty/inspector/OldTastyInspector.scala @@ -110,7 +110,10 @@ trait OldTastyInspector: override def newRun(implicit ctx: Context): Run = reset() - new TASTYRun(this, ctx.fresh.addMode(Mode.ReadPositions).addMode(Mode.ReadComments)) + val ctx2 = ctx.fresh + .addMode(Mode.ReadPositions) + .setSetting(ctx.settings.YreadComments, true) + new TASTYRun(this, ctx2) new InspectorDriver diff --git a/scaladoc/test-documentations/basic/images/basic.svg b/scaladoc/test-documentations/basic/_assets/images/basic.svg similarity index 100% rename from scaladoc/test-documentations/basic/images/basic.svg rename to scaladoc/test-documentations/basic/_assets/images/basic.svg diff --git a/scaladoc/test-documentations/basic/docs/Adoc.md b/scaladoc/test-documentations/basic/_docs/Adoc.md similarity index 100% rename from scaladoc/test-documentations/basic/docs/Adoc.md rename to scaladoc/test-documentations/basic/_docs/Adoc.md diff --git a/scaladoc/test-documentations/basic/docs/dir/index.md b/scaladoc/test-documentations/basic/_docs/dir/index.md similarity index 100% rename from scaladoc/test-documentations/basic/docs/dir/index.md rename to scaladoc/test-documentations/basic/_docs/dir/index.md diff --git a/scaladoc/test-documentations/basic/docs/dir/nested.md b/scaladoc/test-documentations/basic/_docs/dir/nested.md similarity index 100% rename from scaladoc/test-documentations/basic/docs/dir/nested.md rename to scaladoc/test-documentations/basic/_docs/dir/nested.md diff --git a/scaladoc/test-documentations/basic/docs/index.md b/scaladoc/test-documentations/basic/_docs/index.md similarity index 100% rename from 
scaladoc/test-documentations/basic/docs/index.md rename to scaladoc/test-documentations/basic/_docs/index.md diff --git a/scaladoc/test-documentations/basic/index.md b/scaladoc/test-documentations/basic/index.md deleted file mode 100644 index 2205e0c12d3e..000000000000 --- a/scaladoc/test-documentations/basic/index.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -title: Basic test ---- -# Header - -[Link to docs](docs/index.html) - -And a text! \ No newline at end of file diff --git a/scaladoc/test-documentations/conflicts-pages/_docs/tests/Adoc.md b/scaladoc/test-documentations/conflicts-pages/_docs/tests/Adoc.md new file mode 100644 index 000000000000..a0879c956838 --- /dev/null +++ b/scaladoc/test-documentations/conflicts-pages/_docs/tests/Adoc.md @@ -0,0 +1 @@ +# Trying to override a api page! \ No newline at end of file diff --git a/scaladoc/test-documentations/conflicts-resources/_assets/resources/tests/Adoc.html b/scaladoc/test-documentations/conflicts-resources/_assets/resources/tests/Adoc.html new file mode 100644 index 000000000000..482b8942606e --- /dev/null +++ b/scaladoc/test-documentations/conflicts-resources/_assets/resources/tests/Adoc.html @@ -0,0 +1 @@ +I am causing conflicts! 
\ No newline at end of file diff --git a/scaladoc/test-documentations/noDocsIndex/images/basic.svg b/scaladoc/test-documentations/noDocsIndex/_assets/images/basic.svg similarity index 100% rename from scaladoc/test-documentations/noDocsIndex/images/basic.svg rename to scaladoc/test-documentations/noDocsIndex/_assets/images/basic.svg diff --git a/scaladoc/test-documentations/noDocsIndex/docs/Adoc.md b/scaladoc/test-documentations/noDocsIndex/_docs/Adoc.md similarity index 100% rename from scaladoc/test-documentations/noDocsIndex/docs/Adoc.md rename to scaladoc/test-documentations/noDocsIndex/_docs/Adoc.md diff --git a/scaladoc/test-documentations/noDocsIndex/docs/dir/index.md b/scaladoc/test-documentations/noDocsIndex/_docs/dir/index.md similarity index 100% rename from scaladoc/test-documentations/noDocsIndex/docs/dir/index.md rename to scaladoc/test-documentations/noDocsIndex/_docs/dir/index.md diff --git a/scaladoc/test-documentations/noDocsIndex/docs/dir/nested.md b/scaladoc/test-documentations/noDocsIndex/_docs/dir/nested.md similarity index 100% rename from scaladoc/test-documentations/noDocsIndex/docs/dir/nested.md rename to scaladoc/test-documentations/noDocsIndex/_docs/dir/nested.md diff --git a/scaladoc/test-documentations/noDocsIndex/docs/index.md b/scaladoc/test-documentations/noDocsIndex/_docs/index.md similarity index 100% rename from scaladoc/test-documentations/noDocsIndex/docs/index.md rename to scaladoc/test-documentations/noDocsIndex/_docs/index.md diff --git a/scaladoc/test-documentations/noDocsIndex/index.md b/scaladoc/test-documentations/noDocsIndex/index.md deleted file mode 100644 index 2205e0c12d3e..000000000000 --- a/scaladoc/test-documentations/noDocsIndex/index.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -title: Basic test ---- -# Header - -[Link to docs](docs/index.html) - -And a text! 
\ No newline at end of file diff --git a/scaladoc/test-documentations/noGlobalIndex/images/basic.svg b/scaladoc/test-documentations/noGlobalIndex/_assets/images/basic.svg similarity index 100% rename from scaladoc/test-documentations/noGlobalIndex/images/basic.svg rename to scaladoc/test-documentations/noGlobalIndex/_assets/images/basic.svg diff --git a/scaladoc/test-documentations/noGlobalIndex/docs/Adoc.md b/scaladoc/test-documentations/noGlobalIndex/_docs/Adoc.md similarity index 100% rename from scaladoc/test-documentations/noGlobalIndex/docs/Adoc.md rename to scaladoc/test-documentations/noGlobalIndex/_docs/Adoc.md diff --git a/scaladoc/test-documentations/noGlobalIndex/docs/dir/index.md b/scaladoc/test-documentations/noGlobalIndex/_docs/dir/index.md similarity index 100% rename from scaladoc/test-documentations/noGlobalIndex/docs/dir/index.md rename to scaladoc/test-documentations/noGlobalIndex/_docs/dir/index.md diff --git a/scaladoc/test-documentations/noGlobalIndex/docs/dir/nested.md b/scaladoc/test-documentations/noGlobalIndex/_docs/dir/nested.md similarity index 100% rename from scaladoc/test-documentations/noGlobalIndex/docs/dir/nested.md rename to scaladoc/test-documentations/noGlobalIndex/_docs/dir/nested.md diff --git a/scaladoc/test-documentations/noGlobalIndex/docs/index.md b/scaladoc/test-documentations/noGlobalIndex/_docs/index.md similarity index 100% rename from scaladoc/test-documentations/noGlobalIndex/docs/index.md rename to scaladoc/test-documentations/noGlobalIndex/_docs/index.md diff --git a/scaladoc/test-documentations/noIndexes/images/basic.svg b/scaladoc/test-documentations/noIndexes/_assets/images/basic.svg similarity index 100% rename from scaladoc/test-documentations/noIndexes/images/basic.svg rename to scaladoc/test-documentations/noIndexes/_assets/images/basic.svg diff --git a/scaladoc/test-documentations/noIndexes/docs/Adoc.md b/scaladoc/test-documentations/noIndexes/_docs/Adoc.md similarity index 100% rename from 
scaladoc/test-documentations/noIndexes/docs/Adoc.md rename to scaladoc/test-documentations/noIndexes/_docs/Adoc.md diff --git a/scaladoc/test-documentations/noIndexes/docs/dir/index.md b/scaladoc/test-documentations/noIndexes/_docs/dir/index.md similarity index 100% rename from scaladoc/test-documentations/noIndexes/docs/dir/index.md rename to scaladoc/test-documentations/noIndexes/_docs/dir/index.md diff --git a/scaladoc/test-documentations/noIndexes/docs/dir/nested.md b/scaladoc/test-documentations/noIndexes/_docs/dir/nested.md similarity index 100% rename from scaladoc/test-documentations/noIndexes/docs/dir/nested.md rename to scaladoc/test-documentations/noIndexes/_docs/dir/nested.md diff --git a/scaladoc/test-documentations/static-links/_assets/images/basic.svg b/scaladoc/test-documentations/static-links/_assets/images/basic.svg new file mode 100644 index 000000000000..1fb642c8bfa0 --- /dev/null +++ b/scaladoc/test-documentations/static-links/_assets/images/basic.svg @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/scaladoc/test-documentations/static-links/_docs/Adoc.md b/scaladoc/test-documentations/static-links/_docs/Adoc.md new file mode 100644 index 000000000000..5e4078525c24 --- /dev/null +++ b/scaladoc/test-documentations/static-links/_docs/Adoc.md @@ -0,0 +1,14 @@ +--- +title: Adoc +--- +# Header in Adoc + +[A link to](dir/html.md) +[A link to](dir/name...with..dots..md) +[A link to](dir/name.with.md.and.html.md) +[A link to](dir/nested.md) +[A link to](dir/nested.svg) + + + +And a text! \ No newline at end of file diff --git a/scaladoc/test-documentations/static-links/_docs/dir/html.md b/scaladoc/test-documentations/static-links/_docs/dir/html.md new file mode 100644 index 000000000000..050ef4f92f29 --- /dev/null +++ b/scaladoc/test-documentations/static-links/_docs/dir/html.md @@ -0,0 +1,5 @@ +--- +title: html named file +--- + +And a text! 
\ No newline at end of file diff --git a/scaladoc/test-documentations/static-links/_docs/dir/index.md b/scaladoc/test-documentations/static-links/_docs/dir/index.md new file mode 100644 index 000000000000..40309d387819 --- /dev/null +++ b/scaladoc/test-documentations/static-links/_docs/dir/index.md @@ -0,0 +1,6 @@ +--- +title: A directory +--- +# {{ page.title }} + +And a text! \ No newline at end of file diff --git a/scaladoc/test-documentations/static-links/_docs/dir/name...with..dots..md b/scaladoc/test-documentations/static-links/_docs/dir/name...with..dots..md new file mode 100644 index 000000000000..b62aa2a6e0db --- /dev/null +++ b/scaladoc/test-documentations/static-links/_docs/dir/name...with..dots..md @@ -0,0 +1,5 @@ +--- +title: Strange name multipke dots in nane +--- + +And a text! \ No newline at end of file diff --git a/scaladoc/test-documentations/static-links/_docs/dir/name.with.md.and.html.md b/scaladoc/test-documentations/static-links/_docs/dir/name.with.md.and.html.md new file mode 100644 index 000000000000..2d2d183fb9b5 --- /dev/null +++ b/scaladoc/test-documentations/static-links/_docs/dir/name.with.md.and.html.md @@ -0,0 +1,5 @@ +--- +title: name.with.md.and.html +--- + +And a text! \ No newline at end of file diff --git a/scaladoc/test-documentations/static-links/_docs/dir/nested.md b/scaladoc/test-documentations/static-links/_docs/dir/nested.md new file mode 100644 index 000000000000..3e0ab5922afe --- /dev/null +++ b/scaladoc/test-documentations/static-links/_docs/dir/nested.md @@ -0,0 +1,6 @@ +--- +title: Nested in a directory +--- +# {{ page.title }} + +And a text! 
\ No newline at end of file diff --git a/scaladoc/test-documentations/static-links/_docs/dir/nested.svg b/scaladoc/test-documentations/static-links/_docs/dir/nested.svg new file mode 100644 index 000000000000..d3431c5dee3a --- /dev/null +++ b/scaladoc/test-documentations/static-links/_docs/dir/nested.svg @@ -0,0 +1,54 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + v + + + + + + + diff --git a/scaladoc/test-documentations/static-links/_docs/index.md b/scaladoc/test-documentations/static-links/_docs/index.md new file mode 100644 index 000000000000..5cdd253040db --- /dev/null +++ b/scaladoc/test-documentations/static-links/_docs/index.md @@ -0,0 +1,3 @@ +# {{ page.title }} in header + +And a text! \ No newline at end of file diff --git a/scaladoc/test-source-links/dotty/tools/scaladoc/source-links/RemoteLinksTest.scala b/scaladoc/test-source-links/dotty/tools/scaladoc/source-links/RemoteLinksTest.scala new file mode 100644 index 000000000000..1e8a1227235d --- /dev/null +++ b/scaladoc/test-source-links/dotty/tools/scaladoc/source-links/RemoteLinksTest.scala @@ -0,0 +1,97 @@ +package dotty.tools.scaladoc +package sourcelinks + +import scala.util.Random +import scala.io.Source +import scala.jdk.CollectionConverters._ +import scala.util.matching.Regex +import dotty.tools.scaladoc.test.BuildInfo +import java.nio.file.Path +import java.nio.file.Paths +import org.jsoup.Jsoup +import org.jsoup.nodes.Document +import util.IO +import org.junit.Assert.assertTrue +import org.junit.Test + +class RemoteLinksTest: + + class TimeoutException extends Exception + + val randomGenerator = new Random(125L) + // Predef has often problems with positions + val mtslAll = membersToSourceLinks(using testDocContext()).filter(_._1 != "Predef") + + @Test + def scala213XSourceLink = + assertTrue(mtslAll.find((k, _) => k == "AbstractMap").isDefined) // source link to Scala2.13.X stdlib class + + @Test + def scala3SourceLink = + 
assertTrue(mtslAll.find((k, _) => k == "PolyFunction").isDefined) // source link to Scala3 stdlib class + + @Test + def tastySourceLink = + assertTrue(mtslAll.find((k, _) => k == "TastyBuffer").isDefined) // source link to Scala3 tasty class + + @Test + def runTest = + assertTrue(mtslAll.nonEmpty) + val mtsl = randomGenerator.shuffle(mtslAll).take(80) // take 80 random entries + val pageToMtsl: Map[String, List[(String, Int)]] = + mtsl.groupMap(_._2.split("#L").head)(v => (v._1, v._2.split("#L").last.toInt)) + pageToMtsl.toSeq.sortBy(_._1).foreach { case (link, members) => + try + val doc = getDocumentFromUrl(link) + println(s"Checking $link") + members.foreach { case (member, expectedLine) => + if !member.startsWith("given_") then // TODO: handle synthetic givens, for now we disable them from testing + val toLine = expectedLine + 3 + val memberToMatch = member.replace("`", "") + val lineCorrectlyDefined = (expectedLine until toLine).exists{ line => + val loc = doc.select(s"#LC$line").text + + loc.contains(memberToMatch) + } + + assertTrue(s"Expected to find $memberToMatch at $link at lines $expectedLine-$toLine", lineCorrectlyDefined) + } + catch + case e: java.lang.IllegalArgumentException => + report.error(s"Could not open link for $link - invalid URL")(using testContext) + case e: TimeoutException => + report.error(s"Tried to open link $link 16 times but with no avail")(using testContext) + case e: org.jsoup.HttpStatusException => e.getStatusCode match + case 404 => throw AssertionError(s"Page $link does not exists") + case n => report.warning(s"Could not open link for $link, return code $n")(using testContext) + } + assertNoErrors(testContext.reportedDiagnostics) + + private def getDocumentFromUrl(link: String, retries: Int = 16): Document = + try + if retries == 0 then throw TimeoutException() + Jsoup.connect(link).get + catch + case e: org.jsoup.HttpStatusException => e.getStatusCode match + case 429 => + Thread.sleep(10) + getDocumentFromUrl(link, retries - 
1) + case n => + throw e + + private def membersToSourceLinks(using DocContext): List[(String, String)] = + val output = Paths.get("scaladoc", "output", "scala3", "api").toAbsolutePath + val mtsl = List.newBuilder[(String, String)] + def processFile(path: Path): Unit = + val document = Jsoup.parse(IO.read(path)) + if document.select("span.kind").first.text == "package" then + document.select(".documentableElement").forEach { dElem => + if dElem.select("span.kind").first.text != "package" then + dElem.select("dt").forEach { elem => + val content = elem.text + if content == "Source" then + mtsl += dElem.select(".documentableName").first.text -> elem.nextSibling.childNode(0).attr("href") + } + } + IO.foreachFileIn(output, processFile) + mtsl.result diff --git a/scaladoc/test/dotty/tools/scaladoc/BaseHtmlTest.scala b/scaladoc/test/dotty/tools/scaladoc/BaseHtmlTest.scala index 6bb1f1a20010..6de233aba1db 100644 --- a/scaladoc/test/dotty/tools/scaladoc/BaseHtmlTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/BaseHtmlTest.scala @@ -24,34 +24,34 @@ class BaseHtmlTest: def withGeneratedDoc( pcks: Seq[String], - docsRoot: Option[String] = None)( + docsRoot: Option[String] = None, + customArgs: Option[Scaladoc.Args] = None, + )( op: ProjectContext ?=> Unit, - ): Unit = - val dest = Files.createTempDirectory("test-doc") - try - val args = Scaladoc.Args( - name = projectName, - tastyFiles = pcks.flatMap(tastyFiles(_)), - output = dest.toFile, - docsRoot = docsRoot, - projectVersion = Some(projectVersion), - ) - Scaladoc.run(args)(using testContext) - op(using ProjectContext(dest)) - - finally IO.delete(dest.toFile) - - val testDocPath = Paths.get(BuildInfo.testDocumentationRoot) + ): Unit = + val dest = customArgs.fold(Files.createTempDirectory("test-doc").toFile)(_.output) + try + val args = customArgs.getOrElse(Scaladoc.Args( + name = projectName, + tastyFiles = pcks.flatMap(tastyFiles(_)), + output = dest, + docsRoot = docsRoot, + projectVersion = Some(projectVersion) + )) 
+ Scaladoc.run(args)(using testContext) + op(using ProjectContext(args.output.toPath)) + finally IO.delete(dest) + end withGeneratedDoc class DocumentContext(d: Document, path: Path): import collection.JavaConverters._ def niceMsg(msg: String) = s"$msg in $path (body):\n ${d.html()}:\n" def assertTextsIn(selector: String, expected: String*) = - assertFalse(niceMsg("Selector not found"), d.select(selector).isEmpty) + assertFalse(niceMsg(s"Selector not found for '$selector'"), d.select(selector).isEmpty) val found = d.select(selector).eachText.asScala - assertEquals(niceMsg(s"Context does not match for '$selector'"), expected.toList, found.toList) + assertEquals(niceMsg(s"Content does not match for '$selector'"), expected.toList, found.toList) def assertAttr(selector: String, attr: String, expected: String*) = assertFalse(niceMsg(s"Selector '$selector' not found"), d.select(selector).isEmpty) @@ -62,6 +62,9 @@ class BaseHtmlTest: val msg = niceMsg(s"Selector '$selector' exisits in document") assertTrue(msg, d.select(selector).isEmpty) + def fileExists = + assertTrue(path.toFile.exists) + def withHtmlFile(pathStr: String)(op: DocumentContext => Unit)(using ProjectContext) = val path = summon[ProjectContext].path.resolve(pathStr) assertTrue(s"File at $path does not exisits!", Files.exists(path)) diff --git a/scaladoc/test/dotty/tools/scaladoc/ExternalLocationProviderIntegrationTest.scala b/scaladoc/test/dotty/tools/scaladoc/ExternalLocationProviderIntegrationTest.scala index 9c10a557b9de..4f4f8c972b4e 100644 --- a/scaladoc/test/dotty/tools/scaladoc/ExternalLocationProviderIntegrationTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/ExternalLocationProviderIntegrationTest.scala @@ -25,23 +25,45 @@ class JavadocExternalLocationProviderIntegrationTest extends ExternalLocationPro class Scaladoc2ExternalLocationProviderIntegrationTest extends ExternalLocationProviderIntegrationTest( "externalScaladoc2", - 
List(".*scala.*::scaladoc2::https://www.scala-lang.org/api/current/"), + List( + ".*scala/.*::scaladoc2::https://www.scala-lang.org/api/current/", + ".*externalStubs.*::scaladoc2::https://external.stubs/api/" + ), List( "https://www.scala-lang.org/api/current/scala/util/matching/Regex$$Match.html", "https://www.scala-lang.org/api/current/scala/Predef$.html#String", "https://www.scala-lang.org/api/current/scala/collection/immutable/Map.html", "https://www.scala-lang.org/api/current/scala/collection/IterableOnceOps.html#addString(b:StringBuilder,start:String,sep:String,end:String):StringBuilder", - "https://www.scala-lang.org/api/current/scala/collection/IterableOnceOps.html#mkString(start:String,sep:String,end:String):String" + "https://www.scala-lang.org/api/current/scala/collection/IterableOnceOps.html#mkString(start:String,sep:String,end:String):String", + "https://external.stubs/api/tests/externalStubs/$div$bslash$.html", + "https://external.stubs/api/tests/externalStubs/$bslash$div$.html" ) ) class Scaladoc3ExternalLocationProviderIntegrationTest extends ExternalLocationProviderIntegrationTest( "externalScaladoc3", - List(".*scala.*::scaladoc3::https://dotty.epfl.ch/api/"), + List( + ".*scala/.*::scaladoc3::https://dotty.epfl.ch/api/", + ".*externalStubs.*::scaladoc3::https://external.stubs/api/" + ), List( "https://dotty.epfl.ch/api/scala/collection/immutable/Map.html", "https://dotty.epfl.ch/api/scala/Predef$.html#String-0", - "https://dotty.epfl.ch/api/scala/util/matching/Regex$$Match.html" + "https://dotty.epfl.ch/api/scala/util/matching/Regex$$Match.html", + "https://external.stubs/api/tests/externalStubs/$div$bslash$.html", + "https://external.stubs/api/tests/externalStubs/$bslash$div$.html" + ) +) + +class Scaladoc2LegacyExternalLocationProviderIntegrationTest extends LegacyExternalLocationProviderIntegrationTest( + "externalScaladoc2", + List(".*scala.*#https://www.scala-lang.org/api/current/"), + List( + 
"https://www.scala-lang.org/api/current/scala/util/matching/Regex$$Match.html", + "https://www.scala-lang.org/api/current/scala/Predef$.html#String", + "https://www.scala-lang.org/api/current/scala/collection/immutable/Map.html", + "https://www.scala-lang.org/api/current/scala/collection/IterableOnceOps.html#addString(b:StringBuilder,start:String,sep:String,end:String):StringBuilder", + "https://www.scala-lang.org/api/current/scala/collection/IterableOnceOps.html#mkString(start:String,sep:String,end:String):String" ) ) @@ -60,7 +82,7 @@ abstract class ExternalLocationProviderIntegrationTest( ) override def runTest = afterRendering { - val output = summon[DocContext].args.output.toPath.resolve("api") + val output = summon[DocContext].args.output.toPath val linksBuilder = List.newBuilder[String] def processFile(path: Path): Unit = @@ -72,7 +94,6 @@ abstract class ExternalLocationProviderIntegrationTest( linksBuilder ++= hrefValues } - println(output) IO.foreachFileIn(output, processFile) val links = linksBuilder.result val errors = expectedLinks.flatMap(expect => Option.when(!links.contains(expect))(expect)) @@ -86,3 +107,16 @@ abstract class ExternalLocationProviderIntegrationTest( } } :: Nil +abstract class LegacyExternalLocationProviderIntegrationTest( + name: String, + mappings: Seq[String], + expectedLinks: Seq[String] +) extends ExternalLocationProviderIntegrationTest(name, mappings, expectedLinks): + + override def args = super.args.copy( + externalMappings = mappings.flatMap( s => + ExternalDocLink.parseLegacy(s).fold(left => None, right => Some(right) + ) + ).toList + ) + diff --git a/scaladoc/test/dotty/tools/scaladoc/PackageDocumentationTest.scala b/scaladoc/test/dotty/tools/scaladoc/PackageDocumentationTest.scala new file mode 100644 index 000000000000..ca2ed505b09a --- /dev/null +++ b/scaladoc/test/dotty/tools/scaladoc/PackageDocumentationTest.scala @@ -0,0 +1,23 @@ +package dotty.tools.scaladoc + +import org.junit.Assert._ +import 
com.vladsch.flexmark.util.{ast => mdu, sequence} +import com.vladsch.flexmark.{ast => mda} +import collection.JavaConverters._ + + +class PackageDocumentationTest extends ScaladocTest("packageobjdocs"): + override def runTest: Unit = withModule { module => + module.members.values.find { + case member if member.kind == Kind.Package => true + case _ => false + }.flatMap(_.docs).map(_.body).fold(throw AssertionError("No package found or documentation is not present")) { + case node: mdu.ContentNode => + val text = node.getDescendants().asScala.toList.map { + case node: mdu.ContentNode => node.getContentChars().toString() + case _ => "" + }.mkString + assertTrue("Documentation for package is incorrect", text.contains("It's a test")) + case _ => throw AssertionError("No documentation node found in package docs") + } + } diff --git a/scaladoc/test/dotty/tools/scaladoc/RaportingTest.scala b/scaladoc/test/dotty/tools/scaladoc/RaportingTest.scala deleted file mode 100644 index a73f026f7774..000000000000 --- a/scaladoc/test/dotty/tools/scaladoc/RaportingTest.scala +++ /dev/null @@ -1,55 +0,0 @@ -package dotty.tools.scaladoc - -import java.nio.file.Files -import java.nio.file.Path -import java.nio.file.Paths -import org.junit.Test -import org.junit.Assert -import org.jsoup.Jsoup -import org.jsoup.nodes.Document -import java.nio.charset.Charset -import util.IO - -class ReportingTest: - import Scaladoc.Args - - private def checkReportedDiagnostics( - newArgs: Args => Args = identity, - ctx: CompilerContext = testContext)( - op: ReportedDiagnostics => Unit): Unit = - - val dest = Files.createTempDirectory("test-doc") - try - // We are using random package - Scaladoc.run(newArgs(testArgs(tastyFiles("nested"), dest.toFile)))(using ctx) - op(ctx.reportedDiagnostics) - - finally IO.delete(dest.toFile) - - @Test - def noMessageForMostCases = checkReportedDiagnostics(){ diag => - assertNoWarning(diag) - assertNoErrors(diag) - assertNoInfos(diag) - } - - @Test - def 
errorsInCaseOfIncompletClasspath = - val notTasty = Files.createTempFile("broken", ".notTasty") - try - Files.write(notTasty, "Random file".getBytes) - checkReportedDiagnostics(a => a.copy(tastyFiles = notTasty.toFile +: a.tastyFiles)){ diag => - assertMessagesAbout(diag.errorMsgs)("File extension is not `tasty` or `jar`") - } - finally Files.delete(notTasty) - - @Test - def verbosePrintsDokkaMessage = - val ctx = testContext - ctx.setSetting(ctx.settings.verbose, true) - checkReportedDiagnostics(ctx = ctx){ diag => - assertNoWarning(diag) - assertNoErrors(diag) - - assertMessagesAbout(diag.infoMsgs)("generation completed successfully") - } diff --git a/scaladoc/test/dotty/tools/scaladoc/ReportingTest.scala b/scaladoc/test/dotty/tools/scaladoc/ReportingTest.scala new file mode 100644 index 000000000000..e64eac41ac66 --- /dev/null +++ b/scaladoc/test/dotty/tools/scaladoc/ReportingTest.scala @@ -0,0 +1,87 @@ +package dotty.tools.scaladoc + +import java.nio.file.Files +import java.nio.file.Path +import java.nio.file.Paths +import org.junit.Test +import org.junit.Assert +import org.jsoup.Jsoup +import org.jsoup.nodes.Document +import java.nio.charset.Charset +import util.IO + +class ReportingTest: + import Scaladoc.Args + + private def checkReportedDiagnostics( + newArgs: Args => Args = identity, + ctx: CompilerContext = testContext)( + op: ReportedDiagnostics => Unit): Unit = + + val dest = Files.createTempDirectory("test-doc") + try + // We are using random package + Scaladoc.run(newArgs(testArgs(tastyFiles("nested"), dest.toFile)))(using ctx) + op(ctx.reportedDiagnostics) + + finally IO.delete(dest.toFile) + + @Test + def noMessageForMostCases = checkReportedDiagnostics(){ diag => + assertNoWarning(diag) + assertNoErrors(diag) + assertNoInfos(diag) + } + + @Test + def errorsInCaseOfIncompletClasspath = + val notTasty = Files.createTempFile("broken", ".notTasty") + try + Files.write(notTasty, "Random file".getBytes) + checkReportedDiagnostics(a => a.copy(tastyFiles = 
notTasty.toFile +: a.tastyFiles)){ diag => + assertMessagesAbout(diag.errorMsgs)("File extension is not `tasty` or `jar`") + } + finally Files.delete(notTasty) + + @Test + def testSuccessfulDocsGeneration = + val ctx = testContext + ctx.setSetting(ctx.settings.verbose, true) + checkReportedDiagnostics(ctx = ctx){ diag => + assertNoWarning(diag) + assertNoErrors(diag) + + assertMessagesAbout(diag.infoMsgs)("generation completed successfully") + } + + @Test + def testErrorInCaseOfAssetShadowing = + val ctx = testContext + ctx.setSetting(ctx.settings.verbose, true) + val docsRoot = testDocPath.resolve("conflicts-resources").toString + checkReportedDiagnostics(_.copy( + docsRoot = Some(docsRoot), + tastyFiles = tastyFiles("tests", rootPck = "resources") + )){ diag => + assertNoWarning(diag) + val Seq(msg) = diag.errorMsgs.map(_.toLowerCase) + Seq("conflict","api", "resource", "resources/tests/adoc.html").foreach(word => + Assert.assertTrue(s"Error message: $msg should contains $word", msg.contains(word))) + } + + @Test + def testErrorInCaseOfDocsShadowing = + val ctx = testContext + ctx.setSetting(ctx.settings.verbose, true) + val docsRoot = testDocPath.resolve("conflicts-pages").toString + checkReportedDiagnostics(_.copy( + docsRoot = Some(docsRoot), + tastyFiles = tastyFiles("tests", rootPck = "_docs") + )){ diag => + assertNoWarning(diag) + val Seq(msg) = diag.errorMsgs.map(_.toLowerCase) + Seq("conflict","api", "static", "page", "_docs/tests/adoc.html") + .foreach( word => + Assert.assertTrue(s"Error message: $msg should contains $word", msg.contains(word)) + ) + } diff --git a/scaladoc/test/dotty/tools/scaladoc/ScaladocTest.scala b/scaladoc/test/dotty/tools/scaladoc/ScaladocTest.scala index 8d7574adf13d..ad93d773cf4f 100644 --- a/scaladoc/test/dotty/tools/scaladoc/ScaladocTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/ScaladocTest.scala @@ -16,7 +16,7 @@ abstract class ScaladocTest(val name: String): given DocContext = testDocContext(tastyFiles(name)) 
op(ScalaModuleProvider.mkModule()) - private def getTempDir() : TemporaryFolder = + protected def getTempDir() : TemporaryFolder = val folder = new TemporaryFolder() folder.create() folder @@ -25,7 +25,8 @@ abstract class ScaladocTest(val name: String): name = "test", tastyFiles = tastyFiles(name), output = getTempDir().getRoot, - projectVersion = Some("1.0") + projectVersion = Some("1.0"), + sourceLinks = List("github://lampepfl/dotty/master") ) @Test diff --git a/scaladoc/test/dotty/tools/scaladoc/SourceLinksTests.scala b/scaladoc/test/dotty/tools/scaladoc/SourceLinksTests.scala deleted file mode 100644 index f5721fe90c03..000000000000 --- a/scaladoc/test/dotty/tools/scaladoc/SourceLinksTests.scala +++ /dev/null @@ -1,158 +0,0 @@ -package dotty.tools.scaladoc - -import java.nio.file._ -import org.junit.Assert._ -import org.junit.Test - -class SourceLinkTest: - - @Test - def testBasicFailures() = - def testFailure(template: String, messagePart: String) = - val res = SourceLinkParser(None).parse(template) - assertTrue(s"Expected failure containing $messagePart: $res", res.left.exists(_.contains(messagePart))) - - val resWithVersion = SourceLinkParser(Some("develop")).parse(template) - assertEquals(res, resWithVersion) - - testFailure("ala://ma/kota", "known provider") - testFailure("ala=ala=ala://ma/kota", "source link syntax") - testFailure("ala=ala=ala", "source link syntax") - testFailure("""€{TPL_OWNER}""", "scaladoc") - - - @Test - def testProperTemplates() = - def test(template: String) = - val res = try SourceLinkParser(Some("develop")).parse(template) catch - case e: Exception => throw RuntimeException(s"When testing $template", e) - assertTrue(s"Bad template: $template", res.isRight) - - - Seq( - "github://lampepfl/dotty", - "gitlab://lampepfl/dotty", - "https://github.com/scala/scala/blob/2.13.x€{FILE_PATH_EXT}#€{FILE_LINE}" - ).foreach{ template => - test(template) - } - - - @Test - def testSourceProviderWithoutRevision() = - Seq("github", 
"gitlab").foreach { provider => - val template = s"$provider://ala/ma" - val res = SourceLinkParser(None).parse(template) - assertTrue(s"Expected failure containing missing revision: $res", res.left.exists(_.contains("revision"))) - - Seq(s"$provider://ala/ma/", s"$provider://ala", s"$provider://ala/ma/develop/on/master").foreach { template => - val res = SourceLinkParser(Some("develop")).parse(template) - assertTrue(s"Expected failure syntax info: $res", res.left.exists(_.contains("syntax"))) - } - - } - -class SourceLinksTest: - // TODO (https://github.com/lampepfl/scaladoc/issues/240): configure source root - val projectRoot = Paths.get("").toAbsolutePath() - - val edit: Operation = "edit" // union types need explicit singletons - - type Args = String | (String, Operation) | (String, Int) | (String, Int, Operation) - - private def testLink(config: Seq[String], revision: Option[String])(cases: (Args, String | None.type)*): Unit = - val links = SourceLinks.load(config, revision)(using testContext) - cases.foreach { case (args, expected) => - val res = args match - case path: String => links.pathTo(projectRoot.resolve(path)) - case (path: String, line: Int) => links.pathTo(projectRoot.resolve(path), line = Some(line)) - case (path: String, operation: Operation) => links.pathTo(projectRoot.resolve(path), operation = operation) - case (path: String, line: Int, operation: Operation) => links.pathTo(projectRoot.resolve(path), operation = operation, line = Some(line)) - - val expectedRes = expected match - case s: String => Some(s) - case None => None - - assertEquals(s"For path $args", expectedRes, res) - } - - @Test - def testBasicPaths = - testLink(Seq("github://lampepfl/dotty"), Some("develop"))( - "project/Build.scala" -> "https://github.com/lampepfl/dotty/blob/develop/project/Build.scala", - ("project/Build.scala", 54) -> "https://github.com/lampepfl/dotty/blob/develop/project/Build.scala#L54", - ("project/Build.scala", edit) -> 
"https://github.com/lampepfl/dotty/edit/develop/project/Build.scala", - ("project/Build.scala", 54, edit) -> "https://github.com/lampepfl/dotty/edit/develop/project/Build.scala#L54", - ) - - testLink(Seq("github://lampepfl/dotty/dev"), Some("develop"))( - "project/Build.scala" -> "https://github.com/lampepfl/dotty/blob/dev/project/Build.scala", - ("project/Build.scala", 54) -> "https://github.com/lampepfl/dotty/blob/dev/project/Build.scala#L54", - ("project/Build.scala", edit) -> "https://github.com/lampepfl/dotty/edit/dev/project/Build.scala", - ("project/Build.scala", 54, edit) -> "https://github.com/lampepfl/dotty/edit/dev/project/Build.scala#L54", - ) - - testLink(Seq("github://lampepfl/dotty/dev#src/lib"), None)( - "project/Build.scala" -> "https://github.com/lampepfl/dotty/blob/dev/src/lib/project/Build.scala", - ("project/Build.scala", 54) -> "https://github.com/lampepfl/dotty/blob/dev/src/lib/project/Build.scala#L54", - ("project/Build.scala", edit) -> "https://github.com/lampepfl/dotty/edit/dev/src/lib/project/Build.scala", - ("project/Build.scala", 54, edit) -> "https://github.com/lampepfl/dotty/edit/dev/src/lib/project/Build.scala#L54", - ) - - testLink(Seq("github://lampepfl/dotty/dev#src/lib"), Some("develop"))( - "project/Build.scala" -> "https://github.com/lampepfl/dotty/blob/dev/src/lib/project/Build.scala", - ("project/Build.scala", 54) -> "https://github.com/lampepfl/dotty/blob/dev/src/lib/project/Build.scala#L54", - ("project/Build.scala", edit) -> "https://github.com/lampepfl/dotty/edit/dev/src/lib/project/Build.scala", - ("project/Build.scala", 54, edit) -> "https://github.com/lampepfl/dotty/edit/dev/src/lib/project/Build.scala#L54", - ) - - testLink(Seq("github://lampepfl/dotty#src/lib"), Some("develop"))( - "project/Build.scala" -> "https://github.com/lampepfl/dotty/blob/develop/src/lib/project/Build.scala", - ("project/Build.scala", 54) -> 
"https://github.com/lampepfl/dotty/blob/develop/src/lib/project/Build.scala#L54", - ("project/Build.scala", edit) -> "https://github.com/lampepfl/dotty/edit/develop/src/lib/project/Build.scala", - ("project/Build.scala", 54, edit) -> "https://github.com/lampepfl/dotty/edit/develop/src/lib/project/Build.scala#L54", - ) - - testLink(Seq("gitlab://lampepfl/dotty"), Some("develop"))( - "project/Build.scala" -> "https://gitlab.com/lampepfl/dotty/-/blob/develop/project/Build.scala", - ("project/Build.scala", 54) -> "https://gitlab.com/lampepfl/dotty/-/blob/develop/project/Build.scala#L54", - ("project/Build.scala", edit) -> "https://gitlab.com/lampepfl/dotty/-/edit/develop/project/Build.scala", - ("project/Build.scala", 54, edit) -> "https://gitlab.com/lampepfl/dotty/-/edit/develop/project/Build.scala#L54", - ) - - testLink(Seq("€{FILE_PATH}#€{FILE_LINE}"), Some("develop"))( - "project/Build.scala" -> "/project/Build.scala#", - ("project/Build.scala", 54) -> "/project/Build.scala#54", - ("project/Build.scala", edit) -> "/project/Build.scala#", - ("project/Build.scala", 54, edit) -> "/project/Build.scala#54", - ) - - testLink(Seq("https://github.com/scala/scala/blob/2.13.x€{FILE_PATH_EXT}#L€{FILE_LINE}"), Some("develop"))( - "project/Build.scala" -> "https://github.com/scala/scala/blob/2.13.x/project/Build.scala#L", - ("project/Build.scala", 54) -> "https://github.com/scala/scala/blob/2.13.x/project/Build.scala#L54", - ("project/Build.scala", edit) -> "https://github.com/scala/scala/blob/2.13.x/project/Build.scala#L", - ("project/Build.scala", 54, edit) -> "https://github.com/scala/scala/blob/2.13.x/project/Build.scala#L54", - ) - - @Test - def testBasicPrefixedPaths = - testLink(Seq("src=gitlab://lampepfl/dotty"), Some("develop"))( - "src/lib/core.scala" -> "https://gitlab.com/lampepfl/dotty/-/blob/develop/lib/core.scala", - ("src/lib/core.scala", 33, edit) -> "https://gitlab.com/lampepfl/dotty/-/edit/develop/lib/core.scala#L33", - 
("src/lib/core.scala", 33, edit) -> "https://gitlab.com/lampepfl/dotty/-/edit/develop/lib/core.scala#L33", - "build.sbt" -> None - ) - - - @Test - def prefixedPaths = - testLink(Seq( - "src/generated=€{FILE_PATH}#€{FILE_LINE}", - "src=gitlab://lampepfl/dotty", - "github://lampepfl/dotty" - ), Some("develop"))( - ("project/Build.scala", 54, edit) -> "https://github.com/lampepfl/dotty/edit/develop/project/Build.scala#L54", - ("src/lib/core.scala", 33, edit) -> "https://gitlab.com/lampepfl/dotty/-/edit/develop/lib/core.scala#L33", - ("src/generated.scala", 33, edit) -> "https://gitlab.com/lampepfl/dotty/-/edit/develop/generated.scala#L33", - ("src/generated/template.scala", 1, edit) -> "/template.scala#1" - ) \ No newline at end of file diff --git a/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala b/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala new file mode 100644 index 000000000000..1d140315cc10 --- /dev/null +++ b/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/LinkWarningTest.scala @@ -0,0 +1,19 @@ +package dotty.tools.scaladoc +package noLinkWarnings + +import org.junit.Assert.assertEquals + +class LinkWarningsTest extends ScaladocTest("noLinkWarnings"): + + override def args = Scaladoc.Args( + name = "test", + tastyFiles = tastyFiles(name), + output = getTempDir().getRoot, + projectVersion = Some("1.0") + ) + + override def runTest = afterRendering { + val diagnostics = summon[DocContext].compilerContext.reportedDiagnostics + assertEquals("There should be exactly one warning", 1, diagnostics.warningMsgs.size) + assertNoErrors(diagnostics) + } diff --git a/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/NoLinkWarningsTest.scala b/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/NoLinkWarningsTest.scala new file mode 100644 index 000000000000..c24842411781 --- /dev/null +++ b/scaladoc/test/dotty/tools/scaladoc/no-link-warnings/NoLinkWarningsTest.scala @@ -0,0 +1,18 @@ +package 
dotty.tools.scaladoc +package noLinkWarnings + +class NoLinkWarningsTest extends ScaladocTest("noLinkWarnings"): + + override def args = Scaladoc.Args( + name = "test", + tastyFiles = tastyFiles(name), + output = getTempDir().getRoot, + projectVersion = Some("1.0"), + noLinkWarnings = true + ) + + override def runTest = afterRendering { + val diagnostics = summon[DocContext].compilerContext.reportedDiagnostics + assertNoWarning(diagnostics) + assertNoErrors(diagnostics) + } diff --git a/scaladoc/test/dotty/tools/scaladoc/signatures/AbstractMemberSignaturesTest.scala b/scaladoc/test/dotty/tools/scaladoc/signatures/AbstractMemberSignaturesTest.scala index 0d47c9ee6560..eb27987f3f6c 100644 --- a/scaladoc/test/dotty/tools/scaladoc/signatures/AbstractMemberSignaturesTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/signatures/AbstractMemberSignaturesTest.scala @@ -26,14 +26,14 @@ class AbstractMembers extends ScaladocTest("abstractmembersignatures"): } private def signaturesFromDocumentation()(using DocContext): Map[String, List[(String, String)]] = - val output = summon[DocContext].args.output.toPath.resolve("api") + val output = summon[DocContext].args.output.toPath val signatures = List.newBuilder[(String, (String, String))] def processFile(path: Path): Unit = val document = Jsoup.parse(IO.read(path)) val content = document.select(".documentableList").forEach { elem => val group = elem.select(".groupHeader").eachText.asScala.mkString("") elem.select(".documentableElement").forEach { elem => - val modifiers = elem.select(".header .other-modifiers").eachText.asScala.mkString("") + val modifiers = elem.select(".header .modifiers").eachText.asScala.mkString("") val name = elem.select(".header .documentableName").eachText.asScala.mkString("") signatures += group -> (modifiers, name) } diff --git a/scaladoc/test/dotty/tools/scaladoc/signatures/SignatureTest.scala b/scaladoc/test/dotty/tools/scaladoc/signatures/SignatureTest.scala index 05fa7164508e..4d8a9f46f21e 100644 
--- a/scaladoc/test/dotty/tools/scaladoc/signatures/SignatureTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/signatures/SignatureTest.scala @@ -50,6 +50,7 @@ abstract class SignatureTest( (s"Not documented signatures:\n${expectedButNotFound.mkString("\n")}") val unexpectedReport = Option.when(!unexpected.isEmpty) (s"Unexpectedly documented signatures:\n${unexpected.mkString("\n")}") + val reports = missingReport ++ unexpectedReport if !reports.isEmpty then @@ -90,21 +91,21 @@ abstract class SignatureTest( private def signaturesFromSources(source: Source, kinds: Seq[String]): Seq[SignatureRes] = source.getLines.map(_.trim) - .filterNot(_.isEmpty) - .filterNot(_.startWithAnyOfThese("=",":","{","}", "//")) - .toSeq - .flatMap { - case unexpectedRegex(signature) => findName(signature, kinds).map(Unexpected(_)) - case expectedRegex(signature) => findName(signature, kinds).map(Expected(_, signature)) - case signature => - findName(signature, kinds).map( - Expected(_, commentRegex.replaceAllIn(signature, "") - .compactWhitespaces.reverse.dropWhile(List('{', ':').contains(_)).reverse) - ) - } + .filterNot(_.isEmpty) + .filterNot(_.startWithAnyOfThese("=",":","{","}", "//")) + .toSeq + .flatMap { + case unexpectedRegex(signature) => findName(signature, kinds).map(Unexpected(_)) + case expectedRegex(signature) => findName(signature, kinds).map(Expected(_, signature)) + case signature => + findName(signature, kinds).map( + Expected(_, commentRegex.replaceAllIn(signature, "") + .compactWhitespaces.reverse.dropWhile(List('{', ':').contains(_)).reverse) + ) + } private def signaturesFromDocumentation()(using DocContext): Seq[String] = - val output = summon[DocContext].args.output.toPath.resolve("api") + val output = summon[DocContext].args.output.toPath val signatures = List.newBuilder[String] def processFile(path: Path): Unit = if filterFunc(path) then @@ -118,7 +119,7 @@ abstract class SignatureTest( val sigPrefix = elem.select(".header .signature").textNodes match case 
list if list.size > 0 && list.get(0).getWholeText().startsWith(" ") => " " case _ => "" - val all = s"$annotations$other $kind $name$sigPrefix$signature".trim() + val all = s"$annotations$other $sigPrefix$signature".trim() signatures += all } @@ -128,6 +129,6 @@ abstract class SignatureTest( object SignatureTest { val classlikeKinds = Seq("class", "object", "trait", "enum") // TODO add docs for packages - val members = Seq("type", "def", "val", "var") + val members = Seq("type", "def", "val", "var", "given") val all = classlikeKinds ++ members } diff --git a/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala b/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala index b07e9d010e55..699aff7b6cf6 100644 --- a/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala +++ b/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala @@ -84,3 +84,13 @@ class ImplicitConversionsTest3 extends SignatureTest( ) class SpecializedSignature extends SignatureTest("specializedSignature", SignatureTest.all) + +class ContextBounds extends SignatureTest("contextBounds", SignatureTest.all) + +class FBoundedTypeParameters extends SignatureTest("fboundedTypeParameters", SignatureTest.all) + +class Exports extends SignatureTest("exports2", SignatureTest.all, sourceFiles = List("exports1", "exports2")) + +class ContextFunctions extends SignatureTest("contextfunctions", SignatureTest.all) + +class MarkdownCode extends SignatureTest("markdowncode", SignatureTest.all) diff --git a/scaladoc/test/dotty/tools/scaladoc/site/IndexPagesTest.scala b/scaladoc/test/dotty/tools/scaladoc/site/IndexPagesTest.scala new file mode 100644 index 000000000000..dc9e85a3974d --- /dev/null +++ b/scaladoc/test/dotty/tools/scaladoc/site/IndexPagesTest.scala @@ -0,0 +1,39 @@ +package dotty.tools.scaladoc +package site + +import org.junit.Test +import java.nio.file.Files + +class IndexPagesTest extends 
BaseHtmlTest: + + + private val baseArgs = Scaladoc.Args( + name = projectName, + tastyFiles = Seq("site").flatMap(tastyFiles(_)), + output = Files.createTempDirectory("test-doc").toFile, + projectVersion = Some(projectVersion) + ) + + @Test + def staticSiteAndApiSubdirectory = gridTest(baseArgs.copy(docsRoot = Some(testDocPath.resolve("noIndexes").toAbsolutePath.toString), apiSubdirectory = true)) + + @Test + def staticSiteAndNOApiSubdirectoryAndReadyToGoIndex = gridTest(baseArgs.copy(docsRoot = Some(testDocPath.resolve("basic").toAbsolutePath.toString), apiSubdirectory = false)) + + @Test + def staticSiteAndApiSubdirectoryAndReadyToGoIndex = gridTest(baseArgs.copy(docsRoot = Some(testDocPath.resolve("basic").toAbsolutePath.toString), apiSubdirectory = true)) + + @Test + def staticSiteAndNOApiSubdirectory = gridTest(baseArgs.copy(docsRoot = Some(testDocPath.resolve("noIndexes").toAbsolutePath.toString), apiSubdirectory = false)) + + @Test + def NOstaticSiteAndApSubdirectory = gridTest(baseArgs.copy(docsRoot = None, apiSubdirectory = true)) + + @Test + def NOstaticSiteAndNOApiSubdirectory = gridTest(baseArgs.copy(docsRoot = None, apiSubdirectory = false)) + + private def gridTest(args: Scaladoc.Args) = withGeneratedDoc(Seq.empty, None, customArgs = Some(args)) { + withHtmlFile("index.html") { content => + content.fileExists + } + } diff --git a/scaladoc/test/dotty/tools/scaladoc/site/NavigationTest.scala b/scaladoc/test/dotty/tools/scaladoc/site/NavigationTest.scala index adfbb5113ff2..39bd7acf84f6 100644 --- a/scaladoc/test/dotty/tools/scaladoc/site/NavigationTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/site/NavigationTest.scala @@ -9,42 +9,40 @@ class NavigationTest extends BaseHtmlTest: def testNavMenu(page: String, topLevel: NavMenuTestEntry)(using ProjectContext): Unit = withHtmlFile(page){ content => - def flatten(l: NavMenuTestEntry): Seq[NavMenuTestEntry] = l +: l.nested.flatMap(flatten) def test(query: String, el: Seq[NavMenuTestEntry]) = 
content.assertTextsIn(query, el.map(_.name):_*) content.assertAttr(query,"href", el.map(_.link):_*) - test("#sideMenu2 a", flatten(topLevel)) - test("#sideMenu2>div>div>a", topLevel.nested) - test("#sideMenu2>div>div>div>a", topLevel.nested.flatMap(_.nested)) - test("#sideMenu2>div>div>div>div>a", topLevel.nested.flatMap(_.nested.flatMap(_.nested))) + test("#sideMenu2>div>span>a", topLevel :: Nil) + test("#sideMenu2>div>div>span>a", topLevel.nested) + test("#sideMenu2>div>div>div>span>a", topLevel.nested.flatMap(_.nested)) + test("#sideMenu2>div>div>div>div>span>a", topLevel.nested.flatMap(_.nested.flatMap(_.nested))) } @Test - def testBasicNavigation() = withGeneratedSite(testDocPath.resolve("basic")){ + def testBasicNavigation() = withGeneratedSite(testDocPath.resolve("basic")) { val topLevelNav = NavMenuTestEntry(projectName, "index.html", Seq( NavMenuTestEntry("A directory", "dir/index.html", Seq( NavMenuTestEntry("Nested in a directory", "dir/nested.html", Nil) )), NavMenuTestEntry("Adoc", "Adoc.html", Seq()), - NavMenuTestEntry("Basic test", "../index.html", Seq()), - NavMenuTestEntry("API", "../api/index.html", Seq( - NavMenuTestEntry("tests.site", "../api/tests/site.html", Seq( - NavMenuTestEntry("BrokenLink", "../api/tests/site/BrokenLink.html", Nil), - NavMenuTestEntry("BrokenLinkWiki", "../api/tests/site/BrokenLinkWiki.html", Nil), - NavMenuTestEntry("OtherPackageLink", "../api/tests/site/OtherPackageLink.html", Nil), - NavMenuTestEntry("OtherPackageLinkWiki", "../api/tests/site/OtherPackageLinkWiki.html", Nil), - NavMenuTestEntry("SamePackageLink", "../api/tests/site/SamePackageLink.html", Nil), - NavMenuTestEntry("SamePackageLinkWiki", "../api/tests/site/SamePackageLinkWiki.html", Nil), - NavMenuTestEntry("SomeClass", "../api/tests/site/SomeClass.html", Nil) + NavMenuTestEntry("API", "../index.html", Seq( + NavMenuTestEntry("tests.site", "../tests/site.html", Seq( + NavMenuTestEntry("BrokenLink", "../tests/site/BrokenLink.html", Nil), + 
NavMenuTestEntry("BrokenLinkWiki", "../tests/site/BrokenLinkWiki.html", Nil), + NavMenuTestEntry("OtherPackageLink", "../tests/site/OtherPackageLink.html", Nil), + NavMenuTestEntry("OtherPackageLinkWiki", "../tests/site/OtherPackageLinkWiki.html", Nil), + NavMenuTestEntry("SamePackageLink", "../tests/site/SamePackageLink.html", Nil), + NavMenuTestEntry("SamePackageLinkWiki", "../tests/site/SamePackageLinkWiki.html", Nil), + NavMenuTestEntry("SomeClass", "../tests/site/SomeClass.html", Nil) )), - NavMenuTestEntry("tests.site.some.other", "../api/tests/site/some/other.html", Seq( - NavMenuTestEntry("SomeOtherPackage", "../api/tests/site/some/other/SomeOtherPackage.html", Nil), + NavMenuTestEntry("tests.site.some.other", "../tests/site/some/other.html", Seq( + NavMenuTestEntry("SomeOtherPackage", "../tests/site/some/other/SomeOtherPackage.html", Nil), )) )), )) - testNavMenu("docs/Adoc.html", topLevelNav) - } \ No newline at end of file + testNavMenu("_docs/Adoc.html", topLevelNav) + } diff --git a/scaladoc/test/dotty/tools/scaladoc/site/SidebarParserTest.scala b/scaladoc/test/dotty/tools/scaladoc/site/SidebarParserTest.scala new file mode 100644 index 000000000000..72fc6515fdee --- /dev/null +++ b/scaladoc/test/dotty/tools/scaladoc/site/SidebarParserTest.scala @@ -0,0 +1,55 @@ +package dotty.tools.scaladoc +package site + +import org.junit.Test +import org.junit.Assert._ + +// TODO add negaitve and more details tests +class SidebarParserTest: + + private val sidebar = + """index: index.md + |subsection: + | - title: My title + | page: my-page1.md + | - page: my-page2.md + | - page: my-page3/subsection + | - title: Reference + | subsection: + | - page: my-page3.md + | hidden: true + | - index: my-page4/index.md + | subsection: + | - page: my-page4/my-page4.md + | - title: My subsection + | index: my-page5/index.md + | subsection: + | - page: my-page5/my-page5.md + | - subsection: + | - page: my-page7/my-page7.md + | - index: my-page6/index.md + | subsection: + | - 
index: my-page6/my-page6/index.md + | subsection: + | - page: my-page6/my-page6/my-page6.md + """.stripMargin + + @Test + def loadSidebar(): Unit = assertEquals( + Sidebar.Category( + None, + Some("index.md"), + List( + Sidebar.Page(Some("My title"), "my-page1.md", false), + Sidebar.Page(None, "my-page2.md", false), + Sidebar.Page(None, "my-page3/subsection", false), + Sidebar.Category(Some("Reference"), None, List(Sidebar.Page(None, "my-page3.md", true)), None), + Sidebar.Category(None, Some("my-page4/index.md"), List(Sidebar.Page(None, "my-page4/my-page4.md", false)), None), + Sidebar.Category(Some("My subsection"), Some("my-page5/index.md"), List(Sidebar.Page(None, "my-page5/my-page5.md", false)), None), + Sidebar.Category(None, None, List(Sidebar.Page(None, "my-page7/my-page7.md", false)), None), + Sidebar.Category(None, Some("my-page6/index.md"), List(Sidebar.Category(None, Some("my-page6/my-page6/index.md"), List(Sidebar.Page(None, "my-page6/my-page6/my-page6.md", false)), None)), None), + ), + None + ), + Sidebar.load(sidebar)(using testContext) + ) diff --git a/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala b/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala index 972f9d782f0d..1b3c53aba3c0 100644 --- a/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/site/SiteGeneratationTest.scala @@ -14,7 +14,7 @@ import dotty.tools.scaladoc.test.BuildInfo class SiteGeneratationTest extends BaseHtmlTest: def indexLinks(content: DocumentContext) = - content.assertAttr("p a","href", "docs/index.html") + content.assertAttr("p a","href", "_docs/index.html") def checkFile( path: String)( @@ -32,31 +32,29 @@ class SiteGeneratationTest extends BaseHtmlTest: } def testDocPages()(using ProjectContext) = - checkFile("docs/Adoc.html")(title = "Adoc", header = "Header in Adoc", parents = Seq(projectName)) - checkFile("docs/dir/index.html")(title = "A directory", header = "A directory", 
parents = Seq(projectName)) - checkFile("docs/dir/nested.html")( + checkFile("_docs/Adoc.html")(title = "Adoc", header = "Header in Adoc", parents = Seq(projectName)) + checkFile("_docs/dir/index.html")(title = "A directory", header = "A directory", parents = Seq(projectName)) + checkFile("_docs/dir/nested.html")( title = "Nested in a directory", header = "Nested in a directory", parents = Seq(projectName, "A directory")) def testDocIndexPage()(using ProjectContext) = - checkFile("docs/index.html")(title = projectName, header = s"$projectName in header") - - def testMainIndexPage()(using ProjectContext) = - checkFile("index.html")(title = "Basic test", header = "Header", parents = Seq(projectName), indexLinks) + checkFile("_docs/index.html")(title = projectName, header = s"$projectName in header") def testApiPages( mainTitle: String = "API", - parents: Seq[String] = Seq(projectName))(using ProjectContext) = - checkFile("api/index.html")( + parents: Seq[String] = Seq(projectName), + hasToplevelIndexIndex: Boolean = false)(using ProjectContext) = + checkFile((if hasToplevelIndexIndex then "api/" else "" )+ "index.html")( title = mainTitle, header = projectName, parents = parents ) - checkFile("api/tests/site.html")( + checkFile("tests/site.html")( title = "tests.site", header = "tests.site", parents = parents :+ mainTitle ) - checkFile("api/tests/site/SomeClass.html")( + checkFile("tests/site/SomeClass.html")( title = "SomeClass", header = "SomeClass", parents = parents ++ Seq(mainTitle, "tests.site") @@ -66,16 +64,15 @@ class SiteGeneratationTest extends BaseHtmlTest: def basicTest() = withGeneratedSite(testDocPath.resolve("basic")){ testDocPages() testDocIndexPage() - testMainIndexPage() testApiPages() - withHtmlFile("docs/Adoc.html"){ content => - content.assertAttr("p a","href", "../api/tests/site/SomeClass.html") + withHtmlFile("_docs/Adoc.html"){ content => + content.assertAttr("p a","href", "../tests/site/SomeClass.html") } - 
withHtmlFile("api/tests/site/SomeClass.html"){ content => + withHtmlFile("tests/site/SomeClass.html"){ content => content.assertAttr(".breadcrumbs a","href", - "../../../docs/index.html", "../../index.html", "../site.html", "SomeClass.html" + "../../_docs/index.html", "../../index.html", "../site.html", "SomeClass.html" ) } } @@ -84,16 +81,30 @@ class SiteGeneratationTest extends BaseHtmlTest: def noGlobalIndexTest() = withGeneratedSite(testDocPath.resolve("noGlobalIndex")){ testDocPages() testDocIndexPage() - testApiPages() + testApiPages(hasToplevelIndexIndex = false) } @Test def noIndexesTest() = withGeneratedSite(testDocPath.resolve("noIndexes")){ testDocPages() - testApiPages() + testApiPages(hasToplevelIndexIndex = false) } @Test def noExistingDocs() = withGeneratedSite(testDocPath.resolve("noExisting")){ - testApiPages(mainTitle = projectName, parents = Nil) - } \ No newline at end of file + testApiPages(mainTitle = projectName, parents = Nil, hasToplevelIndexIndex = false) + } + + @Test + def staticLinking() = withGeneratedSite(testDocPath.resolve("static-links")){ + + withHtmlFile("_docs/Adoc.html"){ content => + content.assertAttr("p a","href", + "dir/html.html", + "dir/name...with..dots..html", + "dir/name.with.md.and.html.html", + "dir/nested.html", + "dir/nested.svg" + ) + } + } diff --git a/scaladoc/test/dotty/tools/scaladoc/site/StaticSiteContextTest.scala b/scaladoc/test/dotty/tools/scaladoc/site/StaticSiteContextTest.scala deleted file mode 100644 index 126a6b85d04b..000000000000 --- a/scaladoc/test/dotty/tools/scaladoc/site/StaticSiteContextTest.scala +++ /dev/null @@ -1,27 +0,0 @@ -package dotty.tools.scaladoc -package site - -import org.junit.Test -import org.junit.Assert._ - -// TODO add negaitve and more details tests -class SidebarParserTest: - - private val sidebar = """sidebar: - - title: Blog - url: blog/index.html - - title: Reference - subsection: - - title: Overview - url: docs/reference/overview.html - - title: New Types - subsection: 
- - title: Intersection types - url: docs/reference/new-types/intersection-types.html - - title: Union types - url: docs/reference/new-types/union-types.html - """ - - @Test - def loadSidebar(): Unit = - assertEquals(2, Sidebar.load(sidebar).size) diff --git a/scaladoc/test/dotty/tools/scaladoc/site/TemplateFileTests.scala b/scaladoc/test/dotty/tools/scaladoc/site/TemplateFileTests.scala index d7ff5340f46f..cbb942e36476 100644 --- a/scaladoc/test/dotty/tools/scaladoc/site/TemplateFileTests.scala +++ b/scaladoc/test/dotty/tools/scaladoc/site/TemplateFileTests.scala @@ -8,6 +8,7 @@ import org.junit.Test import java.nio.file.Files class TemplateFileTests: + given staticSiteContext: StaticSiteContext = testDocContext().staticSiteContext.get private def testTemplate(code: String, ext: String = "html")(op: TemplateFile => Unit): Unit = val tmpFile = Files.createTempFile("headerTests", s".${ext}").toFile() try @@ -42,7 +43,7 @@ class TemplateFileTests: |code""".stripMargin ) { t => assertEquals(t.rawCode, "code") - assertEquals(t.title, "myTitle") + assertEquals(t.title.name, "myTitle") } diff --git a/scaladoc/test/dotty/tools/scaladoc/snippets/SnippetCompilerTest.scala b/scaladoc/test/dotty/tools/scaladoc/snippets/SnippetCompilerTest.scala new file mode 100644 index 000000000000..dc0bcefbc1cc --- /dev/null +++ b/scaladoc/test/dotty/tools/scaladoc/snippets/SnippetCompilerTest.scala @@ -0,0 +1,68 @@ +package dotty.tools.scaladoc +package snippets + +import org.junit.Test +import org.junit.Assert._ +import dotty.tools.io.{AbstractFile, VirtualDirectory} + +class SnippetCompilerTest { + val compiler = SnippetCompiler( + Seq(SnippetCompilerSetting(testContext.settings.usejavacp, true)) + ) + def wrapFn: String => WrappedSnippet = (str: String) => WrappedSnippet( + str, + Some("test"), + Nil, + Nil, + 0, + 0 + ) + + def runTest(str: String) = compiler.compile(wrapFn(str), SnippetCompilerArg(SCFlags.Compile), dotty.tools.dotc.util.SourceFile.virtual("test", str)) + + private 
def assertSuccessfulCompilation(res: SnippetCompilationResult): Unit = res match { + case r @ SnippetCompilationResult(_, isSuccessful, _, messages) => assert(isSuccessful, r.messages.map(_.message).mkString("\n")) + } + + private def assertFailedCompilation(res: SnippetCompilationResult): Unit = res match { + case r @ SnippetCompilationResult(_, isSuccessful, _, messages) => assert(!isSuccessful, r.messages.map(_.message).mkString("\n")) + } + + def assertSuccessfulCompilation(str: String): Unit = assertSuccessfulCompilation(runTest(str)) + + def assertFailedCompilation(str: String): Unit = assertFailedCompilation(runTest(str)) + + def assertMessageLevelPresent(str: String, level: MessageLevel): Unit = assertMessageLevelPresent(runTest(str), level) + + def assertMessageLevelPresent(res: SnippetCompilationResult, level: MessageLevel): Unit = res match { + case r @ SnippetCompilationResult(_, isSuccessful, _, messages) => assertTrue( + s"Expected message with level: ${level.text}. Got result ${r.messages.map(_.message).mkString("\n")}", + messages.exists(_.level == level) + ) + } + + + @Test + def snippetCompilerTest: Unit = { + val simpleCorrectSnippet = s""" + |class A: + | val b: String = "asd" + |""".stripMargin + + val simpleIncorrectSnippet = s""" + |class A: + | val b: String + |""".stripMargin + val warningSnippet = s""" + |class A: + | val a: Int = try { + | 5 + | } + |""".stripMargin + assertSuccessfulCompilation(simpleCorrectSnippet) + assertFailedCompilation(simpleIncorrectSnippet) + assertMessageLevelPresent(simpleIncorrectSnippet, MessageLevel.Error) + assertMessageLevelPresent(warningSnippet, MessageLevel.Warning) + //No test for Info + } +} \ No newline at end of file diff --git a/scaladoc/test/dotty/tools/scaladoc/snippets/SnippetsE2eTest.scala b/scaladoc/test/dotty/tools/scaladoc/snippets/SnippetsE2eTest.scala new file mode 100644 index 000000000000..cf0c52ee2a50 --- /dev/null +++ b/scaladoc/test/dotty/tools/scaladoc/snippets/SnippetsE2eTest.scala 
@@ -0,0 +1,166 @@ +package dotty.tools.scaladoc +package snippets + +import scala.io.Source + +import org.junit.Test +import org.junit.Assert._ +import dotty.tools.io.{AbstractFile, VirtualDirectory} +import dotty.tools.scaladoc.test.BuildInfo +import scala.util.matching.Regex +import dotty.tools.dotc.reporting.{ Diagnostic, StoreReporter } + +import com.vladsch.flexmark.util.{ast => mdu, sequence} +import com.vladsch.flexmark.{ast => mda} +import com.vladsch.flexmark.formatter.Formatter +import com.vladsch.flexmark.util.options.MutableDataSet +import collection.JavaConverters._ + +import dotty.tools.scaladoc.tasty.comments.markdown.ExtendedFencedCodeBlock + +abstract class SnippetsE2eTest(testName: String, flag: SCFlags) extends ScaladocTest(testName): + + import SnippetsE2eTest._ + + val source = Source.fromFile(s"${BuildInfo.test_testcasesSourceRoot}/tests/$testName.scala") + + val snippetsCount = source.getLines.filter(_.indexOf("```scala") != -1).size + + def report(str: String) = s"""|In test $testName: + |$str""".stripMargin + + override def args = Scaladoc.Args( + name = "test", + tastyDirs = BuildInfo.test_testcasesOutputDir.map(java.io.File(_)).toSeq, + tastyFiles = tastyFiles(testName), + output = getTempDir().getRoot, + projectVersion = Some("1.0"), + snippetCompiler = List(s"${BuildInfo.test_testcasesSourceRoot}/tests=${flag.flagName}") + ) + + override def withModule(op: DocContext ?=> Module => Unit) = + given DocContext = DocContext(args, testContext.fresh.setReporter(new StoreReporter)) + op(ScalaModuleProvider.mkModule()) + + private def checkWrappedSnippet(ws: WrappedSnippet, si: SnippetInfo) = { + assertTrue( + report( + s"Invalid outer line offset: ${ws.outerLineOffset}. " + + s"Expected: ${si.outerOffset.line}\n" + ), + ws.outerLineOffset == si.outerOffset.line + ) + assertTrue( + report( + s"Invalid outer column offset: ${ws.outerColumnOffset}. 
" + + s"Expected: ${si.outerOffset.column}\n" + ), + ws.outerColumnOffset == si.outerOffset.column + ) + assertTrue( + report( + s"Invalid inner line offset: ${ws.innerLineOffset}. " + + s"Expected: ${si.innerOffset.line}\n" + ), + ws.innerLineOffset == si.innerOffset.line + ) + assertTrue( + report( + s"Invalid inner column offset: ${ws.innerColumnOffset}. " + + s"Expected: ${si.innerOffset.column}\n" + ), + ws.innerColumnOffset == si.innerOffset.column + ) + } + + private def checkMessages(compilationMessages: Seq[SnippetCompilerMessage], messages: Seq[Message], ws: WrappedSnippet) = { + val compilationMessagesWithPos = compilationMessages.collect { + case m @ SnippetCompilerMessage(Some(_), _, _) => m + }.toList + def isSamePosition(msg: Message, cmsg: SnippetCompilerMessage): Boolean = + cmsg.level == msg.level && (cmsg.position.get.srcPos.line + 1) == msg.offset.line && cmsg.position.get.srcPos.column == msg.offset.column + + def checkRelativeLines(msg: Message, cmsg: SnippetCompilerMessage): Seq[String] = + val pos = cmsg.position.get + if !(pos.relativeLine == pos.srcPos.line + ws.innerLineOffset - ws.outerLineOffset + 1) then Seq( + s"Expected ${msg.level.text} message at relative line: ${pos.srcPos.line + ws.innerLineOffset - ws.outerLineOffset + 1} " + + s"but found at ${pos.relativeLine}" + ) else Nil + + val mResult = compilationMessagesWithPos.flatMap { cmsg => + messages + .find(msg => isSamePosition(msg, cmsg)) + .fold(Seq(s"Unexpected compilation message: ${cmsg.message} at relative line: ${cmsg.position.fold(-1)(_.relativeLine)}"))(_ => Seq()) + } + + val result = mResult ++ messages.flatMap { msg => + compilationMessagesWithPos + .find(cmsg => isSamePosition(msg, cmsg)) + .fold(Seq(s"Expected ${msg.level.text} message at ${msg.offset.line}:${msg.offset.column}.")) { resp => + checkRelativeLines(msg, resp) + } + } + + if !result.isEmpty then { + val errors = result.mkString("\n") + val foundMessages = compilationMessages.map(m => s"${m.level} at 
${m.position.get.srcPos.line}:${m.position.get.srcPos.column}").mkString("\n") + throw AssertionError(Seq("Errors:", errors,"Found:", foundMessages).mkString("\n", "\n", "\n")) + } + } + + def moduleTestingFunc: DocContext ?=> Module => Unit = (m: Module) => { + val snippets = m.members.values + .flatMap(_.docs) + .map(_.body) + .collect { case n: mdu.Node => n } + .flatMap(_.getDescendants.asScala) + .collect { case en: ExtendedFencedCodeBlock => en } + + assertTrue(report(s"Expected $snippetsCount snippets but found ${snippets.size}"), snippets.size == snippetsCount) + + snippets.foreach { snippet => + val configStrs = (snippet.getPrevious() match { + case c: mdu.ContentNode => + c.getContentChars.toString.split("\n").map(_.trim) + case _ => throw AssertionError(s"Not found info for snippet ${snippet.codeBlock.getContentChars.toString}") + }).toList + val info = SnippetInfo(configStrs.head) + val messages = configStrs.tail.map(Message.apply) + val compilationResult = snippet.compilationResult match { + case Some(res) => res + case None => throw AssertionError(s"Snippet validation failed:\n${snippet.codeBlock.getContentChars.toString}") + } + val wrappedSnippet = compilationResult.wrappedSnippet + checkWrappedSnippet(wrappedSnippet, info) + checkMessages(compilationResult.messages, messages, wrappedSnippet) + } + } + + def runTest = { + org.junit.Assume.assumeTrue("Running on Windows", java.io.File.separatorChar == '/') + withModule(moduleTestingFunc) + } + +object SnippetsE2eTest: + case class Offset(line: Int, column: Int) + case class SnippetInfo(outerOffset: Offset, innerOffset: Offset) + case class Message(level: MessageLevel, offset: Offset) + object SnippetInfo: + def apply(str: String): SnippetInfo = str match { + case snippetInfoRegex(ol, oc, il, ic) => SnippetInfo( + Offset(ol.toInt, oc.toInt), + Offset(il.toInt, ic.toInt) + ) + } + + object Message: + def apply(str: String): Message = str match { + case errorRegex(ln, cl) => Message(MessageLevel.Error, 
Offset(ln.toInt, cl.toInt)) + case warningRegex(ln, cl) => Message(MessageLevel.Warning, Offset(ln.toInt, cl.toInt)) + } + val snippetInfoRegex = (raw"SNIPPET\(" + + raw"OUTERLINEOFFSET:(\d+),OUTERCOLUMNOFFSET:(\d+)," + + raw"INNERLINEOFFSET:(\d+),INNERCOLUMNOFFSET:(\d+)\)").r + + val warningRegex = raw"WARNING\(LINE:(\d+),COLUMN:(\d+)\)".r + val errorRegex = raw"ERROR\(LINE:(\d+),COLUMN:(\d+)\)".r \ No newline at end of file diff --git a/scaladoc/test/dotty/tools/scaladoc/snippets/SnippetsE2eTestcases.scala b/scaladoc/test/dotty/tools/scaladoc/snippets/SnippetsE2eTestcases.scala new file mode 100644 index 000000000000..7ec71600c68a --- /dev/null +++ b/scaladoc/test/dotty/tools/scaladoc/snippets/SnippetsE2eTestcases.scala @@ -0,0 +1,10 @@ +package dotty.tools.scaladoc +package snippets + +class SnippetE2eTestcase1 extends SnippetsE2eTest("snippetTestcase1", SCFlags.Compile) + + +class SnippetE2eTestcase2 extends SnippetsE2eTest("snippetTestcase2", SCFlags.Compile) + + +class SnippetE2eTestcase3 extends SnippetsE2eTest("snippetTestcase3", SCFlags.Compile) diff --git a/scaladoc/test/dotty/tools/scaladoc/source-links/SourceLinksTest.scala b/scaladoc/test/dotty/tools/scaladoc/source-links/SourceLinksTest.scala new file mode 100644 index 000000000000..1cc4d9c7a7ab --- /dev/null +++ b/scaladoc/test/dotty/tools/scaladoc/source-links/SourceLinksTest.scala @@ -0,0 +1,168 @@ +package dotty.tools.scaladoc +package sourcelinks + +import java.nio.file._ +import org.junit.Assert._ +import org.junit.Test + +class SourceLinkTest: + + @Test + def testBasicFailures() = + def testFailure(template: String, messagePart: String) = + val res = SourceLinkParser(None).parse(template) + assertTrue(s"Expected failure containing $messagePart: $res", res.left.exists(_.contains(messagePart))) + + val resWithVersion = SourceLinkParser(Some("develop")).parse(template) + assertEquals(res, resWithVersion) + + testFailure("ala://ma/kota", "known provider") + testFailure("ala=ala=ala://ma/kota", 
"source link syntax") + testFailure("ala=ala=ala", "source link syntax") + testFailure("""€{TPL_OWNER}""", "scaladoc") + + + @Test + def testProperTemplates() = + def test(template: String) = + val res = try SourceLinkParser(Some("develop")).parse(template) catch + case e: Exception => throw RuntimeException(s"When testing $template", e) + assertTrue(s"Bad template: $template", res.isRight) + + + Seq( + "github://lampepfl/dotty", + "gitlab://lampepfl/dotty", + "github://lampepfl/dotty/branch/withslash", + "https://github.com/scala/scala/blob/2.13.x€{FILE_PATH_EXT}#€{FILE_LINE}" + ).foreach{ template => + test(template) + } + + + @Test + def testSourceProviderWithoutRevision() = + Seq("github", "gitlab").foreach { provider => + val template = s"$provider://ala/ma" + val res = SourceLinkParser(None).parse(template) + assertTrue(s"Expected failure containing missing revision: $res", res.left.exists(_.contains("revision"))) + } + +class SourceLinksTest: + // TODO (https://github.com/lampepfl/scaladoc/issues/240): configure source root + val projectRoot = Paths.get("").toAbsolutePath() + + val edit: Operation = "edit" // union types need explicit singletons + + type Args = String | (String, Operation) | (String, Int) | (String, Int, Operation) + + private def testLink(config: Seq[String], revision: Option[String])(cases: (Args, String | None.type)*): Unit = + val links = SourceLinks.load(config, revision)(using testContext) + cases.foreach { case (args, expected) => + val res = args match + case path: String => links.pathTo(projectRoot.resolve(path)) + case (path: String, line: Int) => links.pathTo(projectRoot.resolve(path), line = Some(line)) + case (path: String, operation: Operation) => links.pathTo(projectRoot.resolve(path), operation = operation) + case (path: String, line: Int, operation: Operation) => links.pathTo(projectRoot.resolve(path), operation = operation, line = Some(line)) + + val expectedRes = expected match + case s: String => Some(s) + case 
None => None + + assertEquals(s"For path $args", expectedRes, res) + } + + @Test + def testBasicPaths = + testLink(Seq("github://lampepfl/dotty"), Some("develop"))( + "project/Build.scala" -> "https://github.com/lampepfl/dotty/blob/develop/project/Build.scala", + ("project/Build.scala", 54) -> "https://github.com/lampepfl/dotty/blob/develop/project/Build.scala#L54", + ("project/Build.scala", edit) -> "https://github.com/lampepfl/dotty/edit/develop/project/Build.scala", + ("project/Build.scala", 54, edit) -> "https://github.com/lampepfl/dotty/edit/develop/project/Build.scala#L54", + ) + + testLink(Seq("github://lampepfl/dotty/dev"), Some("develop"))( + "project/Build.scala" -> "https://github.com/lampepfl/dotty/blob/dev/project/Build.scala", + ("project/Build.scala", 54) -> "https://github.com/lampepfl/dotty/blob/dev/project/Build.scala#L54", + ("project/Build.scala", edit) -> "https://github.com/lampepfl/dotty/edit/dev/project/Build.scala", + ("project/Build.scala", 54, edit) -> "https://github.com/lampepfl/dotty/edit/dev/project/Build.scala#L54", + ) + + testLink(Seq("github://lampepfl/dotty/dev#src/lib"), None)( + "project/Build.scala" -> "https://github.com/lampepfl/dotty/blob/dev/src/lib/project/Build.scala", + ("project/Build.scala", 54) -> "https://github.com/lampepfl/dotty/blob/dev/src/lib/project/Build.scala#L54", + ("project/Build.scala", edit) -> "https://github.com/lampepfl/dotty/edit/dev/src/lib/project/Build.scala", + ("project/Build.scala", 54, edit) -> "https://github.com/lampepfl/dotty/edit/dev/src/lib/project/Build.scala#L54", + ) + + testLink(Seq("github://lampepfl/dotty/dev#src/lib"), Some("develop"))( + "project/Build.scala" -> "https://github.com/lampepfl/dotty/blob/dev/src/lib/project/Build.scala", + ("project/Build.scala", 54) -> "https://github.com/lampepfl/dotty/blob/dev/src/lib/project/Build.scala#L54", + ("project/Build.scala", edit) -> 
"https://github.com/lampepfl/dotty/edit/dev/src/lib/project/Build.scala", + ("project/Build.scala", 54, edit) -> "https://github.com/lampepfl/dotty/edit/dev/src/lib/project/Build.scala#L54", + ) + + testLink(Seq("github://lampepfl/dotty#src/lib"), Some("develop"))( + "project/Build.scala" -> "https://github.com/lampepfl/dotty/blob/develop/src/lib/project/Build.scala", + ("project/Build.scala", 54) -> "https://github.com/lampepfl/dotty/blob/develop/src/lib/project/Build.scala#L54", + ("project/Build.scala", edit) -> "https://github.com/lampepfl/dotty/edit/develop/src/lib/project/Build.scala", + ("project/Build.scala", 54, edit) -> "https://github.com/lampepfl/dotty/edit/develop/src/lib/project/Build.scala#L54", + ) + + testLink(Seq("gitlab://lampepfl/dotty"), Some("develop"))( + "project/Build.scala" -> "https://gitlab.com/lampepfl/dotty/-/blob/develop/project/Build.scala", + ("project/Build.scala", 54) -> "https://gitlab.com/lampepfl/dotty/-/blob/develop/project/Build.scala#L54", + ("project/Build.scala", edit) -> "https://gitlab.com/lampepfl/dotty/-/edit/develop/project/Build.scala", + ("project/Build.scala", 54, edit) -> "https://gitlab.com/lampepfl/dotty/-/edit/develop/project/Build.scala#L54", + ) + + testLink(Seq("€{FILE_PATH}.scala#€{FILE_LINE}"), Some("develop"))( + "project/Build.scala" -> "/project/Build.scala#", + ("project/Build.scala", 54) -> "/project/Build.scala#54", + ("project/Build.scala", edit) -> "/project/Build.scala#", + ("project/Build.scala", 54, edit) -> "/project/Build.scala#54", + ) + + testLink(Seq("https://github.com/scala/scala/blob/2.13.x€{FILE_PATH_EXT}#L€{FILE_LINE}"), Some("develop"))( + "project/Build.scala" -> "https://github.com/scala/scala/blob/2.13.x/project/Build.scala#L", + ("project/Build.scala", 54) -> "https://github.com/scala/scala/blob/2.13.x/project/Build.scala#L54", + ("project/Build.scala", edit) -> "https://github.com/scala/scala/blob/2.13.x/project/Build.scala#L", + 
("project/Build.scala", 54, edit) -> "https://github.com/scala/scala/blob/2.13.x/project/Build.scala#L54", + ) + + testLink(Seq("https://github.com/scala/scala/blob/2.13.x€{FILE_PATH}.scala#L€{FILE_LINE}"), Some("develop"))( + "project/Build.scala" -> "https://github.com/scala/scala/blob/2.13.x/project/Build.scala#L", + ("project/Build.scala", 54) -> "https://github.com/scala/scala/blob/2.13.x/project/Build.scala#L54", + ("project/Build.scala", edit) -> "https://github.com/scala/scala/blob/2.13.x/project/Build.scala#L", + ("project/Build.scala", 54, edit) -> "https://github.com/scala/scala/blob/2.13.x/project/Build.scala#L54", + ) + + testLink(Seq("github://lampepfl/dotty/branch/withslash#src/lib"), None)( + "project/Build.scala" -> "https://github.com/lampepfl/dotty/blob/branch/withslash/src/lib/project/Build.scala", + ("project/Build.scala", 54) -> "https://github.com/lampepfl/dotty/blob/branch/withslash/src/lib/project/Build.scala#L54", + ("project/Build.scala", edit) -> "https://github.com/lampepfl/dotty/edit/branch/withslash/src/lib/project/Build.scala", + ("project/Build.scala", 54, edit) -> "https://github.com/lampepfl/dotty/edit/branch/withslash/src/lib/project/Build.scala#L54", + ) + + @Test + def testBasicPrefixedPaths = + testLink(Seq("src=gitlab://lampepfl/dotty"), Some("develop"))( + "src/lib/core.scala" -> "https://gitlab.com/lampepfl/dotty/-/blob/develop/lib/core.scala", + ("src/lib/core.scala", 33, edit) -> "https://gitlab.com/lampepfl/dotty/-/edit/develop/lib/core.scala#L33", + ("src/lib/core.scala", 33, edit) -> "https://gitlab.com/lampepfl/dotty/-/edit/develop/lib/core.scala#L33", + "build.sbt" -> None + ) + + + @Test + def prefixedPaths = + testLink(Seq( + "src/generated=€{FILE_PATH_EXT}#€{FILE_LINE}", + "src=gitlab://lampepfl/dotty", + "github://lampepfl/dotty" + ), Some("develop"))( + ("project/Build.scala", 54, edit) -> 
"https://github.com/lampepfl/dotty/edit/develop/project/Build.scala#L54", + ("src/lib/core.scala", 33, edit) -> "https://gitlab.com/lampepfl/dotty/-/edit/develop/lib/core.scala#L33", + ("src/generated.scala", 33, edit) -> "https://gitlab.com/lampepfl/dotty/-/edit/develop/generated.scala#L33", + ("src/generated/template.scala", 1, edit) -> "/template.scala#1" + ) diff --git a/scaladoc/test/dotty/tools/scaladoc/tasty/comments/CommentExpanderTests.scala b/scaladoc/test/dotty/tools/scaladoc/tasty/comments/CommentExpanderTests.scala index 63bb4fb21f03..b27642ae3dc4 100644 --- a/scaladoc/test/dotty/tools/scaladoc/tasty/comments/CommentExpanderTests.scala +++ b/scaladoc/test/dotty/tools/scaladoc/tasty/comments/CommentExpanderTests.scala @@ -1,5 +1,6 @@ package dotty.tools.scaladoc -package tasty.comments +package tasty +package comments import scala.quoted._ @@ -9,25 +10,25 @@ import dotty.tools.scaladoc.tasty.util._ import dotty.tools.scaladoc.tasty.TastyParser class CommentExpanderTests { - def check(using quoted.Quotes)(): Unit = + def check(using Quotes)(): Unit = assertCommentEquals( - qr.Symbol.requiredClass("tests.B").memberMethod("otherMethod").head, + reflect.Symbol.requiredClass("tests.B").methodMember("otherMethod").head, "/** This is my foo: Bar, actually. */", ) assertCommentEquals( - qr.Symbol.requiredClass("tests.C"), + reflect.Symbol.requiredClass("tests.C"), "/** This is foo: Foo expanded. */", ) assertCommentEquals( - qr.Symbol.requiredModule("tests.O").memberMethod("method").head, + reflect.Symbol.requiredModule("tests.O").methodMember("method").head, "/** This is foo: O's foo. 
*/", ) def assertCommentEquals( - using quoted.Quotes + using Quotes )( - rsym: quotes.reflect.Symbol, + rsym: reflect.Symbol, str: String ): Unit = import dotty.tools.dotc @@ -41,13 +42,12 @@ class CommentExpanderTests { import scala.tasty.inspector.OldTastyInspector class Inspector extends OldTastyInspector: - def processCompilationUnit(using quoted.Quotes)(root: quotes.reflect.Tree): Unit = () + def processCompilationUnit(using Quotes)(root: reflect.Tree): Unit = () - override def postProcess(using quoted.Quotes): Unit = + override def postProcess(using Quotes): Unit = check() Inspector().inspectTastyFiles(TestUtils.listOurClasses()) } - private def qr(using quoted.Quotes): quotes.reflect.type = quotes.reflect } diff --git a/scaladoc/test/dotty/tools/scaladoc/tasty/comments/IntegrationTest.scala b/scaladoc/test/dotty/tools/scaladoc/tasty/comments/IntegrationTest.scala index 0a5329cf6333..71f4b42f3320 100644 --- a/scaladoc/test/dotty/tools/scaladoc/tasty/comments/IntegrationTest.scala +++ b/scaladoc/test/dotty/tools/scaladoc/tasty/comments/IntegrationTest.scala @@ -18,11 +18,11 @@ abstract class BaseIntegrationTest(pck: String) extends BaseHtmlTest: "", "" // each represent a link ) - withHtmlFile(s"api/tests/$pck/BrokenLinks.html")(checkUnresolved) + withHtmlFile(s"tests/$pck/BrokenLinks.html")(checkUnresolved) val otherPackagePath = "../commonlinks/SomeOtherPackage.html" - withHtmlFile(s"api/tests/$pck/OtherPackageLink.html")(checkDocLinks(otherPackagePath)) + withHtmlFile(s"tests/$pck/OtherPackageLink.html")(checkDocLinks(otherPackagePath)) // OtherPackageMembers - does not work, TODO? - withHtmlFile(s"api/tests/$pck/SamePackageLink.html")(checkDocLinks("SomeClass.html")) + withHtmlFile(s"tests/$pck/SamePackageLink.html")(checkDocLinks("SomeClass.html")) // SamePackageMembers - does not work, TODO? 
} diff --git a/scaladoc/test/dotty/tools/scaladoc/tasty/comments/MemberLookupTests.scala b/scaladoc/test/dotty/tools/scaladoc/tasty/comments/MemberLookupTests.scala index dafd4003fa6f..0b9b88fa5bb8 100644 --- a/scaladoc/test/dotty/tools/scaladoc/tasty/comments/MemberLookupTests.scala +++ b/scaladoc/test/dotty/tools/scaladoc/tasty/comments/MemberLookupTests.scala @@ -13,6 +13,8 @@ class LookupTestCases[Q <: Quotes](val q: Quotes) { def testAll(): Unit = { testOwnerlessLookup() + testOwnerlessLookupOfInherited() + testOwnerlessLookupOfClassWithinPackageWithPackageObject() testOwnedLookup() testStrictMemberLookup() } @@ -38,12 +40,15 @@ class LookupTestCases[Q <: Quotes](val q: Quotes) { "java.util.AbstractCollection" -> cls("java.util.AbstractCollection"), "java.lang.String" -> cls("java.lang.String"), + "java.util.Formatter" -> cls("java.util.Formatter"), + "java.io.Flushable" -> cls("java.io.Flushable"), + "java.util.List" -> cls("java.util.List"), "tests.lookupInheritedMembers.pack1.A.x" -> cls("tests.lookupInheritedMembers.pack1.A").fun("x"), "tests.lookupInheritedMembers.pack2.B.x" -> - cls("tests.lookupInheritedMembers.pack1.A").fun("x"), + cls("tests.lookupInheritedMembers.pack2.B").fun("x"), ) cases.foreach { case (query, sym) => @@ -59,6 +64,49 @@ class LookupTestCases[Q <: Quotes](val q: Quotes) { assertSame(query, target, lookedUp) } + /** + * We cannot test for cls().fun() because it returns parent fun symbol from tasty. 
Hence we will look for member (val, def, type) but compare its owner to just cls() + */ + def testOwnerlessLookupOfInherited(): Unit = { + val cases = List[(String, Sym)]( + "tests.lookupInheritedMembers.pack2.B.x" -> + cls("tests.lookupInheritedMembers.pack2.B"), + + "tests.lookupInheritedMembers.pack2.B.y" -> + cls("tests.lookupInheritedMembers.pack2.B"), + + "tests.lookupInheritedMembers.pack2.B.MyType" -> + cls("tests.lookupInheritedMembers.pack2.B"), + ) + + cases.foreach { case (query, sym) => + val target = sym.symbol + val lookupRes = MemberLookup.lookupOpt(parseQuery(query), None) + assertTrue(s"Couldn't look up: $query", lookupRes.nonEmpty) + val Some((_ , _, Some(owner))) = lookupRes + assertSame(query, target, owner) + } + } + + /** + * Classes should not have owner of package object + */ + def testOwnerlessLookupOfClassWithinPackageWithPackageObject(): Unit = { + val cases = List[(String, Sym)]( + "<:<" -> + cls("scala.<:<"), + ) + + cases.foreach { case (query, sym) => + val target = sym.symbol + val lookupRes = MemberLookup.lookupOpt(parseQuery(query), Some(cls("scala.=:=").symbol)) + assertTrue(s"Couldn't look up: $query", lookupRes.nonEmpty) + println(lookupRes) + val Some((_ , _, owner)) = lookupRes + assertSame(query, None, owner) + } + } + def testOwnedLookup(): Unit = { val cases = List[((Sym, String), Sym)]( cls("tests.A") -> "tests.Methods.simple" -> cls("tests.Methods").fun("simple"), @@ -138,9 +186,9 @@ class LookupTestCases[Q <: Quotes](val q: Quotes) { if s.flags.is(q.reflect.Flags.Module) then s.moduleClass else s Sym(hackResolveModule(symbol.declaredField(name))) def fun(name: String) = - val List(sym) = symbol.memberMethod(name) + val List(sym) = symbol.methodMember(name) Sym(sym) - def tpe(name: String) = Sym(symbol.memberType(name)) + def tpe(name: String) = Sym(symbol.typeMember(name)) } def cls(fqn: String) = Sym(q.reflect.Symbol.classSymbol(fqn)) diff --git a/scaladoc/test/dotty/tools/scaladoc/testUtils.scala 
b/scaladoc/test/dotty/tools/scaladoc/testUtils.scala index cfab383c9ba8..21ed7398f74e 100644 --- a/scaladoc/test/dotty/tools/scaladoc/testUtils.scala +++ b/scaladoc/test/dotty/tools/scaladoc/testUtils.scala @@ -7,6 +7,7 @@ import dotty.tools.dotc.interfaces.Diagnostic.{ERROR, INFO, WARNING} import dotty.tools.scaladoc.test.BuildInfo import org.junit.Assert._ import java.io.File +import java.nio.file.Paths case class ReportedDiagnostics(errors: List[Diagnostic], warnings: List[Diagnostic], infos: List[Diagnostic]): @@ -54,13 +55,17 @@ def testArgs(files: Seq[File] = Nil, dest: File = new File("notUsed")) = Scalado name = "Test Project Name", output = dest, tastyFiles = files, + docsRoot = Some(""), ) -def testContext = (new ContextBase).initialCtx.fresh.setReporter(new TestReporter) +def testContext = + val ctx = (new ContextBase).initialCtx.fresh.setReporter(new TestReporter) + ctx.setSetting(ctx.settings.usejavacp, true) + ctx def testDocContext(files: Seq[File] = Nil) = DocContext(testArgs(files), testContext) -def tastyFiles(name: String, allowEmpty: Boolean = false) = +def tastyFiles(name: String, allowEmpty: Boolean = false, rootPck: String = "tests") = def listFilesSafe(dir: File) = Option(dir.listFiles).getOrElse { throw AssertionError(s"$dir not found. 
The test name is incorrect or scaladoc-testcases were not recompiled.") } @@ -69,7 +74,9 @@ def tastyFiles(name: String, allowEmpty: Boolean = false) = case f if f.getName endsWith ".tasty" => f :: Nil case _ => Nil } - val files = BuildInfo.test_testcasesOutputDir.flatMap(p => collectFiles(File(s"$p/tests/$name"))) + val outputDir = BuildInfo.test_testcasesOutputDir + val files = outputDir.flatMap(p => collectFiles(File(s"$p/$rootPck/$name"))) assert(files.nonEmpty || allowEmpty) files.toSeq +def testDocPath = Paths.get(BuildInfo.testDocumentationRoot) \ No newline at end of file diff --git a/semanticdb/project/build.properties b/semanticdb/project/build.properties index 7c58a83abffb..3161d2146c63 100644 --- a/semanticdb/project/build.properties +++ b/semanticdb/project/build.properties @@ -1 +1 @@ -sbt.version=1.2.6 +sbt.version=1.6.1 diff --git a/staging/src/scala/quoted/staging/QuoteDriver.scala b/staging/src/scala/quoted/staging/QuoteDriver.scala index cc3ecfe1ceec..8de0cd218b23 100644 --- a/staging/src/scala/quoted/staging/QuoteDriver.scala +++ b/staging/src/scala/quoted/staging/QuoteDriver.scala @@ -4,6 +4,7 @@ package staging import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.Driver import dotty.tools.dotc.core.Contexts.{Context, ContextBase, FreshContext} +import dotty.tools.dotc.quoted.QuotesCache import dotty.tools.io.{AbstractFile, Directory, PlainDirectory, VirtualDirectory} import dotty.tools.repl.AbstractFileClassLoader import dotty.tools.dotc.reporting._ @@ -33,8 +34,11 @@ private class QuoteDriver(appClassloader: ClassLoader) extends Driver: new VirtualDirectory("") end outDir - val ctx0 = setup(settings.compilerArgs.toArray :+ "dummy.scala", initCtx.fresh).get._2 - val ctx = setCompilerSettings(ctx0.fresh.setSetting(ctx0.settings.outputDir, outDir), settings) + val ctx = { + val ctx0 = QuotesCache.init(initCtx.fresh) + val ctx1 = setup(settings.compilerArgs.toArray :+ "dummy.scala", ctx0).get._2 + 
setCompilerSettings(ctx1.fresh.setSetting(ctx1.settings.outputDir, outDir), settings) + } new QuoteCompiler().newRun(ctx).compileExpr(exprBuilder) match case Right(value) => diff --git a/stdlib-bootstrapped-tasty-tests/test/BootstrappedStdLibTASYyTest.scala b/stdlib-bootstrapped-tasty-tests/test/BootstrappedStdLibTASYyTest.scala index 54b89b1f5e3b..100292b1f5a7 100644 --- a/stdlib-bootstrapped-tasty-tests/test/BootstrappedStdLibTASYyTest.scala +++ b/stdlib-bootstrapped-tasty-tests/test/BootstrappedStdLibTASYyTest.scala @@ -109,8 +109,8 @@ object BootstrappedStdLibTASYyTest: () } val tastyFiles = scalaLibTastyPaths.filterNot(blacklisted) - val hasErrors = TastyInspector.inspectTastyFiles(tastyFiles.map(x => scalaLibClassesPath.resolve(x).toString))(inspector) - assert(!hasErrors, "Errors reported while loading from TASTy") + val isSuccess = TastyInspector.inspectTastyFiles(tastyFiles.map(x => scalaLibClassesPath.resolve(x).toString))(inspector) + assert(isSuccess, "Errors reported while loading from TASTy") def compileFromTastyInJar(blacklisted: Set[String]): Unit = { val driver = new dotty.tools.dotc.Driver diff --git a/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala b/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala index f2ea76e30b7b..4c6440530ba2 100644 --- a/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala +++ b/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala @@ -10,6 +10,7 @@ import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Mode import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.fromtasty._ +import dotty.tools.dotc.quoted.QuotesCache import dotty.tools.dotc.util.ClasspathFromClassloader import dotty.tools.dotc.CompilationUnit import dotty.tools.unsupported @@ -22,6 +23,8 @@ object TastyInspector: /** Load and process TASTy files using TASTy reflect * * @param tastyFiles List of paths of `.tasty` files + * + * @return boolean value indicating whether the process 
succeeded */ def inspectTastyFiles(tastyFiles: List[String])(inspector: Inspector): Boolean = inspectAllTastyFiles(tastyFiles, Nil, Nil)(inspector) @@ -29,6 +32,8 @@ object TastyInspector: /** Load and process TASTy files in a `jar` file using TASTy reflect * * @param jars Path of `.jar` file + * + * @return boolean value indicating whether the process succeeded */ def inspectTastyFilesInJar(jar: String)(inspector: Inspector): Boolean = inspectAllTastyFiles(Nil, List(jar), Nil)(inspector) @@ -38,6 +43,8 @@ object TastyInspector: * @param tastyFiles List of paths of `.tasty` files * @param jars List of path of `.jar` files * @param dependenciesClasspath Classpath with extra dependencies needed to load class in the `.tasty` files + * + * @return boolean value indicating whether the process succeeded */ def inspectAllTastyFiles(tastyFiles: List[String], jars: List[String], dependenciesClasspath: List[String])(inspector: Inspector): Boolean = def checkFile(fileName: String, ext: String): Unit = @@ -49,7 +56,7 @@ object TastyInspector: tastyFiles.foreach(checkFile(_, "tasty")) jars.foreach(checkFile(_, "jar")) val files = tastyFiles ::: jars - files.nonEmpty && inspectFiles(dependenciesClasspath, files)(inspector) + inspectFiles(dependenciesClasspath, files)(inspector) private def inspectorDriver(inspector: Inspector) = class InspectorDriver extends Driver: @@ -58,7 +65,11 @@ object TastyInspector: class TastyInspectorPhase extends Phase: override def phaseName: String = "tastyInspector" - override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = + override def runOn(units: List[CompilationUnit])(using ctx0: Context): List[CompilationUnit] = + val ctx = QuotesCache.init(ctx0.fresh) + runOnImpl(units)(using ctx) + + private def runOnImpl(units: List[CompilationUnit])(using Context): List[CompilationUnit] = val quotesImpl = QuotesImpl() class TastyImpl(val path: String, val ast: quotesImpl.reflect.Tree) extends Tasty[quotesImpl.type] { val 
quotes = quotesImpl @@ -86,7 +97,10 @@ object TastyInspector: override def newRun(implicit ctx: Context): Run = reset() - new TASTYRun(this, ctx.fresh.addMode(Mode.ReadPositions).addMode(Mode.ReadComments)) + val ctx2 = ctx.fresh + .addMode(Mode.ReadPositions) + .setSetting(ctx.settings.YreadComments, true) + new TASTYRun(this, ctx2) new InspectorDriver @@ -97,11 +111,11 @@ object TastyInspector: private def inspectFiles(classpath: List[String], classes: List[String])(inspector: Inspector): Boolean = - if (classes.isEmpty) - throw new IllegalArgumentException("Parameter classes should no be empty") - - val reporter = inspectorDriver(inspector).process(inspectorArgs(classpath, classes)) - reporter.hasErrors + classes match + case Nil => true + case _ => + val reporter = inspectorDriver(inspector).process(inspectorArgs(classpath, classes)) + !reporter.hasErrors end inspectFiles diff --git a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala index f8c077c0aa0b..f4f72a6e7e19 100644 --- a/tasty/src/dotty/tools/tasty/TastyFormat.scala +++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala @@ -10,15 +10,16 @@ lower case letter*_ are for explanation of semantic content only, they can be dropped without changing the grammar. Micro-syntax: - +```none LongInt = Digit* StopDigit -- big endian 2's complement, value fits in a Long w/o overflow Int = LongInt -- big endian 2's complement, fits in an Int w/o overflow Nat = LongInt -- non-negative value, fits in an Int without overflow Digit = 0 | ... | 127 StopDigit = 128 | ... 
| 255 -- value = digit - 128 +``` Macro-format: - +```none File = Header majorVersion_Nat minorVersion_Nat experimentalVersion_Nat VersionString UUID nameTable_Length Name* Section* Header = 0x5CA1AB1F @@ -39,6 +40,7 @@ Macro-format: SUPERACCESSOR Length underlying_NameRef -- super$A INLINEACCESSOR Length underlying_NameRef -- inline$A OBJECTCLASS Length underlying_NameRef -- A$ (name of the module class for module A) + BODYRETAINER Length underlying_NameRef -- A$retainedBody SIGNED Length original_NameRef resultSig_NameRef ParamSig* -- name + signature TARGETSIGNED Length original_NameRef target_NameRef resultSig_NameRef ParamSig* @@ -47,13 +49,14 @@ Macro-format: // If positive, this is a NameRef for the fully qualified name of a term parameter. NameRef = Nat // ordinal number of name in name table, starting from 1. +``` Note: Unqualified names in the name table are strings. The context decides whether a name is a type-name or a term-name. The same string can represent both. Standard-Section: "ASTs" TopLevelStat* - +```none TopLevelStat = PACKAGE Length Path TopLevelStat* -- package path { topLevelStats } Stat @@ -70,7 +73,7 @@ Standard-Section: "ASTs" TopLevelStat* BOUNDED type_Term -- type bound TypeParam = TYPEPARAM Length NameRef type_Term Modifier* -- modifiers name bounds - TermParam = PARAM Length NameRef type_Term rhs_Term? Modifier* -- modifiers name : type (= rhs_Term)?. `rhsTerm` is present in the case of an aliased class parameter + TermParam = PARAM Length NameRef type_Term Modifier* -- modifiers name : type. EMPTYCLAUSE -- an empty parameter clause () SPLITCLAUSE -- splits two non-empty parameter clauses of the same kind Param = TypeParam @@ -157,7 +160,7 @@ Standard-Section: "ASTs" TopLevelStat* TYPEREFin Length NameRef qual_Type namespace_Type -- A reference `qual.name` to a non-local member that's private in `namespace`. 
RECtype parent_Type -- A wrapper for recursive refined types SUPERtype Length this_Type underlying_Type -- A super type reference to `underlying` - REFINEDtype Length underlying_Type refinement_NameRef info_Type -- underlying { refinement_name : info } + REFINEDtype Length refinement_NameRef underlying_Type info_Type -- underlying { refinement_name : info } APPLIEDtype Length tycon_Type arg_Type* -- tycon[args] TYPEBOUNDS Length lowOrAlias_Type high_Type? Variance* -- = alias or >: low <: high, possibly with variances of lambda parameters ANNOTATEDtype Length underlying_Type annotation_Term -- underlying @ annotation @@ -176,7 +179,6 @@ Standard-Section: "ASTs" TopLevelStat* TypeName = typeOrBounds_ASTRef paramName_NameRef -- (`termName`: `type`) or (`typeName` `bounds`) Modifier = PRIVATE -- private - INTERNAL -- package private (not yet used) PROTECTED -- protected PRIVATEqualified qualifier_Type -- private[qualifier] (to be dropped(?) PROTECTEDqualified qualifier_Type -- protecred[qualifier] (to be dropped(?) @@ -220,22 +222,23 @@ Standard-Section: "ASTs" TopLevelStat* | CONTRAVARIANT Annotation = ANNOTATION Length tycon_Type fullAnnotation_Term -- An annotation, given (class) type of constructor, and full application tree +``` Note: The signature of a SELECTin or TERMREFin node is the signature of the selected symbol, not the signature of the reference. The latter undergoes an asSeenFrom but the former does not. Note: Tree tags are grouped into 5 categories that determine what follows, and thus allow to compute the size of the tagged tree in a generic way. - +```none Category 1 (tags 1-59) : tag Category 2 (tags 60-89) : tag Nat Category 3 (tags 90-109) : tag AST Category 4 (tags 110-127): tag Nat AST Category 5 (tags 128-255): tag Length - +``` Standard-Section: "Positions" LinesSizes Assoc* - +```none LinesSizes = Nat Nat* // Number of lines followed by the size of each line not counting the trailing `\n` Assoc = Header offset_Delta? offset_Delta? 
point_Delta? @@ -251,15 +254,17 @@ Standard-Section: "Positions" LinesSizes Assoc* SOURCE = 4 // Impossible as header, since addr_Delta = 0 implies that we refer to the // same tree as the previous one, but then hasStartDiff = 1 implies that // the tree's range starts later than the range of itself. +``` All elements of a position section are serialized as Ints Standard Section: "Comments" Comment* - +```none Comment = Length Bytes LongInt // Raw comment's bytes encoded as UTF-8, followed by the comment's coordinates. +``` - +* @syntax markdown **************************************************************************************/ object TastyFormat { @@ -272,68 +277,68 @@ object TastyFormat { */ final val header: Array[Int] = Array(0x5C, 0xA1, 0xAB, 0x1F) - /**Natural number. Each increment of the `MajorVersion` begins a - * new series of backward compatible TASTy versions. + /** Natural number. Each increment of the `MajorVersion` begins a + * new series of backward compatible TASTy versions. * - * A TASTy file in either the preceeding or succeeding series is - * incompatible with the current value. + * A TASTy file in either the preceeding or succeeding series is + * incompatible with the current value. */ final val MajorVersion: Int = 28 - /**Natural number. Each increment of the `MinorVersion`, within - * a series declared by the `MajorVersion`, breaks forward - * compatibility, but remains backwards compatible, with all - * preceeding `MinorVersion`. + /** Natural number. Each increment of the `MinorVersion`, within + * a series declared by the `MajorVersion`, breaks forward + * compatibility, but remains backwards compatible, with all + * preceeding `MinorVersion`. */ - final val MinorVersion: Int = 0 + final val MinorVersion: Int = 2 - /**Natural Number. The `ExperimentalVersion` allows for - * experimentation with changes to TASTy without committing - * to any guarantees of compatibility. + /** Natural Number. 
The `ExperimentalVersion` allows for + * experimentation with changes to TASTy without committing + * to any guarantees of compatibility. * - * A zero value indicates that the TASTy version is from a - * stable, final release. + * A zero value indicates that the TASTy version is from a + * stable, final release. * - * A strictly positive value indicates that the TASTy - * version is experimental. An experimental TASTy file - * can only be read by a tool with the same version. - * However, tooling with an experimental TASTy version - * is able to read final TASTy documents if the file's - * `MinorVersion` is strictly less than the current value. + * A strictly positive value indicates that the TASTy + * version is experimental. An experimental TASTy file + * can only be read by a tool with the same version. + * However, tooling with an experimental TASTy version + * is able to read final TASTy documents if the file's + * `MinorVersion` is strictly less than the current value. */ - final val ExperimentalVersion: Int = 3 + final val ExperimentalVersion: Int = 1 /**This method implements a binary relation (`<:<`) between two TASTy versions. + * * We label the lhs `file` and rhs `compiler`. * if `file <:< compiler` then the TASTy file is valid to be read. * - * TASTy versions have a partial order, - * for example `a <:< b` and `b <:< a` are both false if `a` and `b` have different major versions. + * A TASTy version, e.g. `v := 28.0-3` is composed of three fields: + * - v.major == 28 + * - v.minor == 0 + * - v.experimental == 3 + * + * TASTy versions have a partial order, for example, + * `a <:< b` and `b <:< a` are both false if + * - `a` and `b` have different `major` fields. + * - `a` and `b` have the same `major` & `minor` fields, + * but different `experimental` fields, both non-zero. + * + * A TASTy version with a zero value for its `experimental` field + * is considered to be stable. 
Files with a stable TASTy version + * can be read by a compiler with an unstable TASTy version, + * (where the compiler's TASTy version has a higher `minor` field). + * + * A compiler with a stable TASTy version can never read a file + * with an unstable TASTy version. * * We follow the given algorithm: + * * ``` - * if file.major != compiler.major then - * return incompatible - * if compiler.experimental == 0 then - * if file.experimental != 0 then - * return incompatible - * if file.minor > compiler.minor then - * return incompatible - * else - * return compatible - * else invariant[compiler.experimental != 0] - * if file.experimental == compiler.experimental then - * if file.minor == compiler.minor then - * return compatible (all fields equal) - * else - * return incompatible - * else if file.experimental == 0, - * if file.minor < compiler.minor then - * return compatible (an experimental version can read a previous released version) - * else - * return incompatible (an experimental version cannot read its own minor version or any later version) - * else invariant[file.experimental is non-0 and different than compiler.experimental] - * return incompatible + * (fileMajor, fileMinor, fileExperimental) match + * case (`compilerMajor`, `compilerMinor`, `compilerExperimental`) => true // full equality + * case (`compilerMajor`, minor, 0) if minor < compilerMinor => true // stable backwards compatibility + * case _ => false * ``` * @syntax markdown */ @@ -345,18 +350,9 @@ object TastyFormat { compilerMinor: Int, compilerExperimental: Int ): Boolean = ( - fileMajor == compilerMajor && ( - if (fileExperimental == compilerExperimental) { - if (compilerExperimental == 0) { - fileMinor <= compilerMinor - } - else { - fileMinor == compilerMinor - } - } - else { - fileExperimental == 0 && fileMinor < compilerMinor - } + fileMajor == compilerMajor && + ( fileMinor == compilerMinor && fileExperimental == compilerExperimental // full equality + || fileMinor < compilerMinor && 
fileExperimental == 0 // stable backwards compatibility ) ) @@ -433,12 +429,13 @@ object TastyFormat { // Cat. 1: tag final val firstSimpleTreeTag = UNITconst + // final val ??? = 1 final val UNITconst = 2 final val FALSEconst = 3 final val TRUEconst = 4 final val NULLconst = 5 final val PRIVATE = 6 - final val INTERNAL = 7 + // final val ??? = 7 final val PROTECTED = 8 final val ABSTRACT = 9 final val FINAL = 10 @@ -461,6 +458,7 @@ object TastyFormat { final val CASEaccessor = 27 final val COVARIANT = 28 final val CONTRAVARIANT = 29 + // final val ??? = 30 final val HASDEFAULT = 31 final val STABLE = 32 final val MACRO = 33 @@ -536,6 +534,7 @@ object TastyFormat { final val IMPORT = 132 final val TYPEPARAM = 133 final val PARAM = 134 + // final val ??? = 135 final val APPLY = 136 final val TYPEAPPLY = 137 final val TYPED = 138 @@ -566,7 +565,9 @@ object TastyFormat { final val TYPEBOUNDS = 163 final val TYPEBOUNDStpt = 164 final val ANDtype = 165 + // final val ??? = 166 final val ORtype = 167 + // final val ??? = 168 final val POLYtype = 169 final val TYPELAMBDAtype = 170 final val LAMBDAtpt = 171 @@ -576,7 +577,8 @@ object TastyFormat { final val TYPEREFin = 175 final val SELECTin = 176 final val EXPORT = 177 - + // final val ??? = 178 + // final val ??? 
= 179 final val METHODtype = 180 final val MATCHtype = 190 @@ -603,7 +605,6 @@ object TastyFormat { def isModifierTag(tag: Int): Boolean = tag match { case PRIVATE - | INTERNAL | PROTECTED | ABSTRACT | FINAL @@ -667,7 +668,6 @@ object TastyFormat { case TRUEconst => "TRUEconst" case NULLconst => "NULLconst" case PRIVATE => "PRIVATE" - case INTERNAL => "INTERNAL" case PROTECTED => "PROTECTED" case ABSTRACT => "ABSTRACT" case FINAL => "FINAL" diff --git a/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala b/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala index b9f2aff3f564..9f54c4b3061b 100644 --- a/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala +++ b/tasty/test/dotty/tools/tasty/TastyHeaderUnpicklerTest.scala @@ -57,8 +57,8 @@ object TastyHeaderUnpicklerTest { buf.writeNat(exp) buf.writeNat(compilerBytes.length) buf.writeBytes(compilerBytes, compilerBytes.length) - buf.writeUncompressedLong(237478l) - buf.writeUncompressedLong(324789l) + buf.writeUncompressedLong(237478L) + buf.writeUncompressedLong(324789L) buf } diff --git a/tasty/test/dotty/tools/tasty/TastyVersionFormatTest.scala b/tasty/test/dotty/tools/tasty/TastyVersionFormatTest.scala index b5cb58910e36..3e29c9baaf81 100644 --- a/tasty/test/dotty/tools/tasty/TastyVersionFormatTest.scala +++ b/tasty/test/dotty/tools/tasty/TastyVersionFormatTest.scala @@ -11,61 +11,69 @@ class TastyVersionFormatTest { import TastyVersionFormatTest._ /** aliases `TastyVersion.apply` */ - def compiler(major: Int, minor: Int, experimental: Int) = TastyVersion(major, minor, experimental) + def compiler(major: Int, minor: Int, experimental: Experimental) = TastyVersion(major, minor, experimental) /** aliases `TastyVersion.apply` */ - def file(major: Int, minor: Int, experimental: Int) = TastyVersion(major, minor, experimental) + def file(major: Int, minor: Int, experimental: Experimental) = TastyVersion(major, minor, experimental) @Test def accept_ExperimentalReadEQExperimental_EQMinor: Unit = { - 
assert(file(28,1,1) <:< compiler(28,1,1)) // same minor, same experimental + assert(file(28,1,Exp(1)) <:< compiler(28,1,Exp(1))) // same minor, same experimental } @Test def accept_ExperimentalReadFinal_LTMinor: Unit = { - assert(file(28,0,0) <:< compiler(28,1,1)) // preceding minor + assert(file(28,0,Final) <:< compiler(28,1,Exp(1))) // preceding minor } @Test def accept_FinalReadFinal_LTEqualMinor: Unit = { - assert(file(28,0,0) <:< compiler(28,1,0)) // preceding minor - assert(file(28,0,0) <:< compiler(28,0,0)) // same minor + assert(file(28,0,Final) <:< compiler(28,1,Final)) // preceding minor + assert(file(28,0,Final) <:< compiler(28,0,Final)) // same minor } /** these cases are unrelated because a final compiler can only read final tasty of <= minor version */ @Test def reject_FinalReadFinal_GTMinor: Unit = { - assert(file(28,2,0) unrelatedTo compiler(28,1,0)) // succeeding minor + assert(file(28,2,Final) unrelatedTo compiler(28,1,Final)) // succeeding minor } /** these cases are unrelated because a final compiler can not read experimental tasty */ @Test def reject_FinalReadExperimental: Unit = { - assert(file(28,0,1) unrelatedTo compiler(28,1,0)) // preceding minor - assert(file(28,1,1) unrelatedTo compiler(28,1,0)) // same minor - assert(file(28,2,1) unrelatedTo compiler(28,1,0)) // succeeding minor + assert(file(28,0,Exp(1)) unrelatedTo compiler(28,1,Final)) // preceding minor + assert(file(28,1,Exp(1)) unrelatedTo compiler(28,1,Final)) // same minor + assert(file(28,2,Exp(1)) unrelatedTo compiler(28,1,Final)) // succeeding minor } /** These cases are unrelated because an experimental compiler can only read final tasty of < minor version */ @Test def reject_ExperimentalReadFinal_GTEqualMinor: Unit = { - assert(file(28,2,0) unrelatedTo compiler(28,1,1)) // succeeding minor - assert(file(28,1,0) unrelatedTo compiler(28,1,1)) // equal minor + assert(file(28,2,Final) unrelatedTo compiler(28,1,Exp(1))) // succeeding minor + assert(file(28,1,Final) unrelatedTo 
compiler(28,1,Exp(1))) // equal minor } /**These cases are unrelated because both compiler and file are experimental, * and with unequal experimental part. */ @Test def reject_ExperimentalReadNEExperimental: Unit = { - assert(file(28,1,2) unrelatedTo compiler(28,1,1)) // same minor version, succeeding experimental - assert(file(28,1,1) unrelatedTo compiler(28,1,2)) // same minor version, preceding experimental + assert(file(28,1,Exp(2)) unrelatedTo compiler(28,1,Exp(1))) // same minor version, succeeding experimental + assert(file(28,1,Exp(1)) unrelatedTo compiler(28,1,Exp(2))) // same minor version, preceding experimental } /** these cases are unrelated because the major version must be identical */ @Test def reject_NEMajor: Unit = { - assert(file(27,0,0) unrelatedTo compiler(28,0,0)) // less than - assert(file(29,0,0) unrelatedTo compiler(28,0,0)) // greater than + assert(file(27,0,Final) unrelatedTo compiler(28,0,Final)) // less than + assert(file(29,0,Final) unrelatedTo compiler(28,0,Final)) // greater than } } object TastyVersionFormatTest { - case class TastyVersion(major: Int, minor: Int, experimental: Int) { file => + type Experimental = Int + val Final: Experimental = 0 + def Exp(i: Int): Experimental = i.ensuring(_ > 0) + + case class TastyVersion(major: Int, minor: Int, experimental: Experimental) { file => + assert(major >= 0) + assert(minor >= 0) + assert(experimental >= 0) + def <:<(compiler: TastyVersion): Boolean = TastyFormat.isVersionCompatible( fileMajor = file.major, fileMinor = file.minor, diff --git a/tests/bench/power-macro/PowerMacro.scala b/tests/bench/power-macro/PowerMacro.scala index 03a77d927287..f55d9f1926a3 100644 --- a/tests/bench/power-macro/PowerMacro.scala +++ b/tests/bench/power-macro/PowerMacro.scala @@ -5,7 +5,7 @@ object PowerMacro { inline def power(inline n: Long, x: Double) = ${ powerCode('n, 'x) } def powerCode(n: Expr[Long], x: Expr[Double])(using Quotes): Expr[Double] = - powerCode(n.valueOrError, x) + 
powerCode(n.valueOrAbort, x) def powerCode(n: Long, x: Expr[Double])(using Quotes): Expr[Double] = if (n == 0) '{1.0} diff --git a/tests/cmdTest-sbt-tests/README.md b/tests/cmdTest-sbt-tests/README.md new file mode 100644 index 000000000000..3738c0861fb1 --- /dev/null +++ b/tests/cmdTest-sbt-tests/README.md @@ -0,0 +1,7 @@ +# Readme + +Do not use this directory for testing sbt projects in general, add a test case to `dotty/sbt-test` + +This directory is for sbt tests that can not be reproduced with sbt scripted tests. + +Adding a test here will reduce the performance of running all tests. diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/build.sbt b/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/build.sbt new file mode 100644 index 000000000000..4bff160ff55a --- /dev/null +++ b/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/build.sbt @@ -0,0 +1,17 @@ +import java.util.Properties + +val prepareSources = taskKey[Unit]("Copy changes to the src directory") +val copyChanges = taskKey[Unit]("Copy changes to the src directory") + +val srcDir = settingKey[File]("The directory to copy changes to") +val changesDir = settingKey[File]("The directory to copy changes from") + +srcDir := (ThisBuild / baseDirectory).value / "src" / "main" / "scala" +changesDir := (ThisBuild / baseDirectory).value / "changes" + +prepareSources := IO.copyFile(changesDir.value / "zz.original.scala", srcDir.value / "a" / "zz.scala") +copyChanges := IO.copyFile(changesDir.value / "zz.new.scala", srcDir.value / "a" / "zz.scala") + +(Compile / scalacOptions) ++= Seq( + "-sourcepath", (Compile / sourceDirectories).value.map(_.getAbsolutePath).distinct.mkString(java.io.File.pathSeparator), +) diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/changes/zz.new.scala b/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/changes/zz.new.scala new file mode 100644 index 000000000000..fbf5cf7fb5e0 --- /dev/null +++ 
b/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/changes/zz.new.scala @@ -0,0 +1,7 @@ +package a + +object Foo: // note that `Foo` is defined in `zz.scala` + class Local + inline def foo(using Local): Nothing = + ??? + ??? diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/changes/zz.original.scala b/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/changes/zz.original.scala new file mode 100644 index 000000000000..17a7488ccb1a --- /dev/null +++ b/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/changes/zz.original.scala @@ -0,0 +1,6 @@ +package a + +object Foo: // note that `Foo` is defined in `zz.scala` + class Local + inline def foo(using Local): Nothing = + ??? diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/project/build.properties b/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/project/build.properties new file mode 100644 index 000000000000..3161d2146c63 --- /dev/null +++ b/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/project/build.properties @@ -0,0 +1 @@ +sbt.version=1.6.1 diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/src/main/scala/a/Bar.scala b/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/src/main/scala/a/Bar.scala new file mode 100644 index 000000000000..4d4b7eebe09e --- /dev/null +++ b/tests/cmdTest-sbt-tests/sourcepath-with-inline-api-hash/src/main/scala/a/Bar.scala @@ -0,0 +1,5 @@ +package a + +object Bar: + given Foo.Local() + inline def bar = Foo.foo // `Bar.bar` is inline, it will hash the body of `Foo.foo` diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline/build.sbt b/tests/cmdTest-sbt-tests/sourcepath-with-inline/build.sbt new file mode 100644 index 000000000000..4bff160ff55a --- /dev/null +++ b/tests/cmdTest-sbt-tests/sourcepath-with-inline/build.sbt @@ -0,0 +1,17 @@ +import java.util.Properties + +val prepareSources = taskKey[Unit]("Copy changes to the src directory") +val copyChanges = taskKey[Unit]("Copy changes to the 
src directory") + +val srcDir = settingKey[File]("The directory to copy changes to") +val changesDir = settingKey[File]("The directory to copy changes from") + +srcDir := (ThisBuild / baseDirectory).value / "src" / "main" / "scala" +changesDir := (ThisBuild / baseDirectory).value / "changes" + +prepareSources := IO.copyFile(changesDir.value / "zz.original.scala", srcDir.value / "a" / "zz.scala") +copyChanges := IO.copyFile(changesDir.value / "zz.new.scala", srcDir.value / "a" / "zz.scala") + +(Compile / scalacOptions) ++= Seq( + "-sourcepath", (Compile / sourceDirectories).value.map(_.getAbsolutePath).distinct.mkString(java.io.File.pathSeparator), +) diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline/changes/zz.new.scala b/tests/cmdTest-sbt-tests/sourcepath-with-inline/changes/zz.new.scala new file mode 100644 index 000000000000..fbf5cf7fb5e0 --- /dev/null +++ b/tests/cmdTest-sbt-tests/sourcepath-with-inline/changes/zz.new.scala @@ -0,0 +1,7 @@ +package a + +object Foo: // note that `Foo` is defined in `zz.scala` + class Local + inline def foo(using Local): Nothing = + ??? + ??? diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline/changes/zz.original.scala b/tests/cmdTest-sbt-tests/sourcepath-with-inline/changes/zz.original.scala new file mode 100644 index 000000000000..17a7488ccb1a --- /dev/null +++ b/tests/cmdTest-sbt-tests/sourcepath-with-inline/changes/zz.original.scala @@ -0,0 +1,6 @@ +package a + +object Foo: // note that `Foo` is defined in `zz.scala` + class Local + inline def foo(using Local): Nothing = + ??? 
diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline/project/build.properties b/tests/cmdTest-sbt-tests/sourcepath-with-inline/project/build.properties new file mode 100644 index 000000000000..3161d2146c63 --- /dev/null +++ b/tests/cmdTest-sbt-tests/sourcepath-with-inline/project/build.properties @@ -0,0 +1 @@ +sbt.version=1.6.1 diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline/src/main/scala/a/Bar.scala b/tests/cmdTest-sbt-tests/sourcepath-with-inline/src/main/scala/a/Bar.scala new file mode 100644 index 000000000000..79af4eb2cebd --- /dev/null +++ b/tests/cmdTest-sbt-tests/sourcepath-with-inline/src/main/scala/a/Bar.scala @@ -0,0 +1,5 @@ +package a + +object Bar: + given Foo.Local() + def Bar = Foo.foo diff --git a/tests/disabled/pos-macros/forwardCompat-3.1/Macro_1_r3.1.scala b/tests/disabled/pos-macros/forwardCompat-3.1/Macro_1_r3.1.scala new file mode 100644 index 000000000000..fb06e93f91c0 --- /dev/null +++ b/tests/disabled/pos-macros/forwardCompat-3.1/Macro_1_r3.1.scala @@ -0,0 +1,20 @@ +import scala.quoted.* + +object Macros: + + inline def power(x: Double, inline n: Int) = ${ powerCode('x, 'n) } + + private def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + unrolledPowerCode(x, n.valueOrError) + + private def unrolledPowerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] = + if n == 0 then '{ 1.0 } // tests simple quotes without splices + else if n % 2 == 1 then '{ $x * ${ unrolledPowerCode(x, n - 1) } } // tests simple splices + else '{ val y = $x * $x; ${ unrolledPowerCode('y, n / 2) } } // tests splice with term capture + + + inline def let[T, U](x: T)(inline body: T => U): U = ${ letCode('x, 'body) } + + private def letCode[T: Type, U: Type](x: Expr[T], body: Expr[T => U])(using Quotes): Expr[U] = + // tests use of Type + '{ val y: T = $x; $body(y): U } diff --git a/tests/disabled/pos-macros/forwardCompat-3.1/Test_2_c3.1.0.scala b/tests/disabled/pos-macros/forwardCompat-3.1/Test_2_c3.1.0.scala new file 
mode 100644 index 000000000000..8c0a8004b9cf --- /dev/null +++ b/tests/disabled/pos-macros/forwardCompat-3.1/Test_2_c3.1.0.scala @@ -0,0 +1,15 @@ +import Macros.* + +def powerTest(x: Double): Unit = + power(x, 0) + power(x, 1) + power(x, 5) + power(x, 10) + +def letTest: Unit = + let(0) { _ + 1 } + let(0) { _.toString } + let((4, 'a')) { _.swap } + let(new Foo) { _.hashCode } + +class Foo diff --git a/tests/disabled/pos-macros/forwardCompat-3.1/why.md b/tests/disabled/pos-macros/forwardCompat-3.1/why.md new file mode 100644 index 000000000000..f281f9c08662 --- /dev/null +++ b/tests/disabled/pos-macros/forwardCompat-3.1/why.md @@ -0,0 +1 @@ +Disabled until https://github.com/lampepfl/dotty/issues/14306 is fixed diff --git a/tests/run-custom-args/erased/erased-2.check b/tests/disabled/run-custom-args/erased/erased-2.check similarity index 100% rename from tests/run-custom-args/erased/erased-2.check rename to tests/disabled/run-custom-args/erased/erased-2.check diff --git a/tests/run-custom-args/erased/erased-2.scala b/tests/disabled/run-custom-args/erased/erased-2.scala similarity index 100% rename from tests/run-custom-args/erased/erased-2.scala rename to tests/disabled/run-custom-args/erased/erased-2.scala diff --git a/tests/explicit-nulls/neg/AnyValOrNullSelect.scala b/tests/explicit-nulls/neg/AnyValOrNullSelect.scala new file mode 100644 index 000000000000..44e8b4e7edfb --- /dev/null +++ b/tests/explicit-nulls/neg/AnyValOrNullSelect.scala @@ -0,0 +1,13 @@ +case class MyVal(i: Int) extends AnyVal: + def printVal: Unit = + println(i) + +class Test: + val v: MyVal | Null = MyVal(1) + + def f1 = + v.printVal // error: value printVal is not a member of MyVal | Null + + def f1 = + import scala.language.unsafeNulls + v.printVal // error: value printVal is not a member of MyVal | Null diff --git a/tests/explicit-nulls/neg/basic.scala b/tests/explicit-nulls/neg/basic.scala index cafe4b156d85..66284de4a9f9 100644 --- a/tests/explicit-nulls/neg/basic.scala +++ 
b/tests/explicit-nulls/neg/basic.scala @@ -9,6 +9,9 @@ class Basic { val any1: Any = null val any2: Any = n + val matchable1: Matchable = null + val matchable2: Matchable = n + val s1: String = null // error val s2: String = n // error val s3: String | Null = null diff --git a/tests/explicit-nulls/pos/AnyValOrNull.scala b/tests/explicit-nulls/pos/AnyValOrNull.scala new file mode 100644 index 000000000000..098d3eba973d --- /dev/null +++ b/tests/explicit-nulls/pos/AnyValOrNull.scala @@ -0,0 +1,36 @@ +case class MyVal(i: Boolean) extends AnyVal + +class Test1: + + def test1 = + val v: AnyVal | Null = null + if v == null then + println("null") + + def test2 = + val v: Int | Null = 1 + if v != null then + println(v) + + def test3 = + val v: MyVal | Null = MyVal(false) + if v != null then + println(v) + +class Test2: + import scala.language.unsafeNulls + + def test1 = + val v: AnyVal | Null = null + if v == null then + println("null") + + def test2 = + val v: Int | Null = 1 + if v != null then + println(v) + + def test3 = + val v: MyVal | Null = MyVal(false) + if v != null then + println(v) diff --git a/tests/explicit-nulls/pos/flow-match.scala b/tests/explicit-nulls/pos/flow-match.scala index 9e3806c97363..260068b3ac3f 100644 --- a/tests/explicit-nulls/pos/flow-match.scala +++ b/tests/explicit-nulls/pos/flow-match.scala @@ -1,11 +1,15 @@ -// Test flow-typing when NotNullInfos are from non-null cases +// Test flow-typing when NotNullInfos are from cases object MatchTest { - locally { - val s: String|Null = ??? 
- s match { - case _: String => println(s.length) - case _ => println(0) - } + + def lengthOfStringOrNull(s: String | Null): Int = s match { + case _: String => s.length + case _ => 0 + } + + def stringOrNullToString(s: String | Null): String = s match { + case null => "null" + // after the null case, s becomes non-nullable + case _ => s } } diff --git a/tests/explicit-nulls/pos/i11694.scala b/tests/explicit-nulls/pos/i11694.scala new file mode 100644 index 000000000000..8098775a8430 --- /dev/null +++ b/tests/explicit-nulls/pos/i11694.scala @@ -0,0 +1,4 @@ +def test = { + val x = new java.util.ArrayList[String]() + val y = x.stream().nn.filter(s => s.nn.length > 0) +} \ No newline at end of file diff --git a/tests/explicit-nulls/pos/i13040/Impl.scala b/tests/explicit-nulls/pos/i13040/Impl.scala new file mode 100644 index 000000000000..d26927db2e94 --- /dev/null +++ b/tests/explicit-nulls/pos/i13040/Impl.scala @@ -0,0 +1,5 @@ +class Impl extends Intf: + override def test(x: Object | Null*): Unit = ??? + +class Impl2 extends Intf: + override def test(x: Object*): Unit = ??? diff --git a/tests/explicit-nulls/pos/i13040/Intf.java b/tests/explicit-nulls/pos/i13040/Intf.java new file mode 100644 index 000000000000..816c94485cc4 --- /dev/null +++ b/tests/explicit-nulls/pos/i13040/Intf.java @@ -0,0 +1,3 @@ +interface Intf { + void test(Object... 
x); +} diff --git a/tests/explicit-nulls/pos/i13197.scala b/tests/explicit-nulls/pos/i13197.scala new file mode 100644 index 000000000000..85835f9850c5 --- /dev/null +++ b/tests/explicit-nulls/pos/i13197.scala @@ -0,0 +1,7 @@ +trait Bar: + def b: String | Null + +class Foo(a: String = "", b: String) + +object Foo: + def foo(bar: Bar) = Foo(b = bar.b.nn) \ No newline at end of file diff --git a/tests/explicit-nulls/pos/i13486.scala b/tests/explicit-nulls/pos/i13486.scala new file mode 100644 index 000000000000..d9fafc157d4c --- /dev/null +++ b/tests/explicit-nulls/pos/i13486.scala @@ -0,0 +1,7 @@ +class MyPrintStream extends java.io.PrintStream(??? : java.io.OutputStream): + override def printf(format: String | Null, args: Array[? <: Object | Null]) + : java.io.PrintStream | Null = ??? + +class MyPrintStream2 extends java.io.PrintStream(??? : java.io.OutputStream): + override def printf(format: String, args: Array[? <: Object]) + : java.io.PrintStream = ??? diff --git a/tests/explicit-nulls/pos/i13608.scala b/tests/explicit-nulls/pos/i13608.scala new file mode 100644 index 000000000000..29f9f8692086 --- /dev/null +++ b/tests/explicit-nulls/pos/i13608.scala @@ -0,0 +1,3 @@ +import scala.util.control.NoStackTrace + +case class ParseException(line: Int, character: Int, message: String) extends NoStackTrace diff --git a/tests/explicit-nulls/pos/matchable.scala b/tests/explicit-nulls/pos/matchable.scala new file mode 100644 index 000000000000..ef9c5e7eae07 --- /dev/null +++ b/tests/explicit-nulls/pos/matchable.scala @@ -0,0 +1 @@ +def foo2[T <: Matchable](t: T) = t match { case null => () } \ No newline at end of file diff --git a/tests/explicit-nulls/run/nn.scala b/tests/explicit-nulls/run/nn.scala index 3ffff69649cf..12c6c2ddb3c8 100644 --- a/tests/explicit-nulls/run/nn.scala +++ b/tests/explicit-nulls/run/nn.scala @@ -15,7 +15,7 @@ object Test { val y: String|Null = null assertThrowsNPE(y.nn) assertThrowsNPE(null.nn) - assertThrowsNPE(len(null)) + 
assertThrowsNPE(len(null)) assertThrowsNPE(load(null)) } } diff --git a/tests/explicit-nulls/unsafe-common/unsafe-java-call/J.java b/tests/explicit-nulls/unsafe-common/unsafe-java-call/J.java new file mode 100644 index 000000000000..554b91749889 --- /dev/null +++ b/tests/explicit-nulls/unsafe-common/unsafe-java-call/J.java @@ -0,0 +1,17 @@ +public class J { + public String f1() { + return ""; + } + + public int f2() { + return 0; + } + + public T g1() { + return null; + } +} + +class J2 { + public T x = null; +} \ No newline at end of file diff --git a/tests/explicit-nulls/unsafe-common/unsafe-java-call/S.scala b/tests/explicit-nulls/unsafe-common/unsafe-java-call/S.scala new file mode 100644 index 000000000000..3b5108bacfa9 --- /dev/null +++ b/tests/explicit-nulls/unsafe-common/unsafe-java-call/S.scala @@ -0,0 +1,37 @@ +// Check Java calls have been cast to non-nullable. + +val j: J = new J + +val s1: String = j.f1() // error + +val s1n: String | Null = j.f1() + +val i1: Int = j.f2() + +val s2: String = j.g1[String]() // error + +val s2n: String | Null = j.g1[String]() + +val s3: String = j.g1[String | Null]() // error + +val s3n: String | Null = j.g1[String | Null]() + +val i2: Int = j.g1[Int]() // error + +val a1: Any = j.g1[Any]() + +val ar1: AnyRef = j.g1[AnyRef]() // error + +val n1: Null = j.g1[Null]() + +val ar2: AnyRef = j.g1[Null]() // error + +def clo1[T]: T = j.g1[T]() // error + +def clo2[T <: AnyRef]: T = j.g1[T | Null]() // error + +def clo3[T >: Null <: AnyRef | Null]: T = j.g1[T]() + +def testJ2[T]: T = + val j2: J2[T] = new J2 + j2.x // error diff --git a/tests/fuzzy/07b22d72e723607d8f0dfef3f4c8fb8076387a7b.scala b/tests/fuzzy/07b22d72e723607d8f0dfef3f4c8fb8076387a7b.scala new file mode 100644 index 000000000000..ab55d7506fdc --- /dev/null +++ b/tests/fuzzy/07b22d72e723607d8f0dfef3f4c8fb8076387a7b.scala @@ -0,0 +1 @@ +val a = macro ??? 
// error diff --git a/tests/generic-java-signatures/aliases.check b/tests/generic-java-signatures/aliases.check new file mode 100644 index 000000000000..3e2613531871 --- /dev/null +++ b/tests/generic-java-signatures/aliases.check @@ -0,0 +1 @@ +scala.collection.immutable.List diff --git a/tests/generic-java-signatures/aliases.scala b/tests/generic-java-signatures/aliases.scala new file mode 100644 index 000000000000..6c22fa71b2db --- /dev/null +++ b/tests/generic-java-signatures/aliases.scala @@ -0,0 +1,10 @@ +class Foo { + type A = List[String] + def foo(): A = Nil +} + +object Test { + def main(args: Array[String]): Unit = { + println(classOf[Foo].getDeclaredMethod("foo").getGenericReturnType) + } +} diff --git a/tests/generic-java-signatures/derivedNames.scala b/tests/generic-java-signatures/derivedNames.scala index 23df59ff561d..0cc5bc8a1ab7 100644 --- a/tests/generic-java-signatures/derivedNames.scala +++ b/tests/generic-java-signatures/derivedNames.scala @@ -4,12 +4,10 @@ object Test { def main(args: Array[String]): Unit = { val objectB = classOf[Foo[Any]].getClasses val returnType = objectB(1).getDeclaredMethod("m").getGenericReturnType.asInstanceOf[ParameterizedType] - val out1 = "Test$Foo.Test$Foo$A.B$>" // Windows and OSX - val out2 = "Test$Foo$A$B$>" // Linux and sometimes Windows + val out1 = "Test$Foo.Test$Foo$A.B$>" // Windows + val out2 = "Test$Foo$A$B$>" // Linux, OSX and sometimes Windows if (scala.util.Properties.isWin) assert(returnType.toString == out1 || returnType.toString == out2) - else if (scala.util.Properties.isMac) - assert(returnType.toString == out1, s"$returnType != $out1") else assert(returnType.toString == out2) } diff --git a/tests/generic-java-signatures/i10834.check b/tests/generic-java-signatures/i10834.check new file mode 100644 index 000000000000..24413b10401f --- /dev/null +++ b/tests/generic-java-signatures/i10834.check @@ -0,0 +1,6 @@ +0 + +public A1(X) +0 + +public A2(java.lang.Object) diff --git 
a/tests/generic-java-signatures/i10834.scala b/tests/generic-java-signatures/i10834.scala new file mode 100644 index 000000000000..91fb9d8c6ad1 --- /dev/null +++ b/tests/generic-java-signatures/i10834.scala @@ -0,0 +1,16 @@ +class A1[X](val x: X) +class A2[F[_]](val x: F[String]) + +object Test { + def test(clazz: Class[?]): Unit = { + val List(constructor) = clazz.getConstructors().toList + println(constructor.getTypeParameters().length) + println(constructor.getTypeParameters().mkString(", ")) + println(constructor.toGenericString()) + } + + def main(args: Array[String]): Unit = { + test(classOf[A1[?]]) + test(classOf[A2[?]]) + } +} diff --git a/tests/init/crash/i8892.scala b/tests/init/crash/i8892.scala new file mode 100644 index 000000000000..0a7e7223fe06 --- /dev/null +++ b/tests/init/crash/i8892.scala @@ -0,0 +1,28 @@ +trait Reporter: + def report(m: String): Unit + +class Dummy extends Reporter: + def report(m: String) = () + + object ABug { + sealed trait Nat { + transparent inline def ++ : Succ[this.type] = Succ(this) + + transparent inline def +(inline that: Nat): Nat = + inline this match { + case Zero => that + case Succ(p) => p + that.++ + } + } + + case object Zero extends Nat + case class Succ[N <: Nat](p: N) extends Nat + + transparent inline def toIntg(inline n: Nat): Int = + inline n match { + case Zero => 0 + case Succ(p) => toIntg(p) + 1 + } + + val j31 = toIntg(Zero.++.++.++ + Zero.++) + } \ No newline at end of file diff --git a/tests/init/neg/apply.scala b/tests/init/neg/apply.scala new file mode 100644 index 000000000000..2847115cc73c --- /dev/null +++ b/tests/init/neg/apply.scala @@ -0,0 +1,47 @@ +case class A(b: B) + +object A: + def foo(b: B) = new A(b) + inline def bar(b: B) = new A(b) + +class B: + val a = A(this) + val a2 = A.foo(this) // error + val a3 = A.bar(this) + +// test receiver is ThisRef + +object O: + case class A(b: B) + + object A: + def foo(b: B) = new A(b) + inline def bar(b: B) = new A(b) + + class B: + val a = A(this) 
+ val a2 = A.foo(this) // error + val a3 = A.bar(this) + + val b = new B +end O + + +// test receiver is Warm + +class M(n: N): + case class A(b: B) + + object A: + def foo(b: B) = new A(b) + inline def bar(b: B) = new A(b) + + class B: + val a = A(this) + val a2 = A.foo(this) // error + val a3 = A.bar(this) +end M + +class N: + val m = new M(this) + val b = new m.B diff --git a/tests/init/neg/apply2.scala b/tests/init/neg/apply2.scala new file mode 100644 index 000000000000..6a42cc9a3acc --- /dev/null +++ b/tests/init/neg/apply2.scala @@ -0,0 +1,10 @@ +object O: + case class A(b: B): + println(n) + + class B: + val a = A(this) + + val b = new B + val n = 10 // error +end O diff --git a/tests/init/neg/as-instance-of-cold-field-access.scala b/tests/init/neg/as-instance-of-cold-field-access.scala new file mode 100644 index 000000000000..ec567326b4eb --- /dev/null +++ b/tests/init/neg/as-instance-of-cold-field-access.scala @@ -0,0 +1,7 @@ +final class MyAsInstanceOfClass(o: MyAsInstanceOfClass) { + val other: MyAsInstanceOfClass = { + if (o.asInstanceOf[MyAsInstanceOfClass].oRef ne null) o // error + else new MyAsInstanceOfClass(this) + } + val oRef = o +} diff --git a/tests/init/neg/closureLeak.check b/tests/init/neg/closureLeak.check new file mode 100644 index 000000000000..3bd3cdd09b9b --- /dev/null +++ b/tests/init/neg/closureLeak.check @@ -0,0 +1,7 @@ +-- Error: tests/init/neg/closureLeak.scala:11:14 ----------------------------------------------------------------------- +11 | l.foreach(a => a.addX(this)) // error + | ^^^^^^^^^^^^^^^^^ + | Cannot prove that the value is fully initialized. Only initialized values may be used as arguments. + | + | The unsafe promotion may cause the following problem: + | Cannot prove that the value is fully initialized. Only initialized values may be used as arguments. 
diff --git a/tests/init/neg/closureLeak.scala b/tests/init/neg/closureLeak.scala new file mode 100644 index 000000000000..ae7db5124028 --- /dev/null +++ b/tests/init/neg/closureLeak.scala @@ -0,0 +1,13 @@ +class Outer { + val x = 10 + class A(x: Int) { + var y: Int = x + def addX(o: Outer): Unit = { + y = y + o.x + } + } + + val l: List[A] = List(new A(5), new A(10)) + l.foreach(a => a.addX(this)) // error + val p = 10 +} diff --git a/tests/init/neg/cycle-structure.check b/tests/init/neg/cycle-structure.check new file mode 100644 index 000000000000..79e38ee80bb4 --- /dev/null +++ b/tests/init/neg/cycle-structure.check @@ -0,0 +1,14 @@ +-- Error: tests/init/neg/cycle-structure.scala:2:15 -------------------------------------------------------------------- +2 | val x1 = b.x // error + | ^^^ + | Access field A.this.b.x on a value with an unknown initialization status. Calling trace: + | -> val x = A(this) [ cycle-structure.scala:9 ] + | -> case class A(b: B) { [ cycle-structure.scala:1 ] +-- Error: tests/init/neg/cycle-structure.scala:8:15 -------------------------------------------------------------------- +8 | val x1 = a.x // error + | ^^^ + | Access field B.this.a.x on a value with an unknown initialization status. 
Calling trace: + | -> val x = A(this) [ cycle-structure.scala:9 ] + | -> case class A(b: B) { [ cycle-structure.scala:1 ] + | -> val x = B(this) [ cycle-structure.scala:3 ] + | -> case class B(a: A) { [ cycle-structure.scala:7 ] diff --git a/tests/init/neg/cycle-structure.scala b/tests/init/neg/cycle-structure.scala new file mode 100644 index 000000000000..937df774ee14 --- /dev/null +++ b/tests/init/neg/cycle-structure.scala @@ -0,0 +1,11 @@ +case class A(b: B) { + val x1 = b.x // error + val x = B(this) + val y = x.a +} + +case class B(a: A) { + val x1 = a.x // error + val x = A(this) + val h = x.b +} diff --git a/tests/init/neg/cycle.scala b/tests/init/neg/cycle.scala new file mode 100644 index 000000000000..55b7c28acbef --- /dev/null +++ b/tests/init/neg/cycle.scala @@ -0,0 +1,11 @@ +class A(x: B) { + println(x.b) // error + val a = new B(this) + val d = a.b +} + +class B(x: A) { + println(x.a) // error + val b = new A(this) + val d = b.a +} \ No newline at end of file diff --git a/tests/init/neg/default-this.check b/tests/init/neg/default-this.check new file mode 100644 index 000000000000..b9a74db0dc95 --- /dev/null +++ b/tests/init/neg/default-this.check @@ -0,0 +1,5 @@ +-- Error: tests/init/neg/default-this.scala:9:8 ------------------------------------------------------------------------ +9 | compare() // error + | ^^^^^^^ + |Cannot prove that the value is fully initialized. Only initialized values may be used as arguments. 
Calling trace: + | -> val result = updateThenCompare(5) [ default-this.scala:11 ] diff --git a/tests/init/neg/default-this.scala b/tests/init/neg/default-this.scala new file mode 100644 index 000000000000..1b0173fc134f --- /dev/null +++ b/tests/init/neg/default-this.scala @@ -0,0 +1,12 @@ +class A { + var x: Int = 10 + def compare(c: Int = 5, a: A = this): Boolean = if (c == a.x) true else false +} + +class B extends A { + def updateThenCompare(c: Int): Boolean = { + x = c + compare() // error + } + val result = updateThenCompare(5) +} diff --git a/tests/init/neg/early-promote.scala b/tests/init/neg/early-promote.scala new file mode 100644 index 000000000000..ac1a7c8fe82e --- /dev/null +++ b/tests/init/neg/early-promote.scala @@ -0,0 +1,38 @@ +class Y { + class X { + class B { + def g = f + def g2 = n + } + val f = 42 + val b = new B // warm(B, X.this) + } + + val n = 10 + val x = new X + List(x.b) // unsafe promotion + +} + +class A { // checking A + class B { + def bf = 42 + class C { + def x = bf // uses outer[C], but never outer[B] + } + List((new C).x) + def c = new C + } + val b = new B() + List(b) // error: the checker simply issue warnings for objects that contain inner classes + val af = 42 +} + +class RecursiveF { + val a = f + def f: RecursiveF = f + class B(x: Int) + + println(new a.B(5)) + val n = 10 +} diff --git a/tests/init/neg/early-promote2.scala b/tests/init/neg/early-promote2.scala new file mode 100644 index 000000000000..514aed36a8ed --- /dev/null +++ b/tests/init/neg/early-promote2.scala @@ -0,0 +1,6 @@ +class M { + println(this) // error + foo() + private val a = 5 // error + def foo() = a +} diff --git a/tests/init/neg/early-promote3.scala b/tests/init/neg/early-promote3.scala new file mode 100644 index 000000000000..ecb5bbedca69 --- /dev/null +++ b/tests/init/neg/early-promote3.scala @@ -0,0 +1,11 @@ +abstract class A { + bar() + private val a = 5 + def foo() = a + def bar(): Unit +} + +class M extends A { + def bar() = promote(this) // 
error + def promote(m: M) = m.foo() +} diff --git a/tests/init/neg/early-promote4.scala b/tests/init/neg/early-promote4.scala new file mode 100644 index 000000000000..65f917553974 --- /dev/null +++ b/tests/init/neg/early-promote4.scala @@ -0,0 +1,20 @@ +abstract class A { + bar() + def bar(): Unit +} + +class Outer { + val a: Int = 5 + trait B { + def bar() = assert(a == 5) + } +} + +class M(val o: Outer) extends A with o.B { + val n: Int = 10 +} + +class Dummy { + val m: Int = n + 4 + val n: Int = 10 // error +} \ No newline at end of file diff --git a/tests/init/neg/early-promote5.scala b/tests/init/neg/early-promote5.scala new file mode 100644 index 000000000000..be21b5e0133b --- /dev/null +++ b/tests/init/neg/early-promote5.scala @@ -0,0 +1,25 @@ +abstract class A { + bar(this) // error + def bar(x: A): Unit +} + +class Outer { + val a: Int = 4 + trait B { + def bar(x: A) = println(a) + } +} + +class M(val o: Outer, c: Container) extends A with o.B + +class Container { + val o = new Outer + val m = new M(o, this) + val s = "hello" +} + +class Dummy { + val m: Int = n + 4 + val n: Int = 10 // error +} + diff --git a/tests/init/neg/enum-desugared.check b/tests/init/neg/enum-desugared.check new file mode 100644 index 000000000000..6417b8c5cea7 --- /dev/null +++ b/tests/init/neg/enum-desugared.check @@ -0,0 +1,18 @@ +-- Error: tests/init/neg/enum-desugared.scala:17:15 -------------------------------------------------------------------- +17 | Array(this.LazyErrorId, this.NoExplanationID) // error // error + | ^^^^^^^^^^^^^^^^ + | Cannot prove that the value is fully initialized. May only use initialized value as method arguments. + | + | The unsafe promotion may cause the following problem: + | Calling the external method method name may cause initialization errors. 
Calling trace: + | -> Array(this.LazyErrorId, this.NoExplanationID) // error // error [ enum-desugared.scala:17 ] + | -> override def productPrefix: String = this.name() [ enum-desugared.scala:29 ] +-- Error: tests/init/neg/enum-desugared.scala:17:33 -------------------------------------------------------------------- +17 | Array(this.LazyErrorId, this.NoExplanationID) // error // error + | ^^^^^^^^^^^^^^^^^^^^ + | Cannot prove that the value is fully initialized. May only use initialized value as method arguments. + | + | The unsafe promotion may cause the following problem: + | Calling the external method method ordinal may cause initialization errors. Calling trace: + | -> Array(this.LazyErrorId, this.NoExplanationID) // error // error [ enum-desugared.scala:17 ] + | -> def errorNumber: Int = this.ordinal() - 2 [ enum-desugared.scala:8 ] diff --git a/tests/init/neg/enum-desugared.scala b/tests/init/neg/enum-desugared.scala new file mode 100644 index 000000000000..eb80f112a06c --- /dev/null +++ b/tests/init/neg/enum-desugared.scala @@ -0,0 +1,35 @@ +package example + +import language.`3.0-migration` + +sealed abstract class ErrorMessageID($name: String, _$ordinal: Int) + extends java.lang.Enum[ErrorMessageID]($name, _$ordinal) with scala.reflect.Enum { + + def errorNumber: Int = this.ordinal() - 2 +} + +object ErrorMessageID { + + final val LazyErrorId = $new(0, "LazyErrorId") + final val NoExplanationID = $new(1, "NoExplanationID") + + private[this] val $values: Array[ErrorMessageID] = + Array(this.LazyErrorId, this.NoExplanationID) // error // error + + def values: Array[ErrorMessageID] = $values.clone() + + def valueOf($name: String): ErrorMessageID = $name match { + case "LazyErrorId" => this.LazyErrorId + case "NoExplanationID" => this.NoExplanationID + case _ => throw new IllegalArgumentException("enum case not found: " + $name) + } + + private[this] def $new(_$ordinal: Int, $name: String): ErrorMessageID = + new ErrorMessageID($name, _$ordinal) with 
scala.runtime.EnumValue { + override def productPrefix: String = this.name() + } + + def fromOrdinal(ordinal: Int): ErrorMessageID = + try ErrorMessageID.$values.apply(ordinal) + catch { case _ => throw new NoSuchElementException(ordinal.toString()) } +} \ No newline at end of file diff --git a/tests/init/neg/enum.check b/tests/init/neg/enum.check new file mode 100644 index 000000000000..eb48920b47ea --- /dev/null +++ b/tests/init/neg/enum.check @@ -0,0 +1,8 @@ +-- Error: tests/init/neg/enum.scala:4:8 -------------------------------------------------------------------------------- +4 | NoExplanationID // error + | ^ + | Cannot prove that the value is fully initialized. May only use initialized value as method arguments. + | + | The unsafe promotion may cause the following problem: + | Calling the external method method name may cause initialization errors. Calling trace: + | -> NoExplanationID // error [ enum.scala:4 ] diff --git a/tests/init/neg/enum.scala b/tests/init/neg/enum.scala new file mode 100644 index 000000000000..925d61b620a4 --- /dev/null +++ b/tests/init/neg/enum.scala @@ -0,0 +1,6 @@ +enum ErrorMessageID extends java.lang.Enum[ErrorMessageID] { + case + LazyErrorId, + NoExplanationID // error + def errorNumber = ordinal - 2 +} diff --git a/tests/init/neg/function-loop.scala b/tests/init/neg/function-loop.scala new file mode 100644 index 000000000000..12048860c3a6 --- /dev/null +++ b/tests/init/neg/function-loop.scala @@ -0,0 +1,6 @@ +class Foo { + val f: Int => Foo = (x: Int) => if x > 0 then f(x) else this + f(10).n + + val n = 10 // error +} \ No newline at end of file diff --git a/tests/init/neg/function1.scala b/tests/init/neg/function1.scala index e01864ae1f47..15427f3de750 100644 --- a/tests/init/neg/function1.scala +++ b/tests/init/neg/function1.scala @@ -4,7 +4,7 @@ class Foo { val fun2: Int => Int = n => 1 + n + list.size fun2(5) - List(5, 9).map(n => 2 + n + list.size) + List(5, 9).map(n => 2 + n + list.size) // error final val list = 
List(1, 2, 3) // error diff --git a/tests/init/neg/hybrid2.scala b/tests/init/neg/hybrid2.scala index d5c8b037a324..a9f8246fd58d 100644 --- a/tests/init/neg/hybrid2.scala +++ b/tests/init/neg/hybrid2.scala @@ -13,7 +13,7 @@ class Y { } val x = new X - x.b.g // error + x.b.g - val n = 10 + val n = 10 // error } diff --git a/tests/init/neg/i10549b.scala b/tests/init/neg/i10549b.scala new file mode 100644 index 000000000000..7353844b3927 --- /dev/null +++ b/tests/init/neg/i10549b.scala @@ -0,0 +1,9 @@ +class Wrap { + def qux[T](e: E[T]) = e.foo + + abstract class E[+T] { def foo: T } + object E { + final val A: E[Nothing] = new E { def foo = ref } + val ref = qux(A) // error + } +} diff --git a/tests/init/neg/i12544.scala b/tests/init/neg/i12544.scala new file mode 100644 index 000000000000..2692c27134e0 --- /dev/null +++ b/tests/init/neg/i12544.scala @@ -0,0 +1,19 @@ +enum Enum: + case Case + case Case2(x: Int) + +def g(b: Enum.B): Int = b.foo() + +object Enum: + object nested: + val a: Enum = Case + + val b: Enum = f(nested.a) + + def f(e: Enum): Enum = e + + class B() { def foo() = n + 1 } + g(new B()) // error + val n: Int = 10 + +@main def main(): Unit = println(Enum.b) diff --git a/tests/init/neg/inherit-non-hot.check b/tests/init/neg/inherit-non-hot.check new file mode 100644 index 000000000000..af95b2a7284b --- /dev/null +++ b/tests/init/neg/inherit-non-hot.check @@ -0,0 +1,16 @@ +-- Error: tests/init/neg/inherit-non-hot.scala:6:34 -------------------------------------------------------------------- +6 | if b == null then b = new B(this) // error + | ^^^^^^^^^^^ + | Cannot prove that the value is fully initialized. May only assign fully initialized value. 
+ | Calling trace: + | -> val c = new C [ inherit-non-hot.scala:19 ] + | -> class C extends A { [ inherit-non-hot.scala:15 ] + | -> val bAgain = toB.getBAgain [ inherit-non-hot.scala:16 ] + | + | The unsafe promotion may cause the following problem: + | Call method Foo.B.this.aCopy.toB on a value with an unknown initialization. Calling trace: + | -> val c = new C [ inherit-non-hot.scala:19 ] + | -> class C extends A { [ inherit-non-hot.scala:15 ] + | -> val bAgain = toB.getBAgain [ inherit-non-hot.scala:16 ] + | -> if b == null then b = new B(this) // error [ inherit-non-hot.scala:6 ] + | -> def getBAgain: B = aCopy.toB [ inherit-non-hot.scala:12 ] diff --git a/tests/init/neg/inherit-non-hot.scala b/tests/init/neg/inherit-non-hot.scala new file mode 100644 index 000000000000..44be67351630 --- /dev/null +++ b/tests/init/neg/inherit-non-hot.scala @@ -0,0 +1,21 @@ +// This is a minimized test for the warning in Names.scala:174 +object Foo { + abstract class A { + var b: B = null + def toB: B = + if b == null then b = new B(this) // error + b + } + + class B(a: A) { + var aCopy: A = a + def getBAgain: B = aCopy.toB + } + + class C extends A { + val bAgain = toB.getBAgain + } + + val c = new C + assert(c.b == c.bAgain) +} \ No newline at end of file diff --git a/tests/init/neg/inner-loop.scala b/tests/init/neg/inner-loop.scala index c56f31a96757..c6d5c615580c 100644 --- a/tests/init/neg/inner-loop.scala +++ b/tests/init/neg/inner-loop.scala @@ -1,6 +1,6 @@ class Outer { outer => class Inner extends Outer { - val x = 5 + outer.n // error + val x = 5 + outer.n } val inner = new Inner val n = 6 // error @@ -13,3 +13,22 @@ class Outer2 { outer => val inner = new Inner val n = 6 } + +class Test { + class Outer3 { outer => + class Inner extends Outer3 { + val x = 5 + n + } + val inner = new Inner + val n = 6 + } + + val outer = new Outer3 + // Warm objects with inner classes not checked. + // If we change policy to check more eagerly, + // the check has to avoid loop here. 
+ + println(outer) // error + + val m = 10 +} \ No newline at end of file diff --git a/tests/init/neg/inner1.scala b/tests/init/neg/inner1.scala index bef2fd2d159e..d39c3ba2ac77 100644 --- a/tests/init/neg/inner1.scala +++ b/tests/init/neg/inner1.scala @@ -4,7 +4,7 @@ class Foo { val list = List(1, 2, 3) // error, as Inner access `this.list` val inner: Inner = new this.Inner // ok, `list` is instantiated - lib.escape(inner) // error + lib.escape(inner) // ok, can promote inner early val name = "good" diff --git a/tests/init/neg/inner17.scala b/tests/init/neg/inner17.scala index 441ed02767b3..756278cd5130 100644 --- a/tests/init/neg/inner17.scala +++ b/tests/init/neg/inner17.scala @@ -5,7 +5,7 @@ class A { val a = f } - println(new B) // error + println(new B) // OK, can promote B early } class C extends A { diff --git a/tests/init/neg/inner19.scala b/tests/init/neg/inner19.scala index 2089f777dc52..2e502eddc24c 100644 --- a/tests/init/neg/inner19.scala +++ b/tests/init/neg/inner19.scala @@ -14,6 +14,6 @@ class A { class B extends A { println((new O.B).f) - O.C(4) // error - override val n = 50 // error + O.C(4) + override val n = 50 // error because line 16 } \ No newline at end of file diff --git a/tests/init/neg/inner30.scala b/tests/init/neg/inner30.scala new file mode 100644 index 000000000000..01bb5754d485 --- /dev/null +++ b/tests/init/neg/inner30.scala @@ -0,0 +1,21 @@ +object Scanners { + enum IndentWidth { + case Run(ch: Char, n: Int) + case Conc(l: IndentWidth, r: Run) + } + + import IndentWidth.* + + class Scanner { + def foo() = + Conc(Run('a', 3), Run('b', 4)) + new LookAheadScanner + + class LookAheadScanner() extends Scanner + + foo() + } + + val m: Int = n * 2 + val n = 10 // error +} \ No newline at end of file diff --git a/tests/init/neg/java1.scala b/tests/init/neg/java1.scala new file mode 100644 index 000000000000..1eff09cb6ff1 --- /dev/null +++ b/tests/init/neg/java1.scala @@ -0,0 +1,11 @@ +import java.util.Spliterator +import 
java.util.function.Consumer + +class A extends Spliterator.OfDouble: + def characteristics() = 10 + def estimateSize() = 10 + def trySplit() = ??? + def tryAdvance(x$0: java.util.function.DoubleConsumer): Boolean = false + + val m = n + 1 + val n = 10 // error diff --git a/tests/init/neg/leak-warm.check b/tests/init/neg/leak-warm.check new file mode 100644 index 000000000000..8219283b3c16 --- /dev/null +++ b/tests/init/neg/leak-warm.check @@ -0,0 +1,8 @@ +-- Error: tests/init/neg/leak-warm.scala:18:26 ------------------------------------------------------------------------- +18 | val l: List[A] = List(c, d) // error // error + | ^ + | Cannot prove that the value is fully initialized. May only use initialized value as method arguments. +-- Error: tests/init/neg/leak-warm.scala:18:29 ------------------------------------------------------------------------- +18 | val l: List[A] = List(c, d) // error // error + | ^ + | Cannot prove that the value is fully initialized. May only use initialized value as method arguments. 
diff --git a/tests/init/neg/leak-warm.scala b/tests/init/neg/leak-warm.scala new file mode 100644 index 000000000000..f562ab4ec416 --- /dev/null +++ b/tests/init/neg/leak-warm.scala @@ -0,0 +1,20 @@ +object leakWarm { + abstract class A(tag: Int) { + class B(x: Int) { + val y = x + } + def m(): B + } + + class C(tag1: Int, tag2: Int) extends A(tag1) { + def m() = new B(5) + } + + class D(tag1: Int, tag2: Int) extends A(tag1 + tag2) { + def m() = new B(tag1) + } + val c = new C(1, 2) + val d = new D(3, 4) + val l: List[A] = List(c, d) // error // error + val l2 = l.map(_.m()) +} diff --git a/tests/init/neg/local-warm4.check b/tests/init/neg/local-warm4.check new file mode 100644 index 000000000000..fda1ee1b928c --- /dev/null +++ b/tests/init/neg/local-warm4.check @@ -0,0 +1,11 @@ +-- Error: tests/init/neg/local-warm4.scala:18:20 ----------------------------------------------------------------------- +18 | a = newA // error + | ^^^^ + | Cannot prove that the value is fully initialized. May only assign fully initialized value. 
Calling trace: + | -> val a = new A(5) [ local-warm4.scala:26 ] + | -> class A(x: Int) extends Foo(x) { [ local-warm4.scala:6 ] + | -> val b = new B(y) [ local-warm4.scala:10 ] + | -> class B(x: Int) extends A(x) { [ local-warm4.scala:13 ] + | -> class A(x: Int) extends Foo(x) { [ local-warm4.scala:6 ] + | -> increment() [ local-warm4.scala:9 ] + | -> updateA() [ local-warm4.scala:21 ] diff --git a/tests/init/neg/local-warm4.scala b/tests/init/neg/local-warm4.scala new file mode 100644 index 000000000000..a1b3030ba4de --- /dev/null +++ b/tests/init/neg/local-warm4.scala @@ -0,0 +1,27 @@ +object localWarm { + abstract class Foo(x: Int) { + def increment(): Unit + } + + class A(x: Int) extends Foo(x) { + var y = x + override def increment(): Unit = y = y + 1 + increment() + val b = new B(y) + } + + class B(x: Int) extends A(x) { + var a: A = this + override def increment(): Unit = { + def updateA(): Unit = { + val newA = new A(y) + a = newA // error + } + y = y + 1 + updateA() + } + if y < 10 then increment() + val z = b.y + } + val a = new A(5) +} diff --git a/tests/init/neg/polyfun.scala b/tests/init/neg/polyfun.scala new file mode 100644 index 000000000000..a3a3ecc76814 --- /dev/null +++ b/tests/init/neg/polyfun.scala @@ -0,0 +1,8 @@ +class Test { + val m: [T] => (arg: T) => T = + [T] => (arg: T) => { + println(n) + arg + } + val n = m.apply(arg = 23) // error +} diff --git a/tests/init/neg/promotion-loop.check b/tests/init/neg/promotion-loop.check new file mode 100644 index 000000000000..b53dd676081f --- /dev/null +++ b/tests/init/neg/promotion-loop.check @@ -0,0 +1,7 @@ +-- Error: tests/init/neg/promotion-loop.scala:16:10 -------------------------------------------------------------------- +16 | println(b) // error + | ^ + | Cannot prove that the value is fully initialized. Only initialized values may be used as arguments. + | + | The unsafe promotion may cause the following problem: + | Cannot prove that the value is fully initialized. 
Only initialized values may be used as arguments. diff --git a/tests/init/neg/promotion-loop.scala b/tests/init/neg/promotion-loop.scala new file mode 100644 index 000000000000..7f6856c34cae --- /dev/null +++ b/tests/init/neg/promotion-loop.scala @@ -0,0 +1,19 @@ +class Test { test => + class A { + val self = this + } + + val a = new A + println(a) + + + class B { + val self = this + val outer = test + } + + val b = new B + println(b) // error + + val n = 10 +} \ No newline at end of file diff --git a/tests/init/neg/scodec.scala b/tests/init/neg/scodec.scala new file mode 100644 index 000000000000..19c2983b589d --- /dev/null +++ b/tests/init/neg/scodec.scala @@ -0,0 +1,30 @@ +trait Codec[A] { self => + final def withContext(context: String): Codec[A] = + class X extends Codec[A] { + def decode(bits: String) = 10 + override def toString = s"$self" + } + new X + + def decode(bits: String): Int + + def decodeOnly[AA >: A]: Codec[AA] = { + val sup = this.decodeOnly[AA] + class Y extends Codec[AA] { + def decode(bits: String) = sup.decode(bits) + } + new Y + } + +} + +object codecs { + class Z extends Codec[String] { + override def decode(bits: String): Int = 0 + } + val codec = new Z + + println(codec) // error + + val n = 10 // prevent early promotion +} diff --git a/tests/init/neg/secondary-ctor.scala b/tests/init/neg/secondary-ctor.scala new file mode 100644 index 000000000000..22eabcd57438 --- /dev/null +++ b/tests/init/neg/secondary-ctor.scala @@ -0,0 +1,19 @@ +class A(b: B, x: Int) { + def this(b: B) = { + this(b, 5) + println(b.n) // error + } +} + +class B(val d: D) { + val n: Int = 10 +} + +class C(b: B) extends A(b) { + def this(b: B, x: Int) = this(b) +} + +class D { + val b = new B(this) + val c = new C(b, 5) +} diff --git a/tests/init/neg/secondary-ctor2.scala b/tests/init/neg/secondary-ctor2.scala new file mode 100644 index 000000000000..1260cfbd51d1 --- /dev/null +++ b/tests/init/neg/secondary-ctor2.scala @@ -0,0 +1,25 @@ +class A(b: B, x: Int) { + def 
this(b: B) = { + this(b, 5) + class Inner() { + def foo() = println(b.n) // error: calling method on cold + } + Inner().foo() + + val f = () => new A(b, 3) + f() + } +} + +class B(val d: D) { + val n: Int = 10 +} + +class C(b: B) extends A(b) { + def this(b: B, x: Int) = this(b) +} + +class D { + val b = new B(this) + val c = new C(b, 5) +} diff --git a/tests/init/neg/secondary-ctor3.scala b/tests/init/neg/secondary-ctor3.scala new file mode 100644 index 000000000000..0001e74fc6b7 --- /dev/null +++ b/tests/init/neg/secondary-ctor3.scala @@ -0,0 +1,39 @@ +def foo() = + class L1(x: Int) { val n: Int = 5 } + + class A(b: B, x: Int) { + class L2(x: Int) { val n: Int = 5 } + + def this(b: B) = { + this(b, 5) + class Inner() { + def foo() = println(b.n) // error + } + Inner().foo() + + val l1 = new L1(3) + println(l1.n) + + val l2 = new L2(3) + println(l2.n) + + (() => new A(b, 3))() // ok + } + } + + class B(val d: D) { + val n: Int = 10 + } + + trait T { + val m: Int = 10 + } + + class C(b: B) extends A(b) with T { + def this(b: B, x: Int) = this(b) + } + + class D { + val b = new B(this) + val c = new C(b, 5) + } diff --git a/tests/init/neg/secondary-ctor4.scala b/tests/init/neg/secondary-ctor4.scala new file mode 100644 index 000000000000..e43c063541ed --- /dev/null +++ b/tests/init/neg/secondary-ctor4.scala @@ -0,0 +1,61 @@ +trait D { + val n: Int = 10 +} + +class M(x: Int) { + + def this(d: D) = { + this(d.n) + + class L1(x: Int) { val n: Int = 5 } + + class A(b: B, x: Int) { + println(d.n) // error + + class L2(x: Int) { val n: Int = 5 } + + def this(b: B) = { + this(b, 5) + println(d.n) // error + + class Inner() { + println(d.n) // error + println(b.n) // error + def foo() = println(b.n) // error + } + Inner().foo() + + val l1 = new L1(3) + println(l1.n) + + val l2 = new L2(3) + println(l2.n) + + (() => new A(b, 3))() // ok + } + } + + class B(val d: D) { + val n: Int = 10 + } + + new A(new B(new D)) + + trait T { + val m: Int = 10 + } + + class C(b: B) extends 
A(b) with T { + def this(b: B, x: Int) = this(b) + } + + class D { + val b = new B(this) + val c = new C(b, 5) + } + } +} + +class N(d: D) extends M(d) { + val msg = "Scala" +} diff --git a/tests/init/neg/soundness1.scala b/tests/init/neg/soundness1.scala index 8e048a8f72d8..ac4cb2721ef5 100644 --- a/tests/init/neg/soundness1.scala +++ b/tests/init/neg/soundness1.scala @@ -1,7 +1,29 @@ class A(b: B) { - val b2 = new B(this) // error + val b2 = new B(this) } class B(a: A) { - val a2 = new A(this) // error + val a2 = new A(this) } + +object Test2: + class A(b: B) { + val b2 = new B(this) + val c = b2.a2 + } + + class B(a: A) { + val a2 = new A(this) + val c = a2.b2 + } + +object Test3: + class A(b: B) { + println(b.a2) // error + val b2 = new B(this) + } + + class B(a: A) { + println(a.b2) // error + val a2 = new A(this) + } diff --git a/tests/init/neg/soundness2.scala b/tests/init/neg/soundness2.scala index 9d460849c6b0..34d225e953de 100644 --- a/tests/init/neg/soundness2.scala +++ b/tests/init/neg/soundness2.scala @@ -1,3 +1,4 @@ class C(c: C) { - val c2 = new C(this) // error + val d = c.c2 // error + val c2 = new C(this) } diff --git a/tests/init/neg/soundness6.scala b/tests/init/neg/soundness6.scala index 09d55dba292c..a3f80df11d1b 100644 --- a/tests/init/neg/soundness6.scala +++ b/tests/init/neg/soundness6.scala @@ -1,5 +1,5 @@ class C(c: C) { - println(c.n) - val c2 = new C(this) // error + println(c.n) // error + val c2 = new C(this) val n = 10 } diff --git a/tests/init/neg/structural.scala b/tests/init/neg/structural.scala new file mode 100644 index 000000000000..27b37a04bef7 --- /dev/null +++ b/tests/init/neg/structural.scala @@ -0,0 +1,12 @@ +import reflect.Selectable.reflectiveSelectable + +class Test { + trait A + val m: A { def apply(x: Int): Int } = + new A { + def apply(x: Int): Int = + n + x + } + + val n = m(23) // error +} diff --git a/tests/init/neg/super.scala b/tests/init/neg/super.scala new file mode 100644 index 000000000000..5a8e72cce65f --- 
/dev/null +++ b/tests/init/neg/super.scala @@ -0,0 +1,30 @@ +trait A: + def foo() = 1 + +trait B: + def foo() = 1 + +trait C: + def foo() = n + def n: Int + +class Foo extends A, B, C: + super[A].foo() + + override def foo() = n + + val n = 10 + +class Bar extends A, B, C: + super[C].foo() + + override def foo() = n * n + + val n = 10 // error + +class Qux extends A, B, C: + super.foo() + + override def foo() = n * n + + val n = 10 // error diff --git a/tests/init/neg/t3273.check b/tests/init/neg/t3273.check index 74c016ef521f..531e8f6f5ad3 100644 --- a/tests/init/neg/t3273.check +++ b/tests/init/neg/t3273.check @@ -1,22 +1,16 @@ -- Error: tests/init/neg/t3273.scala:4:42 ------------------------------------------------------------------------------ 4 | val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error | ^^^^^^^^^^^^^^^ - | Promoting the value to fully-initialized is unsafe. - | Calling trace: - | -> val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error [ t3273.scala:4 ] - | - | The unsafe promotion may cause the following problem(s): + | Cannot prove that the value is fully initialized. Only initialized values may be used as arguments. | - | 1. Access non-initialized value num1. Calling trace: + | The unsafe promotion may cause the following problem: + | Access non-initialized value num1. Calling trace: | -> val num1: LazyList[Int] = 1 #:: num1.map(_ + 1) // error [ t3273.scala:4 ] -- Error: tests/init/neg/t3273.scala:5:61 ------------------------------------------------------------------------------ 5 | val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Promoting the value to fully-initialized is unsafe. - | Calling trace: - | -> val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error [ t3273.scala:5 ] - | - | The unsafe promotion may cause the following problem(s): + | Cannot prove that the value is fully initialized. 
Only initialized values may be used as arguments. | - | 1. Access non-initialized value num2. Calling trace: + | The unsafe promotion may cause the following problem: + | Access non-initialized value num2. Calling trace: | -> val num2: LazyList[Int] = 1 #:: num2.iterator.map(_ + 1).to(LazyList) // error [ t3273.scala:5 ] diff --git a/tests/init/neg/trait1.scala b/tests/init/neg/trait1.scala new file mode 100644 index 000000000000..48eae248fee7 --- /dev/null +++ b/tests/init/neg/trait1.scala @@ -0,0 +1,10 @@ +abstract class A(x: Int) { + def foo(): Unit + foo() +} + +trait B(val y: Int) // error + +class C extends A(10) with B(20) { + def foo(): Unit = println(y) +} \ No newline at end of file diff --git a/tests/init/neg/trait2.scala b/tests/init/neg/trait2.scala new file mode 100644 index 000000000000..6011c259b73f --- /dev/null +++ b/tests/init/neg/trait2.scala @@ -0,0 +1,14 @@ +abstract class A(x: Int) { + def foo(): Unit + foo() +} + +trait B(val y: Int) // error + +class D { + class C extends A(10) with B(20) { + def foo(): Unit = println(y) + } + + val c = new C +} \ No newline at end of file diff --git a/tests/init/neg/unsound1.check b/tests/init/neg/unsound1.check new file mode 100644 index 000000000000..54e24546845c --- /dev/null +++ b/tests/init/neg/unsound1.check @@ -0,0 +1,4 @@ +-- Error: tests/init/neg/unsound1.scala:2:35 --------------------------------------------------------------------------- +2 | if (m > 0) println(foo(m - 1).a2.n) // error + | ^^^^^^^^^^^^^^^ + | Access field A.this.foo(A.this.m.-(1)).a2.n on a value with an unknown initialization status. 
diff --git a/tests/init/neg/unsound1.scala b/tests/init/neg/unsound1.scala new file mode 100644 index 000000000000..3854504c8478 --- /dev/null +++ b/tests/init/neg/unsound1.scala @@ -0,0 +1,11 @@ +class A(m: Int) { + if (m > 0) println(foo(m - 1).a2.n) // error + def foo(n: Int): B = + if (n % 2 == 0) + new B(new A(n - 1), foo(n - 1).a1) + else + new B(this, new A(n - 1)) + var n: Int = 10 +} + +class B(val a1: A, val a2: A) \ No newline at end of file diff --git a/tests/init/neg/unsound2.check b/tests/init/neg/unsound2.check new file mode 100644 index 000000000000..346caec4cb1b --- /dev/null +++ b/tests/init/neg/unsound2.check @@ -0,0 +1,6 @@ +-- Error: tests/init/neg/unsound2.scala:5:26 --------------------------------------------------------------------------- +5 | def getN: Int = a.n // error + | ^^^ + | Access field B.this.a.n on a value with an unknown initialization status. Calling trace: + | -> println(foo(x).getB) [ unsound2.scala:8 ] + | -> def foo(y: Int): B = if (y > 10) then B(bar(y - 1), foo(y - 1).getN) else B(bar(y), 10) [ unsound2.scala:2 ] diff --git a/tests/init/neg/unsound2.scala b/tests/init/neg/unsound2.scala new file mode 100644 index 000000000000..5ae0c624c32e --- /dev/null +++ b/tests/init/neg/unsound2.scala @@ -0,0 +1,10 @@ +case class A(x: Int) { + def foo(y: Int): B = if (y > 10) then B(bar(y - 1), foo(y - 1).getN) else B(bar(y), 10) + def bar(y: Int): A = if (y > 10) then A(y - 1) else this + class B(a: A, b: Int) { + def getN: Int = a.n // error + def getB: Int = b + } + println(foo(x).getB) + val n: Int = 10 +} \ No newline at end of file diff --git a/tests/init/neg/unsound3.check b/tests/init/neg/unsound3.check new file mode 100644 index 000000000000..71766cf2d10b --- /dev/null +++ b/tests/init/neg/unsound3.check @@ -0,0 +1,5 @@ +-- Error: tests/init/neg/unsound3.scala:10:38 -------------------------------------------------------------------------- +10 | if (x < 12) then foo().getC().b else newB // error + | ^^^^^^^^^^^^^^ + | Access 
field C.this.foo().getC().b on a value with an unknown initialization status. Calling trace: + | -> val b = foo() [ unsound3.scala:12 ] diff --git a/tests/init/neg/unsound3.scala b/tests/init/neg/unsound3.scala new file mode 100644 index 000000000000..9ede5c7f97d0 --- /dev/null +++ b/tests/init/neg/unsound3.scala @@ -0,0 +1,13 @@ +class B(c: C) { + def getC() = c +} + +class C { + var x = 10 + def foo(): B = { + x += 1 + val newB = new B(this) + if (x < 12) then foo().getC().b else newB // error + } + val b = foo() +} \ No newline at end of file diff --git a/tests/init/neg/unsound4.check b/tests/init/neg/unsound4.check new file mode 100644 index 000000000000..4ed254444928 --- /dev/null +++ b/tests/init/neg/unsound4.check @@ -0,0 +1,6 @@ +-- Error: tests/init/neg/unsound4.scala:3:8 ---------------------------------------------------------------------------- +3 | val aAgain = foo(5) // error + | ^ + | Access non-initialized value aAgain. Calling trace: + | -> val aAgain = foo(5) // error [ unsound4.scala:3 ] + | -> def foo(x: Int): A = if (x < 5) then this else foo(x - 1).aAgain [ unsound4.scala:2 ] diff --git a/tests/init/neg/unsound4.scala b/tests/init/neg/unsound4.scala new file mode 100644 index 000000000000..8a6e26fe8a6b --- /dev/null +++ b/tests/init/neg/unsound4.scala @@ -0,0 +1,4 @@ +class A { + def foo(x: Int): A = if (x < 5) then this else foo(x - 1).aAgain + val aAgain = foo(5) // error +} \ No newline at end of file diff --git a/tests/init/pos/Desugar.scala b/tests/init/pos/Desugar.scala new file mode 100644 index 000000000000..4b8f7ca8b460 --- /dev/null +++ b/tests/init/pos/Desugar.scala @@ -0,0 +1,15 @@ +trait A + +class Tree[-A >: Int] { + val x: Int = 10 +} + +case class C[-T >: Int] (lhs: Int, rhs: Tree[T]) extends A { + val x = rhs.x +} + +object DesugarError { + val f: PartialFunction[A, Int] = {case C(_, rhs) => rhs.x} +} + + diff --git a/tests/init/pos/InteractiveDriver.scala b/tests/init/pos/InteractiveDriver.scala new file mode 100644 index 
000000000000..a1f3a4d197bc --- /dev/null +++ b/tests/init/pos/InteractiveDriver.scala @@ -0,0 +1,23 @@ +trait InteractiveDriver[A <: AnyVal] { + val x: AnyVal + def g(x: A): A +} + +class C[A <: AnyVal] extends InteractiveDriver[A] { + val x = 0 + def g(x: A) = x +} + +class D[A <: AnyVal] extends InteractiveDriver[A] { + val x = 1.5 + def g(x: A) = x +} + +object InteractiveDriver { + def h(x: InteractiveDriver[?]): C[?] = x match { + case c: C[?] => c + case _ => new C[Int] + } + val l: Seq[Any] = Seq(1, 2, new C[Double], new D[Int]) + val l2: Seq[C[?]] = l.collect{ case x: InteractiveDriver[?] => h(x) } +} diff --git a/tests/init/pos/Parsers.scala b/tests/init/pos/Parsers.scala new file mode 100644 index 000000000000..2fa3f16839ba --- /dev/null +++ b/tests/init/pos/Parsers.scala @@ -0,0 +1,10 @@ +object Parsers { + enum Location(val inParens: Boolean, val inPattern: Boolean, val inArgs: Boolean): + case InParens extends Location(true, false, false) + case InArgs extends Location(true, false, true) + case InPattern extends Location(false, true, false) + case InGuard extends Location(false, false, false) + case InPatternArgs extends Location(false, true, true) // InParens not true, since it might be an alternative + case InBlock extends Location(false, false, false) + case ElseWhere extends Location(false, false, false) +} \ No newline at end of file diff --git a/tests/init/pos/enums.scala b/tests/init/pos/enums.scala new file mode 100644 index 000000000000..76722e084ab1 --- /dev/null +++ b/tests/init/pos/enums.scala @@ -0,0 +1,6 @@ +enum Color(val x: Int) { + case Green extends Color(3) + // case Red extends Color(2) + case Violet extends Color(Green.x + 1) + // case RGB(xx: Int) extends Color(xx) +} diff --git a/tests/init/pos/eq-ne-always-valid.scala b/tests/init/pos/eq-ne-always-valid.scala new file mode 100644 index 000000000000..8f1348e69587 --- /dev/null +++ b/tests/init/pos/eq-ne-always-valid.scala @@ -0,0 +1,13 @@ +final class MyNeClass(o: MyNeClass) { + 
val other: MyNeClass = { + if (o ne null) o // o is cold, but ne is always valid + else new MyNeClass(this) + } +} + +final class MyEqClass(o: MyEqClass) { + val other: MyEqClass = { + if (o eq null) new MyEqClass(this) // o is cold, but eq is always valid + else o + } +} diff --git a/tests/init/pos/i10549a.scala b/tests/init/pos/i10549a.scala new file mode 100644 index 000000000000..23ccbff747f4 --- /dev/null +++ b/tests/init/pos/i10549a.scala @@ -0,0 +1,7 @@ +class Wrap { + class E + object E { + final val A = new E {} + val $values = Array(A) + } +} diff --git a/tests/init/pos/i12544.scala b/tests/init/pos/i12544.scala new file mode 100644 index 000000000000..e5e3c9a2ade2 --- /dev/null +++ b/tests/init/pos/i12544.scala @@ -0,0 +1,12 @@ +enum Enum: + case Case + +object Enum: + object nested: + val a: Enum = Case + + val b: Enum = f(nested.a) + + def f(e: Enum): Enum = e + +@main def main(): Unit = println(Enum.b) diff --git a/tests/init/pos/instance-of-always-valid.scala b/tests/init/pos/instance-of-always-valid.scala new file mode 100644 index 000000000000..5f6f71c5335c --- /dev/null +++ b/tests/init/pos/instance-of-always-valid.scala @@ -0,0 +1,22 @@ +final class MyIsInstanceOfClass(o: MyIsInstanceOfClass) { + val other: MyIsInstanceOfClass = { + if (!o.isInstanceOf[Object]) new MyIsInstanceOfClass(this) // o is cold, but isInstanceOf is always valid + else o + } +} + +final class MyAsInstanceOfClass(o: MyAsInstanceOfClass) { + val other: MyAsInstanceOfClass = { + if (o.asInstanceOf[Object] ne null) o // o is cold, but ne and AsInstanceOf is always valid + else new MyAsInstanceOfClass(this) + } +} + +final class MyAsInstanceOfFieldClass(o: MyAsInstanceOfFieldClass) { + val oRef = o + val other: MyAsInstanceOfFieldClass = { + if (this.asInstanceOf[MyAsInstanceOfFieldClass].oRef ne null) oRef // o is cold, but ne and AsInstanceOf is always valid + else new MyAsInstanceOfFieldClass(this) + } +} + diff --git a/tests/init/neg/lazylist1.scala 
b/tests/init/pos/lazylist1.scala similarity index 92% rename from tests/init/neg/lazylist1.scala rename to tests/init/pos/lazylist1.scala index 75556d73aa74..3ff0f9bc449d 100644 --- a/tests/init/neg/lazylist1.scala +++ b/tests/init/pos/lazylist1.scala @@ -2,7 +2,7 @@ class LazyList[A] object LazyList { inline implicit def toDeferred[A](l: LazyList[A]): Deferred[A] = - new Deferred(l) // error + new Deferred(l) final class Deferred[A](l: => LazyList[A]) { def #:: [B >: A](elem: => B): LazyList[B] = ??? diff --git a/tests/init/pos/leak-this-inner.scala b/tests/init/pos/leak-this-inner.scala new file mode 100644 index 000000000000..bdabde0dbf7a --- /dev/null +++ b/tests/init/pos/leak-this-inner.scala @@ -0,0 +1,8 @@ +class A { + val x = 10 + class B(a: A) { + val anotherX = A.this.x + } + val b = B(this) // error + val xAgain = b.anotherX +} diff --git a/tests/init/pos/leak-this.scala b/tests/init/pos/leak-this.scala new file mode 100644 index 000000000000..7c6fbeafeb51 --- /dev/null +++ b/tests/init/pos/leak-this.scala @@ -0,0 +1,11 @@ +class Parent { + val child: Child = new Child(this) // error +} + +class Child(parent: Parent) { + val friend = new Friend(this.parent) +} + +class Friend(parent: Parent) { + val tag = 10 +} diff --git a/tests/init/pos/local-warm.scala b/tests/init/pos/local-warm.scala new file mode 100644 index 000000000000..04dea6b5e3c6 --- /dev/null +++ b/tests/init/pos/local-warm.scala @@ -0,0 +1,16 @@ +abstract class A { + def m() = 10 + def m1(): B = new B + def m2(): Int = m1().m() + class B extends A { + def x = 10 + } +} + +class C extends A { + def g() = { + val t = m1() // error + t.x + } + val x = g() +} diff --git a/tests/init/pos/local-warm2.scala b/tests/init/pos/local-warm2.scala new file mode 100644 index 000000000000..ebe5c8900c92 --- /dev/null +++ b/tests/init/pos/local-warm2.scala @@ -0,0 +1,14 @@ +class A { + var v = 5 + val x = { + class B { + def doubleAndReturnV(): Int = { + v = v * 2 + v + } + } + val b = new B + 
b.doubleAndReturnV() + } + val y = v +} \ No newline at end of file diff --git a/tests/init/pos/local-warm3.scala b/tests/init/pos/local-warm3.scala new file mode 100644 index 000000000000..047dd0d7a9e8 --- /dev/null +++ b/tests/init/pos/local-warm3.scala @@ -0,0 +1,10 @@ +class A() { + var x = 5 + def decreaseX(): Unit = { + val self = this + self.x -= 1 + } + def decreaseXToZero(): Unit = if x > 0 then decreaseX() + decreaseXToZero() + val y = x +} \ No newline at end of file diff --git a/tests/init/pos/local-warm5.scala b/tests/init/pos/local-warm5.scala new file mode 100644 index 000000000000..705b92362c3f --- /dev/null +++ b/tests/init/pos/local-warm5.scala @@ -0,0 +1,12 @@ +object leakWarm5 { + case class A(x: Int) { + def double(): A = { + class C { + def double(): A = if x < 10 then A(x * 2).double() else A.this + } + val c = new C + c.double() + } + } + val a = A(2).double() +} diff --git a/tests/init/pos/methodAtLast.scala b/tests/init/pos/methodAtLast.scala new file mode 100644 index 000000000000..02fbf1501bd8 --- /dev/null +++ b/tests/init/pos/methodAtLast.scala @@ -0,0 +1,11 @@ +class Foo(x: Int) { + var y: Int = x + case class Bar(z: Int) extends Foo(z) + def updateY(n: Int): Unit = { + if (y < 20) { + val b = new Bar(x + n) + y = b.z + } + } + updateY(5) +} diff --git a/tests/init/pos/second-ctor-fields.scala b/tests/init/pos/second-ctor-fields.scala new file mode 100644 index 000000000000..70e7b46791b6 --- /dev/null +++ b/tests/init/pos/second-ctor-fields.scala @@ -0,0 +1,17 @@ + +class A(b: B) { + def this(b: B, m: Int) = { + this(b) + def foo = m // resolved to parameter `m` + class C { foo } // resolved to parameter `m`, as hidden field of `A` + new C + } +} + +class D(b: B) extends A(b, 10) { + val n = 10 +} + +class B { + val a = new D(this) +} diff --git a/tests/init/special/i12128/Macro_2.scala b/tests/init/special/i12128/Macro_2.scala new file mode 100644 index 000000000000..0f5b866e6f1c --- /dev/null +++ 
b/tests/init/special/i12128/Macro_2.scala @@ -0,0 +1,21 @@ +import scala.quoted._ +import scala.language.experimental.macros + +class Location(val line: Int) + +object MacroCompat { + trait LocationMacro { + inline implicit def generate: Location = ${ locationImpl() } + implicit def generate: Location = macro MacroCompatScala2.locationImpl + } + + def locationImpl()(using Quotes): Expr[Location] = '{ new Location(${Expr(0)}) } +} + +object MacroCompatScala2 { + def locationImpl(c: Context): c.Tree = { + import c.universe._ + val line = Literal(Constant(c.enclosingPosition.line)) + New(c.mirror.staticClass(classOf[Location].getName()), line) + } +} diff --git a/tests/init/special/i12128/Reflect_1.scala b/tests/init/special/i12128/Reflect_1.scala new file mode 100644 index 000000000000..2c2e39de73a3 --- /dev/null +++ b/tests/init/special/i12128/Reflect_1.scala @@ -0,0 +1,30 @@ +trait Context: // Dummy scala.reflect.macros.Context + type Tree = universe.Tree + val universe: Universe + + trait Universe { + type Tree >: Null <: AnyRef with TreeApi + type Literal >: Null <: LiteralApi with TermTree + type TermTree >: Null <: TermTreeApi with Tree + + trait TermTreeApi extends TreeApi { this: TermTree => } + trait LiteralApi extends TermTreeApi { this: Literal => } + trait TreeApi extends Product { this: Tree => } + + type Constant + + type Type + + def Literal(const: Constant): Tree + def Constant(i: Int): Constant + def New(tpe: Type, arg: Tree): Tree + } + + def enclosingPosition: Position + + trait Mirror { + def staticClass(name: String): universe.Type + } + val mirror: Mirror + +class Position(val line: Int) diff --git a/tests/init/special/i12128/Test_3.scala b/tests/init/special/i12128/Test_3.scala new file mode 100644 index 000000000000..070465a7e950 --- /dev/null +++ b/tests/init/special/i12128/Test_3.scala @@ -0,0 +1 @@ +object Test extends MacroCompat.LocationMacro diff --git a/tests/invalid/neg/typelevel-erased-leak.scala 
b/tests/invalid/neg/typelevel-erased-leak.scala index ce44137109f6..4875678dbdcc 100644 --- a/tests/invalid/neg/typelevel-erased-leak.scala +++ b/tests/invalid/neg/typelevel-erased-leak.scala @@ -1,6 +1,6 @@ object typelevel { - erased def erasedValue[T]: T = ??? + erased def erasedValue[T]: T = compiletime.erasedValue } object Test { diff --git a/tests/invalid/run/Tuple.scala b/tests/invalid/run/Tuple.scala index 9da7e941b098..5d9f013d1f5a 100644 --- a/tests/invalid/run/Tuple.scala +++ b/tests/invalid/run/Tuple.scala @@ -2,7 +2,7 @@ import annotation.showAsInfix // This version of Tuple requires full retyping of untyped trees on inlining object typelevel { - erased def erasedValue[T]: T = ??? + erased def erasedValue[T]: T = compiletime.erasedValue class Typed[T](val value: T) { type Type = T } } diff --git a/tests/neg-custom-args/deprecation/14034b.scala b/tests/neg-custom-args/deprecation/14034b.scala new file mode 100644 index 000000000000..d22a945fe10d --- /dev/null +++ b/tests/neg-custom-args/deprecation/14034b.scala @@ -0,0 +1,14 @@ + +@deprecated trait Exp +@deprecated val exp = 1 + +def test1 = exp // error +def test2(a: Exp) = () // error + +type Foo0 = Exp // error +type Foo = Option[Exp] // error +type Bar = Option[exp.type] // error +type Baz = Exp | Int // error +type Quux = [X] =>> X match // error + case Exp => Int +type Quuz[A <: Exp] = Int // error diff --git a/tests/neg-custom-args/deprecation/i12597.scala b/tests/neg-custom-args/deprecation/i12597.scala new file mode 100644 index 000000000000..7927dded0cbc --- /dev/null +++ b/tests/neg-custom-args/deprecation/i12597.scala @@ -0,0 +1,5 @@ +@main def Test = + val a: IArray[Int] = IArray(2) + val b: IArray[Any] = a + val c = b.toArray // error: deprecated + c(0) = "" diff --git a/tests/neg-custom-args/deprecation/manifest-summoning.check b/tests/neg-custom-args/deprecation/manifest-summoning.check new file mode 100644 index 000000000000..aa1462f8baba --- /dev/null +++ 
b/tests/neg-custom-args/deprecation/manifest-summoning.check @@ -0,0 +1,14 @@ +-- Error: tests/neg-custom-args/deprecation/manifest-summoning.scala:1:34 ---------------------------------------------- +1 |val foo = manifest[List[? <: Int]] // error + | ^ + | Compiler synthesis of Manifest and OptManifest is deprecated, instead + | replace with the type `scala.reflect.ClassTag[List[? <: Int]]`. + | Alternatively, consider using the new metaprogramming features of Scala 3, + | see https://docs.scala-lang.org/scala3/reference/metaprogramming.html +-- Error: tests/neg-custom-args/deprecation/manifest-summoning.scala:2:41 ---------------------------------------------- +2 |val bar = optManifest[Array[? <: String]] // error + | ^ + | Compiler synthesis of Manifest and OptManifest is deprecated, instead + | replace with the type `scala.reflect.ClassTag[Array[? <: String]]`. + | Alternatively, consider using the new metaprogramming features of Scala 3, + | see https://docs.scala-lang.org/scala3/reference/metaprogramming.html diff --git a/tests/neg-custom-args/deprecation/manifest-summoning.scala b/tests/neg-custom-args/deprecation/manifest-summoning.scala new file mode 100644 index 000000000000..7e9d9ee2cc9d --- /dev/null +++ b/tests/neg-custom-args/deprecation/manifest-summoning.scala @@ -0,0 +1,2 @@ +val foo = manifest[List[? <: Int]] // error +val bar = optManifest[Array[? <: String]] // error diff --git a/tests/neg-custom-args/deprecation/t3235-minimal.check b/tests/neg-custom-args/deprecation/t3235-minimal.check new file mode 100644 index 000000000000..665caab69d8d --- /dev/null +++ b/tests/neg-custom-args/deprecation/t3235-minimal.check @@ -0,0 +1,16 @@ +-- Error: tests/neg-custom-args/deprecation/t3235-minimal.scala:3:21 --------------------------------------------------- +3 | assert(123456789.round == 123456789) // error + | ^^^^^^^^^^^^^^^ + |method round in class RichInt is deprecated since 2.11.0: this is an integer type; there is no reason to round it. 
Perhaps you meant to call this on a floating-point value? +-- Error: tests/neg-custom-args/deprecation/t3235-minimal.scala:4:16 --------------------------------------------------- +4 | assert(math.round(123456789) == 123456789) // error + | ^^^^^^^^^^ + |method round in package scala.math is deprecated since 2.11.0: This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value? +-- Error: tests/neg-custom-args/deprecation/t3235-minimal.scala:5:32 --------------------------------------------------- +5 | assert(1234567890123456789L.round == 1234567890123456789L) // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + |method round in class RichLong is deprecated since 2.11.0: this is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value? +-- Error: tests/neg-custom-args/deprecation/t3235-minimal.scala:6:16 --------------------------------------------------- +6 | assert(math.round(1234567890123456789L) == 1234567890123456789L) // error + | ^^^^^^^^^^ + |method round in package scala.math is deprecated since 2.11.0: This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value? 
diff --git a/tests/neg-custom-args/explicit-nulls/byname-nullables.check b/tests/neg-custom-args/explicit-nulls/byname-nullables.check index efe94a496c19..ffd0a637563e 100644 --- a/tests/neg-custom-args/explicit-nulls/byname-nullables.check +++ b/tests/neg-custom-args/explicit-nulls/byname-nullables.check @@ -3,8 +3,8 @@ | ^ | Found: (x : String | Null) | Required: String - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- Error: tests/neg-custom-args/explicit-nulls/byname-nullables.scala:43:32 -------------------------------------------- 43 | if x != null then f(identity(x), 1) // error: dropping not null check fails typing | ^^^^^^^^^^^ diff --git a/tests/neg-custom-args/explicit-nulls/i7883.check b/tests/neg-custom-args/explicit-nulls/i7883.check index 9a1165a6440e..579ea0b4cd00 100644 --- a/tests/neg-custom-args/explicit-nulls/i7883.check +++ b/tests/neg-custom-args/explicit-nulls/i7883.check @@ -10,11 +10,11 @@ 6 | case r(hd, tl) => Some((hd, tl)) // error // error // error | ^^ | Not found: hd - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E006] Not Found Error: tests/neg-custom-args/explicit-nulls/i7883.scala:6:34 --------------------------------------- 6 | case r(hd, tl) => Some((hd, tl)) // error // error // error | ^^ | Not found: tl - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/fatal-warnings/i11333.check b/tests/neg-custom-args/fatal-warnings/i11333.check new file mode 100644 index 000000000000..beef37c6460a --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i11333.check @@ -0,0 +1,30 @@ +-- [E167] Lossy Conversion Error: tests/neg-custom-args/fatal-warnings/i11333.scala:2:19 ------------------------------- +2 | val f1: Float = 123456789 // error + | ^^^^^^^^^ + 
| Widening conversion from Int to Float loses precision. + | Write `.toFloat` instead. +-- [E167] Lossy Conversion Error: tests/neg-custom-args/fatal-warnings/i11333.scala:3:19 ------------------------------- +3 | val d1: Double = 1234567890123456789L // error + | ^^^^^^^^^^^^^^^^^^^^ + | Widening conversion from Long to Double loses precision. + | Write `.toDouble` instead. +-- [E167] Lossy Conversion Error: tests/neg-custom-args/fatal-warnings/i11333.scala:4:19 ------------------------------- +4 | val f2: Float = 123456789L // error + | ^^^^^^^^^^ + | Widening conversion from Long to Float loses precision. + | Write `.toFloat` instead. +-- [E167] Lossy Conversion Error: tests/neg-custom-args/fatal-warnings/i11333.scala:10:21 ------------------------------ +10 | val f1_b: Float = i1 // error + | ^^ + | Widening conversion from Int to Float loses precision. + | Write `.toFloat` instead. +-- [E167] Lossy Conversion Error: tests/neg-custom-args/fatal-warnings/i11333.scala:11:21 ------------------------------ +11 | val d1_b: Double = l1 // error + | ^^ + | Widening conversion from Long to Double loses precision. + | Write `.toDouble` instead. +-- [E167] Lossy Conversion Error: tests/neg-custom-args/fatal-warnings/i11333.scala:12:21 ------------------------------ +12 | val f2_b: Float = l2 // error + | ^^ + | Widening conversion from Long to Float loses precision. + | Write `.toFloat` instead. 
diff --git a/tests/neg-custom-args/fatal-warnings/i11333.scala b/tests/neg-custom-args/fatal-warnings/i11333.scala new file mode 100644 index 000000000000..3ba39efeb29e --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i11333.scala @@ -0,0 +1,12 @@ +class C: + val f1: Float = 123456789 // error + val d1: Double = 1234567890123456789L // error + val f2: Float = 123456789L // error + + inline val i1 = 123456789 + inline val l1 = 1234567890123456789L + inline val l2 = 123456789L + + val f1_b: Float = i1 // error + val d1_b: Double = l1 // error + val f2_b: Float = l2 // error diff --git a/tests/neg-custom-args/fatal-warnings/i11963a.scala b/tests/neg-custom-args/fatal-warnings/i11963a.scala new file mode 100644 index 000000000000..58d64d061162 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i11963a.scala @@ -0,0 +1 @@ +open trait Foo // error diff --git a/tests/neg-custom-args/fatal-warnings/i11963b.scala b/tests/neg-custom-args/fatal-warnings/i11963b.scala new file mode 100644 index 000000000000..9fae92747d53 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i11963b.scala @@ -0,0 +1 @@ +open abstract class Foo // error diff --git a/tests/neg-custom-args/fatal-warnings/i11963c.scala b/tests/neg-custom-args/fatal-warnings/i11963c.scala new file mode 100644 index 000000000000..ebd56e1127c8 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i11963c.scala @@ -0,0 +1,6 @@ +object Test { + def foo: Any = { + open class Bar // error + new Bar + } +} diff --git a/tests/neg-custom-args/fatal-warnings/i12188/Macro.scala b/tests/neg-custom-args/fatal-warnings/i12188/Macro.scala new file mode 100644 index 000000000000..9b49f80f9046 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i12188/Macro.scala @@ -0,0 +1,20 @@ +import scala.quoted.* + +object MatchTest { + inline def test[T](inline obj: T): Unit = ${testImpl('obj)} + + def testImpl[T](objExpr: Expr[T])(using qctx: Quotes, t: Type[T]): Expr[Unit] = { + import qctx.reflect.* + + val obj = 
objExpr.asTerm + + val cases = obj.tpe.typeSymbol.children.map { child => + val subtype = TypeIdent(child) + val bind = Symbol.newBind(Symbol.spliceOwner, "c", Flags.EmptyFlags, subtype.tpe) + CaseDef(Bind(bind, Typed(Ref(bind), subtype)), None, '{()}.asTerm) + } + val result = Match(obj, cases) + //println(result.show(using Printer.TreeAnsiCode)) + result.asExprOf[Unit] + } +} diff --git a/tests/neg-custom-args/fatal-warnings/i12188/Test.scala b/tests/neg-custom-args/fatal-warnings/i12188/Test.scala new file mode 100644 index 000000000000..3bea42ac3032 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i12188/Test.scala @@ -0,0 +1,9 @@ +sealed trait P +case class PC1(a: String) extends P +case class PC2(b: Int) extends P + +def Test = MatchTest.test(PC2(10): P) + +def foo(x: P): Unit = + x match // error + case _: PC1 => \ No newline at end of file diff --git a/tests/neg-custom-args/fatal-warnings/i12253.check b/tests/neg-custom-args/fatal-warnings/i12253.check new file mode 100644 index 000000000000..1cda504511af --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i12253.check @@ -0,0 +1,8 @@ +-- Error: tests/neg-custom-args/fatal-warnings/i12253.scala:11:10 ------------------------------------------------------ +11 | case extractors.InlinedLambda(_, Select(_, name)) => Expr(name) // error // error + | ^ + | the type test for extractors.q2.reflect.Term cannot be checked at runtime +-- Error: tests/neg-custom-args/fatal-warnings/i12253.scala:11:38 ------------------------------------------------------ +11 | case extractors.InlinedLambda(_, Select(_, name)) => Expr(name) // error // error + | ^ + | the type test for q1.reflect.Select cannot be checked at runtime diff --git a/tests/neg-custom-args/fatal-warnings/i12253.scala b/tests/neg-custom-args/fatal-warnings/i12253.scala new file mode 100644 index 000000000000..5fdf1c83504d --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i12253.scala @@ -0,0 +1,28 @@ +import scala.quoted.{given, *} +import 
deriving.*, compiletime.* + +object MacroUtils: + transparent inline def extractNameFromSelector[To, T](inline code: To => T) = ${extractNameFromSelectorImpl('code)} + + def extractNameFromSelectorImpl[To: Type, T: Type](code: Expr[To => T])(using q1: Quotes): Expr[String] = + import quotes.reflect.* + val extractors = new Extractors + code.asTerm match + case extractors.InlinedLambda(_, Select(_, name)) => Expr(name) // error // error + case t => report.throwError(s"Illegal argument to extractor: ${code.show}, in tasty: $t") + + class Extractors(using val q2: Quotes): + //attempt to strip away consecutive inlines in AST and extract only final lambda + import quotes.reflect.* + + object InlinedLambda: + def unapply(arg: Term): Option[(List[ValDef], Term)] = + arg match + case Inlined(_, _, Lambda(vals, term)) => Some((vals, term)) + case Inlined(_, _, nested) => InlinedLambda.unapply(nested) + case t => None + end InlinedLambda + + end Extractors +end MacroUtils + diff --git a/tests/neg-custom-args/fatal-warnings/i13011.scala b/tests/neg-custom-args/fatal-warnings/i13011.scala new file mode 100644 index 000000000000..ae534394bf96 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i13011.scala @@ -0,0 +1,21 @@ +class i13011 { + lazy implicit val simple1: String = simple1 // error + def f: Unit = { + lazy val simple2: String = simple2 // error + } + + lazy val simple3: String = if true then this.simple3 else "a" // error + + def firstDigitIsEven(n: Int): Boolean = if n % 10 == n then n % 2 == 0 else firstDigitIsEven(n / 10) + + lazy val simple4: String = if firstDigitIsEven(22) then this.simple4 else "a" // ok + + lazy val simple5: String = identity(this.simple5) // error + + lazy val simple6: String = { // error + this.simple6 + "aa" + } + + lazy val simple7: Function0[Any] = () => this.simple7 // Ok +} diff --git a/tests/neg-custom-args/fatal-warnings/i13440.check b/tests/neg-custom-args/fatal-warnings/i13440.check new file mode 100644 index 
000000000000..fde8133419b6 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i13440.check @@ -0,0 +1,12 @@ +-- Error: tests/neg-custom-args/fatal-warnings/i13440.scala:3:4 -------------------------------------------------------- +3 |def given = 42 // error + | ^ + | given is now a keyword, write `given` instead of given to keep it as an identifier +-- Error: tests/neg-custom-args/fatal-warnings/i13440.scala:5:13 ------------------------------------------------------- +5 |case class C(enum: List[Int] = Nil) { // error + | ^ + | enum is now a keyword, write `enum` instead of enum to keep it as an identifier +-- Error: tests/neg-custom-args/fatal-warnings/i13440.scala:6:11 ------------------------------------------------------- +6 | val s = s"$enum" // error + | ^ + | enum is now a keyword, write `enum` instead of enum to keep it as an identifier diff --git a/tests/neg-custom-args/fatal-warnings/i13440.scala b/tests/neg-custom-args/fatal-warnings/i13440.scala new file mode 100644 index 000000000000..6cb4956e7434 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i13440.scala @@ -0,0 +1,7 @@ +import language.`3.0-migration` + +def given = 42 // error + +case class C(enum: List[Int] = Nil) { // error + val s = s"$enum" // error +} diff --git a/tests/neg-custom-args/fatal-warnings/i13542.scala b/tests/neg-custom-args/fatal-warnings/i13542.scala new file mode 100644 index 000000000000..3da18c233583 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i13542.scala @@ -0,0 +1,50 @@ +import scala.language.implicitConversions + +case class Foo(i: Int) extends AnyVal: + def toFoo = this + +case class Bar(i: Int) extends AnyVal + +class BarOps(bar: Bar): + def toFoo = Foo(bar.i) + +implicit def augmentBar(bar: Bar): BarOps = BarOps(bar) + +def lazyIdentity[T](x: => T): T = x +def repIdentity[T](x: T*): T = x(0) + +val x1 = + implicit def barToFoo(bar: Bar): Foo = bar.toFoo // error: infinite loop in function body + val foo: Foo = Bar(1) + +val x2 = + implicit 
def barToFoo2(bar: Bar): Foo = + identity(bar.toFoo) // error + val foo: Foo = Bar(1) + +val x3 = + implicit def barToFoo3(bar: Bar): Foo = + lazyIdentity(bar.toFoo) // OK + val foo: Foo = Bar(1) + +val x4 = + implicit def barToFoo4(bar: Bar): Foo = + repIdentity(bar.toFoo) // error + val foo: Foo = Bar(1) + +val x5 = + implicit def barToFoo4(bar: Bar): Foo = + val y = bar.toFoo // error + y + val foo: Foo = Bar(1) + +val x6 = + implicit def barToFoo4(bar: Bar): Foo = + lazy val y = bar.toFoo // error + if false then y else ??? + val foo: Foo = Bar(1) + + + + + diff --git a/tests/neg-custom-args/fatal-warnings/i13820.scala b/tests/neg-custom-args/fatal-warnings/i13820.scala new file mode 100644 index 000000000000..234c1a55450e --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i13820.scala @@ -0,0 +1,5 @@ +trait Expr { type T } + +def foo[A](e: Expr { type T = A }) = e match + case e1: Expr { type T <: Int } => // error: type test cannot be checked at runtime + val i: Int = ??? : e1.T \ No newline at end of file diff --git a/tests/neg/i8427.scala b/tests/neg-custom-args/fatal-warnings/i8427.scala similarity index 100% rename from tests/neg/i8427.scala rename to tests/neg-custom-args/fatal-warnings/i8427.scala diff --git a/tests/neg-custom-args/fatal-warnings/i8711.check b/tests/neg-custom-args/fatal-warnings/i8711.check index 0abda7a77ed6..0035af0755d4 100644 --- a/tests/neg-custom-args/fatal-warnings/i8711.check +++ b/tests/neg-custom-args/fatal-warnings/i8711.check @@ -1,8 +1,8 @@ --- Error: tests/neg-custom-args/fatal-warnings/i8711.scala:7:9 --------------------------------------------------------- +-- [E030] Match case Unreachable Error: tests/neg-custom-args/fatal-warnings/i8711.scala:7:9 --------------------------- 7 | case x: B => x // error: this case is unreachable since class A is not a subclass of class B - | ^ - | this case is unreachable since type A and class B are unrelated --- Error: tests/neg-custom-args/fatal-warnings/i8711.scala:12:9 
-------------------------------------------------------- + | ^^^^ + | Unreachable case +-- [E030] Match case Unreachable Error: tests/neg-custom-args/fatal-warnings/i8711.scala:12:9 -------------------------- 12 | case x: C => x // error - | ^ - | this case is unreachable since type A | B and class C are unrelated + | ^^^^ + | Unreachable case diff --git a/tests/neg-custom-args/fatal-warnings/i9241.scala b/tests/neg-custom-args/fatal-warnings/i9241.scala new file mode 100644 index 000000000000..d3be9bc9278d --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i9241.scala @@ -0,0 +1,41 @@ +class Foo { + def unary_~() : Foo = this // error + def unary_-(using Int)(): Foo = this // error + def unary_+()(implicit i: Int): Foo = this // error + def unary_![T](): Foo = this // error +} + +class Bar { + def unary_~ : Bar = this + def unary_-(using Int): Bar = this + def unary_+(implicit i: Int): Bar = this + def unary_![T]: Bar = this +} + +final class Baz private (val x: Int) extends AnyVal { + def unary_- : Baz = ??? + def unary_+[T] : Baz = ??? + def unary_!() : Baz = ??? // error + def unary_~(using Int) : Baz = ??? +} + +extension (x: Int) + def unary_- : Int = ??? + def unary_+[T] : Int = ??? + def unary_!() : Int = ??? // error + def unary_~(using Int) : Int = ??? +end extension + +extension [T](x: Short) + def unary_- : Int = ??? + def unary_+[U] : Int = ??? + def unary_!() : Int = ??? // error + def unary_~(using Int) : Int = ??? +end extension + +extension (using Int)(x: Byte) + def unary_- : Int = ??? + def unary_+[U] : Int = ??? + def unary_!() : Int = ??? // error + def unary_~(using Int) : Int = ??? 
+end extension diff --git a/tests/neg-custom-args/fatal-warnings/i9880.scala b/tests/neg-custom-args/fatal-warnings/i9880.scala new file mode 100644 index 000000000000..d9d857110543 --- /dev/null +++ b/tests/neg-custom-args/fatal-warnings/i9880.scala @@ -0,0 +1,30 @@ +opaque type Bytes = Array[Byte] +object Bytes: + extension (self: Bytes) + def size: Int = (self: Array[Byte]).size // error + +// + +object Module1: + opaque type State[S, +A] = S => (S, A) + object State: + extension [S, A](self: State[S, A]) + def map[B](f: A => B): State[S, B] = + s => { val (s2, a) = self(s); (s2, f(a)) } +object Module2: + import Module1.State + trait RNG + opaque type Gen[+A] = State[RNG, A] + object Gen: + extension [A](self: Gen[A]) + def map[B](f: A => B): Gen[B] = + self.map(f) // error + +// + +class Sym(val owner: Sym) + +extension (sym: Sym) + def isSomething: Boolean = false + def isFoo: Boolean = sym.isSomething && sym.owner.isFoo // was: Infinite loop in function body + def isBar: Boolean = sym.isSomething || sym.owner.isBar // was: Infinite loop in function body diff --git a/tests/neg-custom-args/hidden-type-errors.check b/tests/neg-custom-args/hidden-type-errors.check new file mode 100644 index 000000000000..a373e409af2f --- /dev/null +++ b/tests/neg-custom-args/hidden-type-errors.check @@ -0,0 +1,28 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/hidden-type-errors/Test.scala:6:24 -------------------------------- +6 | val x = X.doSomething("XXX") // error + | ^^^^^^^^^^^^^^^^^^^^ + | Found: String + | Required: Int + | This location contains code that was inlined from Test.scala:6 + +Explanation +=========== + +Tree: t12717.A.bar("XXX") + +I tried to show that + String +conforms to + Int +but the comparison trace ended with `false`: + + ==> String <: Int + ==> String <: Int (recurring) + ==> String <: Int (recurring) + <== String <: Int (recurring) = false + <== String <: Int (recurring) = false + <== String <: Int = false + +The tests were made under the 
empty constraint + +1 error found diff --git a/tests/neg-custom-args/hidden-type-errors/Macro.scala b/tests/neg-custom-args/hidden-type-errors/Macro.scala new file mode 100644 index 000000000000..17b4f3d52ffe --- /dev/null +++ b/tests/neg-custom-args/hidden-type-errors/Macro.scala @@ -0,0 +1,22 @@ +package t12717 + +import scala.quoted._ + +object A: + + def foo(x:Int): Int = ??? + + def bar(x:String): String = ??? + + +object X: + + inline def doSomething[T](inline x:T):Any = ${ + doSomethingImpl('x) + } + + def doSomethingImpl[T:Type](x:Expr[T])(using Quotes):Expr[Any] = + import quotes.reflect._ + val aTerm = '{A}.asTerm + val xBar = Apply(Select.unique(aTerm,"bar"),List(x.asTerm)) + Apply(Select.unique(aTerm,"foo"), List(xBar)).asExpr diff --git a/tests/neg-custom-args/hidden-type-errors/Test.scala b/tests/neg-custom-args/hidden-type-errors/Test.scala new file mode 100644 index 000000000000..180aa07cfb50 --- /dev/null +++ b/tests/neg-custom-args/hidden-type-errors/Test.scala @@ -0,0 +1,6 @@ +package t12717 + + +object Test: + + val x = X.doSomething("XXX") // error diff --git a/tests/neg-custom-args/i11637.check b/tests/neg-custom-args/i11637.check index b0479b633efc..0664a05f4f86 100644 --- a/tests/neg-custom-args/i11637.check +++ b/tests/neg-custom-args/i11637.check @@ -1,77 +1,56 @@ -- [E057] Type Mismatch Error: tests/neg-custom-args/i11637.scala:11:33 ------------------------------------------------ 11 | var h = new HKT3_1[FunctorImpl](); // error // error | ^ - | Type argument test2.FunctorImpl does not conform to upper bound [Generic2[T] <: Set[T]] =>> Any - -Explanation -=========== - -I tried to show that - test2.FunctorImpl -conforms to - [Generic2[T] <: Set[T]] =>> Any -but the comparison trace ended with `false`: - - ==> test2.FunctorImpl <: [Generic2[T] <: Set[T]] =>> Any - ==> test2.FunctorImpl <: [Generic2[T] <: Set[T]] =>> Any (recurring) - ==> type bounds [[T] <: Set[T]] <: type bounds [[T] <: Iterable[T]] - ==> type bounds [[T] <: Set[T]] <: 
type bounds [[T] <: Iterable[T]] (recurring) - ==> [T <: String] =>> Set[T] <: Iterable - ==> [T <: String] =>> Set[T] <: Iterable (recurring) - ==> type bounds [] <: type bounds [ <: String] - ==> type bounds [] <: type bounds [ <: String] (recurring) - ==> Any <: String - ==> Any <: String (recurring) - ==> Any <: String (recurring) - <== Any <: String (recurring) = false - <== Any <: String (recurring) = false - <== Any <: String = false - <== type bounds [] <: type bounds [ <: String] (recurring) = false - <== type bounds [] <: type bounds [ <: String] = false - <== [T <: String] =>> Set[T] <: Iterable (recurring) = false - <== [T <: String] =>> Set[T] <: Iterable = false - <== type bounds [[T] <: Set[T]] <: type bounds [[T] <: Iterable[T]] (recurring) = false - <== type bounds [[T] <: Set[T]] <: type bounds [[T] <: Iterable[T]] = false - <== test2.FunctorImpl <: [Generic2[T] <: Set[T]] =>> Any (recurring) = false - <== test2.FunctorImpl <: [Generic2[T] <: Set[T]] =>> Any = false - -The tests were made under the empty constraint - + | Type argument test2.FunctorImpl does not conform to upper bound [Generic2[T <: String] <: Set[T]] =>> Any + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | I tried to show that + | test2.FunctorImpl + | conforms to + | [Generic2[T <: String] <: Set[T]] =>> Any + | but the comparison trace ended with `false`: + | + | ==> test2.FunctorImpl <: [Generic2[T <: String] <: Set[T]] =>> Any + | ==> type bounds [[T <: String] <: Set[T]] <: type bounds [[T] <: Iterable[T]] + | ==> [T <: String] =>> Set[T] <: Iterable + | ==> type bounds [] <: type bounds [ <: String] + | ==> Any <: String + | ==> Any <: String + | <== Any <: String = false + | <== Any <: String = false + | <== type bounds [] <: type bounds [ <: 
String] = false + | <== [T <: String] =>> Set[T] <: Iterable = false + | <== type bounds [[T <: String] <: Set[T]] <: type bounds [[T] <: Iterable[T]] = false + | <== test2.FunctorImpl <: [Generic2[T <: String] <: Set[T]] =>> Any = false + | + | The tests were made under the empty constraint + -------------------------------------------------------------------------------------------------------------------- -- [E057] Type Mismatch Error: tests/neg-custom-args/i11637.scala:11:21 ------------------------------------------------ 11 | var h = new HKT3_1[FunctorImpl](); // error // error | ^ - | Type argument test2.FunctorImpl does not conform to upper bound [Generic2[T] <: Set[T]] =>> Any - -Explanation -=========== - -I tried to show that - test2.FunctorImpl -conforms to - [Generic2[T] <: Set[T]] =>> Any -but the comparison trace ended with `false`: - - ==> test2.FunctorImpl <: [Generic2[T] <: Set[T]] =>> Any - ==> test2.FunctorImpl <: [Generic2[T] <: Set[T]] =>> Any (recurring) - ==> type bounds [[T] <: Set[T]] <: type bounds [[T] <: Iterable[T]] - ==> type bounds [[T] <: Set[T]] <: type bounds [[T] <: Iterable[T]] (recurring) - ==> [T <: String] =>> Set[T] <: Iterable - ==> [T <: String] =>> Set[T] <: Iterable (recurring) - ==> type bounds [] <: type bounds [ <: String] - ==> type bounds [] <: type bounds [ <: String] (recurring) - ==> Any <: String - ==> Any <: String (recurring) - ==> Any <: String (recurring) - <== Any <: String (recurring) = false - <== Any <: String (recurring) = false - <== Any <: String = false - <== type bounds [] <: type bounds [ <: String] (recurring) = false - <== type bounds [] <: type bounds [ <: String] = false - <== [T <: String] =>> Set[T] <: Iterable (recurring) = false - <== [T <: String] =>> Set[T] <: Iterable = false - <== type bounds [[T] <: Set[T]] <: type bounds [[T] <: Iterable[T]] (recurring) = false - <== type bounds [[T] <: Set[T]] <: type bounds [[T] <: Iterable[T]] = false - <== test2.FunctorImpl <: [Generic2[T] <: 
Set[T]] =>> Any (recurring) = false - <== test2.FunctorImpl <: [Generic2[T] <: Set[T]] =>> Any = false - -The tests were made under the empty constraint + | Type argument test2.FunctorImpl does not conform to upper bound [Generic2[T <: String] <: Set[T]] =>> Any + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | I tried to show that + | test2.FunctorImpl + | conforms to + | [Generic2[T <: String] <: Set[T]] =>> Any + | but the comparison trace ended with `false`: + | + | ==> test2.FunctorImpl <: [Generic2[T <: String] <: Set[T]] =>> Any + | ==> type bounds [[T <: String] <: Set[T]] <: type bounds [[T] <: Iterable[T]] + | ==> [T <: String] =>> Set[T] <: Iterable + | ==> type bounds [] <: type bounds [ <: String] + | ==> Any <: String + | ==> Any <: String + | <== Any <: String = false + | <== Any <: String = false + | <== type bounds [] <: type bounds [ <: String] = false + | <== [T <: String] =>> Set[T] <: Iterable = false + | <== type bounds [[T <: String] <: Set[T]] <: type bounds [[T] <: Iterable[T]] = false + | <== test2.FunctorImpl <: [Generic2[T <: String] <: Set[T]] =>> Any = false + | + | The tests were made under the empty constraint + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-custom-args/i12650.check b/tests/neg-custom-args/i12650.check new file mode 100644 index 000000000000..62ca140ef6b9 --- /dev/null +++ b/tests/neg-custom-args/i12650.check @@ -0,0 +1,4 @@ +-- Error: tests/neg-custom-args/i12650.scala:2:58 ---------------------------------------------------------------------- +2 | type This <: FooBase { type This <: FooBase.this.This } & FooBase { type This <: FooBase.this.This } // error + | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | cyclic reference involving type This diff --git a/tests/neg-custom-args/i12650.scala b/tests/neg-custom-args/i12650.scala new file mode 100644 index 000000000000..f071b040f2ee --- /dev/null +++ b/tests/neg-custom-args/i12650.scala @@ -0,0 +1,3 @@ +trait FooBase { + type This <: FooBase { type This <: FooBase.this.This } & FooBase { type This <: FooBase.this.This } // error +} \ No newline at end of file diff --git a/tests/neg-custom-args/i13026.check b/tests/neg-custom-args/i13026.check new file mode 100644 index 000000000000..e610aa51336c --- /dev/null +++ b/tests/neg-custom-args/i13026.check @@ -0,0 +1,18 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/i13026.scala:1:13 ------------------------------------------------- +1 |val x: Int = "not an int" // error + | ^^^^^^^^^^^^ + | Found: ("not an int" : String) + | Required: Int + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg-custom-args/i13026.scala:2:13 ------------------------------------------------- +2 |val y: Int = "not an int" // error + | ^^^^^^^^^^^^ + | Found: ("not an int" : String) + | Required: Int + | + | longer explanation available when compiling with `-explain` +-- [E008] Not Found Error: tests/neg-custom-args/i13026.scala:3:20 ----------------------------------------------------- +3 |def foo(x: Any) = x.foo // error + | ^^^^^ + | value foo is not a member of Any diff --git a/tests/neg-custom-args/i13026.scala b/tests/neg-custom-args/i13026.scala new file mode 100644 index 000000000000..9ecf909f7122 --- /dev/null +++ b/tests/neg-custom-args/i13026.scala @@ -0,0 +1,3 @@ +val x: Int = "not an int" // error +val y: Int = "not an int" // error +def foo(x: Any) = x.foo // error diff --git a/tests/neg-custom-args/i13838.check b/tests/neg-custom-args/i13838.check new file mode 100644 index 000000000000..f6866dc6bf2a --- /dev/null +++ 
b/tests/neg-custom-args/i13838.check @@ -0,0 +1,26 @@ +-- Error: tests/neg-custom-args/i13838.scala:10:5 ---------------------------------------------------------------------- +10 | foo // error + | ^ + |no implicit argument of type Order[X] was found for parameter x$1 of method foo in object FooT + | + |where: X is a type variable + |. + |I found: + | + | FooT.OrderFFooA[F, A](FooT.OrderFFooA[F, A](/* missing */summon[Order[F[Foo[A]]]])) + | + |But given instance OrderFFooA in object FooT produces a diverging implicit search when trying to match type Order[F[Foo[A]]]. +-- [E168] Type Warning: tests/neg-custom-args/i13838.scala:10:5 -------------------------------------------------------- +10 | foo // error + | ^ + | Implicit search problem too large. + | an implicit search was terminated with failure after trying 1000 expressions. + | The root candidate for the search was: + | + | given instance OrderFFooA in object FooT for Order[Any]} + | + | You can change the behavior by setting the `-Ximplicit-search-limit` value. + | Smaller values cause the search to fail faster. + | Larger values might make a very large search problem succeed. + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/i13838.scala b/tests/neg-custom-args/i13838.scala new file mode 100644 index 000000000000..c99d3fa1f82d --- /dev/null +++ b/tests/neg-custom-args/i13838.scala @@ -0,0 +1,42 @@ +implicit def catsSyntaxEq[A: Eq](a: A): Foo[A] = ??? + +class Foo[A] +object Foo: + given [A: Eq]: Eq[Foo[A]] = ??? + +object FooT: + + def foo[X](using Order[X]): Unit = ??? + foo // error + + def map[F[_], A](ffa: F[Foo[A]])(f: A): Nothing = ??? + + given OrderFFooA[F[_], A](using Ord: Order[F[Foo[A]]]): Order[F[Foo[A]]] = ??? + +trait Eq[A] +trait Order[A] extends Eq[A] + +object Eq { + given catsKernelOrderForTuple1[A0](using A0: Order[A0]): Order[Tuple1[A0]] = ??? 
+ given catsKernelOrderForTuple2[A0, A1](using A0: Order[A0], A1: Order[A1]): Order[(A0, A1)] = ??? + given catsKernelOrderForTuple3[A0, A1, A2](using A0: Order[A0], A1: Order[A1], A2: Order[A2]): Order[(A0, A1, A2)] = ??? + given catsKernelOrderForTuple4[A0, A1, A2, A3](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3]): Order[(A0, A1, A2, A3)] = ??? + given catsKernelOrderForTuple5[A0, A1, A2, A3, A4](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4]): Order[(A0, A1, A2, A3, A4)] = ??? + given catsKernelOrderForTuple6[A0, A1, A2, A3, A4, A5](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5]): Order[(A0, A1, A2, A3, A4, A5)] = ??? + given catsKernelOrderForTuple7[A0, A1, A2, A3, A4, A5, A6](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6]): Order[(A0, A1, A2, A3, A4, A5, A6)] = ??? + given catsKernelOrderForTuple8[A0, A1, A2, A3, A4, A5, A6, A7](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7]): Order[(A0, A1, A2, A3, A4, A5, A6, A7)] = ??? + given catsKernelOrderForTuple9[A0, A1, A2, A3, A4, A5, A6, A7, A8](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8)] = ??? + given catsKernelOrderForTuple10[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9)] = ??? 
+ given catsKernelOrderForTuple11[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9], A10: Order[A10]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10)] = ??? + given catsKernelOrderForTuple12[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9], A10: Order[A10], A11: Order[A11]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11)] = ??? + given catsKernelOrderForTuple13[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9], A10: Order[A10], A11: Order[A11], A12: Order[A12]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12)] = ??? + given catsKernelOrderForTuple14[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9], A10: Order[A10], A11: Order[A11], A12: Order[A12], A13: Order[A13]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13)] = ??? + given catsKernelOrderForTuple15[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9], A10: Order[A10], A11: Order[A11], A12: Order[A12], A13: Order[A13], A14: Order[A14]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14)] = ??? 
+ given catsKernelOrderForTuple16[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9], A10: Order[A10], A11: Order[A11], A12: Order[A12], A13: Order[A13], A14: Order[A14], A15: Order[A15]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15)] = ??? + given catsKernelOrderForTuple17[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9], A10: Order[A10], A11: Order[A11], A12: Order[A12], A13: Order[A13], A14: Order[A14], A15: Order[A15], A16: Order[A16]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16)] = ??? + given catsKernelOrderForTuple18[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9], A10: Order[A10], A11: Order[A11], A12: Order[A12], A13: Order[A13], A14: Order[A14], A15: Order[A15], A16: Order[A16], A17: Order[A17]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17)] = ??? + given catsKernelOrderForTuple19[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9], A10: Order[A10], A11: Order[A11], A12: Order[A12], A13: Order[A13], A14: Order[A14], A15: Order[A15], A16: Order[A16], A17: Order[A17], A18: Order[A18]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18)] = ??? 
+ given catsKernelOrderForTuple20[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9], A10: Order[A10], A11: Order[A11], A12: Order[A12], A13: Order[A13], A14: Order[A14], A15: Order[A15], A16: Order[A16], A17: Order[A17], A18: Order[A18], A19: Order[A19]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19)] = ??? + given catsKernelOrderForTuple21[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9], A10: Order[A10], A11: Order[A11], A12: Order[A12], A13: Order[A13], A14: Order[A14], A15: Order[A15], A16: Order[A16], A17: Order[A17], A18: Order[A18], A19: Order[A19], A20: Order[A20]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20)] = ??? + given catsKernelOrderForTuple22[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21](using A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9], A10: Order[A10], A11: Order[A11], A12: Order[A12], A13: Order[A13], A14: Order[A14], A15: Order[A15], A16: Order[A16], A17: Order[A17], A18: Order[A18], A19: Order[A19], A20: Order[A20], A21: Order[A21]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21)] = ??? 
+} diff --git a/tests/neg-custom-args/i13946/BadPrinter.scala b/tests/neg-custom-args/i13946/BadPrinter.scala new file mode 100644 index 000000000000..3ab935de9711 --- /dev/null +++ b/tests/neg-custom-args/i13946/BadPrinter.scala @@ -0,0 +1,4 @@ +// in BadPrinter.scala +import language.future +class BadPrinter extends Printer: // error + override def print(s: String): Unit = println("Bad!!!") \ No newline at end of file diff --git a/tests/neg-custom-args/i13946/Printer.scala b/tests/neg-custom-args/i13946/Printer.scala new file mode 100644 index 000000000000..bc9056de932c --- /dev/null +++ b/tests/neg-custom-args/i13946/Printer.scala @@ -0,0 +1,3 @@ +// in Printer.scala +class Printer: + def print(s: String): Unit = println(s) diff --git a/tests/neg-custom-args/isInstanceOf/enum-approx2.scala b/tests/neg-custom-args/isInstanceOf/enum-approx2.scala index 8350f9cf4b9c..516b765ec64b 100644 --- a/tests/neg-custom-args/isInstanceOf/enum-approx2.scala +++ b/tests/neg-custom-args/isInstanceOf/enum-approx2.scala @@ -5,5 +5,6 @@ class Test { def eval(e: Fun[Int, Int]) = e match { case Fun(x: Fun[Int, Double]) => ??? // error case Fun(x: Exp[Int => String]) => ??? 
// error + case _ => } } \ No newline at end of file diff --git a/tests/neg-custom-args/jdk-9-app.check b/tests/neg-custom-args/jdk-9-app.check new file mode 100644 index 000000000000..0eacc7510766 --- /dev/null +++ b/tests/neg-custom-args/jdk-9-app.check @@ -0,0 +1,6 @@ +-- [E006] Not Found Error: tests/neg-custom-args/jdk-9-app.scala:4:10 -------------------------------------------------- +4 | println(ProcessHandle.current().pid()) // error: not found + | ^^^^^^^^^^^^^ + | Not found: ProcessHandle + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/jdk-9-app.scala b/tests/neg-custom-args/jdk-9-app.scala new file mode 100644 index 000000000000..3dfd55332492 --- /dev/null +++ b/tests/neg-custom-args/jdk-9-app.scala @@ -0,0 +1,5 @@ +import java.lang.ProcessHandle + +object Jdk9App extends App { + println(ProcessHandle.current().pid()) // error: not found +} diff --git a/tests/neg-custom-args/kind-projector-underscores.check b/tests/neg-custom-args/kind-projector-underscores.check new file mode 100644 index 000000000000..2a832ae3d7a2 --- /dev/null +++ b/tests/neg-custom-args/kind-projector-underscores.check @@ -0,0 +1,28 @@ +-- Error: tests/neg-custom-args/kind-projector-underscores.scala:7:23 -------------------------------------------------- +7 |class Bar3 extends Foo[λ[List[x] => Int]] // error + | ^^^^^^^^^^^^^^^^^ + | λ requires a single argument of the form X => ... or (X, Y) => ... 
+-- [E095] Syntax Error: tests/neg-custom-args/kind-projector-underscores.scala:10:8 ------------------------------------ +10 | type -_ = Int // error -_ not allowed as a type def name without backticks + | ^ + | =, >:, or <: expected, but '_' found + | + | longer explanation available when compiling with `-explain` +-- [E095] Syntax Error: tests/neg-custom-args/kind-projector-underscores.scala:11:8 ------------------------------------ +11 | type +_ = Int // error +_ not allowed as a type def name without backticks + | ^ + | =, >:, or <: expected, but '_' found + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/kind-projector-underscores.scala:14:51 ------------------------------------------------- +14 |class BacktickUnderscoreIsNotFine extends Foo[List[`_`]] // error wildcard invalid as backquoted identifier + | ^ + | wildcard invalid as backquoted identifier +-- Error: tests/neg-custom-args/kind-projector-underscores.scala:5:23 -------------------------------------------------- +5 |class Bar1 extends Foo[Either[_, _]] // error + | ^^^^^^^^^^^^ + | Type argument Either does not have the same kind as its bound [_$1] +-- Error: tests/neg-custom-args/kind-projector-underscores.scala:6:22 -------------------------------------------------- +6 |class Bar2 extends Foo[_] // error + | ^ + | Type argument _ does not have the same kind as its bound [_$1] diff --git a/tests/neg-custom-args/kind-projector-underscores.scala b/tests/neg-custom-args/kind-projector-underscores.scala new file mode 100644 index 000000000000..64d46b16a7c6 --- /dev/null +++ b/tests/neg-custom-args/kind-projector-underscores.scala @@ -0,0 +1,14 @@ +package kind_projector_neg + +trait Foo[F[_]] + +class Bar1 extends Foo[Either[_, _]] // error +class Bar2 extends Foo[_] // error +class Bar3 extends Foo[λ[List[x] => Int]] // error + +object Test { + type -_ = Int // error -_ not allowed as a type def name without backticks + type +_ = Int // error +_ not 
allowed as a type def name without backticks +} + +class BacktickUnderscoreIsNotFine extends Foo[List[`_`]] // error wildcard invalid as backquoted identifier diff --git a/tests/neg-custom-args/kind-projector.check b/tests/neg-custom-args/kind-projector.check index 1375cf22b3f9..f6c258c5c58d 100644 --- a/tests/neg-custom-args/kind-projector.check +++ b/tests/neg-custom-args/kind-projector.check @@ -9,4 +9,4 @@ -- Error: tests/neg-custom-args/kind-projector.scala:6:22 -------------------------------------------------------------- 6 |class Bar2 extends Foo[*] // error | ^ - | Type argument _$4 does not have the same kind as its bound [_$1] + | Type argument _ does not have the same kind as its bound [_$1] diff --git a/tests/neg-custom-args/no-experimental/14034.scala b/tests/neg-custom-args/no-experimental/14034.scala new file mode 100644 index 000000000000..c0b4cc6899db --- /dev/null +++ b/tests/neg-custom-args/no-experimental/14034.scala @@ -0,0 +1,12 @@ +import annotation.experimental + +@experimental trait Exp +@experimental val exp = 1 + +type Foo0 = Exp // error +type Foo = Option[Exp] // error +type Bar = Option[exp.type] // error +type Baz = Exp | Int // error +type Quux = [X] =>> X match // error + case Exp => Int +type Quuz[A <: Exp] = Int // error diff --git a/tests/neg-custom-args/no-experimental/experimental-2.scala b/tests/neg-custom-args/no-experimental/experimental-2.scala new file mode 100644 index 000000000000..e2a8dcef58b8 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimental-2.scala @@ -0,0 +1,5 @@ +class Test7 { + import scala.language.experimental + import experimental.genericNumberLiterals // error: no aliases can be used to refer to a language import + val x: BigInt = 13232202002020202020202 // error +} diff --git a/tests/neg-custom-args/no-experimental/experimental-erased.scala b/tests/neg-custom-args/no-experimental/experimental-erased.scala new file mode 100644 index 000000000000..c80c3e0d4b49 --- /dev/null +++ 
b/tests/neg-custom-args/no-experimental/experimental-erased.scala @@ -0,0 +1,9 @@ +import language.experimental.erasedDefinitions +import annotation.experimental + +@experimental +erased class CanThrow[-E <: Exception] + +erased class CanThrow2[-E <: Exception] // error + +def other = 1 diff --git a/tests/neg-custom-args/no-experimental/experimental-imports.scala b/tests/neg-custom-args/no-experimental/experimental-imports.scala new file mode 100644 index 000000000000..63a150978b1c --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimental-imports.scala @@ -0,0 +1,46 @@ +import annotation.experimental + +@experimental +object Object1: + import language.experimental.fewerBraces + import language.experimental.namedTypeArguments + import language.experimental.genericNumberLiterals + import language.experimental.erasedDefinitions + erased def f = 1 + +object Object2: + import language.experimental.fewerBraces // error + import language.experimental.namedTypeArguments // error + import language.experimental.genericNumberLiterals // error + import language.experimental.erasedDefinitions + erased def f = 1 + +@experimental +object Class1: + import language.experimental.fewerBraces + import language.experimental.namedTypeArguments + import language.experimental.genericNumberLiterals + import language.experimental.erasedDefinitions + erased def f = 1 + +object Class2: + import language.experimental.fewerBraces // error + import language.experimental.namedTypeArguments // error + import language.experimental.genericNumberLiterals // error + import language.experimental.erasedDefinitions + erased def f = 1 + +@experimental +def fun1 = + import language.experimental.fewerBraces + import language.experimental.namedTypeArguments + import language.experimental.genericNumberLiterals + import language.experimental.erasedDefinitions + erased def f = 1 + +def fun2 = + import language.experimental.fewerBraces // error + import language.experimental.namedTypeArguments // 
error + import language.experimental.genericNumberLiterals // error + import language.experimental.erasedDefinitions + erased def f = 1 diff --git a/tests/neg-custom-args/no-experimental/experimental-nested-imports-2.scala b/tests/neg-custom-args/no-experimental/experimental-nested-imports-2.scala new file mode 100644 index 000000000000..85076cca723a --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimental-nested-imports-2.scala @@ -0,0 +1,33 @@ +import annotation.experimental + +class Class1: + import language.experimental.fewerBraces // error + import language.experimental.namedTypeArguments // error + import language.experimental.genericNumberLiterals // error + import language.experimental.erasedDefinitions // ok: only check at erased definition + @experimental def f = 1 + def g = 1 + +object Object1: + import language.experimental.fewerBraces // error + import language.experimental.namedTypeArguments // error + import language.experimental.genericNumberLiterals // error + import language.experimental.erasedDefinitions // ok: only check at erased definition + @experimental def f = 1 + def g = 1 + +def fun1 = + import language.experimental.fewerBraces // error + import language.experimental.namedTypeArguments // error + import language.experimental.genericNumberLiterals // error + import language.experimental.erasedDefinitions // ok: only check at erased definition + @experimental def f = 1 + def g = 1 + +val value1 = + import language.experimental.fewerBraces // error + import language.experimental.namedTypeArguments // error + import language.experimental.genericNumberLiterals // error + import language.experimental.erasedDefinitions // ok: only check at erased definition + @experimental def f = 1 + def g = 1 \ No newline at end of file diff --git a/tests/neg-custom-args/no-experimental/experimental-nested-imports-3.scala b/tests/neg-custom-args/no-experimental/experimental-nested-imports-3.scala new file mode 100644 index 
000000000000..1af04918b1d9 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimental-nested-imports-3.scala @@ -0,0 +1,25 @@ +import annotation.experimental + +class Class1: + import language.experimental.fewerBraces // error + import language.experimental.namedTypeArguments // error + import language.experimental.genericNumberLiterals // error + import language.experimental.erasedDefinitions // ok: only check at erased definition + +object Object1: + import language.experimental.fewerBraces // error + import language.experimental.namedTypeArguments // error + import language.experimental.genericNumberLiterals // error + import language.experimental.erasedDefinitions // ok: only check at erased definition + +def fun1 = + import language.experimental.fewerBraces // error + import language.experimental.namedTypeArguments // error + import language.experimental.genericNumberLiterals // error + import language.experimental.erasedDefinitions // ok: only check at erased definition + +val value1 = + import language.experimental.fewerBraces // error + import language.experimental.namedTypeArguments // error + import language.experimental.genericNumberLiterals // error + import language.experimental.erasedDefinitions // ok: only check at erased definition diff --git a/tests/neg-custom-args/no-experimental/experimental-nested-imports.scala b/tests/neg-custom-args/no-experimental/experimental-nested-imports.scala new file mode 100644 index 000000000000..b9fc38dc4915 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimental-nested-imports.scala @@ -0,0 +1,29 @@ +import annotation.experimental + +class Class1: + import language.experimental.fewerBraces // error + import language.experimental.namedTypeArguments // error + import language.experimental.genericNumberLiterals // error + import language.experimental.erasedDefinitions // ok: only check at erased definition + @experimental def f = 1 + +object Object1: + import language.experimental.fewerBraces 
// error + import language.experimental.namedTypeArguments // error + import language.experimental.genericNumberLiterals // error + import language.experimental.erasedDefinitions // ok: only check at erased definition + @experimental def f = 1 + +def fun1 = + import language.experimental.fewerBraces // error + import language.experimental.namedTypeArguments // error + import language.experimental.genericNumberLiterals // error + import language.experimental.erasedDefinitions // ok: only check at erased definition + @experimental def f = 1 + +val value1 = + import language.experimental.fewerBraces // error + import language.experimental.namedTypeArguments // error + import language.experimental.genericNumberLiterals // error + import language.experimental.erasedDefinitions // ok: only check at erased definition + @experimental def f = 1 diff --git a/tests/neg-custom-args/no-experimental/experimental-package-imports.scala b/tests/neg-custom-args/no-experimental/experimental-package-imports.scala new file mode 100644 index 000000000000..90ec387b1036 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimental-package-imports.scala @@ -0,0 +1,24 @@ +import annotation.experimental + +package foo { + import language.experimental.fewerBraces // error + import language.experimental.namedTypeArguments // error + import language.experimental.genericNumberLiterals // error + import language.experimental.erasedDefinitions // ok: only check at erased definition + + package bar { + def foo = 1 + } +} + +package foo2 { + // ok: all definitions are top-level @experimental + import language.experimental.fewerBraces + import language.experimental.namedTypeArguments + import language.experimental.genericNumberLiterals + import language.experimental.erasedDefinitions + + package bar { + @experimental def foo = 1 + } +} diff --git a/tests/neg-custom-args/no-experimental/experimental.scala b/tests/neg-custom-args/no-experimental/experimental.scala index 
26f9ba3d21c7..42d0d8066c35 100644 --- a/tests/neg-custom-args/no-experimental/experimental.scala +++ b/tests/neg-custom-args/no-experimental/experimental.scala @@ -7,7 +7,7 @@ class Test0 { } class Test1 { - import scala.language.experimental.erasedDefinitions // error + import scala.language.experimental.erasedDefinitions import scala.compiletime.erasedValue type UnivEq[A] object UnivEq: @@ -26,9 +26,3 @@ class Test2 { class Test6 { import scala.language.experimental // ok } - -class Test7 { - import scala.language.experimental - import experimental.genericNumberLiterals // error: no aliases can be used to refer to a language import - val x: BigInt = 13232202002020202020202 // error -} \ No newline at end of file diff --git a/tests/neg-custom-args/no-experimental/experimentalAnnot.scala b/tests/neg-custom-args/no-experimental/experimentalAnnot.scala new file mode 100644 index 000000000000..e6dfbf28f8bb --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalAnnot.scala @@ -0,0 +1,22 @@ +import scala.annotation.experimental + +@experimental class myExperimentalAnnot extends scala.annotation.Annotation + +@myExperimentalAnnot // error +def test1: Unit = () + +@experimental +@myExperimentalAnnot +def test2: Unit = () + +@experimental +class Foo { + @myExperimentalAnnot + def test3: Unit = () + + def test4: Unit = { + @myExperimentalAnnot + val f: Unit = () + f + } +} diff --git a/tests/neg-custom-args/no-experimental/experimentalCaseClass.scala b/tests/neg-custom-args/no-experimental/experimentalCaseClass.scala new file mode 100644 index 000000000000..b112c8a1213a --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalCaseClass.scala @@ -0,0 +1,26 @@ +import scala.annotation.experimental + +@experimental +case class Foo(a: Int) + +@experimental +case class Bar(a: Int) + +object Bar: + def f(): Unit = () + +def test: Unit = + Foo(2) // error + val x: Foo = ??? 
// error + + x match + case Foo(a) => // error + + + Bar(2) // error + val y: Bar = ??? // error + + y match + case Bar(a) => // error + + Bar.f() // error diff --git a/tests/neg-custom-args/no-experimental/experimentalDefaultParams.scala b/tests/neg-custom-args/no-experimental/experimentalDefaultParams.scala new file mode 100644 index 000000000000..4dedb3afa11d --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalDefaultParams.scala @@ -0,0 +1,27 @@ +import scala.annotation.experimental + +@experimental def x = 2 + +def test1( + p6: Any = x // error: def x is marked @experimental and therefore ... +): Any = ??? + +@experimental def test2( + p6: Any = x +): Any = ??? + +class Test1( + p6: Any = x // error +) {} + +@experimental class Test2( + p6: Any = x +) {} + +trait Test3( + p6: Any = x // error +) {} + +@experimental trait Test4( + p6: Any = x +) {} diff --git a/tests/neg-custom-args/no-experimental/experimentalEnum.scala b/tests/neg-custom-args/no-experimental/experimentalEnum.scala new file mode 100644 index 000000000000..1cbe78ca5427 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalEnum.scala @@ -0,0 +1,12 @@ +import scala.annotation.experimental + +@experimental +enum E: + case A + case B + +def test: Unit = + E.A // error + E.B // error + val e: E = ??? 
// error + () diff --git a/tests/neg-custom-args/no-experimental/experimentalErased.scala b/tests/neg-custom-args/no-experimental/experimentalErased.scala new file mode 100644 index 000000000000..6fcb11a3cc2f --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalErased.scala @@ -0,0 +1,22 @@ +import language.experimental.erasedDefinitions +import annotation.experimental + +@experimental +erased class Foo + +erased class Bar // error + +@experimental +erased def foo = 2 + +erased def bar = 2 // error + +@experimental +erased val foo2 = 2 + +erased val bar2 = 2 // error + +@experimental +def foo3(erased a: Int) = 2 + +def bar3(erased a: Int) = 2 // error diff --git a/tests/neg-custom-args/no-experimental/experimentalInheritance.scala b/tests/neg-custom-args/no-experimental/experimentalInheritance.scala new file mode 100644 index 000000000000..f6eab1224310 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalInheritance.scala @@ -0,0 +1,14 @@ +import scala.annotation.experimental + +@experimental def x = 2 + +@experimental class A1(x: Any) +class A2(x: Any) + + +@experimental class B1 extends A1(1) +class B2 // error: extension of experimental class A1 must have @experimental annotation +extends A1(1) // error: class A1 is marked @experimental ... 
+ +@experimental class C1 extends A2(x) +class C2 extends A2(x) // error def x is marked @experimental and therefore diff --git a/tests/neg-custom-args/no-experimental/experimentalInline.scala b/tests/neg-custom-args/no-experimental/experimentalInline.scala new file mode 100644 index 000000000000..8827fd42e36a --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalInline.scala @@ -0,0 +1,8 @@ +import scala.annotation.experimental + +@experimental +inline def g() = () + +def test: Unit = + g() // errors + () diff --git a/tests/neg-custom-args/no-experimental/experimentalMembers.scala b/tests/neg-custom-args/no-experimental/experimentalMembers.scala new file mode 100644 index 000000000000..e30f27b069a8 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalMembers.scala @@ -0,0 +1,39 @@ +import scala.annotation.experimental + +@experimental def x = 2 + +@experimental class A { + def f = x // ok because A is experimental +} + +@experimental class B { + def f = x // ok because A is experimental +} + +@experimental object C { + def f = x // ok because A is experimental +} + +@experimental class D { + def f = { + object B { + x // ok because A is experimental + } + } +} + +@experimental class E { + def f = { + def g = { + x // ok because A is experimental + } + } +} + +class F { + def f = { + def g = { + x // error + } + } +} diff --git a/tests/neg-custom-args/no-experimental/experimentalOverride.scala b/tests/neg-custom-args/no-experimental/experimentalOverride.scala new file mode 100644 index 000000000000..653bd3b23da4 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalOverride.scala @@ -0,0 +1,39 @@ +import scala.annotation.experimental + +@experimental +class A: + def f() = 1 + +@experimental +class B extends A: + override def f() = 2 + +class C: + @experimental + def f() = 1 + +class D extends C: + override def f() = 2 + +trait A2: + @experimental + def f(): Int + +trait B2: + def f(): Int + +class C2 extends A2, B2: 
+ def f(): Int = 1 + +def test: Unit = + val a: A = ??? // error + val b: B = ??? // error + val c: C = ??? + val d: D = ??? + val c2: C2 = ??? + a.f() // error + b.f() // error + c.f() // error + d.f() // ok because D.f is a stable API + c2.f() // ok because B2.f is a stable API + () diff --git a/tests/neg-custom-args/no-experimental/experimentalRHS.scala b/tests/neg-custom-args/no-experimental/experimentalRHS.scala new file mode 100644 index 000000000000..27143c120b96 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalRHS.scala @@ -0,0 +1,16 @@ +import scala.annotation.experimental + +@experimental +def x = () + +def d1 = x // error: value x is marked @experimental and therefore ... +@experimental def d2 = x + +val v1 = x // error: value x is marked @experimental and therefore ... +@experimental val v2 = x + +var vr1 = x // error: value x is marked @experimental and therefore ... +@experimental var vr2 = x + +lazy val lv1 = x // error: value x is marked @experimental and therefore ... 
+@experimental lazy val lv2 = x diff --git a/tests/neg-custom-args/no-experimental/experimentalSam.scala b/tests/neg-custom-args/no-experimental/experimentalSam.scala new file mode 100644 index 000000000000..cdc9e61858d9 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalSam.scala @@ -0,0 +1,11 @@ +import scala.annotation.experimental + +@experimental +trait ExpSAM { + def foo(x: Int): Int +} +def bar(f: ExpSAM): Unit = {} // error + +def test: Unit = + bar(x => x) // error + () diff --git a/tests/neg-custom-args/no-experimental/experimentalSignature.scala b/tests/neg-custom-args/no-experimental/experimentalSignature.scala new file mode 100644 index 000000000000..9b1d3c5e999f --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalSignature.scala @@ -0,0 +1,54 @@ +import scala.annotation.experimental + +@experimental class A +@experimental type X +@experimental type Y = Int +@experimental opaque type Z = Int + +def test1( + p1: A, // error: class A is marked @experimental and therefore ... + p2: List[A], // error: class A is marked @experimental and therefore ... + p3: X, // error: type X is marked @experimental and therefore ... + p4: Y, // error: type Y is marked @experimental and therefore ... + p5: Z, // error: type Z is marked @experimental and therefore ... +): A = ??? // error: class A is marked @experimental and therefore ... + +@experimental def test2( + p1: A, + p2: List[A], + p3: X, + p4: Y, + p5: Z, +): A = ??? 
+ +class Test1( + p1: A, // error + p2: List[A], // error + p3: X, // error + p4: Y, // error + p5: Z, // error +) {} + +@experimental class Test2( + p1: A, + p2: List[A], + p3: X, + p4: Y, + p5: Z, +) {} + +trait Test3( + p1: A, // error + p2: List[A], // error + p3: X, // error + p4: Y, // error + p5: Z, // error +) {} + +@experimental trait Test4( + p1: A, + p2: List[A], + p3: X, + p4: Y, + p5: Z, +) {} diff --git a/tests/neg-custom-args/no-experimental/experimentalTerms.scala b/tests/neg-custom-args/no-experimental/experimentalTerms.scala new file mode 100644 index 000000000000..09b69d2da381 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalTerms.scala @@ -0,0 +1,27 @@ +import scala.annotation.experimental + +@experimental +val x = () + +@experimental +def f() = () + +@experimental +object X: + def fx() = 1 + +def test1: Unit = + f() // error: def f is marked @experimental and therefore ... + x // error: value x is marked @experimental and therefore ... + X.fx() // error: object X is marked @experimental and therefore ... + import X.fx + fx() // error: object X is marked @experimental and therefore ... 
+ +@experimental +def test2: Unit = + // references to f, x and X are ok because `test2` is experimental + f() + x + X.fx() + import X.fx + fx() diff --git a/tests/neg-custom-args/no-experimental/experimentalTests.scala b/tests/neg-custom-args/no-experimental/experimentalTests.scala new file mode 100644 index 000000000000..f3fbcf8c587c --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalTests.scala @@ -0,0 +1,15 @@ +import scala.annotation.experimental + +@experimental def x = 2 + +class MyTests { + /*@Test*/ def test1 = x // error + @experimental + /*@Test*/ def test2 = x +} + +@experimental +class MyExperimentalTests { + /*@Test*/ def test1 = x + /*@Test*/ def test2 = x +} diff --git a/tests/neg-custom-args/no-experimental/experimentalType.scala b/tests/neg-custom-args/no-experimental/experimentalType.scala new file mode 100644 index 000000000000..f4013788796a --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalType.scala @@ -0,0 +1,22 @@ +import scala.annotation.experimental + +@experimental +class A + +@experimental +class B extends A + +@experimental +type X + +@experimental +type Y = Int + +@experimental +opaque type Z = Int + +type AA = A // error +type BB = Z // error +type XX = Z // error +type YY = Z // error +type ZZ = Z // error diff --git a/tests/neg-custom-args/no-experimental/experimentalTypeRHS.scala b/tests/neg-custom-args/no-experimental/experimentalTypeRHS.scala new file mode 100644 index 000000000000..3aaeb960bae9 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalTypeRHS.scala @@ -0,0 +1,6 @@ +import scala.annotation.experimental + +@experimental type E + +type A = E // error +@experimental type B = E diff --git a/tests/neg-custom-args/no-experimental/experimentalTypes2.scala b/tests/neg-custom-args/no-experimental/experimentalTypes2.scala new file mode 100644 index 000000000000..706fd39fd15c --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalTypes2.scala @@ -0,0 +1,14 @@ 
+import scala.annotation.experimental + +@experimental class A +@experimental type X +@experimental type Y = Int +@experimental opaque type Z = Int + +def test2: Unit = + new A // error: class A is marked @experimental and therefore ... + val i0: A = ??? // error: class A is marked @experimental and therefore ... + val i1: X = ??? // error: type X is marked @experimental and therefore ... + val i2: Y = ??? // error: type Y is marked @experimental and therefore ... + val i3: Z = ??? // error: type Z is marked @experimental and therefore ... + () diff --git a/tests/neg-custom-args/no-experimental/experimentalUnapply.scala b/tests/neg-custom-args/no-experimental/experimentalUnapply.scala new file mode 100644 index 000000000000..0ba338a15a96 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/experimentalUnapply.scala @@ -0,0 +1,18 @@ +import scala.annotation.experimental + +@experimental +class A + +object Extractor1: + def unapply(s: Any): Option[A] = ??? // error + +object Extractor2: + @experimental + def unapply(s: Any): Option[Int] = ??? + +def test: Unit = + (??? : Any) match + case _: A => // error // error + case Extractor1(_) => // error + case Extractor2(_) => // error + () diff --git a/tests/neg-custom-args/no-experimental/i13091.scala b/tests/neg-custom-args/no-experimental/i13091.scala new file mode 100644 index 000000000000..2b08788ebbc1 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/i13091.scala @@ -0,0 +1,5 @@ +import annotation.experimental + +@experimental class Foo + +def test: Unit = new Foo // error: class Foo is marked @experimental ... 
diff --git a/tests/neg-custom-args/no-experimental/i13848.scala b/tests/neg-custom-args/no-experimental/i13848.scala new file mode 100644 index 000000000000..886ab1e85d67 --- /dev/null +++ b/tests/neg-custom-args/no-experimental/i13848.scala @@ -0,0 +1,7 @@ +import annotation.experimental + +@main +def run(): Unit = f // error + +@experimental +def f = 2 diff --git a/tests/neg-custom-args/nowarn/nowarn-parser-error.check b/tests/neg-custom-args/nowarn/nowarn-parser-error.check new file mode 100644 index 000000000000..ce0d9113cf7f --- /dev/null +++ b/tests/neg-custom-args/nowarn/nowarn-parser-error.check @@ -0,0 +1,13 @@ +-- [E040] Syntax Error: tests/neg-custom-args/nowarn/nowarn-parser-error.scala:3:6 ------------------------------------- +3 | def def // error + | ^^^ + | an identifier expected, but 'def' found + | + | longer explanation available when compiling with `-explain` +-- [E000] Syntax Warning: tests/neg-custom-args/nowarn/nowarn-parser-error.scala:2:10 ---------------------------------- +2 | def a = try 1 // warn + | ^^^^^ + | A try without catch or finally is equivalent to putting + | its body in a block; no exceptions are handled. 
+ | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/nowarn/nowarn-parser-error.scala b/tests/neg-custom-args/nowarn/nowarn-parser-error.scala new file mode 100644 index 000000000000..2c65d3cf1838 --- /dev/null +++ b/tests/neg-custom-args/nowarn/nowarn-parser-error.scala @@ -0,0 +1,4 @@ +class C { + def a = try 1 // warn + def def // error +} diff --git a/tests/neg-custom-args/nowarn/nowarn-typer-error.check b/tests/neg-custom-args/nowarn/nowarn-typer-error.check new file mode 100644 index 000000000000..a6c915807dca --- /dev/null +++ b/tests/neg-custom-args/nowarn/nowarn-typer-error.check @@ -0,0 +1,6 @@ +-- [E006] Not Found Error: tests/neg-custom-args/nowarn/nowarn-typer-error.scala:4:11 ---------------------------------- +4 | def t1 = / // error + | ^ + | Not found: / + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-custom-args/nowarn/nowarn-typer-error.scala b/tests/neg-custom-args/nowarn/nowarn-typer-error.scala new file mode 100644 index 000000000000..8ab871b108f6 --- /dev/null +++ b/tests/neg-custom-args/nowarn/nowarn-typer-error.scala @@ -0,0 +1,7 @@ +import annotation.nowarn +object T { + @deprecated def f = 1 + def t1 = / // error + @nowarn // unused-nowarn is not issued if earlier phase has an error. + def t2 = f // no warning, refchecks doesn't run if typer has an error +} diff --git a/tests/neg-custom-args/nowarn/nowarn.check b/tests/neg-custom-args/nowarn/nowarn.check new file mode 100644 index 000000000000..d542643dbbbc --- /dev/null +++ b/tests/neg-custom-args/nowarn/nowarn.check @@ -0,0 +1,108 @@ +-- [E000] Syntax Warning: tests/neg-custom-args/nowarn/nowarn.scala:9:10 ----------------------------------------------- +9 |def t1a = try 1 // warning (parser) + | ^^^^^ + | A try without catch or finally is equivalent to putting + | its body in a block; no exceptions are handled. 
+ | + | longer explanation available when compiling with `-explain` +-- [E000] Syntax Warning: tests/neg-custom-args/nowarn/nowarn.scala:23:25 ---------------------------------------------- +23 |@nowarn(o.inl) def t2d = try 1 // two warnings (`inl` is not a compile-time constant) + | ^^^^^ + | A try without catch or finally is equivalent to putting + | its body in a block; no exceptions are handled. + | + | longer explanation available when compiling with `-explain` +-- [E000] Syntax Warning: tests/neg-custom-args/nowarn/nowarn.scala:31:26 ---------------------------------------------- +31 |@nowarn("id=1") def t4d = try 1 // error and warning (unused nowarn, wrong id) + | ^^^^^ + | A try without catch or finally is equivalent to putting + | its body in a block; no exceptions are handled. + | + | longer explanation available when compiling with `-explain` +-- [E000] Syntax Warning: tests/neg-custom-args/nowarn/nowarn.scala:33:28 ---------------------------------------------- +33 |@nowarn("verbose") def t5 = try 1 // warning with details + | ^^^^^ + | A try without catch or finally is equivalent to putting + | its body in a block; no exceptions are handled. +Matching filters for @nowarn or -Wconf: + - id=E0 + - name=EmptyCatchOrFinallyBlock + | + | longer explanation available when compiling with `-explain` +-- [E129] Potential Issue Warning: tests/neg-custom-args/nowarn/nowarn.scala:13:11 ------------------------------------- +13 |def t2 = { 1; 2 } // warning (the invalid nowarn doesn't silence anything) + | ^ + | A pure expression does nothing in statement position; you may be omitting necessary parentheses + | + | longer explanation available when compiling with `-explain` +-- Warning: tests/neg-custom-args/nowarn/nowarn.scala:12:8 ------------------------------------------------------------- +12 |@nowarn("wat?") // warning (typer, invalid filter) + | ^^^^^^ + | Invalid message filter + | unknown filter: wat? 
+-- [E129] Potential Issue Warning: tests/neg-custom-args/nowarn/nowarn.scala:16:12 ------------------------------------- +16 |def t2a = { 1; 2 } // warning (invalid nowarn doesn't silence) + | ^ + | A pure expression does nothing in statement position; you may be omitting necessary parentheses + | + | longer explanation available when compiling with `-explain` +-- Warning: tests/neg-custom-args/nowarn/nowarn.scala:15:8 ------------------------------------------------------------- +15 |@nowarn(t1a.toString) // warning (typer, argument not a compile-time constant) + | ^^^^^^^^^^^^ + | filter needs to be a compile-time constant string +-- Warning: tests/neg-custom-args/nowarn/nowarn.scala:23:10 ------------------------------------------------------------ +23 |@nowarn(o.inl) def t2d = try 1 // two warnings (`inl` is not a compile-time constant) + | ^^^^^ + | filter needs to be a compile-time constant string +-- Deprecation Warning: tests/neg-custom-args/nowarn/nowarn.scala:37:10 ------------------------------------------------ +37 |def t6a = f // warning (refchecks, deprecation) + | ^ + | method f is deprecated +-- Deprecation Warning: tests/neg-custom-args/nowarn/nowarn.scala:40:30 ------------------------------------------------ +40 |@nowarn("msg=fish") def t6d = f // error (unused nowarn), warning (deprecation) + | ^ + | method f is deprecated +-- Deprecation Warning: tests/neg-custom-args/nowarn/nowarn.scala:47:10 ------------------------------------------------ +47 |def t7c = f: // warning (deprecation) + | ^ + | method f is deprecated +-- Unchecked Warning: tests/neg-custom-args/nowarn/nowarn.scala:53:7 --------------------------------------------------- +53 | case _: List[Int] => 0 // warning (patmat, unchecked) + | ^ + | the type test for List[Int] cannot be checked at runtime +-- Error: tests/neg-custom-args/nowarn/nowarn.scala:31:1 --------------------------------------------------------------- +31 |@nowarn("id=1") def t4d = try 1 // error and warning 
(unused nowarn, wrong id) + |^^^^^^^^^^^^^^^ + |@nowarn annotation does not suppress any warnings +-- Error: tests/neg-custom-args/nowarn/nowarn.scala:40:1 --------------------------------------------------------------- +40 |@nowarn("msg=fish") def t6d = f // error (unused nowarn), warning (deprecation) + |^^^^^^^^^^^^^^^^^^^ + |@nowarn annotation does not suppress any warnings +-- Error: tests/neg-custom-args/nowarn/nowarn.scala:48:3 --------------------------------------------------------------- +48 | @nowarn("msg=fish") // error (unused nowarn) + | ^^^^^^^^^^^^^^^^^^^ + | @nowarn annotation does not suppress any warnings +-- Error: tests/neg-custom-args/nowarn/nowarn.scala:60:0 --------------------------------------------------------------- +60 |@nowarn def t9a = { 1: @nowarn; 2 } // error (outer @nowarn is unused) + |^^^^^^^ + |@nowarn annotation does not suppress any warnings +-- Error: tests/neg-custom-args/nowarn/nowarn.scala:61:27 -------------------------------------------------------------- +61 |@nowarn def t9b = { 1: Int @nowarn; 2 } // error (inner @nowarn is unused, it covers the type, not the expression) + | ^^^^^^^ + | @nowarn annotation does not suppress any warnings +-- Error: tests/neg-custom-args/nowarn/nowarn.scala:66:0 --------------------------------------------------------------- +66 |@nowarn @ann(f) def t10b = 0 // error (unused nowarn) + |^^^^^^^ + |@nowarn annotation does not suppress any warnings +-- Error: tests/neg-custom-args/nowarn/nowarn.scala:67:8 --------------------------------------------------------------- +67 |@ann(f: @nowarn) def t10c = 0 // error (unused nowarn), should be silent + | ^^^^^^^ + | @nowarn annotation does not suppress any warnings +-- Error: tests/neg-custom-args/nowarn/nowarn.scala:70:0 --------------------------------------------------------------- +70 |@nowarn class I1a { // error (unused nowarn) + |^^^^^^^ + |@nowarn annotation does not suppress any warnings +-- Error: 
tests/neg-custom-args/nowarn/nowarn.scala:75:0 --------------------------------------------------------------- +75 |@nowarn class I1b { // error (unused nowarn) + |^^^^^^^ + |@nowarn annotation does not suppress any warnings diff --git a/tests/neg-custom-args/nowarn/nowarn.scala b/tests/neg-custom-args/nowarn/nowarn.scala new file mode 100644 index 000000000000..39ecde91517f --- /dev/null +++ b/tests/neg-custom-args/nowarn/nowarn.scala @@ -0,0 +1,87 @@ +import scala.annotation.{ nowarn, Annotation } + +// This test doesn't run with `-Werror`, because once there's an error, later phases are skipped and we would not see +// their warnings. +// Instead, this test runs with `-Wunused:nowarn -Wconf:msg=@nowarn annotation does not suppress any warnings:e`. +// Only "unused nowarn" warnings are reported as errors. Since these warnings are reported at the very end, all other +// phases of the compiler run normally. + +def t1a = try 1 // warning (parser) +@nowarn("msg=try without catch") def t1b = try 1 + +@nowarn("wat?") // warning (typer, invalid filter) +def t2 = { 1; 2 } // warning (the invalid nowarn doesn't silence anything) + +@nowarn(t1a.toString) // warning (typer, argument not a compile-time constant) +def t2a = { 1; 2 } // warning (invalid nowarn doesn't silence) + +object o: + final val const = "msg=try" + inline def inl = "msg=try" + +@nowarn(o.const) def t2c = try 1 // no warning +@nowarn(o.inl) def t2d = try 1 // two warnings (`inl` is not a compile-time constant) + +@nowarn("id=E129") def t3a = { 1; 2 } +@nowarn("name=PureExpressionInStatementPosition") def t3b = { 1; 2 } + +@nowarn("id=E000") def t4a = try 1 +@nowarn("id=E0") def t4b = try 1 +@nowarn("id=0") def t4c = try 1 +@nowarn("id=1") def t4d = try 1 // error and warning (unused nowarn, wrong id) + +@nowarn("verbose") def t5 = try 1 // warning with details + +@deprecated def f = 0 + +def t6a = f // warning (refchecks, deprecation) +@nowarn("cat=deprecation") def t6b = f +@nowarn("msg=deprecated") def 
t6c = f +@nowarn("msg=fish") def t6d = f // error (unused nowarn), warning (deprecation) +@nowarn("") def t6e = f +@nowarn def t6f = f + +def t7a = f: @nowarn("cat=deprecation") +def t7b = f: + @nowarn("msg=deprecated") +def t7c = f: // warning (deprecation) + @nowarn("msg=fish") // error (unused nowarn) +def t7d = f: @nowarn("") +def t7e = f: @nowarn + +def t8a(x: Any) = x match + case _: List[Int] => 0 // warning (patmat, unchecked) + case _ => 1 + +@nowarn("cat=unchecked") def t8(x: Any) = x match + case _: List[Int] => 0 + case _ => 1 + +@nowarn def t9a = { 1: @nowarn; 2 } // error (outer @nowarn is unused) +@nowarn def t9b = { 1: Int @nowarn; 2 } // error (inner @nowarn is unused, it covers the type, not the expression) + +class ann(a: Any) extends Annotation + +@ann(f) def t10a = 0 // should be a deprecation warning, but currently isn't +@nowarn @ann(f) def t10b = 0 // error (unused nowarn) +@ann(f: @nowarn) def t10c = 0 // error (unused nowarn), should be silent + +def forceCompletionOfI1a = (new I1a).m +@nowarn class I1a { // error (unused nowarn) + @nowarn def m = { 1; 2 } +} + +// completion during type checking +@nowarn class I1b { // error (unused nowarn) + @nowarn def m = { 1; 2 } +} + +@nowarn class I1c { + def m = { 1; 2 } +} + +trait T { + @nowarn val t1 = { 0; 1 } +} + +class K extends T diff --git a/tests/neg-custom-args/typeclass-derivation2.scala b/tests/neg-custom-args/typeclass-derivation2.scala index 75e549413027..be54d7697994 100644 --- a/tests/neg-custom-args/typeclass-derivation2.scala +++ b/tests/neg-custom-args/typeclass-derivation2.scala @@ -117,7 +117,7 @@ object TypeLevel { type Subtype[t] = Type[_, t] type Supertype[t] = Type[t, _] type Exactly[t] = Type[t, t] - erased def typeOf[T]: Type[T, T] = ??? 
+ erased def typeOf[T]: Type[T, T] = compiletime.erasedValue } // An algebraic datatype diff --git a/tests/neg-macros/beta-reduce-inline-result.check b/tests/neg-macros/beta-reduce-inline-result.check index 0dd6098fb820..7b615feef308 100644 --- a/tests/neg-macros/beta-reduce-inline-result.check +++ b/tests/neg-macros/beta-reduce-inline-result.check @@ -4,5 +4,5 @@ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | Found: Int | Required: (4 : Int) - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-macros/delegate-match-1.check b/tests/neg-macros/delegate-match-1.check index 6d88d021416f..93b20beba183 100644 --- a/tests/neg-macros/delegate-match-1.check +++ b/tests/neg-macros/delegate-match-1.check @@ -4,4 +4,3 @@ | ^ | AmbiguousImplicits | both value a1 in class Test1 and value a2 in class Test1 match type A - | This location contains code that was inlined from Test_2.scala:6 diff --git a/tests/neg-macros/delegate-match-2.check b/tests/neg-macros/delegate-match-2.check index 112e5ee00474..e26742452b5b 100644 --- a/tests/neg-macros/delegate-match-2.check +++ b/tests/neg-macros/delegate-match-2.check @@ -4,4 +4,3 @@ | ^ | DivergingImplicit | method a1 in class Test produces a diverging implicit search when trying to match type A - | This location contains code that was inlined from Test_2.scala:5 diff --git a/tests/neg-macros/delegate-match-3.check b/tests/neg-macros/delegate-match-3.check index 278aa92a4117..2b6fda278fda 100644 --- a/tests/neg-macros/delegate-match-3.check +++ b/tests/neg-macros/delegate-match-3.check @@ -4,4 +4,3 @@ | ^ | NoMatchingImplicits | no implicit values were found that match type A - | This location contains code that was inlined from Test_2.scala:3 diff --git a/tests/neg-macros/i11386.check b/tests/neg-macros/i11386.check index 0377ca6389db..f74ea5b7ff20 100644 --- a/tests/neg-macros/i11386.check +++ b/tests/neg-macros/i11386.check @@ -3,11 +3,21 @@ 6 | 
dummy(0) // error | ^ | test - | This location contains code that was inlined from Test_2.scala:6 - | This location contains code that was inlined from Macro_1.scala:7 + |--------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from Test_2.scala:6 +7 | notNull(i) + | ^^^^^^^^^^ + --------------------------------------------------------------------------------------------------------------------- -- Error: tests/neg-macros/i11386/Test_2.scala:8:20 -------------------------------------------------------------------- 8 | dummy(int2String(0)) // error | ^^^^^^^^^^^^^ | test - | This location contains code that was inlined from Test_2.scala:8 - | This location contains code that was inlined from Macro_1.scala:7 + |--------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from Test_2.scala:8 +7 | notNull(i) + | ^^^^^^^^^^ + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-macros/i11795.scala b/tests/neg-macros/i11795.scala new file mode 100644 index 000000000000..2a7f89831e0e --- /dev/null +++ b/tests/neg-macros/i11795.scala @@ -0,0 +1,10 @@ +import scala.quoted._ +import scala.deriving._ + +def blah[P <: Product] + (m: Mirror.ProductOf[P]) + (using Quotes, Type[m.MirroredElemLabels], Type[m.MirroredElemTypes]) = { + type z = Tuple.Zip[m.MirroredElemLabels, m.MirroredElemTypes] + Type.of[z] // error + () +} diff --git a/tests/neg-macros/i12225.scala b/tests/neg-macros/i12225.scala new 
file mode 100644 index 000000000000..66d619aad634 --- /dev/null +++ b/tests/neg-macros/i12225.scala @@ -0,0 +1,7 @@ +object TestMacro { + inline def test[T](inline t: T): T = ${ identity('{ identity(${ identity('{ identity(${ identity('t) }) }) }) }) } // error +} + +object Test { + TestMacro.test("x") +} diff --git a/tests/neg-macros/i12471.scala b/tests/neg-macros/i12471.scala new file mode 100644 index 000000000000..d09e51c5a10a --- /dev/null +++ b/tests/neg-macros/i12471.scala @@ -0,0 +1,7 @@ +import scala.quoted.* + +def impl1[A](using t: Type[A])(using q: Quotes) = + '{ Type.of[A] ; ??? } // error + +def impl2[A](using t: Type[A])(using q: Quotes) = + '{ Type.of[A](using q) ; ??? } // error diff --git a/tests/neg-macros/i12788/Macro.scala b/tests/neg-macros/i12788/Macro.scala new file mode 100644 index 000000000000..42f1dd527ec9 --- /dev/null +++ b/tests/neg-macros/i12788/Macro.scala @@ -0,0 +1,21 @@ +import scala.quoted.* + +class InlinedInt[T <: Int] + +object DFType: + trait Width[T]: + type Out <: Int + transparent inline given [T]: Width[T] = ${ getWidthMacro[T] } + def getWidthMacro[T](using Quotes, Type[T]): Expr[Width[T]] = + '{ + new Width[T] { + type Out = 1 + } + } + + trait DFBits[W <: Int] + object DFBits extends DFBitsCompanion + +trait DFToken[T] +extension [T](token: DFToken[T])(using w: DFType.Width[T]) + def width: InlinedInt[w.Out] = ??? 
\ No newline at end of file diff --git a/tests/neg-macros/i12788/Test.scala b/tests/neg-macros/i12788/Test.scala new file mode 100644 index 000000000000..b4ac8a484a12 --- /dev/null +++ b/tests/neg-macros/i12788/Test.scala @@ -0,0 +1,4 @@ +// nopos-error +trait DFBitsCompanion: + type Token[W <: Int] = DFToken[DFType.DFBits[W]] + extension [LW <: Int](lhs: DFType.DFBits.Token[LW]) def foo: Unit = lhs.width diff --git a/tests/neg-macros/i13033/Macro_1.scala b/tests/neg-macros/i13033/Macro_1.scala new file mode 100644 index 000000000000..788d82ad4c9b --- /dev/null +++ b/tests/neg-macros/i13033/Macro_1.scala @@ -0,0 +1,12 @@ +import scala.quoted.* + +object Macro: + def positiveImpl(using Quotes): Expr[Any] = + '{ 0.0 } match + case '{ -0.0 } => '{1} + inline def positive: Any = ${positiveImpl} + + def negativeImpl(using Quotes): Expr[Any] = + '{ -0.0 } match + case '{ 0.0 } => '{-1} + inline def negative: Any = ${negativeImpl} diff --git a/tests/neg-macros/i13033/Test_2.scala b/tests/neg-macros/i13033/Test_2.scala new file mode 100644 index 000000000000..a5e2edc39ea3 --- /dev/null +++ b/tests/neg-macros/i13033/Test_2.scala @@ -0,0 +1,3 @@ +@main def test(): Unit = + println(Macro.positive) // error + println(Macro.negative) // error diff --git a/tests/neg-macros/i13516/Persistance.scala b/tests/neg-macros/i13516/Persistance.scala new file mode 100644 index 000000000000..dbabf59c7267 --- /dev/null +++ b/tests/neg-macros/i13516/Persistance.scala @@ -0,0 +1,5 @@ +import scala.quoted.* +object Persistance: + inline def nameOf(inline e: Any): String = ${ nameOfImpl('e) } + private def nameOfImpl(e: Expr[Any])(using Quotes): Expr[String] = Expr("") + def foo(p: Versioned): Unit = {} diff --git a/tests/neg-macros/i13516/Versioned.scala b/tests/neg-macros/i13516/Versioned.scala new file mode 100644 index 000000000000..a7e10b2c8549 --- /dev/null +++ b/tests/neg-macros/i13516/Versioned.scala @@ -0,0 +1,3 @@ +// nopos-error +class Versioned: + def serialize: String = 
Persistance.nameOf(0) diff --git a/tests/neg-macros/i13809/Macros_1.scala b/tests/neg-macros/i13809/Macros_1.scala new file mode 100644 index 000000000000..3c39a40a7c51 --- /dev/null +++ b/tests/neg-macros/i13809/Macros_1.scala @@ -0,0 +1,265 @@ +package x + +import scala.annotation._ +import scala.quoted._ + +trait CB[+T] + +object CBM: + def pure[T](t:T):CB[T] = ??? + def map[A,B](fa:CB[A])(f: A=>B):CB[B] = ??? + def flatMap[A,B](fa:CB[A])(f: A=>CB[B]):CB[B] = ??? + def spawn[A](op: =>CB[A]): CB[A] = ??? + + +@compileTimeOnly("await should be inside async block") +def await[T](f: CB[T]): T = ??? + + +trait CpsExpr[T:Type](prev: Seq[Expr[?]]): + + def fLast(using Quotes): Expr[CB[T]] + def prependExprs(exprs: Seq[Expr[?]]): CpsExpr[T] + def append[A:Type](chunk: CpsExpr[A])(using Quotes): CpsExpr[A] + def syncOrigin(using Quotes): Option[Expr[T]] + def map[A:Type](f: Expr[T => A])(using Quotes): CpsExpr[A] = + MappedCpsExpr[T,A](Seq(),this,f) + def flatMap[A:Type](f: Expr[T => CB[A]])(using Quotes): CpsExpr[A] = + FlatMappedCpsExpr[T,A](Seq(),this,f) + + def transformed(using Quotes): Expr[CB[T]] = + import quotes.reflect._ + Block(prev.toList.map(_.asTerm), fLast.asTerm).asExprOf[CB[T]] + + +case class GenericSyncCpsExpr[T:Type](prev: Seq[Expr[?]],last: Expr[T]) extends CpsExpr[T](prev): + + override def fLast(using Quotes): Expr[CB[T]] = + '{ CBM.pure(${last}:T) } + + override def prependExprs(exprs: Seq[Expr[?]]): CpsExpr[T] = + copy(prev = exprs ++: prev) + + override def syncOrigin(using Quotes): Option[Expr[T]] = + import quotes.reflect._ + Some(Block(prev.toList.map(_.asTerm), last.asTerm).asExprOf[T]) + + override def append[A:Type](e: CpsExpr[A])(using Quotes) = + e.prependExprs(Seq(last)).prependExprs(prev) + + override def map[A:Type](f: Expr[T => A])(using Quotes): CpsExpr[A] = + copy(last = '{ $f($last) }) + + override def flatMap[A:Type](f: Expr[T => CB[A]])(using Quotes): CpsExpr[A] = + GenericAsyncCpsExpr[A](prev, '{ 
CBM.flatMap(CBM.pure($last))($f) } ) + + +abstract class AsyncCpsExpr[T:Type]( + prev: Seq[Expr[?]] + ) extends CpsExpr[T](prev): + + override def append[A:Type](e: CpsExpr[A])(using Quotes): CpsExpr[A] = + flatMap( '{ (x:T) => ${e.transformed} }) + + override def syncOrigin(using Quotes): Option[Expr[T]] = None + + + +case class GenericAsyncCpsExpr[T:Type]( + prev: Seq[Expr[?]], + fLastExpr: Expr[CB[T]] + ) extends AsyncCpsExpr[T](prev): + + override def fLast(using Quotes): Expr[CB[T]] = fLastExpr + + override def prependExprs(exprs: Seq[Expr[?]]): CpsExpr[T] = + copy(prev = exprs ++: prev) + + override def map[A:Type](f: Expr[T => A])(using Quotes): CpsExpr[A] = + MappedCpsExpr(Seq(),this,f) + + override def flatMap[A:Type](f: Expr[T => CB[A]])(using Quotes): CpsExpr[A] = + FlatMappedCpsExpr(Seq(),this,f) + + + +case class MappedCpsExpr[S:Type, T:Type]( + prev: Seq[Expr[?]], + point: CpsExpr[S], + mapping: Expr[S=>T] + ) extends AsyncCpsExpr[T](prev): + + override def fLast(using Quotes): Expr[CB[T]] = + '{ CBM.map(${point.transformed})($mapping) } + + override def prependExprs(exprs: Seq[Expr[?]]): CpsExpr[T] = + copy(prev = exprs ++: prev) + + + +case class FlatMappedCpsExpr[S:Type, T:Type]( + prev: Seq[Expr[?]], + point: CpsExpr[S], + mapping: Expr[S => CB[T]] + ) extends AsyncCpsExpr[T](prev): + + override def fLast(using Quotes): Expr[CB[T]] = + '{ CBM.flatMap(${point.transformed})($mapping) } + + override def prependExprs(exprs: Seq[Expr[?]]): CpsExpr[T] = + copy(prev = exprs ++: prev) + + +class ValRhsFlatMappedCpsExpr[T:Type, V:Type](using thisQuotes: Quotes) + ( + prev: Seq[Expr[?]], + oldValDef: quotes.reflect.ValDef, + cpsRhs: CpsExpr[V], + next: CpsExpr[T] + ) + extends AsyncCpsExpr[T](prev) { + + override def fLast(using Quotes):Expr[CB[T]] = + import quotes.reflect._ + next.syncOrigin match + case Some(nextOrigin) => + // owner of this block is incorrect + '{ + CBM.map(${cpsRhs.transformed})((vx:V) => + ${buildAppendBlockExpr('vx, nextOrigin)}) + } 
+ case None => + '{ + CBM.flatMap(${cpsRhs.transformed})((v:V)=> + ${buildAppendBlockExpr('v, next.transformed)}) + } + + + override def prependExprs(exprs: Seq[Expr[?]]): CpsExpr[T] = + ValRhsFlatMappedCpsExpr(using thisQuotes)(exprs ++: prev,oldValDef,cpsRhs,next) + + override def append[A:quoted.Type](e: CpsExpr[A])(using Quotes) = + ValRhsFlatMappedCpsExpr(using thisQuotes)(prev,oldValDef,cpsRhs,next.append(e)) + + + private def buildAppendBlock(using Quotes)(rhs:quotes.reflect.Term, + exprTerm:quotes.reflect.Term): quotes.reflect.Term = + import quotes.reflect._ + import scala.quoted.Expr + + val castedOldValDef = oldValDef.asInstanceOf[quotes.reflect.ValDef] + val valDef = ValDef(castedOldValDef.symbol, Some(rhs.changeOwner(castedOldValDef.symbol))) + exprTerm match + case Block(stats,last) => + Block(valDef::stats, last) + case other => + Block(valDef::Nil,other) + + private def buildAppendBlockExpr[A:Type](using Quotes)(rhs: Expr[V], expr:Expr[A]):Expr[A] = + import quotes.reflect._ + buildAppendBlock(rhs.asTerm,expr.asTerm).asExprOf[A] + +} + + +object CpsExpr: + + def sync[T:Type](f: Expr[T]): CpsExpr[T] = + GenericSyncCpsExpr[T](Seq(), f) + + def async[T:Type](f: Expr[CB[T]]): CpsExpr[T] = + GenericAsyncCpsExpr[T](Seq(), f) + + +object Async: + + transparent inline def transform[T](inline expr: T) = ${ + Async.transformImpl[T]('expr) + } + + def transformImpl[T:Type](f: Expr[T])(using Quotes): Expr[CB[T]] = + import quotes.reflect._ + // println(s"before transformed: ${f.show}") + val cpsExpr = rootTransform[T](f) + val r = '{ CBM.spawn(${cpsExpr.transformed}) } + // println(s"transformed value: ${r.show}") + r + + def rootTransform[T:Type](f: Expr[T])(using Quotes): CpsExpr[T] = { + import quotes.reflect._ + f match + case '{ while ($cond) { $repeat } } => + val cpsRepeat = rootTransform(repeat.asExprOf[Unit]) + CpsExpr.async('{ + def _whilefun():CB[Unit] = + if ($cond) { + ${cpsRepeat.flatMap('{(x:Unit) => _whilefun()}).transformed} + } else { + 
CBM.pure(()) + } + _whilefun() + }.asExprOf[CB[T]]) + case _ => + val fTree = f.asTerm + fTree match { + case fun@Apply(fun1@TypeApply(obj2,targs2), args1) => + if (obj2.symbol.name == "await") { + val awaitArg = args1.head + CpsExpr.async(awaitArg.asExprOf[CB[T]]) + } else { + ??? + } + case Assign(left,right) => + left match + case id@Ident(x) => + right.tpe.widen.asType match + case '[r] => + val cpsRight = rootTransform(right.asExprOf[r]) + CpsExpr.async( + cpsRight.map[T]( + '{ (x:r) => ${Assign(left,'x.asTerm).asExprOf[T] } + }).transformed ) + case _ => ??? + case Block(prevs,last) => + val rPrevs = prevs.map[CpsExpr[?]]{ p => + p match + case v@ValDef(vName,vtt,optRhs) => + optRhs.get.tpe.widen.asType match + case '[l] => + val cpsRight = rootTransform(optRhs.get.asExprOf[l]) + ValRhsFlatMappedCpsExpr(using quotes)(Seq(), v, cpsRight, CpsExpr.sync('{})) + case t: Term => + // TODO: rootTransform + t.asExpr match + case '{ $p: tp } => + rootTransform(p) + case other => + printf(other.show) + throw RuntimeException(s"can't handle term in block: $other") + case other => + printf(other.show) + throw RuntimeException(s"unknown tree type in block: $other") + } + val rLast = rootTransform(last.asExprOf[T]) + val blockResult = rPrevs.foldRight(rLast)((e,s) => e.append(s)) + val retval = CpsExpr.async(blockResult.transformed) + retval + //BlockTransform(cpsCtx).run(prevs,last) + case id@Ident(name) => + CpsExpr.sync(id.asExprOf[T]) + case tid@Typed(Ident(name), tp) => + CpsExpr.sync(tid.asExprOf[T]) + case matchTerm@Match(scrutinee, caseDefs) => + val nCases = caseDefs.map{ old => + CaseDef.copy(old)(old.pattern, old.guard, rootTransform(old.rhs.asExprOf[T]).transformed.asTerm) + } + CpsExpr.async(Match(scrutinee, nCases).asExprOf[CB[T]]) + case inlinedTerm@ Inlined(call,List(),body) => + rootTransform(body.asExprOf[T]) + case constTerm@Literal(_)=> + CpsExpr.sync(constTerm.asExprOf[T]) + case _ => + throw RuntimeException(s"language construction is not supported: 
${fTree}") + } + } + diff --git a/tests/neg-macros/i13809/Test_2.scala b/tests/neg-macros/i13809/Test_2.scala new file mode 100644 index 000000000000..3360de7ab018 --- /dev/null +++ b/tests/neg-macros/i13809/Test_2.scala @@ -0,0 +1,19 @@ +package x + +object VP1: + + ///* + def allocateServiceOperator(optInUsername: Option[String]): CB[Unit] = Async.transform { // error + val username = optInUsername match + case None => + while(false) { + val nextResult = await(op1()) + val countResult = await(op1()) + } + case Some(inUsername) => + val x = await(op1()) + inUsername + } + //*/ + + def op1(): CB[String] = ??? diff --git a/tests/neg-macros/i13991.check b/tests/neg-macros/i13991.check new file mode 100644 index 000000000000..ca9f924a6693 --- /dev/null +++ b/tests/neg-macros/i13991.check @@ -0,0 +1,16 @@ + +-- Error: tests/neg-macros/i13991/Test_2.scala:6:5 --------------------------------------------------------------------- +6 | v2 // error + | ^^ + | Error + |--------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from Test_2.scala:3 +3 | inline def v2 = InlineMac.sample("foo") + | ^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from Test_2.scala:3 +3 | inline def v2 = InlineMac.sample("foo") + | ^^^^^^^^^^^^^^^^^^^^^^^ + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-macros/i13991/Macro_1.scala b/tests/neg-macros/i13991/Macro_1.scala new file mode 100644 index 000000000000..76cfa93cdde5 --- /dev/null +++ b/tests/neg-macros/i13991/Macro_1.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +object 
InlineMac: + + inline def sample(inline expr: String): Int = + ${ sampleImpl('expr) } + + def sampleImpl(expr: Expr[String])(using Quotes): Expr[Int] = + import quotes.reflect.* + report.errorAndAbort("Error", expr) diff --git a/tests/neg-macros/i13991/Test_2.scala b/tests/neg-macros/i13991/Test_2.scala new file mode 100644 index 000000000000..572ad324b129 --- /dev/null +++ b/tests/neg-macros/i13991/Test_2.scala @@ -0,0 +1,6 @@ +object Main: + def main(args: Array[String]): Unit = + inline def v2 = InlineMac.sample("foo") + inline def v1 = v2 + + v2 // error diff --git a/tests/neg-macros/i14039/Macro_1.scala b/tests/neg-macros/i14039/Macro_1.scala new file mode 100644 index 000000000000..d91a93a1dded --- /dev/null +++ b/tests/neg-macros/i14039/Macro_1.scala @@ -0,0 +1,7 @@ +import scala.quoted.* + +object Macro: + inline def apply(): Any = ${Macro.impl} + + def impl(using Quotes): Expr[Any] = + quotes.reflect.report.errorAndAbort("my message") diff --git a/tests/neg-macros/i14039/Test_2.scala b/tests/neg-macros/i14039/Test_2.scala new file mode 100644 index 000000000000..ff29836b379f --- /dev/null +++ b/tests/neg-macros/i14039/Test_2.scala @@ -0,0 +1,5 @@ +class Dsl { + val entries = Seq.newBuilder[Any] + inline def get(): Unit = entries += Macro.apply() + def test = get() // error +} diff --git a/tests/neg-macros/i14137/Macro_1.scala b/tests/neg-macros/i14137/Macro_1.scala new file mode 100644 index 000000000000..3b3d23fa34ee --- /dev/null +++ b/tests/neg-macros/i14137/Macro_1.scala @@ -0,0 +1,23 @@ +package x + +import scala.quoted._ + +object Macro: + + inline def genOp(inline f:Int): Int = ${ + genOpImpl('f) + } + + def genOpImpl(f: Expr[Int])(using Quotes): Expr[Int] = { + + def firstOp(): Expr[Int] = + '{ + var x=1 + ${secondOp('x,f)} + } + + def secondOp(x:Expr[Int], y:Expr[Int]): Expr[Int] = + '{ $x + $y } + + firstOp() + } diff --git a/tests/neg-macros/i14137/Test_2.scala b/tests/neg-macros/i14137/Test_2.scala new file mode 100644 index 
000000000000..917e68dc7583 --- /dev/null +++ b/tests/neg-macros/i14137/Test_2.scala @@ -0,0 +1,6 @@ +package x + +object Main: + + def main(args: Array[String]):Unit = + Macro.genOp(10) // error diff --git a/tests/neg-macros/i6432.check b/tests/neg-macros/i6432.check index 0e01f99be404..2ea6f3ba0300 100644 --- a/tests/neg-macros/i6432.check +++ b/tests/neg-macros/i6432.check @@ -3,14 +3,11 @@ 4 | foo"abc${"123"}xyz${"456"}fgh" // error // error // error | ^^^ | abc - | This location contains code that was inlined from Test_2.scala:4 -- Error: tests/neg-macros/i6432/Test_2.scala:4:17 --------------------------------------------------------------------- 4 | foo"abc${"123"}xyz${"456"}fgh" // error // error // error | ^^^ | xyz - | This location contains code that was inlined from Test_2.scala:4 -- Error: tests/neg-macros/i6432/Test_2.scala:4:28 --------------------------------------------------------------------- 4 | foo"abc${"123"}xyz${"456"}fgh" // error // error // error | ^^^ | fgh - | This location contains code that was inlined from Test_2.scala:4 diff --git a/tests/neg-macros/i6432b.check b/tests/neg-macros/i6432b.check index 4dd1be84fa3c..395d089c92e9 100644 --- a/tests/neg-macros/i6432b.check +++ b/tests/neg-macros/i6432b.check @@ -3,14 +3,11 @@ 4 | foo"""abc${"123"}xyz${"456"}fgh""" // error // error // error | ^^^ | abc - | This location contains code that was inlined from Test_2.scala:4 -- Error: tests/neg-macros/i6432b/Test_2.scala:4:19 -------------------------------------------------------------------- 4 | foo"""abc${"123"}xyz${"456"}fgh""" // error // error // error | ^^^ | xyz - | This location contains code that was inlined from Test_2.scala:4 -- Error: tests/neg-macros/i6432b/Test_2.scala:4:30 -------------------------------------------------------------------- 4 | foo"""abc${"123"}xyz${"456"}fgh""" // error // error // error | ^^^ | fgh - | This location contains code that was inlined from Test_2.scala:4 diff --git a/tests/neg-macros/i6436.check 
b/tests/neg-macros/i6436.check index d861ad90717d..0b04bb9ddae2 100644 --- a/tests/neg-macros/i6436.check +++ b/tests/neg-macros/i6436.check @@ -6,5 +6,5 @@ 6 | val ps: Seq[Expr[String]] = parts // error | ^^^^^ | Not found: parts - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-macros/i6976.check b/tests/neg-macros/i6976.check index 2057e55550b4..0c30aa856ed1 100644 --- a/tests/neg-macros/i6976.check +++ b/tests/neg-macros/i6976.check @@ -6,4 +6,10 @@ | scala.MatchError: Inlined(EmptyTree,List(),Literal(Constant(2))) (of class dotty.tools.dotc.ast.Trees$Inlined) | at playground.macros$.mcrImpl(Macro_1.scala:10) | - | This location contains code that was inlined from Test_2.scala:5 + |--------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from Macro_1.scala:6 +6 | inline def mcr(x: => Any) = ${mcrImpl('x)} + | ^^^^^^^^^^^^^^ + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-macros/i7068-c.scala b/tests/neg-macros/i7068-c.scala index 7c6714d525c0..b5d416b3240d 100644 --- a/tests/neg-macros/i7068-c.scala +++ b/tests/neg-macros/i7068-c.scala @@ -2,7 +2,7 @@ def species(using quoted.Quotes) = '{ case object Bar // error case class FooT() // error ${ - case object Baz // ok + case object Baz // error ??? 
} FooT() diff --git a/tests/neg-macros/i7603.scala b/tests/neg-macros/i7603.scala index c781513fb1ba..49bf1fe62471 100644 --- a/tests/neg-macros/i7603.scala +++ b/tests/neg-macros/i7603.scala @@ -2,6 +2,6 @@ import scala.quoted.* class Foo { def f[T2: Type](e: Expr[T2])(using Quotes) = e match { case '{ $x: ${'[List[$t]]} } => // error - case '{ $x: ${y @ '[List[$t]]} } => // error // error + case '{ $x: ${y @ '[List[$t]]} } => // error } } diff --git a/tests/neg-macros/i7839.check b/tests/neg-macros/i7839.check index 197867ab809b..3e3fc7b2c0f1 100644 --- a/tests/neg-macros/i7839.check +++ b/tests/neg-macros/i7839.check @@ -6,3 +6,4 @@ | Parameters may only be: | * Quoted parameters or fields | * Literal values of primitive types + | * References to `inline val`s diff --git a/tests/neg-macros/i9014.check b/tests/neg-macros/i9014.check index 509eac067fc8..c0d08a42ef2a 100644 --- a/tests/neg-macros/i9014.check +++ b/tests/neg-macros/i9014.check @@ -3,4 +3,3 @@ 1 |val tests = summon[Bar] // error | ^ | Failed to expand! 
- | This location contains code that was inlined from Test_2.scala:1 diff --git a/tests/neg-macros/i9014/Macros_1.scala b/tests/neg-macros/i9014/Macros_1.scala index 3bca2bae4dc3..36ae6cafeac2 100644 --- a/tests/neg-macros/i9014/Macros_1.scala +++ b/tests/neg-macros/i9014/Macros_1.scala @@ -1,4 +1,4 @@ import scala.quoted.* trait Bar inline given Bar = ${ impl } -def impl(using Quotes): Expr[Bar] = quotes.reflect.report.throwError("Failed to expand!") +def impl(using Quotes): Expr[Bar] = quotes.reflect.report.errorAndAbort("Failed to expand!") diff --git a/tests/neg-macros/i9014b/Macros_1.scala b/tests/neg-macros/i9014b/Macros_1.scala index 0dcb45d513cb..5af84224ff04 100644 --- a/tests/neg-macros/i9014b/Macros_1.scala +++ b/tests/neg-macros/i9014b/Macros_1.scala @@ -1,4 +1,4 @@ import scala.quoted._ trait Bar transparent inline given Bar = ${ impl } -def impl(using Quotes): Expr[Bar] = quotes.reflect.report.throwError("Failed to expand!") +def impl(using Quotes): Expr[Bar] = quotes.reflect.report.errorAndAbort("Failed to expand!") diff --git a/tests/neg-macros/ill-abort.check b/tests/neg-macros/ill-abort.check new file mode 100644 index 000000000000..2f76c89d88dd --- /dev/null +++ b/tests/neg-macros/ill-abort.check @@ -0,0 +1,12 @@ + +-- Error: tests/neg-macros/ill-abort/quoted_2.scala:1:15 --------------------------------------------------------------- +1 |def test = fail() // error + | ^^^^^^ + |Macro expansion was aborted by the macro without any errors reported. Macros should issue errors to end-users to facilitate debugging when aborting a macro expansion. 
+ |--------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from quoted_1.scala:3 +3 |inline def fail(): Unit = ${ impl } + | ^^^^^^^^^ + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-macros/ill-abort/quoted_1.scala b/tests/neg-macros/ill-abort/quoted_1.scala new file mode 100644 index 000000000000..0392ef5ae571 --- /dev/null +++ b/tests/neg-macros/ill-abort/quoted_1.scala @@ -0,0 +1,7 @@ +import scala.quoted.* + +inline def fail(): Unit = ${ impl } + +private def impl(using Quotes) : Expr[Unit] = + // should never be done without reporting error before (see docs) + throw new scala.quoted.runtime.StopMacroExpansion diff --git a/tests/neg-macros/ill-abort/quoted_2.scala b/tests/neg-macros/ill-abort/quoted_2.scala new file mode 100644 index 000000000000..fae7c594b861 --- /dev/null +++ b/tests/neg-macros/ill-abort/quoted_2.scala @@ -0,0 +1 @@ +def test = fail() // error diff --git a/tests/neg-macros/inline-macro-staged-interpreter/Macro_1.scala b/tests/neg-macros/inline-macro-staged-interpreter/Macro_1.scala index 4e56b946207a..f93082afc747 100644 --- a/tests/neg-macros/inline-macro-staged-interpreter/Macro_1.scala +++ b/tests/neg-macros/inline-macro-staged-interpreter/Macro_1.scala @@ -6,7 +6,7 @@ object E { inline def eval[T](inline x: E[T]): T = ${ impl('x) } - def impl[T: Type](x: Expr[E[T]]) (using Quotes): Expr[T] = x.valueOrError.lift + def impl[T: Type](x: Expr[E[T]]) (using Quotes): Expr[T] = x.valueOrAbort.lift implicit def ev1[T: Type]: FromExpr[E[T]] = new FromExpr { def unapply(x: Expr[E[T]])(using Quotes) = x match { diff --git a/tests/neg-macros/inline-option/Macro_1.scala 
b/tests/neg-macros/inline-option/Macro_1.scala index f2e3e453fdd2..d436285cd93e 100644 --- a/tests/neg-macros/inline-option/Macro_1.scala +++ b/tests/neg-macros/inline-option/Macro_1.scala @@ -2,7 +2,7 @@ import scala.quoted.* object Macro { - def impl(opt: Expr[Option[Int]]) (using Quotes): Expr[Int] = opt.valueOrError match { + def impl(opt: Expr[Option[Int]]) (using Quotes): Expr[Int] = opt.valueOrAbort match { case Some(i) => Expr(i) case None => '{-1} } diff --git a/tests/neg-macros/inline-tuples-1/Macro_1.scala b/tests/neg-macros/inline-tuples-1/Macro_1.scala index 031fb01cabe4..e0ce4f20419a 100644 --- a/tests/neg-macros/inline-tuples-1/Macro_1.scala +++ b/tests/neg-macros/inline-tuples-1/Macro_1.scala @@ -2,26 +2,26 @@ import scala.quoted.* object Macros { - def tup1(tup: Expr[Tuple1[Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup2(tup: Expr[Tuple2[Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup3(tup: Expr[Tuple3[Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup4(tup: Expr[Tuple4[Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup5(tup: Expr[Tuple5[Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup6(tup: Expr[Tuple6[Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup7(tup: Expr[Tuple7[Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup8(tup: Expr[Tuple8[Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup9(tup: Expr[Tuple9[Int, Int, 
Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup10(tup: Expr[Tuple10[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup11(tup: Expr[Tuple11[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup12(tup: Expr[Tuple12[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup13(tup: Expr[Tuple13[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup14(tup: Expr[Tuple14[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup15(tup: Expr[Tuple15[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup16(tup: Expr[Tuple16[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup17(tup: Expr[Tuple17[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup18(tup: Expr[Tuple18[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup19(tup: Expr[Tuple19[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, 
Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup20(tup: Expr[Tuple20[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup21(tup: Expr[Tuple21[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup22(tup: Expr[Tuple22[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) + def tup1(tup: Expr[Tuple1[Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup2(tup: Expr[Tuple2[Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup3(tup: Expr[Tuple3[Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup4(tup: Expr[Tuple4[Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup5(tup: Expr[Tuple5[Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup6(tup: Expr[Tuple6[Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup7(tup: Expr[Tuple7[Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup8(tup: Expr[Tuple8[Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = 
Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup9(tup: Expr[Tuple9[Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup10(tup: Expr[Tuple10[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup11(tup: Expr[Tuple11[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup12(tup: Expr[Tuple12[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup13(tup: Expr[Tuple13[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup14(tup: Expr[Tuple14[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup15(tup: Expr[Tuple15[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup16(tup: Expr[Tuple16[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup17(tup: Expr[Tuple17[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup18(tup: Expr[Tuple18[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = 
Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup19(tup: Expr[Tuple19[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup20(tup: Expr[Tuple20[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup21(tup: Expr[Tuple21[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup22(tup: Expr[Tuple22[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) } diff --git a/tests/neg-macros/macro-class-not-found-1.check b/tests/neg-macros/macro-class-not-found-1.check index 445523c6ade2..f16aac4efe3e 100644 --- a/tests/neg-macros/macro-class-not-found-1.check +++ b/tests/neg-macros/macro-class-not-found-1.check @@ -5,4 +5,10 @@ | java.lang.NoClassDefFoundError | at Foo$.aMacroImplementation(Foo.scala:8) | - | This location contains code that was inlined from Bar.scala:4 + |--------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from Foo.scala:5 +5 | inline def myMacro(): Unit = ${ aMacroImplementation } + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + --------------------------------------------------------------------------------------------------------------------- diff --git 
a/tests/neg-macros/macro-class-not-found-2.check b/tests/neg-macros/macro-class-not-found-2.check index 3cb7506e6bb2..3665ea1a9431 100644 --- a/tests/neg-macros/macro-class-not-found-2.check +++ b/tests/neg-macros/macro-class-not-found-2.check @@ -5,4 +5,10 @@ | java.lang.NoClassDefFoundError: this.is.not.a.Class | at Foo$.aMacroImplementation(Foo.scala:8) | - | This location contains code that was inlined from Bar.scala:4 + |--------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from Foo.scala:5 +5 | inline def myMacro(): Unit = ${ aMacroImplementation } + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg-macros/macros-in-same-project-6.check b/tests/neg-macros/macros-in-same-project-6.check index 485f5062db92..17f696508259 100644 --- a/tests/neg-macros/macros-in-same-project-6.check +++ b/tests/neg-macros/macros-in-same-project-6.check @@ -2,4 +2,3 @@ 4 | Foo.myMacro() // error | ^^^^^^^^^^^^^ | some error - | This location contains code that was inlined from Bar.scala:4 diff --git a/tests/neg-macros/quote-error-2/Macro_1.scala b/tests/neg-macros/quote-error-2/Macro_1.scala index 7a98d7145698..a4c8960f04ab 100644 --- a/tests/neg-macros/quote-error-2/Macro_1.scala +++ b/tests/neg-macros/quote-error-2/Macro_1.scala @@ -3,7 +3,7 @@ import quoted.* object Macro_1 { inline def foo(inline b: Boolean): Unit = ${ fooImpl('b) } def fooImpl(b: Expr[Boolean])(using Quotes): Expr[Unit] = - '{println(${msg(b.valueOrError)})} + '{println(${msg(b.valueOrAbort)})} def msg(b: Boolean)(using Quotes): Expr[String] = if (b) '{"foo(true)"} diff --git a/tests/neg-macros/quote-error/Macro_1.scala 
b/tests/neg-macros/quote-error/Macro_1.scala index 668f2b99c33b..d553eac2b004 100644 --- a/tests/neg-macros/quote-error/Macro_1.scala +++ b/tests/neg-macros/quote-error/Macro_1.scala @@ -3,6 +3,6 @@ import quoted.* object Macro_1 { inline def foo(inline b: Boolean): Unit = ${fooImpl('b)} def fooImpl(b: Expr[Boolean])(using Quotes) : Expr[Unit] = - if (b.valueOrError) '{println("foo(true)")} + if (b.valueOrAbort) '{println("foo(true)")} else { quotes.reflect.report.error("foo cannot be called with false"); '{ ??? } } } diff --git a/tests/neg-macros/quote-exception/Macro_1.scala b/tests/neg-macros/quote-exception/Macro_1.scala index 2fa482a81c56..02b2d049499b 100644 --- a/tests/neg-macros/quote-exception/Macro_1.scala +++ b/tests/neg-macros/quote-exception/Macro_1.scala @@ -3,6 +3,6 @@ import quoted.* object Macro_1 { inline def foo(inline b: Boolean): Unit = ${fooImpl('b)} def fooImpl(b: Expr[Boolean]) (using Quotes): Expr[Unit] = - if (b.valueOrError) '{println("foo(true)")} + if (b.valueOrAbort) '{println("foo(true)")} else ??? 
} diff --git a/tests/neg-macros/quote-whitebox/Macro_1.scala b/tests/neg-macros/quote-whitebox/Macro_1.scala index a18fccbc166e..78b7c36ccce1 100644 --- a/tests/neg-macros/quote-whitebox/Macro_1.scala +++ b/tests/neg-macros/quote-whitebox/Macro_1.scala @@ -2,7 +2,7 @@ import scala.quoted.* object Macros { transparent inline def defaultOf(inline str: String): Any = ${ defaultOfImpl('str) } - def defaultOfImpl(str: Expr[String]) (using Quotes): Expr[Any] = str.valueOrError match { + def defaultOfImpl(str: Expr[String]) (using Quotes): Expr[Any] = str.valueOrAbort match { case "int" => '{1} case "string" => '{"a"} } diff --git a/tests/neg-macros/reflect-inline/assert_1.scala b/tests/neg-macros/reflect-inline/assert_1.scala index 2c5b324450c5..4ea6f9871ba3 100644 --- a/tests/neg-macros/reflect-inline/assert_1.scala +++ b/tests/neg-macros/reflect-inline/assert_1.scala @@ -5,6 +5,6 @@ object api { ${ stripImpl('x) } private def stripImpl(x: Expr[String])(using Quotes): Expr[String] = - Expr(x.valueOrError.stripMargin) + Expr(x.valueOrAbort.stripMargin) } diff --git a/tests/neg-macros/toexproftuple.scala b/tests/neg-macros/toexproftuple.scala index f33bfd5f6dfb..20ae2f08ff8d 100644 --- a/tests/neg-macros/toexproftuple.scala +++ b/tests/neg-macros/toexproftuple.scala @@ -1,10 +1,63 @@ -import scala.quoted._, scala.deriving.* +import scala.quoted._, scala.deriving.* // error +// ^ +// Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) +// matches none of the cases +// +// case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] +// case EmptyTuple => EmptyTuple + +inline def mcr: Any = ${mcrImpl} // error +// ^ +// Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) +// matches none of the cases +// +// case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] +// case EmptyTuple => EmptyTuple + +def mcrImpl(using ctx: Quotes): Expr[Any] = { // error // error + //^ + // Match type reduction 
failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) + // matches none of the cases + // + // case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] + // case EmptyTuple => EmptyTuple + + // ^ + // Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) + // matches none of the cases + // + // case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] + // case EmptyTuple => EmptyTuple -inline def mcr: Any = ${mcrImpl} -def mcrImpl(using ctx: Quotes): Expr[Any] = { val tpl: (Expr[1], Expr[2], Expr[3]) = ('{1}, '{2}, '{3}) '{val res: (1, 3, 3) = ${Expr.ofTuple(tpl)}; res} // error + // ^^^^^^^^^^^^^^^^^ + // Found: quoted.Expr[(1 : Int) *: (2 : Int) *: (3 : Int) *: EmptyTuple] + // Required: quoted.Expr[((1 : Int), (3 : Int), (3 : Int))] val tpl2: (Expr[1], 2, Expr[3]) = ('{1}, 2, '{3}) - '{val res = ${Expr.ofTuple(tpl2)}; res} // error + '{val res = ${Expr.ofTuple(tpl2)}; res} // error // error // error // error + // ^ + // Cannot prove that (quoted.Expr[(1 : Int)], (2 : Int), quoted.Expr[(3 : Int)]) =:= scala.Tuple.Map[ + // scala.Tuple.InverseMap[ + // (quoted.Expr[(1 : Int)], (2 : Int), quoted.Expr[(3 : Int)]) + // , quoted.Expr] + // , quoted.Expr]. 
+ + // ^ + // Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) + // matches none of the cases + // + // case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] + // case EmptyTuple => EmptyTuple + + // ^ + // Cyclic reference involving val res + + // ^ + // Match type reduction failed since selector ((2 : Int), quoted.Expr[(3 : Int)]) + // matches none of the cases + // + // case quoted.Expr[x] *: t => x *: scala.Tuple.InverseMap[t, quoted.Expr] + // case EmptyTuple => EmptyTuple } diff --git a/tests/neg-macros/type-splice-in-val-pattern.check b/tests/neg-macros/type-splice-in-val-pattern.check index 26a457127acb..627aa2d5755d 100644 --- a/tests/neg-macros/type-splice-in-val-pattern.check +++ b/tests/neg-macros/type-splice-in-val-pattern.check @@ -2,5 +2,5 @@ 5 | val '[ *:[$t] ] = ??? // error | ^^ | Type splices cannot be used in val patterns. Consider using `match` instead. - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-scalajs/js-enums.check b/tests/neg-scalajs/js-enums.check index eb358f16e57a..e54b2a5af2da 100644 --- a/tests/neg-scalajs/js-enums.check +++ b/tests/neg-scalajs/js-enums.check @@ -4,9 +4,9 @@ |MyEnum extends scala.reflect.Enum which does not extend js.Any. 5 | case Foo -- Error: tests/neg-scalajs/js-enums.scala:9:5 ------------------------------------------------------------------------- -7 |@js.native -8 |@JSGlobal -9 |enum MyEnumNative extends js.Object: // error + 7 |@js.native + 8 |@JSGlobal + 9 |enum MyEnumNative extends js.Object: // error |^ |MyEnumNative extends scala.reflect.Enum which does not extend js.Any. 
10 | case Bar diff --git a/tests/neg-scalajs/js-native-exports.check b/tests/neg-scalajs/js-native-exports.check new file mode 100644 index 000000000000..1003580b64ca --- /dev/null +++ b/tests/neg-scalajs/js-native-exports.check @@ -0,0 +1,16 @@ +-- Error: tests/neg-scalajs/js-native-exports.scala:17:11 -------------------------------------------------------------- +17 | export bag.{str, int, bool, dbl} // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Native JS traits, classes and objects cannot contain exported definitions. +-- Error: tests/neg-scalajs/js-native-exports.scala:23:11 -------------------------------------------------------------- +23 | export bag.{str, int, bool, dbl} // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Native JS traits, classes and objects cannot contain exported definitions. +-- Error: tests/neg-scalajs/js-native-exports.scala:30:11 -------------------------------------------------------------- +30 | export bag.{str, int, bool, dbl} // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Native JS traits, classes and objects cannot contain exported definitions. +-- Error: tests/neg-scalajs/js-native-exports.scala:35:11 -------------------------------------------------------------- +35 | export bag.{str, int, bool, dbl} // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Non-native JS traits cannot contain exported definitions. 
diff --git a/tests/neg-scalajs/js-native-exports.scala b/tests/neg-scalajs/js-native-exports.scala new file mode 100644 index 000000000000..73c0ffe93573 --- /dev/null +++ b/tests/neg-scalajs/js-native-exports.scala @@ -0,0 +1,38 @@ +import scala.scalajs.js +import scala.scalajs.js.annotation.* + +object A { + + @js.native + trait Bag extends js.Any { + val str: String + def int: Int + def bool(): Boolean + def dbl(dbl: Double): Double + } + + @js.native + @JSGlobal("BagHolder_GlobalClass") + final class BagHolder(val bag: Bag) extends js.Object { + export bag.{str, int, bool, dbl} // error + } + + @js.native + trait BagHolderTrait extends js.Any { + val bag: Bag + export bag.{str, int, bool, dbl} // error + } + + @js.native + @JSGlobal("BagHolderModule_GlobalVar") + object BagHolderModule extends js.Object { + val bag: Bag = js.native + export bag.{str, int, bool, dbl} // error + } + + trait NonNativeBagHolderTrait extends js.Any { + val bag: Bag + export bag.{str, int, bool, dbl} // error + } + +} diff --git a/tests/neg-scalajs/js-non-native-members-conflicts.check b/tests/neg-scalajs/js-non-native-members-conflicts.check new file mode 100644 index 000000000000..011b52c93be8 --- /dev/null +++ b/tests/neg-scalajs/js-non-native-members-conflicts.check @@ -0,0 +1,12 @@ +-- Error: tests/neg-scalajs/js-non-native-members-conflicts.scala:7:6 -------------------------------------------------- +7 | def b: Unit = () // error + | ^ + | Cannot disambiguate overloads for getter a with types + | (): Unit + | (): Unit +-- Error: tests/neg-scalajs/js-non-native-members-conflicts.scala:10:2 ------------------------------------------------- +10 | object B1 // error + | ^ + | Cannot disambiguate overloads for getter B1 with types + | (): A.this.B1 + | (): Object diff --git a/tests/neg-scalajs/js-non-native-members-conflicts.scala b/tests/neg-scalajs/js-non-native-members-conflicts.scala new file mode 100644 index 000000000000..d3c44c489f15 --- /dev/null +++ 
b/tests/neg-scalajs/js-non-native-members-conflicts.scala @@ -0,0 +1,11 @@ +import scala.scalajs.js +import scala.scalajs.js.annotation.* + +class A extends js.Object { + def a: Unit = () + @JSName("a") + def b: Unit = () // error + + class B1 extends js.Object + object B1 // error +} diff --git a/tests/neg-scalajs/js-type-bad-parents.check b/tests/neg-scalajs/js-type-bad-parents.check index 0375f2fd30c0..ae5e23566f8e 100644 --- a/tests/neg-scalajs/js-type-bad-parents.check +++ b/tests/neg-scalajs/js-type-bad-parents.check @@ -45,7 +45,7 @@ -- Error: tests/neg-scalajs/js-type-bad-parents.scala:36:7 ------------------------------------------------------------- 36 |object C3 extends ScalaClass with js.Any // error |^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - |C3$ extends ScalaClass which does not extend js.Any. + |C3 extends ScalaClass which does not extend js.Any. -- Error: tests/neg-scalajs/js-type-bad-parents.scala:39:6 ------------------------------------------------------------- 38 |@js.native 39 |trait C4 extends ScalaClass with js.Any // error @@ -60,7 +60,7 @@ 42 |@js.native @JSGlobal 43 |object C6 extends ScalaClass with js.Any // error |^ - |C6$ extends ScalaClass which does not extend js.Any. + |C6 extends ScalaClass which does not extend js.Any. -- Error: tests/neg-scalajs/js-type-bad-parents.scala:45:6 ------------------------------------------------------------- 45 |trait C7 extends js.Object with ScalaTrait // error |^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -72,7 +72,7 @@ -- Error: tests/neg-scalajs/js-type-bad-parents.scala:47:7 ------------------------------------------------------------- 47 |object C9 extends js.Object with ScalaTrait // error |^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - |C9$ extends ScalaTrait which does not extend js.Any. + |C9 extends ScalaTrait which does not extend js.Any. 
-- Error: tests/neg-scalajs/js-type-bad-parents.scala:50:6 ------------------------------------------------------------- 49 |@js.native 50 |trait C10 extends js.Object with ScalaTrait // error @@ -87,4 +87,4 @@ 53 |@js.native @JSGlobal 54 |object C12 extends js.Object with ScalaTrait // error |^ - |C12$ extends ScalaTrait which does not extend js.Any. + |C12 extends ScalaTrait which does not extend js.Any. diff --git a/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check b/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check index 123706ae3776..57be080100e9 100644 --- a/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check +++ b/tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.check @@ -13,7 +13,7 @@ -- Error: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:17:27 ----------------------------------------- 17 | val d = js.constructorOf[NativeJSClass { def bar: Int }] // error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | NativeJSClass{bar: => Int} is not a class type + | NativeJSClass{bar: Int} is not a class type -- Error: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:19:27 ----------------------------------------- 19 | val e = js.constructorOf[JSTrait] // error | ^^^^^^^ @@ -29,7 +29,7 @@ -- Error: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:23:27 ----------------------------------------- 23 | val h = js.constructorOf[JSClass { def bar: Int }] // error | ^^^^^^^^^^^^^^^^^^^^^^^^ - | JSClass{bar: => Int} is not a class type + | JSClass{bar: Int} is not a class type -- Error: tests/neg-scalajs/jsconstructorof-error-in-prepjsinterop.scala:25:42 ----------------------------------------- 25 | def foo[A <: js.Any] = js.constructorOf[A] // error | ^ diff --git a/tests/neg-scalajs/jsconstructorof-error-in-typer.check b/tests/neg-scalajs/jsconstructorof-error-in-typer.check index 391ffa4c6933..c0d25674dec5 100644 --- a/tests/neg-scalajs/jsconstructorof-error-in-typer.check +++ 
b/tests/neg-scalajs/jsconstructorof-error-in-typer.check @@ -2,17 +2,17 @@ 9 | val a = js.constructorOf[ScalaClass] // error | ^ | Type argument ScalaClass does not conform to upper bound scala.scalajs.js.Any - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E057] Type Mismatch Error: tests/neg-scalajs/jsconstructorof-error-in-typer.scala:10:27 ---------------------------- 10 | val b = js.constructorOf[ScalaTrait] // error | ^ | Type argument ScalaTrait does not conform to upper bound scala.scalajs.js.Any - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E057] Type Mismatch Error: tests/neg-scalajs/jsconstructorof-error-in-typer.scala:11:27 ---------------------------- 11 | val c = js.constructorOf[ScalaObject.type] // error | ^ | Type argument ScalaObject.type does not conform to upper bound scala.scalajs.js.Any - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check b/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check index 6916c33b9cc7..7b4c98a95008 100644 --- a/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check +++ b/tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.check @@ -13,7 +13,7 @@ -- Error: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:17:59 ---------------------------------------- 17 | val d = js.constructorTag[NativeJSClass { def bar: Int }] // error | ^ - | NativeJSClass{bar: => Int} is not a class type + | NativeJSClass{bar: Int} is not a class type -- Error: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:19:36 ---------------------------------------- 19 | val e = js.constructorTag[JSTrait] // error | ^ @@ -29,7 +29,7 @@ -- Error: 
tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:23:53 ---------------------------------------- 23 | val h = js.constructorTag[JSClass { def bar: Int }] // error | ^ - | JSClass{bar: => Int} is not a class type + | JSClass{bar: Int} is not a class type -- Error: tests/neg-scalajs/jsconstructortag-error-in-prepjsinterop.scala:25:45 ---------------------------------------- 25 | def foo[A <: js.Any] = js.constructorTag[A] // error | ^ diff --git a/tests/neg-scalajs/jsconstructortag-error-in-typer.check b/tests/neg-scalajs/jsconstructortag-error-in-typer.check index a18ed693383e..22212957403f 100644 --- a/tests/neg-scalajs/jsconstructortag-error-in-typer.check +++ b/tests/neg-scalajs/jsconstructortag-error-in-typer.check @@ -4,7 +4,7 @@ |no implicit argument of type scala.scalajs.js.ConstructorTag[ScalaClass] was found for parameter tag of method constructorTag in package scala.scalajs.js. |I found: | - | scala.scalajs.js.ConstructorTag.materialize[Nothing] + | scala.scalajs.js.ConstructorTag.materialize[T] | |But method materialize in object ConstructorTag does not match type scala.scalajs.js.ConstructorTag[ScalaClass]. -- Error: tests/neg-scalajs/jsconstructortag-error-in-typer.scala:10:39 ------------------------------------------------ @@ -13,7 +13,7 @@ |no implicit argument of type scala.scalajs.js.ConstructorTag[ScalaTrait] was found for parameter tag of method constructorTag in package scala.scalajs.js. |I found: | - | scala.scalajs.js.ConstructorTag.materialize[Nothing] + | scala.scalajs.js.ConstructorTag.materialize[T] | |But method materialize in object ConstructorTag does not match type scala.scalajs.js.ConstructorTag[ScalaTrait]. -- Error: tests/neg-scalajs/jsconstructortag-error-in-typer.scala:11:45 ------------------------------------------------ @@ -22,6 +22,6 @@ |no implicit argument of type scala.scalajs.js.ConstructorTag[ScalaObject.type] was found for parameter tag of method constructorTag in package scala.scalajs.js. 
|I found: | - | scala.scalajs.js.ConstructorTag.materialize[Nothing] + | scala.scalajs.js.ConstructorTag.materialize[T] | |But method materialize in object ConstructorTag does not match type scala.scalajs.js.ConstructorTag[ScalaObject.type]. diff --git a/tests/neg-scalajs/jsname-argument.check b/tests/neg-scalajs/jsname-argument.check index c7640631e730..bccdee995b81 100644 --- a/tests/neg-scalajs/jsname-argument.check +++ b/tests/neg-scalajs/jsname-argument.check @@ -22,6 +22,10 @@ 42 | @JSName(new NamesClass().a) // error | ^^^^^^^^^^^^^^^^^^ | A js.Symbol argument to JSName must be a static, stable identifier +-- Warning: tests/neg-scalajs/jsname-argument.scala:49:10 -------------------------------------------------------------- +49 | @JSName(a) // warning, untested + | ^ + |This symbol is defined in the same object as the annotation's target. This will cause a stackoverflow at runtime -- Error: tests/neg-scalajs/jsname-argument.scala:57:3 ----------------------------------------------------------------- 57 | @JSName(Names.sym) // error | ^^^^^^^^^^^^^^^^^^ diff --git a/tests/neg-scalajs/type-mismatch.check b/tests/neg-scalajs/type-mismatch.check index 2ea5f1f23945..73bb52079e3d 100644 --- a/tests/neg-scalajs/type-mismatch.check +++ b/tests/neg-scalajs/type-mismatch.check @@ -3,5 +3,5 @@ | ^^^ | Found: (msg : String) | Required: Int - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/10349.scala b/tests/neg/10349.scala new file mode 100644 index 000000000000..4ea683f6a8fb --- /dev/null +++ b/tests/neg/10349.scala @@ -0,0 +1,11 @@ +object Firsts: + + type First[X] = X match + case Map[_, v] => First[Option[v]] + + def first[X](x: X): First[X] = x match + case x: Map[_, _] => first(x.values.headOption) // error + + @main + def runFirsts2(): Unit = + assert(first(Map.empty[Int, Int]) == None) // error diff --git a/tests/neg/10747.scala b/tests/neg/10747.scala new 
file mode 100644 index 000000000000..a299f2a6590c --- /dev/null +++ b/tests/neg/10747.scala @@ -0,0 +1,5 @@ +type Foo[A] = A match { + case Int => String +} + +type B = Foo[Boolean] // error diff --git a/tests/neg/12050.scala b/tests/neg/12050.scala new file mode 100644 index 000000000000..a8e073bb353b --- /dev/null +++ b/tests/neg/12050.scala @@ -0,0 +1,34 @@ +class P[X, Y] + +type Swap[X] = X match + case P[x, y] => Swap[P[y, x]] + +val z: P[String, Int] = ??? : Swap[P[Int, String]] // error +// ^ +// Recursion limit exceeded. +// Maybe there is an illegal cyclic reference? +// If that's not the case, you could also try to increase the stacksize using the -Xss JVM option. +// A recurring operation is (inner to outer): +// +// normalizing P[String, Int] match ... +// normalizing P[Int, String] match ... +// normalizing P[String, Int] match ... +// normalizing P[Int, String] match ... +// normalizing P[String, Int] match ... +// normalizing P[Int, String] match ... +// normalizing P[String, Int] match ... +// normalizing P[Int, String] match ... +// normalizing P[String, Int] match ... +// normalizing P[Int, String] match ... +// ... +// +// normalizing P[String, Int] match ... +// normalizing P[Int, String] match ... +// normalizing P[String, Int] match ... +// normalizing P[Int, String] match ... +// normalizing P[String, Int] match ... +// normalizing P[Int, String] match ... +// normalizing P[String, Int] match ... +// normalizing P[Int, String] match ... +// normalizing P[String, Int] match ... +// normalizing P[Int, String] match ... 
diff --git a/tests/neg/12249.scala b/tests/neg/12249.scala new file mode 100644 index 000000000000..81e8ce053584 --- /dev/null +++ b/tests/neg/12249.scala @@ -0,0 +1,9 @@ + +export java.util.UUID + +def bar = println(new UUID(1, 2)) // OK + +def foo = println(UUID.randomUUID()) // error +// ^^^^ +// Not found: UUID + diff --git a/tests/neg/12261.scala b/tests/neg/12261.scala new file mode 100644 index 000000000000..70120671a05b --- /dev/null +++ b/tests/neg/12261.scala @@ -0,0 +1,15 @@ +type M0[X] = X match { + case ? => String // error: Unbound wildcard type +} + +type M1[X] = X match { + case Any => _ // error: Unbound wildcard type +} + +type M2[X] = X match { + case Any => ? // error: Unbound wildcard type +} + +val a = "" match { case _: _ => () } // error: Unbound wildcard type + +val b = try { } catch { case _: _ => () } // error: Unbound wildcard type diff --git a/tests/neg/12549.scala b/tests/neg/12549.scala new file mode 100644 index 000000000000..4e2a5531efa0 --- /dev/null +++ b/tests/neg/12549.scala @@ -0,0 +1,18 @@ +enum Bool { + case True + case False +} + +import Bool.* + +type Not[B <: Bool] = B match { + case True.type => False.type + case False.type => True.type + case _ => "unreachable" +} + +def foo[B <: Bool & Singleton]: Unit = { + implicitly[Not[B] =:= "unreachable"] // error + + () +} diff --git a/tests/neg/12800.scala b/tests/neg/12800.scala new file mode 100644 index 000000000000..164276396bec --- /dev/null +++ b/tests/neg/12800.scala @@ -0,0 +1,21 @@ +object Test { + type FieldType2[K, +V] = V with KeyTag2[K, V] + trait KeyTag2[K, +V] extends Any + + type WrapUpper = Tuple + type Wrap[A] = Tuple1[A] + + type Extract[A <: WrapUpper] = A match { + case Wrap[h] => h + } + + summon[Extract[Wrap[FieldType2["foo", Int]]] =:= FieldType2["foo", Int]] // error + // ^ + // Cannot prove that Main.Extract[Tuple1[Main.FieldType2[("foo" : String), Int]]] =:= Main.FieldType2[("foo" : String), Int]. 
+ // + // Note: a match type could not be fully reduced: + // + // trying to reduce Main.Extract[Tuple1[Main.FieldType2[("foo" : String), Int]]] + // failed since selector Tuple1[Main.FieldType2[("foo" : String), Int]] + // is uninhabited. +} diff --git a/tests/neg/12974.scala b/tests/neg/12974.scala new file mode 100644 index 000000000000..90edcc916471 --- /dev/null +++ b/tests/neg/12974.scala @@ -0,0 +1,35 @@ +package example + +object RecMap { + + object Record { + // use this scope to bound who can see inside the opaque type + opaque type Rec[A <: Tuple] = Map[String, Any] + + object Rec { + type HasKey[A <: Tuple, K] = + A match + case (K, t) *: _ => t + case _ *: t => HasKey[t, K] + + val empty: Rec[EmptyTuple] = Map.empty + + extension [A <: Tuple](toMap: Rec[A]) + def fetch[K <: String & Singleton](key: K): HasKey[A, K] = + toMap(key).asInstanceOf[HasKey[A, K]] + } + } + + def main(args: Array[String]) = + import Record._ + + val foo: Any = Rec.empty.fetch("foo") // error + // ^ + // Match type reduction failed since selector EmptyTuple.type + // matches none of the cases + // + // case (("foo" : String), t) *: _ => t + // case _ *: t => example.RecMap.Record.Rec.HasKey[t, ("foo" : String)] + + end main +} diff --git a/tests/neg/3559d.check b/tests/neg/3559d.check index 128093d8579a..0f0e05f784b2 100644 --- a/tests/neg/3559d.check +++ b/tests/neg/3559d.check @@ -2,5 +2,5 @@ 7 | this(f) // error | ^ | Not found: f - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/6697.check b/tests/neg/6697.check new file mode 100644 index 000000000000..3249ad7aa4af --- /dev/null +++ b/tests/neg/6697.check @@ -0,0 +1,13 @@ +-- [E057] Type Mismatch Error: tests/neg/6697.scala:6:35 --------------------------------------------------------------- +6 | type Copy[O <: Off] = Of[Sup[O], Sub[O]] // error + | ^ + | Type argument Test.Sub[O] does not conform to upper bound 
Test.Sup[O] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.Sub[O] + | failed since selector O + | does not match case Test.Of[sup, sub] => sub + | and cannot be shown to be disjoint from it either. + | + | longer explanation available when compiling with `-explain` diff --git a/tests/pos/6697.scala b/tests/neg/6697.scala similarity index 79% rename from tests/pos/6697.scala rename to tests/neg/6697.scala index 18a59d5b61f4..bd3b30f8079b 100644 --- a/tests/pos/6697.scala +++ b/tests/neg/6697.scala @@ -3,5 +3,5 @@ object Test { case class Of[sup, sub <: sup]() extends Off type Sup[O <: Off] = O match { case Of[sup, sub] => sup } type Sub[O <: Off] = O match { case Of[sup, sub] => sub } - type Copy[O <: Off] = Of[Sup[O], Sub[O]] + type Copy[O <: Off] = Of[Sup[O], Sub[O]] // error } diff --git a/tests/neg/7380.scala b/tests/neg/7380.scala new file mode 100644 index 000000000000..20f64db90447 --- /dev/null +++ b/tests/neg/7380.scala @@ -0,0 +1,14 @@ +import scala.deriving.Mirror + +object Test { + summon[Mirror.Of[(Int, String)] { + type MirroredElemTypes = (Int, Int, Int) + }] // error + // MirroredElemTypes missmatch, expected: (Int, String), found: (Int, Int, Int). + + summon[Mirror.Of[(Int, String)] { + type MirroredElemLabels = ("_1", "_2", "_3") + }] // error + // MirroredElemLabels missmatch, expected: (("_1" : String), ("_2" : String)), + // found: (("_1" : String), ("_2" : String), ("_3" : String)). 
+} diff --git a/tests/neg/8702.scala b/tests/neg/8702.scala new file mode 100644 index 000000000000..ae4fe38ffb50 --- /dev/null +++ b/tests/neg/8702.scala @@ -0,0 +1,63 @@ +// All three All type positions in a match type (scrutinee, patterns, bodies) +// are considered invariant, as showed by the following examples: + +trait TA[+Plus] { type M[X] = Plus match { case Int => String } } // error +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +// covariant type Plus occurs in invariant position in type [X] = +// Plus match { +// case Int => String +// } of type M + +trait TB[+Plus] { type M[X] = X match { case Plus => String } } // error +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +// covariant type Plus occurs in invariant position in type [X] = +// X match { +// case Plus => String +// } of type M + +trait TC[+Plus] { type M[X] = X match { case Int => Plus } } // error +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +// covariant type Plus occurs in invariant position in type [X] = +// X match { +// case Int => Plus +// } of type M + +trait TD[-Minus] { type M[X] = Minus match { case Int => String } } // error +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +// contravariant type Minus occurs in invariant position in type [X] = +// Minus match { +// case Int => String +// } of type M + +trait TE[-Minus] { type M[X] = X match { case Minus => String } } // error +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +// contravariant type Minus occurs in invariant position in type [X] = +// X match { +// case Minus => String +// } of type M + +trait TF[-Minus] { type M[X] = X match { case Int => Minus } } // error +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +// contravariant type Minus occurs in invariant position in type [X] = +// X match { +// case Int => Minus +// } of type M + +// Furthermore, both unannotated type parameters and unannotated type bindings +// in patterns are invariant, as showed by the following examples: + +trait Cov[+X] +trait Contra[-X] + +// 
As usual: +type Test0[X] = Cov[X] // OK +type Test1[X] = Contra[X] // OK +type Test2[+X] = Contra[X] // error: covariant type parameter X occurs in + // in contravariant position in Contra[X] +type Test3[-X] = Cov[X] // error: contravariant type parameter X occurs in + // covariant position in Cov[X] + +type M0[X] = X match { case Int => Cov[X] } +type M1[X] = X match { case Int => Contra[X] } +type M2[X] = X match { case Cov[x] => Contra[x] } +type M3[X] = X match { case Contra[x] => Cov[x] } diff --git a/tests/neg/9107.scala b/tests/neg/9107.scala new file mode 100644 index 000000000000..a0cbabd50b4d --- /dev/null +++ b/tests/neg/9107.scala @@ -0,0 +1,14 @@ +trait M[F[_]] +trait Inv[T] + +object Test { + def ev[X] = implicitly[ + (X match { case Inv[t] => Int }) =:= + (X match { case Inv[t] => t }) + ] // error + + def ev2[X] = implicitly[ + (M[[t] =>> runtime.MatchCase[Inv[t], Int]]) =:= + (M[[t] =>> runtime.MatchCase[Inv[t], t]]) + ] // error +} diff --git a/tests/neg/EmptyCaseClassParams.check b/tests/neg/EmptyCaseClassParams.check index 6cd019571c73..67ecda360cd4 100644 --- a/tests/neg/EmptyCaseClassParams.check +++ b/tests/neg/EmptyCaseClassParams.check @@ -2,17 +2,17 @@ 2 | case class A[T] // error | ^ | A case class must have at least one parameter list - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E004] Syntax Error: tests/neg/EmptyCaseClassParams.scala:5:13 ------------------------------------------------------ 5 | case class B[T] // error | ^ | A case class must have at least one parameter list - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E004] Syntax Error: tests/neg/EmptyCaseClassParams.scala:9:9 ------------------------------------------------------- 9 | case D[T] extends Foo[T] // error | ^ | A case class must have at least one parameter list - -longer explanation available 
when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/EmptyEnum.check b/tests/neg/EmptyEnum.check index 91281e6904e7..2ee3b74b4c15 100644 --- a/tests/neg/EmptyEnum.check +++ b/tests/neg/EmptyEnum.check @@ -2,5 +2,5 @@ 1 |enum EmptyEnum {} // error | ^^^^^^^^^ | Enumerations must contain at least one case - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/OpaqueEscape.scala b/tests/neg/OpaqueEscape.scala index a6f701fd4e3b..1596003d4244 100644 --- a/tests/neg/OpaqueEscape.scala +++ b/tests/neg/OpaqueEscape.scala @@ -7,7 +7,7 @@ def unwrap(i:Wrapped):Int def wrap(i:Int):Wrapped } class Escaper extends EscaperBase{ // error: needs to be abstract - override def unwrap(i:Int):Int = i // error overriding method unwrap + override def unwrap(i:Int):Int = i // was error overriding method unwrap, now OK override def wrap(i:Int):Int = i // error overriding method wrap } val e = new Escaper:EscaperBase diff --git a/tests/neg/SummonFrom.check b/tests/neg/SummonFrom.check index e8bb4ad521f5..f0a3d4d1dbee 100644 --- a/tests/neg/SummonFrom.check +++ b/tests/neg/SummonFrom.check @@ -2,11 +2,11 @@ 4 | case x => ??? // error | ^ | Unexpected pattern for summonFrom. Expected `x: T` or `_` - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E153] Syntax Error: tests/neg/SummonFrom.scala:8:7 ----------------------------------------------------------------- 8 | case x@String => ??? // error | ^^^^^^^^ | Unexpected pattern for summonFrom. 
Expected `x: T` or `_` - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/abstract-givens.check b/tests/neg/abstract-givens.check index e51e3bded95f..a74d0097b091 100644 --- a/tests/neg/abstract-givens.check +++ b/tests/neg/abstract-givens.check @@ -1,7 +1,7 @@ -- Error: tests/neg/abstract-givens.scala:11:8 ------------------------------------------------------------------------- 11 | given s[T](using T): Seq[T] with // error | ^ - |instance cannot be created, since def iterator: => Iterator[A] in trait IterableOnce in package scala.collection is not defined + |instance cannot be created, since def iterator: Iterator[A] in trait IterableOnce in package scala.collection is not defined -- [E164] Declaration Error: tests/neg/abstract-givens.scala:8:8 ------------------------------------------------------- 8 | given y(using Int): String = summon[Int].toString * 22 // error | ^ @@ -12,5 +12,5 @@ | ^ | error overriding given instance z in trait T of type [T](using x$1: T): List[T]; | given instance z of type [T](using x$1: T): Seq[T] has incompatible type - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/ambiref.check b/tests/neg/ambiref.check index d222940e0dbc..95b542c7aae3 100644 --- a/tests/neg/ambiref.check +++ b/tests/neg/ambiref.check @@ -4,29 +4,29 @@ | Reference to x is ambiguous, | it is both defined in object Test | and inherited subsequently in class D - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E049] Reference Error: tests/neg/ambiref.scala:10:14 --------------------------------------------------------------- 10 | println(x) // error | ^ | Reference to x is ambiguous, | it is both defined in object Test | and inherited subsequently in anonymous class test1.C {...} - 
-longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E049] Reference Error: tests/neg/ambiref.scala:17:14 --------------------------------------------------------------- 17 | println(y) // error | ^ | Reference to y is ambiguous, | it is both defined in method c | and inherited subsequently in anonymous class D {...} - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E049] Reference Error: tests/neg/ambiref.scala:25:16 --------------------------------------------------------------- 25 | println(y) // error | ^ | Reference to y is ambiguous, | it is both defined in method c | and inherited subsequently in class E - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/annot-printing.check b/tests/neg/annot-printing.check new file mode 100644 index 000000000000..c16112fe18b9 --- /dev/null +++ b/tests/neg/annot-printing.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg/annot-printing.scala:5:46 ----------------------------------------------------- +5 |def x: Int @nowarn @main @Foo @Bar("hello") = "abc" // error + | ^^^^^ + | Found: ("abc" : String) + | Required: Int @nowarn() @main @Foo @Bar("hello") + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/annot-printing.scala b/tests/neg/annot-printing.scala new file mode 100644 index 000000000000..b917422801ac --- /dev/null +++ b/tests/neg/annot-printing.scala @@ -0,0 +1,6 @@ +import scala.annotation.* +class Foo() extends Annotation +class Bar(s: String) extends Annotation + +def x: Int @nowarn @main @Foo @Bar("hello") = "abc" // error + diff --git a/tests/neg/arg-eof.scala b/tests/neg/arg-eof.scala new file mode 100644 index 000000000000..ba860b5dfae0 --- /dev/null +++ b/tests/neg/arg-eof.scala @@ -0,0 +1,3 @@ 
+object Test: + case class Widget(name: String, other: Int = 5) + Widget(name = "foo", // error // error \ No newline at end of file diff --git a/tests/neg/bad-unapplies.check b/tests/neg/bad-unapplies.check index a2ee445b7db5..44633ca6950a 100644 --- a/tests/neg/bad-unapplies.check +++ b/tests/neg/bad-unapplies.check @@ -5,35 +5,35 @@ | (x: B): Option[String] | (x: A): Option[String] | both match arguments (C) - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E127] Syntax Error: tests/neg/bad-unapplies.scala:23:9 ------------------------------------------------------------- 23 | case B("2") => // error (cannot be used as an extractor) | ^ | B cannot be used as an extractor in a pattern because it lacks an unapply or unapplySeq method - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E127] Syntax Error: tests/neg/bad-unapplies.scala:24:9 ------------------------------------------------------------- 24 | case D("2") => // error (cannot be used as an extractor) | ^ | D cannot be used as an extractor in a pattern because it lacks an unapply or unapplySeq method - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E050] Type Error: tests/neg/bad-unapplies.scala:25:9 --------------------------------------------------------------- 25 | case E("2") => // error (value unapply in object E does not take parameters) | ^ | value unapply in object E does not take parameters - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E107] Syntax Error: tests/neg/bad-unapplies.scala:26:10 ------------------------------------------------------------ 26 | case F("2") => // error (Wrong number of argument patterns for F; expected: ()) | ^^^^^^ | Wrong number of 
argument patterns for F; expected: () - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E006] Not Found Error: tests/neg/bad-unapplies.scala:27:9 ---------------------------------------------------------- 27 | case G("2") => // error (Not found: G) | ^ | Not found: G - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/cannot-reduce-inline-match.check b/tests/neg/cannot-reduce-inline-match.check index ceca40a74cbd..10ffaecbd2e8 100644 --- a/tests/neg/cannot-reduce-inline-match.check +++ b/tests/neg/cannot-reduce-inline-match.check @@ -4,4 +4,12 @@ | cannot reduce inline match with | scrutinee: "f" : ("f" : String) | patterns : case _:Int - | This location contains code that was inlined from cannot-reduce-inline-match.scala:3 + |--------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from cannot-reduce-inline-match.scala:3 +3 | inline x match { + | ^ +4 | case _: Int => +5 | } + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/derive-eq.scala b/tests/neg/derive-eq.scala index fa73cf63a5bf..2a9e35a24a19 100644 --- a/tests/neg/derive-eq.scala +++ b/tests/neg/derive-eq.scala @@ -20,10 +20,10 @@ object Test extends App { val y: Triple[List[Two], One, Two] = ??? val z: Triple[One, List[Two], One] = ??? 
x == y // OK + y == x // OK x == x // OK y == y // OK - y == x // error x == z // error z == y // error } diff --git a/tests/neg/doubleDefinition.check b/tests/neg/doubleDefinition.check index 7edfcdefcc64..b02e9446cf9b 100644 --- a/tests/neg/doubleDefinition.check +++ b/tests/neg/doubleDefinition.check @@ -20,25 +20,25 @@ | ^ | Double definition: | val foo: Int in class Test4 at line 25 and - | def foo: => Int in class Test4 at line 26 + | def foo: Int in class Test4 at line 26 -- [E120] Naming Error: tests/neg/doubleDefinition.scala:31:4 ---------------------------------------------------------- 31 | val foo = 1 // error | ^ | Double definition: - | def foo: => Int in class Test4b at line 30 and + | def foo: Int in class Test4b at line 30 and | val foo: Int in class Test4b at line 31 -- [E120] Naming Error: tests/neg/doubleDefinition.scala:36:4 ---------------------------------------------------------- 36 | var foo = 1 // error | ^ | Double definition: - | def foo: => Int in class Test4c at line 35 and + | def foo: Int in class Test4c at line 35 and | var foo: Int in class Test4c at line 36 -- [E120] Naming Error: tests/neg/doubleDefinition.scala:41:4 ---------------------------------------------------------- 41 | def foo = 2 // error | ^ | Double definition: | var foo: Int in class Test4d at line 40 and - | def foo: => Int in class Test4d at line 41 + | def foo: Int in class Test4d at line 41 -- [E120] Naming Error: tests/neg/doubleDefinition.scala:55:4 ---------------------------------------------------------- 55 | def foo(x: List[B]): Function1[B, B] = ??? 
// error: same jvm signature | ^ @@ -61,70 +61,70 @@ | ^ | Double definition: | val foo: Int in class Test8 at line 66 and - | def foo: => Int in class Test8 at line 67 + | def foo: Int in class Test8 at line 67 -- [E120] Naming Error: tests/neg/doubleDefinition.scala:72:4 ---------------------------------------------------------- 72 | val foo = 1 // error | ^ | Double definition: - | def foo: => Int in class Test8b at line 71 and + | def foo: Int in class Test8b at line 71 and | val foo: Int in class Test8b at line 72 -- [E120] Naming Error: tests/neg/doubleDefinition.scala:77:4 ---------------------------------------------------------- 77 | var foo = 1 // error | ^ | Double definition: - | def foo: => Int in class Test8c at line 76 and + | def foo: Int in class Test8c at line 76 and | var foo: Int in class Test8c at line 77 -- [E120] Naming Error: tests/neg/doubleDefinition.scala:82:4 ---------------------------------------------------------- 82 | def foo = 2 // error | ^ | Double definition: | var foo: Int in class Test8d at line 81 and - | def foo: => Int in class Test8d at line 82 + | def foo: Int in class Test8d at line 82 -- [E120] Naming Error: tests/neg/doubleDefinition.scala:88:4 ---------------------------------------------------------- 88 | def foo: String // error | ^ | Double definition: | val foo: Int in class Test9 at line 87 and - | def foo: => String in class Test9 at line 88 + | def foo: String in class Test9 at line 88 -- [E120] Naming Error: tests/neg/doubleDefinition.scala:92:4 ---------------------------------------------------------- 92 | def foo: Int // error | ^ | Double definition: | val foo: Int in class Test10 at line 91 and - | def foo: => Int in class Test10 at line 92 + | def foo: Int in class Test10 at line 92 -- [E120] Naming Error: tests/neg/doubleDefinition.scala:96:4 ---------------------------------------------------------- 96 | def foo: String // error | ^ | Double definition: | val foo: Int in class Test11 at line 95 and - | 
def foo: => String in class Test11 at line 96 + | def foo: String in class Test11 at line 96 -- [E120] Naming Error: tests/neg/doubleDefinition.scala:100:4 --------------------------------------------------------- 100 | def foo: Int // error | ^ | Double definition: | val foo: Int in class Test12 at line 99 and - | def foo: => Int in class Test12 at line 100 + | def foo: Int in class Test12 at line 100 -- [E120] Naming Error: tests/neg/doubleDefinition.scala:104:4 --------------------------------------------------------- 104 | def foo: String // error | ^ | Double definition: | var foo: Int in class Test13 at line 103 and - | def foo: => String in class Test13 at line 104 + | def foo: String in class Test13 at line 104 -- [E120] Naming Error: tests/neg/doubleDefinition.scala:108:4 --------------------------------------------------------- 108 | def foo: Int // error | ^ | Double definition: | var foo: Int in class Test14 at line 107 and - | def foo: => Int in class Test14 at line 108 + | def foo: Int in class Test14 at line 108 -- [E120] Naming Error: tests/neg/doubleDefinition.scala:112:4 --------------------------------------------------------- 112 | def foo: String // error | ^ | Double definition: | var foo: Int in class Test15 at line 111 and - | def foo: => String in class Test15 at line 112 + | def foo: String in class Test15 at line 112 -- [E120] Naming Error: tests/neg/doubleDefinition.scala:116:4 --------------------------------------------------------- 116 | def foo: Int // error | ^ | Double definition: | var foo: Int in class Test16 at line 115 and - | def foo: => Int in class Test16 at line 116 + | def foo: Int in class Test16 at line 116 diff --git a/tests/neg/enumWithType.check b/tests/neg/enumWithType.check index fcc06d3532a4..76bc966e2955 100644 --- a/tests/neg/enumWithType.check +++ b/tests/neg/enumWithType.check @@ -2,5 +2,5 @@ 2 | case C[U](u: U) // error | ^ | explicit extends clause needed because both enum case and enum class have type 
parameters - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/enumsLabel-singleimpl.scala b/tests/neg/enumsLabel-singleimpl.scala index 8f72a6cc71f2..a246ac9e5461 100644 --- a/tests/neg/enumsLabel-singleimpl.scala +++ b/tests/neg/enumsLabel-singleimpl.scala @@ -1,13 +1,13 @@ enum Ordinalled { - case A // error: method ordinal of type => Int needs `override` modifier + case A - def ordinal: Int = -1 + def ordinal: Int = -1 // error: the ordinal method of enum class Ordinalled can not be defined by the user } trait HasOrdinal { def ordinal: Int = 23 } -enum MyEnum extends HasOrdinal { - case Foo // error: method ordinal of type => Int needs `override` modifier +enum MyEnum extends HasOrdinal { // error: enum class MyEnum can not inherit the concrete ordinal method of trait HasOrdinal + case Foo } diff --git a/tests/neg/equality1.scala b/tests/neg/equality1.scala index 77ddb371051a..74bd45b18c12 100644 --- a/tests/neg/equality1.scala +++ b/tests/neg/equality1.scala @@ -3,4 +3,133 @@ object equality1 { class A class B new A == new B // error: cannot compare + + case class Foo(n: Int) derives CanEqual + + sealed trait Status derives CanEqual + object Status { + case class Active(since: Int) extends Status + case object Pending extends Status + case object Inactive extends Status + } + + enum Color derives CanEqual { + case Red + case Green + case Blue + } + + val option1a: Option[Int] = Some(1) + val option1b: Option[Int] = Some(1) + option1a == option1b + + option1a match { + case Some(1) => + println("1") + case Some(n) => + println("Not 1") + case None => // This None case doesn't work without CanEqual.canEqualOption[T] + println("None") + } + + 1 == '1' + val option2a: Option[Int] = Some(1) + val option2b: Option[Char] = Some('1') + option2a == option2b + + val option3a: Option[Foo] = Some(Foo(1)) + val option3b: Option[Foo] = Some(Foo(1)) + option3a == option3b + 
+ val option4a: Option[Status] = Some(Status.Active(2020)) + val option4b: Option[Status] = Some(Status.Pending) + val option4c: Option[Status] = Some(Status.Inactive) + option4a == option4b + option4b == option4c + + val option5a: Option[Color] = Some(Color.Red) + val option5b: Option[Color] = Some(Color.Green) + val option5c: Option[Color] = Some(Color.Blue) + option5a == option5b + option5b == option5c + + val optionError1a: Option[Int] = Some(1) + val optionError1b: Option[String] = Some("1") + optionError1a == optionError1b // error: cannot compare + + val optionError2a: Option[Char] = Some('a') + val optionError2b: Option[String] = Some("a") + optionError2a == optionError2b // error: cannot compare + + val optionTuple1a: Option[(Int, String)] = Some((1, "OK")) + val optionTuple1b: Option[(Int, String)] = Some((1, "OK")) + optionTuple1a == optionTuple1b + + 'a' == 97 + val optionTuple2a: Option[(Int, Char)] = Some((1, 'a')) + val optionTuple2b: Option[(Int, Int)] = Some((1, 97)) + optionTuple2a == optionTuple2b + + val optionTupleError1a: Option[(Int, String)] = Some((1, "OK")) + val optionTupleError1b: Option[(String, Int)] = Some(("OK", 1)) + optionTupleError1a == optionTupleError1b // error: cannot compare + + val eitherL1a: Either[String, Int] = Left("Error") + val eitherL1b: Either[String, Int] = Left("Error") + eitherL1a == eitherL1b + + val eitherR1a: Either[String, Int] = Right(999) + val eitherR1b: Either[String, Int] = Right(999) + eitherR1a == eitherR1b + + val eitherErrorL1a: Either[String, Int] = Left("Error") + val eitherErrorL1b: Either[Char, Int] = Left('E') + eitherErrorL1a == eitherErrorL1b // error: cannot compare + + val eitherErrorR1a: Either[String, Int] = Right(999) + val eitherErrorR1b: Either[String, String] = Right("999") + eitherErrorR1a == eitherErrorR1b // error: cannot compare + + + val eitherTupleL1a: Either[(String, Long), (Int, Boolean)] = Left(("Error", 123L)) + val eitherTupleL1b: Either[(String, Long), (Int, Boolean)] = 
Left(("Error", 123L)) + eitherTupleL1a == eitherTupleL1b + + val eitherTupleR1a: Either[(String, Long), (Int, Boolean)] = Right((999, true)) + val eitherTupleR1b: Either[(String, Long), (Int, Boolean)] = Right((999, true)) + eitherTupleR1a == eitherTupleR1b + + val eitherTupleErrorL1a: Either[(String, Long), (Int, Boolean)] = Left(("Error", 123L)) + val eitherTupleErrorL1b: Either[(Long, String), (Int, Boolean)] = Left((123L, "Error")) + eitherTupleErrorL1a == eitherTupleErrorL1b // error: cannot compare + + val eitherTupleErrorR1a: Either[(String, Long), (Int, Boolean)] = Right((999, true)) + val eitherTupleErrorR1b: Either[(String, Long), (Boolean, Int)] = Right((true, 999)) + eitherTupleErrorR1a == eitherTupleErrorR1b // error: cannot compare + + (1, "a") == (1, "a") + (1, "a", true) == (1, "a", true) + (1, "a", true, 't') == (1, "a", true, 't') + (1, "a", true, 't', 10L) == (1, "a", true, 't', 10L) + + (1, "a") == (1, 'a') // error: cannot compare + (1, "a") == ("a", 1) // error: cannot compare + (1, "a") == (1, "a", true) // error: cannot compare + (1, "a", true, 't', 10L) == (1, "a", 1.5D, 't', 10L) // error: cannot compare + + + val ns1 = List(1, 2, 3, 4, 5) + val ns2 = List(1, 2, 3, 4, 5) + ns1 == ns2 + + val ss = List("1", "2", "3", "4", "5") + ns1 == ss // error: cannot compare + + ns1 match { + case n :: ns => + println(s"head: $n, tail: ${ns.mkString("[", ",", "]")}") + case Nil => + println("empty") + } + } diff --git a/tests/neg/erased-class.scala b/tests/neg/erased-class.scala index 29b28d7d5275..96a1c8769bb1 100644 --- a/tests/neg/erased-class.scala +++ b/tests/neg/erased-class.scala @@ -1,9 +1,10 @@ import language.experimental.erasedDefinitions +import scala.annotation.compileTimeOnly erased class AA erased class BB extends AA // ok @main def Test = - val f1: Array[AA] = ??? // error - def f2(x: Int): Array[AA] = ??? // error - def bar: AA = ??? // ok - val baz: AA = ??? 
// ok + val f1: Array[AA] = compiletime.erasedValue // error // error + def f2(x: Int): Array[AA] = compiletime.erasedValue // error // error + def bar: AA = compiletime.erasedValue // ok + val baz: AA = compiletime.erasedValue // ok diff --git a/tests/neg/experimentalInheritance.scala b/tests/neg/experimentalInheritance.scala new file mode 100644 index 000000000000..8b6c0b11afa3 --- /dev/null +++ b/tests/neg/experimentalInheritance.scala @@ -0,0 +1,44 @@ +import scala.annotation.experimental + +@experimental +class A + +@experimental +trait T + +class B extends A // error + +@experimental +class B2 extends A + +class C extends T // error + +@experimental +class C2 extends T + +@experimental +class O: + class X + + @experimental + class Y + + object Z + +@experimental +object O: + class A + + @experimental + class B + + object C + +class OA extends O.A // error +class OB extends O.B // error + +@experimental +class OA2 extends O.A + +@experimental +class OB2 extends O.B diff --git a/tests/neg/experimentalInheritance2.scala b/tests/neg/experimentalInheritance2.scala new file mode 100644 index 000000000000..84668ac5850f --- /dev/null +++ b/tests/neg/experimentalInheritance2.scala @@ -0,0 +1,6 @@ +import scala.annotation.experimental + +@experimental class A + +class B // // error: extension of experimental class A1 must have @experimental annotation + extends A diff --git a/tests/neg/experimentalOverloads.scala b/tests/neg/experimentalOverloads.scala new file mode 100644 index 000000000000..7adaf0b78840 --- /dev/null +++ b/tests/neg/experimentalOverloads.scala @@ -0,0 +1,11 @@ +import scala.annotation.experimental + +trait A: + def f: Int + def g: Int = 3 +trait B extends A: + @experimental + def f: Int = 4 // error + + @experimental + override def g: Int = 5 // error diff --git a/tests/neg/exports.check b/tests/neg/exports.check index 8eeea9f5db8d..577f9e6b47ce 100644 --- a/tests/neg/exports.check +++ b/tests/neg/exports.check @@ -11,13 +11,13 @@ 25 | export 
printUnit.bitmap // error: no eligible member | ^ | non-private given instance bitmap in class Copier refers to private value printUnit - | in its type signature => Copier.this.printUnit.bitmap$ + | in its type signature => Copier.this.printUnit.bitmap -- [E120] Naming Error: tests/neg/exports.scala:23:33 ------------------------------------------------------------------ 23 | export printUnit.{stat => _, _} // error: double definition | ^ | Double definition: - | def status: => List[String] in class Copier at line 28 and - | final def status: => List[String] in class Copier at line 23 + | def status: List[String] in class Copier at line 28 and + | final def status: List[String] in class Copier at line 23 | have the same type after erasure. | | Consider adding a @targetName annotation to one of the conflicting definitions @@ -26,8 +26,8 @@ 24 | export scanUnit._ // error: double definition | ^ | Double definition: - | final def status: => List[String] in class Copier at line 23 and - | final def status: => List[String] in class Copier at line 24 + | final def status: List[String] in class Copier at line 23 and + | final def status: List[String] in class Copier at line 24 | have the same type after erasure. | | Consider adding a @targetName annotation to one of the conflicting definitions @@ -36,8 +36,8 @@ 26 | export printUnit.status // error: double definition | ^ | Double definition: - | final def status: => List[String] in class Copier at line 24 and - | final def status: => List[String] in class Copier at line 26 + | final def status: List[String] in class Copier at line 24 and + | final def status: List[String] in class Copier at line 26 | have the same type after erasure. 
| | Consider adding a @targetName annotation to one of the conflicting definitions @@ -57,4 +57,4 @@ | ^ | Double definition: | val bar: Bar in class Baz at line 45 and - | final def bar: => (Baz.this.bar.bar : => (Baz.this.bar.baz.bar : Bar)) in class Baz at line 46 + | final def bar: (Baz.this.bar.bar : => (Baz.this.bar.baz.bar : Bar)) in class Baz at line 46 diff --git a/tests/neg/extend-matchable.scala b/tests/neg/extend-matchable.scala index 18dee7a1fb53..031c12f21d3e 100644 --- a/tests/neg/extend-matchable.scala +++ b/tests/neg/extend-matchable.scala @@ -12,9 +12,8 @@ class E1 extends AnyRef, M // OK class F1 extends Any, M // error: Any does not have a constructor class C2 extends M0 // OK inferred base type is AnyRef -class D2(x: Int) extends AnyVal, M0 // error: illegal trait inheritance class E2 extends AnyRef, M0 // OK -class F2 extends Any, M0 // error: Any does not have a constructor // error: illegal trait inheritance +class F2 extends Any, M0 // error: Any does not have a constructor diff --git a/tests/neg/extension-specificity2.scala b/tests/neg/extension-specificity2.scala new file mode 100644 index 000000000000..0087dbbe7165 --- /dev/null +++ b/tests/neg/extension-specificity2.scala @@ -0,0 +1,10 @@ +trait Bla1[A]: + extension (x: A) def foo(y: A): Int +trait Bla2[A]: + extension (x: A) def foo(y: A): Int + +def test = + given bla1[T <: Int]: Bla1[T] = ??? + given bla2[S <: Int]: Bla2[S] = ??? 
+ + 1.foo(2) // error: never extension is more specific than the other diff --git a/tests/neg/f-interpolator-neg.check b/tests/neg/f-interpolator-neg.check new file mode 100644 index 000000000000..ea8df052589e --- /dev/null +++ b/tests/neg/f-interpolator-neg.check @@ -0,0 +1,200 @@ +-- Error: tests/neg/f-interpolator-neg.scala:4:4 ----------------------------------------------------------------------- +4 | new StringContext().f() // error + | ^^^^^^^^^^^^^^^^^^^^^ + | there are no parts +-- Error: tests/neg/f-interpolator-neg.scala:5:4 ----------------------------------------------------------------------- +5 | new StringContext("", " is ", "%2d years old").f(s) // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | too few arguments for interpolated string +-- Error: tests/neg/f-interpolator-neg.scala:6:4 ----------------------------------------------------------------------- +6 | new StringContext("", " is ", "%2d years old").f(s, d, d) // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | too many arguments for interpolated string +-- Error: tests/neg/f-interpolator-neg.scala:7:4 ----------------------------------------------------------------------- +7 | new StringContext("", "").f() // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | too few arguments for interpolated string +-- Error: tests/neg/f-interpolator-neg.scala:11:7 ---------------------------------------------------------------------- +11 | f"$s%b" // error + | ^ + | Found: (s : String), Required: Boolean, Null +-- Error: tests/neg/f-interpolator-neg.scala:12:7 ---------------------------------------------------------------------- +12 | f"$s%c" // error + | ^ + | Found: (s : String), Required: Char, Byte, Short, Int +-- Error: tests/neg/f-interpolator-neg.scala:13:7 ---------------------------------------------------------------------- +13 | f"$f%c" // error + | ^ + | Found: (f : Double), Required: Char, Byte, Short, Int +-- Error: tests/neg/f-interpolator-neg.scala:14:7 
---------------------------------------------------------------------- +14 | f"$s%x" // error + | ^ + | Found: (s : String), Required: Int, Long, Byte, Short, BigInt +-- Error: tests/neg/f-interpolator-neg.scala:15:7 ---------------------------------------------------------------------- +15 | f"$b%d" // error + | ^ + | Found: (b : Boolean), Required: Int, Long, Byte, Short, BigInt +-- Error: tests/neg/f-interpolator-neg.scala:16:7 ---------------------------------------------------------------------- +16 | f"$s%d" // error + | ^ + | Found: (s : String), Required: Int, Long, Byte, Short, BigInt +-- Error: tests/neg/f-interpolator-neg.scala:17:7 ---------------------------------------------------------------------- +17 | f"$f%o" // error + | ^ + | Found: (f : Double), Required: Int, Long, Byte, Short, BigInt +-- Error: tests/neg/f-interpolator-neg.scala:18:7 ---------------------------------------------------------------------- +18 | f"$s%e" // error + | ^ + | Found: (s : String), Required: Double, Float, BigDecimal +-- Error: tests/neg/f-interpolator-neg.scala:19:7 ---------------------------------------------------------------------- +19 | f"$b%f" // error + | ^ + | Found: (b : Boolean), Required: Double, Float, BigDecimal +-- Error: tests/neg/f-interpolator-neg.scala:20:9 ---------------------------------------------------------------------- +20 | f"$s%i" // error + | ^ + | illegal conversion character 'i' +-- Error: tests/neg/f-interpolator-neg.scala:24:9 ---------------------------------------------------------------------- +24 | f"$s%+ 0,(s" // error + | ^^^^^ + | Illegal flag '+' +-- Error: tests/neg/f-interpolator-neg.scala:25:9 ---------------------------------------------------------------------- +25 | f"$c%#+ 0,(c" // error + | ^^^^^^ + | Only '-' allowed for c conversion +-- Error: tests/neg/f-interpolator-neg.scala:26:9 ---------------------------------------------------------------------- +26 | f"$d%#d" // error + | ^ + | # not allowed for d conversion 
+-- Error: tests/neg/f-interpolator-neg.scala:27:9 ---------------------------------------------------------------------- +27 | f"$d%,x" // error + | ^ + | ',' only allowed for d conversion of integral types +-- Error: tests/neg/f-interpolator-neg.scala:28:9 ---------------------------------------------------------------------- +28 | f"$d%+ (x" // error + | ^^^ + | only use '+' for BigInt conversions to o, x, X +-- Error: tests/neg/f-interpolator-neg.scala:29:9 ---------------------------------------------------------------------- +29 | f"$f%,(a" // error + | ^^ + | ',' not allowed for a, A +-- Error: tests/neg/f-interpolator-neg.scala:30:9 ---------------------------------------------------------------------- +30 | f"$t%#+ 0,(tT" // error + | ^^^^^^ + | Only '-' allowed for date/time conversions +-- Error: tests/neg/f-interpolator-neg.scala:31:7 ---------------------------------------------------------------------- +31 | f"%-#+ 0,(n" // error + | ^^^^^^^ + | flags not allowed +-- Error: tests/neg/f-interpolator-neg.scala:32:7 ---------------------------------------------------------------------- +32 | f"%#+ 0,(%" // error + | ^^^^^^ + | Illegal flag '#' +-- Error: tests/neg/f-interpolator-neg.scala:36:9 ---------------------------------------------------------------------- +36 | f"$c%.2c" // error + | ^^ + | precision not allowed +-- Error: tests/neg/f-interpolator-neg.scala:37:9 ---------------------------------------------------------------------- +37 | f"$d%.2d" // error + | ^^ + | precision not allowed +-- Error: tests/neg/f-interpolator-neg.scala:38:7 ---------------------------------------------------------------------- +38 | f"%.2%" // error + | ^^ + | precision not allowed +-- Error: tests/neg/f-interpolator-neg.scala:39:7 ---------------------------------------------------------------------- +39 | f"%.2n" // error + | ^^ + | precision not allowed +-- Error: tests/neg/f-interpolator-neg.scala:40:9 
---------------------------------------------------------------------- +40 | f"$f%.2a" // error + | ^^ + | precision not allowed +-- Error: tests/neg/f-interpolator-neg.scala:41:9 ---------------------------------------------------------------------- +41 | f"$t%.2tT" // error + | ^^ + | precision not allowed +-- Error: tests/neg/f-interpolator-neg.scala:45:7 ---------------------------------------------------------------------- +45 | f"% Any } // error + + type Baz[T] = T match { case String => Wildcard } // error + + trait Wrapped[T] + trait WrappedWildcard extends Wrapped[Wildcard] // error + trait WrappedLikeWildcard[W <: Wildcard] extends Wrapped[W] // error + + class Box(w: Wildcard) // error + + // The inferred result type also gets reported even though it's not written explicitly + def castToWildcard(x: Any) = x.asInstanceOf[Wildcard] // error // error + + // 2 errors reported because at the stage of compilation when this is checked (already after some code transformations) the illegal type is referred to more than once + val selectable: Any = new Selectable.WithoutPreciseParameterTypes {} // error // error diff --git a/tests/neg/forwardCompat-invalidSince.scala b/tests/neg/forwardCompat-invalidSince.scala new file mode 100644 index 000000000000..dfd128e84061 --- /dev/null +++ b/tests/neg/forwardCompat-invalidSince.scala @@ -0,0 +1,21 @@ +package scala.test + +import annotation.since + +@since("") // error +val x = 1 + +@since("1.2.3.4") // error +val y = "abc" + +@since("xyz") // error +class Foo + +@since("-3") // error +trait Bar + +@since("3.0.2") // error +type Baz = Int + +@since("3.0 ") // error +given String = "" \ No newline at end of file diff --git a/tests/neg/forwardCompat-leakingImplicit/Test_r3.0.scala b/tests/neg/forwardCompat-leakingImplicit/Test_r3.0.scala new file mode 100644 index 000000000000..c9f0835bc053 --- /dev/null +++ b/tests/neg/forwardCompat-leakingImplicit/Test_r3.0.scala @@ -0,0 +1 @@ +val canEq = summon[CanEqual[EmptyTuple, 
EmptyTuple]] // error diff --git a/tests/neg/forwardCompat-nestedSumMirror/Lib_1_r3.0.scala b/tests/neg/forwardCompat-nestedSumMirror/Lib_1_r3.0.scala new file mode 100644 index 000000000000..aaac31229228 --- /dev/null +++ b/tests/neg/forwardCompat-nestedSumMirror/Lib_1_r3.0.scala @@ -0,0 +1,8 @@ +// Adapted from i11050 + +sealed trait TreeValue + +sealed trait SubLevel extends TreeValue + +case class Leaf1(value: String) extends TreeValue +case class Leaf2(value: Int) extends SubLevel diff --git a/tests/neg/forwardCompat-nestedSumMirror/Test_2_c3.0.2.scala b/tests/neg/forwardCompat-nestedSumMirror/Test_2_c3.0.2.scala new file mode 100644 index 000000000000..9fa2e4003002 --- /dev/null +++ b/tests/neg/forwardCompat-nestedSumMirror/Test_2_c3.0.2.scala @@ -0,0 +1,5 @@ +import scala.deriving._ + +object Test: + def main(args: Array[String]): Unit = + println(summon[Mirror.Of[TreeValue]]) // error diff --git a/tests/neg/forwardCompat-rejectNewerTasty/Bar_2_r3.0.scala b/tests/neg/forwardCompat-rejectNewerTasty/Bar_2_r3.0.scala new file mode 100644 index 000000000000..d78004d6dedf --- /dev/null +++ b/tests/neg/forwardCompat-rejectNewerTasty/Bar_2_r3.0.scala @@ -0,0 +1 @@ +def bar = foo // nopos-error diff --git a/tests/neg/forwardCompat-rejectNewerTasty/Foo_1_r3.1.scala b/tests/neg/forwardCompat-rejectNewerTasty/Foo_1_r3.1.scala new file mode 100644 index 000000000000..fe04f0623a1c --- /dev/null +++ b/tests/neg/forwardCompat-rejectNewerTasty/Foo_1_r3.1.scala @@ -0,0 +1 @@ +def foo = 1 diff --git a/tests/neg/gadt-approximation-interaction.scala b/tests/neg/gadt-approximation-interaction.scala index 683d0668f4a2..5f010e4b784d 100644 --- a/tests/neg/gadt-approximation-interaction.scala +++ b/tests/neg/gadt-approximation-interaction.scala @@ -47,7 +47,7 @@ object ImplicitConversion { def foo[T](t: T, ev: T SUB Int) = ev match { case SUB.Refl() => - t ** 2 // error // implementation limitation + t ** 2 } def bar[T](t: T, ev: T SUB Int) = @@ -67,7 +67,7 @@ object 
GivenConversion { def foo[T](t: T, ev: T SUB Int) = ev match { case SUB.Refl() => - t ** 2 // error (implementation limitation) + t ** 2 } def bar[T](t: T, ev: T SUB Int) = diff --git a/tests/neg/gadt-contradictory-pattern.scala b/tests/neg/gadt-contradictory-pattern.scala new file mode 100644 index 000000000000..561c0c23d518 --- /dev/null +++ b/tests/neg/gadt-contradictory-pattern.scala @@ -0,0 +1,13 @@ +object Test { + sealed abstract class Foo[T] + case object Bar1 extends Foo[Int] + case object Bar2 extends Foo[String] + case object Bar3 extends Foo[AnyRef] + + def fail4[T <: AnyRef](xx: (Foo[T], Foo[T])) = xx match { + case (Bar1, Bar1) => () // error // error + case (Bar2, Bar3) => () + case (Bar3, _) => () + } + +} diff --git a/tests/neg/gadt-injectivity-alt-2.scala b/tests/neg/gadt-injectivity-alt-2.scala new file mode 100644 index 000000000000..a48555d557f8 --- /dev/null +++ b/tests/neg/gadt-injectivity-alt-2.scala @@ -0,0 +1,35 @@ +/** A modified version of gadt-injectivity-alt.scala. 
*/ +object test { + + enum SUB[-F, +G] { + case Refl[S]() extends SUB[S, S] + } + + enum KSUB[-F[_], +G[_]] { + case Refl[S[_]]() extends KSUB[S, S] + } + + def foo[F[_], G[_], A]( + keq: (F KSUB Option, Option KSUB F), + ksub: Option KSUB G, + sub: F[Option[A]] SUB G[Option[Int]], + a: A + ) = + keq._1 match { case KSUB.Refl() => + keq._2 match { case KSUB.Refl() => + ksub match { case KSUB.Refl() => + sub match { case SUB.Refl() => + // F = Option + // & G >: Option + // & F[Option[A]] <: G[Option[Int]] + // =X=> + // A <: Int + // + // counterexample: G = [T] => Any + val i: Int = a // error + () + } + } + } + } +} diff --git a/tests/neg/genericTupleMembers.scala b/tests/neg/genericTupleMembers.scala new file mode 100644 index 000000000000..13c70fa73b6f --- /dev/null +++ b/tests/neg/genericTupleMembers.scala @@ -0,0 +1,38 @@ +def Test: Unit = + val tup1 = 1 *: EmptyTuple + val tup2 = 1 *: 2 *: EmptyTuple + val tup3 = 1 *: 2 *: 3 *: EmptyTuple + val tup4 = 1 *: 2 *: 3 *: 4 *: EmptyTuple + val tup5 = 1 *: 2 *: 3 *: 4 *: 5 *: EmptyTuple + val tup22 = 1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: EmptyTuple + val tup23 = 1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: 23 *: EmptyTuple + + tup1._2 // error + + tup2._3 // error + + tup22._23 // error + + tup23._1 // error + tup23._2 // error + tup23._3 // error + tup23._4 // error + tup23._5 // error + tup23._6 // error + tup23._7 // error + tup23._8 // error + tup23._9 // error + tup23._10 // error + tup23._11 // error + tup23._12 // error + tup23._13 // error + tup23._14 // error + tup23._15 // error + tup23._16 // error + tup23._17 // error + tup23._18 // error + tup23._19 // error + tup23._20 // error + tup23._21 // error + tup23._22 // error + tup23._23 // error diff --git a/tests/neg/given-pattern.scala b/tests/neg/given-pattern.scala index 
72f5213c9f06..68e322a2201c 100644 --- a/tests/neg/given-pattern.scala +++ b/tests/neg/given-pattern.scala @@ -4,11 +4,11 @@ class Test { import scala.collection.immutable.{TreeSet, HashSet} def f2[T](x: Ordering[T]) = { - val (given Ordering[T]) = x - new TreeSet[T] // error: no implicit ordering defined for T + val (given Ordering[T]) = x // error: given Ordering[T] not allowed here + new TreeSet[T] // error: no implicit ordering defined for T } def f3[T](x: Ordering[T]) = { - val given Ordering[T] = x - new TreeSet[T] // error: no implicit ordering defined for T + val given Ordering[T] = x // error: given Ordering[T] not allowed here + new TreeSet[T] // error: no implicit ordering defined for T } -} \ No newline at end of file +} diff --git a/tests/neg/hidden.check b/tests/neg/hidden.check new file mode 100644 index 000000000000..759871959340 --- /dev/null +++ b/tests/neg/hidden.check @@ -0,0 +1,6 @@ +-- Error: tests/neg/hidden.scala:6:13 ---------------------------------------------------------------------------------- +6 | summon[Int] // error + | ^ + | no implicit argument of type Int was found for parameter x of method summon in object Predef + | + | Note: given instance given_Int in object A was not considered because it was not imported with `import given`. diff --git a/tests/neg/higher-order-quoted-expressions.scala b/tests/neg/higher-order-quoted-expressions.scala new file mode 100644 index 000000000000..a5d137ff8fc5 --- /dev/null +++ b/tests/neg/higher-order-quoted-expressions.scala @@ -0,0 +1,44 @@ +// Draft of higher order expressions +// --------------------------------- +// A higher order expression is a an expression that may contain unbound variables. +// Unbound variables are listed as arguments of the type lambda of the expression. +// Splicing can only be done on Expr[_ <: Any]. +// `apply methods on higher order expression provide a way to replace the unbound variables. 
+ +// This can be used for HOAS quoted patterns to not return a lambda (see HOAS patterns section in https://infoscience.epfl.ch/record/288718?ln=en). +// The use would be the same as each higher order expression would have an apply method. +// But as it would be an expression, the expression would showable. +// The expression could also be directly transformed into a Term of the reflection API. + +// Question: How to extend this to allow unbound type (Type) in the expression? + +class Expr[+T <: AnyKind] +object Expr: + extension [T, R](hoe: Expr[[_ >: T <: T] =>> R]) def apply(x1: Expr[T]): Expr[R] = ??? + extension [T1, T2, R](hoe: Expr[[_ >: T1 <: T1, _ >: T2 <: T2] =>> R]) def apply(x1: Expr[T1], x2: Expr[T2]): Expr[R] = ??? + // Are lower bounds in lambda parameters necessary? + // How could this be generalized to n arguments? + + +def `'`[T](e: T): Expr[T] = ??? +def `$`[T](e: Expr[T]): T = ??? + +def f(): Unit = + val e: Expr[Int] = ??? + val hoe1: Expr[[T >: Int <: Int] =>> Int] = ??? // assumed to come from HOAS pattern with one unbound variable of type Int + val hoe2: Expr[[T1 >: Int <: Int, T2 >: Int <: Int] =>> Int] = ??? // assumed to come from HOAS pattern with 2 unbound variables of type Int + val e2: Expr[Int] = hoe1(e) + val e3: Expr[Int] = hoe2(e, e) + + { + `$`{e} + `$`{e2} + `$`{e3} + `$`{hoe1(e)} + `$`{hoe2(e, e)} + `$`{hoe1} // error: Found: Expr[[T >: Int <: Int] =>> Int]), Required: Expr[Any] // may contain references to 1 unbound variable + `$`{hoe2} // error // may contain references to 2 unbound variables + } + `'`{1} + `'`{`$`{e}} + `'`{??? 
: ([T >: Int <: Int] =>> Int)} // error: Missing type parameter for [T >: Int <: Int] =>> Int // not a valid quote literal expression diff --git a/tests/pos/i10082.scala b/tests/neg/i10082.scala similarity index 100% rename from tests/pos/i10082.scala rename to tests/neg/i10082.scala diff --git a/tests/neg/i10268.check b/tests/neg/i10268.check index e08571f7463d..abe39586b373 100644 --- a/tests/neg/i10268.check +++ b/tests/neg/i10268.check @@ -2,5 +2,5 @@ 2 | def test[T] = ?//test // error | ^ | Not found: ? - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i10546.scala b/tests/neg/i10546.scala index 3a5f42ec1fdf..e40f7117a3d4 100644 --- a/tests/neg/i10546.scala +++ b/tests/neg/i10546.scala @@ -1,5 +1,5 @@ object test: def times(num : Int)(block : => Unit) : Unit = () - times(10): println("ah") // error: end of statement expected but '(' found // error + times(10): println("ah") // error: end of statement expected but '(' found def foo: Set(Int) = Set(1) \ No newline at end of file diff --git a/tests/neg/i10552a.scala b/tests/neg/i10552a.scala new file mode 100644 index 000000000000..beae787609ad --- /dev/null +++ b/tests/neg/i10552a.scala @@ -0,0 +1,4 @@ +transparent inline def foo[T]: Int = 10 + +def test = + foo[List] // error diff --git a/tests/neg/i10552b.check b/tests/neg/i10552b.check new file mode 100644 index 000000000000..abaf55d2a947 --- /dev/null +++ b/tests/neg/i10552b.check @@ -0,0 +1,24 @@ +-- Error: tests/neg/i10552b.scala:10:17 -------------------------------------------------------------------------------- +10 | println(foo1["hi"]) // error + | ^^^^ + | ("hi" : String) does not conform to bound <: Int +-- Error: tests/neg/i10552b.scala:11:17 -------------------------------------------------------------------------------- +11 | println(foo1[String]) // error + | ^^^^^^ + | String does not conform to bound <: Int +-- Error: 
tests/neg/i10552b.scala:12:17 -------------------------------------------------------------------------------- +12 | println(foo1[Any]) // error + | ^^^ + | Any does not conform to bound <: Int +-- Error: tests/neg/i10552b.scala:13:17 -------------------------------------------------------------------------------- +13 | println(foo1[AnyKind]) // error + | ^^^^^^^ + | AnyKind does not conform to bound <: Int +-- Error: tests/neg/i10552b.scala:15:17 -------------------------------------------------------------------------------- +15 | println(foo2["hi"]) // error + | ^^^^ + | ("hi" : String) does not conform to bound >: Int <: Int +-- Error: tests/neg/i10552b.scala:17:17 -------------------------------------------------------------------------------- +17 | println(foo3[X]) // error + | ^ + | Foo.this.X does not conform to bound <: Any. Note that this type is higher-kinded. diff --git a/tests/neg/i10552b.scala b/tests/neg/i10552b.scala new file mode 100644 index 000000000000..58191c24645d --- /dev/null +++ b/tests/neg/i10552b.scala @@ -0,0 +1,17 @@ +class Foo: + transparent inline def foo1[A <: Int]: Int = valueOf[A] + transparent inline def foo2[A >: Int <: Int]: Int = valueOf[A] + transparent inline def foo3[A]: Int = ??? 
+ + type X >: AnyKind <: AnyKind + + def run = + println(foo1[Int]) + println(foo1["hi"]) // error + println(foo1[String]) // error + println(foo1[Any]) // error + println(foo1[AnyKind]) // error + + println(foo2["hi"]) // error + + println(foo3[X]) // error diff --git a/tests/neg/i10666.check b/tests/neg/i10666.check index 65ac39e451af..a70aa9815dc5 100644 --- a/tests/neg/i10666.check +++ b/tests/neg/i10666.check @@ -1,8 +1,8 @@ -- Error: tests/neg/i10666.scala:8:6 ----------------------------------------------------------------------------------- 8 |class Bar extends Foo { // error | ^ - | class Bar needs to be abstract, since def foo: [T <: B](tx: T): Unit in trait Foo is not defined + | class Bar needs to be abstract, since def foo[T <: B](tx: T): Unit in trait Foo is not defined | (Note that - | parameter T in def foo: [T <: B](tx: T): Unit in trait Foo does not match - | parameter T in def foo: [T <: A](tx: T): Unit in class Bar + | parameter T in def foo[T <: B](tx: T): Unit in trait Foo does not match + | parameter T in def foo[T <: A](tx: T): Unit in class Bar | class B is a subclass of class A, but method parameter types must match exactly.) 
diff --git a/tests/neg/i10757.check b/tests/neg/i10757.check new file mode 100644 index 000000000000..3a46c4e6cc53 --- /dev/null +++ b/tests/neg/i10757.check @@ -0,0 +1,6 @@ +-- [E107] Syntax Error: tests/neg/i10757.scala:4:10 -------------------------------------------------------------------- +4 | case Var(name, _) => name: String // error + | ^^^^^^^^^^^^ + | Wrong number of argument patterns for Var; expected: (String) + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i10757.scala b/tests/neg/i10757.scala new file mode 100644 index 000000000000..d75e79d061fb --- /dev/null +++ b/tests/neg/i10757.scala @@ -0,0 +1,4 @@ +case class Var(name: String) + +@main def f = Var("a") match + case Var(name, _) => name: String // error diff --git a/tests/neg/i11103.scala b/tests/neg/i11103.scala new file mode 100644 index 000000000000..6892f9ad30b2 --- /dev/null +++ b/tests/neg/i11103.scala @@ -0,0 +1,16 @@ +@main def test: Unit = { + class Foo + class Bar + + trait UpBnd[+A] + trait P extends UpBnd[Foo] + + def pmatch[A, T <: UpBnd[A]](s: T): A = s match { + case p: P => + new Foo // error + } + + class UpBndAndB extends UpBnd[Bar] with P + // ClassCastException: Foo cannot be cast to Bar + val x = pmatch(new UpBndAndB) +} diff --git a/tests/neg/i11208.scala b/tests/neg/i11208.scala new file mode 100644 index 000000000000..c00e16b96993 --- /dev/null +++ b/tests/neg/i11208.scala @@ -0,0 +1,8 @@ +import scala.reflect.ClassTag + +@main def run = println(Foo) + +abstract class Bar[T](implicit val thisClassTag: ClassTag[T]) + +class Foo +object Foo extends Bar[Foo] // error \ No newline at end of file diff --git a/tests/neg/i11208a.scala b/tests/neg/i11208a.scala new file mode 100644 index 000000000000..8b2f0917038c --- /dev/null +++ b/tests/neg/i11208a.scala @@ -0,0 +1,6 @@ +class Foo(implicit val foo: Foo) + +object Test extends App { + implicit object Bar extends Foo // error + Bar.foo +} \ No newline at end of file diff --git 
a/tests/neg/i11225.check b/tests/neg/i11225.check index 60805a27b22d..c0f74bc04a14 100644 --- a/tests/neg/i11225.check +++ b/tests/neg/i11225.check @@ -42,4 +42,10 @@ 30 | var x7: Int = uni // error | ^^^ | `uninitialized` can only be used as the right hand side of a mutable field definition - | This location contains code that was inlined from i11225.scala:25 + |-------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i11225.scala:25 +25 | transparent inline def uni = uninitialized + | ^^^^^^^^^^^^^ + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i11437.check b/tests/neg/i11437.check index f327d6a6a8d2..c8d7a2535964 100644 --- a/tests/neg/i11437.check +++ b/tests/neg/i11437.check @@ -2,5 +2,5 @@ 1 |transparent inline def foo = error // error | ^^^^^ | Not found: error - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i11544.check b/tests/neg/i11544.check index b4dba8a072c4..15469f0c37a5 100644 --- a/tests/neg/i11544.check +++ b/tests/neg/i11544.check @@ -7,5 +7,5 @@ | both match arguments ((23 : Int)) | | Note: Overloaded definitions introduced by refinements cannot be resolved - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i11561.check b/tests/neg/i11561.check index b1792511b0d0..96bf1ec6accf 100644 --- a/tests/neg/i11561.check +++ b/tests/neg/i11561.check @@ -9,5 +9,5 @@ 3 | val updateText2 = copy(text = (_: String)) // error | ^^^^^^^^^^^^^^^^^^ | Reassignment to val text - -longer explanation available when 
compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i11694.scala b/tests/neg/i11694.scala new file mode 100644 index 000000000000..67138fd5a7eb --- /dev/null +++ b/tests/neg/i11694.scala @@ -0,0 +1,26 @@ +def test1 = { + def f11: (Int => Int) | Unit = x => x + 1 + def f12: Null | (Int => Int) = x => x + 1 + + def f21: (Int => Int) | Null = x => x + 1 + def f22: Null | (Int => Int) = x => x + 1 + + def f31: (Int => Int) | (Int => Int) = x => x + 1 + def f32: (Int => Int) | (Int => Int) | Unit = x => x + 1 + + def f41: (Int => Int) & (Int => Int) = x => x + 1 + def f42: (Int => Int) & (Int => Int) & Any = x => x + 1 +} + +def test2 = { + def f1: (Int => String) | (Int => Int) | Null = x => x + 1 // error + def f2: (Int => String) | Function[String, Int] | Null = x => "" + x // error + def f3: Function[Int, Int] | Function[String, Int] | Null = x => x + 1 // error + def f4: (Int => Int) & (Int => Int) & Unit = x => x + 1 // error +} + +def test3 = { + import java.util.function.Function + val f1: Function[String, Int] | Unit = x => x.length + val f2: Function[String, Int] | Null = x => x.length +} diff --git a/tests/neg/i11828.check b/tests/neg/i11828.check new file mode 100644 index 000000000000..80824e9b1334 --- /dev/null +++ b/tests/neg/i11828.check @@ -0,0 +1,8 @@ +-- Error: tests/neg/i12828.scala:7:7 ----------------------------------------------------------------------------------- +7 |object Baz extends Bar[Int] // error overriding foo: incompatible type + | ^ + | object creation impossible, since def foo(x: A): Unit in trait Foo is not defined + | (Note that + | parameter A in def foo(x: A): Unit in trait Foo does not match + | parameter Int & String in def foo(x: A & String): Unit in trait Bar + | ) diff --git a/tests/neg/i11897.check b/tests/neg/i11897.check new file mode 100644 index 000000000000..10191735122a --- /dev/null +++ b/tests/neg/i11897.check @@ -0,0 +1,45 @@ +-- Error: 
tests/neg/i11897.scala:11:10 --------------------------------------------------------------------------------- +11 | val (x, given A) = (1, A(23)) // error + | ^^^^^^^ + | given patterns are not allowed in a val definition, + | please bind to an identifier and use an alias given. +-- Error: tests/neg/i11897.scala:12:10 --------------------------------------------------------------------------------- +12 | val (_, given B) = (true, B(false)) // error + | ^^^^^^^ + | given patterns are not allowed in a val definition, + | please bind to an identifier and use an alias given. +-- Error: tests/neg/i11897.scala:13:8 ---------------------------------------------------------------------------------- +13 | val D(given C) = D(C("c")) // error + | ^^^^^^^ + | given patterns are not allowed in a val definition, + | please bind to an identifier and use an alias given. +-- Error: tests/neg/i11897.scala:14:11 --------------------------------------------------------------------------------- +14 | val F(y, given E) = F(47, E(93)) // error + | ^^^^^^^ + | given patterns are not allowed in a val definition, + | please bind to an identifier and use an alias given. +-- Error: tests/neg/i11897.scala:15:11 --------------------------------------------------------------------------------- +15 | val H(z, q @ given G) = H(47, G(101)) // error + | ^^^^^^^^^^^ + | given patterns are not allowed in a val definition, + | please bind to an identifier and use an alias given. 
+-- Error: tests/neg/i11897.scala:16:18 --------------------------------------------------------------------------------- +16 | assert(summon[A] == A(23)) // error + | ^ + | no implicit argument of type A was found for parameter x of method summon in object Predef +-- Error: tests/neg/i11897.scala:17:18 --------------------------------------------------------------------------------- +17 | assert(summon[B] == B(false)) // error + | ^ + | no implicit argument of type B was found for parameter x of method summon in object Predef +-- Error: tests/neg/i11897.scala:18:18 --------------------------------------------------------------------------------- +18 | assert(summon[C] == C("c")) // error + | ^ + | no implicit argument of type C was found for parameter x of method summon in object Predef +-- Error: tests/neg/i11897.scala:19:18 --------------------------------------------------------------------------------- +19 | assert(summon[E] == E(93)) // error + | ^ + | no implicit argument of type E was found for parameter x of method summon in object Predef +-- Error: tests/neg/i11897.scala:20:18 --------------------------------------------------------------------------------- +20 | assert(summon[G] == G(101)) // error + | ^ + | no implicit argument of type G was found for parameter x of method summon in object Predef diff --git a/tests/neg/i11897.scala b/tests/neg/i11897.scala new file mode 100644 index 000000000000..897a0026eecd --- /dev/null +++ b/tests/neg/i11897.scala @@ -0,0 +1,20 @@ +case class A(i: Int) +case class B(b: Boolean) +case class C(s: String) +case class D(c: C) +case class E(i: Int) +case class F(i: Int, e: E) +case class G(i: Int) +case class H(i: Int, e: G) + +def Test = + val (x, given A) = (1, A(23)) // error + val (_, given B) = (true, B(false)) // error + val D(given C) = D(C("c")) // error + val F(y, given E) = F(47, E(93)) // error + val H(z, q @ given G) = H(47, G(101)) // error + assert(summon[A] == A(23)) // error + assert(summon[B] == 
B(false)) // error + assert(summon[C] == C("c")) // error + assert(summon[E] == E(93)) // error + assert(summon[G] == G(101)) // error diff --git a/tests/neg/i11938.scala b/tests/neg/i11938.scala new file mode 100644 index 000000000000..e8f4e747915b --- /dev/null +++ b/tests/neg/i11938.scala @@ -0,0 +1,16 @@ +import java.util.function.Function + +object Test { + def foo[V](v: V): Int = 1 + def foo[U](fn: Function[Int, U]): Int = 2 + + def main(args: Array[String]): Unit = { + val f: Int => Int = x => x + foo(f) // error + // Like Scala 2, we emit an error here because the Function1 argument was + // deemed SAM-convertible to Function, even though it's not a lambda + // expression and therefore not convertible. If we wanted to support this, + // we would have to tweak TestApplication#argOK to look at the shape of + // `arg` and turn off SAM conversions when it's a non-closure tree. + } +} diff --git a/tests/neg/i12049.check b/tests/neg/i12049.check new file mode 100644 index 000000000000..44b8897605f0 --- /dev/null +++ b/tests/neg/i12049.check @@ -0,0 +1,77 @@ +-- [E007] Type Mismatch Error: tests/neg/i12049.scala:6:16 ------------------------------------------------------------- +6 |val x: String = ??? : M[B] // error + | ^^^^^^^^^^ + | Found: M[B] + | Required: String + | + | Note: a match type could not be fully reduced: + | + | trying to reduce M[B] + | failed since selector B + | does not match case A => Int + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case B => String + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg/i12049.scala:14:23 --------------------------------------------------------------------------------- +14 |val y3: String = ??? 
: Last[Int *: Int *: Boolean *: String *: EmptyTuple] // error + | ^ + | Match type reduction failed since selector EmptyTuple.type + | matches none of the cases + | + | case _ *: _ *: t => Last[t] + | case t *: EmptyTuple => t +-- Error: tests/neg/i12049.scala:22:26 --------------------------------------------------------------------------------- +22 |val z3: (A, B, A) = ??? : Reverse[(A, B, A)] // error + | ^ + | Match type reduction failed since selector A *: EmptyTuple.type + | matches none of the cases + | + | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] + | case EmptyTuple => EmptyTuple +-- Error: tests/neg/i12049.scala:24:20 --------------------------------------------------------------------------------- +24 |val _ = summon[M[B]] // error + | ^ + | no implicit argument of type M[B] was found for parameter x of method summon in object Predef + | + | Note: a match type could not be fully reduced: + | + | trying to reduce M[B] + | failed since selector B + | does not match case A => Int + | and cannot be shown to be disjoint from it either. 
+ | Therefore, reduction cannot advance to the remaining case + | + | case B => String +-- Error: tests/neg/i12049.scala:25:26 --------------------------------------------------------------------------------- +25 |val _ = summon[String =:= Last[Int *: Int *: Boolean *: String *: EmptyTuple]] // error + | ^ + | Match type reduction failed since selector EmptyTuple.type + | matches none of the cases + | + | case _ *: _ *: t => Last[t] + | case t *: EmptyTuple => t +-- Error: tests/neg/i12049.scala:26:29 --------------------------------------------------------------------------------- +26 |val _ = summon[(A, B, A) =:= Reverse[(A, B, A)]] // error + | ^ + | Match type reduction failed since selector A *: EmptyTuple.type + | matches none of the cases + | + | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] + | case EmptyTuple => EmptyTuple +-- [E008] Not Found Error: tests/neg/i12049.scala:28:21 ---------------------------------------------------------------- +28 |val _ = (??? : M[B]).length // error + | ^^^^^^^^^^^^^^^^^^^ + | value length is not a member of M[B] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce M[B] + | failed since selector B + | does not match case A => Int + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case B => String diff --git a/tests/neg/i12049.scala b/tests/neg/i12049.scala new file mode 100644 index 000000000000..e69ac9e7a217 --- /dev/null +++ b/tests/neg/i12049.scala @@ -0,0 +1,29 @@ +trait A +trait B +type M[X] = X match + case A => Int + case B => String +val x: String = ??? : M[B] // error + +type Last[X <: Tuple] = X match + case _ *: _ *: t => Last[t] + case t *: EmptyTuple => t + +val y1: Int = ??? : Last[Int *: EmptyTuple] +val y2: String = ??? : Last[Int *: Boolean *: String *: EmptyTuple] +val y3: String = ??? 
: Last[Int *: Int *: Boolean *: String *: EmptyTuple] // error + +type Reverse[X <: Tuple] = X match + case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] + case EmptyTuple => EmptyTuple + +val z1: (B, A) = ??? : Reverse[(A, B)] +val z2: (B, A, B, A) = ??? : Reverse[(A, B, A, B)] +val z3: (A, B, A) = ??? : Reverse[(A, B, A)] // error + +val _ = summon[M[B]] // error +val _ = summon[String =:= Last[Int *: Int *: Boolean *: String *: EmptyTuple]] // error +val _ = summon[(A, B, A) =:= Reverse[(A, B, A)]] // error + +val _ = (??? : M[B]).length // error + diff --git a/tests/neg/i12116.scala b/tests/neg/i12116.scala new file mode 100644 index 000000000000..4f07f4c3db48 --- /dev/null +++ b/tests/neg/i12116.scala @@ -0,0 +1,15 @@ +import compiletime.erasedValue + +object test1: + + transparent inline def length[T]: Int = + erasedValue[T] match + case _: (h *: t) => 1 + length[t] + case _: EmptyTuple => 0 + + transparent inline def foo(): Int = 1 + foo() + + val y = length[(1, 2, 3)] // error + val x = foo() // error + + diff --git a/tests/neg/i12116a.scala b/tests/neg/i12116a.scala new file mode 100644 index 000000000000..4407e42e649b --- /dev/null +++ b/tests/neg/i12116a.scala @@ -0,0 +1,15 @@ +import compiletime.erasedValue + +object test1: + + inline def length[T]: Int = + erasedValue[T] match + case _: (h *: t) => 1 + length[t] + case _: EmptyTuple => 0 + + inline def foo(): Int = 1 + foo() + + val y = length[(1, 2, 3)] // error + val x = foo() // error + + diff --git a/tests/neg/i12150.check b/tests/neg/i12150.check new file mode 100644 index 000000000000..60f559cde474 --- /dev/null +++ b/tests/neg/i12150.check @@ -0,0 +1,12 @@ +-- [E018] Syntax Error: tests/neg/i12150.scala:1:13 -------------------------------------------------------------------- +1 |def f: Unit = // error + | ^ + | expression expected but end found + | + | longer explanation available when compiling with `-explain` +-- [E129] Potential Issue Warning: tests/neg/i12150.scala:1:11 
--------------------------------------------------------- +1 |def f: Unit = // error + | ^ + | A pure expression does nothing in statement position; you may be omitting necessary parentheses + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i12150.scala b/tests/neg/i12150.scala new file mode 100644 index 000000000000..06fb5714f27d --- /dev/null +++ b/tests/neg/i12150.scala @@ -0,0 +1,3 @@ +def f: Unit = // error + +end f diff --git a/tests/neg/i12169.scala b/tests/neg/i12169.scala new file mode 100644 index 000000000000..78507c369b1c --- /dev/null +++ b/tests/neg/i12169.scala @@ -0,0 +1,37 @@ +object Var: + class Expanded[T <: Txn[T], B] extends Form[T] + +trait Txn[T <: Txn[T]] + +trait Form[T] + +class TT extends Txn[TT] + +private final class FlatVarCellView[T <: Txn[T], B]( + firstVr : Option[Var.Expanded[TT, B]] +) + +def Test = + val opt: Option[Form[TT]] = ??? + val firstVr = opt match + case Some(ex: Var.Expanded[TT, _]) => Some(ex) + case _ => None + new FlatVarCellView(firstVr) // error + // Found: (firstVr : Option[Var.Expanded[TT, ?]]) + // Required: Option[Var.Expanded[TT, B]] + // + // where: B is a type variable + // + // Note that we cannot do capture conversion since the `?` does not appear as an argument + // of the a type. It's dubious whether capture conversion for more deeply nested types + // would be sound. 
+ + // Remedy: + opt match + case Some(ex: Var.Expanded[TT, _]) => new FlatVarCellView(Some(ex)) + // here, we instantiate `B` with the unnamed second parameter of `Var.Expanded` + case _ => new FlatVarCellView(None) + opt match + case Some(ex: Var.Expanded[TT, t]) => new FlatVarCellView[TT, t](Some(ex)) + // the same as above, spelt out + case _ => new FlatVarCellView(None) diff --git a/tests/neg/i12177.check b/tests/neg/i12177.check new file mode 100644 index 000000000000..2918dbfe0533 --- /dev/null +++ b/tests/neg/i12177.check @@ -0,0 +1,24 @@ +-- Error: tests/neg/i12177.scala:2:17 ---------------------------------------------------------------------------------- +2 | inline val v = null // error + | ^^^^ + | `inline val` with `null` is not supported. + | + | To inline a `null` consider using `inline def` +-- Error: tests/neg/i12177.scala:4:17 ---------------------------------------------------------------------------------- +4 | inline val u = () // error + | ^^ + | `inline val` of type `Unit` is not supported. + | + | To inline a `Unit` consider using `inline def` +-- Error: tests/neg/i12177.scala:6:18 ---------------------------------------------------------------------------------- +6 | inline val u2 = { println(); () } // error + | ^^^^^^^^^^^^^^^^^ + | `inline val` of type `Unit` is not supported. + | + | To inline a `Unit` consider using `inline def` +-- Error: tests/neg/i12177.scala:7:18 ---------------------------------------------------------------------------------- +7 | inline val u3 = { } // error + | ^^^ + | `inline val` of type `Unit` is not supported. 
+ | + | To inline a `Unit` consider using `inline def` diff --git a/tests/neg/i12177.scala b/tests/neg/i12177.scala new file mode 100644 index 000000000000..cf8d0a7ffe56 --- /dev/null +++ b/tests/neg/i12177.scala @@ -0,0 +1,8 @@ +object Test1 { + inline val v = null // error + inline def d = null + inline val u = () // error + inline def e = () + inline val u2 = { println(); () } // error + inline val u3 = { } // error +} diff --git a/tests/neg/i12196.scala b/tests/neg/i12196.scala new file mode 100644 index 000000000000..91619ad3c387 --- /dev/null +++ b/tests/neg/i12196.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +def qqq(s: String)(using Quotes): Expr[Unit] = '{()} + +inline val InlineVal = "i" +inline def InlineDef = "i" + +inline def withInlineVal = ${ qqq(InlineVal) } +inline def withInlineDef = ${ qqq(InlineDef) } // error +inline def withString = ${ qqq("i") } diff --git a/tests/neg/i12196b.scala b/tests/neg/i12196b.scala new file mode 100644 index 000000000000..3bd606d9b62e --- /dev/null +++ b/tests/neg/i12196b.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +def qqq(s: String)(using Quotes): Expr[Unit] = '{()} + +abstract class Foo: + inline val InlineVal: String + +val foo: Foo = ??? + +inline def withInlineVal = ${ qqq(foo.InlineVal) } // error diff --git a/tests/neg/i12207.scala b/tests/neg/i12207.scala new file mode 100644 index 000000000000..08c1e76f5ecd --- /dev/null +++ b/tests/neg/i12207.scala @@ -0,0 +1,8 @@ +package example + +extension [T](t: T) inline def pi[P <: Tuple](using P): T = ??? 
+ +inline def env[P <: Tuple, T](op: P ?=> T): P ?=> T = op + +@main def Test = + env { pi[String] } // error // error diff --git a/tests/neg/i12208.scala b/tests/neg/i12208.scala new file mode 100644 index 000000000000..5131464430ff --- /dev/null +++ b/tests/neg/i12208.scala @@ -0,0 +1 @@ +val pi: 3.14 = 3 // error diff --git a/tests/neg/i12211.scala b/tests/neg/i12211.scala new file mode 100644 index 000000000000..c7f29b24dc2b --- /dev/null +++ b/tests/neg/i12211.scala @@ -0,0 +1,15 @@ + +import reflect.Selectable.* + +val x: { def f(x: Any): String } = new { def f(x: Any) = x.toString } +val y: { def f(x: String): String } = x // error: type mismatch (different signatures) + +class Sink[A] { def put(x: A): Unit = {} } +class Sink1[A] extends Sink[A] { def put(x: "123") = ??? } + +@main def Test = + println(y.f("abc")) + val a = new Sink[String] + val b: { def put(x: String): Unit } = a // error: type mismatch (different signatures) + b.put("") // gave a NoSuchMethodException: Sink.put(java.lang.String) + val c: Sink[String] = Sink1[String]() diff --git a/tests/neg/i12220.scala b/tests/neg/i12220.scala new file mode 100644 index 000000000000..c1380b8781b5 --- /dev/null +++ b/tests/neg/i12220.scala @@ -0,0 +1,3 @@ +val a: List[Any] = List(List(1,2), List(3,4)) +val _ = for(b <- a ; c <- b.asInstanceOf[List]) { println(c) } // error + diff --git a/tests/neg/i12232.check b/tests/neg/i12232.check new file mode 100644 index 000000000000..2dd7adc02a16 --- /dev/null +++ b/tests/neg/i12232.check @@ -0,0 +1,12 @@ +-- Error: tests/neg/i12232.scala:17:15 --------------------------------------------------------------------------------- +17 | foo(min(3, 4)) // error: works in Scala 2, not in 3 + | ^ + | no implicit argument of type Op[Int, Int, V] was found for parameter op of method min in object Foo + | + | where: V is a type variable with constraint <: Double +-- Error: tests/neg/i12232.scala:19:16 
--------------------------------------------------------------------------------- +19 | foo(minR(3, 4)) // error: works in Scala 2, not in 3 + | ^ + | no implicit argument of type Op[Int, Int, R] was found for parameter op of method minR in object Foo + | + | where: R is a type variable with constraint <: Double diff --git a/tests/neg/i12232.scala b/tests/neg/i12232.scala new file mode 100644 index 000000000000..3ca080a6fe56 --- /dev/null +++ b/tests/neg/i12232.scala @@ -0,0 +1,20 @@ +trait Op[T1, T2, +R] { + def apply(t1: T1, t2: T2): R +} + +object Op { + implicit val compInt: Op[Int, Int, Int] = new Op[Int, Int, Int] { + def apply(x: Int, y: Int) = scala.math.min(x, y) + } +} + +object Foo { + def foo(x: Double) = x + 1.0 + def min[T, U, V](x: T, y: U)(implicit op: Op[T, U, V]): V = op(x, y) + def minInt(x: Int, y: Int)(implicit op: Op[Int, Int, Int]): Int = op(x, y) + def minR[R](x: Int, y: Int)(implicit op: Op[Int, Int, R]): R = op(x, y) + min(3, 4) // works in both + foo(min(3, 4)) // error: works in Scala 2, not in 3 + foo(minInt(3, 4)) // works in both + foo(minR(3, 4)) // error: works in Scala 2, not in 3 +} diff --git a/tests/neg/i12245.scala b/tests/neg/i12245.scala new file mode 100644 index 000000000000..219f73f0702c --- /dev/null +++ b/tests/neg/i12245.scala @@ -0,0 +1,5 @@ +package dotty.tools.dotc.core + +def round(f: Float, digits: Int = 0): Float = ??? +//@scala.annotation.targetName("roundDouble") // does not change anything +def round(d: Double, digits: Int = 0): Double = ??? 
// error diff --git a/tests/neg/i12284.check b/tests/neg/i12284.check new file mode 100644 index 000000000000..7d61acd012a5 --- /dev/null +++ b/tests/neg/i12284.check @@ -0,0 +1,9 @@ +-- [E007] Type Mismatch Error: tests/neg/i12284.scala:6:26 ------------------------------------------------------------- +6 | val y: Vector[F[Any]] = xx // error + | ^^ + | Found: (xx : Vector[Any]) + | Required: Vector[F[Any]] + | + | where: F is a type in method magic with bounds <: [_] =>> Any + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i12284.scala b/tests/neg/i12284.scala new file mode 100644 index 000000000000..11635a1638cb --- /dev/null +++ b/tests/neg/i12284.scala @@ -0,0 +1,7 @@ +trait I[F[_], A] + +def magic[F[_], A](in: I[F, A]): F[A] = + val deps: Vector[I[F, _]] = ??? + val xx = deps.map(i => magic(i)) + val y: Vector[F[Any]] = xx // error + ??? diff --git a/tests/neg/i12299.scala b/tests/neg/i12299.scala new file mode 100644 index 000000000000..424be6cc4fd2 --- /dev/null +++ b/tests/neg/i12299.scala @@ -0,0 +1,24 @@ +object Outer { + + object Inner { + class Bar(x: Int) + object Bar + } + + export Inner.Bar._ + + val _ = apply(2) // error (constructor proxies are not exported) + +} +object Outer2 { + + object Inner { + class Bar(x: Int) + object Bar + } + + export Inner.Bar.apply // error: no eligible member + + val _ = apply(2) // error (constructor proxies are not exported) + +} diff --git a/tests/neg/i12344.scala b/tests/neg/i12344.scala new file mode 100644 index 000000000000..88daf535e699 --- /dev/null +++ b/tests/neg/i12344.scala @@ -0,0 +1,26 @@ +import scala.quoted.* + +class C(using q: Quotes)(i: Int = 1, f: q.reflect.Flags = q.reflect.Flags.EmptyFlags) + +def test1a(using q: Quotes) = new C() // error +def test2a(using q: Quotes) = new C(1) // error +def test3a(using q: Quotes) = new C(1, q.reflect.Flags.Lazy) // error +def test4a(using q: Quotes) = new C(f = q.reflect.Flags.Lazy) // error + +def test1b(using q: 
Quotes) = C() // error +def test2b(using q: Quotes) = C(1) // error +def test3b(using q: Quotes) = C(1, q.reflect.Flags.Lazy) // error +def test4b(using q: Quotes) = C(f = q.reflect.Flags.Lazy) // error + +def test1c(using q: Quotes) = new C(using q)() +def test2c(using q: Quotes) = new C(using q)(1) +def test3c(using q: Quotes) = new C(using q)(1, q.reflect.Flags.Lazy) +def test4c(using q: Quotes) = new C(using q)(f = q.reflect.Flags.Lazy) + +def test1d(using q: Quotes) = C(using q)() +def test2d(using q: Quotes) = C(using q)(1) +def test3d(using q: Quotes) = C(using q)(1, q.reflect.Flags.Lazy) +def test4d(using q: Quotes) = C(using q)(f = q.reflect.Flags.Lazy) + +def test1e(using q: Quotes) = new C()() +def test2e(using q: Quotes) = C()() diff --git a/tests/neg/i12348.check b/tests/neg/i12348.check new file mode 100644 index 000000000000..ccc2b9f7ed00 --- /dev/null +++ b/tests/neg/i12348.check @@ -0,0 +1,8 @@ +-- [E040] Syntax Error: tests/neg/i12348.scala:2:15 -------------------------------------------------------------------- +2 | given inline x: Int = 0 // error + | ^ + | 'with' expected, but identifier found +-- [E040] Syntax Error: tests/neg/i12348.scala:3:10 -------------------------------------------------------------------- +3 |} // error + | ^ + | '}' expected, but eof found diff --git a/tests/neg/i12348.scala b/tests/neg/i12348.scala new file mode 100644 index 000000000000..69fc77fb532e --- /dev/null +++ b/tests/neg/i12348.scala @@ -0,0 +1,3 @@ +object A { + given inline x: Int = 0 // error +} // error \ No newline at end of file diff --git a/tests/neg/i12361.scala b/tests/neg/i12361.scala new file mode 100644 index 000000000000..dfef0321d7c1 --- /dev/null +++ b/tests/neg/i12361.scala @@ -0,0 +1,3 @@ +object Test { + foo = macro Impls . 
foo [ U ] += // error // error +} diff --git a/tests/neg/i12384.scala b/tests/neg/i12384.scala new file mode 100644 index 000000000000..fef2ce018ee3 --- /dev/null +++ b/tests/neg/i12384.scala @@ -0,0 +1,8 @@ +object Nats { + trait Fold { + trait Nat + } + type Inc = Fold { + type Apply[N <: Nat] = Succ // error + } +} \ No newline at end of file diff --git a/tests/neg/i12430.scala b/tests/neg/i12430.scala new file mode 100644 index 000000000000..075c3bba4f90 --- /dev/null +++ b/tests/neg/i12430.scala @@ -0,0 +1,4 @@ +class i0 { + new collection // error + new io // error +} \ No newline at end of file diff --git a/tests/neg/i12486.scala b/tests/neg/i12486.scala new file mode 100644 index 000000000000..8ae605c43bc8 --- /dev/null +++ b/tests/neg/i12486.scala @@ -0,0 +1,10 @@ +val hello: String = "hello" + +object MyObj { + val a: Int = 123 + val b: Double = 456.789 + val c: String = "ABC" +} + +val stringFromSingleton: String = new hello.type() // error: not a class type +val myObjFromSingleton: MyObj.type = new MyObj.type() // error: not a class type diff --git a/tests/neg/i12555.scala b/tests/neg/i12555.scala new file mode 100644 index 000000000000..3a5996159e10 --- /dev/null +++ b/tests/neg/i12555.scala @@ -0,0 +1,6 @@ +trait Noop { + inline def noop: String +} + +inline def boom: String = (??? 
: Noop).noop +def test: Unit = boom // error diff --git a/tests/neg/i12555b.scala b/tests/neg/i12555b.scala new file mode 100644 index 000000000000..5931bda4ba27 --- /dev/null +++ b/tests/neg/i12555b.scala @@ -0,0 +1,18 @@ +trait Noop[T]: + //note: making this "not inline" fixes the result + inline def noop(fa: T): T + +object Noop { + inline def noop[T](alg: T)(using n: Noop[T]): T = n.noop(alg) +} + +import Noop.* + +final case class User(name: String, age: Int) + +inline given Noop[User] = a => a + +val u = User("hello", 45) + +@main +def run = println(Noop.noop(u)) // error diff --git a/tests/neg/i12555c.scala b/tests/neg/i12555c.scala new file mode 100644 index 000000000000..9d2a10aff4c9 --- /dev/null +++ b/tests/neg/i12555c.scala @@ -0,0 +1,4 @@ +trait Noop { + inline def noop: String +} +def test2: Unit = (??? : Noop).noop // error diff --git a/tests/neg/i12557.scala b/tests/neg/i12557.scala new file mode 100644 index 000000000000..7e2d301e2662 --- /dev/null +++ b/tests/neg/i12557.scala @@ -0,0 +1,14 @@ +package example + +abstract class X[P <: Product](using val m: scala.deriving.Mirror.ProductOf[P]) { + def unapply(p: P): m.MirroredElemTypes = ??? 
+} + +case class A(a: Int) +object A extends X[A] // error + +object Main { + def main(args: Array[String]): Unit = { + A.unapply(A(2)) + } +} diff --git a/tests/neg/i12591.check b/tests/neg/i12591.check new file mode 100644 index 000000000000..694eb68e3299 --- /dev/null +++ b/tests/neg/i12591.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/i12591/Inner.scala:12:31 --------------------------------------------------------------------------- +12 |val badSummon = summon[TC[Bar]] // error here + | ^ + |ambiguous implicit arguments: both outer.inner.Foo.ofFoo and outer.Foo.ofFoo match type outer.inner.Foo.TC[outer.Bar] of parameter x of method summon in object Predef diff --git a/tests/neg/i12591/Inner.scala b/tests/neg/i12591/Inner.scala new file mode 100644 index 000000000000..aae9bd5b9234 --- /dev/null +++ b/tests/neg/i12591/Inner.scala @@ -0,0 +1,13 @@ +package outer +package inner + +sealed trait Foo +object Foo: + trait TC[T] + given ofFoo[T <: Foo]: TC[T] = ??? + trait Bar extends Foo + +import Foo.TC +//Adding import Foo.Bar resolves the issue +val badSummon = summon[TC[Bar]] // error here + diff --git a/tests/neg/i12591/Outer.scala b/tests/neg/i12591/Outer.scala new file mode 100644 index 000000000000..5c810ec785fe --- /dev/null +++ b/tests/neg/i12591/Outer.scala @@ -0,0 +1,3 @@ +package outer +export inner.Foo +export Foo.Bar diff --git a/tests/neg/i12605.scala b/tests/neg/i12605.scala new file mode 100644 index 000000000000..bcd7aedc606b --- /dev/null +++ b/tests/neg/i12605.scala @@ -0,0 +1,4 @@ +object Foo: + def joe(): List[(Int, Int)] = + List((2, 3), (3, 4)).filter case (a, b) => b > a // error // error +// error \ No newline at end of file diff --git a/tests/neg/i12606.scala b/tests/neg/i12606.scala new file mode 100644 index 000000000000..53f88387af36 --- /dev/null +++ b/tests/neg/i12606.scala @@ -0,0 +1,8 @@ +import scala.quoted._ + +trait ReflectModule { + implicit def q: Quotes + import quotes.reflect._ // error + + def foo(x: TypeRepr): Unit = ??? 
+} \ No newline at end of file diff --git a/tests/neg/i12664.scala b/tests/neg/i12664.scala new file mode 100644 index 000000000000..5e4b784475aa --- /dev/null +++ b/tests/neg/i12664.scala @@ -0,0 +1,14 @@ +trait Step { + type Self + type Next[A] +} + +trait DynamicNextStep { + type OneOf[Self, Next[_]] + def apply(s: Step): OneOf[s.Self, s.Next] +} + +object X extends DynamicNextStep { + override type OneOf[Self] = Self // error + override def apply(s: Step) = ??? +} diff --git a/tests/neg/i12729.scala b/tests/neg/i12729.scala new file mode 100644 index 000000000000..50f8f06d9bce --- /dev/null +++ b/tests/neg/i12729.scala @@ -0,0 +1,7 @@ +class Test(i: Int): + val `` = "init" // error: Illegal backquoted identifier: `` and `` are forbidden + val `` = "clinit" // error: Illegal backquoted identifier: `` and `` are forbidden + class ``: // error: Illegal backquoted identifier: `` and `` are forbidden + def ``(in: String) = ??? // error: Illegal backquoted identifier: `` and `` are forbidden + class ``: // error: Illegal backquoted identifier: `` and `` are forbidden + def ``(in: String) = ??? 
// error: Illegal backquoted identifier: `` and `` are forbidden diff --git a/tests/neg/i12736a.scala b/tests/neg/i12736a.scala new file mode 100644 index 000000000000..509465ea3f07 --- /dev/null +++ b/tests/neg/i12736a.scala @@ -0,0 +1,6 @@ +object Test { + def apply[S](r: Any): Any = r + + def test = + (x: Int) => Test(doesntexist, x) // error +} diff --git a/tests/neg/i12736b.scala b/tests/neg/i12736b.scala new file mode 100644 index 000000000000..c7db2f7a92ea --- /dev/null +++ b/tests/neg/i12736b.scala @@ -0,0 +1,6 @@ +object Test { + def apply[S](r: Any)(using DoesntExist): Any = r // error + + def test(o: Option[Any]) = + o.map(x => Test(doesntExist, x)) // error +} diff --git a/tests/neg/i12754.scala b/tests/neg/i12754.scala new file mode 100644 index 000000000000..ee29f8f97885 --- /dev/null +++ b/tests/neg/i12754.scala @@ -0,0 +1,15 @@ +transparent inline def transform(inline a: Any): Any = inline a match { + case x: Byte => x + case x: Short => x + case x: Int => x + case x: Long => x + case x: Float => x + case x: Double => x + case _ => a +} + +inline def lt(inline a: Any, inline b: Double): Boolean = transform(a) < b // error + +def test = { + println(lt(0, 5)) +} diff --git a/tests/neg/i12802.scala b/tests/neg/i12802.scala new file mode 100644 index 000000000000..a73a4193f120 --- /dev/null +++ b/tests/neg/i12802.scala @@ -0,0 +1,12 @@ +trait M: + type X + object X: + def foo(): X = ??? 
+ +transparent inline def m(using m: M): m.type = m + +def Test1 = + given M = new M{} + import m.* // error: no implicit argument of type M was found + val x: X = X.foo() + println(x) diff --git a/tests/neg/i12828.check b/tests/neg/i12828.check new file mode 100644 index 000000000000..070633fc35b3 --- /dev/null +++ b/tests/neg/i12828.check @@ -0,0 +1,8 @@ +-- Error: tests/neg/i12828.scala:7:7 ----------------------------------------------------------------------------------- +7 |object Baz extends Bar[Int] // error: not implemented + | ^ + | object creation impossible, since def foo(x: A): Unit in trait Foo is not defined + | (Note that + | parameter A in def foo(x: A): Unit in trait Foo does not match + | parameter Int & String in def foo(x: A & String): Unit in trait Bar + | ) diff --git a/tests/neg/i12828.scala b/tests/neg/i12828.scala new file mode 100644 index 000000000000..d8d099b71d08 --- /dev/null +++ b/tests/neg/i12828.scala @@ -0,0 +1,9 @@ +trait Foo[A]: + def foo(x: A): Unit + +trait Bar[A] extends Foo[A]: + def foo(x: A & String): Unit = println(x.toUpperCase) + +object Baz extends Bar[Int] // error: not implemented + +@main def run() = Baz.foo(42) diff --git a/tests/neg/i12828c.scala b/tests/neg/i12828c.scala new file mode 100644 index 000000000000..d36e7a719984 --- /dev/null +++ b/tests/neg/i12828c.scala @@ -0,0 +1,12 @@ +abstract class Foo[A] { + def foo(x: A): Unit +} +abstract class Bar[A] extends Foo[A] { + def foo(x: A with String): Unit = println(x.toUpperCase) +} +object Baz extends Bar[Int] // error: not implemented (same as Scala 2) + // Scala 2 gives: object creation impossible. 
Missing implementation for `foo` + +object Test { + def main(args: Array[String]) = Baz.foo(42) +} diff --git a/tests/neg/i12828d.scala b/tests/neg/i12828d.scala new file mode 100644 index 000000000000..45a95501835d --- /dev/null +++ b/tests/neg/i12828d.scala @@ -0,0 +1,18 @@ +trait A[X] { + def foo(x: X): Unit = + println("A.foo") +} +trait B[X] extends A[X] { + def foo(x: Int): Unit = + println("B.foo") +} +object C extends B[Int] // error: conflicting members + // Scala 2: same + +object Test { + def main(args: Array[String]) = { + C.foo(1) + val a: A[Int] = C + a.foo(1) + } +} \ No newline at end of file diff --git a/tests/neg/i12941.scala b/tests/neg/i12941.scala new file mode 100644 index 000000000000..0948cf7ce545 --- /dev/null +++ b/tests/neg/i12941.scala @@ -0,0 +1,28 @@ +object A: + def myFun(op: String ?=> Unit) = () + +@main def func: Unit = + A.myFun { + val res: String = summon[String] + println(ress) // error + } + +class I: + def runSth: Int = 1 + +abstract class A: + def myFun(op: I ?=> Unit) = + op(using I()) + 1 + +class B extends A + +def assertEquals(x: String, y: Int, z: Int): Unit = () + +@main def hello: Unit = + + B().myFun { + val res = summon[I].runSth + assertEquals("", 1, res, "asd") // error + println("Hello!") + } \ No newline at end of file diff --git a/tests/neg/i12986a/Bar.scala b/tests/neg/i12986a/Bar.scala new file mode 100644 index 000000000000..fcc12aca6259 --- /dev/null +++ b/tests/neg/i12986a/Bar.scala @@ -0,0 +1,2 @@ +class Bar(i: Int): + inline def this() = this(0) // error diff --git a/tests/neg/i12986a/Test.scala b/tests/neg/i12986a/Test.scala new file mode 100644 index 000000000000..84399837c008 --- /dev/null +++ b/tests/neg/i12986a/Test.scala @@ -0,0 +1 @@ +val bar = new Bar() diff --git a/tests/neg/i12986b.scala b/tests/neg/i12986b.scala new file mode 100644 index 000000000000..d8de9dbfb51c --- /dev/null +++ b/tests/neg/i12986b.scala @@ -0,0 +1,4 @@ +class Bar(i: Int): + transparent inline def this() = this(0) // error 
+ +val bar = Bar() diff --git a/tests/neg/i13044.check b/tests/neg/i13044.check new file mode 100644 index 000000000000..c5584aadf767 --- /dev/null +++ b/tests/neg/i13044.check @@ -0,0 +1,146 @@ +-- Error: tests/neg/i13044.scala:50:40 --------------------------------------------------------------------------------- +50 | implicit def typeSchema: Schema[A] = Schema.gen // error // error + | ^^^^^^^^^^ + | given instance gen is declared as `inline`, but was not inlined + | + | Try increasing `-Xmax-inlines` above 32 + |-------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:17 +17 | val builder = summonInline[Schema[t]].asInstanceOf[Schema[Any]] + | ^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:17 +31 | lazy val fields = recurse[m.MirroredElemTypes] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:17 +37 | inline given gen[A]: Schema[A] = derived + | ^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:17 +17 | val builder = summonInline[Schema[t]].asInstanceOf[Schema[Any]] + | ^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:17 +31 | lazy val fields = recurse[m.MirroredElemTypes] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |- - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:17 +37 | inline given gen[A]: Schema[A] = derived + | ^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:17 +17 | val builder = summonInline[Schema[t]].asInstanceOf[Schema[Any]] + | ^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:17 +31 | lazy val fields = recurse[m.MirroredElemTypes] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:17 +37 | inline given gen[A]: Schema[A] = derived + | ^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:17 +17 | val builder = summonInline[Schema[t]].asInstanceOf[Schema[Any]] + | ^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:17 +31 | lazy val fields = recurse[m.MirroredElemTypes] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:17 +37 | inline given gen[A]: Schema[A] = derived + | ^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:17 +17 | val 
builder = summonInline[Schema[t]].asInstanceOf[Schema[Any]] + | ^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:17 +18 | builder :: recurse[ts] + | ^^^^^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:17 +31 | lazy val fields = recurse[m.MirroredElemTypes] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:17 +37 | inline given gen[A]: Schema[A] = derived + | ^^^^^^^ + -------------------------------------------------------------------------------------------------------------------- +-- Error: tests/neg/i13044.scala:50:40 --------------------------------------------------------------------------------- +50 | implicit def typeSchema: Schema[A] = Schema.gen // error // error + | ^^^^^^^^^^ + | method recurse is declared as `inline`, but was not inlined + | + | Try increasing `-Xmax-inlines` above 32 + |-------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:18 +18 | builder :: recurse[ts] + | ^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:18 +31 | lazy val fields = recurse[m.MirroredElemTypes] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:18 +37 | inline given gen[A]: Schema[A] = derived + | ^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:18 +17 | val builder = summonInline[Schema[t]].asInstanceOf[Schema[Any]] + | ^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:18 +31 | lazy val fields = recurse[m.MirroredElemTypes] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:18 +37 | inline given gen[A]: Schema[A] = derived + | ^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:18 +17 | val builder = summonInline[Schema[t]].asInstanceOf[Schema[Any]] + | ^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:18 +31 | lazy val fields = recurse[m.MirroredElemTypes] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:18 +37 | inline given gen[A]: Schema[A] = derived + | ^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:18 +17 | val builder = summonInline[Schema[t]].asInstanceOf[Schema[Any]] + | ^ + |- - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:18 +31 | lazy val fields = recurse[m.MirroredElemTypes] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:18 +37 | inline given gen[A]: Schema[A] = derived + | ^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:18 +17 | val builder = summonInline[Schema[t]].asInstanceOf[Schema[Any]] + | ^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:18 +18 | builder :: recurse[ts] + | ^^^^^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:18 +31 | lazy val fields = recurse[m.MirroredElemTypes] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13044.scala:18 +37 | inline given gen[A]: Schema[A] = derived + | ^^^^^^^ + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i13044.scala b/tests/neg/i13044.scala new file mode 100644 index 000000000000..081b642c604c --- /dev/null +++ b/tests/neg/i13044.scala @@ -0,0 +1,51 @@ +import scala.deriving.Mirror +import scala.compiletime._ + +trait Schema[T] { + def build: T +} + +object Schema extends SchemaDerivation { + implicit lazy val int: Schema[Int] = 
??? + implicit def option[A](implicit ev: Schema[A]): Schema[Option[A]] = ??? +} + +trait SchemaDerivation { + inline def recurse[A <: Tuple]: List[Schema[Any]] = + inline erasedValue[A] match { + case _: (t *: ts) => + val builder = summonInline[Schema[t]].asInstanceOf[Schema[Any]] + builder :: recurse[ts] + case _: EmptyTuple => Nil + } + + inline def derived[A]: Schema[A] = + inline summonInline[Mirror.Of[A]] match { + case m: Mirror.SumOf[A] => + lazy val subTypes = recurse[m.MirroredElemTypes] + new Schema[A] { + def build: A = ??? + } + + case m: Mirror.ProductOf[A] => + lazy val fields = recurse[m.MirroredElemTypes] + new Schema[A] { + def build: A = ??? + } + } + + inline given gen[A]: Schema[A] = derived +} + +case class H(i: Int) +case class G(h: H) +case class F(g: G) +case class E(f: Option[F]) +case class D(e: E) +case class C(d: D) +case class B(c: C) +case class A(a: A, b: B) + +object TestApp { + implicit def typeSchema: Schema[A] = Schema.gen // error // error +} diff --git a/tests/neg/i13075.scala b/tests/neg/i13075.scala new file mode 100644 index 000000000000..2a5ff70c0481 --- /dev/null +++ b/tests/neg/i13075.scala @@ -0,0 +1,40 @@ +object Implementing_Tuples: + + sealed trait Tup + case class ConsTup[T, H <: Tup](head: T, tail: H) extends Tup + case object EmptyTup extends Tup + + val *: = ConsTup // for unapply + type *:[H, T <: Tup] = ConsTup[H, T] // for type matching + type EmptyTup = EmptyTup.type // for type matching + + extension [H](head: H) + def *:[T <: Tup](tail: T) = ConsTup(head, tail) + + type Fold[T <: Tup, Seed, F[_,_]] = T match + case EmptyTup => Seed + case h *: t => Fold[t, F[Seed, h], F] + + extension [T <: Tup](v: T) + def fold[Seed, F[_,_]](seed: Seed)( + fn: [C, Acc] => (C, Acc) => F[C, Acc] + ): Fold[T, Seed, F] = + (v match + case EmptyTup => seed + case h *: t => t.fold(fn(h, seed))(fn) + ).asInstanceOf[Fold[T, Seed, F]] + + extension [T <: Tup](v: T) def reversed: Tup = + v.fold[EmptyTup, [C, Acc] =>> Acc match { + 
case h *: t => C *: h *: t + }](EmptyTup)( + [C, Acc] => (c: C, acc: Acc) => acc match + case _@(_ *: _) => c *: acc // error + ) + + @main def testProperFold = + val t = (1 *: '2' *: "foo" *: EmptyTup) + val reversed: (String *: Char *: Int *: EmptyTup) = t.reversed // error + println(reversed) + +end Implementing_Tuples \ No newline at end of file diff --git a/tests/neg/i13076.check b/tests/neg/i13076.check new file mode 100644 index 000000000000..641bf5aa2677 --- /dev/null +++ b/tests/neg/i13076.check @@ -0,0 +1,10 @@ +-- Error: tests/neg/i13076.scala:6:6 ----------------------------------------------------------------------------------- +6 |class C[X, Y] extends T[X, Y] {} // error + | ^ + | class C needs to be abstract, since: + | it has 2 unimplemented members. + | /** As seen from class C, the missing signatures are as follows. + | * For convenience, these are usable as stub implementations. + | */ + | def f(a: X): Y = ??? + | def g(a: X, b: Y): Unit = ??? diff --git a/tests/neg/i13076.scala b/tests/neg/i13076.scala new file mode 100644 index 000000000000..fa37123995c9 --- /dev/null +++ b/tests/neg/i13076.scala @@ -0,0 +1,6 @@ +trait T[A, B] { + def f(a: A): B + def g(a: A, b: B): Unit +} + +class C[X, Y] extends T[X, Y] {} // error diff --git a/tests/neg/i13089.check b/tests/neg/i13089.check new file mode 100644 index 000000000000..ed0f9d5effd9 --- /dev/null +++ b/tests/neg/i13089.check @@ -0,0 +1,12 @@ +-- [E161] Naming Error: tests/neg/i13089.scala:6:8 --------------------------------------------------------------------- +6 | def fails : Unit = {} // error + | ^^^^^^^^^^^^^^^^^^^^^ + | fails is already defined as method fails in tests/neg/i13089.scala + | + | Note that overloaded methods must all be defined in the same group of toplevel definitions +-- [E161] Naming Error: tests/neg/i13089.scala:8:6 --------------------------------------------------------------------- +8 | def baz(x: String): Boolean = true // error + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 
+ | baz is already defined as method baz in tests/neg/i13089.scala + | + | Note that overloaded methods must all be defined in the same group of toplevel definitions diff --git a/tests/neg/i13089.scala b/tests/neg/i13089.scala new file mode 100644 index 000000000000..5b91996de837 --- /dev/null +++ b/tests/neg/i13089.scala @@ -0,0 +1,21 @@ +package pkg: + + trait Bar + + extension (bar : Bar) + def fails : Unit = {} // error + + def baz(x: String): Boolean = true // error + +package pkg: + + trait Foo + extension (foo : Foo) + def fails : Unit = {} + def works : Unit = {} + + extension (bar : Bar) + def works : Unit = {} + + def baz(x: Int): Boolean = true + diff --git a/tests/neg/i13101.scala b/tests/neg/i13101.scala new file mode 100644 index 000000000000..f32ba35ef2c8 --- /dev/null +++ b/tests/neg/i13101.scala @@ -0,0 +1,24 @@ +trait Vehicle +trait Car extends Vehicle + +trait Encoder[A] +object Encoder { + implicit val encodeVehicle: Encoder[Vehicle] = ??? + implicit val encodeCar: Encoder[Car] = ??? +} + +trait Route +trait Printer +trait Marshaller[-A] // must be contravariant + +object Test { + implicit def marshaller[A: Encoder](implicit p: Printer = ???): Marshaller[A] = ??? + // the `Printer` implicit arg seems to be necessary, either with default value, or no implicit in scope + + def foo[A](v: A)(implicit m: Marshaller[A]): Route = ??? + + val route: Route = identity { + val f: (Car => Route) => Route = ??? 
// ok if s/Car/Vehicle/ + f(vehicle => foo(vehicle)) // error: ambiguous implicit + } +} diff --git a/tests/neg/i13114/A.scala b/tests/neg/i13114/A.scala new file mode 100644 index 000000000000..30d3cecc795f --- /dev/null +++ b/tests/neg/i13114/A.scala @@ -0,0 +1,3 @@ +def f = 42 + +class C diff --git a/tests/neg/i13114/B.scala b/tests/neg/i13114/B.scala new file mode 100644 index 000000000000..be778f749ace --- /dev/null +++ b/tests/neg/i13114/B.scala @@ -0,0 +1,7 @@ +class D2 extends C + +package p { + class D extends C // error: not found + + @main def test = println(new D) +} diff --git a/tests/neg/i13218.scala b/tests/neg/i13218.scala new file mode 100644 index 000000000000..e033a3637461 --- /dev/null +++ b/tests/neg/i13218.scala @@ -0,0 +1,14 @@ +class TagTest extends AnyFreeSpec: + "a" - { + "b" in { + class TF[F[_]] + meow // error + } + } + +trait AnyFreeSpec: + protected class Wrapper(s: String): + def -(f: => Unit): Unit = ??? + def in(f: => Unit): Unit = ??? + + implicit def wrap(s: String): Wrapper = ??? diff --git a/tests/neg/i13320.check b/tests/neg/i13320.check new file mode 100644 index 000000000000..1e336d8fa7bf --- /dev/null +++ b/tests/neg/i13320.check @@ -0,0 +1,12 @@ +-- [E008] Not Found Error: tests/neg/i13320.scala:8:24 ----------------------------------------------------------------- +8 | type t = Option[Foo.Boo] // error + | ^^^^^^^ + | type Boo is not a member of object Foo - did you mean Foo.Boo.type? +-- [E008] Not Found Error: tests/neg/i13320.scala:4:11 ----------------------------------------------------------------- +4 |var x: Foo.Booo = Foo.Booo // error // error + | ^^^^^^^^ + | type Booo is not a member of object Foo - did you mean Foo.Boo.type? +-- [E008] Not Found Error: tests/neg/i13320.scala:4:22 ----------------------------------------------------------------- +4 |var x: Foo.Booo = Foo.Booo // error // error + | ^^^^^^^^ + | value Booo is not a member of object Foo - did you mean Foo.Boo? 
\ No newline at end of file diff --git a/tests/neg/i13320.scala b/tests/neg/i13320.scala new file mode 100644 index 000000000000..05edd7e17008 --- /dev/null +++ b/tests/neg/i13320.scala @@ -0,0 +1,8 @@ +object Foo: + case object Boo + +var x: Foo.Booo = Foo.Booo // error // error + +object Main: + def main(args: Array[String]) = + type t = Option[Foo.Boo] // error diff --git a/tests/neg/i13340.scala b/tests/neg/i13340.scala new file mode 100644 index 000000000000..1db3f32af52d --- /dev/null +++ b/tests/neg/i13340.scala @@ -0,0 +1,3 @@ +case class Field(name: String, subQuery: Option[Query] = None) +case class Query(fields: Seq[Field]) +val x = Query(Seq(Field("a", subQuery=Some(Query(Seq(Field("b")), Nil)))), Nil) // error diff --git a/tests/neg/i13377/LeakFoo_1.scala b/tests/neg/i13377/LeakFoo_1.scala new file mode 100644 index 000000000000..e8d679f627a3 --- /dev/null +++ b/tests/neg/i13377/LeakFoo_1.scala @@ -0,0 +1,3 @@ +import scala.util.NotGiven +type LeakFoo[T] = core.LeakingFoo[T] +val ok = summon[NotGiven[LeakFoo[1] =:= LeakFoo[2]]] diff --git a/tests/neg/i13377/Test_2.scala b/tests/neg/i13377/Test_2.scala new file mode 100644 index 000000000000..02971c189679 --- /dev/null +++ b/tests/neg/i13377/Test_2.scala @@ -0,0 +1,3 @@ +import scala.util.NotGiven +val x: LeakFoo[1] = ??? 
: LeakFoo[2] // error +val notok = summon[NotGiven[LeakFoo[1] =:= LeakFoo[2]]] // ok diff --git a/tests/neg/i13377/coreFoo_1.scala b/tests/neg/i13377/coreFoo_1.scala new file mode 100644 index 000000000000..cb80a6efb776 --- /dev/null +++ b/tests/neg/i13377/coreFoo_1.scala @@ -0,0 +1,4 @@ +package core + +opaque type Foo[T] <: Int = Int +type LeakingFoo[T] = Foo[T] diff --git a/tests/neg/i13407.scala b/tests/neg/i13407.scala new file mode 100644 index 000000000000..a0f9efdbf173 --- /dev/null +++ b/tests/neg/i13407.scala @@ -0,0 +1,19 @@ +import scala.quoted.Type + +trait Tensor[S <: Tuple] { + def sum[Axis <: Shape: Type](axis: Axis): Tensor[S] = { // error + Tensor.mk + } +} + +object Tensor { + def mk[S <: Tuple]: Tensor[S] = new Tensor {} +} + +object Foo { + val t1: Tensor[("batch", "len", "embed")] = Tensor.mk + def foo(x: Any) = { + + } + foo(foo(t1.sum("len"))) // error +} diff --git a/tests/neg/i13435.check b/tests/neg/i13435.check new file mode 100644 index 000000000000..ee2027a3dfe2 --- /dev/null +++ b/tests/neg/i13435.check @@ -0,0 +1,12 @@ +-- [E029] Pattern Match Exhaustivity Warning: tests/neg/i13435.scala:7:2 ----------------------------------------------- +7 | s match + | ^ + | match may not be exhaustive. 
+ | + | It would fail on pattern case: (_), ((_, _), (_, _)) + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg/i13435.scala:8:10 ---------------------------------------------------------------------------------- +8 | case (dim: Axis, size: Int) => dim // error + | ^^^^^^^^^ + | trait Singleton cannot be used in runtime type tests diff --git a/tests/neg/i13435.scala b/tests/neg/i13435.scala new file mode 100644 index 000000000000..eaff4976eb71 --- /dev/null +++ b/tests/neg/i13435.scala @@ -0,0 +1,8 @@ +type Axis = String&Singleton +type ShapeTuple = Tuple1[(Axis, Int)]|Tuple2[(Axis, Int), (Axis, Int)] +type Shape = (Axis, Int) |ShapeTuple + + +def mkSchema(s: Shape) = + s match + case (dim: Axis, size: Int) => dim // error \ No newline at end of file diff --git a/tests/neg/i13466.check b/tests/neg/i13466.check new file mode 100644 index 000000000000..a15ae059427f --- /dev/null +++ b/tests/neg/i13466.check @@ -0,0 +1,11 @@ +-- Error: tests/neg/i13466.scala:9:6 ----------------------------------------------------------------------------------- +9 |given none: SomeTrait[Finally] with {} // error + | ^ + | object creation impossible, since: + | it has 3 unimplemented members. + | /** As seen from module class none$, the missing signatures are as follows. + | * For convenience, these are usable as stub implementations. + | */ + | def foo: Finally[Unit] = ??? + | def withTV[A]: Finally[A] = ??? + | def withTV2[A, B]: Finally[(A, B)] = ??? 
diff --git a/tests/neg/i13466.scala b/tests/neg/i13466.scala new file mode 100644 index 000000000000..3d18fc3dc637 --- /dev/null +++ b/tests/neg/i13466.scala @@ -0,0 +1,9 @@ +opaque type Finally[A] = A + +trait SomeTrait[F[_]] { + def foo: F[Unit] + def withTV[A]: F[A] + def withTV2[A, B]: F[(A, B)] +} + +given none: SomeTrait[Finally] with {} // error diff --git a/tests/neg/i13487.check b/tests/neg/i13487.check new file mode 100644 index 000000000000..486124661ea4 --- /dev/null +++ b/tests/neg/i13487.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/i13487.scala:6:32 ---------------------------------------------------------------------------------- +6 |case class Foo[A](a: A) derives TC // error + | ^^ + | Foo cannot be unified with the type argument of TC diff --git a/tests/neg/i13487.scala b/tests/neg/i13487.scala new file mode 100644 index 000000000000..e06258087846 --- /dev/null +++ b/tests/neg/i13487.scala @@ -0,0 +1,6 @@ +trait TC[F[_, _[_]]] +object TC { + def derived[F[_, _[_]]]: TC[F] = ??? +} + +case class Foo[A](a: A) derives TC // error diff --git a/tests/neg/i13497.scala b/tests/neg/i13497.scala new file mode 100644 index 000000000000..83f1cd7519b9 --- /dev/null +++ b/tests/neg/i13497.scala @@ -0,0 +1,13 @@ +trait Foo +trait Bar +object Foo: + given (using Bar): Foo = ??? + +object Bug: + def demonstrate: Unit = + Option.empty[Unit].flatMap { _ => + Option.empty[Unit].map { _ => + val foo = summon[Foo] // error: Foo.given_Foo(/* missing */summon[Bar]) + Option.empty[Unit] + } + } diff --git a/tests/neg/i13513.scala b/tests/neg/i13513.scala new file mode 100644 index 000000000000..637f432ca504 --- /dev/null +++ b/tests/neg/i13513.scala @@ -0,0 +1,5 @@ +final case class TwoTypes[F, A](value: A) +class Minimal { + def x[C[_]]: C[Int] = ??? 
+ x[TwoTypes].value // error: Type argument TwoTypes does not conform to upper bound [_] =>> Any +} diff --git a/tests/neg/i13558.check b/tests/neg/i13558.check new file mode 100644 index 000000000000..4c468a854781 --- /dev/null +++ b/tests/neg/i13558.check @@ -0,0 +1,22 @@ +-- [E008] Not Found Error: tests/neg/i13558.scala:23:14 ---------------------------------------------------------------- +23 | println(a.id) // error + | ^^^^ + | value id is not a member of testcode.A. + | An extension method was tried, but could not be fully constructed: + | + | testcode.ExtensionA.id(a) failed with + | + | Reference to id is ambiguous, + | it is both imported by import testcode.ExtensionB._ + | and imported subsequently by import testcode.ExtensionA._ +-- [E008] Not Found Error: tests/neg/i13558.scala:29:14 ---------------------------------------------------------------- +29 | println(a.id) // error + | ^^^^ + | value id is not a member of testcode.A. + | An extension method was tried, but could not be fully constructed: + | + | testcode.ExtensionB.id(a) failed with + | + | Reference to id is ambiguous, + | it is both imported by import testcode.ExtensionA._ + | and imported subsequently by import testcode.ExtensionB._ diff --git a/tests/neg/i13558.scala b/tests/neg/i13558.scala new file mode 100644 index 000000000000..1d4e1c506e43 --- /dev/null +++ b/tests/neg/i13558.scala @@ -0,0 +1,31 @@ +package testcode + +class A + +class B + +object ExtensionA { + extension (self: A) { + def id = "A" + } +} +object ExtensionB { + extension (self: B) { + def id = "B" + } +} + +object Main { + def main1(args: Array[String]): Unit = { + import ExtensionB._ + import ExtensionA._ + val a = A() + println(a.id) // error + } + def main2(args: Array[String]): Unit = { + import ExtensionA._ + import ExtensionB._ + val a = A() + println(a.id) // error + } +} \ No newline at end of file diff --git a/tests/neg/i13570.check b/tests/neg/i13570.check new file mode 100644 index 
000000000000..9c7edc6fdfe0 --- /dev/null +++ b/tests/neg/i13570.check @@ -0,0 +1,18 @@ +-- Error: tests/neg/i13570.scala:8:5 ----------------------------------------------------------------------------------- +8 | jim(Seq(1,2)) // error + | ^^^^^^^^^^^^^ + | cannot reduce inline match with + | scrutinee: seq$proxy1 : (seq$proxy1 : Seq[Int]) + | patterns : case s @ _:Seq[Int] if s.isEmpty + | case s @ _:Seq[Int] + | case _ + |--------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13570.scala:3 +3 | inline seq match + | ^ +4 | case s: Seq[Int] if s.isEmpty => println("seq is empty") +5 | case s: Seq[Int] => println("seq is not empty") +6 | case _ => println("somthing hinky happened") + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i13570.scala b/tests/neg/i13570.scala new file mode 100644 index 000000000000..1376dedbaf19 --- /dev/null +++ b/tests/neg/i13570.scala @@ -0,0 +1,8 @@ +object Test: + inline def jim(seq: Seq[Int]) = + inline seq match + case s: Seq[Int] if s.isEmpty => println("seq is empty") + case s: Seq[Int] => println("seq is not empty") + case _ => println("somthing hinky happened") + + jim(Seq(1,2)) // error diff --git a/tests/neg/i13701.scala b/tests/neg/i13701.scala new file mode 100644 index 000000000000..0e038359856a --- /dev/null +++ b/tests/neg/i13701.scala @@ -0,0 +1,2 @@ +@main def Test() = String // error + diff --git a/tests/neg/i13703.check b/tests/neg/i13703.check new file mode 100644 index 000000000000..34f37cc1502f --- /dev/null +++ b/tests/neg/i13703.check @@ -0,0 +1,5 @@ +-- Error: tests/neg/i13703.scala:3:17 
---------------------------------------------------------------------------------- +3 |val f: Foo { var i: Int } = new Foo { var i: Int = 0 } // error + | ^^^^^^^^^^ + | refinement cannot be a mutable var. + | You can use an explicit getter i and setter i_= instead diff --git a/tests/neg/i13703.scala b/tests/neg/i13703.scala new file mode 100644 index 000000000000..c81d1b6e3e0b --- /dev/null +++ b/tests/neg/i13703.scala @@ -0,0 +1,5 @@ +trait Foo extends reflect.Selectable + +val f: Foo { var i: Int } = new Foo { var i: Int = 0 } // error + +val f2: Foo { val i: Int; def i_=(x: Int): Unit } = new Foo { var i: Int = 0 } // OK diff --git a/tests/neg/i13769.check b/tests/neg/i13769.check new file mode 100644 index 000000000000..3d7af1bd06a1 --- /dev/null +++ b/tests/neg/i13769.check @@ -0,0 +1,10 @@ +-- Error: tests/neg/i13769.scala:2:18 ---------------------------------------------------------------------------------- +2 |val te = tup.map((x: _ <: Int) => List(x)) // error // error + | ^^^^^^^^^^^ + | not a legal formal parameter +-- [E006] Not Found Error: tests/neg/i13769.scala:2:39 ----------------------------------------------------------------- +2 |val te = tup.map((x: _ <: Int) => List(x)) // error // error + | ^ + | Not found: x + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i13769.scala b/tests/neg/i13769.scala new file mode 100644 index 000000000000..67575e821334 --- /dev/null +++ b/tests/neg/i13769.scala @@ -0,0 +1,2 @@ +val tup = (1, "s") +val te = tup.map((x: _ <: Int) => List(x)) // error // error diff --git a/tests/neg/i13808.check b/tests/neg/i13808.check new file mode 100644 index 000000000000..e9b55a5ddec2 --- /dev/null +++ b/tests/neg/i13808.check @@ -0,0 +1,8 @@ +-- Error: tests/neg/i13808.scala:13:37 --------------------------------------------------------------------------------- +13 |case class Boom[A](value: A) derives OpaqueType, Foo // error // error + | ^^^^^^^^^^ + | OpaqueTypes.OpaqueType is not 
a class type +-- Error: tests/neg/i13808.scala:13:49 --------------------------------------------------------------------------------- +13 |case class Boom[A](value: A) derives OpaqueType, Foo // error // error + | ^^^ + | FooModule.Foo is not a class type diff --git a/tests/neg/i13808.scala b/tests/neg/i13808.scala new file mode 100644 index 000000000000..6dd568c69423 --- /dev/null +++ b/tests/neg/i13808.scala @@ -0,0 +1,14 @@ +object OpaqueTypes: + opaque type OpaqueType[A] = List[A] + object OpaqueType: + def derived[A]: OpaqueType[A] = Nil + +object FooModule: + type Foo[A] + object Foo: + def derived[A]: Foo[A] = Nil.asInstanceOf[Foo[A]] + +import FooModule.Foo +import OpaqueTypes.OpaqueType +case class Boom[A](value: A) derives OpaqueType, Foo // error // error + diff --git a/tests/neg/i13838a.scala b/tests/neg/i13838a.scala new file mode 100644 index 000000000000..9fcb7be7bdcf --- /dev/null +++ b/tests/neg/i13838a.scala @@ -0,0 +1,47 @@ +object TooSlow { + trait EqSyntax { + implicit def catsSyntaxEq[A: Eq](a: A): EqOps[A] = ??? + } + + final class EqOps[A] + + object eq extends EqSyntax + + import eq._ + + sealed abstract class Foo[A] + object Foo { + implicit def eqFoo[A: Eq]: Eq[Foo[A]] = ??? + } + + type FooT[F[_], A] = F[Foo[A]] + object FooT { + def liftF[F[_], A](fa: F[A]): F[Foo[A]] = + map(fa)(???) // error + + def map[F[_], A, B](ffa: F[Foo[A]])(f: A => B): F[Foo[B]] = + ??? + } + + trait Order[A] extends Eq[A] + + trait Eq[A] + + object Eq { + implicit def catsKernelOrderForTuple14[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13](implicit A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9], A10: Order[A10], A11: Order[A11], A12: Order[A12], A13: Order[A13]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13)] = ??? 
+ implicit def catsKernelOrderForTuple13[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12](implicit A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9], A10: Order[A10], A11: Order[A11], A12: Order[A12]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12)] = ??? + implicit def catsKernelOrderForTuple12[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11](implicit A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9], A10: Order[A10], A11: Order[A11]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11)] = ??? + implicit def catsKernelOrderForTuple11[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10](implicit A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9], A10: Order[A10]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9, A10)] = ??? + implicit def catsKernelOrderForTuple10[A0, A1, A2, A3, A4, A5, A6, A7, A8, A9](implicit A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8], A9: Order[A9]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8, A9)] = ??? + implicit def catsKernelOrderForTuple9[A0, A1, A2, A3, A4, A5, A6, A7, A8](implicit A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7], A8: Order[A8]): Order[(A0, A1, A2, A3, A4, A5, A6, A7, A8)] = ??? + implicit def catsKernelOrderForTuple8[A0, A1, A2, A3, A4, A5, A6, A7](implicit A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6], A7: Order[A7]): Order[(A0, A1, A2, A3, A4, A5, A6, A7)] = ??? 
+ implicit def catsKernelOrderForTuple7[A0, A1, A2, A3, A4, A5, A6](implicit A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5], A6: Order[A6]): Order[(A0, A1, A2, A3, A4, A5, A6)] = ??? + implicit def catsKernelOrderForTuple6[A0, A1, A2, A3, A4, A5](implicit A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4], A5: Order[A5]): Order[(A0, A1, A2, A3, A4, A5)] = ??? + implicit def catsKernelOrderForTuple5[A0, A1, A2, A3, A4](implicit A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3], A4: Order[A4]): Order[(A0, A1, A2, A3, A4)] = ??? + implicit def catsKernelOrderForTuple4[A0, A1, A2, A3](implicit A0: Order[A0], A1: Order[A1], A2: Order[A2], A3: Order[A3]): Order[(A0, A1, A2, A3)] = ??? + implicit def catsKernelOrderForTuple3[A0, A1, A2](implicit A0: Order[A0], A1: Order[A1], A2: Order[A2]): Order[(A0, A1, A2)] = ??? + implicit def catsKernelOrderForTuple2[A0, A1](implicit A0: Order[A0], A1: Order[A1]): Order[(A0, A1)] = ??? + implicit def catsKernelOrderForTuple1[A0](implicit A0: Order[A0]): Order[Tuple1[A0]] = ??? + } + +} \ No newline at end of file diff --git a/tests/neg/i13846.check b/tests/neg/i13846.check new file mode 100644 index 000000000000..69ea0f0e51ac --- /dev/null +++ b/tests/neg/i13846.check @@ -0,0 +1,17 @@ +-- Error: tests/neg/i13846.scala:3:22 ---------------------------------------------------------------------------------- +3 |def foo(): Int throws ArithmeticException = 1 / 0 // error + | ^^^^^^^^^^^^^^^^^^^ + | throws clause cannot be defined for RuntimeException +-- Error: tests/neg/i13846.scala:7:9 ----------------------------------------------------------------------------------- +7 | foo() // error + | ^ + | The capability to throw exception ArithmeticException is missing. 
+ | The capability can be provided by one of the following: + | - Adding a using clause `(using CanThrow[ArithmeticException])` to the definition of the enclosing method + | - Adding `throws ArithmeticException` clause after the result type of the enclosing method + | - Wrapping this piece of code with a `try` block that catches ArithmeticException + | + | The following import might fix the problem: + | + | import unsafeExceptions.canThrowAny + | diff --git a/tests/neg/i13846.scala b/tests/neg/i13846.scala new file mode 100644 index 000000000000..ac2a8ebd8eef --- /dev/null +++ b/tests/neg/i13846.scala @@ -0,0 +1,9 @@ +import language.experimental.saferExceptions + +def foo(): Int throws ArithmeticException = 1 / 0 // error + +def test(): Unit = + try + foo() // error + catch + case _: ArithmeticException => println("Caught") diff --git a/tests/neg/i13849.check b/tests/neg/i13849.check new file mode 100644 index 000000000000..6dafaaa30ff1 --- /dev/null +++ b/tests/neg/i13849.check @@ -0,0 +1,5 @@ +-- Error: tests/neg/i13849.scala:16:11 --------------------------------------------------------------------------------- +16 | case _: Ex if false => println("Caught") // error + | ^^^^^ + | Implementation restriction: cannot generate CanThrow capability for this kind of catch. + | CanThrow capabilities can only be generated if no pattern guard is given. 
diff --git a/tests/neg/i13849.scala b/tests/neg/i13849.scala new file mode 100644 index 000000000000..9b734db4be7d --- /dev/null +++ b/tests/neg/i13849.scala @@ -0,0 +1,16 @@ +import annotation.experimental +import language.experimental.saferExceptions + +@experimental +case class Ex(i: Int) extends Exception(s"Exception: $i") + +@experimental +def foo(): Unit throws Ex = throw Ex(1) + +@experimental +object Main: + def main(args: Array[String]): Unit = + try + foo() + catch + case _: Ex if false => println("Caught") // error diff --git a/tests/neg/i13851.scala b/tests/neg/i13851.scala new file mode 100644 index 000000000000..fb7066a60b3f --- /dev/null +++ b/tests/neg/i13851.scala @@ -0,0 +1,11 @@ +opaque type One = 1 +inline val One: One = 1 // error + +opaque type Max = Int.MaxValue.type +inline val Max: Max = Int.MaxValue // error + +inline val MaxValue: Int.MaxValue.type = Int.MaxValue + +opaque type Two = 2 +type Bis = Two +inline val Two: Bis = 2 // error \ No newline at end of file diff --git a/tests/neg/i13851b.scala b/tests/neg/i13851b.scala new file mode 100644 index 000000000000..624735de0a49 --- /dev/null +++ b/tests/neg/i13851b.scala @@ -0,0 +1,10 @@ +object Num { + opaque type One = 1 + inline val One: One = 1 // error + + opaque type Two = 2 + inline def Two: Two = 2 +} + +def test1 = Num.One +def test2 = Num.Two diff --git a/tests/neg/i13852.scala b/tests/neg/i13852.scala new file mode 100644 index 000000000000..a0c5e726e1a8 --- /dev/null +++ b/tests/neg/i13852.scala @@ -0,0 +1,6 @@ +inline val `1`: 1 = 1 +def get1: 1 = `1` + +opaque type One = 1 +inline val One: One = 1 // error +def getOne: One = One diff --git a/tests/neg/i13864.check b/tests/neg/i13864.check new file mode 100644 index 000000000000..54e81ea82774 --- /dev/null +++ b/tests/neg/i13864.check @@ -0,0 +1,18 @@ +-- Error: tests/neg/i13864.scala:11:9 ---------------------------------------------------------------------------------- +11 | case Ex(i: Int) => println("Caught an Int") // 
error + | ^^^^^^^^^^ + | Implementation restriction: cannot generate CanThrow capability for this kind of catch. + | CanThrow capabilities can only be generated for cases of the form `ex: T` where `T` is fully defined. +-- Error: tests/neg/i13864.scala:9:10 ---------------------------------------------------------------------------------- +9 | foo(1) // error + | ^ + | The capability to throw exception Ex[Int] is missing. + | The capability can be provided by one of the following: + | - Adding a using clause `(using CanThrow[Ex[Int]])` to the definition of the enclosing method + | - Adding `throws Ex[Int]` clause after the result type of the enclosing method + | - Wrapping this piece of code with a `try` block that catches Ex[Int] + | + | The following import might fix the problem: + | + | import unsafeExceptions.canThrowAny + | diff --git a/tests/neg/i13864.scala b/tests/neg/i13864.scala new file mode 100644 index 000000000000..3053a2b12e87 --- /dev/null +++ b/tests/neg/i13864.scala @@ -0,0 +1,11 @@ +import language.experimental.saferExceptions + +case class Ex[A](a: A) extends Exception(s"Ex: $a") + +def foo[A](a: A): Unit throws Ex[A] = throw new Ex(a) + +def test(): Unit = + try + foo(1) // error + catch + case Ex(i: Int) => println("Caught an Int") // error diff --git a/tests/neg/i13960.check b/tests/neg/i13960.check new file mode 100644 index 000000000000..190f55690a5b --- /dev/null +++ b/tests/neg/i13960.check @@ -0,0 +1,6 @@ +-- [E108] Declaration Error: tests/neg/i13960.scala:13:10 -------------------------------------------------------------- +13 | case A() => // error + | ^^^ + | A is not a valid result type of an unapply method of an extractor. 
+ | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i13960.scala b/tests/neg/i13960.scala new file mode 100644 index 000000000000..54ae513598f1 --- /dev/null +++ b/tests/neg/i13960.scala @@ -0,0 +1,15 @@ +class A() extends Product { + override def canEqual(that: Any) = true + override def productArity = 0 + override def productElement(n: Int) = null +} + +object A { + def unapply(a: A): A = a +} + +object Main { + (new A) match { + case A() => // error + } +} diff --git a/tests/neg/i13987.scala b/tests/neg/i13987.scala new file mode 100644 index 000000000000..b27cd444cda6 --- /dev/null +++ b/tests/neg/i13987.scala @@ -0,0 +1,16 @@ +sealed trait Xa[T] +sealed trait Mu[T] extends Xa[T] +object Xa { + // bad + implicit def convertMu[X[x] <: Xa[x], A, B](implicit t: X[A] with Xa[A]): X[B] = t.asInstanceOf[X[B]] + // good +// implicit def convertMu[X[x] <: Xa[x], A, B](implicit t: X[A] with Mu[A]): X[B] = t.asInstanceOf[X[B]] +} +object Mu { + implicit def mu: Mu[Int] = new Mu[Int] {} +} + +object App extends App { + def constrain(a: Mu[Long]): Unit = println(a) + constrain(Xa.convertMu) // error +} \ No newline at end of file diff --git a/tests/neg/i13991.check b/tests/neg/i13991.check new file mode 100644 index 000000000000..009e116f95b1 --- /dev/null +++ b/tests/neg/i13991.check @@ -0,0 +1,15 @@ +-- Error: tests/neg/i13991.scala:5:7 ----------------------------------------------------------------------------------- + 5 | first[String] // error // before line 10 to test alignment of the error message `|` + | ^^^^^^^^^^^^^ + | no implicit argument of type Foo[String] was found + |-------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13991.scala:8 + 8 | 
compiletime.summonInline[Foo[A]].foo + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from i13991.scala:8 +11 | second[A] + 42 // after line 10 to test alignment of the error message `|` + | ^^^^^^^^^ + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i13991.scala b/tests/neg/i13991.scala new file mode 100644 index 000000000000..af15891e82d3 --- /dev/null +++ b/tests/neg/i13991.scala @@ -0,0 +1,11 @@ +trait Foo[X]: + def foo: Int + +def foo = + first[String] // error // before line 10 to test alignment of the error message `|` + +inline def second[A]: Int = + compiletime.summonInline[Foo[A]].foo + +inline def first[A]: Int = + second[A] + 42 // after line 10 to test alignment of the error message `|` diff --git a/tests/neg/i13997.scala b/tests/neg/i13997.scala new file mode 100644 index 000000000000..77cca35d93bd --- /dev/null +++ b/tests/neg/i13997.scala @@ -0,0 +1,10 @@ + opaque type CovariantArray[+A] = Array[A] // error + + object CovariantArray: + def crash() = + val stringArray: CovariantArray[String] = Array("foo", "bar") + val anyArray: CovariantArray[Any] = stringArray + anyArray(0) = 42 + stringArray(0).length + + @main def Test = CovariantArray.crash() \ No newline at end of file diff --git a/tests/neg/i14013.scala b/tests/neg/i14013.scala new file mode 100644 index 000000000000..96b2e7c0cdd7 --- /dev/null +++ b/tests/neg/i14013.scala @@ -0,0 +1,15 @@ +object Foo1 { + case class Bar(i: Int) + + private implicit class BarOps(bar: Bar) { + def twice = Bar(bar.i * 2) + } +} + +class Foo { + def bar = Foo.Bar(1).twice // error +} + +object App extends App { + println((new Foo).bar) +} \ No newline at end of file diff --git a/tests/neg/i14039.check b/tests/neg/i14039.check new file mode 100644 index 
000000000000..a9bae09c2415 --- /dev/null +++ b/tests/neg/i14039.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/i14039.scala:4:14 ---------------------------------------------------------------------------------- +4 |def test = get() // error + | ^^^^^ + | my error diff --git a/tests/neg/i14039.scala b/tests/neg/i14039.scala new file mode 100644 index 000000000000..3d34a84f0e8d --- /dev/null +++ b/tests/neg/i14039.scala @@ -0,0 +1,4 @@ +val entries = Seq.newBuilder[Any] +inline def error(): Any = compiletime.error("my error") +inline def get(): Unit = entries += error() +def test = get() // error diff --git a/tests/neg/i14062.scala b/tests/neg/i14062.scala new file mode 100644 index 000000000000..e2e6625a97e0 --- /dev/null +++ b/tests/neg/i14062.scala @@ -0,0 +1,10 @@ +import annotation.* + +object Test: + + @targetName("") // error + def foo = println("ok") + + foo + + diff --git a/tests/neg/i14145.scala b/tests/neg/i14145.scala new file mode 100644 index 000000000000..e9619ae9d756 --- /dev/null +++ b/tests/neg/i14145.scala @@ -0,0 +1,4 @@ +val l: List[Option[Int]] = List(None, Some(1), None) + +@main def m15 = + l.collectFirst(Some.unapply.unlift[Option[Int], Int]) // error diff --git a/tests/neg/i14177a.scala b/tests/neg/i14177a.scala new file mode 100644 index 000000000000..3031271c369b --- /dev/null +++ b/tests/neg/i14177a.scala @@ -0,0 +1,6 @@ +import scala.compiletime.* + +trait C[A] + +inline given [Tup <: Tuple]: C[Tup] with + val cs = summonAll[Tuple.Map[Tup, C]] // error cannot reduce inline match with diff --git a/tests/neg/i14177c.scala b/tests/neg/i14177c.scala new file mode 100644 index 000000000000..d281938ca0ea --- /dev/null +++ b/tests/neg/i14177c.scala @@ -0,0 +1,15 @@ +class T + +transparent inline given fail1: T with // error + val cs = scala.compiletime.summonAll[EmptyTuple] +transparent inline given fail2[X]: T with // error + val cs = scala.compiletime.summonAll[EmptyTuple] +transparent inline given fail3(using DummyImplicit): T with // error + 
val cs = scala.compiletime.summonAll[EmptyTuple] + +transparent inline given ok1: T = new T: + val cs = scala.compiletime.summonAll[EmptyTuple] +transparent inline given ok2[X]: T = new T: + val cs = scala.compiletime.summonAll[EmptyTuple] +transparent inline given ok3(using DummyImplicit): T = new T: + val cs = scala.compiletime.summonAll[EmptyTuple] diff --git a/tests/neg/i14303/A_1_c3.0.0.scala b/tests/neg/i14303/A_1_c3.0.0.scala new file mode 100644 index 000000000000..4e1d7cc57472 --- /dev/null +++ b/tests/neg/i14303/A_1_c3.0.0.scala @@ -0,0 +1,5 @@ +def a: Int = { + 3 + 4 +} +def f: Int = match // error // error diff --git a/tests/neg/i14303/B_2.scala b/tests/neg/i14303/B_2.scala new file mode 100644 index 000000000000..39441cd7c630 --- /dev/null +++ b/tests/neg/i14303/B_2.scala @@ -0,0 +1 @@ +def test() = f // error \ No newline at end of file diff --git a/tests/neg/i14367.check b/tests/neg/i14367.check new file mode 100644 index 000000000000..94158bf73923 --- /dev/null +++ b/tests/neg/i14367.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg/i14367.scala:2:16 ------------------------------------------------------------- +2 |val h2 = i => p(i) // error: Found (i : Seq[Int]), Required: Int + | ^ + | Found: (i : Seq[Int]) + | Required: Int + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i14367.scala b/tests/neg/i14367.scala new file mode 100644 index 000000000000..2778ade8dfa1 --- /dev/null +++ b/tests/neg/i14367.scala @@ -0,0 +1,5 @@ +def p(i: Int*) = i.sum +val h2 = i => p(i) // error: Found (i : Seq[Int]), Required: Int + // It would be more logical to fail with a "missing parameter type", however. 
+ + diff --git a/tests/neg/i1501.scala b/tests/neg/i1501.scala index 68556640383a..045f2be1de2f 100644 --- a/tests/neg/i1501.scala +++ b/tests/neg/i1501.scala @@ -16,13 +16,3 @@ object Test { println(new C().foo) } } - -object Test2 { - class A - class SubA(x: Int) extends A - trait TA extends A - trait TSubA extends SubA(2) // error: trait TSubA may not call constructor of class SubA - - - class Foo extends TA with TSubA // error: missing argument for parameter x of constructor SubA: -} diff --git a/tests/neg/i1501a.scala b/tests/neg/i1501a.scala new file mode 100644 index 000000000000..4568c637f3de --- /dev/null +++ b/tests/neg/i1501a.scala @@ -0,0 +1,10 @@ + +object Test2 { + class A + class SubA(x: Int) extends A + trait TA extends A + trait TSubA extends SubA(2) // error: trait TSubA may not call constructor of class SubA + + + class Foo extends TA with TSubA // error: missing argument for parameter x of constructor SubA: +} diff --git a/tests/neg/i1653.scala b/tests/neg/i1653.scala index f21fc7d54d96..ef122b1a76b1 100644 --- a/tests/neg/i1653.scala +++ b/tests/neg/i1653.scala @@ -1,3 +1,3 @@ trait Foo { - def foo() = new Unit with Foo // error: cannot extend final class Unit // error: illegal trait inheritance + def foo() = new Unit with Foo // error: cannot extend final class Unit } diff --git a/tests/neg/i1679.scala b/tests/neg/i1679.scala index 08a33bb15596..cadeb85dc8db 100644 --- a/tests/neg/i1679.scala +++ b/tests/neg/i1679.scala @@ -1,5 +1,5 @@ class A[T] object o { // Testing compiler crash, this test should be modified when named type argument are completely implemented - val x: A[T=Int, T=Int] = ??? // error: ']' expected, but '=' found + val x: A[T=Int, T=Int] = ??? 
// error: ']' expected, but '=' found // error } diff --git a/tests/neg/i2033.check b/tests/neg/i2033.check index 4878ad153a47..5751d91f4f3a 100644 --- a/tests/neg/i2033.check +++ b/tests/neg/i2033.check @@ -7,5 +7,9 @@ | ^^^^^^^ | Found: Unit | Required: String - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` +-- Warning: tests/neg/i2033.scala:6:37 --------------------------------------------------------------------------------- +6 | val out = new ObjectOutputStream(println) + | ^^^^^^^ + |method println is eta-expanded even though java.io.OutputStream does not have the @FunctionalInterface annotation. diff --git a/tests/neg/i4004.scala b/tests/neg/i4004.scala new file mode 100644 index 000000000000..bf757a0863a7 --- /dev/null +++ b/tests/neg/i4004.scala @@ -0,0 +1,16 @@ +@main def Test = + "a".isInstanceOf[Null] // error + null.isInstanceOf[Null] // error + "a".isInstanceOf[Nothing] // error + "a".isInstanceOf[Singleton] // error + + "a" match + case _: Null => () // error + case _: Nothing => () // error + case _: Singleton => () // error + case _ => () + + null match + case _: Null => () // error + case _ => () + diff --git a/tests/neg/i4241.scala b/tests/neg/i4241.scala deleted file mode 100644 index 3d93a44a015a..000000000000 --- a/tests/neg/i4241.scala +++ /dev/null @@ -1,12 +0,0 @@ -class Test { - def test: Unit = { - val a: PartialFunction[Int, Int] = { case x => x } - val b: PartialFunction[Int, Int] = x => x match { case 1 => 1; case _ => 2 } - val c: PartialFunction[Int, Int] = x => { x match { case y => y } } - val d: PartialFunction[Int, Int] = x => { { x match { case y => y } } } - - val e: PartialFunction[Int, Int] = x => { println("foo"); x match { case y => y } } // error - val f: PartialFunction[Int, Int] = x => x // error - val g: PartialFunction[Int, String] = { x => x.toString } // error - } -} diff --git a/tests/neg/i4373.scala b/tests/neg/i4373.scala index 
2a7d0586f9d6..458dfc09c150 100644 --- a/tests/neg/i4373.scala +++ b/tests/neg/i4373.scala @@ -15,8 +15,8 @@ class A4 extends _ with Base // error object Test { type T1 = _ // error - type T2 = _[Int] // error // error - type T3 = _ { type S } // error // error + type T2 = _[Int] // error + type T3 = _ { type S } // error type T4 = [X] =>> _ // error // Open questions: diff --git a/tests/neg/i4382.check b/tests/neg/i4382.check index 3d21702ffe0f..4905638ab62a 100644 --- a/tests/neg/i4382.check +++ b/tests/neg/i4382.check @@ -2,23 +2,23 @@ 3 | def v1: Id[_] = ??? // error | ^^^^^ | unreducible application of higher-kinded type App.Id to wildcard arguments - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E043] Type Error: tests/neg/i4382.scala:6:10 ----------------------------------------------------------------------- 6 | def v2: HkL[_] = ??? // error | ^^^^^^ | unreducible application of higher-kinded type App.HkL to wildcard arguments - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E043] Type Error: tests/neg/i4382.scala:9:10 ----------------------------------------------------------------------- 9 | def v3: HkU[_] = ??? // error | ^^^^^^ | unreducible application of higher-kinded type App.HkU to wildcard arguments - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E043] Type Error: tests/neg/i4382.scala:12:10 ---------------------------------------------------------------------- 12 | def v4: HkAbs[_] = ??? 
// error | ^^^^^^^^ | unreducible application of higher-kinded type App.HkAbs to wildcard arguments - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i4986a.check b/tests/neg/i4986a.check index 5375d0fd002b..3aac0a7b2cf3 100644 --- a/tests/neg/i4986a.check +++ b/tests/neg/i4986a.check @@ -4,6 +4,6 @@ |Cannot construct a collection of type List[String] with elements of type Int based on a collection of type List[Int].. |I found: | - | collection.BuildFrom.buildFromIterableOps[Nothing, Nothing, Nothing] + | collection.BuildFrom.buildFromIterableOps[CC, A0, A] | |But method buildFromIterableOps in trait BuildFromLowPriority2 does not match type collection.BuildFrom[List[Int], Int, List[String]]. diff --git a/tests/neg/i4986c.check b/tests/neg/i4986c.check index b93a5520b021..a5fe0cee26bf 100644 --- a/tests/neg/i4986c.check +++ b/tests/neg/i4986c.check @@ -61,4 +61,4 @@ -- Error: tests/neg/i4986c.scala:62:19 --------------------------------------------------------------------------------- 62 | i.m[Option[Long]] // error | ^ - | String; List; [A, _$6] =>> List[Option[?]]; Int; Option[Long]; + | String; List; [A, _] =>> List[Option[?]]; Int; Option[Long]; diff --git a/tests/neg/i5005.scala b/tests/neg/i5005.scala index 1734f79d934c..93ca0900fbb0 100644 --- a/tests/neg/i5005.scala +++ b/tests/neg/i5005.scala @@ -3,4 +3,4 @@ case class i0 (i0: i1) extends AnyVal // error trait i1 extends i0 // error trait F[x] extends AnyVal // error -case class G[x](a: F[x]) extends F[x] // error // error +case class G[x](a: F[x]) extends F[x] // error diff --git a/tests/run/i5094.scala b/tests/neg/i5094.scala similarity index 81% rename from tests/run/i5094.scala rename to tests/neg/i5094.scala index 556a1f0f07df..755e81addf09 100644 --- a/tests/run/i5094.scala +++ b/tests/neg/i5094.scala @@ -9,7 +9,7 @@ trait SOIO extends IO { } trait SOSO extends SOIO with SO abstract class AS 
extends SO -class L extends AS with SOSO +class L extends AS with SOSO // error: cannot override final member object Test { def main(args: Array[String]): Unit = { new L diff --git a/tests/neg/i5101.check b/tests/neg/i5101.check index 151b8cf2dd4e..4f4bac89aa44 100644 --- a/tests/neg/i5101.check +++ b/tests/neg/i5101.check @@ -2,5 +2,5 @@ 11 | case A0(_) => // error | ^^ | Not found: A0 - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i5311.check b/tests/neg/i5311.check index bd2940afc7a9..eb9122b94ed6 100644 --- a/tests/neg/i5311.check +++ b/tests/neg/i5311.check @@ -3,5 +3,5 @@ | ^^^^^^^^^^^^^^^^^^^ | Found: s.T[Int] => s.T[Int] | Required: m.Foo - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i6183.check b/tests/neg/i6183.check index acb2a58766ec..70c1afaae621 100644 --- a/tests/neg/i6183.check +++ b/tests/neg/i6183.check @@ -17,5 +17,5 @@ | [B](b: B)(using x$2: DummyImplicit): Char | [A](a: A): String | both match arguments ((42 : Int)) - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i6205.check b/tests/neg/i6205.check index caca487ee2db..9f35c21b5ef3 100644 --- a/tests/neg/i6205.check +++ b/tests/neg/i6205.check @@ -2,5 +2,5 @@ 4 | def foo = // error | ^ | Type argument Nothing does not conform to lower bound Null in inferred type Contra[Nothing] - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i6662.scala b/tests/neg/i6662.scala deleted file mode 100644 index ccad0675da71..000000000000 --- a/tests/neg/i6662.scala +++ /dev/null @@ -1,15 +0,0 @@ -opaque type Opt[A >: Null] = A - -extension [A >: Null](x: Opt[A]) inline def nonEmpty: Boolean = 
x.get != null // error: Implementation restriction -extension [A >: Null](x: Opt[A]) inline def isEmpty: Boolean = x.get == null // error: Implementation restriction -extension [A >: Null](x: Opt[A]) inline def isDefined: Boolean = x.nonEmpty // error: Implementation restriction -extension [A >: Null](x: Opt[A]) inline def get: A = Opt.unOpt(x) // error: Implementation restriction - -object Opt -{ - inline def unOpt[A >: Null](x: Opt[A]): A = x // error: Implementation restriction - inline def apply[A >: Null](x: A): Opt[A] = x // error: Implementation restriction - inline def some[A >: Null](x: A): Opt[A] = x // error: Implementation restriction - inline def none[A >: Null]: Opt[A] = null // error: Implementation restriction - inline def fromOption[A >: Null](x: Option[A]) = x.orNull // error: Implementation restriction -} diff --git a/tests/neg/i6762b.check b/tests/neg/i6762b.check index 9c0d571dd77b..a844fcf280bf 100644 --- a/tests/neg/i6762b.check +++ b/tests/neg/i6762b.check @@ -3,5 +3,5 @@ | ^^^^^^^^^^^ | Found: Expr[String] | Required: Expr[G[String]] - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i6779.check b/tests/neg/i6779.check index d2953c80f2b4..d895203221ec 100644 --- a/tests/neg/i6779.check +++ b/tests/neg/i6779.check @@ -3,8 +3,8 @@ | ^^^^^^^^^^^^^^^^^^^^^^^^ | Found: F[T] | Required: F[G[T]] - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E008] Not Found Error: tests/neg/i6779.scala:12:31 ----------------------------------------------------------------- 12 | def g2[T](x: T): F[G[T]] = x.f // error | ^^^ @@ -20,5 +20,5 @@ longer explanation available when compiling with `-explain` | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | Found: F[T] | Required: F[G[T]] - -longer explanation available when compiling with `-explain` + | + | longer explanation available when 
compiling with `-explain` diff --git a/tests/neg/i6854.scala b/tests/neg/i6854.scala deleted file mode 100644 index 5eb0f8bf6028..000000000000 --- a/tests/neg/i6854.scala +++ /dev/null @@ -1,12 +0,0 @@ -object Test { - import Lib.* - val xs: IArray2[Int] = IArray2(1) -} - -object Lib { - opaque type IArray2[+T] = Array[_ <: T] - - object IArray2 { - inline def apply(x: =>Int): IArray2[Int] = Array(x) // error - } -} diff --git a/tests/neg/i7056.check b/tests/neg/i7056.check deleted file mode 100644 index 2dff4dc66b13..000000000000 --- a/tests/neg/i7056.check +++ /dev/null @@ -1,7 +0,0 @@ --- [E008] Not Found Error: tests/neg/i7056.scala:19:10 ----------------------------------------------------------------- -19 |val z = x.idnt1 // error - | ^^^^^^^ - | value idnt1 is not a member of B. - | An extension method was tried, but could not be fully constructed: - | - | i7056$package.given_T1_T[T](given_PartialId_B).idnt1() diff --git a/tests/neg/i7195.check b/tests/neg/i7195.check index 0d393ca0e979..3c95a2073b58 100644 --- a/tests/neg/i7195.check +++ b/tests/neg/i7195.check @@ -6,5 +6,5 @@ | | where: T is a type in object B | T² is a type in object A - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i7325.scala b/tests/neg/i7325.scala new file mode 100644 index 000000000000..a47d416aaedb --- /dev/null +++ b/tests/neg/i7325.scala @@ -0,0 +1,9 @@ +import scala.collection._ + +class MySeq[A](private val underlying: Seq[A]) // error + extends Seq[A] + with SeqOps[A, MySeq, MySeq[A]] { + def apply(n: Int) = underlying(n) + def iterator = underlying.iterator + def length = underlying.size +} diff --git a/tests/neg/i7359-f.check b/tests/neg/i7359-f.check index d85a5fd0ad2e..5eabb4e1dd8c 100644 --- a/tests/neg/i7359-f.check +++ b/tests/neg/i7359-f.check @@ -2,7 +2,7 @@ 1 |trait SAMTrait: // error | ^ | Name clash between inherited members: - | def equals: [T >: Boolean <: 
Boolean](obj: Any): T in trait SAMTrait at line 3 and + | def equals[T >: Boolean <: Boolean](obj: Any): T in trait SAMTrait at line 3 and | def equals(x$0: Any): Boolean in class Any | have the same type after erasure. | diff --git a/tests/neg/i7359-g.check b/tests/neg/i7359-g.check index 43257ae2a596..e3f0c89715e4 100644 --- a/tests/neg/i7359-g.check +++ b/tests/neg/i7359-g.check @@ -3,5 +3,5 @@ | ^^^^^^^^^^^^^ | Found: () => String | Required: SAMTrait - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i7597.scala b/tests/neg/i7597.scala index 8b18b82b1db4..cc41a3c77e2d 100644 --- a/tests/neg/i7597.scala +++ b/tests/neg/i7597.scala @@ -6,8 +6,8 @@ object Test extends App { def apply(x: A): B } - class C[S <: String] extends Fn[String, Int] { - def apply(s: S): Int = 0 // error + class C[S <: String] extends Fn[String, Int] { // error + def apply(s: S): Int = 0 } foo("") diff --git a/tests/neg/i7745.scala b/tests/neg/i7745.scala new file mode 100644 index 000000000000..7b54be159661 --- /dev/null +++ b/tests/neg/i7745.scala @@ -0,0 +1,3 @@ +trait F[x] +implicit def foo[f[_], y, x <: f[y]](implicit ev: F[y]): F[x] = ??? 
+val test = implicitly // error \ No newline at end of file diff --git a/tests/neg/i7891.scala b/tests/neg/i7891.scala new file mode 100644 index 000000000000..4f305f74cad1 --- /dev/null +++ b/tests/neg/i7891.scala @@ -0,0 +1,6 @@ +// was previously ok in one compilation unit +def f22 = "hello, world" + +package p { + @main def m = println(f22) // error +} diff --git a/tests/neg/i8569.check b/tests/neg/i8569.check index c33cb68d750c..89173e1a8334 100644 --- a/tests/neg/i8569.check +++ b/tests/neg/i8569.check @@ -2,5 +2,5 @@ 13 | val x = outer.Inner(2) // error (at posttyper) | ^^^^^^^^^^^^^^ | Outer is not a valid class prefix, since it is not an immutable path - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i8569a.check b/tests/neg/i8569a.check index b6ba88cba3b6..966e80133d31 100644 --- a/tests/neg/i8569a.check +++ b/tests/neg/i8569a.check @@ -2,5 +2,5 @@ 13 | val x = new outer2.Inner(2) // error (at typer) | ^^^^^^ | (Test.outer2 : => Outer2) is not a valid type prefix, since it is not an immutable path - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i8623.check b/tests/neg/i8623.check index 9b9a20ce7bf5..b9d6e244e70e 100644 --- a/tests/neg/i8623.check +++ b/tests/neg/i8623.check @@ -7,5 +7,5 @@ | But the part corresponding to `` is not a reference that can be generated. | This might be because resolution yielded as given instance a function that is not | known to be total and side-effect free. 
- -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i8731.scala b/tests/neg/i8731.scala index 0d4886c553ec..7eddcd42c030 100644 --- a/tests/neg/i8731.scala +++ b/tests/neg/i8731.scala @@ -11,9 +11,9 @@ object test: end if else // error: illegal start of definition () - end if // error: misaligned end marker + end if class Test { val test = 3 - end Test // error: misaligned end marker - } // error: eof expected, but unindent found \ No newline at end of file + end Test + } \ No newline at end of file diff --git a/tests/neg/i8736.check b/tests/neg/i8736.check index 80986464d035..e7a0d62cb4af 100644 --- a/tests/neg/i8736.check +++ b/tests/neg/i8736.check @@ -3,15 +3,15 @@ | ^^^^^^^^^^^^ | Found: Any | Required: String - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/i8736.scala:30:29 ------------------------------------------------------------- 30 | def res2: Int = rec.get("v") // error: type mismatch | ^^^^^^^^^^^^ | Found: Any | Required: Int - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E051] Reference Error: tests/neg/i8736.scala:31:26 ----------------------------------------------------------------- 31 | def res3: Boolean = rec.get("z") // error: ambiguous | ^^^^^^^ @@ -22,5 +22,5 @@ longer explanation available when compiling with `-explain` | all match arguments (("z" : String)) | | Note: Overloaded definitions introduced by refinements cannot be resolved - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i8752.check b/tests/neg/i8752.check index 6d1cf47aa2fd..3ccdee55e676 100644 --- a/tests/neg/i8752.check +++ b/tests/neg/i8752.check @@ -3,9 +3,9 @@ | 
^^^^ | C does not take type parameters | (Note that F-bounds of type parameters may not be type lambdas) - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- Error: tests/neg/i8752.scala:3:39 ----------------------------------------------------------------------------------- 3 |trait Arround1[C <:[X]=>>IterableOps[X,C,C[X]] ] // error // error | ^ - | Type argument C does not have the same kind as its bound [_] + | Type argument C does not have the same kind as its bound [_$$1] diff --git a/tests/neg/i8900.scala b/tests/neg/i8900.scala new file mode 100644 index 000000000000..c82f9cd18aa5 --- /dev/null +++ b/tests/neg/i8900.scala @@ -0,0 +1,28 @@ +trait Base { + type M +} +trait A { + type M >: Int | String +} +trait B { + type M <: Int & String +} +object Test { + def foo[T](z: T, x: A & B => T): T = z + def foo2[T](z: T, x: T): T = z + + def main(args: Array[String]): Unit = { + val x = foo(1, x => (??? : x.M)) + val x1: String = x // error (was: ClassCastException) + + val a = foo2(1, + if false then + val x: A & B = ??? + ??? 
: x.M + else 1 + ) + + val b: String = a // error (was: ClassCastException) + } +} + diff --git a/tests/neg/i8988.check b/tests/neg/i8988.check index 6952837128ed..d34e2cfd0047 100644 --- a/tests/neg/i8988.check +++ b/tests/neg/i8988.check @@ -3,5 +3,5 @@ | ^^ | Found: (ys : List[Int]) | Required: String - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i9329.check b/tests/neg/i9329.check index 8ddb0768a119..7e4968edf607 100644 --- a/tests/neg/i9329.check +++ b/tests/neg/i9329.check @@ -1,5 +1,5 @@ -- Error: tests/neg/i9329.scala:8:6 ------------------------------------------------------------------------------------ 8 |class GrandSon extends Son // error | ^ - |class GrandSon needs to be abstract, since def name: => String in trait Parent is not defined - |(The class implements abstract override def name: => String in trait Son but that definition still needs an implementation) + |class GrandSon needs to be abstract, since def name: String in trait Parent is not defined + |(The class implements abstract override def name: String in trait Son but that definition still needs an implementation) diff --git a/tests/neg/i9330.scala b/tests/neg/i9330.scala index 6ba57c033473..ca25582ef7e8 100644 --- a/tests/neg/i9330.scala +++ b/tests/neg/i9330.scala @@ -1,4 +1,4 @@ val x = { - () == "" + () == "" // error implicit def foo[A: A] // error // error // error } diff --git a/tests/neg/i9436.check b/tests/neg/i9436.check index 8938dbacb6cb..b5f9b5c470b1 100644 --- a/tests/neg/i9436.check +++ b/tests/neg/i9436.check @@ -2,8 +2,8 @@ 8 | println(x.f1) // error | ^^^^ | method f1 must be called with () argument - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- Error: tests/neg/i9436.scala:9:14 ----------------------------------------------------------------------------------- 9 | println(x.f2(1)) 
// error | ^^^^^^^ diff --git a/tests/neg/i9437.check b/tests/neg/i9437.check index d0b2226e5a0c..3bac99b07571 100644 --- a/tests/neg/i9437.check +++ b/tests/neg/i9437.check @@ -2,5 +2,5 @@ 7 | println(x.f1()) // error | ^^^^ | method selectDynamic in trait Selectable does not take parameters - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i9460.check b/tests/neg/i9460.check new file mode 100644 index 000000000000..2d1c08f6cf4d --- /dev/null +++ b/tests/neg/i9460.check @@ -0,0 +1,5 @@ +-- Error: tests/neg/i9460.scala:3:6 ------------------------------------------------------------------------------------ +3 |class C extends B // error + | ^ + | parameterized trait A is indirectly implemented, + | needs to be implemented directly so that arguments can be passed diff --git a/tests/neg/i9460.scala b/tests/neg/i9460.scala new file mode 100644 index 000000000000..9cc08bf2ad4d --- /dev/null +++ b/tests/neg/i9460.scala @@ -0,0 +1,4 @@ +trait A(val s: String) { println(s) } +trait B extends A { override val s = "B" } // requires override val s +class C extends B // error +@main def Test = C() diff --git a/tests/neg/i9568.check b/tests/neg/i9568.check index 737abcf21d41..1173d483ed02 100644 --- a/tests/neg/i9568.check +++ b/tests/neg/i9568.check @@ -1,9 +1,12 @@ -- Error: tests/neg/i9568.scala:13:10 ---------------------------------------------------------------------------------- 13 | blaMonad.foo(bla) // error: diverges | ^ - | no implicit argument of type => Monad[F] was found for parameter ev of method blaMonad in object Test. + | no implicit argument of type => Monad[F] was found for parameter ev of method blaMonad in object Test + | + | where: F is a type variable with constraint <: [_] =>> Any + | . 
| I found: | - | Test.blaMonad[Nothing, S](Test.blaMonad[F, S]) + | Test.blaMonad[F, S](Test.blaMonad[F, S]) | - | But method blaMonad in object Test does not match type => Monad[Nothing]. + | But method blaMonad in object Test does not match type => Monad[F]. diff --git a/tests/pos/i9740c.scala b/tests/neg/i9740c.scala similarity index 92% rename from tests/pos/i9740c.scala rename to tests/neg/i9740c.scala index 968355711e19..87881c9b20d7 100644 --- a/tests/pos/i9740c.scala +++ b/tests/neg/i9740c.scala @@ -11,6 +11,6 @@ class Foo { def bar[A <: Txn[A]](x: Exp[A]): Unit = x match case IntExp(x) => case StrExp(x) => - case UnitExp => + case UnitExp => // error case Obj(o) => } diff --git a/tests/neg/i9740d.scala b/tests/neg/i9740d.scala new file mode 100644 index 000000000000..9f2490b697b6 --- /dev/null +++ b/tests/neg/i9740d.scala @@ -0,0 +1,11 @@ +sealed trait Exp[T] +case class IntExp(x: Int) extends Exp[Int] +case class StrExp(x: String) extends Exp[String] +object UnitExp extends Exp[Unit] + +class Foo[U <: Int, T <: U] { + def bar[A <: T](x: Exp[A]): Unit = x match + case IntExp(x) => + case StrExp(x) => + case UnitExp => // error +} diff --git a/tests/neg/i9803.check b/tests/neg/i9803.check index 57e184487504..cc7d56d585b0 100644 --- a/tests/neg/i9803.check +++ b/tests/neg/i9803.check @@ -4,5 +4,5 @@ | Reference to f421 is ambiguous, | it is both imported by name by import bugs.shadowing.x.f421 | and imported by name subsequently by import bugs.shadowing.y.f421 - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i9958.check b/tests/neg/i9958.check index 3657c466dc8b..4da24bb23c7e 100644 --- a/tests/neg/i9958.check +++ b/tests/neg/i9958.check @@ -7,5 +7,5 @@ | ^ | Found: G[[A <: Int] =>> List[A]] | Required: G[List] - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git 
a/tests/neg/if-error.check b/tests/neg/if-error.check index c2f1b02b2067..d8350da1872b 100644 --- a/tests/neg/if-error.check +++ b/tests/neg/if-error.check @@ -4,5 +4,5 @@ | Found: Unit | Required: Int | Maybe you are missing an else part for the conditional? - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/implicit-package-object.scala b/tests/neg/implicit-package-object.scala index 1af3b413e04a..7b73d620b9b8 100644 --- a/tests/neg/implicit-package-object.scala +++ b/tests/neg/implicit-package-object.scala @@ -1,6 +1,9 @@ -trait ToString[A] { - def print(a: A): Unit -} +package toString: + trait ToString[A] { + def print(a: A): Unit + } + +import toString._ package A { case class AA(text: String) diff --git a/tests/neg/implicit-params.scala b/tests/neg/implicit-params.scala index 0282f34e098f..e1d846ca851e 100644 --- a/tests/neg/implicit-params.scala +++ b/tests/neg/implicit-params.scala @@ -11,8 +11,8 @@ object Test { def h(x: Int) given () = x // error: missing return type - given C: C(11) with {} - given D: D(11) with {} + given C: C(11) + given D: D(11) f(1) f(1)(using C) diff --git a/tests/neg/implicitSearch.check b/tests/neg/implicitSearch.check index efc3ac002739..9ff67e2c2638 100644 --- a/tests/neg/implicitSearch.check +++ b/tests/neg/implicitSearch.check @@ -4,7 +4,7 @@ | no implicit argument of type Test.Ord[List[List[T]]] was found for parameter o of method sort in object Test. | I found: | - | Test.listOrd[T](Test.listOrd[T](/* missing */summon[Test.Ord[T]])) + | Test.listOrd[List[T]](Test.listOrd[T](/* missing */summon[Test.Ord[T]])) | | But no implicit values were found that match type Test.Ord[T]. 
-- Error: tests/neg/implicitSearch.scala:15:38 ------------------------------------------------------------------------- diff --git a/tests/neg/indent.scala b/tests/neg/indent.scala index 66ad2c7ca957..e6d6550bee19 100644 --- a/tests/neg/indent.scala +++ b/tests/neg/indent.scala @@ -2,7 +2,7 @@ object Test { extension (x: Int) def gt(y: Int) = x > y val y3 = - if (1) max 10 gt 0 // error: end of statement expected but integer literal found // error // error // error + if (1) max 10 gt 0 // error: end of statement expected but integer literal found // error // error 1 else 2 diff --git a/tests/neg/inline-error-pos.check b/tests/neg/inline-error-pos.check index 121684fe14d6..5d2225903a8d 100644 --- a/tests/neg/inline-error-pos.check +++ b/tests/neg/inline-error-pos.check @@ -4,4 +4,11 @@ | cannot reduce inline match with | scrutinee: 2 : (2 : Int) | patterns : case 1 - | This location contains code that was inlined from inline-error-pos.scala:3 + |--------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from inline-error-pos.scala:3 +3 | inline x match + | ^ +4 | case 1 => 9 + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/inline3.scala b/tests/neg/inline3.scala deleted file mode 100644 index 90a8ea9bb788..000000000000 --- a/tests/neg/inline3.scala +++ /dev/null @@ -1,28 +0,0 @@ -object K0 { - - type T = String - - opaque type ProductInstances[F[_], T] = ErasedProductInstances[F[T]] - - inline def summonAsArray[F[_], T]: Array[Any] = ??? 
// error: Implementation restriction: No inline methods allowed - - inline def mkProductInstances[F[_], T]: ProductInstances[F, T] = // error: Implementation restriction: No inline methods allowed - new ErasedProductInstances(summonAsArray[F, T]).asInstanceOf[ProductInstances[F, T]] - - val x: T = "" - - inline def foo(x: T): T = "foo".asInstanceOf[T] // error: Implementation restriction: No inline methods allowed - -} - -final class ErasedProductInstances[FT](is0: => Array[Any]) - -trait Monoid[A] -case class ISB(i: Int) - -object Test { - //val K0 = new K0 - K0.foo(K0.x) - K0.mkProductInstances[Monoid, ISB] - -} \ No newline at end of file diff --git a/tests/neg/interpolator-dollar.check b/tests/neg/interpolator-dollar.check new file mode 100644 index 000000000000..2de0c843725e --- /dev/null +++ b/tests/neg/interpolator-dollar.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/interpolator-dollar.scala:5:20 --------------------------------------------------------------------- +5 | def oops = f"$s%s $ 3 -} + 1 // error // error +} + 1 // error diff --git a/tests/neg/matchtype-seq.check b/tests/neg/matchtype-seq.check new file mode 100644 index 000000000000..aba1e312da01 --- /dev/null +++ b/tests/neg/matchtype-seq.check @@ -0,0 +1,473 @@ +-- Error: tests/neg/matchtype-seq.scala:9:11 --------------------------------------------------------------------------- +9 | identity[T1[3]]("") // error + | ^ + | Match type reduction failed since selector (3 : Int) + | matches none of the cases + | + | case (1 : Int) => Int + | case (2 : Int) => String +-- Error: tests/neg/matchtype-seq.scala:10:11 -------------------------------------------------------------------------- +10 | identity[T1[3]](1) // error + | ^ + | Match type reduction failed since selector (3 : Int) + | matches none of the cases + | + | case (1 : Int) => Int + | case (2 : Int) => String +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:11:20 ----------------------------------------------------- +11 | 
identity[T1[Int]]("") // error + | ^^ + | Found: ("" : String) + | Required: Test.T1[Int] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T1[Int] + | failed since selector Int + | does not match case (1 : Int) => Int + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case (2 : Int) => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:12:20 ----------------------------------------------------- +12 | identity[T1[Int]](1) // error + | ^ + | Found: (1 : Int) + | Required: Test.T1[Int] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T1[Int] + | failed since selector Int + | does not match case (1 : Int) => Int + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case (2 : Int) => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:21:20 ----------------------------------------------------- +21 | identity[T2[Int]]("") // error + | ^^ + | Found: ("" : String) + | Required: Test.T2[Int] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T2[Int] + | failed since selector Int + | does not match case (1 : Int) => Int + | and cannot be shown to be disjoint from it either. 
+ | Therefore, reduction cannot advance to the remaining case + | + | case _ => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:22:18 ----------------------------------------------------- +22 | identity[T2[2]](1) // error + | ^ + | Found: (1 : Int) + | Required: String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:23:20 ----------------------------------------------------- +23 | identity[T2[Int]](1) // error + | ^ + | Found: (1 : Int) + | Required: Test.T2[Int] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T2[Int] + | failed since selector Int + | does not match case (1 : Int) => Int + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case _ => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:36:18 ----------------------------------------------------- +36 | identity[T3[A]](1) // error + | ^ + | Found: (1 : Int) + | Required: Test.T3[Test.A] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T3[Test.A] + | failed since selector Test.A + | does not match case Test.B => Int + | and cannot be shown to be disjoint from it either. 
+ | Therefore, reduction cannot advance to the remaining case + | + | case Test.C => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:37:18 ----------------------------------------------------- +37 | identity[T3[A]]("") // error + | ^^ + | Found: ("" : String) + | Required: Test.T3[Test.A] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T3[Test.A] + | failed since selector Test.A + | does not match case Test.B => Int + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case Test.C => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:55:18 ----------------------------------------------------- +55 | identity[T5[A]](1) // error + | ^ + | Found: (1 : Int) + | Required: Test.T5[Test.A] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T5[Test.A] + | failed since selector Test.A + | does not match case Test.C => String + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case Test.A => Int + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:56:18 ----------------------------------------------------- +56 | identity[T5[A]]("") // error + | ^^ + | Found: ("" : String) + | Required: Test.T5[Test.A] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T5[Test.A] + | failed since selector Test.A + | does not match case Test.C => String + | and cannot be shown to be disjoint from it either. 
+ | Therefore, reduction cannot advance to the remaining case + | + | case Test.A => Int + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:82:18 ----------------------------------------------------- +82 | identity[T7[D]]("") // error + | ^^ + | Found: ("" : String) + | Required: Test.T7[Test.D] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T7[Test.D] + | failed since selector Test.D + | does not match case Test.A2 => Int + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case Test.D => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:83:18 ----------------------------------------------------- +83 | identity[T7[D]](1) // error + | ^ + | Found: (1 : Int) + | Required: Test.T7[Test.D] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T7[Test.D] + | failed since selector Test.D + | does not match case Test.A2 => Int + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case Test.D => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:94:19 ----------------------------------------------------- +94 | identity[T8[E2]](1) // error + | ^ + | Found: (1 : Int) + | Required: Test.T8[Test.E2] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T8[Test.E2] + | failed since selector Test.E2 + | does not match case Test.E1 => Int + | and cannot be shown to be disjoint from it either. 
+ | Therefore, reduction cannot advance to the remaining case + | + | case Test.E2 => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:95:19 ----------------------------------------------------- +95 | identity[T8[E1]]("") // error + | ^^ + | Found: ("" : String) + | Required: Int + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:96:19 ----------------------------------------------------- +96 | identity[T8[E2]]("") // error + | ^^ + | Found: ("" : String) + | Required: Test.T8[Test.E2] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T8[Test.E2] + | failed since selector Test.E2 + | does not match case Test.E1 => Int + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case Test.E2 => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:105:40 ---------------------------------------------------- +105 | identity[T9[Tuple2[Nothing, String]]](1) // error + | ^ + | Found: (1 : Int) + | Required: Test.T9[(Nothing, String)] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T9[(Nothing, String)] + | failed since selector (Nothing, String) + | is uninhabited (there are no values of that type). 
+ | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:106:40 ---------------------------------------------------- +106 | identity[T9[Tuple2[String, Nothing]]]("1") // error + | ^^^ + | Found: ("1" : String) + | Required: Test.T9[(String, Nothing)] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T9[(String, Nothing)] + | failed since selector (String, Nothing) + | is uninhabited (there are no values of that type). + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:107:37 ---------------------------------------------------- +107 | identity[T9[Tuple2[Int, Nothing]]](1) // error + | ^ + | Found: (1 : Int) + | Required: Test.T9[(Int, Nothing)] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T9[(Int, Nothing)] + | failed since selector (Int, Nothing) + | is uninhabited (there are no values of that type). + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:108:37 ---------------------------------------------------- +108 | identity[T9[Tuple2[Nothing, Int]]]("1") // error + | ^^^ + | Found: ("1" : String) + | Required: Test.T9[(Nothing, Int)] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T9[(Nothing, Int)] + | failed since selector (Nothing, Int) + | is uninhabited (there are no values of that type). + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:109:29 ---------------------------------------------------- +109 | identity[T9[Tuple2[_, _]]]("") // error + | ^^ + | Found: ("" : String) + | Required: Test.T9[(?, ?)] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T9[(?, ?)] + | failed since selector (?, ?) 
+ | does not match case (Int, String) => Int + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case (String, Int) => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:110:29 ---------------------------------------------------- +110 | identity[T9[Tuple2[_, _]]](1) // error + | ^ + | Found: (1 : Int) + | Required: Test.T9[(?, ?)] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T9[(?, ?)] + | failed since selector (?, ?) + | does not match case (Int, String) => Int + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case (String, Int) => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:111:33 ---------------------------------------------------- +111 | identity[T9[Tuple2[Any, Any]]]("") // error + | ^^ + | Found: ("" : String) + | Required: Test.T9[(Any, Any)] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T9[(Any, Any)] + | failed since selector (Any, Any) + | does not match case (Int, String) => Int + | and cannot be shown to be disjoint from it either. 
+ | Therefore, reduction cannot advance to the remaining case + | + | case (String, Int) => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:112:33 ---------------------------------------------------- +112 | identity[T9[Tuple2[Any, Any]]](1) // error + | ^ + | Found: (1 : Int) + | Required: Test.T9[(Any, Any)] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T9[(Any, Any)] + | failed since selector (Any, Any) + | does not match case (Int, String) => Int + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case (String, Int) => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:122:39 ---------------------------------------------------- +122 | identity[TA[Box2[Int, Int, String]]](1) // error + | ^ + | Found: (1 : Int) + | Required: Test.TA[Test.Box2[Int, Int, String]] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.TA[Test.Box2[Int, Int, String]] + | failed since selector Test.Box2[Int, Int, String] + | does not match case Test.Box2[Int, Int, Int] => Int + | and cannot be shown to be disjoint from it either. 
+ | Therefore, reduction cannot advance to the remaining case + | + | case Test.Box2[Int, Int, String] => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:123:39 ---------------------------------------------------- +123 | identity[TA[Box2[Int, Int, String]]]("") // error + | ^^ + | Found: ("" : String) + | Required: Test.TA[Test.Box2[Int, Int, String]] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.TA[Test.Box2[Int, Int, String]] + | failed since selector Test.Box2[Int, Int, String] + | does not match case Test.Box2[Int, Int, Int] => Int + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case Test.Box2[Int, Int, String] => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:144:41 ---------------------------------------------------- +144 | identity[TD[Box2_C[Int, Int, String]]]("") // error + | ^^ + | Found: ("" : String) + | Required: Test.TD[Test.Box2_C[Int, Int, String]] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.TD[Test.Box2_C[Int, Int, String]] + | failed since selector Test.Box2_C[Int, Int, String] + | does not match case Test.Box2_C[Int, Int, Int] => Int + | and cannot be shown to be disjoint from it either. 
+ | Therefore, reduction cannot advance to the remaining case + | + | case Test.Box2_C[Int, Int, String] => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:153:25 ---------------------------------------------------- +153 | def a[A]: M[Some[A]] = 1 // error + | ^ + | Found: (1 : Int) + | Required: Test2.M[Some[A]] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test2.M[Some[A]] + | failed since selector Some[A] + | does not match case Option[Int] => String + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case Some[_] => Int + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:154:25 ---------------------------------------------------- +154 | def b[A]: M[Some[A]] = "" // error + | ^^ + | Found: ("" : String) + | Required: Test2.M[Some[A]] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test2.M[Some[A]] + | failed since selector Some[A] + | does not match case Option[Int] => String + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case Some[_] => Int + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:168:23 ---------------------------------------------------- +168 | val a: M[Inv[A]] = 1 // error + | ^ + | Found: (1 : Int) + | Required: Test3.M[Test3.Inv[A]] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test3.M[Test3.Inv[A]] + | failed since selector Test3.Inv[A] + | does not match case Test3.Inv[Int] => String + | and cannot be shown to be disjoint from it either. 
+ | Therefore, reduction cannot advance to the remaining case + | + | case _ => Int + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:187:25 ---------------------------------------------------- +187 | val a: M[Inv[A]] = 1 // error + | ^ + | Found: (1 : Int) + | Required: Test4.M[Test4.Inv[Foo.this.A]] + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Test4.M[Test4.Inv[Foo.this.A]] + | failed since selector Test4.Inv[Foo.this.A] + | does not match case Test4.Inv[Int] => String + | and cannot be shown to be disjoint from it either. + | Therefore, reduction cannot advance to the remaining case + | + | case _ => Int + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/missing-implicit-2.check b/tests/neg/missing-implicit-2.check index 4ec525f4a34e..e1994c4bf02d 100644 --- a/tests/neg/missing-implicit-2.check +++ b/tests/neg/missing-implicit-2.check @@ -26,5 +26,5 @@ | | where: Byte is a class in package scala | Byte² is a class in package java.lang - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/missing-implicit-3.check b/tests/neg/missing-implicit-3.check index 206f97866186..45837ce11576 100644 --- a/tests/neg/missing-implicit-3.check +++ b/tests/neg/missing-implicit-3.check @@ -12,16 +12,14 @@ | | import concurrent.duration.pairIntToDuration | - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E008] Not Found Error: tests/neg/missing-implicit-3.scala:8:48 ----------------------------------------------------- 8 |val d2: scala.concurrent.duration.Duration = 10.days // error | ^^^^^^^ | value days is not a member of Int, but could be made available as an extension method. 
| - | One of the following imports might fix the problem: + | The following import might fix the problem: | | import concurrent.duration.DurationInt - | import concurrent.duration.DurationLong - | import concurrent.duration.DurationDouble | diff --git a/tests/neg/missing-implicit3.check b/tests/neg/missing-implicit3.check index dec05cecdad6..d58216b9173e 100644 --- a/tests/neg/missing-implicit3.check +++ b/tests/neg/missing-implicit3.check @@ -4,6 +4,6 @@ |no implicit argument of type ord.Ord[ord.Foo] was found for an implicit parameter of method sort in package ord. |I found: | - | ord.Ord.ordered[A](/* missing */summon[ord.Foo => Comparable[? >: ord.Foo]]) + | ord.Ord.ordered[ord.Foo](/* missing */summon[ord.Foo => Comparable[? >: ord.Foo]]) | |But no implicit values were found that match type ord.Foo => Comparable[? >: ord.Foo]. diff --git a/tests/neg/mixin-forwarder-clash1.check b/tests/neg/mixin-forwarder-clash1.check index e581e79d0975..8c1e2a7911ef 100644 --- a/tests/neg/mixin-forwarder-clash1.check +++ b/tests/neg/mixin-forwarder-clash1.check @@ -3,7 +3,7 @@ | ^ | Name clash between inherited members: | def concat(suffix: Int): X in trait One at line 4 and - | def concat: [Dummy](suffix: Int): Y in trait Two at line 8 + | def concat[Dummy](suffix: Int): Y in trait Two at line 8 | have the same type after erasure. | | Consider adding a @targetName annotation to one of the conflicting definitions diff --git a/tests/neg/mixin-forwarder-clash2.check b/tests/neg/mixin-forwarder-clash2.check index 02d839551046..8956d96e2071 100644 --- a/tests/neg/mixin-forwarder-clash2.check +++ b/tests/neg/mixin-forwarder-clash2.check @@ -4,7 +4,7 @@ | ^ | Name clash between inherited members: | def concat(suffix: Int): X in trait One at line 4 and - | def concat: [Dummy](suffix: Int): Y in trait Two at line 8 + | def concat[Dummy](suffix: Int): Y in trait Two at line 8 | have the same type after erasure. 
| | Consider adding a @targetName annotation to one of the conflicting definitions diff --git a/tests/neg/module-class-name.check b/tests/neg/module-class-name.check index 0888689553fb..448eebf240b4 100644 --- a/tests/neg/module-class-name.check +++ b/tests/neg/module-class-name.check @@ -3,29 +3,29 @@ | ^ | Found: Test.C.type | Required: Test.C - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/module-class-name.scala:9:15 -------------------------------------------------- 9 | val y: C = f(C) // error | ^ | Found: Test.C.type | Required: Test.C - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/module-class-name.scala:12:14 ------------------------------------------------- 12 | val z1: C = z // error | ^ | Found: Test.C.type | Required: Test.C - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/module-class-name.scala:13:16 ------------------------------------------------- 13 | val z2: Int = z // error | ^ | Found: Test.C.type | Required: Int - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E008] Not Found Error: tests/neg/module-class-name.scala:15:4 ------------------------------------------------------ 15 | C.foo // error: value foo is not a member of object Test.C | ^^^^^ diff --git a/tests/neg/multi-file-error.check b/tests/neg/multi-file-error.check index bf3547333361..716023fc9ce8 100644 --- a/tests/neg/multi-file-error.check +++ b/tests/neg/multi-file-error.check @@ -2,11 +2,11 @@ 2 | foo() // error | ^^^ | Not found: foo - -longer explanation available when compiling with `-explain` + | + | longer explanation 
available when compiling with `-explain` -- [E006] Not Found Error: tests/neg/multi-file-error/B.scala:2:2 ------------------------------------------------------ 2 | bar() // error | ^^^ | Not found: bar - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/multi-param-derives.scala b/tests/neg/multi-param-derives.scala index 1e228bf38bcf..2eb1a1b5785d 100644 --- a/tests/neg/multi-param-derives.scala +++ b/tests/neg/multi-param-derives.scala @@ -4,10 +4,10 @@ object Test extends App { { trait Show[T] object Show { - given Show[Int] with {} - given [T](using st: Show[T]): Show[Tuple1[T]] with {} - given t2[T, U](using st: Show[T], su: Show[U]): Show[(T, U)] with {} - given t3[T, U, V](using st: Show[T], su: Show[U], sv: Show[V]): Show[(T, U, V)] with {} + given Show[Int]() + given [T](using st: Show[T]): Show[Tuple1[T]]() + given t2[T, U](using st: Show[T], su: Show[U]): Show[(T, U)]() + given t3[T, U, V](using st: Show[T], su: Show[U], sv: Show[V]): Show[(T, U, V)]() def derived[T](using m: Mirror.Of[T], r: Show[m.MirroredElemTypes]): Show[T] = new Show[T] {} } @@ -22,10 +22,10 @@ object Test extends App { { trait Functor[F[_]] object Functor { - given [C]: Functor[[T] =>> C] with {} - given Functor[[T] =>> Tuple1[T]] with {} - given t2 [T]: Functor[[U] =>> (T, U)] with {} - given t3 [T, U]: Functor[[V] =>> (T, U, V)] with {} + given [C]: Functor[[T] =>> C]() + given Functor[[T] =>> Tuple1[T]]() + given t2 [T]: Functor[[U] =>> (T, U)]() + given t3 [T, U]: Functor[[V] =>> (T, U, V)]() def derived[F[_]](using m: Mirror { type MirroredType[X] = F[X] ; type MirroredElemTypes[_] }, r: Functor[m.MirroredElemTypes]): Functor[F] = new Functor[F] {} } @@ -40,8 +40,8 @@ object Test extends App { { trait FunctorK[F[_[_]]] object FunctorK { - given [C]: FunctorK[[F[_]] =>> C] with {} - given [T]: FunctorK[[F[_]] =>> Tuple1[F[T]]] with {} + given [C]: FunctorK[[F[_]] =>> C]() + 
given [T]: FunctorK[[F[_]] =>> Tuple1[F[T]]]() def derived[F[_[_]]](using m: Mirror { type MirroredType[X[_]] = F[X] ; type MirroredElemTypes[_[_]] }, r: FunctorK[m.MirroredElemTypes]): FunctorK[F] = new FunctorK[F] {} } @@ -56,10 +56,10 @@ object Test extends App { { trait Bifunctor[F[_, _]] object Bifunctor { - given [C]: Bifunctor[[T, U] =>> C] with {} - given Bifunctor[[T, U] =>> Tuple1[U]] with {} - given t2: Bifunctor[[T, U] =>> (T, U)] with {} - given t3 [T]: Bifunctor[[U, V] =>> (T, U, V)] with {} + given [C]: Bifunctor[[T, U] =>> C]() + given Bifunctor[[T, U] =>> Tuple1[U]]() + given t2: Bifunctor[[T, U] =>> (T, U)]() + given t3 [T]: Bifunctor[[U, V] =>> (T, U, V)]() def derived[F[_, _]](using m: Mirror { type MirroredType[X, Y] = F[X, Y] ; type MirroredElemTypes[_, _] }, r: Bifunctor[m.MirroredElemTypes]): Bifunctor[F] = ??? } diff --git a/tests/neg/multiLineOps.scala b/tests/neg/multiLineOps.scala index 78a6ba4c3910..8499cc9fe710 100644 --- a/tests/neg/multiLineOps.scala +++ b/tests/neg/multiLineOps.scala @@ -6,7 +6,7 @@ val b1 = { 22 * 22 // ok */*one more*/22 // error: end of statement expected // error: not found: * -} // error: ';' expected, but '}' found +} val b2: Boolean = { println(x) diff --git a/tests/neg/overloading-specifity.scala b/tests/neg/overloading-specifity.scala index b6c10ef1e9bc..7d7cba3ed21f 100644 --- a/tests/neg/overloading-specifity.scala +++ b/tests/neg/overloading-specifity.scala @@ -12,7 +12,7 @@ object Generic { object Test extends App { trait Context - //given ctx: Context with {} + //given ctx: Context() object a { def foo[T](implicit gen: Generic): Show[T] = new Show[T](1) diff --git a/tests/neg/parent-refinement.check b/tests/neg/parent-refinement.check new file mode 100644 index 000000000000..550430bd35a7 --- /dev/null +++ b/tests/neg/parent-refinement.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/parent-refinement.scala:5:2 ------------------------------------------------------------------------ +5 | with Ordered[Year] { 
// error + | ^^^^ + | end of toplevel definition expected but 'with' found diff --git a/tests/neg/parent-refinement.scala b/tests/neg/parent-refinement.scala new file mode 100644 index 000000000000..ca2b88a75fd8 --- /dev/null +++ b/tests/neg/parent-refinement.scala @@ -0,0 +1,7 @@ + +trait Id { type Value } +case class Year(value: Int) extends AnyVal + with Id { type Value = Int } + with Ordered[Year] { // error + +} \ No newline at end of file diff --git a/tests/neg/parser-stability-17.scala b/tests/neg/parser-stability-17.scala index db1f212fc4ab..ff603a677378 100644 --- a/tests/neg/parser-stability-17.scala +++ b/tests/neg/parser-stability-17.scala @@ -1,2 +1,2 @@ trait x0[] { x0: x0 => } // error // error - class x0[x1] extends x0[x0 x0] x2 x0 // error // error // error + class x0[x1] extends x0[x0 x0] x2 x0 // error // error diff --git a/tests/neg/parser-stability-19.scala b/tests/neg/parser-stability-19.scala index 099c3d962c22..c320c7e8df74 100644 --- a/tests/neg/parser-stability-19.scala +++ b/tests/neg/parser-stability-19.scala @@ -1,5 +1,5 @@ object x0 { case class x0[](): // error def x0( ) ] // error - def x0 ( x0:x0 ):x0.type = x1 x0 // error // error // error + def x0 ( x0:x0 ):x0.type = x1 x0 // error // error // error \ No newline at end of file diff --git a/tests/neg/parser-stability-23.scala b/tests/neg/parser-stability-23.scala index d63059288b63..a27d79d5cc3e 100644 --- a/tests/neg/parser-stability-23.scala +++ b/tests/neg/parser-stability-23.scala @@ -1,3 +1,3 @@ object i0 { - import Ordering.{ implicitly as } (true: Boolean) match { case _: i1 as true } // error // error // error + import Ordering.{ implicitly as } (true: Boolean) match { case _: i1 as true } // error // error } diff --git a/tests/neg/print-infix-type.check b/tests/neg/print-infix-type.check new file mode 100644 index 000000000000..21ba753f2408 --- /dev/null +++ b/tests/neg/print-infix-type.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: 
tests/neg/print-infix-type.scala:8:29 --------------------------------------------------- +8 | val x: over[String, Int] = f // error + | ^ + | Found: Int over String + | Required: String over Int + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/print-infix-type.scala b/tests/neg/print-infix-type.scala new file mode 100644 index 000000000000..7ed71e34964d --- /dev/null +++ b/tests/neg/print-infix-type.scala @@ -0,0 +1,8 @@ +object A: + + opaque infix type over[A, B] = (A, B) + def f: over[Int, String] = (1, "") + +object B: + import A.* + val x: over[String, Int] = f // error diff --git a/tests/neg/safeThrowsStrawman.check b/tests/neg/safeThrowsStrawman.check index d0f0b8e60176..6bf1ecdae513 100644 --- a/tests/neg/safeThrowsStrawman.check +++ b/tests/neg/safeThrowsStrawman.check @@ -4,7 +4,7 @@ | The capability to throw exception scalax.Fail is missing. | The capability can be provided by one of the following: | - A using clause `(using CanThrow[scalax.Fail])` - | - A throws clause in a result type such as `X throws scalax.Fail` + | - A raises clause in a result type such as `X raises scalax.Fail` | - an enclosing `try` that catches scalax.Fail -- Error: tests/neg/safeThrowsStrawman.scala:27:15 --------------------------------------------------------------------- 27 | println(bar) // error @@ -12,5 +12,5 @@ | The capability to throw exception Exception is missing. 
| The capability can be provided by one of the following: | - A using clause `(using CanThrow[Exception])` - | - A throws clause in a result type such as `X throws Exception` + | - A raises clause in a result type such as `X raises Exception` | - an enclosing `try` that catches Exception diff --git a/tests/neg/safeThrowsStrawman.scala b/tests/neg/safeThrowsStrawman.scala index 08f169e91701..bc07eb0bb3f9 100644 --- a/tests/neg/safeThrowsStrawman.scala +++ b/tests/neg/safeThrowsStrawman.scala @@ -2,26 +2,26 @@ import language.experimental.erasedDefinitions import annotation.implicitNotFound object scalax: - @implicitNotFound("The capability to throw exception ${E} is missing.\nThe capability can be provided by one of the following:\n - A using clause `(using CanThrow[${E}])`\n - A throws clause in a result type such as `X throws ${E}`\n - an enclosing `try` that catches ${E}") + @implicitNotFound("The capability to throw exception ${E} is missing.\nThe capability can be provided by one of the following:\n - A using clause `(using CanThrow[${E}])`\n - A raises clause in a result type such as `X raises ${E}`\n - an enclosing `try` that catches ${E}") erased class CanThrow[-E <: Exception] - infix type throws[R, +E <: Exception] = CanThrow[E] ?=> R + infix type raises[R, +E <: Exception] = CanThrow[E] ?=> R class Fail extends Exception - def raise[E <: Exception](e: E): Nothing throws E = throw e + def raise[E <: Exception](e: E): Nothing raises E = throw e import scalax._ def foo(x: Boolean): Int = if x then 1 else raise(Fail()) // error -def bar: Int throws Exception = +def bar: Int raises Exception = raise(Fail()) @main def Test = try - erased given CanThrow[Fail] = ??? 
+ erased given CanThrow[Fail] = compiletime.erasedValue println(foo(true)) println(foo(false)) println(bar) // error diff --git a/tests/neg/safeThrowsStrawman2.scala b/tests/neg/safeThrowsStrawman2.scala index 80e5139b1f8d..7d87baad6fa4 100644 --- a/tests/neg/safeThrowsStrawman2.scala +++ b/tests/neg/safeThrowsStrawman2.scala @@ -4,15 +4,15 @@ object scalax: erased class CanThrow[E <: Exception] type CTF = CanThrow[Fail] - infix type throws[R, E <: Exception] = CanThrow[E] ?=> R + infix type raises[R, E <: Exception] = CanThrow[E] ?=> R class Fail extends Exception - def raise[E <: Exception](e: E): Nothing throws E = throw e + def raise[E <: Exception](e: E): Nothing raises E = throw e import scalax._ -def foo(x: Boolean, y: CanThrow[Fail]): Int throws Fail = +def foo(x: Boolean, y: CanThrow[Fail]): Int raises Fail = if x then 1 else raise(Fail()) def bar(x: Boolean)(using CanThrow[Fail]): Int = @@ -20,7 +20,7 @@ def bar(x: Boolean)(using CanThrow[Fail]): Int = @main def Test = try - given ctf: CanThrow[Fail] = ??? + given ctf: CanThrow[Fail] = new CanThrow[Fail] val x = new CanThrow[Fail]() // OK, x is erased val y: Any = new CanThrow[Fail]() // error: illegal reference to erased class CanThrow val y2: Any = new CTF() // error: illegal reference to erased class CanThrow diff --git a/tests/neg/saferExceptions.check b/tests/neg/saferExceptions.check new file mode 100644 index 000000000000..5f51ce08d6db --- /dev/null +++ b/tests/neg/saferExceptions.check @@ -0,0 +1,26 @@ +-- Error: tests/neg/saferExceptions.scala:12:16 ------------------------------------------------------------------------ +12 | case 4 => throw Exception() // error + | ^^^^^^^^^^^^^^^^^ + | The capability to throw exception Exception is missing. 
+ | The capability can be provided by one of the following: + | - Adding a using clause `(using CanThrow[Exception])` to the definition of the enclosing method + | - Adding `throws Exception` clause after the result type of the enclosing method + | - Wrapping this piece of code with a `try` block that catches Exception + | + | The following import might fix the problem: + | + | import unsafeExceptions.canThrowAny + | +-- Error: tests/neg/saferExceptions.scala:17:46 ------------------------------------------------------------------------ +17 | def baz(x: Int): Int throws Failure = bar(x) // error + | ^ + | The capability to throw exception java.io.IOException is missing. + | The capability can be provided by one of the following: + | - Adding a using clause `(using CanThrow[java.io.IOException])` to the definition of the enclosing method + | - Adding `throws java.io.IOException` clause after the result type of the enclosing method + | - Wrapping this piece of code with a `try` block that catches java.io.IOException + | + | The following import might fix the problem: + | + | import unsafeExceptions.canThrowAny + | diff --git a/tests/neg/saferExceptions.scala b/tests/neg/saferExceptions.scala new file mode 100644 index 000000000000..3ef2f8bb0067 --- /dev/null +++ b/tests/neg/saferExceptions.scala @@ -0,0 +1,17 @@ +object test: + import language.experimental.saferExceptions + import java.io.IOException + + class Failure extends Exception + + def bar(x: Int): Int throws Failure | IOException = + x match + case 1 => throw AssertionError() + case 2 => throw Failure() // ok + case 3 => throw java.io.IOException() // ok + case 4 => throw Exception() // error + case 5 => throw Throwable() // ok: Throwable is treated as unchecked + case _ => 0 + + def foo(x: Int): Int throws Exception = bar(x) + def baz(x: Int): Int throws Failure = bar(x) // error diff --git a/tests/neg/singleton-ops-any.check b/tests/neg/singleton-ops-any.check index 6de768fa0b2e..caab2b24e037 100644 --- 
a/tests/neg/singleton-ops-any.check +++ b/tests/neg/singleton-ops-any.check @@ -3,26 +3,40 @@ | ^^^^ | Found: (true : Boolean) | Required: (false : Boolean) - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/singleton-ops-any.scala:7:22 -------------------------------------------------- 7 | val t35: 10 == 10 = false // error | ^^^^^ | Found: (false : Boolean) | Required: (true : Boolean) - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/singleton-ops-any.scala:12:24 ------------------------------------------------- 12 | val t38: false != 5 = false // error | ^^^^^ | Found: (false : Boolean) | Required: (true : Boolean) - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/singleton-ops-any.scala:13:22 ------------------------------------------------- 13 | val t39: 10 != 10 = true // error | ^^^^ | Found: (true : Boolean) | Required: (false : Boolean) - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/singleton-ops-any.scala:18:27 ------------------------------------------------- +18 | val t04: ToString[Int] = "Int" // error + | ^^^^^ + | Found: ("Int" : String) + | Required: compiletime.ops.any.ToString[Int] + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/singleton-ops-any.scala:32:26 ------------------------------------------------- +32 | val t48: IsConst[Any] = true // error + | ^^^^ + | Found: (true : Boolean) + | Required: (false : Boolean) + | + | longer explanation available when compiling with `-explain` diff --git 
a/tests/neg/singleton-ops-any.scala b/tests/neg/singleton-ops-any.scala index 0d7c05b55fec..45f63cf68ff1 100644 --- a/tests/neg/singleton-ops-any.scala +++ b/tests/neg/singleton-ops-any.scala @@ -11,4 +11,30 @@ object Test { val t37: 0 != 1 = true val t38: false != 5 = false // error val t39: 10 != 10 = true // error + + val t01: ToString[1] = "1" + val t02: ToString[-2L] = "-2" + val t03: ToString[true] = "true" + val t04: ToString[Int] = "Int" // error + val t05: ToString[3.33] = "3.33" + val t06: ToString["123"] = "123" + + val t40: IsConst[1] = true + val t41: IsConst[2L] = true + val t42: IsConst[-1.0] = true + val t43: IsConst[false] = true + val t44: IsConst["hi"] = true + val t45: IsConst[Int] = false + val one : Int = 1 + val t46 : IsConst[one.type] = false + final val two = 2 + val t47 : IsConst[two.type] = true + val t48: IsConst[Any] = true // error + def isConst[X] : IsConst[X] = ??? + val t49 : true = isConst[1] + val t50 : false = isConst[one.type] + def isConst2[X <: Int, Y <: Int] : IsConst[X == Y] = ??? 
+ val t51 : true = isConst2[1, 1] + val t52 : false = isConst2[1, one.type] + val t53 : true = isConst2[1, two.type] } diff --git a/tests/neg/singleton-ops-double.scala b/tests/neg/singleton-ops-double.scala new file mode 100644 index 000000000000..006b86270657 --- /dev/null +++ b/tests/neg/singleton-ops-double.scala @@ -0,0 +1,77 @@ +import scala.compiletime.ops.double.* + +object Test { + summon[2.0 + 3.0 =:= 6.0 - 1.0] + summon[1763.0 =:= 41.0 * 43.0] + summon[2.0 + 2.0 =:= 3.0] // error + summon[29.0 * 31.0 =:= 900.0] // error + summon[Double <:< Double + 1.0] // error + summon[1.0 + Double <:< Double] + + val t0: 2.0 + 3.0 = 5.0 + val t1: 2.0 + 2.0 = 5.0 // error + val t2: -1.0 + 1.0 = 0.0 + val t3: -5.0 + -5.0 = -11.0 // error + + val t4: 10.0 * 20.0 = 200.0 + val t5: 30.0 * 10.0 = 400.0 // error + val t6: -10.0 * 2.0 = -20.0 + val t7: -2.0 * -2.0 = 4.0 + + val t8: 10.0 / 2.0 = 5.0 + val t9: 11.0 / -2.0 = -5.5 + val t10: 2.0 / 4.0 = 2.0 // error + + val t12: 10.0 % 3.0 = 1.0 + val t13: 12.0 % 2.0 = 1.0 // error + val t14: 1.0 % -3.0 = 1.0 + + val t16: 1.0 < 0.0 = false + val t17: 0.0 < 1.0 = true + val t18: 10.0 < 5.0 = true // error + val t19: 5.0 < 10.0 = false // error + + val t20: 1.0 <= 0.0 = false + val t21: 1.0 <= 1.0 = true + val t22: 10.0 <= 5.0 = true // error + val t23: 5.0 <= 10.0 = false // error + + val t24: 1.0 > 0.0 = true + val t25: 0.0 > 1.0 = false + val t26: 10.0 > 5.0 = false // error + val t27: 5.0 > 10.0 = true // error + + val t28: 1.0 >= 1.0 = true + val t29: 0.0 >= 1.0 = false + val t30: 10.0 >= 5.0 = false // error + val t31: 5.0 >= 10.0 = true // error + + val t32: Abs[0.0] = 0.0 + val t33: Abs[-1.0] = 1.0 + val t34: Abs[-1.0] = -1.0 // error + val t35: Abs[1.0] = -1.0 // error + + val t36: Negate[-10.0] = 10.0 + val t37: Negate[10.0] = -10.0 + val t38: Negate[1.0] = 1.0 // error + val t39: Negate[-1.0] = -1.0 // error + + val t40: Max[-1.0, 10.0] = 10.0 + val t41: Max[4.0, 2.0] = 4.0 + val t42: Max[2.0, 2.0] = 1.0 // error + val 
t43: Max[-1.0, -1.0] = 0.0 // error + + val t44: Min[-1.0, 10.0] = -1.0 + val t45: Min[4.0, 2.0] = 2.0 + val t46: Min[2.0, 2.0] = 1.0 // error + val t47: Min[-1.0, -1.0] = 0.0 // error + + val t79: ToInt[1.0] = 1 + val t80: ToInt[3.0] = 2 // error + + val t81: ToLong[1.0] = 1L + val t82: ToLong[2.0] = 2 // error + + val t83: ToFloat[1.0] = 1.0f + val t84: ToFloat[2.0] = 2 // error +} diff --git a/tests/neg/singleton-ops-float.scala b/tests/neg/singleton-ops-float.scala new file mode 100644 index 000000000000..f7263fc804a6 --- /dev/null +++ b/tests/neg/singleton-ops-float.scala @@ -0,0 +1,77 @@ +import scala.compiletime.ops.float.* + +object Test { + summon[2.0f + 3.0f =:= 6.0f - 1.0f] + summon[1763.0f =:= 41.0f * 43.0f] + summon[2.0f + 2.0f =:= 3.0f] // error + summon[29.0f * 31.0f =:= 900.0f] // error + summon[Float <:< Float + 1.0f] // error + summon[1.0f + Float <:< Float] + + val t0: 2.0f + 3.0f = 5.0f + val t1: 2.0f + 2.0f = 5.0f // error + val t2: -1.0f + 1.0f = 0.0f + val t3: -5.0f + -5.0f = -11.0f // error + + val t4: 10.0f * 20.0f = 200.0f + val t5: 30.0f * 10.0f = 400.0f // error + val t6: -10.0f * 2.0f = -20.0f + val t7: -2.0f * -2.0f = 4.0f + + val t8: 10.0f / 2.0f = 5.0f + val t9: 11.0f / -2.0f = -5.5f + val t10: 2.0f / 4.0f = 2.0f // error + + val t12: 10.0f % 3.0f = 1.0f + val t13: 12.0f % 2.0f = 1.0f // error + val t14: 1.0f % -3.0f = 1.0f + + val t16: 1.0f < 0.0f = false + val t17: 0.0f < 1.0f = true + val t18: 10.0f < 5.0f = true // error + val t19: 5.0f < 10.0f = false // error + + val t20: 1.0f <= 0.0f = false + val t21: 1.0f <= 1.0f = true + val t22: 10.0f <= 5.0f = true // error + val t23: 5.0f <= 10.0f = false // error + + val t24: 1.0f > 0.0f = true + val t25: 0.0f > 1.0f = false + val t26: 10.0f > 5.0f = false // error + val t27: 5.0f > 10.0f = true // error + + val t28: 1.0f >= 1.0f = true + val t29: 0.0f >= 1.0f = false + val t30: 10.0f >= 5.0f = false // error + val t31: 5.0f >= 10.0f = true // error + + val t32: Abs[0.0f] = 0.0f + val 
t33: Abs[-1.0f] = 1.0f + val t34: Abs[-1.0f] = -1.0f // error + val t35: Abs[1.0f] = -1.0f // error + + val t36: Negate[-10.0f] = 10.0f + val t37: Negate[10.0f] = -10.0f + val t38: Negate[1.0f] = 1.0f // error + val t39: Negate[-1.0f] = -1.0f // error + + val t40: Max[-1.0f, 10.0f] = 10.0f + val t41: Max[4.0f, 2.0f] = 4.0f + val t42: Max[2.0f, 2.0f] = 1.0f // error + val t43: Max[-1.0f, -1.0f] = 0.0f // error + + val t44: Min[-1.0f, 10.0f] = -1.0f + val t45: Min[4.0f, 2.0f] = 2.0f + val t46: Min[2.0f, 2.0f] = 1.0f // error + val t47: Min[-1.0f, -1.0f] = 0.0f // error + + val t79: ToInt[1.0f] = 1 + val t80: ToInt[3.0f] = 2 // error + + val t81: ToLong[1.0f] = 1L + val t82: ToLong[2.0f] = 2 // error + + val t83: ToDouble[1.0f] = 1.0 + val t84: ToDouble[2.0f] = 2 // error +} diff --git a/tests/neg/singleton-ops-int.scala b/tests/neg/singleton-ops-int.scala index d2fd3a73afcd..e85b6204d1fa 100644 --- a/tests/neg/singleton-ops-int.scala +++ b/tests/neg/singleton-ops-int.scala @@ -9,6 +9,9 @@ object Test { summon[1 + Int <:< Int] val t0: 2 + 3 = 5 + final val two = 2 + final val three = 3 + val t0_b : two.type + three.type = 5 val t1: 2 + 2 = 5 // error val t2: -1 + 1 = 0 val t3: -5 + -5 = -11 // error @@ -70,8 +73,6 @@ object Test { val t48: ToString[213] = "213" val t49: ToString[-1] = "-1" - val t50: ToString[0] = "-0" // error - val t51: ToString[200] = "100" // error val t52: 1 ^ 2 = 3 val t53: 1 ^ 3 = 3 // error @@ -102,4 +103,17 @@ object Test { val t73: -7 >>> 3 = 536870911 val t74: -7 >>> 3 = -1 // error + val t75: NumberOfLeadingZeros[0] = 32 + val t76: NumberOfLeadingZeros[8] = 28 + val t77: NumberOfLeadingZeros[-1] = 0 + val t78: NumberOfLeadingZeros[-1] = 1 // error + + val t79: ToLong[1] = 1L + val t80: ToLong[2] = 2 // error + + val t81: ToFloat[1] = 1.0f + val t82: ToFloat[2] = 2 // error + + val t83: ToDouble[1] = 1.0 + val t84: ToDouble[2] = 2 // error } diff --git a/tests/neg/singleton-ops-long.scala b/tests/neg/singleton-ops-long.scala new file mode 
100644 index 000000000000..5af2069beb27 --- /dev/null +++ b/tests/neg/singleton-ops-long.scala @@ -0,0 +1,113 @@ +import scala.compiletime.ops.long.* + +object Test { + summon[2L + 3L =:= 6L - 1L] + summon[1763L =:= 41L * 43L] + summon[2L + 2L =:= 3L] // error + summon[29L * 31L =:= 900L] // error + summon[Long <:< Long + 1L] // error + summon[1L + Long <:< Long] + + val t0: 2L + 3L = 5L + val t1: 2L + 2L = 5L // error + val t2: -1L + 1L = 0L + val t3: -5L + -5L = -11L // error + + val t4: 10L * 20L = 200L + val t5: 30L * 10L = 400L // error + val t6: -10L * 2L = -20L + val t7: -2L * -2L = 4L + + val t8: 10L / 2L = 5L + val t9: 11L / -2L = -5L // Integer division + val t10: 2L / 4L = 2L // error + val t11: -1L / 0L = 1L // error + + val t12: 10L % 3L = 1L + val t13: 12L % 2L = 1L // error + val t14: 1L % -3L = 1L + val t15: -3L % 0L = 0L // error + + val t16: 1L < 0L = false + val t17: 0L < 1L = true + val t18: 10L < 5L = true // error + val t19: 5L < 10L = false // error + + val t20: 1L <= 0L = false + val t21: 1L <= 1L = true + val t22: 10L <= 5L = true // error + val t23: 5L <= 10L = false // error + + val t24: 1L > 0L = true + val t25: 0L > 1L = false + val t26: 10L > 5L = false // error + val t27: 5L > 10L = true // error + + val t28: 1L >= 1L = true + val t29: 0L >= 1L = false + val t30: 10L >= 5L = false // error + val t31: 5L >= 10L = true // error + + val t32: Abs[0L] = 0L + val t33: Abs[-1L] = 1L + val t34: Abs[-1L] = -1L // error + val t35: Abs[1L] = -1L // error + + val t36: Negate[-10L] = 10L + val t37: Negate[10L] = -10L + val t38: Negate[1L] = 1L // error + val t39: Negate[-1L] = -1L // error + + val t40: Max[-1L, 10L] = 10L + val t41: Max[4L, 2L] = 4L + val t42: Max[2L, 2L] = 1L // error + val t43: Max[-1L, -1L] = 0L // error + + val t44: Min[-1L, 10L] = -1L + val t45: Min[4L, 2L] = 2L + val t46: Min[2L, 2L] = 1L // error + val t47: Min[-1L, -1L] = 0L // error + + val t52: 1L ^ 2L = 3L + val t53: 1L ^ 3L = 3L // error + val t54: -1L ^ -2L = 1L + val 
t55: -1L ^ -3L = 1L // error + + val t56: BitwiseOr[1L, 2L] = 3L + val t57: BitwiseOr[10L, 12L] = 13L // error + val t58: BitwiseOr[-11L, 12L] = -3L + val t59: BitwiseOr[-111L, -10L] = 0L // error + + val t60: BitwiseAnd[1L, 1L] = 1L + val t61: BitwiseAnd[1L, 2L] = 0L + val t62: BitwiseAnd[-1L, -3L] = 3L // error + val t63: BitwiseAnd[-1L, -1L] = 1L // error + + val t64: 1L << 1L = 2L + val t65: 1L << 2L = 4L + val t66: 1L << 3L = 8L + val t67: 1L << 4L = 0L // error + + val t68: 100L >> 2L = 25L + val t69: 123456789L >> 71L = 964506L + val t70: -7L >> 3L = -1L + val t71: -7L >> 3L = 0L // error + + val t72: -1L >>> 10000L = 281474976710655L + val t73: -7L >>> 3L = 2305843009213693951L + val t74: -7L >>> 3L = -1L // error + + val t75: NumberOfLeadingZeros[0L] = 64 + val t76: NumberOfLeadingZeros[8L] = 60 + val t77: NumberOfLeadingZeros[-1L] = 0 + val t78: NumberOfLeadingZeros[-1L] = 1 // error + + val t79: ToInt[1L] = 1 + val t80: ToInt[3L] = 2 // error + + val t81: ToFloat[1L] = 1.0f + val t82: ToFloat[2L] = 2 // error + + val t83: ToDouble[1L] = 1.0 + val t84: ToDouble[2L] = 2 // error +} diff --git a/tests/neg/singleton-ops-string.scala b/tests/neg/singleton-ops-string.scala index 46093121d3c4..d9cf2377564b 100644 --- a/tests/neg/singleton-ops-string.scala +++ b/tests/neg/singleton-ops-string.scala @@ -5,4 +5,14 @@ object Test { val t1: "" + "" = "" val t2: "3" + "" = "33" // error val t3: "Hello " + "world" = "error" // error + + val t4: Length["Hello"] = 5 + val t5: Length[""] = 0 + val t6: Length["1"] = 7 // error + + val t7: Substring["hamburger", 4, 8] = "urge" + val t8: Substring["hamburger", 4, 8] = "urger" // error + + val t9: Matches["hamburger", "ham.*"] = true + val t10: Matches["hamburger", "ham.*"] = false // error } diff --git a/tests/neg/spaces-vs-tabs.check b/tests/neg/spaces-vs-tabs.check index 51c2689f57bc..f513dbdf31b7 100644 --- a/tests/neg/spaces-vs-tabs.check +++ b/tests/neg/spaces-vs-tabs.check @@ -23,12 +23,14 @@ | Previous indent : 2 
tabs | Latest indent : 1 space -- Error: tests/neg/spaces-vs-tabs.scala:14:2 -------------------------------------------------------------------------- -14 | else 2 // error // error +14 | else 2 // error | ^ | The start of this line does not match any of the previous indentation widths. | Indentation width of current line : 1 tab, 2 spaces | This falls between previous widths: 1 tab and 1 tab, 4 spaces --- [E040] Syntax Error: tests/neg/spaces-vs-tabs.scala:14:7 ------------------------------------------------------------ -14 | else 2 // error // error - | ^ - | ';' expected, but integer literal found +-- [E129] Potential Issue Warning: tests/neg/spaces-vs-tabs.scala:13:6 ------------------------------------------------- +13 | 1 + | ^ + | A pure expression does nothing in statement position; you may be omitting necessary parentheses + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/spaces-vs-tabs.scala b/tests/neg/spaces-vs-tabs.scala index ff8d1a1c328e..4f48d784eb7d 100644 --- a/tests/neg/spaces-vs-tabs.scala +++ b/tests/neg/spaces-vs-tabs.scala @@ -11,5 +11,5 @@ object Test: if true then 1 - else 2 // error // error + else 2 // error diff --git a/tests/neg/splice-pat.check b/tests/neg/splice-pat.check new file mode 100644 index 000000000000..32af3ad6308e --- /dev/null +++ b/tests/neg/splice-pat.check @@ -0,0 +1,10 @@ +-- [E032] Syntax Error: tests/neg/splice-pat.scala:12:16 --------------------------------------------------------------- +12 | case '{ foo(${ // error: pattern expected + | ^ + | pattern expected + | + | longer explanation available when compiling with `-explain` +-- [E040] Syntax Error: tests/neg/splice-pat.scala:15:5 ---------------------------------------------------------------- +15 | })} => ??? 
// error + | ^ + | '=>' expected, but ')' found diff --git a/tests/neg/splice-pat.scala b/tests/neg/splice-pat.scala new file mode 100644 index 000000000000..a43659c5e323 --- /dev/null +++ b/tests/neg/splice-pat.scala @@ -0,0 +1,15 @@ +import scala.quoted.* + +object MyMatcher { + def unapply(expr: Expr[Any])(using Quotes): Option[Expr[Int]] = ??? +} + +def foo(x: Any): Unit = ??? + +def bar(): Expr[Any] = ??? + +def f(expr: Expr[Any])(using Quotes): Expr[Int] = expr match + case '{ foo(${ // error: pattern expected + import scala.Int + bar() + })} => ??? // error diff --git a/tests/run/structuralNoSuchMethod.scala b/tests/neg/structuralNoSuchMethod.scala similarity index 83% rename from tests/run/structuralNoSuchMethod.scala rename to tests/neg/structuralNoSuchMethod.scala index 476d7ed8225c..c76900c27704 100644 --- a/tests/run/structuralNoSuchMethod.scala +++ b/tests/neg/structuralNoSuchMethod.scala @@ -11,10 +11,10 @@ object Test { def f(x: X, y: String): String = "f1" } - val x: T = new C[String] + val x: T = new C[String] // error def main(args: Array[String]) = - try println(x.f("", "")) // throws NoSuchMethodException + try println(x.f("", "")) // used to throw NoSuchMethodException catch { case ex: NoSuchMethodException => println("no such method") diff --git a/tests/neg/summonInline.check b/tests/neg/summonInline.check new file mode 100644 index 000000000000..6c3839266ce4 --- /dev/null +++ b/tests/neg/summonInline.check @@ -0,0 +1,22 @@ +-- Error: tests/neg/summonInline.scala:19:32 --------------------------------------------------------------------------- +19 |val missing1 = summonInlineCheck(1) // error + | ^^^^^^^^^^^^^^^^^^^^ + | Missing One + |-------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from 
summonInline.scala:15 +15 | case 1 => summonInline[Missing1] + | ^^^^^^^^^^^^^^^^^^^^^^ + -------------------------------------------------------------------------------------------------------------------- +-- Error: tests/neg/summonInline.scala:20:32 --------------------------------------------------------------------------- +20 |val missing2 = summonInlineCheck(2) // error + | ^^^^^^^^^^^^^^^^^^^^ + | Missing Two + |-------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from summonInline.scala:16 +16 | case 2 => summonInline[Missing2] + | ^^^^^^^^^^^^^^^^^^^^^^ + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/summonInline.scala b/tests/neg/summonInline.scala new file mode 100755 index 000000000000..463e506bdbdc --- /dev/null +++ b/tests/neg/summonInline.scala @@ -0,0 +1,21 @@ +import scala.compiletime.summonInline +import scala.annotation.implicitNotFound + +@implicitNotFound("Missing One") +trait Missing1 + +@implicitNotFound("Missing Two") +trait Missing2 + +trait NotMissing +given NotMissing = ??? 
+ +transparent inline def summonInlineCheck[T <: Int](inline t : T) : Any = + inline t match + case 1 => summonInline[Missing1] + case 2 => summonInline[Missing2] + case _ => summonInline[NotMissing] + +val missing1 = summonInlineCheck(1) // error +val missing2 = summonInlineCheck(2) // error +val notMissing : NotMissing = summonInlineCheck(3) \ No newline at end of file diff --git a/tests/neg/t1625.check b/tests/neg/t1625.check new file mode 100644 index 000000000000..7e31f49f3729 --- /dev/null +++ b/tests/neg/t1625.check @@ -0,0 +1,8 @@ +-- [E040] Syntax Error: tests/neg/t1625.scala:2:20 --------------------------------------------------------------------- +2 | def foo(x: String*, y: String*, c: String*): Int // error: an identifier expected, but ',' found // error: an identifier expected, but ',' found + | ^ + | an identifier expected, but ',' found +-- [E040] Syntax Error: tests/neg/t1625.scala:2:32 --------------------------------------------------------------------- +2 | def foo(x: String*, y: String*, c: String*): Int // error: an identifier expected, but ',' found // error: an identifier expected, but ',' found + | ^ + | an identifier expected, but ',' found diff --git a/tests/neg/t1625.scala b/tests/neg/t1625.scala index e98ce985238e..41929f0ab476 100644 --- a/tests/neg/t1625.scala +++ b/tests/neg/t1625.scala @@ -1,3 +1,3 @@ trait T3 { - def foo(x: String*, y: String*, c: String*): Int // error: an identifier expected, but ',' found -} \ No newline at end of file + def foo(x: String*, y: String*, c: String*): Int // error: an identifier expected, but ',' found // error: an identifier expected, but ',' found +} diff --git a/tests/neg/t5702-neg-bad-and-wild.check b/tests/neg/t5702-neg-bad-and-wild.check new file mode 100644 index 000000000000..cecfb1dfa996 --- /dev/null +++ b/tests/neg/t5702-neg-bad-and-wild.check @@ -0,0 +1,44 @@ +-- [E032] Syntax Error: tests/neg/t5702-neg-bad-and-wild.scala:10:22 --------------------------------------------------- +10 | 
case List(1, _*,) => // error: pattern expected // error + | ^ + | pattern expected + | + | longer explanation available when compiling with `-explain` +-- [E032] Syntax Error: tests/neg/t5702-neg-bad-and-wild.scala:12:23 --------------------------------------------------- +12 | case List(1, _*3,) => // error: pattern expected // error // error + | ^ + | pattern expected + | + | longer explanation available when compiling with `-explain` +-- [E032] Syntax Error: tests/neg/t5702-neg-bad-and-wild.scala:15:18 --------------------------------------------------- +15 | case List(x*, 1) => // error: pattern expected + | ^ + | pattern expected + | + | longer explanation available when compiling with `-explain` +-- [E031] Syntax Error: tests/neg/t5702-neg-bad-and-wild.scala:17:18 --------------------------------------------------- +17 | case (1, x: _*) => // error: bad use of _* (sequence pattern not allowed) + | ^ + | * can be used only for last argument + | + | longer explanation available when compiling with `-explain` +-- [E032] Syntax Error: tests/neg/t5702-neg-bad-and-wild.scala:23:17 --------------------------------------------------- +23 | val K(ns @ _*, x) = k // error: pattern expected + | ^ + | pattern expected + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg/t5702-neg-bad-and-wild.scala:10:21 ----------------------------------------------------------------- +10 | case List(1, _*,) => // error: pattern expected // error + | ^ + | Values of types Null and Int cannot be compared with == or != +-- [E006] Not Found Error: tests/neg/t5702-neg-bad-and-wild.scala:12:20 ------------------------------------------------ +12 | case List(1, _*3,) => // error: pattern expected // error // error + | ^ + | Not found: * + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg/t5702-neg-bad-and-wild.scala:12:22 ----------------------------------------------------------------- +12 | case List(1, _*3,) => // 
error: pattern expected // error // error + | ^ + | Values of types Null and Int cannot be compared with == or != diff --git a/tests/neg/t5702-neg-bad-brace.check b/tests/neg/t5702-neg-bad-brace.check new file mode 100644 index 000000000000..92e9fe912a92 --- /dev/null +++ b/tests/neg/t5702-neg-bad-brace.check @@ -0,0 +1,10 @@ +-- [E032] Syntax Error: tests/neg/t5702-neg-bad-brace.scala:8:21 ------------------------------------------------------- +8 | case List(1, _*} => // error: pattern expected + | ^ + | pattern expected + | + | longer explanation available when compiling with `-explain` +-- [E040] Syntax Error: tests/neg/t5702-neg-bad-brace.scala:11:0 ------------------------------------------------------- +11 |} // error: eof expected, but '}' found + |^ + |eof expected, but '}' found diff --git a/tests/neg/t5702-neg-bad-brace.scala b/tests/neg/t5702-neg-bad-brace.scala new file mode 100644 index 000000000000..8a8e10f462aa --- /dev/null +++ b/tests/neg/t5702-neg-bad-brace.scala @@ -0,0 +1,11 @@ + +object Test { + + def main(args: Array[String]) = { + val is = List(1,2,3) + + is match { + case List(1, _*} => // error: pattern expected + } + } +} // error: eof expected, but '}' found diff --git a/tests/neg/t6124.check b/tests/neg/t6124.check index 9c7498d60f88..871533bf80c8 100644 --- a/tests/neg/t6124.check +++ b/tests/neg/t6124.check @@ -62,5 +62,5 @@ 21 | val x1 = _52 // error | ^^^ | Not found: _52 - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/t6476.scala b/tests/neg/t6476.scala index 9d1415f55dd3..bd7868abe3e5 100644 --- a/tests/neg/t6476.scala +++ b/tests/neg/t6476.scala @@ -6,4 +6,4 @@ class C { s"\ " s"\\" s"\" // error -} // error (should not be one) +} diff --git a/tests/neg/t6663.check b/tests/neg/t6663.check index 806f3357a0f6..4adb6f365082 100644 --- a/tests/neg/t6663.check +++ b/tests/neg/t6663.check @@ -3,5 +3,5 @@ | ^^^^^^^^^^^^^^^^^^^^^^^^^ 
| Found: String | Required: Int - -longer explanation available when compiling with `-explain` + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/t6810.check b/tests/neg/t6810.check index 55f9f7ca2443..147081e0daf5 100644 --- a/tests/neg/t6810.check +++ b/tests/neg/t6810.check @@ -22,3 +22,11 @@ 30 | val b = ' | ^ | unclosed character literal +-- Warning: tests/neg/t6810.scala:6:0 ---------------------------------------------------------------------------------- +6 |' // but not embedded EOL sequences not represented as escapes + |^ + |Line is indented too far to the left, or a `}` is missing +-- Warning: tests/neg/t6810.scala:31:0 --------------------------------------------------------------------------------- +31 |' // anypos-error CR seen as EOL by scanner; FSR, error only on open quote, unlike `y` + |^ + |Line is indented too far to the left, or a `}` is missing diff --git a/tests/neg/trailing-comma-pattern.check b/tests/neg/trailing-comma-pattern.check new file mode 100644 index 000000000000..d0ce15081263 --- /dev/null +++ b/tests/neg/trailing-comma-pattern.check @@ -0,0 +1,6 @@ +-- [E032] Syntax Error: tests/neg/trailing-comma-pattern.scala:3:8 ----------------------------------------------------- +3 |// error + | ^ + | pattern expected + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/trailing-comma-pattern.scala b/tests/neg/trailing-comma-pattern.scala new file mode 100644 index 000000000000..02ee158b7ae5 --- /dev/null +++ b/tests/neg/trailing-comma-pattern.scala @@ -0,0 +1,3 @@ +object Test: + val List(x, y, _*, +// error \ No newline at end of file diff --git a/tests/neg/trailing-comma-pattern2.check b/tests/neg/trailing-comma-pattern2.check new file mode 100644 index 000000000000..2759c648ff95 --- /dev/null +++ b/tests/neg/trailing-comma-pattern2.check @@ -0,0 +1,10 @@ +-- [E032] Syntax Error: tests/neg/trailing-comma-pattern2.scala:2:21 
--------------------------------------------------- +2 | val List(x, y, _*, ) // error + | ^ + | pattern expected + | + | longer explanation available when compiling with `-explain` +-- [E040] Syntax Error: tests/neg/trailing-comma-pattern2.scala:3:8 ---------------------------------------------------- +3 |// error + | ^ + | '=' expected, but unindent found diff --git a/tests/neg/trailing-comma-pattern2.scala b/tests/neg/trailing-comma-pattern2.scala new file mode 100644 index 000000000000..2ada489ed234 --- /dev/null +++ b/tests/neg/trailing-comma-pattern2.scala @@ -0,0 +1,3 @@ +object Test: + val List(x, y, _*, ) // error +// error \ No newline at end of file diff --git a/tests/neg/transparent-trait.scala b/tests/neg/transparent-trait.scala new file mode 100644 index 000000000000..52beb624087c --- /dev/null +++ b/tests/neg/transparent-trait.scala @@ -0,0 +1,11 @@ +transparent trait A +transparent trait B +trait C + +object Test: + val x = identity(new A with B) // infers A with B (because there's no non-transparent trait in the intersection) + val x2: A with B = x // OK + + val y = identity(new A with B with C) // infers C + val y2: C = y // OK + val y3: A with B = y // error diff --git a/tests/neg/transparent.scala b/tests/neg/transparent.scala index f2aae0348b45..b4d89478b0ac 100644 --- a/tests/neg/transparent.scala +++ b/tests/neg/transparent.scala @@ -5,5 +5,5 @@ transparent class c // error transparent object y // error transparent trait t // ok transparent type T = c // error -transparent given c with {} // error +transparent given c() // error diff --git a/tests/neg/tuple-ops.scala b/tests/neg/tuple-ops.scala index 4e787c1932c8..47a8c00cc6ef 100644 --- a/tests/neg/tuple-ops.scala +++ b/tests/neg/tuple-ops.scala @@ -12,13 +12,13 @@ val r3: ((2, 1), (8, 2)) = c.zip(a) // error // Map case class Foo[X](x: X) -val r6: (Int, Int, String) = a.map[[t] =>> Int]([t] => x: t => x match { // error +val r6: (Int, Int, String) = a.map[[t] =>> Int]([t] => (x: t) => x 
match { // error case x: Int => x * x case _ => ??? }) val r7: ((1, Foo[1]), (2), (3, Foo[3])) = - a.map[[t] =>> (t, Foo[t])]( [t] => x: t => (x, Foo(x)) ) // error + a.map[[t] =>> (t, Foo[t])]( [t] => (x: t) => (x, Foo(x)) ) // error // More Zip val t1: Int *: Long *: Tuple = (1, 2l, 100, 200) diff --git a/tests/neg/type-qmark.check b/tests/neg/type-qmark.check index ec3e8306af11..85d3b8d3cceb 100644 --- a/tests/neg/type-qmark.check +++ b/tests/neg/type-qmark.check @@ -27,7 +27,7 @@ | ^ | `?` is not a valid type name -- Error: tests/neg/type-qmark.scala:31:8 ------------------------------------------------------------------------------ -31 | given ?[T]: Foo[T] with {} // error +31 | given ?[T]: Foo[T]() // error | ^ | `?` is not a valid type name -- Error: tests/neg/type-qmark.scala:3:8 ------------------------------------------------------------------------------- diff --git a/tests/neg/type-qmark.scala b/tests/neg/type-qmark.scala index bb89f72d6b3d..826a957aaa2e 100644 --- a/tests/neg/type-qmark.scala +++ b/tests/neg/type-qmark.scala @@ -28,5 +28,5 @@ object J { } object K { class Foo[T] - given ?[T]: Foo[T] with {} // error + given ?[T]: Foo[T]() // error } diff --git a/tests/neg/typeclass-encoding3.scala b/tests/neg/typeclass-encoding3.scala new file mode 100644 index 000000000000..ff403314cd1a --- /dev/null +++ b/tests/neg/typeclass-encoding3.scala @@ -0,0 +1,349 @@ +/** 1. Simple type classes with monomorphic implementations and direct extensions. 
+ + trait SemiGroup extends TypeClass { + def add(that: This): This + } + + trait Monoid extends SemiGroup + common { + def unit: This + } + + extension IntOps for Int : Monoid { + def add(that: Int) = this + that + } + common { + def unit = 0 + } + + extension StringOps for String : Monoid { + def add(that: Int) = this ++ that + } + common { + def unit = "" + } + + enum Nat extends Monoid { + case Z + case S(n: Nat) + + def add(that: Nat): Nat = this match { + case S => that + case S(n) => S(n.add(that)) + } + } + common { + def unit = Z + } + + def sum[T: Monoid](xs: List[T]): T = + xs.foldLeft(Monod.impl[T].unit)(_ `add` _) +*/ +object runtime { + + trait TypeClass { + val commons: TypeClassCommon + type This = commons.This + } + + trait TypeClassCommon { self => + type This + type Instance <: TypeClass + def inject(x: This): Instance { val commons: self.type } + } + + trait TypeClassCompanion { + type Impl[T] <: TypeClassCommon { type This = T } + def impl[T](implicit ev: Impl[T]): Impl[T] = ev + } + + implicit def inject[From](x: From) + (implicit ev: TypeClassCommon { type This = From }): ev.Instance { type This = From } = + ev.inject(x) +} +import runtime.* + +object semiGroups { + + trait SemiGroup extends TypeClass { + val commons: SemiGroupCommon + import commons.* + def add(that: This): This + } + trait SemiGroupCommon extends TypeClassCommon { + type Instance <: SemiGroup + } + object SemiGroup extends TypeClassCompanion { + type Impl[T] = SemiGroupCommon { type This = T } + } + + trait Monoid extends SemiGroup { + val commons: MonoidCommon + import commons.* + } + trait MonoidCommon extends SemiGroupCommon { + type Instance <: Monoid + def unit: This + } + object Monoid extends TypeClassCompanion { + type Impl[T] = MonoidCommon { type This = T } + } + + implicit object IntOps extends MonoidCommon { + type This = Int + type Instance = Monoid + def unit: Int = 0 + def inject($this: Int) = new Monoid { + val commons: IntOps.this.type = IntOps.this + def 
add(that: this.This): this.This = $this + that + } + } + + implicit object StringOps extends MonoidCommon { + type This = String + type Instance = Monoid + def unit = "" + def inject($this: String) = new Monoid { + val commons: StringOps.this.type = StringOps.this + def add(that: this.This): this.This = $this.concat(that) + } + } + + enum Nat extends Monoid { + case Z + case S(n: Nat) + + def add(that: Nat): Nat = this match { + case Z => that + case S(n) => S(n.add(that)) + } + + val commons: Nat.type = Nat + } + object Nat extends MonoidCommon { + type This = Nat + type Instance = Nat + def unit = Nat.Z + def inject($this: Nat) = $this + } + import Nat.{Z, S} + + implicit def NatOps: Nat.type = Nat + + def sum[T](xs: List[T])(implicit ev: Monoid.Impl[T]) = + xs.foldLeft(Monoid.impl[T].unit)((x, y) => x `add` y) + + sum(List(1, 2, 3)) + sum(List("hello ", "world!")) + sum(List(Z, S(Z), S(S(Z)))) +} + +/** 2. Generic implementations of simple type classes. + + trait Ord extends TypeClass { + def compareTo(that: This): Int + def < (that: This) = compareTo(that) < 0 + def > (that: This) = compareTo(that) > 0 + } + common { + val minimum: This + } + + extension IntOrd for Int : Ord { + def compareTo(that: Int) = + if (this < that) -1 else if (this > that) +1 else 0 + } + common { + val minimum = Int.MinValue + } + + extension ListOrd[T : Ord] for List[T] : Ord { + def compareTo(that: List[T]): Int = (this, that) match { + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs, y :: ys) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs.compareTo(ys) + } + } + common { + val minimum = Nil + } + + def min[T: Ord](x: T, y: T) = if (x < y) x else y + + def inf[T: Ord](xs: List[T]): T = (Ord.impl[T].minimum /: xs)(min) +*/ +object ord { + + trait Ord extends TypeClass { + val commons: OrdCommon + import commons.* + def compareTo(that: This): Int + def < (that: This) = compareTo(that) < 0 + def > (that: This) = compareTo(that) > 0 + } + 
trait OrdCommon extends TypeClassCommon { + type Instance <: Ord + def minimum: This + } + object Ord extends TypeClassCompanion { + type Impl[T] = OrdCommon { type This = T } + } + + implicit object IntOrd extends OrdCommon { + type This = Int + type Instance = Ord + val minimum: Int = Int.MinValue + def inject($this: Int) = new Ord { + val commons: IntOrd.this.type = IntOrd.this + import commons.* + def compareTo(that: this.This): Int = + if (this < that) -1 else if (this > that) +1 else 0 + } + } + + class ListOrd[T](implicit ev: Ord.Impl[T]) extends OrdCommon { self => + type This = List[T] + type Instance = Ord + def minimum: List[T] = Nil + def inject($this: List[T]) = new Ord { + val commons: self.type = self + import commons.* + def compareTo(that: List[T]): Int = ($this, that) match { + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs, y :: ys) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs.compareTo(ys) + } + } + } + + implicit def listOrd[T](implicit ev: Ord.Impl[T]): ListOrd[T] = + new ListOrd[T] + + def min[T](x: T, y: T)(implicit ev: Ord.Impl[T]): T = + if (x < y) x else y + + def inf[T](xs: List[T])(implicit ev: Ord.Impl[T]): T = { + val smallest = Ord.impl[T].minimum + xs.foldLeft(smallest)(min) + } + + inf(List[Int]()) + inf(List(List(1, 2), List(1, 2, 3))) + inf(List(List(List(1), List(2)), List(List(1), List(2), List(3)))) +} + +/** 3. 
Higher-kinded type classes + + trait Functor[A] extends TypeClass1 { + def map[B](f: A => B): This[B] + } + common { + def pure[A](x: A): This[A] + } + + // Generically, `pure[A]{.map(f)}^n` + def develop[A, F[X] : Functor[X]](n: Int, f: A => A): F[A] = + if (n == 0) Functor.impl[F].pure[A] + else develop[A, F](n - 1, f).map(f) + + trait Monad[A] extends Functor[A] { + def flatMap[B](f: A => This[B]): This[B] + def map[B](f: A => B) = this.flatMap(f.andThen(pure)) + } + + extension ListMonad[T] for List[T] : Monad[T] { + static def pure[A] = Nil + + def flatMap[B](f: A => List[B]): List[B] = this match { + case x :: xs => f(x) ++ xs.flatMap(f) + case Nil => Nil + } + } + + extension MonadFlatten[T[X]: Monad[X]] for T[T[A]] { + def flatten: T[A] = this.flatMap(identity) + } +*/ +object runtime1 { + + trait TypeClass1 { + val commons: TypeClassCommon1 + type This = [X] =>> commons.This[X] + } + + trait TypeClassCommon1 { self => + type This[X] + type Instance[X] <: TypeClass1 + def inject[A](x: This[A]): Instance[A] { val commons: self.type } + } + + trait TypeClassCompanion1 { + type Impl[T[_]] <: TypeClassCommon1 { type This = [X] =>> T[X] } + def impl[T[_]](implicit ev: Impl[T]): Impl[T] = ev + } + + implicit def inject1[A, From[_]](x: From[A]) + (implicit ev: TypeClassCommon1 { + type This = [X] =>> From[X] + }): ev.Instance[A] { type This = [X] =>> From[X] } = + ev.inject(x) +} +import runtime1.* + +object functors { + + trait Functor[A] extends TypeClass1 { + val commons: FunctorCommon + import commons.* + def map[B](f: A => B): This[B] + } + trait FunctorCommon extends TypeClassCommon1 { + type Instance[X] <: Functor[X] + def pure[A](x: A): This[A] + } + object Functor extends TypeClassCompanion1 { + type Impl[T[_]] = FunctorCommon { type This = [X] =>> T[X] } + } + + trait Monad[A] extends Functor[A] { + val commons: MonadCommon + import commons.* + def flatMap[B](f: A => This[B]): This[B] + def map[B](f: A => B) = this.flatMap(f.andThen(commons.pure)) + } + 
trait MonadCommon extends FunctorCommon { + type Instance[X] <: Monad[X] + } + object Monad extends TypeClassCompanion1 { + type Impl[T[_]] = MonadCommon { type This = [X] =>> T[X] } + } + + def develop[A, F[X]](n: Int, x: A, f: A => A)(implicit ev: Functor.Impl[F]): F[A] = + if (n == 0) Functor.impl[F].pure(x) + else develop(n - 1, x, f).map(f).asInstanceOf + + implicit object ListMonad extends MonadCommon { + type This[+X] = List[X] + type Instance[X] = Monad[X] + def pure[A](x: A) = x :: Nil + def inject[A]($this: List[A]) = new Monad[A] { + val commons: ListMonad.this.type = ListMonad + import commons.* + def flatMap[B](f: A => List[B]): List[B] = $this.flatMap(f) + } + } + + object MonadFlatten { + def flattened[T[_], A]($this: T[T[A]])(implicit ev: Monad.Impl[T]): T[A] = + ??? // $this.flatMap[A](identity) disabled since it does not typecheck + } + + MonadFlatten.flattened(List(List(1, 2, 3), List(4, 5))) // ok, synthesizes (using ListMonad) + MonadFlatten.flattened(List(List(1, 2, 3), List(4, 5)))(using ListMonad) // error +} \ No newline at end of file diff --git a/tests/neg/validate-parsing-2.scala b/tests/neg/validate-parsing-2.scala new file mode 100644 index 000000000000..7457c649bad0 --- /dev/null +++ b/tests/neg/validate-parsing-2.scala @@ -0,0 +1 @@ +case class ByName(x: => Int) // error diff --git a/tests/neg/validate-parsing.scala b/tests/neg/validate-parsing.scala index d0eee526ae90..2b416c6eab27 100644 --- a/tests/neg/validate-parsing.scala +++ b/tests/neg/validate-parsing.scala @@ -10,4 +10,3 @@ class C () { } class D override() // error: ';' expected but 'override' found. 
-case class ByName(x: => Int) // error: `val' parameters may not be call-by-name diff --git a/tests/neg/varargs-annot-2.scala b/tests/neg/varargs-annot-2.scala new file mode 100644 index 000000000000..7b0fce1124ea --- /dev/null +++ b/tests/neg/varargs-annot-2.scala @@ -0,0 +1,9 @@ +import annotation.varargs + +trait C { + @varargs def v(i: Int*) = () +} + +class D extends C { // error: name clash between defined and inherited member + def v(i: Array[Int]) = () +} \ No newline at end of file diff --git a/tests/neg/varargs-annot.scala b/tests/neg/varargs-annot.scala index 490d2ab93695..d24b9d350d78 100644 --- a/tests/neg/varargs-annot.scala +++ b/tests/neg/varargs-annot.scala @@ -17,7 +17,7 @@ object Test { class D extends C { override def v(i: Int*) = () // error - def v(i: Array[Int]) = () // error + def v(i: Array[Int]) = () // ok, reported when used alone (see varargs-annot-2.scala) } @varargs def nov(a: Int) = 0 // error: A method without repeated parameters cannot be annotated with @varargs diff --git a/tests/neg/zipped.scala b/tests/neg/zipped.scala index c7d55da6000a..7f8909b605f3 100644 --- a/tests/neg/zipped.scala +++ b/tests/neg/zipped.scala @@ -1,38 +1,9 @@ -// This test shows some un-intuitive behavior of the `zipped` method. object Test { val xs: List[Int] = ??? - // 1. This works, since withFilter is not defined on Tuple3zipped. Instead, - // an implicit conversion from Tuple3zipped to Traversable[(Int, Int, Int)] is inserted. - // The subsequent map operation has the right type for this Traversable. xs.lazyZip(xs).lazyZip(xs) - .withFilter( (x: (Int, Int, Int)) => x match { case (x, y, z) => true } ) // OK - .map( (x: (Int, Int, Int)) => x match { case (x, y, z) => x + y + z }) // OK + .map( (x: (Int, Int, Int)) => x match { case (x, y, z) => x + y + z }) // ok - - // 2. This works as well, because of auto untupling i.e. `case` is inserted. - // But it does not work in Scala2. 
- xs.lazyZip(xs).lazyZip(xs) - .withFilter( (x: (Int, Int, Int)) => x match { case (x, y, z) => true } ) // OK - .map( (x: Int, y: Int, z: Int) => x + y + z ) // OK - // works, because of auto untupling i.e. `case` is inserted - // does not work in Scala2 - - // 3. Now, without withFilter, it's the opposite, we need the 3 parameter map. - xs.lazyZip(xs).lazyZip(xs) - .map( (x: Int, y: Int, z: Int) => x + y + z ) // OK - - // 4. The single parameter map does not work. - xs.lazyZip(xs).lazyZip(xs) - .map( (x: (Int, Int, Int)) => x match { case (x, y, z) => x + y + z }) // error - - // 5. If we leave out the parameter type, we get a "Wrong number of parameters" error instead xs.lazyZip(xs).lazyZip(xs) .map( x => x match { case (x, y, z) => x + y + z }) // error - - // This means that the following works in Dotty in normal mode, since a `withFilter` - // is inserted. But it does no work under -strict. And it will not work in Scala 3.1. - // The reason is that without -strict, the code below is mapped to (1), but with -strict - // it is mapped to (5). 
- for ((x, y, z) <- xs.lazyZip(xs).lazyZip(xs)) yield x + y + z } \ No newline at end of file diff --git a/tests/patmat/andtype-opentype-interaction.check b/tests/patmat/andtype-opentype-interaction.check index a9d8618adad0..b8f9fbc10ac9 100644 --- a/tests/patmat/andtype-opentype-interaction.check +++ b/tests/patmat/andtype-opentype-interaction.check @@ -1,5 +1,5 @@ -23: Pattern Match Exhaustivity: _: Trait & OpenTrait, _: Clazz & OpenTrait, _: AbstractClass & OpenTrait, _: SealedClass & OpenTrait -27: Pattern Match Exhaustivity: _: Trait & OpenTrait & OpenTrait2, _: Clazz & OpenTrait & OpenTrait2, _: AbstractClass & OpenTrait & OpenTrait2, _: SealedClass & OpenTrait & OpenTrait2 +23: Pattern Match Exhaustivity: _: Trait & OpenTrait, _: Clazz & OpenTrait, _: AbstractClass & OpenTrait +27: Pattern Match Exhaustivity: _: Trait & OpenTrait & OpenTrait2, _: Clazz & OpenTrait & OpenTrait2, _: AbstractClass & OpenTrait & OpenTrait2 31: Pattern Match Exhaustivity: _: Trait & OpenClass 35: Pattern Match Exhaustivity: _: Trait & OpenTrait & OpenClass 43: Pattern Match Exhaustivity: _: Trait & OpenAbstractClass diff --git a/tests/patmat/boxing.scala b/tests/patmat/boxing.scala new file mode 100644 index 000000000000..d148e23deca1 --- /dev/null +++ b/tests/patmat/boxing.scala @@ -0,0 +1,18 @@ +class C { + def matchUnboxed(i: Integer) = i match { + case null => 0 + case 1 => 1 + case _ => 9 + } + + def matchBoxed(i: Int) = i match { + case C.ZERO => 0 + case C.ONE => 1 + case _ => 9 + } +} + +object C { + final val ZERO: Integer = 0 + final val ONE: Integer = 1 +} diff --git a/tests/patmat/dotty-trees.scala b/tests/patmat/dotty-trees.scala new file mode 100644 index 000000000000..7fb1a86879b2 --- /dev/null +++ b/tests/patmat/dotty-trees.scala @@ -0,0 +1,11 @@ +abstract class Tree[-T >: Null] + +case class TypeTree[-T >: Null]() extends Tree[T] + +abstract class DerivedTypeTree() extends TypeTree[Null] + +def foo(tree: Tree[Null]): Unit = + tree match + case _: DerivedTypeTree 
=> + case TypeTree() => + case _ => \ No newline at end of file diff --git a/tests/patmat/exhausting.check b/tests/patmat/exhausting.check index ff3536046ce5..cb1662883aa1 100644 --- a/tests/patmat/exhausting.check +++ b/tests/patmat/exhausting.check @@ -3,4 +3,4 @@ 32: Pattern Match Exhaustivity: List(_, _*) 39: Pattern Match Exhaustivity: Bar3 44: Pattern Match Exhaustivity: (Bar2, Bar2) -50: Pattern Match Exhaustivity: (Bar2, Bar2) +49: Pattern Match Exhaustivity: (Bar2, Bar2) diff --git a/tests/patmat/exhausting.scala b/tests/patmat/exhausting.scala index 640c9e88b100..9f17fae9def5 100644 --- a/tests/patmat/exhausting.scala +++ b/tests/patmat/exhausting.scala @@ -42,7 +42,6 @@ object Test { } // fails for: (Bar2, Bar2) def fail4[T <: AnyRef](xx: (Foo[T], Foo[T])) = xx match { - case (Bar1, Bar1) => () case (Bar2, Bar3) => () case (Bar3, _) => () } diff --git a/tests/patmat/i10174b.check b/tests/patmat/i10174b.check index a8e6f96b4bac..582ce090b23d 100644 --- a/tests/patmat/i10174b.check +++ b/tests/patmat/i10174b.check @@ -1,2 +1,2 @@ -2: Pattern Match Exhaustivity: _: Int -58: Match case Unreachable +3: Pattern Match Exhaustivity: _: Int +59: Match case Unreachable diff --git a/tests/patmat/i10174b.scala b/tests/patmat/i10174b.scala index f227f68c909b..875e19e8f037 100644 --- a/tests/patmat/i10174b.scala +++ b/tests/patmat/i10174b.scala @@ -1,3 +1,4 @@ +// scalac: -Ycheck-all-patmat def foo(x: Int): Unit = x match { case 1 => @@ -1001,4 +1002,4 @@ def foo(x: Int): Unit = case 998 => case 999 => case 1000 => - } \ No newline at end of file + } diff --git a/tests/patmat/i10667.scala b/tests/patmat/i10667.scala new file mode 100644 index 000000000000..fde5021929fc --- /dev/null +++ b/tests/patmat/i10667.scala @@ -0,0 +1,15 @@ +sealed trait A + +enum Nums { + case One + case Two extends Nums with A + case Three +} + +object Test { + val list = List[Nums & A](Nums.Two) + + list.map { + case Nums.Two => () + } +} diff --git a/tests/patmat/i12241.check 
b/tests/patmat/i12241.check new file mode 100644 index 000000000000..1ad41cfa2ac1 --- /dev/null +++ b/tests/patmat/i12241.check @@ -0,0 +1 @@ +54: Pattern Match Exhaustivity: (EndpointInput.Pair(), EndpointInput.MappedPair()), (EndpointInput.Pair(), EndpointInput.Pair2()), (EndpointInput.Pair(), EndpointInput.MappedPair2()), (EndpointInput.Pair(), EndpointInput.FixedMethod()), (EndpointInput.Pair(), EndpointInput.FixedPath()), (EndpointInput.Pair(), EndpointInput.PathCapture()) diff --git a/tests/patmat/i12241.scala b/tests/patmat/i12241.scala new file mode 100644 index 000000000000..4f61027e2f65 --- /dev/null +++ b/tests/patmat/i12241.scala @@ -0,0 +1,75 @@ +sealed trait EndpointInput[T] + +object EndpointInput { + case class Pair[T]() extends EndpointInput[T] + case class MappedPair[T]() extends EndpointInput[T] + case class Pair2[T]() extends EndpointInput[T] + case class MappedPair2[T]() extends EndpointInput[T] + case class FixedMethod[T]() extends EndpointInput[T] + case class FixedPath[T]() extends EndpointInput[T] + case class PathCapture[T]() extends EndpointInput[T] + case class PathsCapture[T]() extends EndpointInput[T] + case class Query[T]() extends EndpointInput[T] + case class QueryParams[T]() extends EndpointInput[T] + case class Cookie[T]() extends EndpointInput[T] + case class ExtractFromRequest[T]() extends EndpointInput[T] + case class ApiKey[T]() extends EndpointInput[T] + case class Http[T]() extends EndpointInput[T] + case class Body[R, T]() extends EndpointInput[T] + case class FixedHeader[T]() extends EndpointInput[T] + case class Header[T]() extends EndpointInput[T] + case class Headers[T]() extends EndpointInput[T] + case class StatusCode[T]() extends EndpointInput[T] + case class Empty[T]() extends EndpointInput[T] +} + +object Test extends App { + import EndpointInput._ + + def compare(left: EndpointInput[_], right: EndpointInput[_]): Boolean = + (left, right) match { + case (Pair(), Pair()) => true + case (MappedPair(), MappedPair()) 
=> true + case (Pair2(), Pair2()) => true + case (MappedPair2(), MappedPair2()) => true + case (FixedMethod(), FixedMethod()) => true + case (FixedPath(), FixedPath()) => true + case (PathCapture(), PathCapture()) => true + case (PathsCapture(), PathsCapture()) => true + case (Query(), Query()) => true + case (QueryParams(), QueryParams()) => true + case (Cookie(), Cookie()) => true + case (ExtractFromRequest(), ExtractFromRequest()) => true + case (ApiKey(), ApiKey()) => true + case (Http(), Http()) => true + case (Body(), Body()) => true + case (FixedHeader(), FixedHeader()) => true + case (Header(), Header()) => true + case (Headers(), Headers()) => true + case (StatusCode(), StatusCode()) => true + case (_, _) => false + } + + def compare2(left: EndpointInput[_], right: EndpointInput[_]): Boolean = + (left, right) match { + case (Pair(), Pair()) => true + case (MappedPair(), MappedPair()) => true + case (Pair2(), Pair2()) => true + case (MappedPair2(), MappedPair2()) => true + case (FixedMethod(), FixedMethod()) => true + case (FixedPath(), FixedPath()) => true + case (PathCapture(), PathCapture()) => true + case (PathsCapture(), PathsCapture()) => true + case (Query(), Query()) => true + case (QueryParams(), QueryParams()) => true + case (Cookie(), Cookie()) => true + case (ExtractFromRequest(), ExtractFromRequest()) => true + case (ApiKey(), ApiKey()) => true + case (Http(), Http()) => true + case (Body(), Body()) => true + case (FixedHeader(), FixedHeader()) => true + case (Header(), Header()) => true + case (Headers(), Headers()) => true + case (StatusCode(), StatusCode()) => true + } +} diff --git a/tests/patmat/i12279.scala b/tests/patmat/i12279.scala new file mode 100644 index 000000000000..5ba517757861 --- /dev/null +++ b/tests/patmat/i12279.scala @@ -0,0 +1,9 @@ +import scala.reflect.Typeable + +def unionTypeTest[T: Typeable](m: Int|T) = + m match + case x: Int => println("Got Int") + case t: T => println("Got T") + +@main def run = + unionTypeTest(()) 
diff --git a/tests/patmat/i12337.check b/tests/patmat/i12337.check new file mode 100644 index 000000000000..2b314017a6dd --- /dev/null +++ b/tests/patmat/i12337.check @@ -0,0 +1,2 @@ +8: Pattern Match Exhaustivity: Foo(Inactive) +17: Pattern Match Exhaustivity: Foo(Status.Active(_)) diff --git a/tests/patmat/i12337.scala b/tests/patmat/i12337.scala new file mode 100644 index 000000000000..efa3e04168d6 --- /dev/null +++ b/tests/patmat/i12337.scala @@ -0,0 +1,26 @@ +sealed trait Status +object Status { + case class Active(since: Int) extends Status + case object Inactive extends Status +} + +case class Foo(status: Status) +def bar(foo: Foo): Unit = foo match { + case Foo(Status.Active(since)) => + println(s"active since $since") +} +// Expected: +// warning: match may not be exhaustive. +// It would fail on the following input: Foo(Inactive) +// def bar(foo: Foo): Unit = foo match { + +def baz(foo: Foo): Unit = foo match { + case Foo(Status.Active(2000)) => + println("active since 2000") + case Foo(Status.Inactive) => + println("inactive") +} +// Expected: +// warning: match may not be exhaustive. 
+// It would fail on the following input: Foo(Active((x: Int forSome x not in 2000))) +// def baz(foo: Foo): Unit = foo match { \ No newline at end of file diff --git a/tests/patmat/i12358.check b/tests/patmat/i12358.check new file mode 100644 index 000000000000..0ffca060173b --- /dev/null +++ b/tests/patmat/i12358.check @@ -0,0 +1 @@ +3: Pattern Match Exhaustivity: List(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _*), List(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _), List(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _), List(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _), List(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _), List(_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _) diff --git a/tests/patmat/i12358.scala b/tests/patmat/i12358.scala new file mode 100644 index 000000000000..9b8a865ae000 --- /dev/null +++ b/tests/patmat/i12358.scala @@ -0,0 +1,4 @@ + +def foo(x: List[Int]): Unit = + x match + case x1 :: x2 :: x3 :: x4 :: x5 :: x6 :: x7 :: x8 :: x9 :: x10 :: x11 :: x12 :: x13 :: x14 :: x15 :: x16 :: x17 :: x18 :: x19 :: x20 :: x21 :: x22 :: Nil => \ No newline at end of file diff --git a/tests/patmat/i12408.check b/tests/patmat/i12408.check new file mode 100644 index 000000000000..60acc2cba84e --- /dev/null +++ b/tests/patmat/i12408.check @@ -0,0 +1,2 @@ +13: Pattern Match Exhaustivity: A(_), C(_) +21: Pattern Match diff --git a/tests/patmat/i12408.scala b/tests/patmat/i12408.scala new file mode 100644 index 000000000000..442e723cb195 --- /dev/null +++ b/tests/patmat/i12408.scala @@ -0,0 +1,24 @@ +class X[T] { + sealed trait P + + case class A(id: Long) extends P + case class B(id: Long, x: Long) extends P + case class C(id: Long) extends P + + def m(p: P): Unit = p match { + case B(_, x) => + case _ => + } + + def n(p: P): Unit = p match { + case B(_, x) => + } + + def o(p: P): Unit = p match { + case A(_) => + case B(_, x) => + case C(_) => + case _ => + } + +} diff --git 
a/tests/patmat/i12475.scala b/tests/patmat/i12475.scala new file mode 100644 index 000000000000..11ff389df318 --- /dev/null +++ b/tests/patmat/i12475.scala @@ -0,0 +1,20 @@ +sealed trait Ty { + type T +} + +class TUnit() extends Ty { + type T = Unit +} + +case object TUnit extends TUnit() + +final case class TFun(dom: Ty, cod: Ty) extends Ty { + type T = dom.T => cod.T +} + +def default(ty: Ty): ty.T = (ty: ty.type & Ty) match { + case a: (ty.type & TUnit) => (): a.T + case a: (ty.type & TFun) => + val f = { (x: a.dom.T) => default(a.cod) } + f: a.T +} diff --git a/tests/patmat/i12475b.scala b/tests/patmat/i12475b.scala new file mode 100644 index 000000000000..799d95c0477b --- /dev/null +++ b/tests/patmat/i12475b.scala @@ -0,0 +1,14 @@ +trait SomeRestriction + +enum ADT { + case A + case B extends ADT with SomeRestriction +} + +object MinimalExample { + val b: ADT & SomeRestriction = ADT.B + + b match { + case ADT.B => ??? + } +} diff --git a/tests/patmat/i12485.scala b/tests/patmat/i12485.scala new file mode 100644 index 000000000000..66dc0a7b6bb8 --- /dev/null +++ b/tests/patmat/i12485.scala @@ -0,0 +1,4 @@ +case class A(a: A) + +def foo(x: A) = x match + case A(a) => \ No newline at end of file diff --git a/tests/patmat/i12530.check b/tests/patmat/i12530.check new file mode 100644 index 000000000000..636347516cbc --- /dev/null +++ b/tests/patmat/i12530.check @@ -0,0 +1,2 @@ +6: Match case Unreachable +14: Match case Unreachable diff --git a/tests/patmat/i12530.scala b/tests/patmat/i12530.scala new file mode 100644 index 000000000000..acba5e9ebafa --- /dev/null +++ b/tests/patmat/i12530.scala @@ -0,0 +1,16 @@ +object Test { + def foo(a: Boolean, b: Boolean): Unit = + (a, b) match { + case (false, _) => + case (true, _) => + case (_, false) => // error: unreachable + } + + def bar(a: Option[Boolean], b: Boolean): Unit = + (a, b) match { + case (Some(false), _) => + case (Some(true), _) => + case (None, _) => + case (_, false) => // reachable: null + } +} diff 
--git a/tests/patmat/i12546.scala b/tests/patmat/i12546.scala new file mode 100644 index 000000000000..23cbfaa77c06 --- /dev/null +++ b/tests/patmat/i12546.scala @@ -0,0 +1,14 @@ +trait SomeRestriction + +enum ADT { + case A extends ADT + case B extends ADT with SomeRestriction +} + +object MinimalExample { + val b: ADT & SomeRestriction = ADT.B + + b match { + case ADT.B => ??? + } +} diff --git a/tests/patmat/i12559.check b/tests/patmat/i12559.check new file mode 100644 index 000000000000..34be8d4104f9 --- /dev/null +++ b/tests/patmat/i12559.check @@ -0,0 +1,2 @@ +10: Match case Unreachable +27: Match case Unreachable diff --git a/tests/patmat/i12559.scala b/tests/patmat/i12559.scala new file mode 100644 index 000000000000..1f043d21e5bd --- /dev/null +++ b/tests/patmat/i12559.scala @@ -0,0 +1,35 @@ +package akka.event + +object TestA: + sealed trait LogEvent + + object LogEvent: + def myOrdinal(e: LogEvent): Int = e match + case e: Error => 0 + // case e: Warning => 1 + case e: LogEventWithMarker => 2 + + + class Error() extends LogEvent + class Error2() extends Error() with LogEventWithMarker + + // case class Warning() extends LogEvent + + sealed trait LogEventWithMarker extends LogEvent + +object TestB: + sealed trait LogEvent + + object LogEvent: + def myOrdinal(e: LogEvent): Int = e match + case e: Error => 0 + case e: Warning => 1 + case e: LogEventWithMarker => 2 + + + case class Error() extends LogEvent + class Error2() extends Error() with LogEventWithMarker + + case class Warning() extends LogEvent + + sealed trait LogEventWithMarker extends LogEvent \ No newline at end of file diff --git a/tests/patmat/i12602.scala b/tests/patmat/i12602.scala new file mode 100644 index 000000000000..3dac513a930c --- /dev/null +++ b/tests/patmat/i12602.scala @@ -0,0 +1,2 @@ +sealed class Foo[T] +object Foo extends Foo[Nothing] diff --git a/tests/patmat/i12681.scala b/tests/patmat/i12681.scala new file mode 100644 index 000000000000..b367e1563611 --- /dev/null +++ 
b/tests/patmat/i12681.scala @@ -0,0 +1,20 @@ +object Examples { + + case class Leaf1() extends i.Root + case class Leaf2() extends i.Branch + + val i = new Inner() + + class Inner { + + sealed trait Root + sealed trait Branch extends Root + + // simulate ordinal method of a Mirror.SumOf generated at this call site + def myOrdinal(r: Root): Int = r match { + case _: Examples.Leaf1 => 0 + case _: Inner.this.Branch => 1 + } + } + +} \ No newline at end of file diff --git a/tests/patmat/i12805-fallout.scala b/tests/patmat/i12805-fallout.scala new file mode 100644 index 000000000000..b598b36159ea --- /dev/null +++ b/tests/patmat/i12805-fallout.scala @@ -0,0 +1,30 @@ +import scala.annotation.unchecked.uncheckedVariance + +type Untyped = Null + +class Type + +abstract class Tree[-T >: Untyped] { + type ThisTree[T >: Untyped] <: Tree[T] + + protected var myTpe: T @uncheckedVariance = _ + + def withType(tpe: Type): ThisTree[Type] = { + val tree = this.asInstanceOf[ThisTree[Type]] + tree.myTpe = tpe + tree + } +} + +case class Ident[-T >: Untyped]() extends Tree[T] +case class DefDef[-T >: Untyped]() extends Tree[T] +case class Inlined[-T >: Untyped]() extends Tree[T] +case class CaseDef[-T >: Untyped]() extends Tree[T] + +def test[T >: Untyped](tree: Tree[T], tp: Type) = tree.withType(tp) match { + case Ident() => 1 + case DefDef() => 2 + case _: Inlined[_] => 3 + case CaseDef() => 4 + case _ => 5 +} diff --git a/tests/patmat/i12805.check b/tests/patmat/i12805.check new file mode 100644 index 000000000000..e855c765d801 --- /dev/null +++ b/tests/patmat/i12805.check @@ -0,0 +1,3 @@ +10: Match case Unreachable +16: Match case Unreachable +22: Match case Unreachable diff --git a/tests/patmat/i12805.scala b/tests/patmat/i12805.scala new file mode 100644 index 000000000000..78240c2f9703 --- /dev/null +++ b/tests/patmat/i12805.scala @@ -0,0 +1,22 @@ +import scala.language.implicitConversions + +type Timeframe = "1m" | "2m" | "1H" +type TimeframeN = 1 | 2 | 60 + +def 
manualConvertToN(tf: Timeframe): TimeframeN = tf match + case "1m" => 1 + case "2m" => 2 + case "1H" => 60 + case "4H" => ??? // was: no reachability warning + +given Conversion[Timeframe, TimeframeN] = + case "1m" => 1 + case "2m" => 2 + case "1H" => 60 + case "4H" => ??? // was: no reachability warning + +given Conversion[TimeframeN, Timeframe] = + case 1 => "1m" + case 2 => "2m" + case 60 => "1H" + case 240 => ??? // was: no reachability warning diff --git a/tests/patmat/i12805b.check b/tests/patmat/i12805b.check new file mode 100644 index 000000000000..62ba8523f65e --- /dev/null +++ b/tests/patmat/i12805b.check @@ -0,0 +1,3 @@ +4: Match case Unreachable +9: Match case Unreachable +14: Match case Unreachable diff --git a/tests/patmat/i12805b.scala b/tests/patmat/i12805b.scala new file mode 100644 index 000000000000..cbff6ae0d2ca --- /dev/null +++ b/tests/patmat/i12805b.scala @@ -0,0 +1,14 @@ +def test1(a: 1 | 2) = a match + case 1 => true + case 2 => false + case 4 => ??? // unreachable case, was: no warning + +def test2(a: 1 | 2) = a match + case 1 => true + case 2 => false + case _ => ??? // unreachable + +def test3(a: 1 | 2) = a match + case 1 => true + case 2 => false + case a if a < 0 => ??? 
// unreachable diff --git a/tests/patmat/i12808.scala b/tests/patmat/i12808.scala new file mode 100644 index 000000000000..b0c5aad78a64 --- /dev/null +++ b/tests/patmat/i12808.scala @@ -0,0 +1,10 @@ +import scala.reflect.Typeable + +case class Err1() +case class Err2() + +def handleError[A: Typeable, B: Typeable](x: Either[A | B, Nothing]): Unit = + x match // false alarm warning: It would fail on pattern case: Left(_) + case Left(e: A) => println("A") + case Left(_: B) => println("B") + case Right(_) => println("Nothing") diff --git a/tests/patmat/i13003.check b/tests/patmat/i13003.check new file mode 100644 index 000000000000..19fb6a466549 --- /dev/null +++ b/tests/patmat/i13003.check @@ -0,0 +1,4 @@ +4: Pattern Match Exhaustivity: One(Two(None)) +7: Pattern Match Exhaustivity: Two(None) +10: Pattern Match Exhaustivity: None, Some(None) +13: Pattern Match Exhaustivity: None, Some(None), Some(Some(None)) diff --git a/tests/patmat/i13003.scala b/tests/patmat/i13003.scala new file mode 100644 index 000000000000..af9158e09c5e --- /dev/null +++ b/tests/patmat/i13003.scala @@ -0,0 +1,14 @@ +case class One(two: Two) +case class Two(o: Option[Int]) + +def matchOneTwo(one: One) = one match + case One(Two(Some(i))) => "match!" + +def matchTwo(two: Two) = two match + case Two(Some(i)) => "match!" + +def matchOO(oo: Option[Option[Int]]) = oo match + case Some(Some(i)) => "match!" + +def matchOOO(ooo: Option[Option[Option[Int]]]) = ooo match + case Some(Some(Some(i))) => "match!" 
diff --git a/tests/patmat/i13110.scala b/tests/patmat/i13110.scala new file mode 100644 index 000000000000..e5a143a948a9 --- /dev/null +++ b/tests/patmat/i13110.scala @@ -0,0 +1,12 @@ +object Test { + sealed trait Base + class Blub extends Base + object Blub { + def unapply(blub: Blub): Some[(Int, blub.type)] = + Some(1 -> blub) + } + + (null: Base) match { + case Blub(i, x) => println(i) + } +} diff --git a/tests/patmat/i13189.scala b/tests/patmat/i13189.scala new file mode 100644 index 000000000000..f633cbf9a514 --- /dev/null +++ b/tests/patmat/i13189.scala @@ -0,0 +1,89 @@ +// original report +def foo(opt: Option[Tuple.Tail[NonEmptyTuple]]): Unit = + opt match + case None => ??? + case Some(a) => ??? + + +// again with a mini-Tuple with the extra NonEmptyTupExtra parent, to test transitivity +object WithExtraParent: + sealed trait Tup + + object Tup { + type Tail[X <: NonEmptyTup] <: Tup = X match { + case _ **: xs => xs + } + } + + object EmptyTup extends Tup + + sealed trait NonEmptyTup extends Tup + sealed trait NonEmptyTupExtra extends NonEmptyTup + sealed abstract class **:[+H, +T <: Tup] extends NonEmptyTupExtra + + object **: { + def unapply[H, T <: Tup](x: H **: T): (H, T) = null + } + + def foo(opt: Option[Tup.Tail[NonEmptyTup]]): Unit = + opt match + case None => ??? + case Some(a) => ??? +end WithExtraParent + + +// again with a non-abstract parent +object WithNonAbstractParent: + sealed trait Tup + + object Tup { + type Tail[X <: NonEmptyTup] <: Tup = X match { + case _ **: xs => xs + } + } + + object EmptyTup extends Tup + + sealed class NonEmptyTup extends Tup + sealed class **:[+H, +T <: Tup] extends NonEmptyTup + + object **: { + def unapply[H, T <: Tup](x: H **: T): (H, T) = null + } + + def foo(opt: Option[Tup.Tail[NonEmptyTup]]): Unit = + opt match + case None => ??? + case Some(a) => ??? 
+end WithNonAbstractParent + + +// again with multiple children, but an exhaustive match +object WithExhaustiveMatch: + sealed trait Tup + + object Tup { + type Tail[X <: NonEmptyTup] <: Tup = X match { + case _ **: xs => xs + case _ *+: xs => xs + } + } + + object EmptyTup extends Tup + + sealed trait NonEmptyTup extends Tup + sealed abstract class **:[+H, +T <: Tup] extends NonEmptyTup + sealed abstract class *+:[+H, +T <: Tup] extends NonEmptyTup + + object **: { + def unapply[H, T <: Tup](x: H **: T): (H, T) = null + } + object *+: { + def unapply[H, T <: Tup](x: H *+: T): (H, T) = null + } + + def foo(opt: Option[Tup.Tail[NonEmptyTup]]): Unit = + opt match + case None => ??? + case Some(a) => ??? +end WithExhaustiveMatch diff --git a/tests/patmat/i13342-testing.check b/tests/patmat/i13342-testing.check new file mode 100644 index 000000000000..5a1a1e80447f --- /dev/null +++ b/tests/patmat/i13342-testing.check @@ -0,0 +1,5 @@ +13: Match case Unreachable +13: Match case Unreachable +14: Match case Unreachable +14: Match case Unreachable +15: Pattern Match Exhaustivity: Thu, Fri diff --git a/tests/patmat/i13342-testing.scala b/tests/patmat/i13342-testing.scala new file mode 100644 index 000000000000..8ea22d79917f --- /dev/null +++ b/tests/patmat/i13342-testing.scala @@ -0,0 +1,18 @@ +class C { + val bool: true = true + val not1: None.type = None + + def t1 = true match { case true => "inline true" } + def t2 = bool match { case true => "valdef true" } + def t3 = None match { case None => "inline None" } + def t4 = not1 match { case None => "valdef None" } + + val monday: Day.Mon.type = Day.Mon + val someday: Day = Day.Mon + + def t5 = Day.Mon match { case Day.Mon => 1 case Day.Tue => 2 case Day.Wed => 3 } + def t6 = monday match { case Day.Mon => 1 case Day.Tue => 2 case Day.Wed => 3 } + def t7 = someday match { case Day.Mon => 1 case Day.Tue => 2 case Day.Wed => 3 } +} + +enum Day { case Mon, Tue, Wed, Thu, Fri } diff --git a/tests/patmat/i13342.check 
b/tests/patmat/i13342.check new file mode 100644 index 000000000000..77e4f7cd4ed3 --- /dev/null +++ b/tests/patmat/i13342.check @@ -0,0 +1 @@ +8: Match case Unreachable diff --git a/tests/patmat/i13342.scala b/tests/patmat/i13342.scala new file mode 100644 index 000000000000..d706442dd692 --- /dev/null +++ b/tests/patmat/i13342.scala @@ -0,0 +1,10 @@ +class C { + def m(x: true) = x match { // was: match may not be exhaustive.\nIt would fail on pattern case: false + case true => println("the one true path") + } + + def n(x: true) = x match { + case true => 1 + case false => 2 // was: no reachability warning on this case + } +} diff --git a/tests/patmat/i13485.check b/tests/patmat/i13485.check new file mode 100644 index 000000000000..f9d066905a86 --- /dev/null +++ b/tests/patmat/i13485.check @@ -0,0 +1,2 @@ +11: Match case Unreachable +16: Match case Unreachable diff --git a/tests/patmat/i13485.scala b/tests/patmat/i13485.scala new file mode 100644 index 000000000000..72cc3a3d2cd3 --- /dev/null +++ b/tests/patmat/i13485.scala @@ -0,0 +1,16 @@ +// The intent of this test is test that changing the order of cases doesn't affect whether +// warnings, originally reachability warnings but exhaustivity warnings too, are emitted. +// To do so we need a case that typechecks but is statically assessed to be unreachable. +// How about... a type pattern on a sealed trait that the scrutinee type doesn't extend? 
+ +sealed trait Foo + +class Bar + +def test1(bar: Bar) = bar match + case _: Foo => 1 + case _: Bar => 2 + +def test2(bar: Bar) = bar match + case _: Bar => 2 + case _: Foo => 1 diff --git a/tests/patmat/i13737.check b/tests/patmat/i13737.check new file mode 100644 index 000000000000..87d5896f5a1a --- /dev/null +++ b/tests/patmat/i13737.check @@ -0,0 +1 @@ +14: Pattern Match Exhaustivity: _: Success diff --git a/tests/patmat/i13737.scala b/tests/patmat/i13737.scala new file mode 100644 index 000000000000..f415e9028334 --- /dev/null +++ b/tests/patmat/i13737.scala @@ -0,0 +1,15 @@ +sealed trait Result + +case class Success(result: String, next: Int) extends Result { + def isEmpty: Boolean = 10 % 2 == 1 + def get: String = result +} + +object Success { + def unapply(x: Success): Success = x +} + +def main = + val res: Result = ??? + res match // error + case Success(v) => v diff --git a/tests/patmat/i13931.scala b/tests/patmat/i13931.scala new file mode 100644 index 000000000000..0d8d9eb9dcd3 --- /dev/null +++ b/tests/patmat/i13931.scala @@ -0,0 +1,7 @@ +class Test: + def test = Vector() match + case Seq() => println("empty") + case _ => println("non-empty") + + def test2 = IndexedSeq() match { case IndexedSeq() => case _ => } + def test3 = IndexedSeq() match { case IndexedSeq(1) => case _ => } diff --git a/tests/patmat/i14102.min.scala b/tests/patmat/i14102.min.scala new file mode 100644 index 000000000000..fe92c7bc164a --- /dev/null +++ b/tests/patmat/i14102.min.scala @@ -0,0 +1,7 @@ +trait Foo[-X] +case class Bar(n: Int) extends Foo[Nothing] + +def test[X](foo: Foo[X]) = foo match + case Bar(1) => + case Bar(_) => + case _ => diff --git a/tests/patmat/i14102.scala b/tests/patmat/i14102.scala new file mode 100644 index 000000000000..75aa983710fb --- /dev/null +++ b/tests/patmat/i14102.scala @@ -0,0 +1,8 @@ +trait T[-X] +case class CC[-X](x: List[T[X]]) extends T[Nothing] +case class Id[-X](x: String) extends T[X] + +def f[X](tree: T[X]) = tree match + case 
CC(Id("hi") :: Nil) => ??? + case CC(refs) => ??? + case _ => ??? diff --git a/tests/patmat/i6197d.check b/tests/patmat/i6197d.check index 1f4a130c3f86..cb1b11d4b326 100644 --- a/tests/patmat/i6197d.check +++ b/tests/patmat/i6197d.check @@ -1 +1 @@ -5: Pattern Match Exhaustivity: _: Array[String] +6: Pattern Match Exhaustivity: _: Array[String] diff --git a/tests/patmat/i6197d.scala b/tests/patmat/i6197d.scala index 247a5f12020d..419c6e7ec2fa 100644 --- a/tests/patmat/i6197d.scala +++ b/tests/patmat/i6197d.scala @@ -1,7 +1,8 @@ +// scalac: -Ycheck-all-patmat def foo(x: Array[String]) = x match { case _: Array[_] => } def bar(x: Array[String]) = x match { case _: Array[_ <: Int] => -} \ No newline at end of file +} diff --git a/tests/patmat/i6255b.check b/tests/patmat/i6255b.check index 582d2e65bc51..265ec9fa9c4b 100644 --- a/tests/patmat/i6255b.check +++ b/tests/patmat/i6255b.check @@ -1 +1 @@ -2: Pattern Match Exhaustivity: _: Expr[Int] +3: Pattern Match Exhaustivity: _: Expr[Int] diff --git a/tests/patmat/i6255b.scala b/tests/patmat/i6255b.scala index a5c52c8f28dc..02fba4ad9100 100644 --- a/tests/patmat/i6255b.scala +++ b/tests/patmat/i6255b.scala @@ -1,3 +1,4 @@ +// scalac: -Ycheck-all-patmat class Foo { def foo(x: quoted.Expr[Int])(using scala.quoted.Quotes): Unit = x match { case '{ 1 } => diff --git a/tests/patmat/i8922c.check b/tests/patmat/i8922c.check index be8f69feb884..acd56ccd2986 100644 --- a/tests/patmat/i8922c.check +++ b/tests/patmat/i8922c.check @@ -1 +1 @@ -26: Pattern Match Exhaustivity: (true, _: String, _), (true, _: Double, _), (true, true, _), (true, false, _), (true, (), _), (false, _: String, _) +26: Pattern Match Exhaustivity: (_: Int, _: String, _), (_: Int, _: Int, BANG), (_: Int, _: Int, BANG_EQUAL), (_: Int, _: Int, EQUAL), (_: Int, _: Int, EQUAL_EQUAL), (_: Int, true, _) diff --git a/tests/patmat/irrefutable.check b/tests/patmat/irrefutable.check index 383a04115664..3d07c35a5502 100644 --- a/tests/patmat/irrefutable.check +++ 
b/tests/patmat/irrefutable.check @@ -1,2 +1,2 @@ 22: Pattern Match Exhaustivity: _: A, _: B, C(_, _) -65: Pattern Match Exhaustivity: ExM(_, _) +65: Pattern Match Exhaustivity: _: M diff --git a/tests/patmat/null.check b/tests/patmat/null.check index da081e6b56c0..d9c265adf377 100644 --- a/tests/patmat/null.check +++ b/tests/patmat/null.check @@ -1,4 +1,3 @@ -6: Match case Unreachable +6: Pattern Match 13: Pattern Match -18: Match case Unreachable 20: Pattern Match diff --git a/tests/patmat/patmat-indent.check b/tests/patmat/patmat-indent.check index 4f0ec4dd99df..7689dd60d8bf 100644 --- a/tests/patmat/patmat-indent.check +++ b/tests/patmat/patmat-indent.check @@ -1,3 +1,3 @@ -9: Pattern Match Exhaustivity: Nil -23: Pattern Match Exhaustivity: true, false -27: Pattern Match Exhaustivity: _: Int +10: Pattern Match Exhaustivity: Nil +24: Pattern Match Exhaustivity: true, false +28: Pattern Match Exhaustivity: _: Int diff --git a/tests/patmat/patmat-indent.scala b/tests/patmat/patmat-indent.scala index a2b18e7fb1f8..f62d704f8ca0 100644 --- a/tests/patmat/patmat-indent.scala +++ b/tests/patmat/patmat-indent.scala @@ -1,3 +1,4 @@ +// scalac: -Ycheck-all-patmat object Test { val Nil: scala.collection.immutable.Nil.type = scala.collection.immutable.Nil val X = 5 @@ -27,4 +28,4 @@ object Test { def foo3(x: Int) = x match { case X => 0 } -} \ No newline at end of file +} diff --git a/tests/patmat/t10019.check b/tests/patmat/t10019.check index 6648193ae4e8..4eb648ea2ae4 100644 --- a/tests/patmat/t10019.check +++ b/tests/patmat/t10019.check @@ -1,2 +1,2 @@ -2: Pattern Match Exhaustivity: (List(_, _, _*), List(_, _*)), (Nil, List(_, _*)), (List(_, _*), List(_, _, _*)), (List(_, _*), Nil) +2: Pattern Match Exhaustivity: (List(_, _, _*), _: List), (Nil, List(_, _*)), (_: List, List(_, _, _*)), (List(_, _*), Nil) 11: Pattern Match Exhaustivity: (Foo(None), Foo(_)) diff --git a/tests/patmat/t10502.check b/tests/patmat/t10502.check index b92e0d7cae8a..11be4c628b63 100644 --- 
a/tests/patmat/t10502.check +++ b/tests/patmat/t10502.check @@ -1,4 +1,4 @@ -5: Pattern Match Exhaustivity: Perhaps(None) -15: Pattern Match Exhaustivity: Nil -31: Pattern Match Exhaustivity: Multi(None, _) -44: Pattern Match Exhaustivity: Prod(None, _) +6: Pattern Match Exhaustivity: Perhaps(None) +16: Pattern Match Exhaustivity: Nil +32: Pattern Match Exhaustivity: Multi(None, _) +45: Pattern Match Exhaustivity: Prod(None, _) diff --git a/tests/patmat/t10502.scala b/tests/patmat/t10502.scala index adf6b4df49fa..71066fdb7ae8 100644 --- a/tests/patmat/t10502.scala +++ b/tests/patmat/t10502.scala @@ -1,3 +1,4 @@ +// scalac: -Ycheck-all-patmat object Perhaps { def unapply[A](oa: Option[A]): Some[Option[A]] = Some(oa) diff --git a/tests/patmat/t3163.check b/tests/patmat/t3163.check index 51c58f9e3fe8..15f00dbfcebe 100644 --- a/tests/patmat/t3163.check +++ b/tests/patmat/t3163.check @@ -1 +1 @@ -2: Pattern Match Exhaustivity: _: AnyVal +3: Pattern Match Exhaustivity: _: AnyVal diff --git a/tests/patmat/t3163.scala b/tests/patmat/t3163.scala index 2e0f2c1d9482..19aa6fe93ca8 100644 --- a/tests/patmat/t3163.scala +++ b/tests/patmat/t3163.scala @@ -1,3 +1,4 @@ +// scalac: -Ycheck-all-patmat object Test { def foo(x : AnyVal) = x match {case b : Boolean => "It's a bool"} -} \ No newline at end of file +} diff --git a/tests/patmat/t4526.check b/tests/patmat/t4526.check index ac3ef01bcc23..17e8200e9b1d 100644 --- a/tests/patmat/t4526.check +++ b/tests/patmat/t4526.check @@ -1,3 +1,3 @@ -2: Pattern Match Exhaustivity: _: Int -7: Pattern Match Exhaustivity: (_, _) -12: Pattern Match Exhaustivity: (true, true), (false, false) +3: Pattern Match Exhaustivity: _: Int +8: Pattern Match Exhaustivity: (_, _) +13: Pattern Match Exhaustivity: (true, true), (false, false) diff --git a/tests/patmat/t4526.scala b/tests/patmat/t4526.scala index d531c6b34304..016f3782b65b 100644 --- a/tests/patmat/t4526.scala +++ b/tests/patmat/t4526.scala @@ -1,3 +1,4 @@ +// scalac: -Ycheck-all-patmat object 
Test{ def foo(a: Int) = a match { case 5 => "Five!" @@ -13,4 +14,4 @@ object Test{ case (true, false) => "tf" case (false, true) => "ft" } -} \ No newline at end of file +} diff --git a/tests/patmat/t4661b.check b/tests/patmat/t4661b.check index 50b0c4da452e..d19d4c6db744 100644 --- a/tests/patmat/t4661b.check +++ b/tests/patmat/t4661b.check @@ -1,2 +1,2 @@ -10: Pattern Match Exhaustivity: _: c.Foo -13: Match case Unreachable +11: Pattern Match Exhaustivity: _: c.Foo +14: Match case Unreachable diff --git a/tests/patmat/t4661b.scala b/tests/patmat/t4661b.scala index ee95ff08ca07..f2329020cb66 100644 --- a/tests/patmat/t4661b.scala +++ b/tests/patmat/t4661b.scala @@ -1,3 +1,4 @@ +// scalac: -Ycheck-all-patmat class C { trait Foo class One extends Foo diff --git a/tests/patmat/t9351.check b/tests/patmat/t9351.check index 379adffd5a61..b5baf54469fc 100644 --- a/tests/patmat/t9351.check +++ b/tests/patmat/t9351.check @@ -1,3 +1,3 @@ -8: Pattern Match Exhaustivity: _: A -17: Pattern Match Exhaustivity: (_, _) -28: Pattern Match Exhaustivity: (_, _) +9: Pattern Match Exhaustivity: _: A +18: Pattern Match Exhaustivity: (_, _) +29: Pattern Match Exhaustivity: (_, _) diff --git a/tests/patmat/t9351.scala b/tests/patmat/t9351.scala index 9b9bd4312ae7..b43fa948ce87 100644 --- a/tests/patmat/t9351.scala +++ b/tests/patmat/t9351.scala @@ -1,3 +1,4 @@ +// scalac: -Ycheck-all-patmat trait A {} case object B extends A {} case object C extends A {} diff --git a/tests/patmat/t9809.check b/tests/patmat/t9809.check index ff3fb4c63f64..6baed9fb58f0 100644 --- a/tests/patmat/t9809.check +++ b/tests/patmat/t9809.check @@ -1,2 +1,2 @@ -3: Pattern Match Exhaustivity: (_, _) -7: Pattern Match Exhaustivity: (_, _) +4: Pattern Match Exhaustivity: (_, _) +8: Pattern Match Exhaustivity: (_, _) diff --git a/tests/patmat/t9809.scala b/tests/patmat/t9809.scala index 45d4946cdbe4..72ff23b11e51 100644 --- a/tests/patmat/t9809.scala +++ b/tests/patmat/t9809.scala @@ -1,3 +1,4 @@ +// scalac: 
-Ycheck-all-patmat object Example { val op1: (Any, Any) => Unit = { case (_, b: Int) => diff --git a/tests/pending/i12194.scala b/tests/pending/i12194.scala new file mode 100644 index 000000000000..29916c461549 --- /dev/null +++ b/tests/pending/i12194.scala @@ -0,0 +1,16 @@ +import scala.annotation.implicitNotFound +import scala.compiletime.package$package.summonAll +import scala.util.Try +import scala.util.Success +import scala.util.Failure +import scala.util.NotGiven +import scala.deriving.* + +def f(): Unit = + var t = (??? : Tuple1[ValueOf["foo"]]); t.toList.map(identity) + (??? : Tuple1[ValueOf["foo"]]).toList.map(identity) + +@main def Test(): Unit = + println(summonAll[Tuple.Map[("foo", "bar"), ValueOf]].toList.map{ + case str: ValueOf[_] ⇒ str.value + }) \ No newline at end of file diff --git a/tests/plugins/custom/analyzer/Analyzer_1.scala b/tests/plugins/custom/analyzer/Analyzer_1.scala index 1a42119f8878..0e1cc53290d0 100644 --- a/tests/plugins/custom/analyzer/Analyzer_1.scala +++ b/tests/plugins/custom/analyzer/Analyzer_1.scala @@ -1,6 +1,6 @@ -// Similar code resides in scripted tests, which only runs on nightly: +// Similar code resides in scripted tests: // -// sbt-dotty/sbt-test/sbt-dotty/analyzer-plugin/plugin +// sbt-test/analyzer-plugin/plugin // // You may want to change the code there too diff --git a/tests/pos-custom-args/erased/i7878.scala b/tests/pos-custom-args/erased/i7878.scala index 18005e81eab3..63b082d52ca0 100644 --- a/tests/pos-custom-args/erased/i7878.scala +++ b/tests/pos-custom-args/erased/i7878.scala @@ -2,7 +2,7 @@ object Boom { import scala.compiletime.* trait Fail[A <: Int, B <: Int] - erased transparent inline given fail[X <: Int, Y <: Int]: Fail[X, Y] = { + transparent inline given fail[X <: Int, Y <: Int]: Fail[X, Y] = { scala.compiletime.summonFrom { case t: Fail[X, y] if constValue[y] < constValue[Y] => ??? 
} @@ -12,4 +12,4 @@ object Boom { given ev1: Fail[a.type, 2] = null summon[Fail[a.type, 3]] -} \ No newline at end of file +} diff --git a/tests/pos-custom-args/erasedInline.scala b/tests/pos-custom-args/erasedInline.scala new file mode 100644 index 000000000000..6230dfb3dcb2 --- /dev/null +++ b/tests/pos-custom-args/erasedInline.scala @@ -0,0 +1,4 @@ +import language.experimental.erasedDefinitions + +erased inline def f: Unit = () // error: illegal combination of modifiers: `erased` and `inline` for: method f +inline def g: Unit = () diff --git a/tests/pos-custom-args/help.scala b/tests/pos-custom-args/help.scala new file mode 100644 index 000000000000..257186338e40 --- /dev/null +++ b/tests/pos-custom-args/help.scala @@ -0,0 +1,4 @@ + +// dummy source for exercising information flags +// +class Help diff --git a/tests/pos-custom-args/i10383.scala b/tests/pos-custom-args/i10383.scala new file mode 100644 index 000000000000..ca54464eb514 --- /dev/null +++ b/tests/pos-custom-args/i10383.scala @@ -0,0 +1 @@ +def r = BigInt(1) to BigInt(3) // error diff --git a/tests/pos-custom-args/i13044.scala b/tests/pos-custom-args/i13044.scala new file mode 100644 index 000000000000..33a20b5800c8 --- /dev/null +++ b/tests/pos-custom-args/i13044.scala @@ -0,0 +1,51 @@ +import scala.deriving.Mirror +import scala.compiletime._ + +trait Schema[T] { + def build: T +} + +object Schema extends SchemaDerivation { + implicit lazy val int: Schema[Int] = ??? + implicit def option[A](implicit ev: Schema[A]): Schema[Option[A]] = ??? +} + +trait SchemaDerivation { + inline def recurse[A <: Tuple]: List[Schema[Any]] = + inline erasedValue[A] match { + case _: (t *: ts) => + val builder = summonInline[Schema[t]].asInstanceOf[Schema[Any]] + builder :: recurse[ts] + case _: EmptyTuple => Nil + } + + inline def derived[A]: Schema[A] = + inline summonInline[Mirror.Of[A]] match { + case m: Mirror.SumOf[A] => + lazy val subTypes = recurse[m.MirroredElemTypes] + new Schema[A] { + def build: A = ??? 
+ } + + case m: Mirror.ProductOf[A] => + lazy val fields = recurse[m.MirroredElemTypes] + new Schema[A] { + def build: A = ??? + } + } + + inline given gen[A]: Schema[A] = derived +} + +case class H(i: Int) +case class G(h: H) +case class F(g: G) +case class E(f: Option[F]) +case class D(e: E) +case class C(d: D) +case class B(c: C) +case class A(a: A, b: B) + +object TestApp { + implicit def typeSchema: Schema[A] = Schema.gen +} diff --git a/tests/pos-custom-args/i13405/Macro.scala b/tests/pos-custom-args/i13405/Macro.scala new file mode 100644 index 000000000000..2996555a6e0c --- /dev/null +++ b/tests/pos-custom-args/i13405/Macro.scala @@ -0,0 +1,12 @@ +import scala.quoted.* + +sealed class Foo() +inline def hh(): Unit = ${ interpMacro() } + +private def interpMacro()(using Quotes): Expr[Unit] = + import quotes.reflect.* + '{ + val res: Either[String, (Foo, Foo)] = + Right((new Foo, new Foo)) + val (a, b) = res.toOption.get + } diff --git a/tests/pos-custom-args/i13405/Test.scala b/tests/pos-custom-args/i13405/Test.scala new file mode 100644 index 000000000000..385f4453adeb --- /dev/null +++ b/tests/pos-custom-args/i13405/Test.scala @@ -0,0 +1 @@ +@main def main: Unit = hh() diff --git a/tests/pos-custom-args/inline-match-gadt.scala b/tests/pos-custom-args/inline-match-gadt.scala index 0f22f7b96c22..f63d0fb5b68c 100644 --- a/tests/pos-custom-args/inline-match-gadt.scala +++ b/tests/pos-custom-args/inline-match-gadt.scala @@ -1,6 +1,6 @@ object `inline-match-gadt` { class Exactly[T] - erased def exactType[T]: Exactly[T] = ??? 
+ erased def exactType[T]: Exactly[T] = compiletime.erasedValue inline def foo[T](t: T): T = inline exactType[T] match { diff --git a/tests/pos-custom-args/jdk-8-app.scala b/tests/pos-custom-args/jdk-8-app.scala new file mode 100644 index 000000000000..6a9d07155958 --- /dev/null +++ b/tests/pos-custom-args/jdk-8-app.scala @@ -0,0 +1,5 @@ +import java.time.LocalDate + +object Jdk8App extends App { + println(LocalDate.now()) +} diff --git a/tests/pos-custom-args/no-experimental/experimental-imports-empty.scala b/tests/pos-custom-args/no-experimental/experimental-imports-empty.scala new file mode 100644 index 000000000000..bb27629a6062 --- /dev/null +++ b/tests/pos-custom-args/no-experimental/experimental-imports-empty.scala @@ -0,0 +1,5 @@ +import annotation.experimental +import language.experimental.fewerBraces +import language.experimental.namedTypeArguments +import language.experimental.genericNumberLiterals +import language.experimental.erasedDefinitions diff --git a/tests/pos-custom-args/no-experimental/experimental-imports-top.scala b/tests/pos-custom-args/no-experimental/experimental-imports-top.scala new file mode 100644 index 000000000000..bee89d6ab6c8 --- /dev/null +++ b/tests/pos-custom-args/no-experimental/experimental-imports-top.scala @@ -0,0 +1,5 @@ +import language.experimental.erasedDefinitions +import annotation.experimental + +@experimental +erased def f = 1 diff --git a/tests/pos-custom-args/no-experimental/i13848.scala b/tests/pos-custom-args/no-experimental/i13848.scala new file mode 100644 index 000000000000..8b65ccb078e1 --- /dev/null +++ b/tests/pos-custom-args/no-experimental/i13848.scala @@ -0,0 +1,8 @@ +import annotation.experimental + +@main +@experimental +def run(): Unit = f + +@experimental +def f = 2 diff --git a/tests/pos-custom-args/no-experimental/i8945.scala b/tests/pos-custom-args/no-experimental/i8945.scala new file mode 100644 index 000000000000..5dded16f0160 --- /dev/null +++ b/tests/pos-custom-args/no-experimental/i8945.scala 
@@ -0,0 +1,27 @@ +// src-2/MacroImpl.scala +trait Context { + object universe { + type Literal + } +} + +class MacroImpl(val c: Context) { + import c.universe.* + def mono: Literal = ??? +} + +// src-3/Macros.scala +import scala.language.experimental.macros + +object Macros { + + object Bundles { + def mono: Unit = macro MacroImpl.mono + inline def mono: Unit = ${ Macros3.monoImpl } + } + + object Macros3 { + def monoImpl(using quoted.Quotes) = '{()} + } + +} \ No newline at end of file diff --git a/tests/pos-custom-args/phantom-Eq.scala b/tests/pos-custom-args/phantom-Eq.scala index 3bd16323524f..6ec5f77676ce 100644 --- a/tests/pos-custom-args/phantom-Eq.scala +++ b/tests/pos-custom-args/phantom-Eq.scala @@ -20,12 +20,12 @@ object EqUtil { extension [T](x: T) def ===[U](y: U)(using erased PhantomEq[T, U]) = x.equals(y) - erased given eqString: PhantomEqEq[String] = ??? - erased given eqInt: PhantomEqEq[Int] = ??? - erased given eqDouble: PhantomEqEq[Double] = ??? + erased given eqString: PhantomEqEq[String] = compiletime.erasedValue + erased given eqInt: PhantomEqEq[Int] = compiletime.erasedValue + erased given eqDouble: PhantomEqEq[Double] = compiletime.erasedValue - erased given eqByteNum: PhantomEq[Byte, Number] = ??? - erased given eqNumByte: PhantomEq[Number, Byte] = ??? + erased given eqByteNum: PhantomEq[Byte, Number] = compiletime.erasedValue + erased given eqNumByte: PhantomEq[Number, Byte] = compiletime.erasedValue - erased given eqSeq[T, U](using erased PhantomEq[T, U]): PhantomEq[Seq[T], Seq[U]] = ??? 
+ erased given eqSeq[T, U](using erased PhantomEq[T, U]): PhantomEq[Seq[T], Seq[U]] = compiletime.erasedValue } diff --git a/tests/pos-custom-args/phantom-Evidence.scala b/tests/pos-custom-args/phantom-Evidence.scala index 414f96f3ef33..3a82cfe0c6e8 100644 --- a/tests/pos-custom-args/phantom-Evidence.scala +++ b/tests/pos-custom-args/phantom-Evidence.scala @@ -24,5 +24,5 @@ object WithNormalState { object Utils { type =::=[From, To] - erased given tpEquals[A]: A =::= A = ??? + erased given tpEquals[A]: A =::= A = compiletime.erasedValue } diff --git a/tests/pos-java-interop-separate/inner-fbounds/Configurer_1.java b/tests/pos-java-interop-separate/inner-fbounds/Configurer_1.java new file mode 100644 index 000000000000..f75aaf1743ed --- /dev/null +++ b/tests/pos-java-interop-separate/inner-fbounds/Configurer_1.java @@ -0,0 +1,8 @@ +abstract class AbstractConfigurer { + abstract class AbstractRegistry> {} +} + +public final class Configurer_1 extends AbstractConfigurer { + public final class Registry extends AbstractRegistry {} + public Registry registry() { return null; } +} diff --git a/tests/pos-java-interop-separate/inner-fbounds/Test_2.scala b/tests/pos-java-interop-separate/inner-fbounds/Test_2.scala new file mode 100644 index 000000000000..85455c91762d --- /dev/null +++ b/tests/pos-java-interop-separate/inner-fbounds/Test_2.scala @@ -0,0 +1,2 @@ +object Test: + def registry(cfg: Configurer_1) = cfg.registry diff --git a/tests/pos-java-interop/i13575/Builder.java b/tests/pos-java-interop/i13575/Builder.java new file mode 100644 index 000000000000..a267a21ffd6a --- /dev/null +++ b/tests/pos-java-interop/i13575/Builder.java @@ -0,0 +1,16 @@ +package com.lamoroso.example; + +import java.util.Collections; +import java.util.List; + +public abstract class Builder, R> { + + private List pools; + + public Builder withPool(String... 
pools) { + Collections.addAll(this.pools, pools); + return this; + } + + public Builder build(){return null;} +} \ No newline at end of file diff --git a/tests/pos-java-interop/i13575/Client.java b/tests/pos-java-interop/i13575/Client.java new file mode 100644 index 000000000000..a5b19d39d1f0 --- /dev/null +++ b/tests/pos-java-interop/i13575/Client.java @@ -0,0 +1,9 @@ +package com.lamoroso.example; + +public class Client { + + public static Builder builder() { + return null; + } + +} diff --git a/tests/pos-java-interop/i13575/RestClient.java b/tests/pos-java-interop/i13575/RestClient.java new file mode 100644 index 000000000000..feba30c8ac35 --- /dev/null +++ b/tests/pos-java-interop/i13575/RestClient.java @@ -0,0 +1,15 @@ +package com.lamoroso.example; + +public class RestClient { + + private Object instance; + + public RestClient(Object instance) { + this.instance = instance; + } + + public static RestClientBuilder builder() { + return new RestClientBuilder(); + } + +} diff --git a/tests/pos-java-interop/i13575/RestClientBuilder.java b/tests/pos-java-interop/i13575/RestClientBuilder.java new file mode 100644 index 000000000000..65907b157be7 --- /dev/null +++ b/tests/pos-java-interop/i13575/RestClientBuilder.java @@ -0,0 +1,19 @@ +package com.lamoroso.example; + +public class RestClientBuilder, R> { + + private Builder wrappedBuilder; + + protected RestClientBuilder() { + this.wrappedBuilder = Client.builder(); + } + + public RestClientBuilder withPool(String... 
pools) { + this.wrappedBuilder.withPool(pools); + return this; + } + + public RestClient build() { + return new RestClient(wrappedBuilder.build()); + } +} \ No newline at end of file diff --git a/tests/pos-java-interop/i13575/ScalaApp.scala b/tests/pos-java-interop/i13575/ScalaApp.scala new file mode 100644 index 000000000000..230940384f97 --- /dev/null +++ b/tests/pos-java-interop/i13575/ScalaApp.scala @@ -0,0 +1,5 @@ +package com.lamoroso.example + +object ScalaApp extends App { + RestClient.builder().withPool("hello").build() +} diff --git a/tests/pos-macros/backwardCompat-3.0/Macro_1_c3.0.0.scala b/tests/pos-macros/backwardCompat-3.0/Macro_1_c3.0.0.scala new file mode 100644 index 000000000000..fb06e93f91c0 --- /dev/null +++ b/tests/pos-macros/backwardCompat-3.0/Macro_1_c3.0.0.scala @@ -0,0 +1,20 @@ +import scala.quoted.* + +object Macros: + + inline def power(x: Double, inline n: Int) = ${ powerCode('x, 'n) } + + private def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + unrolledPowerCode(x, n.valueOrError) + + private def unrolledPowerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] = + if n == 0 then '{ 1.0 } // tests simple quotes without splices + else if n % 2 == 1 then '{ $x * ${ unrolledPowerCode(x, n - 1) } } // tests simple splices + else '{ val y = $x * $x; ${ unrolledPowerCode('y, n / 2) } } // tests splice with term capture + + + inline def let[T, U](x: T)(inline body: T => U): U = ${ letCode('x, 'body) } + + private def letCode[T: Type, U: Type](x: Expr[T], body: Expr[T => U])(using Quotes): Expr[U] = + // tests use of Type + '{ val y: T = $x; $body(y): U } diff --git a/tests/pos-macros/backwardCompat-3.0/Test_2.scala b/tests/pos-macros/backwardCompat-3.0/Test_2.scala new file mode 100644 index 000000000000..8c0a8004b9cf --- /dev/null +++ b/tests/pos-macros/backwardCompat-3.0/Test_2.scala @@ -0,0 +1,15 @@ +import Macros.* + +def powerTest(x: Double): Unit = + power(x, 0) + power(x, 1) + power(x, 5) + power(x, 10) 
+ +def letTest: Unit = + let(0) { _ + 1 } + let(0) { _.toString } + let((4, 'a')) { _.swap } + let(new Foo) { _.hashCode } + +class Foo diff --git a/tests/pos-macros/backwardCompat-3.1/Macro_1_c3.1.0.scala b/tests/pos-macros/backwardCompat-3.1/Macro_1_c3.1.0.scala new file mode 100644 index 000000000000..fb06e93f91c0 --- /dev/null +++ b/tests/pos-macros/backwardCompat-3.1/Macro_1_c3.1.0.scala @@ -0,0 +1,20 @@ +import scala.quoted.* + +object Macros: + + inline def power(x: Double, inline n: Int) = ${ powerCode('x, 'n) } + + private def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + unrolledPowerCode(x, n.valueOrError) + + private def unrolledPowerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] = + if n == 0 then '{ 1.0 } // tests simple quotes without splices + else if n % 2 == 1 then '{ $x * ${ unrolledPowerCode(x, n - 1) } } // tests simple splices + else '{ val y = $x * $x; ${ unrolledPowerCode('y, n / 2) } } // tests splice with term capture + + + inline def let[T, U](x: T)(inline body: T => U): U = ${ letCode('x, 'body) } + + private def letCode[T: Type, U: Type](x: Expr[T], body: Expr[T => U])(using Quotes): Expr[U] = + // tests use of Type + '{ val y: T = $x; $body(y): U } diff --git a/tests/pos-macros/backwardCompat-3.1/Test_2.scala b/tests/pos-macros/backwardCompat-3.1/Test_2.scala new file mode 100644 index 000000000000..8c0a8004b9cf --- /dev/null +++ b/tests/pos-macros/backwardCompat-3.1/Test_2.scala @@ -0,0 +1,15 @@ +import Macros.* + +def powerTest(x: Double): Unit = + power(x, 0) + power(x, 1) + power(x, 5) + power(x, 10) + +def letTest: Unit = + let(0) { _ + 1 } + let(0) { _.toString } + let((4, 'a')) { _.swap } + let(new Foo) { _.hashCode } + +class Foo diff --git a/tests/pos-macros/baseCompat/Macro_1.scala b/tests/pos-macros/baseCompat/Macro_1.scala new file mode 100644 index 000000000000..fb06e93f91c0 --- /dev/null +++ b/tests/pos-macros/baseCompat/Macro_1.scala @@ -0,0 +1,20 @@ +import scala.quoted.* + +object 
Macros: + + inline def power(x: Double, inline n: Int) = ${ powerCode('x, 'n) } + + private def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + unrolledPowerCode(x, n.valueOrError) + + private def unrolledPowerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] = + if n == 0 then '{ 1.0 } // tests simple quotes without splices + else if n % 2 == 1 then '{ $x * ${ unrolledPowerCode(x, n - 1) } } // tests simple splices + else '{ val y = $x * $x; ${ unrolledPowerCode('y, n / 2) } } // tests splice with term capture + + + inline def let[T, U](x: T)(inline body: T => U): U = ${ letCode('x, 'body) } + + private def letCode[T: Type, U: Type](x: Expr[T], body: Expr[T => U])(using Quotes): Expr[U] = + // tests use of Type + '{ val y: T = $x; $body(y): U } diff --git a/tests/pos-macros/baseCompat/Test_2.scala b/tests/pos-macros/baseCompat/Test_2.scala new file mode 100644 index 000000000000..8c0a8004b9cf --- /dev/null +++ b/tests/pos-macros/baseCompat/Test_2.scala @@ -0,0 +1,15 @@ +import Macros.* + +def powerTest(x: Double): Unit = + power(x, 0) + power(x, 1) + power(x, 5) + power(x, 10) + +def letTest: Unit = + let(0) { _ + 1 } + let(0) { _.toString } + let((4, 'a')) { _.swap } + let(new Foo) { _.hashCode } + +class Foo diff --git a/tests/pos-macros/contruct-desturct/ConstructExpr.scala b/tests/pos-macros/contruct-desturct/ConstructExpr.scala new file mode 100644 index 000000000000..6a2a6f40f0c5 --- /dev/null +++ b/tests/pos-macros/contruct-desturct/ConstructExpr.scala @@ -0,0 +1,45 @@ +package scala.quoted +package util + +trait ConstructExpr[T] { + + type Elems <: Tuple + + def from(x: Elems)(using Quotes): Expr[T] + // def from(x: Tuple.Map[Elems, Expr])(using Quotes): Expr[T] // alternative + +} + +object ConstructExpr { + + def apply[T](using ce: ConstructExpr[T]): ce.type = ce + + /** Default implementation of `ConstructExpr[Tuple1[T]]` */ + given EmptyTupleConstructExpr[T: Type]: ConstructExpr[EmptyTuple] with { + type Elems = 
EmptyTuple + def from(x: Elems)(using Quotes): Expr[EmptyTuple] = + '{ EmptyTuple } + } + + /** Default implementation of `ConstructExpr[Tuple1[T]]` */ + given Tuple1ConstructExpr[T: Type]: ConstructExpr[Tuple1[T]] with { + type Elems = Tuple1[Expr[T]] + def from(x: Elems)(using Quotes): Expr[Tuple1[T]] = + '{ Tuple1[T](${x._1}) } + } + + /** Default implementation of `ConstructExpr[Tuple2[T1, T2]]` */ + given Tuple2ConstructExpr[T1: Type, T2: Type]: ConstructExpr[Tuple2[T1, T2]] with { + type Elems = (Expr[T1], Expr[T2]) + def from(x: Elems)(using Quotes): Expr[Tuple2[T1, T2]] = + '{ Tuple2[T1, T2](${x._1}, ${x._2}) } + } + + /** Default implementation of `ConstructExpr[Tuple3[T1, T2, T3]]` */ + given Tuple3ConstructExpr[T1: Type, T2: Type, T3: Type]: ConstructExpr[Tuple3[T1, T2, T3]] with { + type Elems = (Expr[T1], Expr[T2], Expr[T3]) + def from(x: Elems)(using Quotes): Expr[Tuple3[T1, T2, T3]] = + '{ Tuple3[T1, T2, T3](${x._1}, ${x._2}, ${x._3}) } + } + +} diff --git a/tests/pos-macros/contruct-desturct/DestructExpr.scala b/tests/pos-macros/contruct-desturct/DestructExpr.scala new file mode 100644 index 000000000000..b54be5e1116a --- /dev/null +++ b/tests/pos-macros/contruct-desturct/DestructExpr.scala @@ -0,0 +1,47 @@ +package scala.quoted +package util + +trait DestructExpr[T] { + + type Elems <: Tuple + + def unapply(x: Expr[T])(using Quotes): Option[Elems] + // def unapply(x: Expr[T])(using Quotes): Option[Tuple.Map[Elems, Expr]] // alternative + +} + +/** Default given instances of `DestructExpr` */ +object DestructExpr { + + def unapply[T](x: Expr[T])(using de: DestructExpr[T])(using Quotes): Option[de.Elems] = + de.unapply(x) + + + /** Default implementation of `DestructExpr[Tuple1[...]]` + * - Transform `'{Tuple1(x1)}` into `Some(Tuple1('{x1}))` + * - Otherwise returns `None` + */ + given DestructTuple1[T1](using Type[T1]): DestructExpr[Tuple1[T1]] with { + type Elems = Tuple1[Expr[T1]] + def unapply(x: Expr[Tuple1[T1]])(using Quotes) = x match { + case 
'{ new Tuple1[T1]($y) } => Some(Tuple1(y)) + case '{ Tuple1[T1]($y) } => Some(Tuple1(y)) + case _ => None + } + } + + /** Default implementation of `DestructExpr[Tuple2[...]]` + * - Transform `'{Tuple2(x1, x2)}` into `Some(Tuple2('{x1}, '{x2}))` + * - Otherwise returns `None` + */ + given DestructTuple2[T1, T2](using Type[T1], Type[T2]): DestructExpr[Tuple2[T1, T2]] with { + type Elems = (Expr[T1], Expr[T2]) + def unapply(x: Expr[Tuple2[T1, T2]])(using Quotes) = x match { + case '{ new Tuple2[T1, T2]($y1, $y2) } => Some(Tuple2(y1, y2)) + case '{ Tuple2[T1, T2]($y1, $y2) } => Some(Tuple2(y1, y2)) + case '{ ($y1: T1) -> ($y2: T2) } => Some(Tuple2(y1, y2)) + case _ => None + } + } + +} diff --git a/tests/pos-macros/contruct-desturct/Test.scala b/tests/pos-macros/contruct-desturct/Test.scala new file mode 100644 index 000000000000..a4d159ed7d35 --- /dev/null +++ b/tests/pos-macros/contruct-desturct/Test.scala @@ -0,0 +1,7 @@ +import scala.quoted.* +import scala.quoted.util.* + +def test(using Quotes) = + '{ Tuple2(1, 2) } match + case DestructExpr((a, b)) => + ConstructExpr[(Int, Int)].from((a, b)) diff --git a/tests/pos-macros/forwardCompat-3.0/Macro_1_r3.0.scala b/tests/pos-macros/forwardCompat-3.0/Macro_1_r3.0.scala new file mode 100644 index 000000000000..fb06e93f91c0 --- /dev/null +++ b/tests/pos-macros/forwardCompat-3.0/Macro_1_r3.0.scala @@ -0,0 +1,20 @@ +import scala.quoted.* + +object Macros: + + inline def power(x: Double, inline n: Int) = ${ powerCode('x, 'n) } + + private def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = + unrolledPowerCode(x, n.valueOrError) + + private def unrolledPowerCode(x: Expr[Double], n: Int)(using Quotes): Expr[Double] = + if n == 0 then '{ 1.0 } // tests simple quotes without splices + else if n % 2 == 1 then '{ $x * ${ unrolledPowerCode(x, n - 1) } } // tests simple splices + else '{ val y = $x * $x; ${ unrolledPowerCode('y, n / 2) } } // tests splice with term capture + + + inline def let[T, U](x: 
T)(inline body: T => U): U = ${ letCode('x, 'body) } + + private def letCode[T: Type, U: Type](x: Expr[T], body: Expr[T => U])(using Quotes): Expr[U] = + // tests use of Type + '{ val y: T = $x; $body(y): U } diff --git a/tests/pos-macros/forwardCompat-3.0/Test_2_c3.0.0.scala b/tests/pos-macros/forwardCompat-3.0/Test_2_c3.0.0.scala new file mode 100644 index 000000000000..8c0a8004b9cf --- /dev/null +++ b/tests/pos-macros/forwardCompat-3.0/Test_2_c3.0.0.scala @@ -0,0 +1,15 @@ +import Macros.* + +def powerTest(x: Double): Unit = + power(x, 0) + power(x, 1) + power(x, 5) + power(x, 10) + +def letTest: Unit = + let(0) { _ + 1 } + let(0) { _.toString } + let((4, 'a')) { _.swap } + let(new Foo) { _.hashCode } + +class Foo diff --git a/tests/pos-macros/i10151/Macro_1.scala b/tests/pos-macros/i10151/Macro_1.scala index c7a2b9a301c2..3b81ed00b2f8 100644 --- a/tests/pos-macros/i10151/Macro_1.scala +++ b/tests/pos-macros/i10151/Macro_1.scala @@ -55,7 +55,9 @@ object X: ) ) ) - case Block(stats, last) => Block(stats, transform(last)) + case Block(stats, last) => + val recoverdOwner = stats.headOption.map(_.symbol.owner).getOrElse(Symbol.spliceOwner) // hacky workaround to missing owner tracking in transform + Block(stats, transform(last).changeOwner(recoverdOwner)) case Inlined(x,List(),body) => transform(body) case l@Literal(x) => l.asExpr match diff --git a/tests/pos-macros/i11479/Macro_1.scala b/tests/pos-macros/i11479/Macro_1.scala index f4a8c0d13767..12f98826ebc5 100644 --- a/tests/pos-macros/i11479/Macro_1.scala +++ b/tests/pos-macros/i11479/Macro_1.scala @@ -1,9 +1,9 @@ trait Foo -given Foo: Foo with {} +given Foo: Foo() inline def summonFoo(): Foo = scala.compiletime.summonInline[Foo] package p: trait Bar - given Bar: Bar with {} + given Bar: Bar() inline def summonBar(): Bar = scala.compiletime.summonInline[Bar] diff --git a/tests/pos-macros/i11795.scala b/tests/pos-macros/i11795.scala new file mode 100644 index 000000000000..32eaccf2f4e2 --- /dev/null +++ 
b/tests/pos-macros/i11795.scala @@ -0,0 +1,7 @@ +import scala.quoted._ +import scala.deriving._ + +def blah2[P <: Product, MEL <: Tuple: Type, MET <: Tuple: Type](m: Mirror.ProductOf[P] { type MirroredElemLabels = MEL; type MirroredElemTypes = MET})(using Quotes) = { + Type.of[Tuple.Zip[MEL, MET]] + () +} diff --git a/tests/pos-macros/i11835/X.scala b/tests/pos-macros/i11835/X.scala index ccc05e9e57c3..c5f12c5b8271 100644 --- a/tests/pos-macros/i11835/X.scala +++ b/tests/pos-macros/i11835/X.scala @@ -6,7 +6,6 @@ object X: private def _blah(b: Expr[Boolean])(using Quotes): Expr[Unit] = import quotes.reflect.* - println("="*120) - println(b.asTerm) - println(b.valueOrError) + b.asTerm + b.valueOrAbort '{()} diff --git a/tests/pos-macros/i12072/Macro_1.scala b/tests/pos-macros/i12072/Macro_1.scala new file mode 100644 index 000000000000..d58160d8f00a --- /dev/null +++ b/tests/pos-macros/i12072/Macro_1.scala @@ -0,0 +1,12 @@ +import scala.quoted.* + +object M { + + transparent inline def f(inline s: String): String | Null = + ${ f('s) } + + def f(s: Expr[String])(using Quotes): Expr[String | Null] = { + s.valueOrError // required + '{ null } + } +} diff --git a/tests/pos-macros/i12072/Test_2.scala b/tests/pos-macros/i12072/Test_2.scala new file mode 100644 index 000000000000..772e21903fc5 --- /dev/null +++ b/tests/pos-macros/i12072/Test_2.scala @@ -0,0 +1,18 @@ +object T2 { + import M.f + + private inline val V = "V" + private inline def D = "D" + + trait Trait { def s: String } + + object MatchFV extends Trait { + override transparent inline def s: String = + inline f(V) match { case "V" => "o"; case _ => "x" } // error in RC1 + } + + object MatchFD extends Trait { + override transparent inline def s: String = + inline f(D) match { case "D" => "o"; case _ => "x" } + } +} diff --git a/tests/pos-macros/i12173.scala b/tests/pos-macros/i12173.scala new file mode 100644 index 000000000000..c5c56ed3e0e7 --- /dev/null +++ b/tests/pos-macros/i12173.scala @@ -0,0 +1,7 @@ 
+object TestMacro { + inline def test[T](inline t: T): T = ${ '{ ${ 't } } } +} + +object Test { + TestMacro.test("x") +} diff --git a/tests/pos-macros/i12173b.scala b/tests/pos-macros/i12173b.scala new file mode 100644 index 000000000000..ce638485b74c --- /dev/null +++ b/tests/pos-macros/i12173b.scala @@ -0,0 +1,7 @@ +object TestMacro { + inline def test[T](inline t: T): T = ${ '{ ${ '{ ${ 't } } } } } +} + +object Test { + TestMacro.test("x") +} diff --git a/tests/pos-macros/i12173c.scala b/tests/pos-macros/i12173c.scala new file mode 100644 index 000000000000..2806a2183fdb --- /dev/null +++ b/tests/pos-macros/i12173c.scala @@ -0,0 +1,7 @@ +object TestMacro { + inline def test[T](inline t: T): T = ${ '{ ${ '{ ${ '{ ${ '{ ${ '{ ${ '{ ${ '{ ${ 't } } } } } } } } } } } } } } } +} + +object Test { + TestMacro.test("x") +} diff --git a/tests/pos-macros/i12188b/Macro_1.scala b/tests/pos-macros/i12188b/Macro_1.scala new file mode 100644 index 000000000000..fa9ea10e666a --- /dev/null +++ b/tests/pos-macros/i12188b/Macro_1.scala @@ -0,0 +1,13 @@ +import scala.quoted.* + +object MatchTest { + inline def test[T](inline obj: Any): Unit = ${testImpl('obj)} + + def testImpl[T](objExpr: Expr[T])(using Quotes): Expr[Unit] = { + import quotes.reflect.* + // test that the extractors work + val Inlined(None, Nil, Block(Nil, Match(param @ Ident("a"), List(CaseDef(Literal(IntConstant(1)), None, Block(Nil, Literal(UnitConstant()))), CaseDef(Wildcard(), None, Block(Nil, Literal(UnitConstant()))))))) = objExpr.asTerm + // test that the constructors work + Block(Nil, Match(param, List(CaseDef(Literal(IntConstant(1)), None, Block(Nil, Literal(UnitConstant()))), CaseDef(Wildcard(), None, Block(Nil, Literal(UnitConstant())))))).asExprOf[Unit] + } +} diff --git a/tests/pos-macros/i12188b/Test_2.scala b/tests/pos-macros/i12188b/Test_2.scala new file mode 100644 index 000000000000..ee6ef1fdf075 --- /dev/null +++ b/tests/pos-macros/i12188b/Test_2.scala @@ -0,0 +1,5 @@ +def test(a: Int) = 
MatchTest.test { + a match + case 1 => + case _ => +} diff --git a/tests/pos-macros/i12188c/Macro_1.scala b/tests/pos-macros/i12188c/Macro_1.scala new file mode 100644 index 000000000000..978e2280759c --- /dev/null +++ b/tests/pos-macros/i12188c/Macro_1.scala @@ -0,0 +1,14 @@ +import scala.quoted.* + +object MatchTest { + inline def test(a: Int): Unit = ${testImpl('a)} + + def testImpl(a: Expr[Any])(using Quotes): Expr[Unit] = { + import quotes.reflect.* + val matchTree = Match(a.asTerm, List( + CaseDef(Literal(IntConstant(1)), None, Block(Nil, Literal(UnitConstant()))), + CaseDef(Alternatives(List(Literal(IntConstant(2)), Literal(IntConstant(3)), Literal(IntConstant(4)))), None, Block(Nil, Literal(UnitConstant()))), + CaseDef(Typed(Wildcard(), TypeIdent(defn.IntClass)), None, Block(Nil, Literal(UnitConstant()))))) + matchTree.asExprOf[Unit] + } +} \ No newline at end of file diff --git a/tests/pos-macros/i12188c/Test_2.scala b/tests/pos-macros/i12188c/Test_2.scala new file mode 100644 index 000000000000..2e7bd48e4a0b --- /dev/null +++ b/tests/pos-macros/i12188c/Test_2.scala @@ -0,0 +1 @@ +def test(a: Int) = MatchTest.test(a) diff --git a/tests/pos-macros/i12196/Macros_1.scala b/tests/pos-macros/i12196/Macros_1.scala new file mode 100644 index 000000000000..60e1f56b0b3d --- /dev/null +++ b/tests/pos-macros/i12196/Macros_1.scala @@ -0,0 +1,8 @@ + +import scala.quoted.* + +inline val InlineStringVal = "abc" + +inline def withInlineVal = ${ qqq(InlineStringVal) } + +def qqq(s: String)(using Quotes): Expr[String] = Expr(s) diff --git a/tests/pos-macros/i12196/Test_2.scala b/tests/pos-macros/i12196/Test_2.scala new file mode 100644 index 000000000000..a4b3e340cb52 --- /dev/null +++ b/tests/pos-macros/i12196/Test_2.scala @@ -0,0 +1 @@ +def test = withInlineVal diff --git a/tests/pos-macros/i12221/Macro_1.scala b/tests/pos-macros/i12221/Macro_1.scala new file mode 100644 index 000000000000..f0df529f8550 --- /dev/null +++ b/tests/pos-macros/i12221/Macro_1.scala @@ -0,0 
+1,120 @@ +import scala.quoted.* +import scala.reflect.* + +object Macros { + + inline def show[A,B](inline expr: A => B): Unit = + ${printExpr('expr)} + + def printExpr[A,B](expr: Expr[A=>B])(using Quotes) = '{ + println(${showExpr3(expr)}) + } + + def showExpr3[A,B](expr: Expr[A=>B])(using Quotes): Expr[String] = + import quotes.reflect.* + + val sb = new StringBuilder + + // Makes us only print the body of thr function + def printDefFun(tree: Tree): Unit ={ + val acc = new TreeAccumulator[Unit]{ + def foldTree(s: Unit, tree: Tree)(owner: Symbol): Unit = + tree match + case deff : DefDef => + treePrint(deff.rhs.get, 0) + sb.append("++++++++++++++++\n") + sb.append(deff.rhs.get.show(using Printer.TreeStructure)).append('\n') + case _ => + foldOverTree(s, tree)(owner) + } + acc.foldTree(List(), tree)(tree.symbol) + } + + def treePrint(tree: Tree, level: Int): Unit = { + val pre = " " * level + tree match { + case body : Term => { + body match { + // Normal typed + case typed: Typed => + sb.append(pre + typed.getClass()).append('\n') + sb.append(pre + s"Typed with ${typed.tpt}:\n") + treePrint(typed.expr , level + 1) + case Block(statements, expr) => + sb.append(pre + "Block:{").append('\n') + statements.map(stat => stat match{ + case term: Term => treePrint(term, level + 1) + case deff: Definition => + sb.append(pre + "Definition statement\n") + treePrint(deff, level + 1) + case _ => + sb.append(pre + "Non-term statement\n") + sb.append(stat.show(using Printer.TreeStructure)).append('\n') + }) + treePrint(expr, level + 1) + sb.append(pre + "}\n") + + case Match(scrutinee, cases) => + sb.append(pre + "Match:\n") + treePrint(scrutinee, level + 1) + sb.append(pre + "with\n") + cases.map(treePrint(_, level +1)) + + case Ident(name) => + sb.append(pre + s"Identifier(${name})\n") + + case Apply(fun, args) => + sb.append(pre + "Apply\n") + treePrint(fun, level + 1) + if !args.isEmpty then + sb.append(pre + "with arguments\n") + args.zipWithIndex.map( + (arg, index) => + 
treePrint(arg, level +1) + if args.size > 1 && index < args.size -1 then + // Used to seperate list of parameters + sb.append(pre + ",\n") + ) + case _ => + sb.append("Term\n") + sb.append(tree.getClass()).append('\n') + sb.append(tree.show(using Printer.TreeStructure)).append('\n') + } + } + + case CaseDef(pattern, guard, rhs) => + sb.append(pre + "caseDef:\n" ) + treePrint(pattern, level + 1) + treePrint(rhs, level + 1) + + //Adding this unappy makes the typed get swallowed + /* + case Unapply(fun, implicits, pattern) => + sb.append(pre + "Unapply with function").append('\n') + treePrint(fun , level + 1) + sb.append(pre + "with patterns").append('\n') + pattern.map(treePrint(_ , level + 1)) + */ + case b: Bind => sb.append(pre + "Bind with stuff").append('\n') + + case typed : Typed => + //sb.append(pre + typed.getClass()).append('\n') + sb.append(pre + tree.getClass()).append('\n') + sb.append(pre + s"Typed2 with ${typed.tpt}:").append('\n') + treePrint(typed.expr , level + 1) + + case Unapply(_,_,_) => sb.append(pre + "Unapply with stuff").append('\n') + case _ => + tree match + case t: Term => sb.append("Term").append('\n') + case _ => () + sb.append(tree.getClass()).append('\n') + sb.append(tree.show(using Printer.TreeStructure)).append('\n') + } + } + + val tree: Term = expr.asTerm + printDefFun(tree) + sb.append("Finished").append('\n') + Expr(sb.result()) +} diff --git a/tests/pos-macros/i12221/Test_2.scala b/tests/pos-macros/i12221/Test_2.scala new file mode 100644 index 000000000000..5e2da9920594 --- /dev/null +++ b/tests/pos-macros/i12221/Test_2.scala @@ -0,0 +1,20 @@ +@main +def hello: Unit = { + abstract class A + case class Foo(a: Int) extends A + + //Will print the body of the given function, then its print (using Printer.TreeStructure) + Macros.show((x: A)=>{ + x match { + case Foo(a) => a + }:Int + }) + /* + val x = Foo(3) + Macros.show( + x match { + case Foo(1) => 3 + } + )*/ + +} diff --git a/tests/pos-macros/i12253/Macro_1.scala 
b/tests/pos-macros/i12253/Macro_1.scala new file mode 100644 index 000000000000..43eb73542c33 --- /dev/null +++ b/tests/pos-macros/i12253/Macro_1.scala @@ -0,0 +1,24 @@ + +import scala.quoted.* +import deriving.*, compiletime.* + +object MacroUtils: + transparent inline def extractNameFromSelector[To, T](inline code: To => T) = ${extractNameFromSelectorImpl('code)} + + def extractNameFromSelectorImpl[To: Type, T: Type](code: Expr[To => T])(using Quotes): Expr[String] = + import quotes.reflect.* + code.asTerm match + case InlinedLambda(_, Select(_, name)) => Expr(name) + case t => Expr("") + + object InlinedLambda: + def unapply(using Quotes)(arg: quotes.reflect.Term): Option[(List[quotes.reflect.ValDef], quotes.reflect.Term)] = + import quotes.reflect.* + arg match + case Inlined(_, _, Lambda(vals, term)) => Some((vals, term)) + case Inlined(_, _, nested) => InlinedLambda.unapply(nested) + case t => None + end InlinedLambda + +end MacroUtils + diff --git a/tests/pos-macros/i12253/Test_2.scala b/tests/pos-macros/i12253/Test_2.scala new file mode 100644 index 000000000000..61aae41d1ee0 --- /dev/null +++ b/tests/pos-macros/i12253/Test_2.scala @@ -0,0 +1,4 @@ +object Usage: + case class Bar(x: Int, y: String, z: (Double, Double)) + MacroUtils.extractNameFromSelector[Bar, String](_.y + "abc") +end Usage diff --git a/tests/pos-macros/i12309/Macro_1.scala b/tests/pos-macros/i12309/Macro_1.scala new file mode 100644 index 000000000000..91c0932682bb --- /dev/null +++ b/tests/pos-macros/i12309/Macro_1.scala @@ -0,0 +1,25 @@ +import scala.quoted.* + +object TestMacro { + def use(f: () => String): Unit = () + + inline def test: Unit = ${testImpl} + + def testImpl(using Quotes): Expr[Unit] = { + import quotes.reflect.* + + def resultDefBody(): Term = '{ + val result: String = "xxx" + result + }.asTerm + val resultDefSymbol = Symbol.newMethod(Symbol.spliceOwner, "getResult", MethodType(Nil)(_ => Nil, _ => TypeRepr.of[String])) + val resultDef = DefDef(resultDefSymbol, { case _ 
=> Some(resultDefBody().changeOwner(resultDefSymbol)) }) + val resultExpr = Block(List(resultDef), Closure(Ref(resultDefSymbol), None)).asExprOf[() => String] + + // + + val r = '{ TestMacro.use($resultExpr) } + // println(r.asTerm.show(using Printer.TreeShortCode)) + r + } +} diff --git a/tests/pos-macros/i12309/Test_2.scala b/tests/pos-macros/i12309/Test_2.scala new file mode 100644 index 000000000000..b290481ebbd2 --- /dev/null +++ b/tests/pos-macros/i12309/Test_2.scala @@ -0,0 +1,3 @@ +object Test extends App { + TestMacro.test +} diff --git a/tests/pos-macros/i12343/Macro.scala b/tests/pos-macros/i12343/Macro.scala new file mode 100644 index 000000000000..03642dd1f4af --- /dev/null +++ b/tests/pos-macros/i12343/Macro.scala @@ -0,0 +1,13 @@ +package test + +import scala.quoted.* + +object Macro: + def covImpl(arg: Expr[Any])(using Quotes): Expr[Any] = arg match // Covariant (List) + case '{ $h : List[h] } => '{ $h : List[h] } + + def invImpl(arg: Expr[Any])(using Quotes): Expr[Any] = arg match // Invariant (Set) + case '{ $h : Set[h] } => '{ $h : Set[h] } + + transparent inline def cov(inline arg: Any): Any = ${ covImpl('arg) } + transparent inline def inv(inline arg: Any): Any = ${ invImpl('arg) } diff --git a/tests/pos-macros/i12343/Test.scala b/tests/pos-macros/i12343/Test.scala new file mode 100644 index 000000000000..ef23a9c16725 --- /dev/null +++ b/tests/pos-macros/i12343/Test.scala @@ -0,0 +1,7 @@ +package test + +object Test: + val cov1: List[Boolean] = Macro.cov(List(true)) + val inv1: Set[Boolean] = Macro.inv(Set(true)) + def cov2[X](a: List[X]): List[X] = Macro.cov(a) + def inv2[X](a: Set[X]): Set[X] = Macro.inv(a) // doesn't compile; Set[Nothing] is inferred diff --git a/tests/pos-macros/i12510/Macro_1.scala b/tests/pos-macros/i12510/Macro_1.scala new file mode 100644 index 000000000000..2c4427224363 --- /dev/null +++ b/tests/pos-macros/i12510/Macro_1.scala @@ -0,0 +1,11 @@ +object M { + import scala.quoted.* + + inline def valueOfUnit: 
ValueOf[Unit] = + ${ _valueOfUnit } + + def _valueOfUnit(using Quotes): Expr[ValueOf[Unit]] = { + import quotes.reflect.* + Expr.summon[ValueOf[Unit]] getOrElse sys.error("Not found") + } +} diff --git a/tests/pos-macros/i12510/Test_2.scala b/tests/pos-macros/i12510/Test_2.scala new file mode 100644 index 000000000000..457ff3cc6d19 --- /dev/null +++ b/tests/pos-macros/i12510/Test_2.scala @@ -0,0 +1,4 @@ +object T { + val ok = summon[ValueOf[Unit]] + val ko = M.valueOfUnit +} diff --git a/tests/pos-macros/i12850/Macros_1.scala b/tests/pos-macros/i12850/Macros_1.scala new file mode 100644 index 000000000000..a54bff9b12c2 --- /dev/null +++ b/tests/pos-macros/i12850/Macros_1.scala @@ -0,0 +1,19 @@ + +import scala.quoted.* + +inline def foo() = ${ expr } + +private def expr(using Quotes): Expr[Unit] = + import quotes.reflect.* + + // Option(1) match + // case Some(1) => () + // case None => () + val mtch2 = Match( + Apply(TypeApply(Ref(Symbol.requiredMethod("scala.Option.apply")), List(Inferred(TypeRepr.of[Int]))), List(Literal(IntConstant(1)))), + List( + CaseDef(/** FIXME: needs TypedTree from #12200; remove cast */Typed(Unapply(TypeApply(Ref(Symbol.requiredMethod("scala.Some.unapply")), List(Inferred(TypeRepr.of[Int]))), Nil, List(Literal(IntConstant(1)))).asInstanceOf[Term], Inferred(TypeRepr.of[Some[Int]])), None, Literal(UnitConstant())), + CaseDef(Ref(Symbol.requiredModule("scala.None")), None, Literal(UnitConstant()))) + ) + + mtch2.asExprOf[Unit] diff --git a/tests/pos-macros/i12850/Test_2.scala b/tests/pos-macros/i12850/Test_2.scala new file mode 100644 index 000000000000..08152921e268 --- /dev/null +++ b/tests/pos-macros/i12850/Test_2.scala @@ -0,0 +1 @@ +def test = foo() diff --git a/tests/pos-macros/i12948/Macros_1.scala b/tests/pos-macros/i12948/Macros_1.scala new file mode 100644 index 000000000000..a18d9daec2fe --- /dev/null +++ b/tests/pos-macros/i12948/Macros_1.scala @@ -0,0 +1,9 @@ +package mylib +import scala.quoted.* + +object Main: + protected def 
foo: Unit = {} + inline def fooCaller: Unit = foo + inline def fooCallerM: Unit = ${ fooMacro } + def fooMacro(using Quotes): Expr[Unit] = + '{ foo } diff --git a/tests/pos-macros/i12948/Test_2.scala b/tests/pos-macros/i12948/Test_2.scala new file mode 100644 index 000000000000..386f1b3bd8f5 --- /dev/null +++ b/tests/pos-macros/i12948/Test_2.scala @@ -0,0 +1,5 @@ +import mylib.Main + +object Test: + Main.fooCaller + Main.fooCallerM diff --git a/tests/pos-macros/i13095/Macro_1.scala b/tests/pos-macros/i13095/Macro_1.scala new file mode 100644 index 000000000000..776d2a18533a --- /dev/null +++ b/tests/pos-macros/i13095/Macro_1.scala @@ -0,0 +1,9 @@ + +import scala.quoted.* + +inline def testMacro = ${ test } + +def test(using Quotes): Expr[Unit] = + import quotes.reflect.* + assert(!Symbol.noSymbol.isDefinedInCurrentRun) + '{} diff --git a/tests/pos-macros/i13095/Test_2.scala b/tests/pos-macros/i13095/Test_2.scala new file mode 100644 index 000000000000..b301473885ac --- /dev/null +++ b/tests/pos-macros/i13095/Test_2.scala @@ -0,0 +1 @@ +def unitTest = testMacro diff --git a/tests/pos-macros/i13422/Macro_1.scala b/tests/pos-macros/i13422/Macro_1.scala new file mode 100644 index 000000000000..a141049523ff --- /dev/null +++ b/tests/pos-macros/i13422/Macro_1.scala @@ -0,0 +1,9 @@ +import scala.quoted._ + +inline def rule(inline r: Any): Unit = ${ ruleImpl('r) } + +def ruleImpl(r: Expr[Any])(using Quotes): Expr[Unit] = { + import quotes.reflect.* + r.asTerm.show + '{} +} diff --git a/tests/pos-macros/i13422/Test_2.scala b/tests/pos-macros/i13422/Test_2.scala new file mode 100644 index 000000000000..f6595825516a --- /dev/null +++ b/tests/pos-macros/i13422/Test_2.scala @@ -0,0 +1,12 @@ +def test = rule { + foo(bar(baz)) +} + +def foo[I](r: I): Nothing = ??? + +def bar(i: Baz): i.Out = ??? + +sealed trait Baz: + type Out = Nothing match { case Nothing => Nothing } + +def baz: Baz = ??? 
diff --git a/tests/pos-macros/i13477/Macro.scala b/tests/pos-macros/i13477/Macro.scala new file mode 100644 index 000000000000..fe58549bc7e5 --- /dev/null +++ b/tests/pos-macros/i13477/Macro.scala @@ -0,0 +1,8 @@ +package mylib +import scala.quoted.* + +private[mylib] object Main: + transparent inline def d(): Unit = ${interpMacro} + def interpMacro(using Quotes) : Expr[Unit] = '{} + +transparent inline def f(): Unit = Main.d() diff --git a/tests/pos-macros/i13477/Test.scala b/tests/pos-macros/i13477/Test.scala new file mode 100644 index 000000000000..60dadf0a21df --- /dev/null +++ b/tests/pos-macros/i13477/Test.scala @@ -0,0 +1,2 @@ +import mylib.* +val x = f() diff --git a/tests/pos-macros/i13546/Macros_1.scala b/tests/pos-macros/i13546/Macros_1.scala new file mode 100644 index 000000000000..7e90476c5f73 --- /dev/null +++ b/tests/pos-macros/i13546/Macros_1.scala @@ -0,0 +1,13 @@ +package mylib +import scala.quoted.* + +object Main: + protected def foo: Unit = {} + inline def fooCaller: Unit = + def f = foo + foo + inline def fooCallerM: Unit = ${ fooMacro } + def fooMacro(using Quotes): Expr[Unit] = + '{ foo } + val fooExpr = '{ foo } + '{ $fooExpr } diff --git a/tests/pos-macros/i13546/Test_2.scala b/tests/pos-macros/i13546/Test_2.scala new file mode 100644 index 000000000000..386f1b3bd8f5 --- /dev/null +++ b/tests/pos-macros/i13546/Test_2.scala @@ -0,0 +1,5 @@ +import mylib.Main + +object Test: + Main.fooCaller + Main.fooCallerM diff --git a/tests/pos-macros/i13557/Macro_1.scala b/tests/pos-macros/i13557/Macro_1.scala new file mode 100644 index 000000000000..73d2328b7e3a --- /dev/null +++ b/tests/pos-macros/i13557/Macro_1.scala @@ -0,0 +1,41 @@ +package x + +import scala.quoted.* + +def fun(x:Int): Int = ??? 
+ +transparent inline def in1[T](inline expr: Int => Int): Int => Int = + ${ + M.transformLambdaImpl('expr) + } + +object M: + + def transformLambdaImpl(cexpr: Expr[Int => Int])(using Quotes): Expr[Int => Int] = + import quotes.reflect.* + + def extractLambda(f:Term): (ValDef, Term, Term => Term ) = + f match + case Inlined(call, bindings, body) => + val inner = extractLambda(body) + (inner._1, inner._2, t => Inlined(call, bindings, t) ) + case Lambda(params,body) => + params match + case List(vd) => (vd, body, identity) + case _ => report.throwError(s"lambda with one argument expected, we have ${params}",cexpr) + case Block(Nil,nested@Lambda(params,body)) => extractLambda(nested) + case _ => + report.throwError(s"lambda expected, have: ${f}", cexpr) + + val (oldValDef, body, inlineBack) = extractLambda(cexpr.asTerm) + val mt = MethodType(List(oldValDef.name))( _ => List(oldValDef.tpt.tpe), _ => TypeRepr.of[Int]) + val nLambda = Lambda(Symbol.spliceOwner, mt, (owner, params) => { + val argTransformer = new TreeMap() { + override def transformTerm(tree: Term)(owner: Symbol): Term = + tree match + case Ident(name) if (tree.symbol == oldValDef.symbol) => Ref(params.head.symbol) + case _ => super.transformTerm(tree)(owner) + } + argTransformer.transformTerm('{ fun(${body.asExprOf[Int]}) }.asTerm )(owner) + }) + inlineBack(nLambda).asExprOf[Int => Int] diff --git a/tests/pos-macros/i13557/Test_2.scala b/tests/pos-macros/i13557/Test_2.scala new file mode 100644 index 000000000000..c9468a538abe --- /dev/null +++ b/tests/pos-macros/i13557/Test_2.scala @@ -0,0 +1,9 @@ +package x + +object Main: + + def testSimpleContext(): Unit = + var x = 0 + val c = in1{ scope => + 1 + } diff --git a/tests/pos-macros/i14137/Macro_1.scala b/tests/pos-macros/i14137/Macro_1.scala new file mode 100644 index 000000000000..e4a6fdc4a7be --- /dev/null +++ b/tests/pos-macros/i14137/Macro_1.scala @@ -0,0 +1,23 @@ +package x + +import scala.quoted._ + +object Macro: + + inline def genOp(inline 
f:Int): Int = ${ + genOpImpl('f) + } + + def genOpImpl(f: Expr[Int])(using Quotes): Expr[Int] = { + + def firstOp()(using Quotes): Expr[Int] = + '{ + var x=1 + ${secondOp('x,f)} + } + + def secondOp(x:Expr[Int], y:Expr[Int])(using Quotes): Expr[Int] = + '{ $x + $y } + + firstOp() + } diff --git a/tests/pos-macros/i14137/Test_2.scala b/tests/pos-macros/i14137/Test_2.scala new file mode 100644 index 000000000000..4d9c893f5477 --- /dev/null +++ b/tests/pos-macros/i14137/Test_2.scala @@ -0,0 +1,6 @@ +package x + +object Main: + + def main(args: Array[String]):Unit = + Macro.genOp(10) diff --git a/tests/pos-macros/i14180/Macro_1.scala b/tests/pos-macros/i14180/Macro_1.scala new file mode 100644 index 000000000000..ee6396c1d1d8 --- /dev/null +++ b/tests/pos-macros/i14180/Macro_1.scala @@ -0,0 +1,3 @@ +import scala.quoted._ +def impl(using Quotes): Expr[Unit] = + '{ ([V] => (v: V) => println(v)).apply[Int](2) } diff --git a/tests/pos-macros/i14180/Test_2.scala b/tests/pos-macros/i14180/Test_2.scala new file mode 100644 index 000000000000..a98ba87e8729 --- /dev/null +++ b/tests/pos-macros/i14180/Test_2.scala @@ -0,0 +1,2 @@ +inline def foo = ${ impl } +def example = foo diff --git a/tests/pos-macros/i14185/Macro_1.scala b/tests/pos-macros/i14185/Macro_1.scala new file mode 100644 index 000000000000..c30f65dac67c --- /dev/null +++ b/tests/pos-macros/i14185/Macro_1.scala @@ -0,0 +1,60 @@ +import scala.quoted.* + +object Test { + inline def foo[A, M[_]]: Unit = ${ fooImpl[A, M] } + + private def fooImpl[A, M[_]]( + using + q: Quotes, + tc: Type[M], + pt: Type[A] + ): Expr[Unit] = { + import q.reflect.* + + val ptpe = TypeRepr.of[A] + val neededGivenType = TypeRepr.of[M](using tc).appliedTo(ptpe) + + val neededGiven: Option[Term] = Implicits.search(neededGivenType) match { + case suc: ImplicitSearchSuccess => + Some(suc.tree) + + case _ => + None + } + + neededGiven.map(_.show) + + '{ () } + } +} + +// --- + +/** Type level evidence that type `A` is not type `B`. 
*/ +final class IsNot[A, B]() { + override val toString = "not" +} + +object IsNot { + implicit def defaultEvidence[A, B]: IsNot[A, B] = new IsNot[A, B]() + + @annotation.implicitAmbiguous("Could not prove type ${A} is not (IsNot) ${A}") + implicit def ambiguousEvidence1[A]: IsNot[A, A] = null + implicit def ambiguousEvidence2[A]: IsNot[A, A] = null +} + +// --- + +sealed trait SomeTypeclass[T] + +object SomeTypeclass extends SomeTypeclassLowPrio { + + given collection[T, Repr <: Iterable[T]]( + using SomeTypeclass[T], + Repr IsNot Option[T] + ): SomeTypeclass[Repr] = new SomeTypeclass[Repr] {} +} + +sealed trait SomeTypeclassLowPrio { + given int: SomeTypeclass[Int] = new SomeTypeclass[Int] {} +} diff --git a/tests/pos-macros/i14185/Test_2.scala b/tests/pos-macros/i14185/Test_2.scala new file mode 100644 index 000000000000..0dbe4b5ea3b7 --- /dev/null +++ b/tests/pos-macros/i14185/Test_2.scala @@ -0,0 +1,2 @@ +def test = + Test.foo[Seq[Int], SomeTypeclass] diff --git a/tests/pos-macros/i14393/Macro_1.scala b/tests/pos-macros/i14393/Macro_1.scala new file mode 100644 index 000000000000..16ce1e4d4d72 --- /dev/null +++ b/tests/pos-macros/i14393/Macro_1.scala @@ -0,0 +1,20 @@ +package i14393 +import scala.quoted.* + +object M { + + inline def useFoldTree[X](inline x:X):X = ${ + useFoldTreeImpl('x) + } + + def useFoldTreeImpl[X:Type](x:Expr[X])(using Quotes):Expr[X] = { + import quotes.reflect.* + val search = new TreeAccumulator[Int] { + def foldTree(s:Int, tree: Tree)(owner: Symbol): Int = + foldOverTree(s,tree)(owner) + } + search.foldTree(0,x.asTerm)(Symbol.spliceOwner) + x + } + +} diff --git a/tests/pos-macros/i14393/Test_2.scala b/tests/pos-macros/i14393/Test_2.scala new file mode 100644 index 000000000000..911e82da0424 --- /dev/null +++ b/tests/pos-macros/i14393/Test_2.scala @@ -0,0 +1,11 @@ +package i14393 + +def thing() = + M.useFoldTree { + + Option("") match + case None => + case Some(_) => + ??? 
+ + } diff --git a/tests/pos-macros/i8208/Macros_1.scala b/tests/pos-macros/i8208/Macros_1.scala new file mode 100644 index 000000000000..ce21fadc947a --- /dev/null +++ b/tests/pos-macros/i8208/Macros_1.scala @@ -0,0 +1,22 @@ +package playground + +import scala.quoted._ + +object X { + + inline def power(n: Int, x: Double): Double = + ${ powerImpl('n, 'x) } + + private def powerImpl(nExpr: Expr[Int], xExpr: Expr[Double])(using Quotes): Expr[Double] = + nExpr match { + case Expr(n1) => '{ 42.0 } + case _ => '{ dynamicPower($nExpr, $xExpr) } + } + + private def dynamicPower(n: Int, x: Double): Double = { + println(s"dynamic: $n^$x") + if (n == 0) 1.0 + else if (n % 2 == 0) dynamicPower(n / 2, x * x) + else x * dynamicPower(n - 1, x) + } +} diff --git a/tests/pos-macros/i8208/Test_2.scala b/tests/pos-macros/i8208/Test_2.scala new file mode 100644 index 000000000000..c5e239e50970 --- /dev/null +++ b/tests/pos-macros/i8208/Test_2.scala @@ -0,0 +1,2 @@ +import playground.X +def test(x: Int) = X.power(x, 2) diff --git a/tests/pos-macros/null-by-name/Macro_1.scala b/tests/pos-macros/null-by-name/Macro_1.scala new file mode 100644 index 000000000000..7d57990dd136 --- /dev/null +++ b/tests/pos-macros/null-by-name/Macro_1.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +inline def foo(x: => Any): Unit = + ${ impl('x) } + +private def impl(x: Expr[Any])(using Quotes) : Expr[Unit] = { + '{ + val a = $x + } +} diff --git a/tests/pos-macros/null-by-name/Test_2.scala b/tests/pos-macros/null-by-name/Test_2.scala new file mode 100644 index 000000000000..34f5a3111ac6 --- /dev/null +++ b/tests/pos-macros/null-by-name/Test_2.scala @@ -0,0 +1 @@ +def test = foo(null) diff --git a/tests/pos-macros/power-macro-2/Macro_1.scala b/tests/pos-macros/power-macro-2/Macro_1.scala new file mode 100644 index 000000000000..57f7764f0969 --- /dev/null +++ b/tests/pos-macros/power-macro-2/Macro_1.scala @@ -0,0 +1,22 @@ + +import scala.quoted.* + +import math.Numeric.Implicits.infixNumericOps + +inline 
def power[Num](x: Num, inline n: Int)(using num: Numeric[Num]) = ${powerCode('x, 'n)(using 'num)} + +private def powerCode[Num: Type](x: Expr[Num], n: Expr[Int])(using Expr[Numeric[Num]])(using Quotes): Expr[Num] = + powerCode(x, n.valueOrAbort) + +private def powerCode[Num: Type](x: Expr[Num], n: Int)(using num: Expr[Numeric[Num]])(using Quotes): Expr[Num] = + if (n == 0) '{ $num.one } + else if (n % 2 == 0) '{ + given Numeric[Num] = $num + val y = $x * $x + ${ powerCode('y, n / 2) } + } + else '{ + given Numeric[Num] = $num + $x * ${powerCode(x, n - 1)} + } + diff --git a/tests/pos-macros/power-macro-2/Test_2.scala b/tests/pos-macros/power-macro-2/Test_2.scala new file mode 100644 index 000000000000..59644c3d51fb --- /dev/null +++ b/tests/pos-macros/power-macro-2/Test_2.scala @@ -0,0 +1,2 @@ +def test(x: Int) = power(x, 5) +def test(x: Double) = power(x, 5) diff --git a/tests/pos-macros/power-macro-3/Macro_1.scala b/tests/pos-macros/power-macro-3/Macro_1.scala new file mode 100644 index 000000000000..d8273c65faf8 --- /dev/null +++ b/tests/pos-macros/power-macro-3/Macro_1.scala @@ -0,0 +1,25 @@ + +import scala.quoted.* + +import math.Numeric.Implicits.infixNumericOps + +inline def power[Num](x: Num, inline n: Int)(using num: Numeric[Num]) = ${powerCode('x, 'n)(using 'num)} + +private def powerCode[Num: Type](x: Expr[Num], n: Expr[Int])(using Expr[Numeric[Num]])(using Quotes): Expr[Num] = + powerCode(x, n.valueOrAbort) + +private def powerCode[Num: Type](x: Expr[Num], n: Int)(using num: Expr[Numeric[Num]])(using Quotes): Expr[Num] = + if (n == 0) '{ $num.one } + else if (n % 2 == 0) '{ + withGiven($num) { + val y = $x * $x + ${ powerCode('y, n / 2) } + } + } + else '{ + withGiven($num) { + $x * ${powerCode(x, n - 1)} + } + } + +inline def withGiven[U, T](inline x: T)(inline body: T ?=> U): U = body(using x) diff --git a/tests/pos-macros/power-macro-3/Test_2.scala b/tests/pos-macros/power-macro-3/Test_2.scala new file mode 100644 index 000000000000..59644c3d51fb --- 
/dev/null +++ b/tests/pos-macros/power-macro-3/Test_2.scala @@ -0,0 +1,2 @@ +def test(x: Int) = power(x, 5) +def test(x: Double) = power(x, 5) diff --git a/tests/pos-macros/power-macro/Macro_1.scala b/tests/pos-macros/power-macro/Macro_1.scala index 009947d4fe1f..54ac52edf1e4 100644 --- a/tests/pos-macros/power-macro/Macro_1.scala +++ b/tests/pos-macros/power-macro/Macro_1.scala @@ -6,7 +6,7 @@ object PowerMacro { inline def power(inline n: Long, x: Double) = ${powerCode('n, 'x)} def powerCode(n: Expr[Long], x: Expr[Double]) (using Quotes): Expr[Double] = - powerCode(n.valueOrError, x) + powerCode(n.valueOrAbort, x) def powerCode(n: Long, x: Expr[Double])(using Quotes): Expr[Double] = if (n == 0) '{1.0} diff --git a/tests/pos-macros/quote-nested-object/Macro_1.scala b/tests/pos-macros/quote-nested-object/Macro_1.scala index 19746c270840..b6f71f00f131 100644 --- a/tests/pos-macros/quote-nested-object/Macro_1.scala +++ b/tests/pos-macros/quote-nested-object/Macro_1.scala @@ -9,7 +9,7 @@ object Macro { inline def plus(inline n: Int, m: Int): Int = ${ plus('n, 'm) } def plus(n: Expr[Int], m: Expr[Int]) (using Quotes): Expr[Int] = - if (n.valueOrError == 0) m + if (n.valueOrAbort == 0) m else '{ ${n} + $m } object Implementation2 { @@ -17,7 +17,7 @@ object Macro { inline def plus(inline n: Int, m: Int): Int = ${ plus('n, 'm) } def plus(n: Expr[Int], m: Expr[Int]) (using Quotes): Expr[Int] = - if (n.valueOrError == 0) m + if (n.valueOrAbort == 0) m else '{ ${n} + $m } } } diff --git a/tests/pos-macros/quote-whitebox-2/Macro_1.scala b/tests/pos-macros/quote-whitebox-2/Macro_1.scala index 8113cedbc113..f7a3261f0368 100644 --- a/tests/pos-macros/quote-whitebox-2/Macro_1.scala +++ b/tests/pos-macros/quote-whitebox-2/Macro_1.scala @@ -6,7 +6,7 @@ object Macro { transparent inline def charOrString(inline str: String): Any = ${ impl('str) } def impl(strExpr: Expr[String]) (using Quotes)= - val str = strExpr.valueOrError + val str = strExpr.valueOrAbort if (str.length == 1) 
Expr(str.charAt(0)) else Expr(str) } diff --git a/tests/pos-macros/splice-pat/Macro_1.scala b/tests/pos-macros/splice-pat/Macro_1.scala new file mode 100644 index 000000000000..b3fabb046da6 --- /dev/null +++ b/tests/pos-macros/splice-pat/Macro_1.scala @@ -0,0 +1,17 @@ +import scala.quoted.* + +object Macro { + object MyMatcher { + def unapply(expr: Expr[Any])(using Quotes): Option[Expr[Int]] = expr match { + case '{ (${a}: Int) + (${_}: Int) } => Some(a) + case _ => None + } + } + + def foo(x: Int): Int = x - 1 + + def impl(expr: Expr[Any])(using Quotes): Expr[(Int, Int)] = expr match + case '{foo(${bound@MyMatcher(x)})}=> '{($bound, $x)} + + inline def macr(inline x: Int): (Int, Int) = ${impl('x)} +} diff --git a/tests/pos-macros/splice-pat/Test_1.scala b/tests/pos-macros/splice-pat/Test_1.scala new file mode 100644 index 000000000000..672001167616 --- /dev/null +++ b/tests/pos-macros/splice-pat/Test_1.scala @@ -0,0 +1,3 @@ +object Test { + assert(Macro.macr(Macro.foo(1 + 2)) == (3, 1)) +} diff --git a/tests/pos-special/fatal-warnings/i10994.scala b/tests/pos-special/fatal-warnings/i10994.scala new file mode 100644 index 000000000000..99ae647466b1 --- /dev/null +++ b/tests/pos-special/fatal-warnings/i10994.scala @@ -0,0 +1,2 @@ +def foo = true match + case (b: Boolean): Boolean => () diff --git a/tests/pos-special/fatal-warnings/i11729.scala b/tests/pos-special/fatal-warnings/i11729.scala new file mode 100644 index 000000000000..7ba41d081e3b --- /dev/null +++ b/tests/pos-special/fatal-warnings/i11729.scala @@ -0,0 +1,26 @@ +type Return[X] = X match + case List[t] => List[t] + case Any => List[X] + +object Return: + def apply[A](a:A):Return[A] = a match + case a: List[t] => a + case a: Any => List(a) + +object Test1: + Return(1).map(x => x) + + +type Boxed[X] = X match + case Box[t] => Box[t] + case Any => Box[X] + +def box[X](x: X): Boxed[X] = x match + case b: Box[t] => b + case x: Any => Box(x) + +case class Box[A](a:A): + def map[B](f: A => B): Box[B] = 
Box(f(a)) + +object Test2: + box(box(1)).map(_ + 1) diff --git a/tests/pos-special/fatal-warnings/i13433.scala b/tests/pos-special/fatal-warnings/i13433.scala new file mode 100644 index 000000000000..47a4a520e63a --- /dev/null +++ b/tests/pos-special/fatal-warnings/i13433.scala @@ -0,0 +1,32 @@ +import scala.reflect.TypeTest + +type Matcher[A] = A match { case String => String } + +def patternMatch[A](a: Any)(using tt: TypeTest[Any, Matcher[A]]): Option[Matcher[A]] = { + // type T = RDF.Triple[Rdf] + a match { + case res: Matcher[A] => Some(res) + case _ => None + } +} + +def patternMatchWithAlias[A](a: Any)(using tt: TypeTest[Any, Matcher[A]]): Option[Matcher[A]] = { + type T = Matcher[A] + a match { + case res: T => Some(res) + case _ => None + } +} + + +@main def main = { + println(patternMatch[String]("abc")) + println(patternMatchWithAlias[String]("abc")) + println(patternMatch[String]("abc")(using (s: Any) => { + if s.isInstanceOf[Matcher[String]] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None })) + println(patternMatchWithAlias[String]("abc")(using (s: Any) => { + if s.isInstanceOf[Matcher[String]] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None })) + + println(patternMatch[String](1)) + println(patternMatchWithAlias[String](1)) +} diff --git a/tests/pos-special/fatal-warnings/i13433b.scala b/tests/pos-special/fatal-warnings/i13433b.scala new file mode 100644 index 000000000000..5e3625166fc0 --- /dev/null +++ b/tests/pos-special/fatal-warnings/i13433b.scala @@ -0,0 +1,28 @@ +import scala.reflect.ClassTag + +type Matcher[A] = A match { case String => String } + +def patternMatch[A](a: Any)(using tt: ClassTag[Matcher[A]]): Option[Matcher[A]] = { + // type T = RDF.Triple[Rdf] + a match { + case res: Matcher[A] => Some(res) + case _ => None + } +} + +def patternMatchWithAlias[A](a: Any)(using tt: ClassTag[Matcher[A]]): Option[Matcher[A]] = { + type T = Matcher[A] + a match { + case 
res: T => Some(res) + case _ => None + } +} + + +@main def main = { + println(patternMatch[String]("abc")) + println(patternMatchWithAlias[String]("abc")) + + println(patternMatch[String](1)) + println(patternMatchWithAlias[String](1)) +} diff --git a/tests/pos-special/fatal-warnings/i3589b.scala b/tests/pos-special/fatal-warnings/i3589b.scala index e6fb462736e1..115e74b8cee2 100644 --- a/tests/pos-special/fatal-warnings/i3589b.scala +++ b/tests/pos-special/fatal-warnings/i3589b.scala @@ -1,5 +1,5 @@ class Test { - def test(x: 1) = (x: @annotation.switch) match { + def test(x: 1 | 2 | 3) = (x: @annotation.switch) match { case 1 => 1 case 2 => 2 case 3 => 3 diff --git a/tests/pos-special/kind-projector-underscores.scala b/tests/pos-special/kind-projector-underscores.scala new file mode 100644 index 000000000000..06face862e53 --- /dev/null +++ b/tests/pos-special/kind-projector-underscores.scala @@ -0,0 +1,59 @@ +package kind_projector + +trait Foo[F[_]] +trait Qux[F[_, _]] +trait Baz[F[_], A, B] + +trait FooPlus[+F[+_]] +trait QuxPlus[+F[+_, +_]] +trait BazPlus[+F[+_], +A, +B] + +trait FooMinus[-F[-_]] +trait QuxMinus[-F[-_, -_]] +trait BazMinus[-F[-_], -A, -B] + +class Bar1 extends Foo[Either[Int, _]] +class Bar2 extends Foo[Either[_, Int]] +class Bar3 extends Foo[_ => Int] +class Bar4 extends Foo[Int => _] +class Bar5 extends Foo[(Int, _, Int)] +class Bar6 extends Foo[λ[x => Either[Int, x]]] +class Bar7 extends Qux[λ[(x, y) => Either[y, x]]] +class Bar8 extends Foo[Baz[Int => _, _, Int]] +class Bar9 extends Foo[λ[x => Baz[x => _, Int, x]]] + +class BarPlus1 extends FooPlus[Either[Int, +_]] +class BarPlus2 extends FooPlus[Either[+_, Int]] +class BarPlus3 extends FooPlus[Int => +_] +class BarPlus4 extends FooPlus[(Int, +_, Int)] +class BarPlus5 extends FooPlus[λ[`+x` => Either[Int, x]]] +class BarPlus6 extends QuxPlus[λ[(`+x`, `+y`) => Either[y, x]]] +class BarPlus7 extends FooPlus[BazPlus[Int => +_, +_, Int]] + +class BarMinus1 extends FooMinus[-_ => Int] + +class 
VarianceAnnotationIsActuallyIgnored1 extends FooPlus[Either[Int, -_]] +class VarianceAnnotationIsActuallyIgnored2 extends FooPlus[Either[-_, Int]] +class VarianceAnnotationIsActuallyIgnored3 extends FooMinus[+_ => Int] +class VarianceAnnotationIsActuallyIgnored4 extends FooPlus[Int => -_] +class VarianceAnnotationIsActuallyIgnored5 extends FooPlus[(Int, -_, Int)] +class VarianceAnnotationIsActuallyIgnored6 extends FooPlus[λ[`-x` => Either[Int, x]]] +class VarianceAnnotationIsActuallyIgnored7 extends QuxPlus[λ[(`-x`, `-y`) => Either[y, x]]] +class VarianceAnnotationIsActuallyIgnored8 extends FooPlus[BazPlus[Int => -_, -_, Int]] +class VarianceAnnotationIsActuallyIgnored9 extends Foo[λ[`-x` => BazPlus[x => -_, Int, x]]] + +class BackticksAreFine1 extends FooPlus[Either[Int, `-_`]] +class BackticksAreFine2 extends FooPlus[Either[`-_`, Int]] +class BackticksAreFine3 extends FooMinus[`+_` => Int] +class BackticksAreFine4 extends FooPlus[Int => `-_`] +class BackticksAreFine5 extends FooPlus[(Int, `-_`, Int)] +class BackticksAreFine6 extends FooPlus[BazPlus[Int => `-_`, `-_`, Int]] +class BackticksAreFine7 extends Foo[λ[`-x` => BazPlus[x => `-_`, Int, x]]] + +class SpacesAreFine1 extends FooPlus[Either[Int, - _ ]] +class SpacesAreFine2 extends FooPlus[Either[ - _ , Int]] +class SpacesAreFine3 extends FooMinus[ + _ => Int] +class SpacesAreFine4 extends FooPlus[Int => - _] +class SpacesAreFine5 extends FooPlus[(Int, - _, Int)] +class SpacesAreFine6 extends FooPlus[BazPlus[Int => - _ , - _, Int]] +class SpacesAreFine7 extends Foo[λ[`-x` => BazPlus[x => - _ , Int, x]]] diff --git a/tests/pos-special/kind-projector.scala b/tests/pos-special/kind-projector.scala index 1bfd9a36433a..9048ae90f41c 100644 --- a/tests/pos-special/kind-projector.scala +++ b/tests/pos-special/kind-projector.scala @@ -4,6 +4,14 @@ trait Foo[F[_]] trait Qux[F[_, _]] trait Baz[F[_], A, B] +trait FooPlus[+F[+_]] +trait QuxPlus[+F[+_, +_]] +trait BazPlus[+F[+_], +A, +B] + +trait FooMinus[-F[-_]] +trait 
QuxMinus[-F[-_, -_]] +trait BazMinus[-F[-_], -A, -B] + class Bar1 extends Foo[Either[Int, *]] class Bar2 extends Foo[Either[*, Int]] class Bar3 extends Foo[* => Int] @@ -13,3 +21,40 @@ class Bar6 extends Foo[λ[x => Either[Int, x]]] class Bar7 extends Qux[λ[(x, y) => Either[y, x]]] class Bar8 extends Foo[Baz[Int => *, *, Int]] class Bar9 extends Foo[λ[x => Baz[x => *, Int, x]]] + +class BarPlus1 extends FooPlus[Either[Int, +*]] +class BarPlus2 extends FooPlus[Either[+*, Int]] +class BarPlus3 extends FooPlus[Int => +*] +class BarPlus4 extends FooPlus[(Int, +*, Int)] +class BarPlus5 extends FooPlus[λ[`+x` => Either[Int, x]]] +class BarPlus6 extends QuxPlus[λ[(`+x`, `+y`) => Either[y, x]]] +class BarPlus7 extends FooPlus[BazPlus[Int => +*, +*, Int]] + +class BarMinus1 extends FooMinus[-* => Int] + +class VarianceAnnotationIsActuallyIgnored1 extends FooPlus[Either[Int, -*]] +class VarianceAnnotationIsActuallyIgnored2 extends FooPlus[Either[-*, Int]] +class VarianceAnnotationIsActuallyIgnored3 extends FooMinus[+* => Int] +class VarianceAnnotationIsActuallyIgnored4 extends FooPlus[Int => -*] +class VarianceAnnotationIsActuallyIgnored5 extends FooPlus[(Int, -*, Int)] +class VarianceAnnotationIsActuallyIgnored6 extends FooPlus[λ[`-x` => Either[Int, x]]] +class VarianceAnnotationIsActuallyIgnored7 extends QuxPlus[λ[(`-x`, `-y`) => Either[y, x]]] +class VarianceAnnotationIsActuallyIgnored8 extends FooPlus[BazPlus[Int => -*, -*, Int]] +class VarianceAnnotationIsActuallyIgnored9 extends Foo[λ[`-x` => BazPlus[x => -*, Int, x]]] + +class BackticksAreFine1 extends FooPlus[Either[Int, `-*`]] +class BackticksAreFine2 extends FooPlus[Either[`-*`, Int]] +class BackticksAreFine3 extends FooMinus[`+*` => Int] +class BackticksAreFine4 extends FooPlus[Int => `-*`] +class BackticksAreFine5 extends FooPlus[(Int, `-*`, Int)] +class BackticksAreFine6 extends FooPlus[BazPlus[Int => `-*`, `-*`, Int]] +class BackticksAreFine7 extends Foo[λ[`-x` => BazPlus[x => `-*`, Int, x]]] +class 
BackticksAreFine8 extends Foo[λ[`x` => BazPlus[x => `*`, Int, x]]] + +// https://github.com/lampepfl/dotty/issues/13141 +// i13141 +object A { + class X { type Blah = Int } + val * = new X + val a: *.Blah = 2 +} diff --git a/tests/pos-staging/quote-0.scala b/tests/pos-staging/quote-0.scala index a9eb68ffce5f..8a48c8a2d87b 100644 --- a/tests/pos-staging/quote-0.scala +++ b/tests/pos-staging/quote-0.scala @@ -16,7 +16,7 @@ object Macros { inline def power(inline n: Int, x: Double) = ${ powerCode('n, 'x) } def powerCode(n: Expr[Int], x: Expr[Double]) (using Quotes): Expr[Double] = - powerCode(n.valueOrError, x) + powerCode(n.valueOrAbort, x) def powerCode(n: Int, x: Expr[Double])(using Quotes): Expr[Double] = if (n == 0) '{1.0} diff --git a/tests/pos/10077.scala b/tests/pos/10077.scala new file mode 100644 index 000000000000..bdd326c23cb5 --- /dev/null +++ b/tests/pos/10077.scala @@ -0,0 +1,13 @@ +trait T[F[_[_]]] + +type Inner[x] = [X[_]] =>> x match { case T[f] => f[X] } + +trait Monad[F[_]] +type TMonad = T[Monad] + +trait U[T0]: + type T0_member = T0 + def f(x: Inner[T0][List]): Unit + +class X extends U[T[Monad]]: + def f(x: Inner[T0_member][List]): Unit = ??? 
diff --git a/tests/pos/10747-onnxmin.scala b/tests/pos/10747-onnxmin.scala new file mode 100644 index 000000000000..99d7e0a96d79 --- /dev/null +++ b/tests/pos/10747-onnxmin.scala @@ -0,0 +1,45 @@ +import scala.compiletime.ops.string.+ +import scala.compiletime.ops.int +import scala.compiletime.ops.int.{S, +, <, <=, *} +import scala.compiletime.ops.boolean.&& + +object OnnxMin { + type Index = Int & Singleton + + sealed trait Indices + final case class :::[+H <: Index, +T <: Indices](head: H, tail: T) extends Indices + + sealed trait INil extends Indices + case object INil extends INil + + + type Dimension = Int & Singleton + + sealed trait Shape extends Product with Serializable + + final case class #:[+H <: Dimension, +T <: Shape](head: H, tail: T) extends Shape + + sealed trait SNil extends Shape + case object SNil extends SNil + + type CContains[Haystack <: Indices, Needle <: Index] <: Boolean = Haystack match { + case head ::: tail => head match { + case Needle => true + case _ => CContains[tail, Needle] + } + case INil => false + } + + type AddGivenAxisSizeLoop[First <: Shape, Second <: Shape, AxisIndex <: Indices, I <: Index] <: Shape = First match { + case head #: tail => CContains[AxisIndex, I] match { + case true => Second match { + case secondHead #: secondTail => head #: tail + case SNil => AxisIndex match{ + case INil => SNil + } + } + } + } + + def ConcatV13: AddGivenAxisSizeLoop[Dimension #: Shape, Dimension #: Shape, Index ::: INil, 0] = ??? 
+} diff --git a/tests/pos/10747-shapeless-min.scala b/tests/pos/10747-shapeless-min.scala new file mode 100644 index 000000000000..fbb8012fc9f2 --- /dev/null +++ b/tests/pos/10747-shapeless-min.scala @@ -0,0 +1,12 @@ +trait Monoidal { + type to[_] <: Tuple +} + +object eithers extends Monoidal { + class Wrap[T] + + type to[t] <: Tuple = Wrap[t] match { + case Wrap[Either[hd, tl]] => hd *: to[tl] + case Wrap[Nothing] => EmptyTuple + } +} diff --git a/tests/pos/10867.scala b/tests/pos/10867.scala new file mode 100644 index 000000000000..e08b0c60a491 --- /dev/null +++ b/tests/pos/10867.scala @@ -0,0 +1,49 @@ +object Test { + type inserts[a, as <: Tuple] <: Tuple = + as match + case EmptyTuple => (a *: EmptyTuple) *: EmptyTuple + case y *: ys => (a *: y *: ys) *: Tuple.Map[inserts[a, ys], [t <: Tuple] =>> y *: t] + + type inserts2[a] = + [as <: Tuple] =>> inserts[a, as] + + type A = inserts [1, EmptyTuple] + type B = inserts2[1][EmptyTuple] + + summon[A =:= ((1 *: EmptyTuple) *: EmptyTuple)] + summon[B =:= ((1 *: EmptyTuple) *: EmptyTuple)] + summon[A =:= B] + + type H[t <: Tuple] = Tuple.Concat[t, EmptyTuple] + + summon[H[A] =:= H[B]] + + summon[Tuple.Concat[A, EmptyTuple] =:= Tuple.Concat[B, EmptyTuple]] +} + +object Minimized { + type Concombre[X <: Tuple, +Y <: Tuple] <: Tuple = X match { + case EmptyTuple => Y + case x1 *: xs1 => X + } + + type inserts[a, as <: Tuple] <: Tuple = + as match + case EmptyTuple => a *: EmptyTuple + + type inserts2[a] = + [as <: Tuple] =>> inserts[a, as] + + type A = inserts [1, EmptyTuple] + type B = inserts2[1][EmptyTuple] + type C = 1 *: EmptyTuple + + summon[A =:= B] + summon[A =:= C] + summon[B =:= C] + + type H[t <: Tuple] = Concombre[t, EmptyTuple] + + summon[H[C] =:= H[A]] + summon[H[C] =:= H[B]] +} diff --git a/tests/pos/11973.scala b/tests/pos/11973.scala new file mode 100644 index 000000000000..4c6e0bbd09a6 --- /dev/null +++ b/tests/pos/11973.scala @@ -0,0 +1,9 @@ +enum E: + case C + +trait T + +def f(x: E | T): Unit = x 
match { + case e: E => () + case t: T => () +} diff --git a/tests/pos/12093.scala b/tests/pos/12093.scala new file mode 100644 index 000000000000..3bf762abb1c9 --- /dev/null +++ b/tests/pos/12093.scala @@ -0,0 +1,47 @@ +import scala.compiletime.ops.int.{`*`, +} + +// HList +sealed trait Shape +final case class #:[H <: Int & Singleton, T <: Shape](head: H, tail: T) extends Shape +case object Ø extends Shape +type Ø = Ø.type + +// Reduce +def reduce[T, S <: Shape, A <: Shape](shape: S, axes: A): Reduce[S, A, 0] = ??? +type Reduce[S, Axes <: Shape, I <: Int] <: Shape = S match { + case head #: tail => Contains[Axes, I] match { + case true => Reduce[tail, Remove[Axes, I], I + 1] + case false => head #: Reduce[tail, Axes, I + 1] + } + case Ø => Axes match { + case Ø => Ø + // otherwise, do not reduce further + } +} +type Contains[Haystack <: Shape, Needle <: Int] <: Boolean = Haystack match { + case Ø => false + case head #: tail => head match { + case Needle => true + case _ => Contains[tail, Needle] + } +} +type Remove[From <: Shape, Value <: Int] <: Shape = From match { + case Ø => Ø + case head #: tail => head match { + case Value => Remove[tail, Value] + case _ => head #: Remove[tail, Value] + } +} + +// Reshape +def reshape[From <: Shape, To <: Shape](from: From, to: To) + (using ev: NumElements[From] =:= NumElements[To]): To = ??? 
+type NumElements[X <: Shape] <: Int = X match { + case Ø => 1 + case head #: tail => head * NumElements[tail] +} + +// Test cases +val input = #:(25, #:(256, #:(256, #:(3, Ø)))) +val reduced = reduce(input, #:(3, #:(1, #:(2, Ø)))) +val reshaped: 5 #: 5 #: Ø = reshape(reduced, #:(5, #:(5, Ø))) diff --git a/tests/pos/12278.scala b/tests/pos/12278.scala new file mode 100644 index 000000000000..226d1453bc6c --- /dev/null +++ b/tests/pos/12278.scala @@ -0,0 +1,16 @@ +import scala.compiletime.ops.int.* + +object Test { + type Fib[N <: Int] <: Int = N match { + case 0 => 0 + case 1 => 1 + case Any => Fib[N - 1] + Fib[N - 2] + } + val fib0: Fib[0] = 0 + val fib1: Fib[1] = 1 + val fib2: Fib[2] = 1 + val fib3: Fib[3] = 2 + val fib4: Fib[4] = 3 + val fib5: Fib[5] = 5 + val fib6: Fib[6] = 8 +} diff --git a/tests/pos/12944/12944_1.scala b/tests/pos/12944/12944_1.scala new file mode 100644 index 000000000000..e6b6286c43a3 --- /dev/null +++ b/tests/pos/12944/12944_1.scala @@ -0,0 +1,11 @@ +object Test1 { + type ++[L, R] = (L, R) match + case (Int, Int) => 2 + case (String, String) => "2" + case (String, Int) => "2" + case (Int, String) => "2" + + type Bar[W <: Int] = W ++ "" ++ W + + val works = summon[Bar[2] =:= "2"] +} diff --git a/tests/pos/12944/12944_2.scala b/tests/pos/12944/12944_2.scala new file mode 100644 index 000000000000..cb471a59d191 --- /dev/null +++ b/tests/pos/12944/12944_2.scala @@ -0,0 +1,5 @@ +import Test1._ + +object Test2 { + val fails = summon[Bar[2] =:= "2"] +} diff --git a/tests/pos/13455.scala b/tests/pos/13455.scala new file mode 100644 index 000000000000..62e0dae9c881 --- /dev/null +++ b/tests/pos/13455.scala @@ -0,0 +1,10 @@ +sealed class R + +type X[T] = T match { + case R => String + case (z => r) => Int +} +def x[T]: X[T] = ??? + +def i(i0: Int): Unit = ??? 
+val a = i(x[Int => String]) diff --git a/tests/pos/13469.scala b/tests/pos/13469.scala new file mode 100644 index 000000000000..7e4238373a4f --- /dev/null +++ b/tests/pos/13469.scala @@ -0,0 +1,47 @@ +object Meta: + type Shape = String | Tuple + + type Demote[S <: Tuple]<: Shape = S match + case Tuple1[t] => t & Shape + case Tuple => S + + type If[T <: Boolean, R1, R2] <: R1 | R2 = T match + case true => R1 + case false => R2 + + type Contains[T <: Tuple, X] <: Boolean = T match + case X *: r => true + case _ *: r => Contains[r, X] + case _ => false + + type RemoveStrict[T <: Tuple, X] <: Tuple = T match + case head *: tail => head match + case X => tail + case _ => head *: RemoveStrict[tail, X] + + type WithoutStrict[T <: Tuple, T2 <: Tuple] <: Tuple = T2 match + case head *: tail => WithoutStrict[RemoveStrict[T, head], tail] + case EmptyTuple => T + + /** Removes all elems from ToReplace and replaces the first replaced elem with replacement */ + type ReplaceAllStrict[T <: Tuple, ToReplace <: Tuple, Replacement] <: Tuple = T match + case head *: tail => + If[Contains[ToReplace, head], + Replacement *: WithoutStrict[tail, RemoveStrict[ToReplace, head]], + head *: ReplaceAllStrict[tail, ToReplace, Replacement]] + case EmptyTuple => T + + type Sub[S <: Tuple, ToReplace <: Tuple, Replacement <: String] = + Demote[ReplaceAllStrict[S, ToReplace, Replacement]] + +object Foo: + import Meta._ + val _0: Sub["batch" *: EmptyTuple, Int *: EmptyTuple, "batch"] = "batch" + val _1: Sub[("batch", "len"), ("batch", "len"), "batch"] = "batch" + val _2a: ReplaceAllStrict[("batch", "len", "embed"), "batch" *: EmptyTuple, "b"] = ("b", "len", "embed") + type S = ("batch", "len") + type ToReplace = "batch" *: EmptyTuple + type Replacement = "b" + val _2b: ReplaceAllStrict[S, ToReplace, Replacement] = ("b", "len") // ok + val _2c: Demote[ReplaceAllStrict[S, ToReplace, Replacement]] = ("b", "len") // ok + val _2d: Sub[S, ToReplace, Replacement] = ("b", "len") // error, see below diff 
--git a/tests/pos/13491.scala b/tests/pos/13491.scala new file mode 100644 index 000000000000..d16452cf922c --- /dev/null +++ b/tests/pos/13491.scala @@ -0,0 +1,97 @@ +import scala.annotation.unchecked.uncheckedVariance + +import scala.language.implicitConversions + +sealed trait HList extends Product with Serializable +final case class ::[+H, +T <: HList](head: H, tail: T) extends HList + +sealed trait HNil extends HList +case object HNil extends HNil + +trait HListable[T] { + type Out <: HList +} + +object HListable { + type HL0[T] <: HList = T match { + case Unit => HNil + case HNil => HNil + case ::[a, b] => ::[a, b] + case _ => T :: HNil + } + + implicit def calc[T]: HListable[T] { type Out = HL0[T] } = ??? +} + +sealed trait TailSwitch[L <: HList, T <: HList, R <: HList] { + type Out <: HList +} +object TailSwitch { + type Reverse0[Acc <: HList, L <: HList] <: HList = L match { + case HNil => Acc + case ::[h, t] => Reverse0[h :: Acc, t] + } + + type Reverse1[L <: HList] <: HList = L match { + case HNil => HNil + case ::[h, t] => Reverse0[h :: HNil, t] + } + + type Prepend0[A <: HList, B <: HList] <: HList = A match { + case HNil => B + case ::[h, t] => ::[h, Prepend0[t, B]] + } + + // type-level implementation of this algorithm: + // @tailrec def rec(L, LI, T, TI, R, RI) = + // if (TI <: L) R + // else if (LI <: T) RI.reverse ::: R + // else if (LI <: HNil) rec(L, HNil, T, TI.tail, R, RI) + // else if (TI <: HNil) rec(L, LI.tail, T, HNil, R, LI.head :: RI) + // else rec(L, LI.tail, T, TI.tail, R, LI.head :: RI) + // rec(L, L, T, T, R, HNil) + type TailSwitch0[L <: HList, LI <: HList, T <: HList, TI <: HList, R <: HList, RI <: HList] <: HList = TI match { + case L => R + case _ => + LI match { + case T => Prepend0[Reverse1[RI], R] + case HNil => + TI match { + case ::[_, t] => TailSwitch0[L, HNil, T, t, R, RI] + } + case ::[h, t] => + TI match { + case HNil => TailSwitch0[L, t, T, HNil, R, h :: RI] + case ::[_, tt] => TailSwitch0[L, t, T, tt, R, h :: RI] + } + 
} + } + + type Aux[L <: HList, LI <: HList, T <: HList, TI <: HList, R <: HList, RI <: HList, Out <: HList] = + TailSwitch[L, T, R] { type Out = TailSwitch0[L, L, T, T, R, HNil] } + + implicit def tailSwitch[L <: HList, T <: HList, R <: HList] + : TailSwitch[L, T, R] { type Out = TailSwitch0[L, L, T, T, R, HNil] } = ??? +} + +sealed class Rule[-I <: HList, +O <: HList] { + def ~[I2 <: HList, O2 <: HList](that: Rule[I2, O2])(implicit + i: TailSwitch[I2, O@uncheckedVariance, I@uncheckedVariance], + o: TailSwitch[O@uncheckedVariance, I2, O2] + ): Rule[i.Out, o.Out] = ??? + +} +object Rule { + type Rule0 = Rule[HNil, HNil] + type RuleN[+L <: HList] = Rule[HNil, L] + + def rule[I <: HList, O <: HList](r: Rule[I, O]): Rule[I, O] = ??? + implicit def valueMap[T](m: Map[String, T])(implicit h: HListable[T]): RuleN[h.Out] = ??? +} + +object Test { + import Rule._ + val colors: Map[String, Int] = Map("red" -> 1, "green" -> 2, "blue" -> 3) + def EOI: Rule0= ??? + val r = rule(colors ~ EOI) +} diff --git a/tests/pos/13495.scala b/tests/pos/13495.scala new file mode 100644 index 000000000000..a70864d5ca66 --- /dev/null +++ b/tests/pos/13495.scala @@ -0,0 +1,20 @@ +import scala.annotation.showAsInfix + +object Test { + trait Component + sealed trait Deleted extends Component + + type Deletable[L <: CList] <: CList = L match { + case h &: t => (h | Deleted) &: Deletable[t] + case CNil => CNil + } + + sealed trait CList + sealed trait CNil extends CList + @showAsInfix case class &:[+C <: Component, +L <: CList](h: C, t: L) extends CList + + case class A(x: Int, y: Int) extends Component + case class B(x: Int, y: Int) extends Component + + val x: Deletable[A &: B &: CNil] = ??? 
+} diff --git a/tests/pos/13633.scala b/tests/pos/13633.scala new file mode 100644 index 000000000000..ca0f7e68e81e --- /dev/null +++ b/tests/pos/13633.scala @@ -0,0 +1,50 @@ +import scala.compiletime.{constValueTuple, constValue} + +object Sums extends App: + + println(constValueTuple[Plus[(true, true, true), (true, true)]]) // works + println(constValueTuple[Plus1[(true, true, true), (true, true)]]) // fails + println(constValueTuple[ + Reverse[PlusLoop[Reverse[(true, true, true)], Reverse[(true, true)], false]]] + ) // also works despite it's just an unfold of `Plus1` application + + type Plus[A <: Tuple, B <: Tuple] <: Tuple = (A, B) match + case (EmptyTuple, EmptyTuple) => EmptyTuple + case (a, b) => Reverse[PlusLoop[Reverse[A], Reverse[B], false]] + + type Plus1[A <: Tuple, B <: Tuple] = Reverse[PlusLoop[Reverse[A], Reverse[B], false]] + + type ReverseLoop[A, XS <: Tuple] <: Tuple = A match { + case EmptyTuple => XS + case x *: xs => ReverseLoop[xs, x *: XS] + } + + type Reverse[A] = ReverseLoop[A, EmptyTuple] + + type PlusTri[A, B, C] = (A, B, C) match + case (false, false, false) => (false, false) + case (true, false, false) | (false, true, false) | (false, false, true) => (false, true) + case (true, true, false) | (true, false, true) | (false, true, true) => (true, false) + case (true, true, true) => (true, true) + + type Inc[A <: Tuple] <: Tuple = A match + case EmptyTuple => true *: EmptyTuple + case t *: as => + t match + case false => true *: as + case true => false *: Inc[as] + + type IncT[A <: Tuple, O <: Boolean] <: Tuple = O match + case false => A + case true => Inc[A] + + type PlusLoop[A <: Tuple, B <: Tuple, O] <: Tuple = (A, B) match + case (EmptyTuple, EmptyTuple) => + O match + case true => (true *: EmptyTuple) + case false => EmptyTuple + case (EmptyTuple, B) => IncT[B, O] + case (A, EmptyTuple) => IncT[A, O] + case (a *: as, b *: bs) => + PlusTri[a, b, O] match + case (x, y) => y *: PlusLoop[as, bs, x] diff --git a/tests/pos/13855.scala 
b/tests/pos/13855.scala new file mode 100644 index 000000000000..7c093ed98409 --- /dev/null +++ b/tests/pos/13855.scala @@ -0,0 +1,13 @@ +type A[X] = X match + case Int => Int + case _ => B[X] + +def a[X](x: X): A[X] = x match + case v: Int => v + case _ => b(x) + +type B[X] = X match + case String => String + +def b[X](x: X): B[X] = x match + case v: String => v diff --git a/tests/pos/7512.scala b/tests/pos/7512.scala new file mode 100644 index 000000000000..fbc8ae6a72e3 --- /dev/null +++ b/tests/pos/7512.scala @@ -0,0 +1,24 @@ +import scala.compiletime.ops.int.S + +object InfiniteLoopMatchType { + def main(args: Array[String]): Unit = { + testProd(2, 10) + } + + def testProd(a: Int, b: Int)(using ev: (a.type * b.type) =:= (b.type * a.type)) = true + + type *[A <: Int, B <: Int] <: Int = A match { + case 0 => 0 + case _ => MultiplyLoop[A, B, 0] + } + + type MultiplyLoop[A <: Int, B <: Int, Acc <: Int] <: Int = A match { + case 0 => Acc + case S[aMinusOne] => MultiplyLoop[aMinusOne, B, B + Acc] + } + + type +[A <: Int, B <: Int] <: Int = A match { + case 0 => B + case S[aMinusOne] => aMinusOne + S[B] + } +} diff --git a/tests/pos/8649.scala b/tests/pos/8649.scala new file mode 100644 index 000000000000..7e0351d52628 --- /dev/null +++ b/tests/pos/8649.scala @@ -0,0 +1,32 @@ +// This is crazy: +type Get0 = OK[Int, Unit] +def get0: Handler[Get0] = IO[Unit]() + +case class HandlerAlt[A](value: Handler[A]) + +type Handler[API] = handler.Go[API] + +case class IO[A]() +case class OK[A, B]() + +object handler: + // Starter for Handler reduction: + type Go[API] = API match + case _ => + HandlerSingle[API] + + type HandlerSingle[X] = X match + case OK[_, response] => + IO[response] + +object Minimized { + case class HandlerAlt[A](value: M2[A]) + + type M1[X] = X match { + case _ => M2[X] + } + + type M2[X] = X match { + case Int => String + } +} diff --git a/tests/pos/9239.scala b/tests/pos/9239.scala index 8f3ddfbd1ced..c9c0ec268218 100644 --- a/tests/pos/9239.scala +++ 
b/tests/pos/9239.scala @@ -24,5 +24,5 @@ object ABug: N match case Zero => One case One => One - case ? => ![--[N]] × (N) + case _ => ![--[N]] × (N) case ? :: ? => ![--[N]] × (N) diff --git a/tests/pos/9623.scala b/tests/pos/9623.scala new file mode 100644 index 000000000000..656afefb941e --- /dev/null +++ b/tests/pos/9623.scala @@ -0,0 +1,22 @@ +object A { + sealed trait TList + sealed trait TNil extends TList + sealed trait ++:[H, T <: TList] extends TList + + type :--[R <: TList, A] <: TList = R match { + case (A ++: t) => t + case (h ++: t) => h ++: (t :-- A) + } +} + +object B { + import A.* + + type X = (Int ++: String ++: Double ++: TNil) :-- String + + class T[A] + + def f(ta: T[X]) = () + + f(new T[Int ++: Double ++: TNil]) +} diff --git a/tests/pos/9675.scala b/tests/pos/9675.scala new file mode 100644 index 000000000000..63f01cf73a2f --- /dev/null +++ b/tests/pos/9675.scala @@ -0,0 +1,18 @@ +import scala.compiletime.ops.int.S + +sealed trait TList +sealed trait TNil extends TList +sealed trait ++:[H[_], T <: TList] extends TList + +type IndexOf[H[_], T <: TList] <: Int = T match + case H ++: _ => 0 + case _ ++: t => S[IndexOf[H, t]] + +// compiles fine +val a = summon[ValueOf[IndexOf[List, List ++: Option ++: TNil]]].value + +// causes an error +val b = summon[ValueOf[IndexOf[List, Option ++: List ++: TNil]]].value + +object T extends App: + println(a) diff --git a/tests/pos/9757.scala b/tests/pos/9757.scala new file mode 100644 index 000000000000..aeecfa0a472f --- /dev/null +++ b/tests/pos/9757.scala @@ -0,0 +1,11 @@ +type RemoveFrom[R, A] = R match { + case A & newType => newType +} + +def removeOnePart[R, PartR, A](f: R => A, partR: PartR): RemoveFrom[R, PartR] => A = ??? + +trait A {} +trait B {} + +val f: (A & B) => Int = ??? 
+val f2 = removeOnePart(f, new A {}) diff --git a/tests/pos/9890.scala b/tests/pos/9890.scala new file mode 100644 index 000000000000..96d6f1db33b7 --- /dev/null +++ b/tests/pos/9890.scala @@ -0,0 +1,49 @@ +object Test { + import scala.compiletime.ops.int._ + + trait x + + type Range[Min <: Int, Max <: Int] <: Tuple = Min match { + case Max => EmptyTuple + case _ => Min *: Range[Min + 1, Max] + } + + type TupleMap[Tup <: Tuple, Bound, F[_ <: Bound]] <: Tuple = Tup match { + case EmptyTuple => EmptyTuple + case h *: t => F[h] *: TupleMap[t, Bound, F] + } + type TupleDedup[Tup <: Tuple, Mask] <: Tuple = Tup match { + case EmptyTuple => EmptyTuple + case h *: t => h match { + case Mask => TupleDedup[t, Mask] + case _ => h *: TupleDedup[t, h | Mask] + } + } + + type CoordToPos[r <: Int, c <: Int] = r * 9 + c + type Cell[r <: Int, c <: Int, Board <: Tuple] = Tuple.Elem[Board, CoordToPos[r, c]] + type Col[c <: Int, Board <: Tuple] = TupleMap[Range[0, 9], Int, [r <: Int] =>> Cell[r, c, Board]] + + type ColFromPos[Pos <: Int] = Pos % 9 + + type Sudoku1 = ( + x, x, x, x, 1, x, 4, x, 6, + 8, x, 1, 6, 2, x, x, x, 9, + x, 3, x, x, x, 9, x, 2, x, + + 5, x, 9, 1, 3, x, x, 6, x, + x, 6, x, 9, x, 2, x, 4, x, + x, 2, x, x, 6, 7, 8, x, 5, + + x, 9, x, 5, x, x, x, 3, x, + 3, x, x, x, 4, 6, 9, x, 7, + 6, x, 7, x, 9, x, x, x, x, + ) + + //compiles fine + summon[Col[ColFromPos[0], Sudoku1] =:= (x, 8, x, 5, x, x, x, 3, 6)] + + summon[TupleDedup[(x, 8, x, 5, x, x, x, 3, 6), Nothing] =:= (x, 8, 5, 3, 6)] + //but this doesn't + summon[TupleDedup[Col[ColFromPos[0], Sudoku1], Nothing] =:= (x, 8, 5, 3, 6)] +} diff --git a/tests/pos/X.scala b/tests/pos/X.scala index ce5fcb547bbf..04150b72beeb 100644 --- a/tests/pos/X.scala +++ b/tests/pos/X.scala @@ -2,8 +2,8 @@ import scala.deriving.* trait FunctorK[F[_[_]]] object FunctorK { - given [C]: FunctorK[[F[_]] =>> C] with {} - given [T]: FunctorK[[F[_]] =>> Tuple1[F[T]]] with {} + given [C]: FunctorK[[F[_]] =>> C]() + given [T]: FunctorK[[F[_]] =>> 
Tuple1[F[T]]]() def derived[F[_[_]]](using m: Mirror { type MirroredType[X[_]] = F[X] ; type MirroredElemTypes[_[_]] }, r: FunctorK[m.MirroredElemTypes]): FunctorK[F] = new FunctorK[F] {} } diff --git a/tests/neg/case-semi.scala b/tests/pos/case-semi.scala similarity index 100% rename from tests/neg/case-semi.scala rename to tests/pos/case-semi.scala diff --git a/tests/pos/dependent-annot.scala b/tests/pos/dependent-annot.scala new file mode 100644 index 000000000000..28f0f8bd59e6 --- /dev/null +++ b/tests/pos/dependent-annot.scala @@ -0,0 +1,7 @@ +class C +class ann(x: Any*) extends annotation.Annotation + +def f(y: C, z: C) = + def g(): C @ann(y, z) = ??? + val ac: ((x: C) => Array[String @ann(x)]) = ??? + val dc = ac(g()) diff --git a/tests/pos/depfun.scala b/tests/pos/depfun.scala new file mode 100644 index 000000000000..683b04b32e39 --- /dev/null +++ b/tests/pos/depfun.scala @@ -0,0 +1,17 @@ +// The following test is derived from scala/reflect/TypeTest.scala, but using +// a dependent function instead of a dependent SAM. It shows that the special treatment +// using a DependentTypeTree is not needed for plain function types. +// But for SAM types, the treatment is needed, otherwise TypeTest.scala does +// not typecheck. Todo: Figure out the reason for this difference. 
+object Test: + + type F[S, T] = (x: S) => Option[x.type & T] + + /** Trivial type test that always succeeds */ + def identity[T]: F[T, T] = Some(_) + + val x: 1 = 1 + val y = identity(x) + val z: Option[1] = y + + diff --git a/tests/pos/experimental-erased-2.scala b/tests/pos/experimental-erased-2.scala new file mode 100644 index 000000000000..f3b524e18463 --- /dev/null +++ b/tests/pos/experimental-erased-2.scala @@ -0,0 +1,8 @@ +import language.experimental.erasedDefinitions +import annotation.experimental + +@experimental object Test: + + erased class CanThrow[-E <: Exception] + + def other = 1 diff --git a/tests/pos/experimental-erased.scala b/tests/pos/experimental-erased.scala new file mode 100644 index 000000000000..156ad639f42d --- /dev/null +++ b/tests/pos/experimental-erased.scala @@ -0,0 +1,11 @@ +import language.experimental.erasedDefinitions +import annotation.experimental + +@experimental +erased class CanThrow[-E <: Exception](val i: Int = 0) + +@experimental +object Foo + +@experimental +def bar = 1 diff --git a/tests/pos/experimentalErased.scala b/tests/pos/experimentalErased.scala new file mode 100644 index 000000000000..358c134c714a --- /dev/null +++ b/tests/pos/experimentalErased.scala @@ -0,0 +1,22 @@ +import language.experimental.erasedDefinitions +import annotation.experimental + +@experimental +erased class Foo + +erased class Bar + +@experimental +erased def foo = 2 + +erased def bar = 2 + +@experimental +erased val foo2 = 2 + +erased val bar2 = 2 + +@experimental +def foo3(erased a: Int) = 2 + +def bar3(erased a: Int) = 2 diff --git a/tests/pos/experimentalExperimental.scala b/tests/pos/experimentalExperimental.scala new file mode 100644 index 000000000000..4b57e5b94346 --- /dev/null +++ b/tests/pos/experimentalExperimental.scala @@ -0,0 +1 @@ +class MyExperimentalAnnot extends scala.annotation.experimental diff --git a/tests/pos/f112.scala b/tests/pos/f112.scala new file mode 100644 index 000000000000..ad19fc5c4b04 --- /dev/null +++ 
b/tests/pos/f112.scala @@ -0,0 +1,6 @@ +type X +val x: X = ??? +extension (x: X) def unapply(arg: Any): Boolean = true +def test = + ??? match + case x() => diff --git a/tests/pos/forwardCompat-excludedExport/Test_r3.0.scala b/tests/pos/forwardCompat-excludedExport/Test_r3.0.scala new file mode 100644 index 000000000000..c635935471d8 --- /dev/null +++ b/tests/pos/forwardCompat-excludedExport/Test_r3.0.scala @@ -0,0 +1,2 @@ +object A: + export Tuple.{canEqualEmptyTuple as _, canEqualTuple as _, given, *} diff --git a/tests/pos/gadt-hkt-hi-bounds.scala b/tests/pos/gadt-hkt-hi-bounds.scala new file mode 100644 index 000000000000..fca06f375b6b --- /dev/null +++ b/tests/pos/gadt-hkt-hi-bounds.scala @@ -0,0 +1,8 @@ +type Const = [X] =>> Int + +trait Expr[-F[_]] +case class ConstExpr() extends Expr[Const] + +def foo[F[_], A](e: Expr[F]) = e match + case _: ConstExpr => + val i: Int = ??? : F[A] diff --git a/tests/pos/gadt-hkt-lo-bounds.scala b/tests/pos/gadt-hkt-lo-bounds.scala new file mode 100644 index 000000000000..177b7fe044f3 --- /dev/null +++ b/tests/pos/gadt-hkt-lo-bounds.scala @@ -0,0 +1,7 @@ +type Const = [X] =>> Int + +trait Expr[+F[_]] +case class ConstExpr() extends Expr[Const] + +def foo[F[_], A](e: Expr[F]): F[A] = e match + case _: ConstExpr => 0 diff --git a/tests/pos/gadt-param-unification.scala b/tests/pos/gadt-param-unification.scala new file mode 100644 index 000000000000..39c944469d74 --- /dev/null +++ b/tests/pos/gadt-param-unification.scala @@ -0,0 +1,6 @@ +trait Expr[T] +final class Lit[T] extends Expr[T] + +def foo[X, T1 >: X, T2](m: Expr[T2]): T2 = m match { + case _: Lit[T1] => ??? : X +} diff --git a/tests/pos/i0239.scala b/tests/pos/i0239.scala index 258c3c987adc..5b01cd224af1 100644 --- a/tests/pos/i0239.scala +++ b/tests/pos/i0239.scala @@ -1,3 +1,5 @@ +package i0239 + package p { class C[A] { implicit def foo: M[A] = ??? 
diff --git a/tests/pos/i10295.scala b/tests/pos/i10295.scala index 3ec7eefca0ac..44400ce29de6 100644 --- a/tests/pos/i10295.scala +++ b/tests/pos/i10295.scala @@ -10,9 +10,11 @@ def doSomething(body: M ?=> Unit) = body(using new M{}) def Test1 = given M = new M{} - import m.* - val x: X = X.foo() - println(x) + locally { + import m.* + val x: X = X.foo() + println(x) + } def Test2 = diff --git a/tests/pos/i10347/A_1.scala b/tests/pos/i10347/A_1.scala new file mode 100644 index 000000000000..e6a87f14edec --- /dev/null +++ b/tests/pos/i10347/A_1.scala @@ -0,0 +1,7 @@ +trait L[+T] { def head: T } +class K(val s: String) extends AnyVal +object A { + def foo: L[String] = ??? + def bar: L[K] = ??? + def baz(k: K): L[String] = ??? +} diff --git a/tests/pos/i10347/C_2.java b/tests/pos/i10347/C_2.java new file mode 100644 index 000000000000..7525c5e7325d --- /dev/null +++ b/tests/pos/i10347/C_2.java @@ -0,0 +1,5 @@ +public class C_2 { + String hi = A.foo().head(); + String hy = A.bar().head(); + String hj = A.baz("").head(); +} diff --git a/tests/pos/i10389.scala b/tests/pos/i10389.scala new file mode 100644 index 000000000000..a23f0d269a21 --- /dev/null +++ b/tests/pos/i10389.scala @@ -0,0 +1,9 @@ +import scala.util._ + +object FooBar { + def foo = List("1","two","3").collect{ x => + Try(x.toInt) match { + case Success(int) => int + } + } +} diff --git a/tests/pos/i10897.scala b/tests/pos/i10897.scala new file mode 100644 index 000000000000..da583090992b --- /dev/null +++ b/tests/pos/i10897.scala @@ -0,0 +1,6 @@ +import Tuple.Union + +object Foo + +val x = summon[Union[(Foo.type, 1)] =:= (Foo.type | 1)] // doesn't compile +val y = summon[Union[(Foo.type, 1, String)] =:= (Foo.type | 1 | String)] // compiles diff --git a/tests/pos/i10900.scala b/tests/pos/i10900.scala new file mode 100644 index 000000000000..6b7754d7e38f --- /dev/null +++ b/tests/pos/i10900.scala @@ -0,0 +1,23 @@ +import scala.collection.IterableOps +def foo[CC[A] <: IterableOps[A, CC, CC[A]], A](collection: 
CC[A]) = + collection == collection + +object Test1 { + import scala.collection.IterableOps + implicit class RichCollection[CC[A] <: IterableOps[A, CC, CC[A]], A](val collection: CC[A]) { + def awm(update: CC[A] => CC[A]): CC[A] = { + val newCollection = update(collection) + if (newCollection == collection) collection else newCollection.awm(update) + } + } +} + +object Test2 { + import scala.collection.IterableOps + implicit class RichCollection[CC[A] <: IterableOps[A, CC, CC[A]], A](val collection: CC[A]) { + def awm(update: CC[A] => CC[A]): CC[A] = update(collection) match { + case `collection` => collection + case updated => updated.awm(update) + } + } +} diff --git a/tests/pos/i11045.scala b/tests/pos/i11045.scala deleted file mode 100644 index da5d66a7a633..000000000000 --- a/tests/pos/i11045.scala +++ /dev/null @@ -1,2 +0,0 @@ -abstract class Foo(x: Any) -class Boom(var x: Unit, y: Unit) extends Foo((x: Int) => x) \ No newline at end of file diff --git a/tests/pos/i11163.scala b/tests/pos/i11163.scala new file mode 100644 index 000000000000..acf5629d1ae9 --- /dev/null +++ b/tests/pos/i11163.scala @@ -0,0 +1,12 @@ +inline def summonA[T](using x: T): x.type = x +inline def summonB[T](using inline x: T): x.type = x +inline def summonC[T](using inline x: T): T = x + +trait Foo: + def f: Int = 9 + +def test(using Foo) = + summonA[Foo].f + summonB[Foo].f + summonC[Foo].f + () diff --git a/tests/pos/i11168.scala b/tests/pos/i11168.scala index d40d628ccd5a..4d4ee6d3c1ec 100644 --- a/tests/pos/i11168.scala +++ b/tests/pos/i11168.scala @@ -1,5 +1,5 @@ trait Foo -given foo: Foo with {} +given foo: Foo() extension (using Foo)(x: Any) def foo1[A] = ??? 
diff --git a/tests/pos/i11185.scala b/tests/pos/i11185.scala new file mode 100644 index 000000000000..3b4a308bbc9e --- /dev/null +++ b/tests/pos/i11185.scala @@ -0,0 +1,14 @@ +class Test: + def foo(a: Int, b: Int) = a + b + + Map(1 -> 2).map(foo _) + Map(1 -> 2).map(foo) + +class Test2: + def foo(a: Int, b: Int) = a + b + + def bar(f: ((Int, Int)) => Int) = "ok" + def bar(f: ((Int, Int)) => String)(using Int) = "ok" + + bar(foo) + bar(foo _) diff --git a/tests/pos/i11220.scala b/tests/pos/i11220.scala new file mode 100644 index 000000000000..f6d600280bf6 --- /dev/null +++ b/tests/pos/i11220.scala @@ -0,0 +1,8 @@ +import scala.annotation.tailrec +class Context { + type Tree +} + +final def loop3[C <: Context](): Unit = + @tailrec + def loop4[A <: C](c: A): c.Tree = loop4(c) \ No newline at end of file diff --git a/tests/pos/i11318a.scala b/tests/pos/i11318a.scala new file mode 100644 index 000000000000..6422b32a375e --- /dev/null +++ b/tests/pos/i11318a.scala @@ -0,0 +1,4 @@ +extension(a: Int) + def b: Int = ??? + def h: Unit = + [A] => (r: Int) => b diff --git a/tests/pos/i11318b.scala b/tests/pos/i11318b.scala new file mode 100644 index 000000000000..b8bf2203d218 --- /dev/null +++ b/tests/pos/i11318b.scala @@ -0,0 +1,13 @@ +type FunctionK[A[_], B[_]] = [Z] => A[Z] => B[Z] +type ~>:[A[_], B[_]] = FunctionK[A, B] + +trait RepresentableK[F[_[_], _]]: + type RepresentationK[_] + + def tabulateK[A[_], C](f: RepresentationK ~>: A): F[A, C] + + extension[A[_], C](fa: F[A, C]) + def indexK: RepresentationK ~>: A + + def mapK[B[_]] (f: A ~>: B): F[B, C] = + tabulateK([Z] => (r: RepresentationK[Z]) => f(indexK(r))) diff --git a/tests/pos/i11318c.scala b/tests/pos/i11318c.scala new file mode 100644 index 000000000000..b1da38d7f6da --- /dev/null +++ b/tests/pos/i11318c.scala @@ -0,0 +1,6 @@ +extension(a: Int) + def b: Int = ??? 
+ def h: Unit = + new Function1[Int, Int] { + def apply(r: Int): Int = b + } diff --git a/tests/pos/i11481.scala b/tests/pos/i11481.scala new file mode 100644 index 000000000000..99fa6a250ff0 --- /dev/null +++ b/tests/pos/i11481.scala @@ -0,0 +1,2 @@ +case class Foo[F[_]](f: {def f(x: F[Int]): Object}) +case class Bar[F[_], G[_]](f: [B] => F[B] => G[B]) diff --git a/tests/pos/i11499.scala b/tests/pos/i11499.scala new file mode 100644 index 000000000000..2d64750c3b22 --- /dev/null +++ b/tests/pos/i11499.scala @@ -0,0 +1,11 @@ +trait Functor[F[_]] + +object data { + + type OptionT[F[_], A] = F[Option[A]] + + def fold[F[_], A, B](value: OptionT[F, A])(f: Functor[F]): F[B] = ??? + + def cata[F[_], A, B](value: OptionT[F, A])(f: Functor[F]): F[B] = + fold(value)(f) // error +} diff --git a/tests/pos/i11556.scala b/tests/pos/i11556.scala new file mode 100644 index 000000000000..782cdfeeac8f --- /dev/null +++ b/tests/pos/i11556.scala @@ -0,0 +1,24 @@ +type Traverser[-I, +O] = I => LazyList[(O)] +extension[I, O](ta: Traverser[I, O]) + def ~>[P](tb: Traverser[O, P]): Traverser[I, P] = ??? + +class Graph { class Node } + +case class Path[+E](e: E) +type Query[-I, +O] = Traverser[Path[I], Path[O]] + +def nodesQ(using g: Graph): Query[Nothing, g.Node] = ??? +def outsQ(using g: Graph): Query[g.Node, g.Node] = ??? + +object graphObj extends Graph +import graphObj._ +given graphObj.type = graphObj + +object Issue11556: + val q1: Query[Nothing, Node] = nodesQ ~> outsQ + implicitly[q1.type <:< Query[Nothing, Node]] + + val q2 = nodesQ ~> outsQ + val q3: Query[Nothing, Node] = q2 + implicitly[q2.type <:< Query[Nothing, Node]] +end Issue11556 diff --git a/tests/pos/i11631.scala b/tests/pos/i11631.scala new file mode 100644 index 000000000000..ecd6f267dafc --- /dev/null +++ b/tests/pos/i11631.scala @@ -0,0 +1,19 @@ +trait MyTrait: + def a(): String = "" + +class Nulll + +extension [T](x: T | Nulll) inline def nnn: x.type & T = ??? 
+ +class MyClass: + var myTrait: MyTrait|Null = null + + def printA(): Unit = println(myTrait.nnn.a()) + +@main def runTest(): Unit = + val mt = new MyTrait: + override def a(): String = "hello world" + + val mc = MyClass() + mc.myTrait = mt + mc.printA() diff --git a/tests/pos/i11631b.scala b/tests/pos/i11631b.scala new file mode 100644 index 000000000000..6f91532158fe --- /dev/null +++ b/tests/pos/i11631b.scala @@ -0,0 +1,15 @@ +trait MyTrait: + def a(): String = "" + +class MyClass: + var myTrait: MyTrait|Null = null + + def printA(): Unit = println(myTrait.nn.a()) + +@main def runTest(): Unit = + val mt = new MyTrait: + override def a(): String = "hello world" + + val mc = MyClass() + mc.myTrait = mt + mc.printA() diff --git a/tests/pos/i11864.scala b/tests/pos/i11864.scala index da7140e57b8d..4f7735f1c8c5 100644 --- a/tests/pos/i11864.scala +++ b/tests/pos/i11864.scala @@ -40,7 +40,7 @@ final class CallbackTo[+A] { object CallbackTo { type MapGuard[A] = { type Out = A } - erased given MapGuard[A]: MapGuard[A] = ??? + erased given MapGuard[A]: MapGuard[A] = compiletime.erasedValue def traverse[A, B](ta: List[A]): CallbackTo[List[B]] = val x: CallbackTo[List[A] => List[B]] = ??? 
diff --git a/tests/pos/i11955.scala b/tests/pos/i11955.scala new file mode 100644 index 000000000000..b4a9f3148ccf --- /dev/null +++ b/tests/pos/i11955.scala @@ -0,0 +1,25 @@ +object Hello { + + sealed abstract class X[+A] { + type This[+A] <: X[A] + def asThis: This[A] + } + + class Y[+A] extends X[A] { + override type This[+AA] = Y[AA] + override def asThis: This[A] = this + } + + def hackBackToSelf[F[+u] <: X[u], A](f: F[Any])(f2: f.This[A]): F[A] = + f2.asInstanceOf[F[A]] + + case class G[F[+u] <: X[u], A](wrapped: F[A]) { + + def mapF[F2[+u] <: X[u]](f: F[A] => F2[A]): G[F2, A] = + G[F2, A](f(wrapped)) + + def test_ko_1: G[F, A] = mapF(ct => hackBackToSelf(ct)(ct.asThis)) // error + def test_ko_2: G[F, A] = mapF[F](ct => hackBackToSelf(ct)(ct.asThis)) // error + def test_ok : G[F, A] = mapF(ct => hackBackToSelf[F, A](ct)(ct.asThis)) // ok + } +} \ No newline at end of file diff --git a/tests/pos/i12072-b.scala b/tests/pos/i12072-b.scala new file mode 100644 index 000000000000..07bca25b68be --- /dev/null +++ b/tests/pos/i12072-b.scala @@ -0,0 +1,9 @@ +transparent inline def f: Null = null + +inline def g: Unit = + inline if f == "V" then 1 else 2 + inline if f != "V" then 3 else 4 + inline if "v" == f then 5 else 6 + inline if "v" != f then 7 else 8 + +def test = g diff --git a/tests/pos/i12072-c.scala b/tests/pos/i12072-c.scala new file mode 100644 index 000000000000..f99f0da9049f --- /dev/null +++ b/tests/pos/i12072-c.scala @@ -0,0 +1,86 @@ +object T { + + transparent inline def f(inline s: String): String | Null = + null + + inline val V = "V" + inline def D = "D" + + trait Trait { def s: String } + + // =========================================================================== + // inline {if,match} over inline {val,def} + + transparent inline def if_v: String = + inline if V == "V" then "o" else "x" + + transparent inline def if_d: String = + inline if D == "D" then "o" else "x" + + transparent inline def match_v: String = + inline V match { case "V" => 
"o"; case _ => "x" } + + transparent inline def match_d: String = + inline D match { case "D" => "o"; case _ => "x" } + + // =========================================================================== + // inline {if,match} over inline f(inline {val,def}) + + transparent inline def if_fv: String = + inline if f(V) == "V" then "o" else "x" + + transparent inline def if_fd: String = + inline if f(D) == "D" then "o" else "x" + + transparent inline def match_fv: String = + inline f(V) match { case "V" => "o"; case _ => "x" } + + transparent inline def match_fd: String = + inline f(D) match { case "D" => "o"; case _ => "x" } + + // =========================================================================== + // inline {if,match} over inline {val,def} in overridden method + + object IfV extends Trait { + override transparent inline def s: String = + inline if V == "V" then "o" else "x" + } + + object IfD extends Trait { + override transparent inline def s: String = + inline if D == "D" then "o" else "x" // <--------------------------- error + } + + object MatchV extends Trait { + override transparent inline def s: String = + inline V match { case "V" => "o"; case _ => "x" } + } + + object MatchD extends Trait { + override transparent inline def s: String = + inline D match { case "D" => "o"; case _ => "x" } + } + + // =========================================================================== + // inline {if,match} over inline f(inline {val,def}) in overridden method + + object IfFV extends Trait { + override transparent inline def s: String = + inline if f(V) == "V" then "o" else "x" // <------------------------ error + } + + object IfFD extends Trait { + override transparent inline def s: String = + inline if f(D) == "D" then "o" else "x" // <------------------------ error + } + + object MatchFV extends Trait { + override transparent inline def s: String = + inline f(V) match { case "V" => "o"; case _ => "x" } + } + + object MatchFD extends Trait { + override 
transparent inline def s: String = + inline f(D) match { case "D" => "o"; case _ => "x" } + } +} diff --git a/tests/pos/i12072-d.scala b/tests/pos/i12072-d.scala new file mode 100644 index 000000000000..486e9f478771 --- /dev/null +++ b/tests/pos/i12072-d.scala @@ -0,0 +1,4 @@ +class Test: + def n: Null = null + def test1: Boolean = n == null + def test2: Boolean = null == n diff --git a/tests/pos/i12072-e.scala b/tests/pos/i12072-e.scala new file mode 100644 index 000000000000..713c86fba909 --- /dev/null +++ b/tests/pos/i12072-e.scala @@ -0,0 +1,3 @@ +def test: Boolean = nn(42) == 42 + +def nn(x: Int): x.type & Int = ??? diff --git a/tests/pos/i12072.scala b/tests/pos/i12072.scala new file mode 100644 index 000000000000..ab85059d676a --- /dev/null +++ b/tests/pos/i12072.scala @@ -0,0 +1,8 @@ +inline def c: Int = 2 + +trait A: + def f: Unit + +class B extends A: + override inline def f: Unit = + inline if c == 2 then () else () diff --git a/tests/pos/i12073.scala b/tests/pos/i12073.scala new file mode 100644 index 000000000000..52917477c316 --- /dev/null +++ b/tests/pos/i12073.scala @@ -0,0 +1,70 @@ +inline def ff: Unit = + inline 1 match + case 1 | 2 => + +def test = ff + +transparent inline def f: Option[String] = + None + +object Override { + + trait Trait { def s: String } + + object OK extends Trait { + override transparent inline def s: String = + inline f match { + case Some("x") => "x" + case Some("y") => "y" + case None => "-" + } + } + + object KO_1 extends Trait { + override transparent inline def s: String = + inline f match { + case Some("x") => "x" + case Some("y") + | None => "0" + } + } + + object KO_2 extends Trait { + override transparent inline def s: String = + inline f match { + case Some("x") => "x" + case Some("y") => "y" + case Some(z) => "z" + case None => "0" + } + } +} + +object NonOverride { + + transparent inline def ok_1: String = + inline f match { + case Some("x") => "x" + case Some("y") => "y" + case None => "-" + } + + // ok: 
Some("y") | None + transparent inline def ok_2: String = + inline f match { + case Some("x") => "x" + case Some("y") + | None => "0" + } + + // ok: no None + transparent inline def ok_3: String = + inline f match { + case Some("x") => "x" + case Some("y") => "y" + case Some(z) => "z" + case None => "0" + } + + ok_1 + ok_2 ++ ok_3 +} \ No newline at end of file diff --git a/tests/pos/i12112.scala b/tests/pos/i12112.scala new file mode 100644 index 000000000000..e979779353c0 --- /dev/null +++ b/tests/pos/i12112.scala @@ -0,0 +1,10 @@ +object A: + object B: + object C + +object X { + import A.B + + B.C // ok + export B.C // error +} diff --git a/tests/pos/i12126.scala b/tests/pos/i12126.scala new file mode 100644 index 000000000000..cffa7fbcbbca --- /dev/null +++ b/tests/pos/i12126.scala @@ -0,0 +1,59 @@ +object Structures: + + trait Functor[F[_]]: + extension [A](fa: F[A]) + def map[B](f: A => B): F[B] + def as[B](b: B): F[B] = map(_ => b) + def void: F[Unit] = as(()) + + trait Applicative[F[_]] extends Functor[F]: + def pure[A](a: A): F[A] + def unit: F[Unit] = pure(()) + extension[A](fa: F[A]) + def map2[B, C](fb: F[B], f: (A, B) => C): F[C] + def map[B](f: A => B): F[B] = + fa.map2(unit, (a, _) => f(a)) + + trait Monad[F[_]] extends Applicative[F]: + extension[A](fa: F[A]) + def flatMap[B](f: A => F[B]): F[B] + override def map[B](f: A => B): F[B] = + flatMap(a => pure(f(a))) + def map2[B, C](fb: F[B], f: (A, B) => C): F[C] = + flatMap(a => fb.map(b => f(a, b))) + + given Monad[List] with + def pure[A](a: A) = List(a) + extension[A](fa: List[A]) + def flatMap[B](f: A => List[B]) = fa.flatMap(f) + + given Monad[Option] with + def pure[A](a: A) = Some(a) + extension[A](fa: Option[A]) + def flatMap[B](f: A => Option[B]) = fa.flatMap(f) + + + opaque type Kleisli[F[_], A, B] = A => F[B] + + extension [F[_], A, B](k: Kleisli[F, A, B]) + def apply(a: A): F[B] = k(a) + + object Kleisli: + def apply[F[_], A, B](f: A => F[B]): Kleisli[F, A, B] = f + + given [F[_], A](using 
F: Monad[F]): Monad[[B] =>> Kleisli[F, A, B]] with + def pure[B](b: B) = Kleisli(_ => F.pure(b)) + extension[B](k: Kleisli[F, A, B]) + def flatMap[C](f: B => Kleisli[F, A, C]) = + a => k(a).flatMap(b => f(b)(a)) + +end Structures + +@main def run = + import Structures.{*, given} + println(List(1, 2, 3).map2(List(4, 5, 6), (_, _))) + + val p: Kleisli[Option, Int, Int] = Kleisli((x: Int) => if x % 2 == 0 then Some(x) else None) + val q: Kleisli[Option, Int, Int] = summon[Applicative[[B] =>> Kleisli[Option, Int, B]]].pure(20) + println(p.map2(q, _ + _)(42)) + diff --git a/tests/pos/i12127.scala b/tests/pos/i12127.scala new file mode 100644 index 000000000000..ce0ff6b31546 --- /dev/null +++ b/tests/pos/i12127.scala @@ -0,0 +1,5 @@ +val x = Option((1, 2, 3)).map(_ + _ + _) +def foo[T <: Tuple : Tuple.IsMappedBy[Option]](t: T)(f: Tuple.InverseMap[T, Option] => Int) = null +val y = foo(Option(1), Option(2), Option(3))(_ + _ + _) + +//val x: (Tuple3[Int, Int, Int] => Int) = _ + _ + _ diff --git a/tests/pos/i12133.scala b/tests/pos/i12133.scala new file mode 100644 index 000000000000..46b65f534e99 --- /dev/null +++ b/tests/pos/i12133.scala @@ -0,0 +1,15 @@ +class B + +class A { + def foo(x: B) = ??? + def foo(str: String) = ??? 
+} + +//implicit class C(x: A) { +// def foo(s: Int*) = s.size +//} +extension (x: A) def foo(s: Int*) = s.size + +val a = new A + +def test: Unit = a.foo(1, 2) \ No newline at end of file diff --git a/tests/pos/i12140/Test.scala b/tests/pos/i12140/Test.scala new file mode 100644 index 000000000000..073d096e8490 --- /dev/null +++ b/tests/pos/i12140/Test.scala @@ -0,0 +1 @@ +@main def Test = println(example.Trait.get) \ No newline at end of file diff --git a/tests/pos/i12140/Trait.scala b/tests/pos/i12140/Trait.scala new file mode 100644 index 000000000000..23d5cde613a0 --- /dev/null +++ b/tests/pos/i12140/Trait.scala @@ -0,0 +1,14 @@ +// Trait.scala +package example + +import quoted._ + +trait Trait { + implicit val foo: Int = 23 +} + +object Trait { + inline def get: Trait = ${ getImpl } + + def getImpl(using Quotes): Expr[Trait] = '{ new Trait {} } +} diff --git a/tests/pos/i12141.scala b/tests/pos/i12141.scala new file mode 100644 index 000000000000..df0d748ed008 --- /dev/null +++ b/tests/pos/i12141.scala @@ -0,0 +1,47 @@ +case class Test1(); case class Test2(); case class Test3(); +case class Test4(); case class Test5(); case class Test6(); + +sealed abstract class DSL { + def cont [P1 >: this.type <: DSL, P2 <: DSL](continuation: => P2) = + Continue[P1, P2](() => this, () => continuation) +} +case class Continue [P1 <: DSL, P2 <: DSL](p1: () => P1, p2: () => P2) extends DSL + +trait More[-A] {} +case class Out[C <: More[A], A](c: C, v: A) extends DSL +case class Nop() extends DSL + +val decision1:Boolean = true; +val decision2:Boolean = false; + +type P[ +ChanA <: More[Test1|Test2], +ChanB <: More[Test3|Test4], +ChanC <: More[Test5|Test6]] = + ((Out[ChanA,Test1] Continue ((Out[ChanB,Test3] Continue Nop)|(Out[ChanB,Test4] Continue Nop))) //works if remove first 'Continue Nop' + | (Out[ChanA,Test2] Continue ((Out[ChanC,Test5] Continue Nop)|(Out[ChanC,Test6] Continue Nop)))) + + +def p( chanA: More[Test1|Test2], chanB: More[Test3|Test4], chanC: More[Test5|Test6]) 
+ :P[chanA.type,chanB.type,chanC.type] ={ + if(decision1){ + Out(chanA,Test1()) cont { + if(decision2){ + Out(chanB,Test3()) cont Nop() //works if replace with 'Out(chanB,Test3())' + } + else{ + Out(chanB,Test4()) cont Nop() + } + } + } + else{ + Out(chanA,Test2()) cont { + if(decision2){ + Out(chanC,Test5()) cont Nop() + } + else{ + Out(chanC,Test6()) cont Nop() + } + } + } + } \ No newline at end of file diff --git a/tests/pos/i12168.scala b/tests/pos/i12168.scala new file mode 100644 index 000000000000..e01d9e024ca2 --- /dev/null +++ b/tests/pos/i12168.scala @@ -0,0 +1,9 @@ +package A { + opaque type T = Int + def t: T = 0 +} + +package B { + export A.T + val t: B.T = A.t +} diff --git a/tests/pos/i12169.scala b/tests/pos/i12169.scala new file mode 100644 index 000000000000..6ad5cc4fb621 --- /dev/null +++ b/tests/pos/i12169.scala @@ -0,0 +1,13 @@ +class Property[T] + +class VObject { + def properties() = { + List.empty[Property[?]].collect { + case p: Property[?] => List(p) + } + } +} + +class Event extends VObject { + override def properties() = ??? 
+} \ No newline at end of file diff --git a/tests/pos/i12178.scala b/tests/pos/i12178.scala new file mode 100644 index 000000000000..b2c97b44f0fc --- /dev/null +++ b/tests/pos/i12178.scala @@ -0,0 +1,24 @@ +opaque type LabelTagged[TLabel <: Singleton & String, TValue] = TValue + +object LabelTagged: + def apply[TLabel <: Singleton & String, TValue] + ( + label: TLabel, + value: TValue, + ) + : LabelTagged[TLabel, TValue] = value + +extension[TLabel <: Singleton & String, TValue] (labelTagged: LabelTagged[TLabel, TValue]) + def value + : TValue = labelTagged + + def label + (using label: ValueOf[TLabel]) + : TLabel + = label.value + +@main def hello(): Unit = { + val foo: LabelTagged["foo", Int] = LabelTagged("foo", 10) + println(label(foo)) // OK + println(foo.label) // was error, now OK +} diff --git a/tests/pos/i12180a.scala b/tests/pos/i12180a.scala new file mode 100644 index 000000000000..c8e6308ce6bc --- /dev/null +++ b/tests/pos/i12180a.scala @@ -0,0 +1,7 @@ +inline def n = null +inline def f = inline if n == null then 0 else 1 +inline def m = inline n match { case null => 0; case _ => 1 } + +@main def main = + println(f) // error + println(m) diff --git a/tests/pos/i12180b.scala b/tests/pos/i12180b.scala new file mode 100644 index 000000000000..56d436cf6772 --- /dev/null +++ b/tests/pos/i12180b.scala @@ -0,0 +1,9 @@ +inline def n = "im a string" +inline def f = inline if n == null then 0 else 1 +inline def g = inline if null == n then 0 else 1 +inline def m = inline n match { case null => 0; case _ => 1 } + +@main def main = + println(f) + println(g) + println(m) diff --git a/tests/pos/i12194-opaque.scala b/tests/pos/i12194-opaque.scala new file mode 100644 index 000000000000..b800d27bb140 --- /dev/null +++ b/tests/pos/i12194-opaque.scala @@ -0,0 +1,3 @@ +opaque type ProductK[F[_], T <: Tuple] = Tuple.Map[T, F] +object ProductK: + def of[F[_], T <: Tuple](t: Tuple.Map[T, F]): ProductK[F, T] = t diff --git a/tests/pos/i12211.scala b/tests/pos/i12211.scala new 
file mode 100644 index 000000000000..6c117e0bf474 --- /dev/null +++ b/tests/pos/i12211.scala @@ -0,0 +1,31 @@ + +def fst0[A, B[_]](a: A)(b: B[a.type]): a.type = a + +def fst[A, B[_]]: (a: A) => (b: B[a.type]) => a.type = + (a: A) => (b: B[a.type]) => a + +def snd[A, B[_]]: (a: A) => () => (b: B[a.type]) => b.type = + (a: A) => () => (b: B[a.type]) => b + +def fst1[A, B[_]]: (a: A) => (b: B[a.type]) => a.type = fst0 + +def test1[A, B[_]]: (a: A) => () => (b: B[a.type]) => Any = + snd[A, B] + +def test2[A, B[_]]: (a: A) => (b: B[a.type]) => A = fst[A, B] + +class AA +class BB[T] + +def test3: (a: AA) => (b: BB[a.type]) => BB[?] = + (a: AA) => (b: BB[a.type]) => b + +trait RelaxedSelectable extends Selectable.WithoutPreciseParameterTypes: + def applyDynamic(name: String, paramTypes: Class[_]*)(args: Any*): Any = ??? + +class Sink[A] extends RelaxedSelectable { + def put(x: A): Unit = {} +} +val a = new Sink[String] +val b: RelaxedSelectable { def put(x: String): Unit } = a +val _ = b.put("") diff --git a/tests/pos/i12218.scala b/tests/pos/i12218.scala new file mode 100644 index 000000000000..515e71f83fc8 --- /dev/null +++ b/tests/pos/i12218.scala @@ -0,0 +1,10 @@ +import language.experimental.fewerBraces +@main def runTest(): Unit = + val arr = Array(1,2,3) + if + arr.isEmpty + || : + val first = arr(0) + first != 1 + then println("invalid arr") + else println("valid arr") diff --git a/tests/pos/i12226.scala b/tests/pos/i12226.scala new file mode 100644 index 000000000000..b22e8b4cd428 --- /dev/null +++ b/tests/pos/i12226.scala @@ -0,0 +1,16 @@ +object Test extends App { + sealed trait P[T] + case class C1[T <: String](c1: T) extends P[T] + case class C2[T](c2: T) extends P[T] + + def test[T](p: P[T], t: T): Unit = p match { + case C1(_) => + // T <: String + val t : T = ??? 
+ val s : String = t + def test = new C1[T](t) + println(1) + + case C2(_) => println(2) + } +} diff --git a/tests/pos/i12247.scala b/tests/pos/i12247.scala new file mode 100644 index 000000000000..b2b522f36044 --- /dev/null +++ b/tests/pos/i12247.scala @@ -0,0 +1,27 @@ +sealed abstract class CtorType +object CtorType { + final class Props extends CtorType + sealed trait Summoner { type CT <: CtorType } + implicit def summonP: Summoner {type CT = Props} = ??? +} + +final case class Builder() { + def build(using ctorType: CtorType.Summoner): Component[ctorType.CT] = ??? +} + +final class Component[CT <: CtorType] + +object Test { + + def assertTypeOf[A](a: => A) = new TestDsl[A] + class TestDsl[A] { + def is[B](implicit ev: A =:= B): Unit = () + } + + type Expect = Component[CtorType.Props] + + assertTypeOf( Builder().build ).is[Expect] // error + + val x = Builder().build + assertTypeOf(x).is[Expect] // ok +} diff --git a/tests/pos/i12264.scala b/tests/pos/i12264.scala new file mode 100644 index 000000000000..6be35b8e0e9f --- /dev/null +++ b/tests/pos/i12264.scala @@ -0,0 +1,40 @@ +object test1: + + object Html { + final opaque type Tag[+N] = String + def apply[N](name: String): Tag[N] = ??? + } + + object HtmlTags { + final def br: Html.Tag[Int] = Html("br") + final def p = Html[Long]("p") + } + + object Test { + type Expect = Html.Tag[Any] + + val x = List[Expect](HtmlTags.br, HtmlTags.p) // ok + + val y = List(HtmlTags.br, HtmlTags.p) + y: List[Expect] // was error + } + +class test2: + type Tag[+N] + object Html: + def apply[N](name: String): Tag[N] = ??? 
+ + object HtmlTags { + final def br: Tag[Int] = Html("br") + final def p = Html[Long]("p") + } + + object Test { + type Expect = Tag[Any] + + val x = List[Expect](HtmlTags.br, HtmlTags.p) // ok + + val y = List(HtmlTags.br, HtmlTags.p) + y: List[Expect] // was error + } + diff --git a/tests/pos/i12265.scala b/tests/pos/i12265.scala new file mode 100644 index 000000000000..420d382a342b --- /dev/null +++ b/tests/pos/i12265.scala @@ -0,0 +1,14 @@ +object OK { + def apply(n: Int ): Unit = () + def apply(n: Long): Unit = () + apply(3) // ok + apply(3L) // ok +} + +object KO { + type Key = Int + def apply(n: Key ): Unit = () + def apply(n: Long): Unit = () + apply(3) // error + apply(3L) // ok +} diff --git a/tests/pos/i12273.scala b/tests/pos/i12273.scala new file mode 100644 index 000000000000..1201c2fc62bd --- /dev/null +++ b/tests/pos/i12273.scala @@ -0,0 +1,4 @@ +import scala.annotation.unchecked.uncheckedVariance + +final case class Outlet[T](out: T) +final case class SourceShape[+T](val out: Outlet[T @uncheckedVariance]) \ No newline at end of file diff --git a/tests/pos/i12277.scala b/tests/pos/i12277.scala new file mode 100644 index 000000000000..99eb7238da00 --- /dev/null +++ b/tests/pos/i12277.scala @@ -0,0 +1,18 @@ +def foo(f: => () => Unit): Unit = ??? +def boo(f: [A] => () => Unit): Unit = ??? 
+ +object test: + foo { () => // okay + println(1) + println(2) + } + + boo { [A] => () => // error + println(1) + println(2) + } + + boo { [A] => () => { // okay + println(1) + println(2) + }} \ No newline at end of file diff --git a/tests/pos/i12286.scala b/tests/pos/i12286.scala new file mode 100644 index 000000000000..d638c1304ab2 --- /dev/null +++ b/tests/pos/i12286.scala @@ -0,0 +1,13 @@ +sealed trait Base { def str: String } +case class One(str: String) extends Base +case class Two(str: String) extends Base +case class Three(str: String) extends Base + +case class Item(_id: String) + +private def doWithItem[T <: (One | Two | Three)] + (item: Item, value: T, action: (T) => Item) = doWithItemId(item._id, value, action) +private def doWithItemId[U <: (One | Two | Three)] + (itemId: String, value: U, action: (U) => Item) = + println(value.str) + Item("_id") \ No newline at end of file diff --git a/tests/pos/i12299.scala b/tests/pos/i12299.scala new file mode 100644 index 000000000000..cabd12ed3b05 --- /dev/null +++ b/tests/pos/i12299.scala @@ -0,0 +1,54 @@ +object Outer0 { + + object Inner { + class Bar(x: Int): + def this() = this(0) + } + + export Inner.Bar + + val _ = Bar() + val _ = Bar(2) + +} + +object Outer2 { + + object Inner { + class Bar(x: Int): + def this() = this(0) + } + + object test2: + export Inner._ + + val x = Bar() + val y = Bar(2) + + object test3: + export Inner.Bar + def Bar: () => String = () => "" + val x = Bar() +} + +object Outer3 { + export Outer0._ + + private val x = Bar() + private val y = Bar(2) +} + +object Outer4 { + + object Inner { + class Bar(x: Int): + def this() = this(0) + object Bar + } + + export Inner._ + + val _ = Bar() + val _ = Bar(2) + +} diff --git a/tests/pos/i12299/Outer_1.scala b/tests/pos/i12299/Outer_1.scala new file mode 100644 index 000000000000..200c21beecb8 --- /dev/null +++ b/tests/pos/i12299/Outer_1.scala @@ -0,0 +1,9 @@ +object Outer { + + object Inner { + class Bar(x: Int): + def this() = this(0) + } + 
+ export Inner.Bar +} diff --git a/tests/pos/i12299/Test_2.scala b/tests/pos/i12299/Test_2.scala new file mode 100644 index 000000000000..ce4c9390744d --- /dev/null +++ b/tests/pos/i12299/Test_2.scala @@ -0,0 +1,4 @@ +import Outer._ + +val x = Bar() +val y = Bar(2) diff --git a/tests/pos/i12299a.scala b/tests/pos/i12299a.scala new file mode 100644 index 000000000000..90819f0a0bca --- /dev/null +++ b/tests/pos/i12299a.scala @@ -0,0 +1,11 @@ +object Outer { + + object Wrap { + export Outer.Bar + } + + class Bar + + val wrapBar = Wrap.Bar() +} + diff --git a/tests/pos/i12306.scala b/tests/pos/i12306.scala new file mode 100644 index 000000000000..86638a2c8c36 --- /dev/null +++ b/tests/pos/i12306.scala @@ -0,0 +1,23 @@ +class Record(elems: Map[String, Any]) extends Selectable: + val fields = elems.toMap + def selectDynamic(name: String): Any = fields(name) +object Record: + def apply(elems: Map[String, Any]): Record = new Record(elems) +extension [A <: Record] (a:A) { + def join[B <: Record] (b:B): A & B = { + Record(a.fields ++ b.fields).asInstanceOf[A & B] + } +} + +type Person = Record { val name: String; val age: Int } +type Child = Record { val parent: String } +type PersonAndChild = Record { val name: String; val age: Int; val parent: String } + +@main def hello = { + val person = Record(Map("name" -> "Emma", "age" -> 42)).asInstanceOf[Person] + val child = Record(Map("parent" -> "Alice")).asInstanceOf[Child] + val personAndChild = person.join(child) + + val v1: PersonAndChild = personAndChild + val v2: PersonAndChild = person.join(child) +} \ No newline at end of file diff --git a/tests/pos/i12373.scala b/tests/pos/i12373.scala new file mode 100644 index 000000000000..61ac1e085892 --- /dev/null +++ b/tests/pos/i12373.scala @@ -0,0 +1,40 @@ +sealed case class Column[A](name: String) + +sealed trait ColumnSet { + type Append[That <: ColumnSet] <: ColumnSet + def ++[That <: ColumnSet](that: That): Append[That] +} + +object ColumnSet { + type Empty = Empty.type + 
type Singleton[A] = Cons[A, Empty] + + case object Empty extends ColumnSet { + type Append[That <: ColumnSet] = That + override def ++[That <: ColumnSet](that: That): Append[That] = that + } + + sealed case class Cons[A, B <: ColumnSet](head: Column[A], tail: B) extends ColumnSet { self => + type Append[That <: ColumnSet] = Cons[A, tail.Append[That]] + override def ++[That <: ColumnSet](that: That): Append[That] = Cons(head, tail ++ that) + } + + def long(name: String): Singleton[Long] = Cons(Column[Long](name), Empty) + def string(name: String): Singleton[String] = Cons(Column[String](name), Empty) +} + +object Example { + import ColumnSet._ + val schema0 = long("id") ++ string("first_name") + + // inferred type 3.0.0-RC3: Singleton[Long]#Append[Cons[String, Empty]]#Append[Singleton[String]] + // inferred type 2.13.5 : Cons[Long,Cons[String,Singleton[String]]] + val schema1 = long("id") ++ string("first_name") ++ string("last_name") + + // inferred type 3.0.0-RC3: error + // inferred type 2.13.5 : Cons[Long,Cons[String,Cons[String,Singleton[Long]]]] + val schema2 = long("id") ++ string("first_name") ++ string("last_name") ++ long("age") + + // inferred type 3.0.0-RC3: Singleton[Long]#Append[Cons[String, Empty]]#Append[Singleton[String]]#Append[Cons[Long, Empty]] + val schema3 = ((long("id") ++ string("first_name") ++ string("last_name")): Singleton[Long]#Append[ColumnSet.Cons[String, ColumnSet.Empty]]#Append[ColumnSet.Singleton[String]]) ++ long("age") +} diff --git a/tests/pos/i12379a.scala b/tests/pos/i12379a.scala new file mode 100644 index 000000000000..0a1bc8461226 --- /dev/null +++ b/tests/pos/i12379a.scala @@ -0,0 +1,12 @@ +inline def convFail[Of, From](inline from : From) : Unit = + val c = compiletime.summonInline[Conversion[from.type, Of]] + +inline def convOK[Of, From](inline from : From)(using c : Conversion[from.type, Of]) : Unit = {} + +class Bar[T](value : T) +given [T <: Int] : Conversion[T, Bar[T]] = Bar(_) + +@main def main : Unit = { + 
convOK[Bar[1],1](1) + convFail[Bar[1],1](1) //causes error +} diff --git a/tests/pos/i12379b.scala b/tests/pos/i12379b.scala new file mode 100644 index 000000000000..2a83f69bae3d --- /dev/null +++ b/tests/pos/i12379b.scala @@ -0,0 +1,12 @@ +inline def convFail[Of, From](inline from : From) : Unit = + val c = compiletime.summonInline[Conversion[From, Of]] + +inline def convOK[Of, From](inline from : From)(using c : Conversion[From, Of]) : Unit = {} + +class Bar[T](value : T) +given [T <: Int] : Conversion[T, Bar[T]] = Bar(_) + +@main def main : Unit = { + convOK[Bar[1],1](1) + convFail[Bar[1],1](1) //causes error +} diff --git a/tests/pos/i12390-gadt.scala b/tests/pos/i12390-gadt.scala new file mode 100644 index 000000000000..1525350b56f7 --- /dev/null +++ b/tests/pos/i12390-gadt.scala @@ -0,0 +1,9 @@ +enum Func[-A, +B] { + case Double extends Func[Int, Int] + case ToString extends Func[Float, String] + + def run: A => B = this match { + case Double => (x: Int) => x * 2 + case ToString => (x: Float) => x.toString + } +} diff --git a/tests/pos/i12395.scala b/tests/pos/i12395.scala new file mode 100644 index 000000000000..6915ba5655b9 --- /dev/null +++ b/tests/pos/i12395.scala @@ -0,0 +1,9 @@ +@main def main : Unit = + val x = 1 + + val y = x match + case 1 => 1 + case _ => + println("bad") + ??? 
+ println(x) \ No newline at end of file diff --git a/tests/pos/i12396.scala b/tests/pos/i12396.scala new file mode 100644 index 000000000000..772cfaf49533 --- /dev/null +++ b/tests/pos/i12396.scala @@ -0,0 +1,7 @@ +object + { + val x = 1 +} + +object Check { + val y = +.x +} \ No newline at end of file diff --git a/tests/pos/i12474.scala b/tests/pos/i12474.scala new file mode 100644 index 000000000000..41762fba55c1 --- /dev/null +++ b/tests/pos/i12474.scala @@ -0,0 +1,17 @@ +package bugreport + +import scala.compiletime.erasedValue + +trait Show[A]: + def show(a: A): String + +inline def showTuple[Types]: Show[Types] = + inline erasedValue[Types] match + case _: (head *: tail) => + val instance = + new Show[head *: tail]: + def show(tuple: head *: tail): String = "dummy" + instance.asInstanceOf[Show[Types]] + +@main def run() = + showTuple[(Int, Int)] diff --git a/tests/pos/i12476.scala b/tests/pos/i12476.scala new file mode 100644 index 000000000000..1509f82cdc5f --- /dev/null +++ b/tests/pos/i12476.scala @@ -0,0 +1,10 @@ +object test { + def foo[A, B](m: B) = { + m match { + case _: A => + m match { + case _: B => // crash with -Yno-deep-subtypes + } + } + } +} diff --git a/tests/pos/i12583.scala b/tests/pos/i12583.scala new file mode 100644 index 000000000000..a680983e92e8 --- /dev/null +++ b/tests/pos/i12583.scala @@ -0,0 +1,5 @@ +case class Wrapper[A <: Singleton](a: A)(using ValueOf[A]) + +def test[A <: Singleton](blabla: Wrapper[A]): Unit = () + +val _ = test(Wrapper("yo")) \ No newline at end of file diff --git a/tests/pos/i12616.scala b/tests/pos/i12616.scala new file mode 100644 index 000000000000..ff5a6bf204fd --- /dev/null +++ b/tests/pos/i12616.scala @@ -0,0 +1,9 @@ +class Foo: + + //object Bar + val Bar = 22 + + object Baz: + def f(x: Any): Unit = + x match + case s: (Bar.type & x.type) => \ No newline at end of file diff --git a/tests/pos/i12632.scala b/tests/pos/i12632.scala new file mode 100644 index 000000000000..c17a4324486a --- /dev/null +++ 
b/tests/pos/i12632.scala @@ -0,0 +1,10 @@ +class CCC[S](val i: Int) { + def this() = + this( + { + val z = new Ordering[S] { + override def compare(x: S, y: S): Int = ??? + } + 3 + }) +} diff --git a/tests/pos/i12645.scala b/tests/pos/i12645.scala new file mode 100644 index 000000000000..ff7e333ec267 --- /dev/null +++ b/tests/pos/i12645.scala @@ -0,0 +1,11 @@ +case class TBox[A <: Tuple](v: A) +case class IBox[A <: Int](v: A) + +@main def m = + val t: TBox[EmptyTuple] = TBox(EmptyTuple) + val tt: Tuple.Map[(EmptyTuple, EmptyTuple), TBox] = (TBox(EmptyTuple), TBox(EmptyTuple)) + + val tt2: Tuple.Map[(1, 2), IBox] = (IBox(1), IBox(2)) + + type F[X] = (X, X) + val tt3: Tuple.FlatMap[(1, 2), F] = (1, 1, 2, 2) diff --git a/tests/pos/i12677.scala b/tests/pos/i12677.scala new file mode 100644 index 000000000000..339eb5594b34 --- /dev/null +++ b/tests/pos/i12677.scala @@ -0,0 +1,31 @@ +class F[A] +object F { + def apply[A](a: => A) = new F[A] +} + +trait TC[A] { type Out } +object TC { + implicit def tc[A]: TC[A] { type Out = String } = ??? +} + +// ==================================================================================== +object Bug { + final class CustomHook[A] { + def blah(implicit tc: TC[A]): CustomHook[tc.Out] = ??? + } + + def i: CustomHook[Int] = ??? + val f = F(i.blah) + f: F[CustomHook[String]] // error +} + +// ==================================================================================== +object Workaround { + final class CustomHook[A] { + def blah[B](implicit tc: TC[A] { type Out = B }): CustomHook[B] = ??? // raise type + } + + def i: CustomHook[Int] = ??? 
+ val f = F(i.blah) + f: F[CustomHook[String]] // works +} \ No newline at end of file diff --git a/tests/pos/i12708.scala b/tests/pos/i12708.scala new file mode 100644 index 000000000000..f8149f0732d0 --- /dev/null +++ b/tests/pos/i12708.scala @@ -0,0 +1,37 @@ +import language.implicitConversions + +trait AdditiveSemigroup[A] + +final class AdditiveSemigroupOps[A](lhs: A)(implicit as: AdditiveSemigroup[A]) { + def +(rhs: A): A = ??? + def ^(rhs: A): A = ??? +} + +trait AdditiveSemigroupSyntax { + implicit def additiveSemigroupOps[A: AdditiveSemigroup](a: A): AdditiveSemigroupOps[A] = + new AdditiveSemigroupOps(a) +} + +object syntax { + object additiveSemigroup extends AdditiveSemigroupSyntax +} + +object App { + + def main(args: Array[String]): Unit = { + import syntax.additiveSemigroup._ + + implicit def IntAlgebra[A]: AdditiveSemigroup[Map[Int, A]] = ??? + + def res[A]: Map[Int, A] = { + val a: Map[Int, A] = Map.empty + val b: Map[Int, A] = Map.empty + // Calls the operator on AdditiveSemigroupOps + a ^ b + // Calls the operator + on AdditiveSemigroupOps only in Scala 2 + // In Scala 3 tries to call `+` on Map + a + b + } + } + +} \ No newline at end of file diff --git a/tests/pos/i12715/full.scala b/tests/pos/i12715/full.scala new file mode 100644 index 000000000000..2f10dd5aaaf5 --- /dev/null +++ b/tests/pos/i12715/full.scala @@ -0,0 +1,28 @@ +package repro + +import compiletime.{constValue, erasedValue} + +sealed trait ValidateExprInt + +class And[A <: ValidateExprInt, B <: ValidateExprInt] extends ValidateExprInt +class GreaterThan[T <: Int] extends ValidateExprInt + +object Repro: + inline def validate[E <: ValidateExprInt](v: Int): String = + val failMsg = validateV[E](v) + if failMsg == "neverPass" then "neverPass" + else "something else" + + transparent inline def validateV[E <: ValidateExprInt](v: Int): String = + inline erasedValue[E] match + case _: GreaterThan[t] => + "GreaterThan" + case _: And[a, b] => + inline validateV[a](v) match + case "" => + 
validateV[b](v) + case other => + other + + @main def test(): Unit = + println(validate[And[GreaterThan[10], GreaterThan[12]]](5)) diff --git a/tests/pos/i12715/minimized.scala b/tests/pos/i12715/minimized.scala new file mode 100644 index 000000000000..4bd8a2b1a404 --- /dev/null +++ b/tests/pos/i12715/minimized.scala @@ -0,0 +1,8 @@ +transparent inline def f: String = + inline 10 match + case _ => + inline "foo" match + case x : String => x + +def test = + inline val failMsg = f diff --git a/tests/pos/i12722.scala b/tests/pos/i12722.scala new file mode 100644 index 000000000000..3740229dd947 --- /dev/null +++ b/tests/pos/i12722.scala @@ -0,0 +1,8 @@ +trait JsAny extends AnyRef +class JsObject extends JsAny + +trait HTMLAttributes[T] extends JsObject +trait Component[P] extends JsObject +trait IPersonaSharedProps extends HTMLAttributes[PersonaCoinBase] +trait PersonaCoinBase extends Component[IPersonaCoinProps] +trait IPersonaCoinProps extends IPersonaSharedProps \ No newline at end of file diff --git a/tests/pos/i12723.scala b/tests/pos/i12723.scala new file mode 100644 index 000000000000..d1cab3ede638 --- /dev/null +++ b/tests/pos/i12723.scala @@ -0,0 +1,10 @@ +class Fun[|*|[_, _]] { + enum ->[A, B] { + case BiId[X, Y]() extends ((X |*| Y) -> (X |*| Y)) + } + + def go[A, B](f: A -> B): Unit = + f match { + case ->.BiId() => () + } +} diff --git a/tests/pos/i12730.scala b/tests/pos/i12730.scala new file mode 100644 index 000000000000..18a6ac921a6d --- /dev/null +++ b/tests/pos/i12730.scala @@ -0,0 +1,37 @@ +class ComponentSimple + +class Props { + def apply(props: Any): Any = ??? +} + +class Foo[C] { + def build: ComponentSimple = ??? +} + +class Bar[E] { + def render(r: E => Any): Unit = {} +} + +trait Conv[A, B] { + def apply(a: A): B +} + +object Test { + def toComponentCtor[F](c: ComponentSimple): Props = ??? + + def defaultToNoBackend[G, H](ev: G => Foo[H]): Conv[Foo[H], Bar[H]] = ??? + + def conforms[A]: A => A = ??? 
+ + def problem = Main // crashes + + def foo[H]: Foo[H] = ??? + + val NameChanger = + foo + .build + + val Main = + defaultToNoBackend(conforms).apply(foo) + .render(_ => toComponentCtor(NameChanger)(13)) +} diff --git a/tests/pos/i12739-fallout.scala b/tests/pos/i12739-fallout.scala new file mode 100644 index 000000000000..7f685b3c1a0f --- /dev/null +++ b/tests/pos/i12739-fallout.scala @@ -0,0 +1,8 @@ +// This is a minimisation of the fallout that the original fix caused on Shapeless 3. + +type Foo = { type Bar } + +extension (foo: Foo) + def toBar(): foo.Bar = ??? + +def test(foo: Foo): foo.Bar = foo.toBar() diff --git a/tests/pos/i12739.scala b/tests/pos/i12739.scala new file mode 100644 index 000000000000..baf7a7ae2698 --- /dev/null +++ b/tests/pos/i12739.scala @@ -0,0 +1,26 @@ +object X { + + class CA[A] + type C = CA[_] + val c: C = ??? + def f[A](r: CA[A]) = () + def g(): CA[_] = CA() + def h(): C = ??? + + // works + f(c) + + // works + val x = c.asInstanceOf[C] + f(x) + + // was: error + f(c.asInstanceOf[C]) + + // works, error in Scala 2 + f(c.asInstanceOf[c.type]) + + f(c.asInstanceOf[CA[_]]) + f(g()) + f(h()) +} diff --git a/tests/pos/i12754.scala b/tests/pos/i12754.scala new file mode 100644 index 000000000000..0c4c54bf6883 --- /dev/null +++ b/tests/pos/i12754.scala @@ -0,0 +1,13 @@ +transparent inline def transform(inline a: Any): Any = inline a match { + case x: Byte => x + case x: Short => x + case x: Int => x + case x: Long => x + case x: Float => x + case x: Double => x + case _ => a +} + +def test = { + println(transform(0) < 5) +} diff --git a/tests/pos/i12757.scala b/tests/pos/i12757.scala new file mode 100644 index 000000000000..906458f58e63 --- /dev/null +++ b/tests/pos/i12757.scala @@ -0,0 +1,8 @@ +val x = Some(10) + +def f = + if x.exists + (x => x == 10) then + println("Yes") + else + println("No") \ No newline at end of file diff --git a/tests/pos/i12802.scala b/tests/pos/i12802.scala new file mode 100644 index 000000000000..e26ea433f2d6 
--- /dev/null +++ b/tests/pos/i12802.scala @@ -0,0 +1,9 @@ +import scala.quoted._ + +object Boo: + def foo(using Quotes): Unit = + import quotes.reflect._ + given Option[Symbol] = Some[Symbol](???) + def bar(using Quotes): Unit = + import quotes.reflect.Symbol + given Option[Symbol] = Some[Symbol](???) diff --git a/tests/pos/i12803.scala b/tests/pos/i12803.scala new file mode 100644 index 000000000000..2b370c70fca6 --- /dev/null +++ b/tests/pos/i12803.scala @@ -0,0 +1,12 @@ +trait X { + type Y +} + +trait E[A] + +trait Test { + val x: X + def wrap(x: X): E[x.Y] = ??? + def run[I](i: E[I]): Unit = ??? + run(wrap(x)) +} diff --git a/tests/pos/i12834.scala b/tests/pos/i12834.scala new file mode 100644 index 000000000000..c007eaf6844f --- /dev/null +++ b/tests/pos/i12834.scala @@ -0,0 +1,2 @@ +class A(val ref: Option[B]) +class B extends A(None) diff --git a/tests/pos/i12897/A.scala b/tests/pos/i12897/A.scala new file mode 100644 index 000000000000..17caea956de3 --- /dev/null +++ b/tests/pos/i12897/A.scala @@ -0,0 +1,5 @@ + +package TestFoo +extension (text : String) def foo(defaultArg : Boolean = true) : String = "" + +//def foo(text : String) (defaultArg : Boolean = true) : String = "" diff --git a/tests/pos/i12897/B.scala b/tests/pos/i12897/B.scala new file mode 100644 index 000000000000..ac1fa8dccf3c --- /dev/null +++ b/tests/pos/i12897/B.scala @@ -0,0 +1,4 @@ + +package TestFoo + +val f = "123".foo() //error \ No newline at end of file diff --git a/tests/pos/i12909.scala b/tests/pos/i12909.scala new file mode 100644 index 000000000000..face34e2e5a3 --- /dev/null +++ b/tests/pos/i12909.scala @@ -0,0 +1,42 @@ +package example + +final case class Writer[W, A](run: (W, A)) { + def map[B](f: A => B): Writer[W, B] = ??? + + def flatMap[B](f: A => Writer[W, B]): Writer[W, B] = ??? +} + +object Main { + implicit class WriterOps[A](a: A) { + def set[W](w: W): Writer[W, A] = ??? + } + + def x1[A]: Writer[Vector[String], Option[A]] = ??? 
+ + val failure = for { + a1 <- { + Option(1) match { + case Some(x) => + x1[Boolean] + case _ => + Option.empty[Boolean].set(Vector.empty[String]) + } + } + a2 <- x1[String] + } yield () + + val success = for { + a1 <- { + val temp = Option(1) match { + case Some(x) => + x1[Boolean] + case _ => + Option.empty[Boolean].set(Vector.empty[String]) + } + // why ??? + temp + } + a2 <- x1[String] + } yield () + +} \ No newline at end of file diff --git a/tests/pos/i12910.scala b/tests/pos/i12910.scala new file mode 100644 index 000000000000..e78408aa5e5f --- /dev/null +++ b/tests/pos/i12910.scala @@ -0,0 +1,27 @@ +trait Type[T]: + type Out + +type varchar + +given Type[varchar] with + type Out = String + +class Placeholder[T, U] + +object Placeholder: + def apply[T](using t: Type[T]): Placeholder[T, t.Out] = new Placeholder + +trait Encoder[P, X]: + def encode(x: X): String + +object Encoder: + def apply[P, X](placeholder: P)(using e: Encoder[P, X]): X => String = e.encode + + given [T, X]: Encoder[Placeholder[T, X], X] with + def encode(x: X): String = ??? + +def Test = + // the following compiles just fine + Encoder(new Placeholder[varchar, String])("hello") + // the following fails + Encoder(Placeholder[varchar])("hello") diff --git a/tests/pos/i12915.scala b/tests/pos/i12915.scala new file mode 100644 index 000000000000..657df2a5be8b --- /dev/null +++ b/tests/pos/i12915.scala @@ -0,0 +1,105 @@ +trait E[T] + +class X { + val e1: E[Int] = ??? + val e2: E[String] = ??? + val e3: E[List[Int]] = ??? + val e4: E[List[String]] = ??? + val e5: E[Double] = ??? + val e6: E[(String, String)] = ??? + val e7: E[(String, Int)] = ??? + val e8: E[(Int, List[String])] = ??? + val e9: E[Long] = ??? + val e10: E[(Long, Long)] = ??? + val e11: E[(Long, Long, Int)] = ??? + val e12: E[List[Long]] = ??? + val e13: E[List[Int]] = ??? + val e14: E[(String, String)] = ??? + val e15: E[(String, String, String)] = ??? + val e16: E[(Int, String)] = ??? + val e17: E[(String, Long, String)] = ??? 
+ val e18: E[(Long, String, String)] = ??? + val e19: E[(String, String, Long)] = ??? + val e20: E[(String, Int, String)] = ??? + val e21: E[(Int, String, String)] = ??? + val e22: E[(String, String, Int)] = ??? + val e23: E[(String, String, Boolean)] = ??? + val e24: E[(Boolean, Boolean, String)] = ??? + val e25: E[(String, Int, Boolean)] = ??? + val e26: E[List[(String, String)]] = ??? + val e27: E[List[(Int, String)]] = ??? + val e28: E[List[(String, Int)]] = ??? + val e29: E[List[(Long, String)]] = ??? + val e30: E[List[(String, Long)]] = ??? + val e31: E[List[(Boolean, String)]] = ??? + val e32: E[List[(String, Boolean)]] = ??? + val e33: E[List[((String, String), String)]] = ??? + val e34: E[List[((String, Int), String)]] = ??? + val e35: E[List[((Long, String), String)]] = ??? + val e36: E[List[((Boolean, String), String)]] = ??? + val e37: E[List[((String, String), Int)]] = ??? + val e38: E[List[((String, String), (String, Int))]] = ??? + val e39: E[List[((Boolean, Long), (String, Int))]] = ??? + val e40: E[List[((Int, Long), (Boolean, Int))]] = ??? + val e41: E[List[((String, (Int, String)), (String, Int))]] = ??? + val e42: E[List[((Boolean, (Int, String)), (String, Int))]] = ??? + val e43: E[List[((String, (Int, String)), (Boolean, Int))]] = ??? + val e44: E[(Int, List[String], Long)] = ??? + val e45: E[(Int, List[Int], Long)] = ??? + val e46: E[(Int, List[Long], Long)] = ??? + val e47: E[(String, List[String], Long)] = ??? + val e48: E[(Int, List[String], Boolean)] = ??? + val e49: E[Char] = ??? 
+ + val all = List( + e1, + e2, + e3, + e4, + e5, + e6, + e7, + e8, + e9, + e10, + e11, + e12, + e13, + e14, + e15, + e16, + e17, + e18, + e19, + e20, + e21, + e22, + e23, + e24, + e25, + e26, + e27, + e28, + e29, + e30, + e31, + e32, + e33, + e34, + e35, + e36, + e37, + e38, + e39, + e40, + e41, + e42, + e43, + e44, + e45, + e46, + e47, + e48, + e49 + ) +} \ No newline at end of file diff --git a/tests/pos/i12945/A_1.scala b/tests/pos/i12945/A_1.scala new file mode 100644 index 000000000000..890171a63051 --- /dev/null +++ b/tests/pos/i12945/A_1.scala @@ -0,0 +1,10 @@ +opaque type Lie[W <: Int] = Int +object Lie: + trait TC[-T]: + type Out + object TC: + given [W <: Int]: TC[Lie[W]] with + type Out = W + +val x = summon[Lie.TC[Lie[7]]] +val works = summon[x.Out =:= 7] diff --git a/tests/pos/i12945/B_2.scala b/tests/pos/i12945/B_2.scala new file mode 100644 index 000000000000..371754a87ff5 --- /dev/null +++ b/tests/pos/i12945/B_2.scala @@ -0,0 +1,3 @@ +object Test: + val x = summon[Lie.TC[Lie[7]]] + val fails = summon[x.Out =:= 7] diff --git a/tests/pos/i12949.scala b/tests/pos/i12949.scala new file mode 100644 index 000000000000..5a886aa894b3 --- /dev/null +++ b/tests/pos/i12949.scala @@ -0,0 +1,19 @@ +object Catch22: + trait TC[V] + object TC: + export Hodor.TC.given + +object Hodor: + object TC: + import Catch22.TC + given fromString[V <: String]: TC[V] = ??? 
+ transparent inline given fromDouble[V <: Double]: TC[V] = + new TC[V]: + type Out = Double + given fromInt[V <: Int]: TC[V] with + type Out = Int + +object Test: + summon[Catch22.TC["hi"]] //works + summon[Catch22.TC[7.7]] //works + summon[Catch22.TC[1]] //error diff --git a/tests/pos/i12950/repro_1.scala b/tests/pos/i12950/repro_1.scala new file mode 100644 index 000000000000..4c4a12de8cd6 --- /dev/null +++ b/tests/pos/i12950/repro_1.scala @@ -0,0 +1,27 @@ +package repro +object repro: + object opq: + opaque type Lift[T] = Int + extension(v: Int) + def lift[T]: Lift[T] = v + extension[T](l: Lift[T]) + def value: Int = l + + export opq.Lift as Lift + export opq.lift as lift + + final type Two + + extension[TL](l: Lift[TL]) + def repro[TR](using m: Mul[TL, TR]): Int = l.value + m.value + + abstract class Mul[TL, TR]: + val value: Int + + transparent inline given mulGivenInt[TL <: Int & Singleton, TR <: Int & Singleton]: Mul[TL, TR] = + val m: Int = scala.compiletime.constValue[TL] * scala.compiletime.constValue[TR] + new Mul[TL, TR] { val value: Int = m } + + transparent inline given mulGivenTwo[TR <: Int & Singleton]: Mul[Two, TR] = + val m: Int = 2 * scala.compiletime.constValue[TR] + new Mul[Two, TR] { val value: Int = m } \ No newline at end of file diff --git a/tests/pos/i12950/test_2.scala b/tests/pos/i12950/test_2.scala new file mode 100644 index 000000000000..2640501f4388 --- /dev/null +++ b/tests/pos/i12950/test_2.scala @@ -0,0 +1,6 @@ +import repro.repro.{*, given} + +val x = 1.lift[Two] +val _ = x.repro[2] +val y = 1.lift[2] +val _ = y.repro[2] diff --git a/tests/pos/i12953.scala b/tests/pos/i12953.scala new file mode 100644 index 000000000000..a50bbda70187 --- /dev/null +++ b/tests/pos/i12953.scala @@ -0,0 +1,7 @@ +class Schema(impl: Class[_]) extends scala.annotation.StaticAnnotation + +class Ann[A] extends scala.annotation.StaticAnnotation + +case class Foo[A](@Schema(classOf[List[A]]) foo: String) +case class Bar[A](@Ann[A] foo: String) +def 
baz[A](@Ann[A] foo: String) = () diff --git a/tests/pos/i12955.scala b/tests/pos/i12955.scala new file mode 100644 index 000000000000..32ce3eb3b0ad --- /dev/null +++ b/tests/pos/i12955.scala @@ -0,0 +1,2 @@ +def test[A, B](using c: A <:< B) = + val b: B = ??? : A diff --git a/tests/pos/i12958.scala b/tests/pos/i12958.scala new file mode 100644 index 000000000000..4c6cfec1f786 --- /dev/null +++ b/tests/pos/i12958.scala @@ -0,0 +1,17 @@ +import scala.quoted.* + +package mylib: + object Export: + transparent inline def exported: Any = 1 + + object Import: + transparent inline def imported: Any = 1 + + export Export.* + + +import mylib.* +import Import.* +object Test: + val oneFail : 1 = exported //error + val oneWork : 1 = imported //works diff --git a/tests/pos/i12973.scala b/tests/pos/i12973.scala new file mode 100644 index 000000000000..53c6476d798e --- /dev/null +++ b/tests/pos/i12973.scala @@ -0,0 +1,4 @@ +// Compiled and placed on the REPL classpath in the bootstrappedOnlyCmdTests script +// to test that launching the REPL with `scala -cp ` works (issue #12973) +case class Bug12973(): + def check = s"$productPrefix is fixed" diff --git a/tests/pos/i12997.scala b/tests/pos/i12997.scala new file mode 100644 index 000000000000..288dcc6f4164 --- /dev/null +++ b/tests/pos/i12997.scala @@ -0,0 +1,15 @@ +import scala.compiletime._ + +// works +val a = { + given Int = 0 + summon[Int] +} + +// doesn't +inline def summonInt = { + given Int = 0 + summonInline[Int] +} + +val b = summonInt diff --git a/tests/pos/i13001/Main_1.scala b/tests/pos/i13001/Main_1.scala new file mode 100644 index 000000000000..ad46df9f2fe1 --- /dev/null +++ b/tests/pos/i13001/Main_1.scala @@ -0,0 +1,31 @@ +case class Foo(a: String) + +trait Arbitrary[T] +trait Gen[+T] + +object ArbitraryDerivation: + given deriveArb[A](using gen: DerivedGen[A]): Arbitrary[A] = ??? 
+ +opaque type DerivedGen[A] = Gen[A] +object DerivedGen extends DerivedGenInstances + +sealed abstract class DerivedGenInstances: + inline given derived[A](using gen: K0.Generic[A]): DerivedGen[A] = + val dummy: DerivedGen[A] = ??? + gen.derive(dummy, dummy) + +// from shapeless3-deriving +import scala.deriving.* +object K0 { + type Kind[C, O] = C { type Kind = K0.type ; type MirroredType = O ; type MirroredElemTypes <: Tuple } + type Generic[O] = Kind[Mirror, O] + type ProductGeneric[O] = Kind[Mirror.Product, O] + type CoproductGeneric[O] = Kind[Mirror.Sum, O] + + extension [F[_], T](gen: Generic[T]) + inline def derive(f: => (ProductGeneric[T] & gen.type) ?=> F[T], g: => (CoproductGeneric[T] & gen.type) ?=> F[T]): F[T] = + inline gen match { + case p: ProductGeneric[T] => f(using p.asInstanceOf) + case c: CoproductGeneric[T] => g(using c.asInstanceOf) + } +} \ No newline at end of file diff --git a/tests/pos/i13001/Test_2.scala b/tests/pos/i13001/Test_2.scala new file mode 100644 index 000000000000..42cacb760027 --- /dev/null +++ b/tests/pos/i13001/Test_2.scala @@ -0,0 +1,4 @@ +class Test: + import ArbitraryDerivation.given + private def test[A: Arbitrary]: Unit = {} + test[Foo] \ No newline at end of file diff --git a/tests/pos/i13019.scala b/tests/pos/i13019.scala new file mode 100644 index 000000000000..491b6df03d0b --- /dev/null +++ b/tests/pos/i13019.scala @@ -0,0 +1,13 @@ + +trait Ok1 { var i: Int } +class Ok1C extends Ok1 { var i: Int = 1 } + +trait Ok2 { + def i: Int + def i_=(v: Int): Unit +} +class Ok2C extends Ok2 { override var i: Int = 1 } + +// was: variable i of type Int cannot override a mutable variable +trait NotOk {var i: Int} +class NotOkC extends NotOk { override var i: Int = 1 } diff --git a/tests/pos/i13043/Impl.scala b/tests/pos/i13043/Impl.scala new file mode 100644 index 000000000000..7db75ee99d89 --- /dev/null +++ b/tests/pos/i13043/Impl.scala @@ -0,0 +1,5 @@ +class Impl extends Intf { + override def thing(x: Int) = ??? 
+ override def thing(y: String*) = ??? + override def thing2(y: String*) = ??? +} \ No newline at end of file diff --git a/tests/pos/i13043/Intf.java b/tests/pos/i13043/Intf.java new file mode 100644 index 000000000000..8b845bdca146 --- /dev/null +++ b/tests/pos/i13043/Intf.java @@ -0,0 +1,5 @@ +interface Intf { + public void thing(int x); + public void thing(String... y); + public void thing2(String... y); +} \ No newline at end of file diff --git a/tests/pos/i13046/A.java b/tests/pos/i13046/A.java new file mode 100644 index 000000000000..bed1b0be2576 --- /dev/null +++ b/tests/pos/i13046/A.java @@ -0,0 +1,3 @@ +interface A { + public boolean override(); +} diff --git a/tests/pos/i13046/B.scala b/tests/pos/i13046/B.scala new file mode 100644 index 000000000000..902105e97723 --- /dev/null +++ b/tests/pos/i13046/B.scala @@ -0,0 +1,3 @@ +class B extends A { + override def `override`: Boolean = true +} diff --git a/tests/pos/i13091.scala b/tests/pos/i13091.scala new file mode 100644 index 000000000000..fa255cd6c08f --- /dev/null +++ b/tests/pos/i13091.scala @@ -0,0 +1,3 @@ +import annotation.experimental +@experimental class Foo +val foo = new Foo diff --git a/tests/pos/i13106.java b/tests/pos/i13106.java new file mode 100644 index 000000000000..6a10fd3cfa9b --- /dev/null +++ b/tests/pos/i13106.java @@ -0,0 +1,7 @@ +final class A { + public static final class C { } + public static final class D { + public static final class C { } + public C foo() { return new C(); } + } +} diff --git a/tests/pos/i13120/Foo.scala b/tests/pos/i13120/Foo.scala new file mode 100644 index 000000000000..741babc6aa13 --- /dev/null +++ b/tests/pos/i13120/Foo.scala @@ -0,0 +1,8 @@ +import container.ir + +opaque type Foo = ir.Foo + +object Foo: + def bar(foo: Foo): Unit = {} + +export Foo.* \ No newline at end of file diff --git a/tests/pos/i13120/container.scala b/tests/pos/i13120/container.scala new file mode 100644 index 000000000000..305781cce555 --- /dev/null +++ 
b/tests/pos/i13120/container.scala @@ -0,0 +1,3 @@ +object container: + object ir: + sealed trait Foo diff --git a/tests/pos/i13128/A_1.scala b/tests/pos/i13128/A_1.scala new file mode 100644 index 000000000000..4a1c9335d2ec --- /dev/null +++ b/tests/pos/i13128/A_1.scala @@ -0,0 +1 @@ +opaque type Foo[T <: Int] = Int diff --git a/tests/pos/i13128/B_2.scala b/tests/pos/i13128/B_2.scala new file mode 100644 index 000000000000..797c917d3620 --- /dev/null +++ b/tests/pos/i13128/B_2.scala @@ -0,0 +1,3 @@ +def grabT[T <: Int](arg : Foo[T]) : T = ??? +final val t1 = grabT(??? : Foo[8]) +val t2 : 8 = t1 \ No newline at end of file diff --git a/tests/pos/i13134.scala b/tests/pos/i13134.scala new file mode 100644 index 000000000000..d3b70d389dcf --- /dev/null +++ b/tests/pos/i13134.scala @@ -0,0 +1,10 @@ +def test = + { + val end = 0 + assert(~end == -1) //Not found: ~ + } + + { + val end = false + assert(!end) // postfix operator `end` needs to be enabled + } // by making the implicit value scala.language.postfixOps visible. \ No newline at end of file diff --git a/tests/pos/i13190/A_1.scala b/tests/pos/i13190/A_1.scala new file mode 100644 index 000000000000..9bb9b20f2976 --- /dev/null +++ b/tests/pos/i13190/A_1.scala @@ -0,0 +1,3 @@ +object Opaque { + opaque type FieldType[K, +V] <: V = V +} \ No newline at end of file diff --git a/tests/pos/i13190/B_2.scala b/tests/pos/i13190/B_2.scala new file mode 100644 index 000000000000..2752778afa04 --- /dev/null +++ b/tests/pos/i13190/B_2.scala @@ -0,0 +1,15 @@ +import Opaque.* + +object Test { + type FindField[R <: scala.Tuple, K] = R match { + case FieldType[K, f] *: t => f + case _ *: t => FindField[t, K] + } + + val f: FieldType["A", Int] = ??? 
+ val f1: Int = f + //val f2: Int = f + + type R = FieldType["A", Int] *: FieldType["B", Double] *: FieldType["C", String] *: FieldType["D", Boolean] *: EmptyTuple + summon[FindField[R, "B"] =:= Double] +} diff --git a/tests/pos/i13197.scala b/tests/pos/i13197.scala new file mode 100644 index 000000000000..cf8b2bb3ef0d --- /dev/null +++ b/tests/pos/i13197.scala @@ -0,0 +1,12 @@ +// this test is similar to explicit-nulls/pos/i13197.scala, but without explicit nulls + +extension [T](x: T | String) inline def forceString: x.type & String = + x.asInstanceOf + +trait Bar: + def b: String | Int + +class Foo(a: String = "", b: String) + +object Foo: + def foo(bar: Bar) = Foo(b = bar.b.forceString) \ No newline at end of file diff --git a/tests/pos/i13282.scala b/tests/pos/i13282.scala new file mode 100644 index 000000000000..2241fdc17941 --- /dev/null +++ b/tests/pos/i13282.scala @@ -0,0 +1,9 @@ +class Ptr[T](var value: T): + def `unary_!` : T = value + def `unary_!_=`(value: T): Unit = this.value = value +end Ptr + +def test = + val x = Ptr(9) + !x = 10 + println(!x) diff --git a/tests/pos/i13310.java b/tests/pos/i13310.java new file mode 100644 index 000000000000..7553cbc3130c --- /dev/null +++ b/tests/pos/i13310.java @@ -0,0 +1,2 @@ +// The entire contents of this file is intentionally commented out +// public class i13310 {} diff --git a/tests/pos/i13331.scala b/tests/pos/i13331.scala new file mode 100644 index 000000000000..d862898eaa04 --- /dev/null +++ b/tests/pos/i13331.scala @@ -0,0 +1,3 @@ +def Choice = (_: Any) match { case Int => Long; case Long => Int } // ok + +type Choice[A] = A match { case Int => Long ; case Long => Int } // error \ No newline at end of file diff --git a/tests/pos/i13349.scala b/tests/pos/i13349.scala new file mode 100644 index 000000000000..e4048424db24 --- /dev/null +++ b/tests/pos/i13349.scala @@ -0,0 +1,31 @@ +sealed trait Stream[+A]{ + import Stream.*; + + def foldRight[B](z: => B)(f: (A, => B) => B): B = + this match { + case 
Cons(h,t) => f(h(), t().foldRight(z)(f)) + case _ => z + } + + def append[B >: A](other : => Stream[B]) : Stream[B] = + foldRight(other : Stream[B])((elem, stream) => cons(elem, stream)) + +} + +case object Empty extends Stream[Nothing] +case class Cons[+A](h: () => A, t: () => Stream[A]) extends Stream[A] + +object Stream { + + def cons[A](hd: => A, tl: => Stream[A]): Stream[A] = { + lazy val head = hd + lazy val tail = tl + Cons(() => head, () => tail) + } + + def empty[A]: Stream[A] = Empty + + def apply[A](as: A*): Stream[A] = + if (as.isEmpty) empty else cons(as.head, apply(as.tail: _*)) + +} diff --git a/tests/pos/i13349min.scala b/tests/pos/i13349min.scala new file mode 100644 index 000000000000..da434744b638 --- /dev/null +++ b/tests/pos/i13349min.scala @@ -0,0 +1,3 @@ +class Foo: + def foo(x: => Foo) = bar(x: Foo) + def bar(x: => Foo) = x diff --git a/tests/pos/i13392.scala b/tests/pos/i13392.scala new file mode 100644 index 000000000000..614f711eebb5 --- /dev/null +++ b/tests/pos/i13392.scala @@ -0,0 +1,11 @@ +package scala +import language.experimental.erasedDefinitions +import annotation.{implicitNotFound, experimental} + +@experimental +@implicitNotFound("The capability to throw exception ${E} is missing.\nThe capability can be provided by one of the following:\n - A using clause `(using CanThrow[${E}])`\n - A `throws` clause in a result type such as `X throws ${E}`\n - an enclosing `try` that catches ${E}") +erased class CanThrow[-E <: Exception] + +@experimental +object unsafeExceptions: + given canThrowAny: CanThrow[Exception] = new CanThrow diff --git a/tests/pos/i13411.scala b/tests/pos/i13411.scala new file mode 100644 index 000000000000..d310cdbdad21 --- /dev/null +++ b/tests/pos/i13411.scala @@ -0,0 +1,12 @@ +class Foo +class Bar extends Foo + +inline def thingy(a: Int = 0, b: Int = 0, inline c: Foo = new Bar) = { + inline c match { + case _: Bar => + } +} + +def x = 1 + +def test = thingy(b = x) diff --git a/tests/pos/i13411b/Constants.java 
b/tests/pos/i13411b/Constants.java new file mode 100644 index 000000000000..0cb5fb31ac5e --- /dev/null +++ b/tests/pos/i13411b/Constants.java @@ -0,0 +1,5 @@ +public class Constants { + public static final int A = 0; + public static final int B = 2; + public static final int C = 3; +} diff --git a/tests/pos/i13411b/Test.scala b/tests/pos/i13411b/Test.scala new file mode 100644 index 000000000000..4ed586485d86 --- /dev/null +++ b/tests/pos/i13411b/Test.scala @@ -0,0 +1,32 @@ +class broken { + sealed trait Foo + case object A extends Foo + case object B extends Foo + case object C extends Foo + case object D extends Foo + + inline def foo(inline f: Foo) = inline f match { + case _: A.type => "the letter a" + case _: B.type => "the letter b" + case _: C.type => "the letter c" + case _: D.type => "the letter d" + } + + inline def thingy( + depthClampEnable: Boolean = false, + rasterizerDiscardEnable: Boolean = false, + polygonMode: Int = 0, + cullMode: Int = 0, + frontFace: Int = 0, + depthBiasEnable: Boolean = false, + depthBiasConstantFactor: Float = 0, + depthBiasClamp: Float = 0, + depthBiasSlopeFactor: Float = 0, + lineWidth: Float = 0, + inline f: Foo = A, + ) = { + foo(f) + } + + thingy(polygonMode = Constants.A, cullMode = Constants.B, frontFace = Constants.C, lineWidth = 1.0f) +} diff --git a/tests/pos/i13426.scala b/tests/pos/i13426.scala new file mode 100644 index 000000000000..38283fa49483 --- /dev/null +++ b/tests/pos/i13426.scala @@ -0,0 +1,2 @@ +val f = [R] => () => [A] => (a: A => R, b: A) => a(b) +val x = f[Int]()(_, 3) diff --git a/tests/pos/i13460.scala b/tests/pos/i13460.scala new file mode 100644 index 000000000000..fd57cd9b8521 --- /dev/null +++ b/tests/pos/i13460.scala @@ -0,0 +1,55 @@ +import scala.compiletime.* +import scala.deriving.Mirror + +class Lazy[A](obj: => A) { + lazy val value: A = obj +} +object Lazy { + given [A](using obj: => A ): Lazy[A] = new Lazy(obj) +} + +trait MyTypeClass[A] { + def makeString(a: A): String +} +object 
MyTypeClass { + + given IntTypeClass: MyTypeClass[Int] with + def makeString(a: Int): String = a.toString + + inline given derived[A](using m: Mirror.Of[A]): MyTypeClass[A] = + inline m match + case p: Mirror.ProductOf[A] => productConverter(p) + + + private inline def summonElementTypeClasses[A](m: Mirror.Of[A]): IArray[Object] = + // this doesn't work + summonAll[Tuple.Map[m.MirroredElemTypes, [A] =>> Lazy[MyTypeClass[A]]]].toIArray + // but this does + // summonAll[Tuple.Map[Tuple.Map[m.MirroredElemTypes, MyTypeClass], Lazy]].toIArray + + private inline def productConverter[A](m: Mirror.ProductOf[A]): MyTypeClass[A] = { + val elementTypeClasses = summonElementTypeClasses(m) + new MyTypeClass[A] { + def makeString(a: A): String = { + val product = a.asInstanceOf[Product] + elementTypeClasses + .view + .zipWithIndex + .map((obj, i) => { + val tc = obj.asInstanceOf[Lazy[MyTypeClass[Any]]].value + tc.makeString(product.productElement(i)) + }) + .mkString("[", ", ", "]") + } + } + } +} + +case class Example(a: Int, b: Int) derives MyTypeClass + +object Main { + def main(args: Array[String]): Unit = { + println("hello world") + println(summon[MyTypeClass[Example]].makeString(Example(1,2))) + } +} \ No newline at end of file diff --git a/tests/pos/i13476.scala b/tests/pos/i13476.scala new file mode 100644 index 000000000000..e1810d9cfe33 --- /dev/null +++ b/tests/pos/i13476.scala @@ -0,0 +1,7 @@ +private object Foo: + inline def d(arg : Int): Unit = {} + transparent inline def haha() : Unit = {} + +export Foo.* + +@main def main : Unit = haha() diff --git a/tests/pos/i13487.scala b/tests/pos/i13487.scala new file mode 100644 index 000000000000..ce08513f3c25 --- /dev/null +++ b/tests/pos/i13487.scala @@ -0,0 +1,6 @@ +trait TC[F[_[_], _]] +object TC { + def derived[F[_[_], _]]: TC[F] = ??? 
+} + +case class Foo[A](a: A) derives TC diff --git a/tests/pos/i13490.scala b/tests/pos/i13490.scala new file mode 100644 index 000000000000..c8f3213e0649 --- /dev/null +++ b/tests/pos/i13490.scala @@ -0,0 +1,17 @@ +object MyApi { + enum MyEnum(a: Int) { + case A extends MyEnum(1) + } + case class Foo(a: MyEnum) +} + +object Test { + export MyApi.* + import MyEnum.* + Foo(MyEnum.A) match { + case Foo(a) => + a match { + case A => + } + } +} diff --git a/tests/pos/i13503.scala b/tests/pos/i13503.scala new file mode 100644 index 000000000000..c60b0e05862c --- /dev/null +++ b/tests/pos/i13503.scala @@ -0,0 +1,8 @@ +trait First {type Out} +given First with {type Out = 123} + +trait Second {type Out} +transparent inline given (using f: First): Second = new Second {type Out = f.Out} + +val s = summon[Second] +val x = summon[s.Out =:= 123] \ No newline at end of file diff --git a/tests/pos/i13512.scala b/tests/pos/i13512.scala new file mode 100644 index 000000000000..b400809d47b9 --- /dev/null +++ b/tests/pos/i13512.scala @@ -0,0 +1,7 @@ +import scala.language.strictEquality + +class NotEquatable + +def f = List(new NotEquatable) match + case Nil => ??? + case _ => diff --git a/tests/pos/i13526.scala b/tests/pos/i13526.scala new file mode 100644 index 000000000000..3b3c29ca3830 --- /dev/null +++ b/tests/pos/i13526.scala @@ -0,0 +1,15 @@ +type ~>[Args <: Tuple, Return] = Args match { + case (arg1, arg2) => ((arg1, arg2) => Return) +} + +trait Builder[Args <: NonEmptyTuple] { + def apply(f: Args ~> String): String +} + +class BuilderImpl[Args <: NonEmptyTuple] extends Builder[Args] { + override def apply(f: Args ~> String): String = ??? 
+} + +val builder = BuilderImpl[Int *: String *: EmptyTuple]() +// builder { (i: Int, s: String) => "test" } // This line compiles +val _ = builder { (i, s) => "test" } // Does not compile \ No newline at end of file diff --git a/tests/pos/i13532/Bar.scala b/tests/pos/i13532/Bar.scala new file mode 100644 index 000000000000..aa7e7191f28b --- /dev/null +++ b/tests/pos/i13532/Bar.scala @@ -0,0 +1,7 @@ +package testcode + +import testcode.Foo + +class Bar(f: Foo) { + TestMacro.call() +} \ No newline at end of file diff --git a/tests/pos/i13532/Foo.scala b/tests/pos/i13532/Foo.scala new file mode 100644 index 000000000000..94d731858a1b --- /dev/null +++ b/tests/pos/i13532/Foo.scala @@ -0,0 +1,5 @@ +package testcode + +class Foo { + TestMacro.call() +} \ No newline at end of file diff --git a/tests/pos/i13532/TestMacro.scala b/tests/pos/i13532/TestMacro.scala new file mode 100644 index 000000000000..ef6f6edfb63f --- /dev/null +++ b/tests/pos/i13532/TestMacro.scala @@ -0,0 +1,8 @@ +package testcode + +import scala.quoted.Quotes + +object TestMacro { + private def impl()(using Quotes) = '{ 123 } + inline def call(): Int = ${ impl() } +} diff --git a/tests/pos/i13541.scala b/tests/pos/i13541.scala new file mode 100644 index 000000000000..36ec09409b17 --- /dev/null +++ b/tests/pos/i13541.scala @@ -0,0 +1,14 @@ +trait F[A] +trait Z +object Z: + given F[Z] = ??? + +type Foo[B] = [A] =>> Bar[A, B] +trait Bar[A, B] + +given fooUnit[A: F]: Foo[Unit][A] = ??? +//given bar[A: F]: Bar[A, Unit] = ??? + +def f[A: F](using Foo[Unit][A]): Nothing = ??? + +def broken: Nothing = f[Z] \ No newline at end of file diff --git a/tests/pos/i13554.scala b/tests/pos/i13554.scala new file mode 100644 index 000000000000..e30d3eb9bd9f --- /dev/null +++ b/tests/pos/i13554.scala @@ -0,0 +1,6 @@ +object StatusCode: + class Matcher + +enum StatusCode(m: StatusCode.Matcher): + case InternalServerError extends StatusCode(???) 
+ diff --git a/tests/pos/i13554a.scala b/tests/pos/i13554a.scala new file mode 100644 index 000000000000..0c6f0c6c972f --- /dev/null +++ b/tests/pos/i13554a.scala @@ -0,0 +1,15 @@ +object StatusCode: + enum Matcher: + case ServerError extends Matcher + end Matcher +end StatusCode + +enum StatusCode(code: Int, m: StatusCode.Matcher): + case InternalServerError extends StatusCode(500, StatusCode.Matcher.ServerError) +end StatusCode + +object Main { + def main(args: Array[String]): Unit = { + println(StatusCode.InternalServerError) + } +} diff --git a/tests/pos/i13565.scala b/tests/pos/i13565.scala new file mode 100644 index 000000000000..00b7e393a016 --- /dev/null +++ b/tests/pos/i13565.scala @@ -0,0 +1,67 @@ +package ips.clang + +sealed trait PrimaryExpression extends PostfixExpression +sealed trait PostfixExpression extends UnaryExpression +sealed trait UnaryExpression extends CastExpression +sealed trait CastExpression extends MultiplicativeExpression +sealed trait MultiplicativeExpression extends AdditiveExpression +sealed trait AdditiveExpression extends ShiftExpression +sealed trait ShiftExpression extends RelationalExpression +sealed trait RelationalExpression extends EqualityExpression +sealed trait EqualityExpression extends PrecAndExpression +sealed trait PrecAndExpression extends PrecExclusiveOrExpression +sealed trait PrecExclusiveOrExpression extends PrecInclusiveOrExpression +sealed trait PrecInclusiveOrExpression extends PrecLogicalAndExpression +sealed trait PrecLogicalAndExpression extends PrecLogicalOrExpression +sealed trait PrecLogicalOrExpression extends PrecConditionalExpression +sealed trait PrecConditionalExpression extends PrecAssigmentExpression +sealed trait PrecAssigmentExpression extends Expression +sealed trait Expression +sealed trait BinaryExpression { def op: String; def frs: Expression; def snd: Expression } + +type TypeName = String +case class Identifier(value: String) extends PrimaryExpression +case class IntConstant(value: Int) 
extends PrimaryExpression +case class CharConstant(value: Char) extends PrimaryExpression +case class StringLiteral(value: String) extends PrimaryExpression +case class WrappedExpression(value: Expression) extends PrimaryExpression +case class ArrayIndexExpression(base: PostfixExpression, index: Expression) extends PostfixExpression +case class FunctionCallExpression(fun: PostfixExpression, arguments: List[PrecAssigmentExpression]) extends PostfixExpression +case class DotSelectExpression(qualifier: PostfixExpression, select: Identifier) extends PostfixExpression +case class ArrowSelectExpression(qualifier: PostfixExpression, select: Identifier) extends PostfixExpression +case class PostfixIncrementExpression(base: PostfixExpression) extends PostfixExpression +case class PostfixDecrementExpression(base: PostfixExpression) extends PostfixExpression +case class CompoundLiteral(typeName: TypeName, initializers: List[Int]) extends PostfixExpression +case class PrefixIncrementExpression(base: UnaryExpression) extends UnaryExpression +case class PrefixDecrementExpression(base: UnaryExpression) extends UnaryExpression +case class UnaryOperatorExpression(op: String, argument: CastExpression) extends UnaryExpression +case class SizeofConstExpression(expression: UnaryExpression) extends UnaryExpression +case class SizeofTypeExpression(typeName: TypeName) extends UnaryExpression +case class Cast(typeName: TypeName, argument: CastExpression) extends CastExpression +case class MultiplicativeBinaryExpression(op: String, frs: MultiplicativeExpression, snd: CastExpression) extends MultiplicativeExpression with BinaryExpression +case class AdditiveBinaryExpression(op: String, frs: MultiplicativeExpression, snd: CastExpression) extends MultiplicativeExpression with BinaryExpression +case class ShiftBinaryExpression(op: String, frs: MultiplicativeExpression, snd: CastExpression) extends MultiplicativeExpression with BinaryExpression +case class RelationalBinaryExpression(op: String, 
frs: RelationalExpression, snd: ShiftExpression) extends RelationalExpression with BinaryExpression +case class EqualityBinaryExpression(op: String, frs: RelationalExpression, snd: ShiftExpression) extends EqualityExpression with BinaryExpression +case class AndBinaryExpression(op: String, frs: PrecAndExpression, snd: EqualityExpression) extends PrecAndExpression with BinaryExpression +case class ExclusiveOrBinaryExpression(op: String, frs: PrecExclusiveOrExpression, snd: PrecAndExpression) extends PrecExclusiveOrExpression with BinaryExpression +case class InclusiveOrBinaryExpression(op: String, frs: PrecExclusiveOrExpression, snd: PrecAndExpression) extends PrecInclusiveOrExpression with BinaryExpression +case class LogicalAndBinaryExpression(op: String, frs: PrecLogicalAndExpression, snd: PrecInclusiveOrExpression) extends PrecLogicalAndExpression with BinaryExpression +case class LogicalOrBinaryExpression(op: String, frs: PrecLogicalAndExpression, snd: PrecInclusiveOrExpression) extends PrecLogicalOrExpression with BinaryExpression +case class ConditionalExpression(cond: PrecLogicalOrExpression, frs: Expression, snd: PrecConditionalExpression) extends PrecConditionalExpression +case class AssigmentExpression(op: String, frs: UnaryExpression, snd: PrecAssigmentExpression) extends PrecAssigmentExpression +case class CommaExpression(frs: Expression, snd: Expression) extends Expression +case class AltCommaExpression(frs: Expression, snd: Expression) extends Expression + +// each AdditionalUnaryExpressionX increase compilation time +sealed trait AdditionalUnaryExpression1 extends UnaryExpression +sealed trait AdditionalUnaryExpression2 extends UnaryExpression +sealed trait AdditionalUnaryExpression3 extends UnaryExpression +sealed trait AdditionalUnaryExpression4 extends UnaryExpression +sealed trait AdditionalUnaryExpression5 extends UnaryExpression + +class LongCompilation: + // this match used to take 2m30 to compile, with 1 cache it now takes 5s (30x speedup, 
aka 3000%) + def toCastExpression(expr: Expression): CastExpression = expr match + case x: CastExpression => x + case _ => WrappedExpression(expr) diff --git a/tests/pos/i13586.scala b/tests/pos/i13586.scala new file mode 100644 index 000000000000..353660334091 --- /dev/null +++ b/tests/pos/i13586.scala @@ -0,0 +1,5 @@ +class Foo: + inline def test(): Unit = this + +class Bar extends Foo: + def test(s: String) = super.test() diff --git a/tests/pos/i13586/Macro_1.scala b/tests/pos/i13586/Macro_1.scala new file mode 100644 index 000000000000..752ccd906df5 --- /dev/null +++ b/tests/pos/i13586/Macro_1.scala @@ -0,0 +1,6 @@ +import scala.quoted._ + +object Position { + def withPosition[T](fun: Expr[Unit => T])(using quotes: Quotes, typeOfT: Type[T]): Expr[T] = + '{${fun}.apply(null)} +} diff --git a/tests/pos/i13586/Test_2.scala b/tests/pos/i13586/Test_2.scala new file mode 100644 index 000000000000..2c7d23a06630 --- /dev/null +++ b/tests/pos/i13586/Test_2.scala @@ -0,0 +1,10 @@ +class Foo { + inline def test(): Unit = { + ${ Position.withPosition[Unit]('{ _ => this }) } + } +} + +class Bar extends Foo { + def test(s: String) = + super.test() +} diff --git a/tests/pos/i13660.scala b/tests/pos/i13660.scala new file mode 100644 index 000000000000..328a82d0e560 --- /dev/null +++ b/tests/pos/i13660.scala @@ -0,0 +1,15 @@ +type Const[A] = [_] =>> A +type FunctionK[A[_], B[_]] = [Z] => A[Z] => B[Z] + +type #~>#:[T, R] = FunctionK[Const[T], Const[R]] + +object FunctionK: + def liftConst[A, B](f: A => B): /*FunctionK[Const[A], Const[B]]*/ A #~>#: B = + [Z1] => (a: A) => f(a) + +trait FoldableK[F[_[_], _]]: + + def foldMapK1[A[_], C, B](fa: F[A, C])(f: FunctionK[A, Const[B]]): B + + def toListK[A, C](fa: F[Const[A], C]): List[A] = + foldMapK1(fa)(FunctionK.liftConst(List(_: A))) diff --git a/tests/pos/i13669.scala b/tests/pos/i13669.scala new file mode 100644 index 000000000000..330eb8d8478d --- /dev/null +++ b/tests/pos/i13669.scala @@ -0,0 +1,6 @@ +trait MyExtensions: + 
extension (lhs: Int) def bash: Unit = {} +object MyExtensions extends MyExtensions + +export MyExtensions.* +val fails = 1.bash diff --git a/tests/pos/i13739.scala b/tests/pos/i13739.scala new file mode 100644 index 000000000000..418e665f44e8 --- /dev/null +++ b/tests/pos/i13739.scala @@ -0,0 +1,46 @@ +import scala.language.strictEquality + +class Foo(i: Int) extends AnyVal + +val _ = summon[CanEqual[Nothing, Nothing]] + +val _ = summon[CanEqual[Int, Nothing]] +val _ = summon[CanEqual[Nothing, Int]] +val _ = summon[CanEqual[3, Nothing]] +val _ = summon[CanEqual[Nothing, 3]] + +val _ = summon[CanEqual[Byte, Nothing]] +val _ = summon[CanEqual[Nothing, Byte]] +val _ = summon[CanEqual[Short, Nothing]] +val _ = summon[CanEqual[Nothing, Short]] +val _ = summon[CanEqual[Float, Nothing]] +val _ = summon[CanEqual[Nothing, Float]] + +val _ = summon[CanEqual[Double, Nothing]] +val _ = summon[CanEqual[Nothing, Double]] +val _ = summon[CanEqual[3.0, Nothing]] +val _ = summon[CanEqual[Nothing, 3.0]] + +val _ = summon[CanEqual[String, Nothing]] +val _ = summon[CanEqual[Nothing, String]] +val _ = summon[CanEqual["foo", Nothing]] +val _ = summon[CanEqual[Nothing, "foo"]] + +val _ = summon[CanEqual[Char, Nothing]] +val _ = summon[CanEqual[Nothing, Char]] +val _ = summon[CanEqual['f', Nothing]] +val _ = summon[CanEqual[Nothing, 'f']] + +val _ = summon[CanEqual[Boolean, Nothing]] +val _ = summon[CanEqual[Nothing, Boolean]] +val _ = summon[CanEqual[true, Nothing]] +val _ = summon[CanEqual[Nothing, true]] + +val _ = summon[CanEqual[Foo, Nothing]] +val _ = summon[CanEqual[Nothing, Foo]] + +val _ = summon[CanEqual[Option[Int], None.type]] +val _ = summon[CanEqual[Option[Int], Option[Nothing]]] + +val _ = summon[CanEqual[Any & Nothing, Foo]] +val _ = summon[CanEqual[Nothing & Any, Foo]] diff --git a/tests/pos/i13816.scala b/tests/pos/i13816.scala new file mode 100644 index 000000000000..a87be4803936 --- /dev/null +++ b/tests/pos/i13816.scala @@ -0,0 +1,57 @@ +import 
language.experimental.saferExceptions + +class Ex1 extends Exception("Ex1") +class Ex2 extends Exception("Ex2") + +def foo1(i: Int): Unit throws Ex1 throws Ex2 = + if i > 0 then throw new Ex1 else throw new Ex2 + +def foo2(i: Int): Unit throws Ex1 | Ex2 = + if i > 0 then throw new Ex1 else throw new Ex2 + +def foo3(i: Int): Unit throws (Ex1 | Ex2) = + if i > 0 then throw new Ex1 else throw new Ex2 + +def foo4(i: Int)(using CanThrow[Ex1], CanThrow[Ex2]): Unit = + if i > 0 then throw new Ex1 else throw new Ex2 + +def foo5(i: Int)(using CanThrow[Ex1])(using CanThrow[Ex2]): Unit = + if i > 0 then throw new Ex1 else throw new Ex2 + +def foo6(i: Int)(using CanThrow[Ex1 | Ex2]): Unit = + if i > 0 then throw new Ex1 else throw new Ex2 + +def foo7(i: Int)(using CanThrow[Ex1]): Unit throws Ex2 = + if i > 0 then throw new Ex1 else throw new Ex2 + +def foo8(i: Int)(using CanThrow[Ex2]): Unit throws Ex1 = + if i > 0 then throw new Ex1 else throw new Ex2 + +def test(): Unit = + try + foo1(1) + foo2(1) + foo3(1) + foo4(1) + foo5(1) + foo6(1) + foo7(1) + foo8(1) + catch + case _: Ex1 => + case _: Ex2 => + + try + try + foo1(1) + foo2(1) + foo3(1) + foo4(1) + foo5(1) + // foo6(1) // As explained in the docs this won't work until we find a way to aggregate capabilities + foo7(1) + foo8(1) + catch + case _: Ex1 => + catch + case _: Ex2 => diff --git a/tests/pos/i13820.scala b/tests/pos/i13820.scala new file mode 100644 index 000000000000..1accdee53fb1 --- /dev/null +++ b/tests/pos/i13820.scala @@ -0,0 +1,5 @@ +trait Expr { type T } + +def foo[A](e: Expr { type T = A }) = e match + case e1: Expr { type T <: Int } => + val i: Int = ??? 
: e1.T \ No newline at end of file diff --git a/tests/pos/i13842.scala b/tests/pos/i13842.scala new file mode 100644 index 000000000000..7d29633619ec --- /dev/null +++ b/tests/pos/i13842.scala @@ -0,0 +1,14 @@ +class Parent { class E } + +object ChildA extends Parent + +object ChildB extends Parent + +class Printer[C <: Parent](val child: C): + def print22(e: child.E): String = "" + +def test = + Printer(ChildA).print22(new ChildA.E) // does not work + + //Printer[ChildA.type](ChildA).print22(new ChildA.E) // works + //val p = Printer(ChildA); p.print22(new ChildA.E) // works diff --git a/tests/pos/i13859.scala b/tests/pos/i13859.scala new file mode 100644 index 000000000000..4092de52fd94 --- /dev/null +++ b/tests/pos/i13859.scala @@ -0,0 +1,31 @@ +import scala.deriving.* + +object Test: + type Kind1[C, O[_]] = C { + type MirroredType[X] = O[X] + type MirroredMonoType = O[Any] + type MirroredElemTypes[_] <: Tuple + } + + type Kind2[C, O[_, _]] = C { + type MirroredType[X, Y] = O[X, Y] + type MirroredMonoType = O[Any, Any] + type MirroredElemTypes[_, _] <: Tuple + } + + type Test[X] = (X, Boolean) + type Swap[X, Y] = (Y, X) + + locally { + val x = summon[Kind1[Mirror.Product, Test]] + x: Mirror.Product { + type MirroredElemTypes[X] = (X, Boolean) + } + } + + locally { + val x = summon[Kind2[Mirror.Product, Swap]] + x: Mirror.Product { + type MirroredElemTypes[X, Y] = (Y, X) + } + } diff --git a/tests/pos/i13871.scala b/tests/pos/i13871.scala new file mode 100644 index 000000000000..3b1ed0f1f06c --- /dev/null +++ b/tests/pos/i13871.scala @@ -0,0 +1,10 @@ +import scala.compiletime.{error, codeOf} +import scala.compiletime.testing.* + +inline def testError(inline typeName: Any): String = error("Got error " + codeOf(typeName)) + +transparent inline def compileErrors(inline code: String): List[Error] = typeCheckErrors(code) + +def test = + typeCheckErrors("""testError("string")""") + compileErrors("""testError("string")""") diff --git a/tests/pos/i13941.scala 
b/tests/pos/i13941.scala new file mode 100644 index 000000000000..299470e455e3 --- /dev/null +++ b/tests/pos/i13941.scala @@ -0,0 +1,15 @@ +import scala.annotation.tailrec + +object A { + def b = Option("a").map { x => + @tailrec + def loop(): Int = { + try + 2 + catch + case _: Throwable => + loop() + } + x + } +} diff --git a/tests/pos/i13974.scala b/tests/pos/i13974.scala new file mode 100644 index 000000000000..957baab63eb7 --- /dev/null +++ b/tests/pos/i13974.scala @@ -0,0 +1,13 @@ +object Test { + class C + class Use[A] + case class UseC() extends Use[C] + class ConversionTarget + implicit def convert(c: C): ConversionTarget = ??? + def go[X](u: Use[X], x: X) = + u match { + case UseC() => + //val y: C = x + x: ConversionTarget + } +} diff --git a/tests/pos/i13974a.scala b/tests/pos/i13974a.scala new file mode 100644 index 000000000000..69e764abc14d --- /dev/null +++ b/tests/pos/i13974a.scala @@ -0,0 +1,12 @@ + +object Test2: + class Foo[+X] + enum SUB[-S, +T]: + case Refl[U]() extends SUB[U, U] + def f[A, B, C](sub : A SUB (B,C)) = + given Foo[A] = ??? 
+ val x = summon[Foo[A]] + sub match + case SUB.Refl() => + val c: Foo[(B, C)] = summon[Foo[A]] + summon[Foo[(B, C)]] diff --git a/tests/pos/i13998.scala b/tests/pos/i13998.scala new file mode 100644 index 000000000000..474c6dca9e30 --- /dev/null +++ b/tests/pos/i13998.scala @@ -0,0 +1,10 @@ +case class Box[V](value: V) +object Box: + def apply[A](a: A): Box[A] = new Box[A](a) + def unapply[U](b: Box[U]): Box[U] = b + +class Test: + def value: Box[_ <: String] = Box("text") + + def test: String = value match + case Box(text) => text: String diff --git a/tests/pos/i14010.scala b/tests/pos/i14010.scala new file mode 100644 index 000000000000..a881f783f39b --- /dev/null +++ b/tests/pos/i14010.scala @@ -0,0 +1,46 @@ +abstract class LazyList[+T] { + def head: T + def tail: LazyList[T] + def isEmpty: Boolean + def push[E >: T](top: => E): LazyList[E] = + new Push[E](top, this) + //def map[R](f: T => R): LazyList[R] + def append[E >: T](that: => LazyList[E]): LazyList[E] +} + +private class Push[+T](top: => T, stack: => LazyList[T]) extends LazyList[T] { + override def head: T = + top + override def tail: LazyList[T] = + stack + override def isEmpty: Boolean = + false + //override def map[R](f: T => R): LazyList[R] = + // new Push[R](f(top), stack.map(f)) { + // override def map[R2](f2: R => R2): LazyList[R2] = + // Push.this.map(f2 compose f) + // } + override def append[E >: T](that: => LazyList[E]): LazyList[E] = + new Push[E](top, stack.append(that)) { + override def append[E2 >: E](that2: => LazyList[E2]): LazyList[E2] = + Push.this.append(that.append(that2)) + } +} + +object LazyList { + val empty = + new LazyList[Nothing] { + override def head: Nothing = + throw new NoSuchElementException + override def tail: LazyList[Nothing] = + throw new UnsupportedOperationException + override def isEmpty: Boolean = + true + //override def map[R](f: _ => R): LazyList[R] = + // this + override def append[E](that: => LazyList[E]): LazyList[E] = + that + } + def apply[T](elements: 
T*): LazyList[T] = + elements.foldRight[LazyList[T]](empty)(new Push(_, _)) +} \ No newline at end of file diff --git a/tests/pos/i14010a.scala b/tests/pos/i14010a.scala new file mode 100644 index 000000000000..5e77f55fdf62 --- /dev/null +++ b/tests/pos/i14010a.scala @@ -0,0 +1,3 @@ +class Foo(top: => Int) { + def foo: Any = new Foo(top) { } +} \ No newline at end of file diff --git a/tests/pos/i14013.scala b/tests/pos/i14013.scala new file mode 100644 index 000000000000..d20112392002 --- /dev/null +++ b/tests/pos/i14013.scala @@ -0,0 +1,63 @@ +import LightTypeTagInheritance._ + +trait LightTypeTagRef + +object LightTypeTagInheritance { + private final case class Ctx(self: LightTypeTagInheritance) { + def next(): Ctx = Ctx(self) + } + private implicit final class CtxExt(private val ctx: Ctx) extends AnyVal { + def isChild(selfT0: LightTypeTagRef, thatT0: LightTypeTagRef): Boolean = ctx.self.isChild(ctx.next())(selfT0, thatT0) + } +} + +class LightTypeTagInheritance { + + def isChild(s: LightTypeTagRef, t: LightTypeTagRef): Boolean = { + isChild(new Ctx(this))(s, t) + } + + private def isChild(ctx: Ctx)(s: LightTypeTagRef, t: LightTypeTagRef): Boolean = { + ctx.isChild(s, t) + } + +} + +object App extends App { + println(LightTypeTagInheritance) +} + + +object Foo { + case class Bar(i: Int) + + private implicit class BarOps(bar: Bar) { + def twice = Bar(bar.i * 2) + } +} + +class Foo { + def bar = Foo.Bar(1).twice +} + +object App2 extends App { + println((new Foo).bar) +} + +object Foo2 { + case class Bar(i: Int) + + private given BarOps: AnyRef with { + extension (bar: Bar) + def twice: Bar = Bar(bar.i * 2) + } +} + +class Foo2 { + def bar = Foo2.Bar(1).twice +} + +object App3 extends App { + println((new Foo2).bar) +} + diff --git a/tests/pos/i14048.scala b/tests/pos/i14048.scala new file mode 100644 index 000000000000..b815b68c72fd --- /dev/null +++ b/tests/pos/i14048.scala @@ -0,0 +1,5 @@ +class T: + inline def foo(): Unit = bar() + private inline def bar(): 
Unit = () + +def test(t: T) = t.foo() diff --git a/tests/pos/i14061.scala b/tests/pos/i14061.scala new file mode 100644 index 000000000000..1431c1248255 --- /dev/null +++ b/tests/pos/i14061.scala @@ -0,0 +1,10 @@ +def main(args: Array[String]): Unit = + val bar: PartialFunction[Throwable, Unit] = + case e: IllegalArgumentException => e.printStackTrace + case e: Throwable => e.printStackTrace + try + println("a") + catch + bar + finally + println("a") \ No newline at end of file diff --git a/tests/pos/i14152.scala b/tests/pos/i14152.scala new file mode 100644 index 000000000000..2377d5ffeae3 --- /dev/null +++ b/tests/pos/i14152.scala @@ -0,0 +1,30 @@ +val a1 = { + object O1 extends AnyRef + Array(O1) +} +val a2: Array[_ <: AnyRef] = aa1 + +val aa1 = { + object O1 extends AnyRef + Array(Array(O1)) +} +val aa2: Array[_ <: Array[_ <: AnyRef]] = aa1 + +val aaa1 = { + object O1 extends AnyRef + Array(Array(Array(O1))) +} +val aaa2: Array[_ <: Array[_ <: Array[_ <: AnyRef]]] = aaa1 + + +// Let's make sure avoidance still does the right thing given abstract type constructors + +class Inv[T](x: T) + +def foo[F[_]](fn: [A] => Inv[A] => F[A]) = + object O1 extends AnyRef + val res0 = fn(new Inv(fn(new Inv[O1.type](O1)))) + val res1: F[F[O1.type]] = res0 + res1 // checked with -Xprint:typer that this widens to Any + // instead of the original F[F[O1.type]] + // or the incorrectly avoided F[? <: F[? 
<: Object]] diff --git a/tests/pos/i14177b.scala b/tests/pos/i14177b.scala new file mode 100644 index 000000000000..6da9a72ae551 --- /dev/null +++ b/tests/pos/i14177b.scala @@ -0,0 +1,15 @@ +class T + +inline given fail1: T with + val cs = scala.compiletime.summonAll[EmptyTuple] +inline given fail2[X]: T with + val cs = scala.compiletime.summonAll[EmptyTuple] +inline given fail3(using DummyImplicit): T with + val cs = scala.compiletime.summonAll[EmptyTuple] + +inline given ok1: T = new T: + val cs = scala.compiletime.summonAll[EmptyTuple] +inline given ok2[X]: T = new T: + val cs = scala.compiletime.summonAll[EmptyTuple] +inline given ok3(using DummyImplicit): T = new T: + val cs = scala.compiletime.summonAll[EmptyTuple] diff --git a/tests/pos/i14214.scala b/tests/pos/i14214.scala new file mode 100644 index 000000000000..9bf1f8b5ff75 --- /dev/null +++ b/tests/pos/i14214.scala @@ -0,0 +1,16 @@ +class Dummy +given Dummy = ??? +trait Foo +given foo: Foo = ??? +trait Bar +given bar(using Dummy): Bar = ??? 
+ +object Test: + summon[Dummy ?=> Foo] // was error + summon[Dummy ?=> Foo](using foo) // works + summon[Dummy ?=> Foo](using (_: Dummy) ?=> foo) // works + summon[Dummy ?=> Bar] + summon[Dummy ?=> Bar](using bar) // works + summon[Dummy ?=> Bar](using (_: Dummy) ?=> bar) // works + + diff --git a/tests/pos/i14282.scala b/tests/pos/i14282.scala new file mode 100644 index 000000000000..2cc3ff1226e2 --- /dev/null +++ b/tests/pos/i14282.scala @@ -0,0 +1,13 @@ +trait Foo[A] { + inline def foo(): Unit +} + +inline given FooA[A]: Foo[A] with { + inline def foo(): Unit = println() +} +def test1 = FooA.foo() + +inline given FooInt: Foo[Int] with { + inline def foo(): Unit = println() +} +def test2 = FooInt.foo() diff --git a/tests/pos/i14349.min.scala b/tests/pos/i14349.min.scala new file mode 100644 index 000000000000..8922d73056d6 --- /dev/null +++ b/tests/pos/i14349.min.scala @@ -0,0 +1,13 @@ +class Inv[M] + +class Module: + type X + type Y + type M = Module { + type X = Module.this.X + type Y = Module.this.Y + } + def expose = new Inv[M] + def test = this match { case m => m.expose } + // was: leak: `(m : Module)` in `m.expose: Inv[? 
<: Module { X = m.X }]` + def res: Inv[_ <: Module] = this match { case m => m.expose } diff --git a/tests/pos/i14349.scala b/tests/pos/i14349.scala new file mode 100644 index 000000000000..05e4e1e3603f --- /dev/null +++ b/tests/pos/i14349.scala @@ -0,0 +1,19 @@ +trait Module: + self => + type M <: Module { + type X = self.X + type Y = self.Y + } + type X + type Y + + def expose: Expose[X, Y, M] + +trait Expose[ + X0, + Y0, + M <: Module { type X = X0; type Y = Y0 } +] + +def test(ms: Seq[Option[Module]]): Seq[Expose[_, _, _]] = + ms.collect { case Some(module) => module.expose } diff --git a/tests/pos/i14367.scala b/tests/pos/i14367.scala new file mode 100644 index 000000000000..d74f0aa8373e --- /dev/null +++ b/tests/pos/i14367.scala @@ -0,0 +1,7 @@ +def m(i: Int*) = i.sum +val f1 = m +val f2 = i => m(i*) + +def n(i: Seq[Int]) = i.sum +val g1 = n +val g2 = i => n(i) diff --git a/tests/pos/i14442.scala b/tests/pos/i14442.scala new file mode 100644 index 000000000000..60d7e26a0b3f --- /dev/null +++ b/tests/pos/i14442.scala @@ -0,0 +1,4 @@ +class Foo(val id: Int) { + inline def ==(that: Foo): Boolean = true +} +case class FooWrapper(foo: Foo) diff --git a/tests/pos/i5978.scala b/tests/pos/i5978.scala index 630a0ec4ea6c..f1954b8c8275 100644 --- a/tests/pos/i5978.scala +++ b/tests/pos/i5978.scala @@ -1,3 +1,4 @@ +package test import scala.language.implicitConversions opaque type Position[Buffer] = Int diff --git a/tests/pos/i6662.scala b/tests/pos/i6662.scala new file mode 100644 index 000000000000..af0c939c7ed9 --- /dev/null +++ b/tests/pos/i6662.scala @@ -0,0 +1,20 @@ +object opt: + opaque type Opt[A >: Null] = A + object Opt: + inline def unOpt[A >: Null](x: Opt[A]): A = x + inline def apply[A >: Null](x: A): Opt[A] = x + inline def some[A >: Null](x: A): Opt[A] = x + inline def none[A >: Null]: Opt[A] = null + inline def fromOption[A >: Null](x: Option[A]) = x.orNull + +import opt.Opt +extension [A >: Null](x: Opt[A]) + inline def nonEmpty : Boolean = x.get != null 
+ inline def isEmpty : Boolean = x.get == null + inline def isDefined: Boolean = x.nonEmpty + inline def get : A = Opt.unOpt(x) + +@main def Test = + val x: Opt[String] = Opt.some("abc") + assert(x.nonEmpty) + val y: String = Opt.unOpt(x) diff --git a/tests/pos/i6678.scala b/tests/pos/i6678.scala new file mode 100644 index 000000000000..e4fff1789595 --- /dev/null +++ b/tests/pos/i6678.scala @@ -0,0 +1,4 @@ +object O { + val f = (i:Int) => [T] => (s:String) => 2 + def m = f(1)("a") +} \ No newline at end of file diff --git a/tests/pos/i6854.scala b/tests/pos/i6854.scala new file mode 100644 index 000000000000..78ba9c19af7e --- /dev/null +++ b/tests/pos/i6854.scala @@ -0,0 +1,12 @@ +object Test { + import Lib.* + val xs: IArray2[Int] = IArray2(1) +} + +object Lib { + opaque type IArray2[+T] = Array[_ <: T] + + object IArray2 { + inline def apply(x: =>Int): IArray2[Int] = Array(x) + } +} diff --git a/tests/pos/i6864.scala b/tests/pos/i6864.scala index af0f3d22ba50..aa9376e0fc30 100644 --- a/tests/pos/i6864.scala +++ b/tests/pos/i6864.scala @@ -1,17 +1,17 @@ class A class B -given A with {} -given B with {} +given A() +given B() trait Foo trait Bar -given Foo with {} -given Bar with {} +given Foo() +given Bar() trait C trait Baz[A] -given C with {} -given [A]: Baz[A] with {} \ No newline at end of file +given C() +given [A]: Baz[A]() \ No newline at end of file diff --git a/tests/pos/i7034.scala b/tests/pos/i7034.scala new file mode 100644 index 000000000000..f9bce462aea8 --- /dev/null +++ b/tests/pos/i7034.scala @@ -0,0 +1,4780 @@ +val names = IArray( + "Aaron", + "Abdiel", + "Abdul", + "Abdullah", + "Abe", + "Abel", + "Abner", + "Abraham", + "Abram", + "Adalberto", + "Adam", + "Adan", + "Addison", + "Adelard", + "Adelbert", + "Aditya", + "Adolphus", + "Adonis", + "Adrain", + "Adrian", + "Adrien", + "Agustin", + "Ahmad", + "Ahmed", + "Aidan", + "Aiden", + "Akeem", + "Al", + "Alan", + "Albert", + "Alberto", + "Albin", + "Alden", + "Aldo", + "Alec", + "Alejandro", + 
"Alek", + "Alessandro", + "Alex", + "Alexander", + "Alexandre", + "Alexandrer", + "Alexandro", + "Alexis", + "Alf", + "Alfonso", + "Alfonzo", + "Alford", + "Alfred", + "Alfredo", + "Ali", + "Allan", + "Allen", + "Allie", + "Allison", + "Allyn", + "Alma", + "Alois", + "Alonso", + "Alonza", + "Alonzo", + "Aloysius", + "Alpha", + "Alphonse", + "Alphonso", + "Alton", + "Alva", + "Alvah", + "Alvaro", + "Alvie", + "Alvin", + "Alvis", + "Amado", + "Amador", + "Amanda", + "Amari", + "Ambrose", + "Americo", + "Amir", + "Amos", + "Amy", + "Anastacio", + "Anderson", + "Andra", + "Andre", + "Andrea", + "Andreas", + "Andres", + "Andrew", + "Andy", + "Angel", + "Angela", + "Angelo", + "Angus", + "Anibal", + "Anna", + "Ansel", + "Anselmo", + "Anthony", + "Antione", + "Antoine", + "Anton", + "Antone", + "Antonia", + "Antonio", + "Antony", + "Antwan", + "Antwon", + "Arcadio", + "Arch", + "Archibald", + "Archie", + "Ardell", + "Arden", + "Ari", + "Aric", + "Ariel", + "Arlan", + "Arlen", + "Arley", + "Arlie", + "Arlin", + "Arlis", + "Arlo", + "Arman", + "Armand", + "Armando", + "Armani", + "Armond", + "Arnaldo", + "Arne", + "Arno", + "Arnold", + "Arnoldo", + "Arnulfo", + "Aron", + "Arron", + "Art", + "Arther", + "Arthur", + "Artie", + "Artis", + "Arturo", + "Arvel", + "Arvid", + "Arvil", + "Arvin", + "Asa", + "Asher", + "Ashley", + "Ashton", + "Aubrey", + "Audie", + "Audrey", + "August", + "Augustine", + "Augustus", + "Aurelio", + "Austen", + "Austin", + "Auston", + "Austyn", + "Avery", + "Axel", + "Ayden", + "Bailey", + "Barbara", + "Barney", + "Baron", + "Barrett", + "Barron", + "Barry", + "Bart", + "Bartholome", + "Barton", + "Basil", + "Beau", + "Ben", + "Benedict", + "Benito", + "Benjamin", + "Bennett", + "Bennie", + "Benny", + "Benton", + "Bernard", + "Bernardo", + "Bernhard", + "Bernice", + "Bernie", + "Berry", + "Bert", + "Berton", + "Bertram", + "Bertrand", + "Beryl", + "Betty", + "Beverly", + "Bienvenido", + "Bill", + "Billie", + "Billy", + "Bishop", + "Blaine", + "Blair", 
+ "Blaise", + "Blake", + "Blane", + "Bo", + "Bob", + "Bobbie", + "Bobby", + "Bonnie", + "Booker", + "Boris", + "Boyce", + "Boyd", + "Brad", + "Braden", + "Bradford", + "Bradley", + "Bradly", + "Brady", + "Braeden", + "Braedon", + "Braiden", + "Brain", + "Brandan", + "Branden", + "Brandon", + "Brandt", + "Brandy", + "Brandyn", + "Brannon", + "Branson", + "Brant", + "Braulio", + "Braxton", + "Brayan", + "Brayden", + "Braydon", + "Brendan", + "Brenden", + "Brendon", + "Brennan", + "Brennen", + "Brennon", + "Brent", + "Brenton", + "Bret", + "Brett", + "Brian", + "Brice", + "Bridger", + "Brien", + "Britt", + "Brock", + "Broderick", + "Brody", + "Bronson", + "Brook", + "Brooks", + "Brown", + "Bruce", + "Bruno", + "Bryan", + "Bryant", + "Bryce", + "Brycen", + "Bryon", + "Bryson", + "Buck", + "Bud", + "Buddie", + "Buddy", + "Buford", + "Burdette", + "Burl", + "Burley", + "Burnell", + "Burt", + "Burton", + "Buster", + "Butch", + "Byron", + "Cade", + "Caden", + "Caesar", + "Cale", + "Caleb", + "Calvin", + "Camden", + "Cameron", + "Camren", + "Camron", + "Candelario", + "Candido", + "Carey", + "Carl", + "Carleton", + "Carlo", + "Carlos", + "Carlton", + "Carlyle", + "Carmelo", + "Carmen", + "Carmine", + "Carnell", + "Carol", + "Carolyn", + "Carrol", + "Carroll", + "Carson", + "Carter", + "Cary", + "Casey", + "Casimer", + "Casimir", + "Casper", + "Cassidy", + "Cayden", + "Cecil", + "Cecilio", + "Cedric", + "Cedrick", + "Celestino", + "Cesar", + "Chad", + "Chadd", + "Chadrick", + "Chadwick", + "Chaim", + "Chance", + "Chandler", + "Charles", + "Charley", + "Charlie", + "Chas", + "Chase", + "Chauncey", + "Chaz", + "Chesley", + "Chester", + "Chet", + "Chip", + "Chris", + "Christ", + "Christian", + "Christina", + "Christion", + "Christop", + "Christoper", + "Christophe", + "Christopher", + "Chuck", + "Cicero", + "Clair", + "Claire", + "Clarance", + "Clare", + "Clarence", + "Clark", + "Claud", + "Claude", + "Claudie", + "Claudio", + "Clay", + "Clayton", + "Clem", + "Clemens", + 
"Clement", + "Cleo", + "Cleon", + "Cletus", + "Cleve", + "Cleveland", + "Cliff", + "Clifford", + "Clifton", + "Clint", + "Clinton", + "Clovis", + "Cloyd", + "Clyde", + "Coby", + "Codey", + "Codie", + "Cody", + "Colby", + "Cole", + "Coleman", + "Colin", + "Collin", + "Colt", + "Colten", + "Colton", + "Columbus", + "Conner", + "Connie", + "Connor", + "Conor", + "Conrad", + "Constantin", + "Cooper", + "Corbin", + "Cordell", + "Corey", + "Cornelius", + "Cornell", + "Cortez", + "Cortney", + "Cory", + "Coty", + "Courtney", + "Coy", + "Craig", + "Crawford", + "Cris", + "Cristian", + "Cristobal", + "Cristopher", + "Cruz", + "Cullen", + "Curt", + "Curtis", + "Curtiss", + "Cyril", + "Cyrus", + "Dakota", + "Dakotah", + "Dale", + "Dallas", + "Dallin", + "Dalton", + "Dameon", + "Damian", + "Damien", + "Damion", + "Damon", + "Dan", + "Dana", + "Dandre", + "Dane", + "Dangelo", + "Danial", + "Daniel", + "Dannie", + "Danny", + "Dante", + "Daquan", + "Darby", + "Darell", + "Daren", + "Darian", + "Darien", + "Darin", + "Dario", + "Darion", + "Darius", + "Darnell", + "Darold", + "Daron", + "Darrel", + "Darrell", + "Darren", + "Darrian", + "Darrick", + "Darrien", + "Darrin", + "Darrion", + "Darrius", + "Darron", + "Darryl", + "Darwin", + "Daryl", + "Daryle", + "Dashawn", + "Dave", + "Davey", + "David", + "Davie", + "Davin", + "Davion", + "Davis", + "Davon", + "Davonte", + "Davy", + "Dawson", + "Dax", + "Dayton", + "Dean", + "Deandre", + "Deane", + "Deangelo", + "Deborah", + "Debra", + "Declan", + "Dedric", + "Dedrick", + "Dee", + "Deion", + "Dejuan", + "Del", + "Delano", + "Delbert", + "Dell", + "Delmar", + "Delmas", + "Delmer", + "Delton", + "Delvin", + "Demarco", + "Demarcus", + "Demario", + "Demetris", + "Demetrius", + "Demond", + "Dempsey", + "Denis", + "Dennie", + "Dennis", + "Denny", + "Denton", + "Denver", + "Denzel", + "Denzil", + "Deon", + "Deondre", + "Deontae", + "Deonte", + "Dequan", + "Dereck", + "Derek", + "Deric", + "Derick", + "Derik", + "Deron", + "Derrell", + 
"Derrick", + "Derwin", + "Deshaun", + "Deshawn", + "Desmond", + "Destin", + "Devan", + "Devante", + "Deven", + "Devin", + "Devon", + "Devonta", + "Devontae", + "Devonte", + "Devyn", + "Dewayne", + "Dewey", + "Dewitt", + "Dexter", + "Diamond", + "Dick", + "Dickie", + "Diego", + "Dillan", + "Dillard", + "Dillion", + "Dillon", + "Dimitri", + "Dino", + "Dion", + "Dionisio", + "Dionte", + "Dirk", + "Dock", + "Dolores", + "Domenic", + "Domenick", + "Domenico", + "Domingo", + "Dominic", + "Dominick", + "Dominik", + "Dominique", + "Dominque", + "Don", + "Donal", + "Donald", + "Donato", + "Donavan", + "Donavon", + "Donell", + "Donn", + "Donna", + "Donnell", + "Donnie", + "Donny", + "Donovan", + "Donta", + "Dontae", + "Donte", + "Dorian", + "Doris", + "Dorman", + "Dorothy", + "Dorris", + "Dorsey", + "Doug", + "Douglas", + "Douglass", + "Doyle", + "Drake", + "Draven", + "Drew", + "Duane", + "Dudley", + "Duke", + "Duncan", + "Durward", + "Durwood", + "Dustin", + "Dusty", + "Duwayne", + "Dwain", + "Dwaine", + "Dwane", + "Dwayne", + "Dwight", + "Dylan", + "Dyllan", + "Dylon", + "Ean", + "Earl", + "Earle", + "Earlie", + "Early", + "Earnest", + "Easton", + "Ed", + "Edd", + "Eddie", + "Eddy", + "Edgar", + "Edgardo", + "Edison", + "Edmond", + "Edmund", + "Edsel", + "Eduardo", + "Edward", + "Edwardo", + "Edwin", + "Efrain", + "Efren", + "Einar", + "Eino", + "Eladio", + "Elbert", + "Elden", + "Eldon", + "Eldred", + "Eldridge", + "Elgin", + "Eli", + "Elias", + "Eliezer", + "Elijah", + "Eliot", + "Eliseo", + "Elisha", + "Elizabeth", + "Elliot", + "Elliott", + "Ellis", + "Ellsworth", + "Ellwood", + "Elmer", + "Elmo", + "Elmore", + "Eloy", + "Elroy", + "Elton", + "Elvin", + "Elvis", + "Elwin", + "Elwood", + "Elwyn", + "Elza", + "Elzie", + "Emanuel", + "Emerson", + "Emery", + "Emil", + "Emile", + "Emiliano", + "Emilio", + "Emma", + "Emmanuel", + "Emmet", + "Emmett", + "Emmit", + "Emmitt", + "Emory", + "Ennis", + "Enoch", + "Enos", + "Enrico", + "Enrique", + "Ephraim", + "Epifanio", + 
"Erasmo", + "Eric", + "Erich", + "Erick", + "Erik", + "Erin", + "Erling", + "Ernest", + "Ernesto", + "Ernie", + "Ernst", + "Errol", + "Ervin", + "Erwin", + "Esteban", + "Estel", + "Estevan", + "Ethan", + "Eugene", + "Eugenio", + "Eusebio", + "Evan", + "Evans", + "Everett", + "Everette", + "Evert", + "Ewald", + "Ewell", + "Ezekiel", + "Ezell", + "Ezequiel", + "Ezra", + "Fabian", + "Faron", + "Farrell", + "Faustino", + "Fay", + "Federico", + "Felipe", + "Felix", + "Felton", + "Ferdinand", + "Fermin", + "Fernand", + "Fernando", + "Fidel", + "Finis", + "Finley", + "Fletcher", + "Florence", + "Florencio", + "Florentino", + "Florian", + "Floyd", + "Ford", + "Forest", + "Forrest", + "Fortunato", + "Foster", + "Foy", + "Frances", + "Francesco", + "Francis", + "Francisco", + "Frank", + "Frankie", + "Franklin", + "Franklyn", + "Franz", + "Fred", + "Freddie", + "Freddy", + "Frederic", + "Frederick", + "Fredric", + "Fredrick", + "Fredy", + "Freeman", + "Friedrich", + "Fritz", + "Furman", + "Gabe", + "Gabriel", + "Gaetano", + "Gage", + "Gail", + "Gale", + "Galen", + "Gardner", + "Garett", + "Garfield", + "Garland", + "Garnet", + "Garnett", + "Garold", + "Garret", + "Garrett", + "Garrick", + "Garrison", + "Garry", + "Garth", + "Gary", + "Gaston", + "Gaven", + "Gavin", + "Gayle", + "Gaylon", + "Gaylord", + "Gearld", + "Geary", + "Genaro", + "Gene", + "General", + "Gennaro", + "Geo", + "Geoffrey", + "George", + "Gerald", + "Geraldo", + "Gerard", + "Gerardo", + "Gerhard", + "Germaine", + "German", + "Gerold", + "Gerry", + "Giancarlo", + "Gianni", + "Gil", + "Gilbert", + "Gilberto", + "Giles", + "Gino", + "Giovanni", + "Giovanny", + "Giuseppe", + "Glen", + "Glendon", + "Glenn", + "Glynn", + "Godfrey", + "Golden", + "Gonzalo", + "Gordon", + "Grace", + "Grady", + "Graham", + "Grant", + "Granville", + "Grayson", + "Green", + "Greg", + "Gregg", + "Greggory", + "Gregorio", + "Gregory", + "Greyson", + "Griffin", + "Grover", + "Guadalupe", + "Guido", + "Guillermo", + "Guiseppe", + 
"Gunnar", + "Gunner", + "Gus", + "Gust", + "Gustav", + "Gustave", + "Gustavo", + "Guy", + "Hakeem", + "Hal", + "Hallie", + "Hamilton", + "Hamza", + "Hank", + "Hans", + "Hansel", + "Harding", + "Hardy", + "Harlan", + "Harland", + "Harlen", + "Harley", + "Harmon", + "Harold", + "Harris", + "Harrison", + "Harry", + "Harvey", + "Haskell", + "Hassan", + "Hayden", + "Hayward", + "Haywood", + "Hazel", + "Heath", + "Heber", + "Hector", + "Helen", + "Helmer", + "Henderson", + "Henri", + "Henry", + "Herb", + "Herbert", + "Heriberto", + "Herman", + "Hermann", + "Herminio", + "Hermon", + "Hernan", + "Herschel", + "Hershel", + "Hezekiah", + "Hilario", + "Hilbert", + "Hillard", + "Hilliard", + "Hilton", + "Hipolito", + "Hiram", + "Hobart", + "Hobert", + "Holden", + "Hollie", + "Hollis", + "Homer", + "Hoover", + "Horace", + "Hosea", + "Houston", + "Howard", + "Howell", + "Hoyt", + "Hubert", + "Hudson", + "Huey", + "Hugh", + "Hugo", + "Humberto", + "Hunter", + "Hurley", + "Huston", + "Hyman", + "Ian", + "Ibrahim", + "Ignacio", + "Ignatius", + "Ike", + "Infant", + "Ira", + "Irvin", + "Irving", + "Irwin", + "Isaac", + "Isabel", + "Isadore", + "Isai", + "Isaiah", + "Isaias", + "Ishmael", + "Isiah", + "Isidore", + "Isidro", + "Ismael", + "Isom", + "Israel", + "Issac", + "Ivan", + "Ivey", + "Ivory", + "Ivy", + "Izaiah", + "Jabari", + "Jace", + "Jacinto", + "Jack", + "Jackie", + "Jackson", + "Jacky", + "Jacob", + "Jacoby", + "Jacques", + "Jacquez", + "Jade", + "Jaden", + "Jadon", + "Jaiden", + "Jaime", + "Jairo", + "Jake", + "Jakob", + "Jaleel", + "Jalen", + "Jalon", + "Jamaal", + "Jamal", + "Jamar", + "Jamarcus", + "Jamari", + "Jame", + "Jamel", + "James", + "Jameson", + "Jamey", + "Jamie", + "Jamil", + "Jamison", + "Jammie", + "Jan", + "Janis", + "Jaquan", + "Jared", + "Jaren", + "Jaret", + "Jarett", + "Jarod", + "Jaron", + "Jarred", + "Jarrell", + "Jarret", + "Jarrett", + "Jarrod", + "Jarvis", + "Jasen", + "Jason", + "Jasper", + "Javier", + "Javon", + "Javonte", + "Jaxon", + 
"Jaxson", + "Jay", + "Jayce", + "Jayden", + "Jaydon", + "Jaylan", + "Jaylen", + "Jaylin", + "Jaylon", + "Jayme", + "Jayson", + "Jean", + "Jed", + "Jedediah", + "Jedidiah", + "Jeff", + "Jefferey", + "Jefferson", + "Jeffery", + "Jeffrey", + "Jeffry", + "Jelani", + "Jennifer", + "Jennings", + "Jensen", + "Jerad", + "Jerald", + "Jeramie", + "Jeramy", + "Jere", + "Jered", + "Jerel", + "Jeremey", + "Jeremiah", + "Jeremie", + "Jeremy", + "Jermain", + "Jermaine", + "Jermey", + "Jerod", + "Jerold", + "Jerome", + "Jeromy", + "Jerrell", + "Jerrod", + "Jerrold", + "Jerry", + "Jess", + "Jesse", + "Jessica", + "Jessie", + "Jessy", + "Jesus", + "Jett", + "Jevon", + "Jewel", + "Jewell", + "Jim", + "Jimmie", + "Jimmy", + "Joan", + "Joaquin", + "Jodie", + "Jody", + "Joe", + "Joel", + "Joesph", + "Joey", + "Johan", + "Johann", + "Johathan", + "John", + "Johnathan", + "Johnathon", + "Johnie", + "Johnnie", + "Johnny", + "Johnpaul", + "Johnson", + "Jomar", + "Jon", + "Jonah", + "Jonas", + "Jonatan", + "Jonathan", + "Jonathon", + "Jonnie", + "Jordan", + "Jorden", + "Jordi", + "Jordon", + "Jordy", + "Jorge", + "Jose", + "Josef", + "Joseph", + "Josh", + "Joshua", + "Joshuah", + "Josiah", + "Josue", + "Jovan", + "Jovani", + "Jovanny", + "Jovany", + "Joy", + "Joyce", + "Juan", + "Judah", + "Judd", + "Jude", + "Judge", + "Judson", + "Jules", + "Julian", + "Julien", + "Julio", + "Julius", + "June", + "Junior", + "Junious", + "Junius", + "Justen", + "Justice", + "Justin", + "Justine", + "Justo", + "Juston", + "Justus", + "Justyn", + "Juwan", + "Kade", + "Kadeem", + "Kaden", + "Kadin", + "Kai", + "Kale", + "Kaleb", + "Kalen", + "Kalvin", + "Kameron", + "Kamron", + "Kane", + "Kareem", + "Karen", + "Karl", + "Karson", + "Kasey", + "Kay", + "Keagan", + "Keanu", + "Keaton", + "Keegan", + "Keenan", + "Kegan", + "Keith", + "Kelby", + "Kellen", + "Kelley", + "Kelly", + "Kelsey", + "Kelton", + "Kelvin", + "Ken", + "Kendal", + "Kendall", + "Kendrick", + "Kennedy", + "Kenneth", + "Kenney", + "Kennith", + 
"Kennth", + "Kenny", + "Kent", + "Kenton", + "Kenya", + "Kenyatta", + "Kenyon", + "Keon", + "Kermit", + "Kerry", + "Kerwin", + "Keshawn", + "Keven", + "Kevin", + "Kevon", + "Keyon", + "Keyshawn", + "Khalid", + "Khalil", + "Kiel", + "Kieran", + "Killian", + "Kim", + "Kimberly", + "King", + "Kip", + "Kirby", + "Kirk", + "Kirt", + "Kobe", + "Koby", + "Kody", + "Kolby", + "Kole", + "Kolton", + "Konnor", + "Korbin", + "Kordell", + "Korey", + "Kory", + "Kraig", + "Kris", + "Kristian", + "Kristofer", + "Kristoffer", + "Kristopher", + "Kurt", + "Kurtis", + "Kwame", + "Kylan", + "Kyle", + "Kyler", + "Kyree", + "Lacy", + "Ladarius", + "Laddie", + "Lafayette", + "Lamar", + "Lambert", + "Lamont", + "Lance", + "Landen", + "Landon", + "Lane", + "Lannie", + "Lanny", + "Laron", + "Larry", + "Lars", + "Lashawn", + "Latrell", + "Laurel", + "Lauren", + "Laurence", + "Lavern", + "Laverne", + "Lawerence", + "Lawrence", + "Lawson", + "Layne", + "Lazaro", + "Leamon", + "Leander", + "Lee", + "Leeroy", + "Leif", + "Leigh", + "Leland", + "Lemuel", + "Len", + "Lenard", + "Lennie", + "Lenny", + "Leo", + "Leon", + "Leonard", + "Leonardo", + "Leonel", + "Leopold", + "Leopoldo", + "Leroy", + "Les", + "Lesley", + "Leslie", + "Lester", + "Levar", + "Levi", + "Lew", + "Lewis", + "Liam", + "Lillian", + "Lincoln", + "Linda", + "Lindsay", + "Lindsey", + "Linwood", + "Lionel", + "Lisa", + "Llewellyn", + "Lloyd", + "Logan", + "Lois", + "Lon", + "Lonnie", + "Lonny", + "Lonzo", + "Loran", + "Loren", + "Lorenz", + "Lorenza", + "Lorenzo", + "Lorin", + "Lorne", + "Lou", + "Louie", + "Louis", + "Lowell", + "Loy", + "Loyal", + "Loyd", + "Lucas", + "Lucian", + "Luciano", + "Lucien", + "Lucious", + "Lucius", + "Ludwig", + "Luigi", + "Luis", + "Lukas", + "Luke", + "Lupe", + "Luther", + "Lyle", + "Lyman", + "Lyn", + "Lyndon", + "Lynn", + "Lynwood", + "Mac", + "Mack", + "Mackenzie", + "Madison", + "Magnus", + "Mahlon", + "Major", + "Malachi", + "Malcolm", + "Malcom", + "Malik", + "Malique", + "Manley", + "Manuel", 
+ "Marc", + "Marcel", + "Marcelino", + "Marcellus", + "Marcelo", + "Marcial", + "Marco", + "Marcos", + "Marcus", + "Margaret", + "Margarito", + "Maria", + "Mariano", + "Marie", + "Mario", + "Marion", + "Mark", + "Markel", + "Markell", + "Markus", + "Marlin", + "Marlon", + "Marlyn", + "Marques", + "Marquez", + "Marquis", + "Marquise", + "Marshal", + "Marshall", + "Martin", + "Marty", + "Marvin", + "Mary", + "Mason", + "Mateo", + "Mathew", + "Mathias", + "Matt", + "Matteo", + "Matthew", + "Maurice", + "Mauricio", + "Maury", + "Maverick", + "Max", + "Maxie", + "Maximilian", + "Maximillia", + "Maximillian", + "Maximino", + "Maximo", + "Maxwell", + "Maynard", + "Mcarthur", + "Mckinley", + "Mearl", + "Mekhi", + "Mel", + "Melissa", + "Melton", + "Melville", + "Melvin", + "Melvyn", + "Meredith", + "Merl", + "Merle", + "Merlin", + "Merlyn", + "Merrill", + "Merritt", + "Merton", + "Mervin", + "Mervyn", + "Meyer", + "Micah", + "Michael", + "Michale", + "Micheal", + "Michel", + "Michele", + "Michelle", + "Mickey", + "Miguel", + "Miguelangel", + "Mike", + "Mikel", + "Milan", + "Milburn", + "Miles", + "Milford", + "Millard", + "Miller", + "Milo", + "Milton", + "Misael", + "Mitch", + "Mitchel", + "Mitchell", + "Modesto", + "Moe", + "Mohamed", + "Mohammad", + "Mohammed", + "Moises", + "Monroe", + "Monserrate", + "Montana", + "Monte", + "Montgomery", + "Monty", + "Morgan", + "Morris", + "Mortimer", + "Morton", + "Mose", + "Moses", + "Moshe", + "Muhammad", + "Murl", + "Murphy", + "Murray", + "Murry", + "Myles", + "Myron", + "Najee", + "Nakia", + "Nancy", + "Napoleon", + "Narciso", + "Nash", + "Nasir", + "Nat", + "Nathan", + "Nathanael", + "Nathanial", + "Nathaniel", + "Nathen", + "Neal", + "Ned", + "Neftali", + "Nehemiah", + "Neil", + "Nels", + "Nelson", + "Nestor", + "Newell", + "Newman", + "Newton", + "Nicholas", + "Nicholaus", + "Nick", + "Nicklaus", + "Nickolas", + "Nicky", + "Nico", + "Nicola", + "Nicolas", + "Nigel", + "Nikhil", + "Niko", + "Nikolas", + "Niles", + "Nils", + 
"Noah", + "Noble", + "Noe", + "Noel", + "Nolan", + "Norbert", + "Norberto", + "Norman", + "Normand", + "Norris", + "Norton", + "Norval", + "Norwood", + "Nunzio", + "Oakley", + "Obie", + "Ocie", + "Octavio", + "Odell", + "Odie", + "Odis", + "Okey", + "Olaf", + "Olan", + "Ole", + "Olen", + "Olin", + "Oliver", + "Ollie", + "Omar", + "Omari", + "Omer", + "Oneal", + "Ora", + "Oral", + "Oran", + "Oren", + "Orie", + "Orin", + "Orion", + "Orland", + "Orlando", + "Orlo", + "Orrin", + "Orval", + "Orvil", + "Orville", + "Osborne", + "Oscar", + "Osvaldo", + "Oswald", + "Otha", + "Otho", + "Otis", + "Ottis", + "Otto", + "Owen", + "Pablo", + "Palmer", + "Paris", + "Parker", + "Parrish", + "Pascual", + "Pasquale", + "Pat", + "Patricia", + "Patrick", + "Patsy", + "Paul", + "Paxton", + "Payton", + "Pearl", + "Pedro", + "Percival", + "Percy", + "Pernell", + "Perry", + "Pershing", + "Pete", + "Peter", + "Peyton", + "Phil", + "Philip", + "Phillip", + "Phoenix", + "Pierce", + "Pierre", + "Pietro", + "Pink", + "Porfirio", + "Porter", + "Preston", + "Price", + "Primitivo", + "Prince", + "Quentin", + "Quincy", + "Quinn", + "Quinten", + "Quintin", + "Quinton", + "Radames", + "Raekwon", + "Rafael", + "Raheem", + "Rahsaan", + "Rahul", + "Raleigh", + "Ralph", + "Ramiro", + "Ramon", + "Ramsey", + "Rand", + "Randal", + "Randall", + "Randel", + "Randell", + "Randle", + "Randolph", + "Randy", + "Ransom", + "Raoul", + "Raphael", + "Rashad", + "Rashawn", + "Rasheed", + "Raul", + "Raven", + "Ray", + "Rayburn", + "Rayford", + "Raymon", + "Raymond", + "Raymundo", + "Raynard", + "Rayshawn", + "Reagan", + "Reece", + "Reed", + "Reese", + "Refugio", + "Reggie", + "Reginal", + "Reginald", + "Regis", + "Reid", + "Reilly", + "Reinaldo", + "Reinhold", + "Reino", + "Remington", + "Rene", + "Reno", + "Reuben", + "Rex", + "Rexford", + "Rey", + "Reyes", + "Reynaldo", + "Reynold", + "Rhett", + "Ricardo", + "Rich", + "Richard", + "Richie", + "Richmond", + "Rick", + "Rickey", + "Ricki", + "Rickie", + "Ricky", + 
"Rico", + "Ridge", + "Rigoberto", + "Riley", + "Ritchie", + "River", + "Rob", + "Robb", + "Robbie", + "Robby", + "Robert", + "Roberto", + "Robin", + "Rocco", + "Rock", + "Rocky", + "Rod", + "Roderick", + "Rodger", + "Rodney", + "Rodolfo", + "Rodrick", + "Rodrigo", + "Roel", + "Rogelio", + "Roger", + "Rogers", + "Rohan", + "Roland", + "Rolando", + "Rolf", + "Rolla", + "Rolland", + "Rollie", + "Rollin", + "Roman", + "Romeo", + "Ron", + "Ronald", + "Ronaldo", + "Ronnie", + "Ronny", + "Roosevelt", + "Rory", + "Rosario", + "Roscoe", + "Rose", + "Rosendo", + "Rosevelt", + "Ross", + "Roswell", + "Rowan", + "Rowland", + "Roy", + "Royal", + "Royce", + "Ruben", + "Rubin", + "Ruby", + "Rudolf", + "Rudolph", + "Rudy", + "Rueben", + "Rufus", + "Rupert", + "Russ", + "Russel", + "Russell", + "Rusty", + "Ruth", + "Ryan", + "Ryder", + "Rylan", + "Ryne", + "Sage", + "Sal", + "Salvador", + "Salvatore", + "Sam", + "Samir", + "Sammie", + "Sammy", + "Samson", + "Samuel", + "Sandra", + "Sandy", + "Sanford", + "Santiago", + "Santo", + "Santos", + "Sarah", + "Saul", + "Savion", + "Sawyer", + "Schuyler", + "Scot", + "Scott", + "Scottie", + "Scotty", + "Seamus", + "Sean", + "Sebastian", + "Sedrick", + "Selmer", + "Semaj", + "Sergio", + "Seth", + "Seymour", + "Shad", + "Shamar", + "Shane", + "Shannon", + "Shanon", + "Shaquille", + "Sharon", + "Shaun", + "Shawn", + "Shay", + "Shayne", + "Shea", + "Shelby", + "Sheldon", + "Shelton", + "Shemar", + "Sherman", + "Sherrill", + "Sherwin", + "Sherwood", + "Shirley", + "Shon", + "Sid", + "Sidney", + "Sigmund", + "Sigurd", + "Silas", + "Silvio", + "Sim", + "Simeon", + "Simon", + "Sixto", + "Skylar", + "Skyler", + "Smith", + "Sol", + "Solomon", + "Solon", + "Sonny", + "Spencer", + "Spenser", + "Stacey", + "Stacy", + "Stan", + "Stanford", + "Stanley", + "Stanton", + "Stefan", + "Stephan", + "Stephanie", + "Stephen", + "Stephon", + "Sterling", + "Stetson", + "Stevan", + "Steve", + "Steven", + "Stevie", + "Stewart", + "Stone", + "Stoney", + "Storm", + 
"Stuart", + "Sumner", + "Susan", + "Sven", + "Sydney", + "Syed", + "Sylvan", + "Sylvester", + "Tad", + "Tahj", + "Taj", + "Talmadge", + "Talon", + "Tammy", + "Tanner", + "Tarik", + "Tariq", + "Tate", + "Taurean", + "Tavares", + "Tavian", + "Tavion", + "Tavon", + "Tayler", + "Taylor", + "Ted", + "Teddy", + "Telly", + "Teodoro", + "Terence", + "Terrance", + "Terrell", + "Terrence", + "Terrill", + "Terry", + "Tevin", + "Thad", + "Thaddeus", + "Theadore", + "Theo", + "Theodore", + "Theron", + "Thomas", + "Thornton", + "Thurman", + "Thurston", + "Tillman", + "Tim", + "Timmie", + "Timmothy", + "Timmy", + "Timothy", + "Tito", + "Titus", + "Tobias", + "Tobin", + "Toby", + "Tod", + "Todd", + "Tom", + "Tomas", + "Tommie", + "Tommy", + "Toney", + "Tony", + "Torey", + "Torrance", + "Torrey", + "Tory", + "Toy", + "Trace", + "Tracey", + "Tracy", + "Trae", + "Travis", + "Travon", + "Tre", + "Tremaine", + "Tremayne", + "Trent", + "Trenton", + "Trever", + "Trevin", + "Trevion", + "Trevon", + "Trevor", + "Trey", + "Treyvon", + "Trinidad", + "Trinity", + "Tristan", + "Tristen", + "Tristian", + "Tristin", + "Triston", + "Troy", + "Truman", + "Trystan", + "Tucker", + "Turner", + "Ty", + "Tyler", + "Tylor", + "Tyquan", + "Tyree", + "Tyreek", + "Tyreese", + "Tyrek", + "Tyreke", + "Tyrel", + "Tyrell", + "Tyrese", + "Tyrik", + "Tyriq", + "Tyrique", + "Tyron", + "Tyrone", + "Tyshawn", + "Tyson", + "Ulises", + "Ulysses", + "Urban", + "Uriah", + "Uriel", + "Val", + "Valentin", + "Valentine", + "Van", + "Vance", + "Vaughn", + "Vergil", + "Verl", + "Verlin", + "Vern", + "Verne", + "Vernell", + "Verner", + "Vernie", + "Vernon", + "Vester", + "Vicente", + "Victor", + "Vidal", + "Vince", + "Vincent", + "Vincenzo", + "Vinson", + "Virgil", + "Virgilio", + "Virginia", + "Vito", + "Vivian", + "Vladimir", + "Von", + + + "Wade", + "Waino", + "Waldemar", + "Waldo", + "Walker", + "Wallace", + "Wally", + "Walter", + "Walton", + "Ward", + "Wardell", + "Warner", + "Warren", + "Washington", + "Watson", + 
"Wayland", + "Waylon", + "Waymon", + "Wayne", + "Webster", + "Weldon", + "Wellington", + "Welton", + "Wendell", + "Werner", + "Wes", + "Wesley", + "Westley", + "Weston", + "Wheeler", + "Whitney", + "Wilber", + "Wilbert", + "Wilbur", + "Wilburn", + "Wiley", + "Wilford", + "Wilfred", + "Wilfredo", + "Wilfrid", + "Wilhelm", + "Will", + "Willard", + "William", + "Williams", + "Willian", + "Willie", + "Willis", + "Willy", + "Wilmer", + "Wilson", + "Wilton", + "Windell", + "Winfield", + "Winford", + "Winfred", + "Winston", + "Winton", + "Wm", + "Woodrow", + "Woody", + "Worth", + "Wyatt", + "Wylie", + "Wyman", + "Xander", + "Xavier", + "Yosef", + "Zachariah", + "Zachary", + "Zachery", + "Zack", + "Zackary", + "Zackery", + "Zaire", + "Zakary", + "Zane", + "Zechariah", + "Zigmund", + "Zion", + "Aaliyah", + "Abagail", + "Abbey", + "Abbie", + "Abbigail", + "Abby", + "Abigail", + "Abigale", + "Abigayle", + "Abril", + "Ada", + "Adah", + "Adaline", + "Addie", + "Addison", + "Adela", + "Adelaida", + "Adelaide", + "Adele", + "Adelia", + "Adelina", + "Adeline", + "Adell", + "Adella", + "Adelle", + "Adrian", + "Adriana", + "Adriane", + "Adrianna", + "Adrianne", + "Adrienne", + "Afton", + "Agatha", + "Agnes", + "Agustina", + "Aida", + "Aileen", + "Aimee", + "Aisha", + "Aiyana", + "Aja", + "Alaina", + "Alana", + "Alanis", + "Alanna", + "Alayna", + "Alba", + "Alberta", + "Albertha", + "Albertine", + "Albina", + "Alda", + "Aleah", + "Alecia", + "Aleen", + "Alejandra", + "Alejandrin", + "Alena", + "Alene", + "Alesha", + "Alesia", + "Alessandra", + "Aleta", + "Aletha", + "Alex", + "Alexa", + "Alexander", + "Alexandra", + "Alexandrea", + "Alexandria", + "Alexia", + "Alexis", + "Alexus", + "Alexys", + "Alfreda", + "Ali", + "Alia", + "Alice", + "Alicia", + "Alida", + "Alina", + "Aline", + "Alisa", + "Alisha", + "Alison", + "Alissa", + "Alivia", + "Aliya", + "Aliyah", + "Alize", + "Allene", + "Allie", + "Alline", + "Allison", + "Ally", + "Allyson", + "Allyssa", + "Alma", + "Almeda", + 
"Almeta", + "Almira", + "Alondra", + "Alpha", + "Alta", + "Altagracia", + "Altha", + "Althea", + "Alva", + "Alvena", + "Alvera", + "Alverta", + "Alvina", + "Alyce", + "Alycia", + "Alysa", + "Alyse", + "Alysha", + "Alysia", + "Alyson", + "Alyssa", + "Alyssia", + "Amalia", + "Amanda", + "Amani", + "Amari", + "Amaya", + "Amber", + "Amelia", + "America", + "Ami", + "Amie", + "Amira", + "Amparo", + "Amy", + "Ana", + "Anabel", + "Anahi", + "Anais", + "Anastasia", + "Andrea", + "Andrew", + "Andria", + "Angel", + "Angela", + "Angelia", + "Angelica", + "Angelina", + "Angeline", + "Angelique", + "Angelita", + "Angie", + "Anika", + "Anissa", + "Anita", + "Anitra", + "Aniya", + "Aniyah", + "Anjanette", + "Anjelica", + "Ann", + "Anna", + "Annabel", + "Annabell", + "Annabelle", + "Annamae", + "Annamarie", + "Anne", + "Annemarie", + "Annetta", + "Annette", + "Annie", + "Annika", + "Annis", + "Annmarie", + "Ansley", + "Anthony", + "Antionette", + "Antoinette", + "Antonetta", + "Antonette", + "Antonia", + "Antonietta", + "Antonina", + "Anya", + "April", + "Ara", + "Araceli", + "Ardell", + "Ardella", + "Ardis", + "Ardith", + "Aretha", + "Ariana", + "Arianna", + "Ariel", + "Arielle", + "Arleen", + "Arlene", + "Arlie", + "Arline", + "Armani", + "Armida", + "Arrie", + "Arthur", + "Artie", + "Arvilla", + "Asha", + "Ashanti", + "Ashely", + "Ashlee", + "Ashleigh", + "Ashley", + "Ashli", + "Ashlie", + "Ashly", + "Ashlyn", + "Ashlynn", + "Ashton", + "Ashtyn", + "Asia", + "Aspen", + "Assunta", + "Astrid", + "Athena", + "Aubree", + "Aubrey", + "Audie", + "Audra", + "Audrey", + "Audry", + "Augusta", + "Augustine", + "Aurea", + "Aurelia", + "Aurora", + "Aurore", + "Autumn", + "Ava", + "Avery", + "Avis", + "Awilda", + "Ayana", + "Ayanna", + "Ayla", + "Aylin", + "Bailee", + "Bailey", + "Bambi", + "Barb", + "Barbara", + "Barbra", + "Baylee", + "Beatrice", + "Beatriz", + "Beaulah", + "Becky", + "Belinda", + "Bell", + "Bella", + "Belle", + "Belva", + "Benita", + "Bennie", + "Berenice", + 
"Bernadette", + "Bernadine", + "Bernardine", + "Berneice", + "Bernice", + "Berniece", + "Bernita", + "Berta", + "Bertha", + "Bertie", + "Beryl", + "Bess", + "Bessie", + "Beth", + "Bethany", + "Bethel", + "Betsy", + "Bette", + "Bettie", + "Bettina", + "Betty", + "Bettye", + "Beulah", + "Beverlee", + "Beverley", + "Beverly", + "Bianca", + "Billie", + "Billy", + "Billye", + "Birdie", + "Blair", + "Blanca", + "Blanch", + "Blanche", + "Blossom", + "Bobbi", + "Bobbie", + "Bobby", + "Bobbye", + "Bonita", + "Bonnie", + "Bonny", + "Brandee", + "Brandi", + "Brandie", + "Brandon", + "Brandy", + "Brea", + "Breana", + "Breann", + "Breanna", + "Breanne", + "Bree", + "Brenda", + "Brenna", + "Breonna", + "Bria", + "Brian", + "Briana", + "Brianna", + "Brianne", + "Bridget", + "Bridgett", + "Bridgette", + "Bridie", + "Brielle", + "Brigette", + "Brigitte", + "Brionna", + "Britany", + "Britney", + "Britni", + "Brittani", + "Brittanie", + "Brittany", + "Brittney", + "Brittni", + "Brittny", + "Brook", + "Brooke", + "Brooklyn", + "Brooklynn", + "Brunilda", + "Bryana", + "Bryanna", + "Bryn", + "Brynn", + "Buffy", + "Bulah", + "Burnice", + "Caitlin", + "Caitlyn", + "Caitlynn", + "Caleigh", + "Cali", + "Calista", + "Callie", + "Cameron", + "Camila", + "Camilla", + "Camille", + "Camryn", + "Candace", + "Candi", + "Candice", + "Candida", + "Candis", + "Candy", + "Cara", + "Caren", + "Carey", + "Cari", + "Carie", + "Carina", + "Carisa", + "Carissa", + "Carl", + "Carla", + "Carlee", + "Carleen", + "Carlene", + "Carley", + "Carli", + "Carlie", + "Carlotta", + "Carly", + "Carma", + "Carmel", + "Carmela", + "Carmelita", + "Carmella", + "Carmen", + "Carol", + "Carolann", + "Carole", + "Carolee", + "Carolina", + "Caroline", + "Carolyn", + "Carolynn", + "Caron", + "Carri", + "Carrie", + "Carrol", + "Carroll", + "Carson", + "Cary", + "Caryl", + "Caryn", + "Casandra", + "Casey", + "Casie", + "Cassandra", + "Cassidy", + "Cassie", + "Catalina", + "Caterina", + "Catharine", + "Catherine", + "Cathey", + 
"Cathi", + "Cathie", + "Cathleen", + "Cathrine", + "Cathryn", + "Cathy", + "Catina", + "Catrina", + "Cayla", + "Cecelia", + "Cecil", + "Cecile", + "Cecilia", + "Ceil", + "Celeste", + "Celestine", + "Celia", + "Celina", + "Celine", + "Chana", + "Chanda", + "Chandler", + "Chandra", + "Chanel", + "Chanelle", + "Chantal", + "Chantel", + "Chantelle", + "Charissa", + "Charisse", + "Charity", + "Charla", + "Charleen", + "Charlene", + "Charles", + "Charlie", + "Charline", + "Charlotte", + "Charmaine", + "Charolette", + "Chase", + "Chasity", + "Chastity", + "Chaya", + "Chelsea", + "Chelsey", + "Chelsi", + "Chelsie", + "Cheri", + "Cherie", + "Cherise", + "Cherish", + "Cherri", + "Cherry", + "Cheryl", + "Cheryle", + "Cheyanne", + "Cheyenne", + "China", + "Chiquita", + "Chloe", + "Chris", + "Chrissy", + "Christa", + "Christal", + "Christeen", + "Christel", + "Christen", + "Christene", + "Christi", + "Christian", + "Christiana", + "Christie", + "Christin", + "Christina", + "Christine", + "Christophe", + "Christy", + "Chrystal", + "Chyna", + "Ciara", + "Ciera", + "Cierra", + "Cinda", + "Cindi", + "Cindy", + "Citlalli", + "Claire", + "Clara", + "Clarabelle", + "Clare", + "Claribel", + "Clarice", + "Clarine", + "Clarissa", + "Claudette", + "Claudia", + "Claudie", + "Claudine", + "Clementina", + "Clementine", + "Clemmie", + "Cleo", + "Cleora", + "Cleta", + "Clotilde", + "Clyde", + "Cody", + "Coleen", + "Colette", + "Colleen", + "Collette", + "Concepcion", + "Concetta", + "Connie", + "Constance", + "Consuelo", + "Cora", + "Coral", + "Cordelia", + "Cordia", + "Cordie", + "Corene", + "Corey", + "Cori", + "Corina", + "Corine", + "Corinna", + "Corinne", + "Corliss", + "Cornelia", + "Corrie", + "Corrine", + "Cortney", + "Cory", + "Courtney", + "Creola", + "Crissy", + "Cristal", + "Cristin", + "Cristina", + "Cristy", + "Cruz", + "Crystal", + "Cydney", + "Cyndi", + "Cynthia", + "Dagmar", + "Dagny", + "Daija", + "Daisha", + "Daisy", + "Dakota", + "Dale", + "Dalia", + "Dallas", + "Damaris", 
+ "Dana", + "Danelle", + "Danette", + "Danica", + "Daniel", + "Daniela", + "Daniella", + "Danielle", + "Danita", + "Danna", + "Danyelle", + "Daphne", + "Dara", + "Darby", + "Darci", + "Darcie", + "Darcy", + "Daria", + "Darian", + "Darla", + "Darleen", + "Darlene", + "Darline", + "Daryl", + "Dasia", + "David", + "Davina", + "Dawn", + "Dawna", + "Dayna", + "Dean", + "Deana", + "Deann", + "Deanna", + "Deanne", + "Deasia", + "Debbi", + "Debbie", + "Debbra", + "Debby", + "Debi", + "Debora", + "Deborah", + "Debra", + "Debrah", + "Dedra", + "Dee", + "Deedee", + "Deena", + "Deidra", + "Deidre", + "Deirdre", + "Deja", + "Dejah", + "Delaney", + "Delfina", + "Delia", + "Delilah", + "Delisa", + "Dell", + "Della", + "Delma", + "Delois", + "Delores", + "Deloris", + "Delpha", + "Delphia", + "Delphine", + "Delta", + "Demetria", + "Demi", + "Dena", + "Deneen", + "Denice", + "Denise", + "Denisse", + "Desirae", + "Desiree", + "Dessie", + "Destany", + "Destinee", + "Destiney", + "Destini", + "Destiny", + "Devan", + "Devin", + "Devon", + "Devyn", + "Diamond", + "Dian", + "Diana", + "Diane", + "Diann", + "Dianna", + "Dianne", + "Dimple", + "Dina", + "Dinah", + "Dionne", + "Dixie", + "Dollie", + "Dolly", + "Dolores", + "Doloris", + "Domenica", + "Dominga", + "Dominique", + "Dominque", + "Domonique", + "Dona", + "Donald", + "Donita", + "Donna", + "Donnie", + "Dora", + "Dorathy", + "Dorcas", + "Doreen", + "Dorene", + "Doretha", + "Dori", + "Dorinda", + "Dorine", + "Doris", + "Dorotha", + "Dorothea", + "Dorothy", + "Dorris", + "Dortha", + "Dorthy", + "Dottie", + "Dovie", + "Drew", + "Drucilla", + "Dulce", + "Earlene", + "Earline", + "Earnestine", + "Easter", + "Ebba", + "Eboni", + "Ebony", + "Eda", + "Eddie", + "Eden", + "Edie", + "Edith", + "Edna", + "Edrie", + "Edward", + "Edwina", + "Edyth", + "Edythe", + "Effie", + "Eileen", + "Elaina", + "Elaine", + "Elayne", + "Elba", + "Elda", + "Eldora", + "Eleanor", + "Eleanora", + "Eleanore", + "Elease", + "Electa", + "Elena", + "Elenor", + 
"Elenora", + "Elfrieda", + "Eliana", + "Elida", + "Elinor", + "Elinore", + "Elisa", + "Elisabeth", + "Elise", + "Elisha", + "Elissa", + "Eliza", + "Elizabeth", + "Ella", + "Ellamae", + "Ellen", + "Ellie", + "Elma", + "Elmira", + "Elna", + "Elnora", + "Elois", + "Eloisa", + "Eloise", + "Elouise", + "Elsa", + "Else", + "Elsie", + "Elta", + "Elva", + "Elvera", + "Elvia", + "Elvie", + "Elvira", + "Elyse", + "Elyssa", + "Elza", + "Emelia", + "Emely", + "Emerald", + "Emilee", + "Emilia", + "Emilie", + "Emily", + "Emma", + "Emmalee", + "Emmie", + "Emmy", + "Emogene", + "Ena", + "Enid", + "Enola", + "Enriqueta", + "Era", + "Eric", + "Erica", + "Ericka", + "Erika", + "Erin", + "Eris", + "Erlene", + "Erlinda", + "Erma", + "Erna", + "Ernestina", + "Ernestine", + "Erykah", + "Eryn", + "Esmeralda", + "Esperanza", + "Essence", + "Essie", + "Esta", + "Estefania", + "Estela", + "Estell", + "Estella", + "Estelle", + "Ester", + "Esther", + "Estrella", + "Etha", + "Ethel", + "Ethelene", + "Ethelyn", + "Ethyl", + "Etta", + "Ettie", + "Eudora", + "Eugenia", + "Eugenie", + "Eula", + "Eulah", + "Eulalia", + "Euna", + "Eunice", + "Eura", + "Eva", + "Evalyn", + "Evangelina", + "Evangeline", + "Eve", + "Eveline", + "Evelyn", + "Evelyne", + "Evette", + "Evie", + "Evon", + "Evonne", + "Exie", + "Fabiola", + "Fae", + "Fairy", + "Faith", + "Fallon", + "Fannie", + "Fanny", + "Farrah", + "Fatima", + "Fay", + "Faye", + "Felecia", + "Felicia", + "Felicita", + "Felicity", + "Felipa", + "Felisha", + "Fern", + "Fernanda", + "Ferne", + "Filomena", + "Fiona", + "Fleta", + "Flo", + "Flora", + "Florence", + "Florene", + "Florida", + "Florine", + "Florrie", + "Flossie", + "Floy", + "Fonda", + "Fran", + "Frances", + "Francesca", + "Franchesca", + "Francheska", + "Francine", + "Francis", + "Francisca", + "Frank", + "Frankie", + "Freda", + "Freddie", + "Freeda", + "Freida", + "Frida", + "Frieda", + "Gabriel", + "Gabriela", + "Gabriella", + "Gabrielle", + "Gail", + "Gale", + "Garnet", + "Garnett", + "Gary", + 
"Gay", + "Gaye", + "Gayla", + "Gayle", + "Gaynell", + "Gearldine", + "Gena", + "Gene", + "Genesis", + "Geneva", + "Genevieve", + "Genoveva", + "George", + "Georgene", + "Georgette", + "Georgia", + "Georgiana", + "Georgianna", + "Georgie", + "Georgina", + "Georgine", + "Geraldine", + "Geralyn", + "Gerda", + "Geri", + "Germaine", + "Gerri", + "Gerry", + "Gertie", + "Gertrude", + "Gianna", + "Gigi", + "Gilda", + "Gillian", + "Gina", + "Ginger", + "Ginny", + "Giovanna", + "Giselle", + "Gisselle", + "Gladyce", + "Gladys", + "Glenda", + "Glenna", + "Glennie", + "Glinda", + "Gloria", + "Glynda", + "Glynis", + "Golda", + "Golden", + "Goldia", + "Goldie", + "Grace", + "Gracie", + "Graciela", + "Grayce", + "Gregoria", + "Greta", + "Gretchen", + "Griselda", + "Guadalupe", + "Gudrun", + "Guillermin", + "Gussie", + "Gwen", + "Gwendolyn", + "Hadley", + "Hailee", + "Hailey", + "Haleigh", + "Haley", + "Hali", + "Halie", + "Halle", + "Hallie", + "Hana", + "Hanna", + "Hannah", + "Harlene", + "Harley", + "Harmony", + "Harold", + "Harriet", + "Harriett", + "Harriette", + "Hassie", + "Hattie", + "Haven", + "Haydee", + "Hayden", + "Haylee", + "Hayley", + "Haylie", + "Hazel", + "Hazle", + "Heather", + "Heaven", + "Hedwig", + "Heidi", + "Helen", + "Helena", + "Helene", + "Helga", + "Hellen", + "Henrietta", + "Henriette", + "Henry", + "Hermina", + "Hermine", + "Herminia", + "Herta", + "Hertha", + "Hester", + "Hettie", + "Hilary", + "Hilda", + "Hildegard", + "Hildegarde", + "Hildred", + "Hildur", + "Hillary", + "Hilma", + "Holli", + "Hollie", + "Holly", + "Hope", + "Hortencia", + "Hortense", + "Hortensia", + "Hulda", + "Hunter", + "Icie", + "Ida", + "Idell", + "Idella", + "Iesha", + "Ila", + "Ileana", + "Ilene", + "Iliana", + "Ima", + "Imani", + "Imelda", + "Imogene", + "Ina", + "India", + "Ines", + "Inez", + "Infant", + "Inga", + "Ingeborg", + "Ingrid", + "Iola", + "Iona", + "Ione", + "Ira", + "Ireland", + "Irene", + "Iris", + "Irma", + "Isabel", + "Isabela", + "Isabell", + "Isabella", + 
"Isabelle", + "Isamar", + "Isis", + "Isobel", + "Itzel", + "Iva", + "Ivah", + "Ivana", + "Ivelisse", + "Ivette", + "Ivonne", + "Ivory", + "Ivy", + "Izabella", + "Izetta", + "Izora", + "Jacalyn", + "Jacey", + "Jack", + "Jackeline", + "Jackie", + "Jacklyn", + "Jaclyn", + "Jacque", + "Jacquelin", + "Jacqueline", + "Jacquelyn", + "Jacquline", + "Jada", + "Jade", + "Jaden", + "Jadyn", + "Jaelyn", + "Jaida", + "Jaiden", + "Jailene", + "Jaime", + "Jaimie", + "Jakayla", + "Jalisa", + "Jalyn", + "James", + "Jami", + "Jamie", + "Jamila", + "Jammie", + "Jan", + "Jana", + "Janae", + "Janay", + "Jane", + "Janeen", + "Janel", + "Janell", + "Janelle", + "Janessa", + "Janet", + "Janette", + "Janice", + "Janie", + "Janine", + "Janis", + "Jann", + "Janna", + "Jannette", + "Jannie", + "Jaqueline", + "Jasmin", + "Jasmine", + "Jasmyn", + "Jason", + "Jaunita", + "Jaycee", + "Jayda", + "Jayde", + "Jayden", + "Jayla", + "Jaylene", + "Jayme", + "Jayne", + "Jazlyn", + "Jazmin", + "Jazmine", + "Jazmyn", + "Jazmyne", + "Jean", + "Jeana", + "Jeane", + "Jeanette", + "Jeanie", + "Jeanine", + "Jeanna", + "Jeanne", + "Jeannette", + "Jeannie", + "Jeannine", + "Jena", + "Jenelle", + "Jenifer", + "Jenna", + "Jennie", + "Jennifer", + "Jenny", + "Jensen", + "Jeraldine", + "Jeri", + "Jerilyn", + "Jerline", + "Jerri", + "Jerrica", + "Jerrie", + "Jerry", + "Jesica", + "Jesse", + "Jessenia", + "Jessi", + "Jessica", + "Jessie", + "Jessika", + "Jesusa", + "Jettie", + "Jewel", + "Jewell", + "Jill", + "Jillian", + "Jimmie", + "Jo", + "Joan", + "Joana", + "Joanie", + "Joann", + "Joanna", + "Joanne", + "Jocelyn", + "Jocelyne", + "Jodi", + "Jodie", + "Jody", + "Joe", + "Joelle", + "Joellen", + "Joetta", + "Joey", + "Johanna", + "John", + "Johnie", + "Johnna", + "Johnnie", + "Jolene", + "Jonathan", + "Joni", + "Jonna", + "Jonnie", + "Jordan", + "Jordyn", + "Joretta", + "Josefa", + "Josefina", + "Joselyn", + "Joseph", + "Josephine", + "Josette", + "Joshua", + "Josie", + "Journey", + "Jovita", + "Joy", + "Joyce", + 
"Joycelyn", + "Juana", + "Juanita", + "Judi", + "Judie", + "Judith", + "Judy", + "Juli", + "Julia", + "Juliana", + "Juliann", + "Julianna", + "Julianne", + "Julie", + "Juliet", + "Juliette", + "Julissa", + "June", + "Justice", + "Justin", + "Justina", + "Justine", + "Kacey", + "Kaci", + "Kacie", + "Kacy", + "Kaela", + "Kaelyn", + "Kaila", + "Kailee", + "Kailey", + "Kailyn", + "Kaitlin", + "Kaitlyn", + "Kaitlynn", + "Kala", + "Kaleigh", + "Kaley", + "Kali", + "Kalie", + "Kallie", + "Kalyn", + "Kameron", + "Kami", + "Kamryn", + "Kandace", + "Kandi", + "Kandice", + "Kandy", + "Kara", + "Karan", + "Karen", + "Kari", + "Karie", + "Karin", + "Karina", + "Karissa", + "Karla", + "Karlee", + "Karlene", + "Karley", + "Karli", + "Karlie", + "Karly", + "Karol", + "Karolyn", + "Karon", + "Karren", + "Karri", + "Karrie", + "Karyn", + "Kasandra", + "Kasey", + "Kassandra", + "Kassidy", + "Kassie", + "Katarina", + "Kate", + "Katelin", + "Katelyn", + "Katelynn", + "Katerina", + "Kathaleen", + "Katharina", + "Katharine", + "Katherine", + "Katheryn", + "Kathi", + "Kathie", + "Kathleen", + "Kathlyn", + "Kathrine", + "Kathryn", + "Kathryne", + "Kathy", + "Kati", + "Katia", + "Katie", + "Katina", + "Katlin", + "Katlyn", + "Katlynn", + "Katrina", + "Kattie", + "Katy", + "Kay", + "Kaycee", + "Kaye", + "Kayla", + "Kaylah", + "Kaylan", + "Kaylee", + "Kayleigh", + "Kaylen", + "Kayley", + "Kayli", + "Kaylie", + "Kaylin", + "Kaylyn", + "Kaylynn", + "Keara", + "Kecia", + "Keeley", + "Keely", + "Keesha", + "Keila", + "Keisha", + "Keishla", + "Kelcie", + "Kelley", + "Kelli", + "Kellie", + "Kelly", + "Kelsea", + "Kelsey", + "Kelsi", + "Kelsie", + "Kendal", + "Kendall", + "Kendra", + "Kenia", + "Kenna", + "Kennedi", + "Kennedy", + "Kenneth", + "Kenya", + "Kenyatta", + "Kenzie", + "Keri", + "Kerri", + "Kerrie", + "Kerry", + "Kesha", + "Keshia", + "Kevin", + "Khadijah", + "Kia", + "Kiana", + "Kianna", + "Kiara", + "Kiarra", + "Kiera", + "Kierra", + "Kiersten", + "Kiley", + "Kim", + "Kimberlee", + 
"Kimberley", + "Kimberli", + "Kimberly", + "Kinsey", + "Kira", + "Kirsten", + "Kirstie", + "Kirstin", + "Kisha", + "Kittie", + "Kitty", + "Kizzy", + "Kori", + "Kortney", + "Kourtney", + "Kris", + "Krista", + "Kristal", + "Kristan", + "Kristen", + "Kristi", + "Kristian", + "Kristie", + "Kristin", + "Kristina", + "Kristine", + "Kristy", + "Kristyn", + "Krysta", + "Krystal", + "Krystina", + "Krystle", + "Kyara", + "Kyla", + "Kyle", + "Kylee", + "Kyleigh", + "Kylie", + "Kyra", + "Lacey", + "Laci", + "Lacie", + "Lacy", + "Ladonna", + "Lakeisha", + "Lakesha", + "Lakeshia", + "Lakisha", + "Lana", + "Lanette", + "Laquita", + "Lara", + "Laraine", + "Larissa", + "Larry", + "Larue", + "Lashanda", + "Lashawn", + "Lashonda", + "Latanya", + "Latasha", + "Latisha", + "Latonia", + "Latonya", + "Latosha", + "Latoya", + "Latrice", + "Latricia", + "Laura", + "Laureen", + "Laurel", + "Lauren", + "Lauretta", + "Laurette", + "Lauri", + "Laurie", + "Laurine", + "Lauryn", + "Lavada", + "Lavera", + "Lavern", + "Laverna", + "Laverne", + "Lavina", + "Lavinia", + "Lavon", + "Lavonne", + "Lawanda", + "Layla", + "Lea", + "Leah", + "Leandra", + "Leann", + "Leanna", + "Leanne", + "Leatha", + "Leatrice", + "Leda", + "Lee", + "Leeann", + "Leesa", + "Leigh", + "Leila", + "Leilani", + "Leisa", + "Lela", + "Lelah", + "Lelia", + "Lempi", + "Lena", + "Lenna", + "Lennie", + "Lenora", + "Lenore", + "Leola", + "Leona", + "Leone", + "Leonor", + "Leonora", + "Leonore", + "Leora", + "Leota", + "Lera", + "Lesa", + "Lesia", + "Lesley", + "Lesli", + "Leslie", + "Lesly", + "Lessie", + "Leta", + "Letha", + "Leticia", + "Letitia", + "Lettie", + "Lexi", + "Lexie", + "Lexis", + "Lexus", + "Lia", + "Liana", + "Libbie", + "Libby", + "Lida", + "Lidia", + "Lila", + "Lilia", + "Lilian", + "Liliana", + "Lilla", + "Lilliam", + "Lillian", + "Lillie", + "Lilly", + "Lily", + "Lilyan", + "Lina", + "Linda", + "Lindsay", + "Lindsey", + "Lindy", + "Linette", + "Linnea", + "Linnie", + "Linsey", + "Lisa", + "Lise", + "Lisette", + 
"Lisha", + "Lissa", + "Lissette", + "Liz", + "Liza", + "Lizabeth", + "Lizbeth", + "Lizeth", + "Lizette", + "Lizzie", + "Logan", + "Lois", + "Lola", + "Lolita", + "Loma", + "Lona", + "London", + "Loni", + "Lonie", + "Lonna", + "Lonnie", + "Lora", + "Loraine", + "Lorelei", + "Loren", + "Lorena", + "Lorene", + "Lorenza", + "Loretta", + "Lori", + "Loriann", + "Lorie", + "Lorine", + "Lorna", + "Lorraine", + "Lorri", + "Lorrie", + "Lottie", + "Lou", + "Louann", + "Louella", + "Louie", + "Louis", + "Louisa", + "Louise", + "Lourdes", + "Louvenia", + "Lovie", + "Loyce", + "Lu", + "Luana", + "Luann", + "Luanne", + "Lucero", + "Lucia", + "Lucie", + "Lucienne", + "Lucila", + "Lucile", + "Lucille", + "Lucinda", + "Lucretia", + "Lucy", + "Ludie", + "Lue", + "Luella", + "Luetta", + "Luisa", + "Lula", + "Lulu", + "Luna", + "Lupe", + "Lura", + "Lurline", + "Luvenia", + "Luz", + "Lyda", + "Lydia", + "Lyla", + "Lyn", + "Lynda", + "Lyndsay", + "Lyndsey", + "Lynette", + "Lynn", + "Lynne", + "Lynnette", + "Lynsey", + "Lyric", + "Mabel", + "Mabelle", + "Mable", + "Macey", + "Machelle", + "Maci", + "Macie", + "Mackenzie", + "Macy", + "Madaline", + "Madalyn", + "Maddison", + "Madeleine", + "Madeline", + "Madelyn", + "Madelynn", + "Madge", + "Madie", + "Madilyn", + "Madisen", + "Madison", + "Madisyn", + "Madonna", + "Madyson", + "Mae", + "Maegan", + "Maeve", + "Mafalda", + "Magdalen", + "Magdalena", + "Magdalene", + "Maggie", + "Magnolia", + "Maia", + "Maida", + "Maira", + "Makala", + "Makayla", + "Makenna", + "Makenzie", + "Malia", + "Malinda", + "Malissa", + "Mallie", + "Mallory", + "Malvina", + "Mamie", + "Mammie", + "Mandi", + "Mandy", + "Manuela", + "Mara", + "Maranda", + "Marcela", + "Marcelina", + "Marceline", + "Marcella", + "Marcelle", + "Marci", + "Marcia", + "Marcie", + "Marcy", + "Margaret", + "Margarete", + "Margarett", + "Margaretta", + "Margarette", + "Margarita", + "Marge", + "Margery", + "Margie", + "Margit", + "Margo", + "Margot", + "Margret", + "Marguerite", + "Margy", + 
"Mari", + "Maria", + "Mariah", + "Mariam", + "Marian", + "Mariana", + "Mariann", + "Marianna", + "Marianne", + "Maribel", + "Maribeth", + "Maricela", + "Marie", + "Mariel", + "Mariela", + "Marietta", + "Marilee", + "Marilou", + "Marilyn", + "Marilynn", + "Marina", + "Marion", + "Marisa", + "Marisela", + "Marisol", + "Marissa", + "Marita", + "Maritza", + "Marjorie", + "Marjory", + "Mark", + "Marla", + "Marlee", + "Marlena", + "Marlene", + "Marley", + "Marlo", + "Marlyn", + "Marlys", + "Marnie", + "Marquita", + "Marsha", + "Marta", + "Martha", + "Martina", + "Marva", + "Marvel", + "Mary", + "Maryann", + "Maryanne", + "Marybelle", + "Marybeth", + "Maryellen", + "Maryjane", + "Maryjo", + "Marylin", + "Marylou", + "Marylyn", + "Mathilda", + "Mathilde", + "Matilda", + "Matilde", + "Matthew", + "Mattie", + "Maud", + "Maude", + "Maudie", + "Maura", + "Maureen", + "Maurine", + "Mavis", + "Maxie", + "Maxine", + "May", + "Maya", + "Maybell", + "Maybelle", + "Maye", + "Mayme", + "Maymie", + "Mayra", + "Mazie", + "Mckayla", + "Mckenna", + "Mckenzie", + "Meagan", + "Meaghan", + "Mechelle", + "Meda", + "Meg", + "Megan", + "Meghan", + "Meghann", + "Melanie", + "Melba", + "Melina", + "Melinda", + "Melisa", + "Melissa", + "Mellie", + "Mellisa", + "Mellissa", + "Melodie", + "Melody", + "Melonie", + "Melva", + "Melvina", + "Meranda", + "Mercedes", + "Meredith", + "Merilyn", + "Merle", + "Merlene", + "Merry", + "Mertie", + "Meryl", + "Meta", + "Mia", + "Micaela", + "Micah", + "Michael", + "Michaela", + "Michele", + "Michelina", + "Michell", + "Michelle", + "Mickey", + "Mickie", + "Migdalia", + "Mikaela", + "Mikala", + "Mikayla", + "Milagros", + "Mildred", + "Millicent", + "Millie", + "Mimi", + "Mina", + "Mindi", + "Mindy", + "Minerva", + "Minna", + "Minnie", + "Miracle", + "Miranda", + "Mireya", + "Miriam", + "Mirta", + "Missouri", + "Missy", + "Misti", + "Misty", + "Mittie", + "Mitzi", + "Miya", + "Modesta", + "Moira", + "Mollie", + "Molly", + "Mona", + "Monica", + "Monika", + 
"Monique", + "Monserrate", + "Montana", + "Morgan", + "Moriah", + "Mossie", + "Mozell", + "Mozella", + "Mozelle", + "Muriel", + "Mya", + "Myah", + "Myra", + "Myranda", + "Myriam", + "Myrl", + "Myrle", + "Myrna", + "Myrtice", + "Myrtie", + "Myrtis", + "Myrtle", + "Nadia", + "Nadine", + "Nakia", + "Nan", + "Nanci", + "Nancie", + "Nancy", + "Nanette", + "Nannette", + "Nannie", + "Naoma", + "Naomi", + "Nashaly", + "Natalia", + "Natalie", + "Nataly", + "Natasha", + "Nathalie", + "Natividad", + "Natosha", + "Nautica", + "Nayeli", + "Nedra", + "Nelda", + "Nelida", + "Nell", + "Nella", + "Nelle", + "Nellie", + "Nelly", + "Neoma", + "Nereida", + "Neta", + "Nettie", + "Neva", + "Nia", + "Nichol", + "Nichole", + "Nicki", + "Nicola", + "Nicole", + "Nicolette", + "Nicolle", + "Niki", + "Nikita", + "Nikki", + "Nikole", + "Nila", + "Nilda", + "Nilsa", + "Nina", + "Nita", + "Noel", + "Noelia", + "Noelle", + "Noemi", + "Nola", + "Nona", + "Nora", + "Norah", + "Noreen", + "Norene", + "Norine", + "Norma", + "Nova", + "Novella", + "Nyasia", + "Nydia", + "Ocie", + "Octavia", + "Oda", + "Odalis", + "Odalys", + "Odell", + "Odessa", + "Odie", + "Ofelia", + "Ola", + "Olene", + "Oleta", + "Olga", + "Olive", + "Olivia", + "Ollie", + "Oma", + "Omayra", + "Ona", + "Onie", + "Opal", + "Ophelia", + "Ora", + "Oralia", + "Orpha", + "Ossie", + "Otha", + "Otilia", + "Ottie", + "Ottilie", + "Ouida", + "Ova", + "Ozella", + "Paige", + "Palma", + "Paloma", + "Pam", + "Pamala", + "Pamela", + "Pamella", + "Pansy", + "Paola", + "Paris", + "Parker", + "Pat", + "Patience", + "Patrica", + "Patrice", + "Patricia", + "Patsy", + "Patti", + "Pattie", + "Patty", + "Paul", + "Paula", + "Pauletta", + "Paulette", + "Paulina", + "Pauline", + "Payton", + "Pearl", + "Pearle", + "Pearlie", + "Pearline", + "Peggie", + "Peggy", + "Penelope", + "Penni", + "Pennie", + "Penny", + "Perla", + "Petra", + "Peyton", + "Phillis", + "Philomena", + "Phoebe", + "Phylis", + "Phyllis", + "Pilar", + "Pinkie", + "Piper", + "Polly", + 
"Porsha", + "Portia", + "Precious", + "Presley", + "Princess", + "Priscilla", + "Providenci", + "Prudence", + "Pura", + "Queen", + "Queenie", + "Quiana", + "Quinn", + "Rachael", + "Racheal", + "Rachel", + "Rachelle", + "Racquel", + "Rae", + "Raegan", + "Rafaela", + "Raina", + "Ramona", + "Ramonita", + "Randi", + "Randy", + "Raquel", + "Raven", + "Ray", + "Rayna", + "Reagan", + "Reanna", + "Reba", + "Rebeca", + "Rebecca", + "Rebekah", + "Regan", + "Regina", + "Reilly", + "Reina", + "Rena", + "Renae", + "Renata", + "Rene", + "Renea", + "Renee", + "Renita", + "Ressie", + "Reta", + "Retha", + "Retta", + "Reva", + "Reyna", + "Rhea", + "Rheta", + "Rhianna", + "Rhiannon", + "Rhoda", + "Rhonda", + "Richard", + "Richelle", + "Rikki", + "Riley", + "Rilla", + "Rita", + "Robbie", + "Robbin", + "Robert", + "Roberta", + "Robin", + "Robyn", + "Rochelle", + "Rocio", + "Rolanda", + "Roma", + "Romaine", + "Romona", + "Rona", + "Ronald", + "Ronda", + "Roni", + "Ronna", + "Ronnie", + "Rosa", + "Rosalee", + "Rosalia", + "Rosalie", + "Rosalina", + "Rosalind", + "Rosalinda", + "Rosaline", + "Rosalyn", + "Rosamond", + "Rosann", + "Rosanna", + "Rosanne", + "Rosario", + "Rosaura", + "Rose", + "Roseann", + "Roseanna", + "Roseanne", + "Rosella", + "Roselyn", + "Rosemarie", + "Rosemary", + "Rosetta", + "Rosia", + "Rosie", + "Rosina", + "Rosita", + "Roslyn", + "Rossie", + "Rowena", + "Roxana", + "Roxane", + "Roxann", + "Roxanna", + "Roxanne", + "Roxie", + "Rozella", + "Rubie", + "Ruby", + "Rubye", + "Ruth", + "Ruthann", + "Ruthe", + "Ruthie", + "Ryan", + "Ryann", + "Rylee", + "Ryleigh", + "Rylie", + "Sabina", + "Sabrina", + "Sade", + "Sadie", + "Sadye", + "Sage", + "Saige", + "Salina", + "Sallie", + "Sally", + "Salma", + "Salome", + "Samantha", + "Samara", + "Sammie", + "Sandi", + "Sandra", + "Sandy", + "Santa", + "Santana", + "Santina", + "Santos", + "Sara", + "Sarah", + "Sarai", + "Sarina", + "Sasha", + "Saundra", + "Savana", + "Savanah", + "Savanna", + "Savannah", + "Scarlett", + "Selena", + 
"Selina", + "Selma", + "Serena", + "Serenity", + "Serina", + "Shae", + "Shaina", + "Shakira", + "Shalonda", + "Shameka", + "Shamika", + "Shana", + "Shanda", + "Shani", + "Shania", + "Shanice", + "Shanika", + "Shaniqua", + "Shanita", + "Shaniya", + "Shanna", + "Shannan", + "Shannon", + "Shanon", + "Shanta", + "Shante", + "Shantel", + "Shantell", + "Shara", + "Sharen", + "Shari", + "Sharla", + "Sharlene", + "Sharon", + "Sharonda", + "Sharron", + "Sharyn", + "Shasta", + "Shauna", + "Shawanda", + "Shawn", + "Shawna", + "Shayla", + "Shaylee", + "Shayna", + "Shea", + "Sheena", + "Sheila", + "Shelba", + "Shelbi", + "Shelbie", + "Shelby", + "Shelia", + "Shelley", + "Shelli", + "Shellie", + "Shelly", + "Shelva", + "Sheree", + "Sheri", + "Sherie", + "Sherita", + "Sheron", + "Sherri", + "Sherrie", + "Sherrill", + "Sherron", + "Sherry", + "Sherryl", + "Sheryl", + "Shianne", + "Shiela", + "Shirlee", + "Shirlene", + "Shirley", + "Shonda", + "Shonna", + "Shyann", + "Shyanne", + "Shyla", + "Sibyl", + "Sidney", + "Sienna", + "Sierra", + "Signe", + "Sigrid", + "Silvia", + "Simone", + "Siobhan", + "Sister", + "Sky", + "Skye", + "Skyla", + "Skylar", + "Skyler", + "Socorro", + "Sofia", + "Soledad", + "Sondra", + "Sonia", + "Sonja", + "Sonya", + "Sophia", + "Sophie", + "Spring", + "Stacey", + "Staci", + "Stacia", + "Stacie", + "Stacy", + "Star", + "Starla", + "Starr", + "Stasia", + "Stefani", + "Stefanie", + "Stella", + "Stephani", + "Stephania", + "Stephanie", + "Stephany", + "Stephenie", + "Stevie", + "Stormy", + "Sudie", + "Sue", + "Suellen", + "Summer", + "Sunny", + "Sunshine", + "Susan", + "Susana", + "Susann", + "Susanna", + "Susanne", + "Susie", + "Suzan", + "Suzann", + "Suzanna", + "Suzanne", + "Suzette", + "Suzy", + "Sybil", + "Syble", + "Sydnee", + "Sydney", + "Sydni", + "Sydnie", + "Sylvia", + "Tabatha", + "Tabitha", + "Talia", + "Tamala", + "Tamara", + "Tamatha", + "Tameka", + "Tamela", + "Tamera", + "Tami", + "Tamia", + "Tamie", + "Tamika", + "Tamiko", + "Tammi", + 
"Tammie", + "Tammy", + "Tamra", + "Tana", + "Tanesha", + "Tangela", + "Tania", + "Tanika", + "Tanisha", + "Tanya", + "Tara", + "Tarah", + "Tarsha", + "Taryn", + "Tasha", + "Tasia", + "Tatiana", + "Tatianna", + "Tatum", + "Tatyana", + "Tatyanna", + "Tawana", + "Tawanda", + "Tawanna", + "Tawny", + "Tawnya", + "Taya", + "Tayla", + "Tayler", + "Taylor", + "Tea", + "Teagan", + "Teena", + "Tena", + "Tenisha", + "Tennie", + "Tennille", + "Tera", + "Teresa", + "Terese", + "Teresita", + "Teressa", + "Teri", + "Terra", + "Terri", + "Terrie", + "Terry", + "Tess", + "Tessa", + "Tessie", + "Thalia", + "Thea", + "Theda", + "Thelma", + "Theo", + "Theodora", + "Theola", + "Theresa", + "Therese", + "Theresia", + "Thomas", + "Thora", + "Thyra", + "Tia", + "Tiana", + "Tianna", + "Tiara", + "Tiera", + "Tierra", + "Tiffani", + "Tiffanie", + "Tiffany", + "Tillie", + "Tina", + "Tisha", + "Toby", + "Tomasa", + "Tomeka", + "Tomika", + "Tommie", + "Toni", + "Tonia", + "Tonja", + "Tonya", + "Tori", + "Tosha", + "Toya", + "Tracey", + "Traci", + "Tracie", + "Tracy", + "Trena", + "Tresa", + "Tressa", + "Tressie", + "Treva", + "Tricia", + "Trina", + "Trinidad", + "Trinity", + "Trisha", + "Trista", + "Tristan", + "Trudie", + "Trudy", + "Twila", + "Twyla", + "Tyesha", + "Tyler", + "Tyra", + "Una", + "Unique", + "Ursula", + "Vada", + "Valarie", + "Valencia", + "Valentina", + "Valentine", + "Valeria", + "Valerie", + "Vallie", + "Valorie", + "Vanesa", + "Vanessa", + "Veda", + "Velda", + "Vella", + "Velma", + "Velva", + "Vena", + "Venessa", + "Venita", + "Venus", + "Vera", + "Verda", + "Verdie", + "Vergie", + "Verla", + "Verlie", + "Verna", + "Vernell", + "Vernice", + "Vernie", + "Verona", + "Veronica", + "Versie", + "Vesta", + "Vicenta", + "Vickey", + "Vicki", + "Vickie", + "Vicky", + "Victoria", + "Vida", + "Vikki", + "Vilma", + "Vina", + "Vincenza", + "Vinnie", + "Viola", + "Violet", + "Violette", + "Virgie", + "Virgil", + "Virginia", + "Vita", + "Viva", + "Vivian", + "Viviana", + "Vivien", + 
"Vivienne", + "Vonda", + "Vonnie", + "Waleska", + "Walter", + "Wanda", + "Waneta", + "Wanita", + "Wava", + "Wendi", + "Wendy", + "Whitley", + "Whitney", + "Wilda", + "Wilhelmina", + "Wilhelmine", + "Willa", + "Willene", + "Willia", + "William", + "Willie", + "Willow", + "Wilma", + "Windy", + "Winifred", + "Winnie", + "Winnifred", + "Winona", + "Wynona", + "Xiomara", + "Yadira", + "Yahaira", + "Yajaira", + "Yanira", + "Yaritza", + "Yasmeen", + "Yasmin", + "Yasmine", + "Yazmin", + "Yesenia", + "Yessenia", + "Yetta", + "Yolanda", + "Yolonda", + "Yoshiko", + "Yulissa", + "Yvette", + "Yvonne", + "Zaida", + "Zaria", + "Zelda", + "Zella", + "Zelma", + "Zena", + "Zetta", + "Zina", + "Zita", + "Zoe", + "Zoey", + "Zoie", + "Zoila", + "Zola", + "Zona", + "Zora", + "Zoraida", + "Zula", + "Zulma", +) diff --git a/tests/neg/i7056.scala b/tests/pos/i7056.scala similarity index 84% rename from tests/neg/i7056.scala rename to tests/pos/i7056.scala index d16aa949000e..a347dfe7b519 100644 --- a/tests/neg/i7056.scala +++ b/tests/pos/i7056.scala @@ -16,4 +16,5 @@ given [T <: A](using PartialId[T]): T1[T] = new T1[T] { given PartialId[B] = ??? val x: B = ??? -val z = x.idnt1 // error +val z = x.idnt1 // used to be an error, now ok + diff --git a/tests/pos/i7304.scala b/tests/pos/i7304.scala new file mode 100644 index 000000000000..e8ee873a9a5a --- /dev/null +++ b/tests/pos/i7304.scala @@ -0,0 +1,9 @@ +@main def test = + val myregex_r = "\\s+".r + val text = "adggfgf dfg" + myregex_r findFirstMatchIn text + text takeRight 5 + val func = (a: Int) => a + 1 + List(1,2, 3) map func + text stripPrefix "adgg" + diff --git a/tests/pos/i7745.scala b/tests/pos/i7745.scala deleted file mode 100644 index de03d3995d33..000000000000 --- a/tests/pos/i7745.scala +++ /dev/null @@ -1,3 +0,0 @@ -trait F[x] -implicit def foo[f[_], y, x <: f[y]](implicit ev: F[y]): F[x] = ??? 
-val test = implicitly \ No newline at end of file diff --git a/tests/pos/i7888.scala b/tests/pos/i7888.scala new file mode 100644 index 000000000000..d188bb23e72e --- /dev/null +++ b/tests/pos/i7888.scala @@ -0,0 +1,8 @@ +def usingSeq[B](f: [A] => Seq[A] => B): B = { + f(Nil) +} +def crash() = { + usingSeq { [A] => (a: Seq[A]) => + a + } +} diff --git a/tests/pos/i7894.scala b/tests/pos/i7894.scala new file mode 100644 index 000000000000..74cd3e3cb247 --- /dev/null +++ b/tests/pos/i7894.scala @@ -0,0 +1,16 @@ +case class Box[T](t: T) + +type Boxed[T <: Tuple] <: Tuple = T match { + case EmptyTuple => EmptyTuple + case h *: t => Box[h] *: Boxed[t] +} + +trait Cmp[T <: Tuple] { def cmp(t: T, b: Boxed[T]): Boolean } + +object UnitCmp extends Cmp[EmptyTuple] { + def cmp(t: EmptyTuple, b: EmptyTuple): Boolean = true +} + +object UnitCmp2 extends Cmp[EmptyTuple] { + def cmp(t: EmptyTuple, b: Boxed[EmptyTuple]): Boolean = true +} diff --git a/tests/pos/i864.scala b/tests/pos/i864.scala index 8d2b859998e9..b372d00a569b 100644 --- a/tests/pos/i864.scala +++ b/tests/pos/i864.scala @@ -6,5 +6,5 @@ object C { trait X[T] implicit def u[A, B]: X[A | B] = new X[A | B] {} def y[T](implicit x: X[T]): T = ??? - val x: a.type & b.type | b.type & c.type = y + val x: a.type & b.type | b.type & c.type = y // error } diff --git a/tests/pos/i8666.scala b/tests/pos/i8666.scala new file mode 100644 index 000000000000..307a4617b172 --- /dev/null +++ b/tests/pos/i8666.scala @@ -0,0 +1,10 @@ +class Foo[A, B]() + +type FooSnd[X] = X match + case Foo[_, b] => b + +trait Bar[A]: + def bar(h: FooSnd[A]): Int + +val foo: Bar[Foo[String, Int]] = new Bar[Foo[String, Int]]: + def bar(h: FooSnd[Foo[String, Int]]) = h diff --git a/tests/pos/i8802a.scala b/tests/pos/i8802a.scala new file mode 100644 index 000000000000..9e3176b2557c --- /dev/null +++ b/tests/pos/i8802a.scala @@ -0,0 +1,18 @@ +trait Foo[A1, B1] { + type Out +} + +object Test { + + type Bar[A2] + + def unit: Bar[Unit] = ??? 
+ def product[A3, B3](fst: Bar[A3], snd: Bar[B3])(implicit foo: Foo[A3, B3]): Bar[foo.Out] = ??? + + implicit def foo[A4]: Foo[A4, Unit] { type Out = A4 } = ??? + + def check[A5](bar: Bar[A5])(a: A5): Unit = {} + + check(product(unit, unit)) // ok + check(product(unit, unit)(summon[Foo[Unit, Unit]]))(()) // error +} \ No newline at end of file diff --git a/tests/pos/i8833.scala b/tests/pos/i8833.scala new file mode 100644 index 000000000000..3bb6fabe1e26 --- /dev/null +++ b/tests/pos/i8833.scala @@ -0,0 +1,4 @@ +@main def Test = { + implicitly[("k" | "v") <:< ("k" | "v")] + implicitly[("k" | "v") =:= ("k" | "v")] +} diff --git a/tests/pos/i8900-cycle.scala b/tests/pos/i8900-cycle.scala new file mode 100644 index 000000000000..3b6ae214cb42 --- /dev/null +++ b/tests/pos/i8900-cycle.scala @@ -0,0 +1,15 @@ +trait Contra[-A] +trait Cov[+B] + +trait Test { + def foo[S](x: S): S + def rec1[T <: Cov[T]]: Contra[T] + def rec2[T <: Cov[U], U <: T]: Contra[T] + + val a = foo({ + rec1 + }) + val b = foo({ + rec2 + }) +} diff --git a/tests/pos/i8900-polyfunction.scala b/tests/pos/i8900-polyfunction.scala new file mode 100644 index 000000000000..2836283b3c65 --- /dev/null +++ b/tests/pos/i8900-polyfunction.scala @@ -0,0 +1,5 @@ +object Test { + def f[F](f: [t] => t => F): Unit = () + + f([t] => (x: t) => x) +} diff --git a/tests/pos/i8900-promote.scala b/tests/pos/i8900-promote.scala new file mode 100644 index 000000000000..7d3a2ff96bed --- /dev/null +++ b/tests/pos/i8900-promote.scala @@ -0,0 +1,18 @@ +class Inv[A <: Singleton](x: A) +object Inv { + def empty[A <: Singleton]: Inv[A] = new Inv(???) +} + +class Inv2[A](x: A) +object Inv2 { + def empty[A]: Inv2[A] = new Inv2(???) 
+} + +object Test { + def inv(cond: Boolean) = // used to leak: Inv[x.type] + if (cond) + val x: Int = 1 + new Inv(x) + else + Inv.empty +} diff --git a/tests/pos/i8900-unflip.scala b/tests/pos/i8900-unflip.scala new file mode 100644 index 000000000000..568e9d22530e --- /dev/null +++ b/tests/pos/i8900-unflip.scala @@ -0,0 +1,18 @@ +// Minimized from PLens.scala in scalaz + +class PLensFamily[A1, A2, B1, B2] + +class LazyOption[A3] { + def fold[X](some: (=> A3) => X, none: => X): X = ??? +} +class IndexedStore[I, A4, B4](run: (A4 => B4, I)) + +object PL { + + def plensFamily[A1x, A2x, B1x, B2x](r: A1x => Option[IndexedStore[B1x, B2x, A2x]]): PLensFamily[A1x, A2x, B1x, B2x] = ??? + + def lazySome[T](a: => T): LazyOption[T] = ??? + + def lazySomePLensFamily[A1y, A2y]: PLensFamily[LazyOption[A1y], LazyOption[A2y], A1y, A2y] = + plensFamily(_.fold(z => Some(IndexedStore(lazySome(_), z)), None)) +} diff --git a/tests/pos/i8900a.scala b/tests/pos/i8900a.scala new file mode 100644 index 000000000000..d34b3dc34516 --- /dev/null +++ b/tests/pos/i8900a.scala @@ -0,0 +1,11 @@ +class Inv[T](val elem: T) +object Test { + def unwrap[Outer](inv: Inv[Outer]): Outer = inv.elem + def wrap[Inner](i: Inner): Inv[Inner] = new Inv(i) + + val a = unwrap({ + class Local + val local = new Local + wrap(local) + }) +} diff --git a/tests/pos/i8900a2.scala b/tests/pos/i8900a2.scala new file mode 100644 index 000000000000..50a263ff7cc2 --- /dev/null +++ b/tests/pos/i8900a2.scala @@ -0,0 +1,12 @@ +class ContraCo[-T, +S](val t: S) +class CoContra[+T, -S](val t: T) +object Test { + def unwrap[Outer](inv: CoContra[Outer, Outer]): Outer = inv.t + def wrap[Inner](i: Inner): CoContra[Inner, Inner] = new CoContra(i) + + val a = unwrap({ + class Local + val local = new Local + wrap(local) + }) +} diff --git a/tests/pos/i8900a3.scala b/tests/pos/i8900a3.scala new file mode 100644 index 000000000000..d43f19fcaab6 --- /dev/null +++ b/tests/pos/i8900a3.scala @@ -0,0 +1,13 @@ +class ContraCo[-T, +S](val t: S) 
+class CoContra[+T, -S](val t: T) +object Test { + type Id[T] = T + def unwrap[Outer](inv: CoContra[Outer, Outer]): Outer = inv.t + def wrap[Inner](i: Inner): CoContra[Id[Inner], Id[Inner]] = new CoContra(i) + + val a = unwrap({ + class Local + val local = new Local + wrap(local) + }) +} diff --git a/tests/pos/i8921.scala b/tests/pos/i8921.scala new file mode 100644 index 000000000000..19fe6ac17650 --- /dev/null +++ b/tests/pos/i8921.scala @@ -0,0 +1,7 @@ +type R[F[_], A] =[B] => (A => F[B]) => F[B] + +type M[F[_]] =[A, B] => (A => F[B]) => F[A] => F[B] + +def mr[F[_]]: M[[A] =>> R[F, A]] = + [A, B] => (f: A => R[F, B]) => (m: R[F, A]) => + [C] => (k: B => F[C]) => m(a => f(a)(k)) \ No newline at end of file diff --git a/tests/pos/i9363.scala b/tests/pos/i9363.scala new file mode 100644 index 000000000000..183779349944 --- /dev/null +++ b/tests/pos/i9363.scala @@ -0,0 +1,8 @@ +class Foo: + protected inline def fooProtected: Int = 12 + +object Foo extends Foo: + inline def foo: Int = fooProtected + +object Bar: + def bar: Int = Foo.foo diff --git a/tests/pos/i9482.scala b/tests/pos/i9482.scala new file mode 100644 index 000000000000..6549539e49a7 --- /dev/null +++ b/tests/pos/i9482.scala @@ -0,0 +1,11 @@ +import scala.reflect.OptManifest + +object Ref { + def make[A: OptManifest]: Ref[A] = ??? +} +trait Ref[A] + +trait Foo[A] { + val bar = Ref.make[Int] + val baz: Ref[A] = Ref.make +} diff --git a/tests/pos/i9531.scala b/tests/pos/i9531.scala new file mode 100644 index 000000000000..ff82f8e03e43 --- /dev/null +++ b/tests/pos/i9531.scala @@ -0,0 +1,10 @@ +trait Scope: + type Expr[+T] + +def exprQuote[T](x: T)(using s: Scope, dummy: Null = null): s.Expr[T] = ??? +def exprQuote[T <: Singleton](x: T)(using s: Scope): s.Expr[T] = ??? 
+ +def test(using s: Scope): Unit = + val t1: s.Expr[1] = exprQuote(1) + val t2 = exprQuote(1) + val t3: s.Expr[1] = t2 diff --git a/tests/pos/i9740b.scala b/tests/pos/i9740b.scala deleted file mode 100644 index 412e8a95dc27..000000000000 --- a/tests/pos/i9740b.scala +++ /dev/null @@ -1,11 +0,0 @@ -sealed trait Exp[T] -case class IntExp(x: Int) extends Exp[Int] -case class StrExp(x: String) extends Exp[String] -object UnitExp extends Exp[Unit] - -class Foo[U <: Int, T <: U] { - def bar[A <: T](x: Exp[A]): Unit = x match - case IntExp(x) => - case StrExp(x) => - case UnitExp => -} \ No newline at end of file diff --git a/tests/pos/i9769.scala b/tests/pos/i9769.scala new file mode 100644 index 000000000000..1c57b4bb94d3 --- /dev/null +++ b/tests/pos/i9769.scala @@ -0,0 +1,9 @@ +object Main { + val lifeOfPi = 3.14159 + val fInterpolator = f"The approximate value of pi is $lifeOfPi%4.2f" + + def main(args: Array[String]): Unit = { + println(fInterpolator) + } + +} diff --git a/tests/pos/i9833.scala b/tests/pos/i9833.scala new file mode 100644 index 000000000000..4e5594707831 --- /dev/null +++ b/tests/pos/i9833.scala @@ -0,0 +1,7 @@ +object Main extends App: + enum Extends[A, B]: + case Ev[B, A <: B]() extends (A Extends B) + + def cast(a: A): B = this match { + case Extends.Ev() => a + } diff --git a/tests/pos/indent.scala b/tests/pos/indent.scala index 03502a26fe79..6e027e1db085 100644 --- a/tests/pos/indent.scala +++ b/tests/pos/indent.scala @@ -82,6 +82,14 @@ object Test: x < 10 do () + def sign(x: Int): Int = + if (x > 0) 1 + else if (x < 0) - 1 // was: value - is not a member of Boolean + else 0 + def lessPersuasively(x: Int): Unit = + while (x < 0) - 42 // was: value - is not a member of Boolean +end Test + class Test2: self => def foo(x: Int) = @@ -147,4 +155,4 @@ class Coder(words: List[String]): end Coder object Test22: - def foo: Int = 22 \ No newline at end of file + def foo: Int = 22 diff --git a/tests/pos/inline3.scala b/tests/pos/inline3.scala new file 
mode 100644 index 000000000000..d702ca4d7d47 --- /dev/null +++ b/tests/pos/inline3.scala @@ -0,0 +1,25 @@ +object K0 { + + type T = String + + opaque type ProductInstances[F[_], T] = ErasedProductInstances[F[T]] + + inline def summonAsArray[F[_], T]: Array[Any] = ??? + inline def mkProductInstances[F[_], T]: ProductInstances[F, T] = + new ErasedProductInstances(summonAsArray[F, T]).asInstanceOf[ProductInstances[F, T]] + + val x: T = "" + + inline def foo(x: T): T = "foo".asInstanceOf[T] +} + +final class ErasedProductInstances[FT](is0: => Array[Any]) + +trait Monoid[A] +case class ISB(i: Int) + +object Test { + K0.foo(K0.x) + K0.mkProductInstances[Monoid, ISB] + +} \ No newline at end of file diff --git a/tests/pos/inlined-the.scala b/tests/pos/inlined-the.scala index 5a3c43287724..96fec9af958a 100644 --- a/tests/pos/inlined-the.scala +++ b/tests/pos/inlined-the.scala @@ -5,7 +5,7 @@ object Instances { class C { def f() = { locally { - given d[T]: D[T] with {} + given d[T]: D[T]() summon[D[Int]] implicit val s: 3 = ??? val a: 3 = summon[3] @@ -14,7 +14,7 @@ object Instances { } locally { - given d[T]: D[T] with {} + given d[T]: D[T]() the2[D[Int]] implicit val s: 3 = ??? 
val a: 3 = the2[3] diff --git a/tests/pos/java-inherited-type/Client.scala b/tests/pos/java-inherited-type/Client.scala new file mode 100644 index 000000000000..a644363cdd4c --- /dev/null +++ b/tests/pos/java-inherited-type/Client.scala @@ -0,0 +1,19 @@ +object Client { + def test= { + Test.Outer.Nested.sig + Test.Outer.Nested.sig1 + Test.Outer.Nested.sig2 + val o = new Test.Outer + new o.Nested1().sig + new o.Nested1().sig1 + new o.Nested1().sig2 + } + + def test1 = { + val t = new Test + val o = new t.Outer1 + new o.Nested1().sig + new o.Nested1().sig1 + new o.Nested1().sig2 + } +} diff --git a/tests/pos/java-inherited-type/Test.java b/tests/pos/java-inherited-type/Test.java new file mode 100644 index 000000000000..ae89a6559a67 --- /dev/null +++ b/tests/pos/java-inherited-type/Test.java @@ -0,0 +1,30 @@ +public class Test { + static class OuterBase implements OuterBaseInterface { + static class StaticInner {} + class Inner {} + } + interface OuterBaseInterface { + interface InnerFromInterface {} + } + public static class Outer extends OuterBase { + public static class Nested { + public static P sig; // was: "type StaticInner", "not found: type Inner", "not found: type InnerFromInterface" + public static P sig1; // was: "type StaticInner is not a member of Test.Outer" + public static P sig2; + + } + public class Nested1 { + public P sig; // was: "not found: type StaticInner" + public P sig1; // was: "type StaticInner is not a member of Test.Outer" + public P sig2; + } + } + public class Outer1 extends OuterBase { + public class Nested1 { + public P sig; // was: "not found: type StaticInner" + public P sig1; // was: "type StaticInner is not a member of Test.Outer" + public P sig2; + } + } + public static class P{} +} diff --git a/tests/pos/java-inherited-type1/J.java b/tests/pos/java-inherited-type1/J.java new file mode 100644 index 000000000000..ba9963104699 --- /dev/null +++ b/tests/pos/java-inherited-type1/J.java @@ -0,0 +1,9 @@ +class J extends S { + // These 
references all work in Javac because `object O { class I }` erases to `O$I` + + void select1(S1.Inner1 i) { new S1.Inner1(); } + void ident(Inner i) {} + + void ident1(Inner1 i) {} + void select(S.Inner i) { new S.Inner(); } +} diff --git a/tests/pos/java-inherited-type1/S.scala b/tests/pos/java-inherited-type1/S.scala new file mode 100644 index 000000000000..155efc0e06ba --- /dev/null +++ b/tests/pos/java-inherited-type1/S.scala @@ -0,0 +1,9 @@ +class S extends S1 +object S { + class Inner +} + +class S1 +object S1 { + class Inner1 +} diff --git a/tests/pos/java-inherited-type1/Test.scala b/tests/pos/java-inherited-type1/Test.scala new file mode 100644 index 000000000000..082167342a02 --- /dev/null +++ b/tests/pos/java-inherited-type1/Test.scala @@ -0,0 +1,13 @@ +object Test { + val j = new J + // force completion of these signatures + j.ident(null); + j.ident1(null); + j.select(null); + j.select1(null); + + val message:TestMessage = null + val builder:TestMessage.Builder = message.toBuilder + builder.setName("name") + +} diff --git a/tests/pos/java-inherited-type1/TestMessage.java b/tests/pos/java-inherited-type1/TestMessage.java new file mode 100644 index 000000000000..fac373e302f7 --- /dev/null +++ b/tests/pos/java-inherited-type1/TestMessage.java @@ -0,0 +1,17 @@ +abstract class AbstractMessage { + public static abstract class Builder> { + } +} + +class TestMessage extends AbstractMessage { + + public Builder toBuilder() { + return null; + } + + public static class Builder extends AbstractMessage.Builder { + public Builder setName(String name) { + return this; + } + } +} diff --git a/tests/pos/manifest-summoning.scala b/tests/pos/manifest-summoning.scala new file mode 100644 index 000000000000..e9840d33b8c8 --- /dev/null +++ b/tests/pos/manifest-summoning.scala @@ -0,0 +1,22 @@ +object Foo { + + object opaques { + opaque type Inner = String + val i: Inner = "i" + } + + val singleton: opaques.Inner = opaques.i + + val om_Inner = optManifest[opaques.Inner] // 
NoManifest + val om_singleton = optManifest[singleton.type] // NoManifest + val ct_Inner = reflect.classTag[opaques.Inner] + val ct_singleton = reflect.classTag[singleton.type] +} + +val `List[Nothing]` = manifest[List[Nothing]] +val `List[Array[Nothing]]` = manifest[List[Array[Nothing]]] // ok when Nothing is not the argument of top-level array + +val `Array[Array[List[Int]]]` = manifest[Array[Array[List[Int]]]] + +trait Mixin[T <: Mixin[T]] { type Self = T } +class Baz extends Mixin[Baz] { val m = manifest[Self] } diff --git a/tests/pos/multi-given.scala b/tests/pos/multi-given.scala index 3be8bece1311..5553df36f1c9 100644 --- a/tests/pos/multi-given.scala +++ b/tests/pos/multi-given.scala @@ -5,6 +5,6 @@ trait C def fancy(using a: A, b: B, c: C) = "Fancy!" def foo(implicit a: A, b: B, c: C) = "foo" -given A with B with {} +given A() with B -given ops: A with B with {} \ No newline at end of file +given ops: A() with B() diff --git a/tests/pos/opaque-inline.scala b/tests/pos/opaque-inline.scala new file mode 100644 index 000000000000..a0279f02fbfe --- /dev/null +++ b/tests/pos/opaque-inline.scala @@ -0,0 +1,20 @@ + +object refined: + opaque type Positive = Int + + object Positive extends PositiveFactory + + trait PositiveFactory: + inline def apply(value: Int): Positive = value + + def f(x: Positive): Positive = x + inline def fapply(value: Int): Positive = + val vv = (value, value) // error: implementation restriction + f(vv._1) + +@main def run: Unit = + import refined.* + val x = 9 + val nine = Positive.apply(x) + val nine1 = Positive.fapply(x) + diff --git a/tests/pos/opaque-inline1-transparent.scala b/tests/pos/opaque-inline1-transparent.scala new file mode 100644 index 000000000000..9f05bcc34f85 --- /dev/null +++ b/tests/pos/opaque-inline1-transparent.scala @@ -0,0 +1,11 @@ + +object refined: + opaque type Positive = Int + transparent inline def Positive(value: Int): Positive = f(value) + def f(x: Positive): Positive = x + +object test: + def run: Unit = + 
val x = 9 + val nine = refined.Positive(x) + diff --git a/tests/pos/opaque-inline1.scala b/tests/pos/opaque-inline1.scala new file mode 100644 index 000000000000..b30eeafdbe77 --- /dev/null +++ b/tests/pos/opaque-inline1.scala @@ -0,0 +1,13 @@ + +object refined: + opaque type Positive = Int + inline def Positive(value: Int): Positive = f(value) + transparent inline def TPositive(value: Int): Positive = f(value) + def f(x: Positive): Positive = x + +object test: + def run: Unit = + val x = 9 + val nine = refined.Positive(x) + val tnine: refined.Positive = refined.TPositive(x) + diff --git a/tests/pos/opaque-inline2-transparent.scala b/tests/pos/opaque-inline2-transparent.scala new file mode 100644 index 000000000000..b0474c57b19e --- /dev/null +++ b/tests/pos/opaque-inline2-transparent.scala @@ -0,0 +1,28 @@ + +import compiletime.* + +object refined: + opaque type Positive = Int + + object Positive extends PositiveFactory + + trait PositiveFactory: + transparent inline def apply(inline value: Int): Positive = + inline if value < 0 then error(codeOf(value) + " is not positive.") + else value + + transparent inline def safe(value: Int): Positive | IllegalArgumentException = + if value < 0 then IllegalArgumentException(s"$value is not positive") + else value: Positive + +@main def Test: Unit = + import refined.* + val eight = Positive(8) + // val negative = Positive(-1) // This correctly produces a compile error "-1 is not positive." 
+ // val random = Positive(scala.util.Random.nextInt()) // This correctly produces a compile error about being unable to inline the method call + val random = Positive.safe(scala.util.Random.nextInt()) + val safeNegative = Positive.safe(-1) + val safeFive = Positive.safe(5) + println(eight) + println(random) + println(safeFive) \ No newline at end of file diff --git a/tests/pos/opaque-inline2.check b/tests/pos/opaque-inline2.check new file mode 100644 index 000000000000..1a3888c02635 --- /dev/null +++ b/tests/pos/opaque-inline2.check @@ -0,0 +1,3 @@ +8 +java.lang.IllegalArgumentException: -1502782350 is not positive +5 diff --git a/tests/pos/opaque-inline2.scala b/tests/pos/opaque-inline2.scala new file mode 100644 index 000000000000..fda5f6756827 --- /dev/null +++ b/tests/pos/opaque-inline2.scala @@ -0,0 +1,28 @@ + +import compiletime.* + +object refined: + opaque type Positive = Int + + object Positive extends PositiveFactory + + trait PositiveFactory: + inline def apply(inline value: Int): Positive = + inline if value < 0 then error(codeOf(value) + " is not positive.") + else value + + transparent inline def safe(value: Int): Positive | IllegalArgumentException = + if value < 0 then IllegalArgumentException(s"$value is not positive") + else value: Positive + +@main def Test: Unit = + import refined.* + val eight = Positive(8) + // val negative = Positive(-1) // This correctly produces a compile error "-1 is not positive." 
+ // val random = Positive(scala.util.Random.nextInt()) // This correctly produces a compile error about being unable to inline the method call + val random = Positive.safe(scala.util.Random.nextInt()) + val safeNegative = Positive.safe(-1) + val safeFive = Positive.safe(5) + println(eight) + println(random) + println(safeFive) \ No newline at end of file diff --git a/tests/pos/pickleSkolem.scala b/tests/pos/pickleSkolem.scala new file mode 100644 index 000000000000..852b1cee65ba --- /dev/null +++ b/tests/pos/pickleSkolem.scala @@ -0,0 +1,15 @@ +class A + +type B = A + +def f[T](x: T): x.type & T = ??? + +def g = { + var a: B = ??? + f[A](a) +} + +def testNN = { + var s: String = ??? + s.nn +} diff --git a/tests/pos/pos_valueclasses/t5953.scala b/tests/pos/pos_valueclasses/t5953.scala index 84e2243d1d6d..e05ec5b48976 100644 --- a/tests/pos/pos_valueclasses/t5953.scala +++ b/tests/pos/pos_valueclasses/t5953.scala @@ -1,3 +1,5 @@ +package t5953 + trait CBF[-F, -A, +C] trait GenTraversable[+A] trait Traversable[+A] extends GenTraversable[A] diff --git a/tests/pos/preserve-union.scala b/tests/pos/preserve-union.scala new file mode 100644 index 000000000000..a56fcbef1b84 --- /dev/null +++ b/tests/pos/preserve-union.scala @@ -0,0 +1,7 @@ +class A { + val a: Int | String = 1 + val b: AnyVal = 2 + + val c = List(a, b) + val c1: List[AnyVal | String] = c +} diff --git a/tests/pos/reference/delegates.scala b/tests/pos/reference/delegates.scala index 2a1594192761..1cc7a29eee7d 100644 --- a/tests/pos/reference/delegates.scala +++ b/tests/pos/reference/delegates.scala @@ -110,11 +110,11 @@ object Instances extends Common: println(summon[Context].value) } locally { - given d[T]: D[T] with {} + given d[T]: D[T]() println(summon[D[Int]]) } locally { - given (using Context): D[Int] with {} + given (using Context): D[Int]() println(summon[D[Int]]) } end C diff --git a/tests/pos/reference/extension-methods.scala b/tests/pos/reference/extension-methods.scala index 
3dacf787a436..64fd23322c1b 100644 --- a/tests/pos/reference/extension-methods.scala +++ b/tests/pos/reference/extension-methods.scala @@ -70,7 +70,7 @@ object ExtMethods: end SafeDiv def test1 = - given ops1: IntOps with {} // brings safeMod into scope + given ops1: IntOps() // brings safeMod into scope 1.safeMod(2) class Lst[T](xs: T*): diff --git a/tests/pos/reference/inlines.scala b/tests/pos/reference/inlines.scala index a1eb215b4d22..657961671efd 100644 --- a/tests/pos/reference/inlines.scala +++ b/tests/pos/reference/inlines.scala @@ -41,6 +41,8 @@ object Test{ } } + val x = log[String]("some op", indentSetting)(_) + def main(args: Array[String]): Unit = println(factorial(33)) } diff --git a/tests/pos/reference/saferExceptions.scala b/tests/pos/reference/saferExceptions.scala new file mode 100644 index 000000000000..04dbd5fbfd87 --- /dev/null +++ b/tests/pos/reference/saferExceptions.scala @@ -0,0 +1,15 @@ +import language.experimental.saferExceptions + + +class LimitExceeded extends Exception + +val limit = 10e9 + +def f(x: Double): Double throws LimitExceeded = + if x < limit then x * x else throw LimitExceeded() + +@main def test(xs: Double*) = + try println(xs.map(f).sum) + catch case ex: LimitExceeded => println("too large") + + diff --git a/tests/pos/source-import-3-0-migration.scala b/tests/pos/source-import-3-0-migration.scala new file mode 100644 index 000000000000..b0f220fd4e40 --- /dev/null +++ b/tests/pos/source-import-3-0-migration.scala @@ -0,0 +1 @@ +import language.`3.0-migration` diff --git a/tests/pos/source-import-3-0.scala b/tests/pos/source-import-3-0.scala new file mode 100644 index 000000000000..d6d7f9201b08 --- /dev/null +++ b/tests/pos/source-import-3-0.scala @@ -0,0 +1 @@ +import language.`3.0` diff --git a/tests/pos/source-import-3-1-migration.scala b/tests/pos/source-import-3-1-migration.scala new file mode 100644 index 000000000000..6e6f9f905b99 --- /dev/null +++ b/tests/pos/source-import-3-1-migration.scala @@ -0,0 +1 @@ +import 
language.`3.1-migration` diff --git a/tests/pos/source-import-3-1.scala b/tests/pos/source-import-3-1.scala new file mode 100644 index 000000000000..38216d612864 --- /dev/null +++ b/tests/pos/source-import-3-1.scala @@ -0,0 +1 @@ +import language.`3.1` diff --git a/tests/pos/specs2-failure.scala b/tests/pos/specs2-failure.scala new file mode 100644 index 000000000000..c705c8e283ce --- /dev/null +++ b/tests/pos/specs2-failure.scala @@ -0,0 +1,20 @@ +import util.matching.Regex +import util.matching.Regex.Match + +// Demonstrate what used to be a failure in specs2, before we refined +// the scheme when not to typecheck a function argument again. +object Test: + + extension (s: String) + + def replaceAll(pairs: (String, String)*): String = + pairs.foldLeft(s) { (res, cur) => + res.replaceAll(cur._1, cur._2) + } + + def replaceAll(exp: String, f: String => String): String = + new Regex(exp).replaceAllIn(s, (m: Match) => f(m.group(0).replace("\\", "\\\\"))) + + def replaceInsideTag(tag: String, p: (String, String)*): String = + s.replaceAll(tag, (s: String) => java.util.regex.Matcher.quoteReplacement(s.replaceAll(p*))) + diff --git a/tests/pos/splice-pat.scala b/tests/pos/splice-pat.scala new file mode 100644 index 000000000000..6a8852b4a7c0 --- /dev/null +++ b/tests/pos/splice-pat.scala @@ -0,0 +1,24 @@ +import scala.quoted.* + +object MyMatcher { + def unapply(expr: Expr[Any])(using Quotes): Option[Expr[Int]] = ??? +} + +object MyMatcher2 { + def unapply(expr: Expr[Int])(using Quotes): Boolean = ??? +} + +def foo(x: Any): Unit = ??? +def bar(x: Int): Int = ??? 
+ +def oneLevel(expr: Expr[Any])(using Quotes): Expr[Int] = expr match + case '{ foo(${MyMatcher(y@MyMatcher2())}) } => y + +def twoLevel(expr: Expr[Any])(using Quotes): Expr[Int] = expr match + case '{ foo(${MyMatcher('{ bar(${y@MyMatcher2()}).getClass}) }) } => y + +def bindQuote(expr: Expr[Any])(using Quotes): Expr[Int] = expr match + case '{ foo(${y@'{bar($_)}})} => y + +def noop(expr: Expr[Any])(using Quotes): Expr[Int] = expr match + case '{ bar(${ '{ $y } }) } => y diff --git a/tests/pos/stm.scala b/tests/pos/stm.scala new file mode 100644 index 000000000000..48ff946f9b5c --- /dev/null +++ b/tests/pos/stm.scala @@ -0,0 +1,10 @@ +class Inv[X] +class Ref[X] +object Ref { + def apply(i: Inv[Int], x: Int): Ref[Int] = ??? + def apply[Y](i: Inv[Y], x: Y): Ref[Y] = ??? +} + +class A { + val ref: Ref[List[AnyRef]] = Ref(new Inv[List[AnyRef]], List.empty) +} diff --git a/tests/pos/t0851.scala b/tests/pos/t0851.scala index fdc504af75c5..c7393723b148 100644 --- a/tests/pos/t0851.scala +++ b/tests/pos/t0851.scala @@ -1,9 +1,8 @@ package test object test1 { - case class Foo[T,T2](f : (T,T2) => String) extends (((T,T2)) => String){ + case class Foo[T,T2](f : (T,T2) => String) { def apply(t : T) = (s:T2) => f(t,s) - def apply(p : (T,T2)) = f(p._1,p._2) } implicit def g[T](f : (T,String) => String): Foo[T, String] = Foo(f) def main(args : Array[String]) : Unit = { diff --git a/tests/pos/t2913.scala b/tests/pos/t2913.scala index f91ed7b51318..9d7b898cbe9d 100644 --- a/tests/pos/t2913.scala +++ b/tests/pos/t2913.scala @@ -33,9 +33,8 @@ object TestNoAutoTupling { // t0851 is essentially the same: object test1 { - case class Foo[T,T2](f : (T,T2) => String) extends (((T,T2)) => String){ + case class Foo[T,T2](f : (T,T2) => String) { def apply(t : T) = (s:T2) => f(t,s) - def apply(p : (T,T2)) = f(p._1,p._2) } implicit def g[T](f : (T,String) => String): test1.Foo[T,String] = Foo(f) def main(args : Array[String]) : Unit = { diff --git a/tests/pos/the-given.scala 
b/tests/pos/the-given.scala index 97c0e5099215..4d8ed582ac6d 100644 --- a/tests/pos/the-given.scala +++ b/tests/pos/the-given.scala @@ -1,7 +1,7 @@ object Test { class Encoder { def apply(x: Int): Int = x } - given Encoder with {} + given Encoder() summon[Encoder](2) diff --git a/tests/pos/trailing-comma-pattern.scala b/tests/pos/trailing-comma-pattern.scala new file mode 100644 index 000000000000..878b3af7748a --- /dev/null +++ b/tests/pos/trailing-comma-pattern.scala @@ -0,0 +1,3 @@ +object Test: + val List(x, y, _*, + ) = List(1, 2, 3) diff --git a/tests/pos/typeclass-encoding2.scala b/tests/pos/typeclass-encoding2.scala deleted file mode 100644 index 13db93f91204..000000000000 --- a/tests/pos/typeclass-encoding2.scala +++ /dev/null @@ -1,348 +0,0 @@ -/** 1. Simple type classes with monomorphic implementations and direct extensions. - - trait SemiGroup extends TypeClass { - def add(that: This): This - } - - trait Monoid extends SemiGroup - common { - def unit: This - } - - extension IntOps for Int : Monoid { - def add(that: Int) = this + that - } - common { - def unit = 0 - } - - extension StringOps for String : Monoid { - def add(that: Int) = this ++ that - } - common { - def unit = "" - } - - enum Nat extends Monoid { - case Z - case S(n: Nat) - - def add(that: Nat): Nat = this match { - case S => that - case S(n) => S(n.add(that)) - } - } - common { - def unit = Z - } - - def sum[T: Monoid](xs: List[T]): T = - xs.foldLeft(Monod.impl[T].unit)(_ `add` _) -*/ -object runtime { - - trait TypeClass { - val commons: TypeClassCommon - type This = commons.This - } - - trait TypeClassCommon { self => - type This - type Instance <: TypeClass - def inject(x: This): Instance { val commons: self.type } - } - - trait TypeClassCompanion { - type Impl[T] <: TypeClassCommon { type This = T } - def impl[T](implicit ev: Impl[T]): Impl[T] = ev - } - - implicit def inject[From](x: From) - (implicit ev: TypeClassCommon { type This = From }): ev.Instance { type This = From } = - 
ev.inject(x) -} -import runtime.* - -object semiGroups { - - trait SemiGroup extends TypeClass { - val commons: SemiGroupCommon - import commons.* - def add(that: This): This - } - trait SemiGroupCommon extends TypeClassCommon { - type Instance <: SemiGroup - } - object SemiGroup extends TypeClassCompanion { - type Impl[T] = SemiGroupCommon { type This = T } - } - - trait Monoid extends SemiGroup { - val commons: MonoidCommon - import commons.* - } - trait MonoidCommon extends SemiGroupCommon { - type Instance <: Monoid - def unit: This - } - object Monoid extends TypeClassCompanion { - type Impl[T] = MonoidCommon { type This = T } - } - - implicit object IntOps extends MonoidCommon { - type This = Int - type Instance = Monoid - def unit: Int = 0 - def inject($this: Int) = new Monoid { - val commons: IntOps.this.type = IntOps.this - def add(that: this.This): this.This = $this + that - } - } - - implicit object StringOps extends MonoidCommon { - type This = String - type Instance = Monoid - def unit = "" - def inject($this: String) = new Monoid { - val commons: StringOps.this.type = StringOps.this - def add(that: this.This): this.This = $this.concat(that) - } - } - - enum Nat extends Monoid { - case Z - case S(n: Nat) - - def add(that: Nat): Nat = this match { - case Z => that - case S(n) => S(n.add(that)) - } - - val commons: Nat.type = Nat - } - object Nat extends MonoidCommon { - type This = Nat - type Instance = Nat - def unit = Nat.Z - def inject($this: Nat) = $this - } - import Nat.{Z, S} - - implicit def NatOps: Nat.type = Nat - - def sum[T](xs: List[T])(implicit ev: Monoid.Impl[T]) = - xs.foldLeft(Monoid.impl[T].unit)((x, y) => x `add` y) - - sum(List(1, 2, 3)) - sum(List("hello ", "world!")) - sum(List(Z, S(Z), S(S(Z)))) -} - -/** 2. Generic implementations of simple type classes. 
- - trait Ord extends TypeClass { - def compareTo(that: This): Int - def < (that: This) = compareTo(that) < 0 - def > (that: This) = compareTo(that) > 0 - } - common { - val minimum: This - } - - extension IntOrd for Int : Ord { - def compareTo(that: Int) = - if (this < that) -1 else if (this > that) +1 else 0 - } - common { - val minimum = Int.MinValue - } - - extension ListOrd[T : Ord] for List[T] : Ord { - def compareTo(that: List[T]): Int = (this, that) match { - case (Nil, Nil) => 0 - case (Nil, _) => -1 - case (_, Nil) => +1 - case (x :: xs, y :: ys) => - val fst = x.compareTo(y) - if (fst != 0) fst else xs.compareTo(ys) - } - } - common { - val minimum = Nil - } - - def min[T: Ord](x: T, y: T) = if (x < y) x else y - - def inf[T: Ord](xs: List[T]): T = (Ord.impl[T].minimum /: xs)(min) -*/ -object ord { - - trait Ord extends TypeClass { - val commons: OrdCommon - import commons.* - def compareTo(that: This): Int - def < (that: This) = compareTo(that) < 0 - def > (that: This) = compareTo(that) > 0 - } - trait OrdCommon extends TypeClassCommon { - type Instance <: Ord - def minimum: This - } - object Ord extends TypeClassCompanion { - type Impl[T] = OrdCommon { type This = T } - } - - implicit object IntOrd extends OrdCommon { - type This = Int - type Instance = Ord - val minimum: Int = Int.MinValue - def inject($this: Int) = new Ord { - val commons: IntOrd.this.type = IntOrd.this - import commons.* - def compareTo(that: this.This): Int = - if (this < that) -1 else if (this > that) +1 else 0 - } - } - - class ListOrd[T](implicit ev: Ord.Impl[T]) extends OrdCommon { self => - type This = List[T] - type Instance = Ord - def minimum: List[T] = Nil - def inject($this: List[T]) = new Ord { - val commons: self.type = self - import commons.* - def compareTo(that: List[T]): Int = ($this, that) match { - case (Nil, Nil) => 0 - case (Nil, _) => -1 - case (_, Nil) => +1 - case (x :: xs, y :: ys) => - val fst = x.compareTo(y) - if (fst != 0) fst else xs.compareTo(ys) - } - 
} - } - - implicit def listOrd[T](implicit ev: Ord.Impl[T]): ListOrd[T] = - new ListOrd[T] - - def min[T](x: T, y: T)(implicit ev: Ord.Impl[T]): T = - if (x < y) x else y - - def inf[T](xs: List[T])(implicit ev: Ord.Impl[T]): T = { - val smallest = Ord.impl[T].minimum - xs.foldLeft(smallest)(min) - } - - inf(List[Int]()) - inf(List(List(1, 2), List(1, 2, 3))) - inf(List(List(List(1), List(2)), List(List(1), List(2), List(3)))) -} - -/** 3. Higher-kinded type classes - - trait Functor[A] extends TypeClass1 { - def map[B](f: A => B): This[B] - } - common { - def pure[A](x: A): This[A] - } - - // Generically, `pure[A]{.map(f)}^n` - def develop[A, F[X] : Functor[X]](n: Int, f: A => A): F[A] = - if (n == 0) Functor.impl[F].pure[A] - else develop[A, F](n - 1, f).map(f) - - trait Monad[A] extends Functor[A] { - def flatMap[B](f: A => This[B]): This[B] - def map[B](f: A => B) = this.flatMap(f.andThen(pure)) - } - - extension ListMonad[T] for List[T] : Monad[T] { - static def pure[A] = Nil - - def flatMap[B](f: A => List[B]): List[B] = this match { - case x :: xs => f(x) ++ xs.flatMap(f) - case Nil => Nil - } - } - - extension MonadFlatten[T[X]: Monad[X]] for T[T[A]] { - def flatten: T[A] = this.flatMap(identity) - } -*/ -object runtime1 { - - trait TypeClass1 { - val commons: TypeClassCommon1 - type This = [X] =>> commons.This[X] - } - - trait TypeClassCommon1 { self => - type This[X] - type Instance[X] <: TypeClass1 - def inject[A](x: This[A]): Instance[A] { val commons: self.type } - } - - trait TypeClassCompanion1 { - type Impl[T[_]] <: TypeClassCommon1 { type This = [X] =>> T[X] } - def impl[T[_]](implicit ev: Impl[T]): Impl[T] = ev - } - - implicit def inject1[A, From[_]](x: From[A]) - (implicit ev: TypeClassCommon1 { - type This = [X] =>> From[X] - }): ev.Instance[A] { type This = [X] =>> From[X] } = - ev.inject(x) -} -import runtime1.* - -object functors { - - trait Functor[A] extends TypeClass1 { - val commons: FunctorCommon - import commons.* - def map[B](f: A => 
B): This[B] - } - trait FunctorCommon extends TypeClassCommon1 { - type Instance[X] <: Functor[X] - def pure[A](x: A): This[A] - } - object Functor extends TypeClassCompanion1 { - type Impl[T[_]] = FunctorCommon { type This = [X] =>> T[X] } - } - - trait Monad[A] extends Functor[A] { - val commons: MonadCommon - import commons.* - def flatMap[B](f: A => This[B]): This[B] - def map[B](f: A => B) = this.flatMap(f.andThen(commons.pure)) - } - trait MonadCommon extends FunctorCommon { - type Instance[X] <: Monad[X] - } - object Monad extends TypeClassCompanion1 { - type Impl[T[_]] = MonadCommon { type This = [X] =>> T[X] } - } - - def develop[A, F[X]](n: Int, x: A, f: A => A)(implicit ev: Functor.Impl[F]): F[A] = - if (n == 0) Functor.impl[F].pure(x) - else develop(n - 1, x, f).map(f).asInstanceOf - - implicit object ListMonad extends MonadCommon { - type This[+X] = List[X] - type Instance[X] = Monad[X] - def pure[A](x: A) = x :: Nil - def inject[A]($this: List[A]) = new Monad[A] { - val commons: ListMonad.this.type = ListMonad - import commons.* - def flatMap[B](f: A => List[B]): List[B] = $this.flatMap(f) - } - } - - object MonadFlatten { - def flattened[T[_], A]($this: T[T[A]])(implicit ev: Monad.Impl[T]): T[A] = - ??? // $this.flatMap[A](identity) disabled since it does not typecheck - } - - MonadFlatten.flattened(List(List(1, 2, 3), List(4, 5))) -} \ No newline at end of file diff --git a/tests/pos/unary-eq.scala b/tests/pos/unary-eq.scala new file mode 100644 index 000000000000..9e9d871a0912 --- /dev/null +++ b/tests/pos/unary-eq.scala @@ -0,0 +1,6 @@ +final class Baz private (val x: Int) extends AnyVal { + def `unary_!_=`() : Baz = ??? // parses ok, but will not be usable + def `unary_~_=`() : Baz = ??? // parses ok, but will not be usable + def `unary_+_=`() : Baz = ??? // parses ok, but will not be usable + def `unary_-_=`() : Baz = ??? 
// parses ok, but will not be usable +} diff --git a/tests/pos/unify-wildcard-patterns.scala b/tests/pos/unify-wildcard-patterns.scala new file mode 100644 index 000000000000..9f18cebae1f5 --- /dev/null +++ b/tests/pos/unify-wildcard-patterns.scala @@ -0,0 +1,23 @@ +// `case _ => expr` in a match expression should be equivalent to +// `case _: Any => expr`. Likewise, in a match type, `case _ => T` +// should be equivalent to `case Any => T`. + +object Test0 { + type M[X] = X match { case String => Int case Any => String } + def m[X](x: X): M[X] = x match { case _: String => 1 case _: Any => "s" } +} + +object Test1 { + type M[X] = X match { case String => Int case Any => String } + def m[X](x: X): M[X] = x match { case _: String => 1 case _ => "s" } +} + +object Test2 { + type M[X] = X match { case String => Int case _ => String } + def m[X](x: X): M[X] = x match { case _: String => 1 case _: Any => "s" } +} + +object Test3 { + type M[X] = X match { case String => Int case _ => String } + def m[X](x: X): M[X] = x match { case _: String => 1 case _ => "s" } +} diff --git a/tests/pos/widen-union.scala b/tests/pos/widen-union.scala index b0b64f0dc6c6..ec58f3da1941 100644 --- a/tests/pos/widen-union.scala +++ b/tests/pos/widen-union.scala @@ -5,6 +5,11 @@ object Test1: val z: Int | String = y object Test2: + val x: 3 | "a" = 3 + val y = x + val z: Int | String = y + +object Test3: type Sig = Int | String def consistent(x: Sig, y: Sig): Boolean = ???// x == y @@ -12,7 +17,7 @@ object Test2: xs.corresponds(ys)(consistent) // OK || xs.corresponds(ys)(consistent(_, _)) // error, found: Any, required: Int | String -object Test3: +object Test4: def g[X](x: X | String): Int = ??? def y: Boolean | String = ??? diff --git a/tests/pos/zipped.scala b/tests/pos/zipped.scala new file mode 100644 index 000000000000..a23fd62acbb0 --- /dev/null +++ b/tests/pos/zipped.scala @@ -0,0 +1,36 @@ +// This test shows some un-intuitive behavior of the `zipped` method.
+object Test { + val xs: List[Int] = ??? + + // 1. This works, since withFilter is not defined on Tuple3zipped. Instead, + // an implicit conversion from Tuple3zipped to Traversable[(Int, Int, Int)] is inserted. + // The subsequent map operation has the right type for this Traversable. + xs.lazyZip(xs).lazyZip(xs) + .withFilter( (x: (Int, Int, Int)) => x match { case (x, y, z) => true } ) // OK + .map( (x: (Int, Int, Int)) => x match { case (x, y, z) => x + y + z }) // OK + + + // 2. This works as well, because of auto untupling i.e. `case` is inserted. + // But it does not work in Scala2. + xs.lazyZip(xs).lazyZip(xs) + .withFilter( (x: (Int, Int, Int)) => x match { case (x, y, z) => true } ) // OK + .map( (x: Int, y: Int, z: Int) => x + y + z ) // OK + // works, because of auto untupling i.e. `case` is inserted + // does not work in Scala2 + + // 3. Now, without withFilter, it's the opposite, we need the 3 parameter map. + xs.lazyZip(xs).lazyZip(xs) + .map( (x: Int, y: Int, z: Int) => x + y + z ) // OK + + // 4. The single parameter map works through an inserted conversion + xs.lazyZip(xs).lazyZip(xs) + .map( (x: (Int, Int, Int)) => x match { case (x, y, z) => x + y + z }) // now also OK + + // 5. But if that one is deeper nested, it does not work since we don't retypecheck + // arguments deeply.
+ //xs.lazyZip(xs).lazyZip(xs) + // .map( x => x match { case (x, y, z) => x + y + z }) // now also OK + + // This means that the following works in Dotty 3.0 as well as 3.x + for ((x, y, z) <- xs.lazyZip(xs).lazyZip(xs)) yield x + y + z +} \ No newline at end of file diff --git a/tests/pos/zipper.scala b/tests/pos/zipper.scala new file mode 100644 index 000000000000..6ccd5462a43c --- /dev/null +++ b/tests/pos/zipper.scala @@ -0,0 +1,29 @@ +enum Tree[+A]: + case Branch(left: Tree[A], right: Tree[A]) + case Leaf(value: A) + +enum Context[+A]: + case Empty + case InLeft(right: Tree[A], parent: Context[A]) + case InRight(left: Tree[A], parent: Context[A]) + +import Tree.*, Context.* + +class Zipper[+A](val focus: Tree[A], val context: Context[A]): + def unfocus: Tree[A] = context match + case Empty => focus + case _ => moveUp.unfocus + def moveUp: Zipper[A] = context match + case Empty => this + case InLeft(right, parent) => Zipper(Branch(focus, right), parent) + case InRight(left, parent) => Zipper(Branch(left, focus), parent) + def moveLeft: Zipper[A] = focus match + case Leaf(_) => this + case Branch(left, right) => Zipper(left, InLeft(right, context)) + def moveRight: Zipper[A] = focus match + case Leaf(_) => this + case Branch(left, right) => Zipper(right, InRight(left, context)) + def replaceFocus[B >: A](newFocus: Tree[B]): Zipper[B] = + Zipper(newFocus, context) + +extension[A](tree: Tree[A]) def focus: Zipper[A] = Zipper(tree, Empty) diff --git a/tests/printing/annot-printing.check b/tests/printing/annot-printing.check new file mode 100644 index 000000000000..fc71f5730d78 --- /dev/null +++ b/tests/printing/annot-printing.check @@ -0,0 +1,22 @@ +[[syntax trees at end of typer]] // tests/printing/annot-printing.scala +package { + import scala.annotation.* + class Foo() extends annotation.Annotation() {} + class Bar(s: String) extends annotation.Annotation() { + private[this] val s: String + } + class Xyz(i: Int) extends annotation.Annotation() { + private[this] 
val i: Int + } + final lazy module val Xyz: Xyz = new Xyz() + final module class Xyz() extends AnyRef() { this: Xyz.type => + def $lessinit$greater$default$1: Int @uncheckedVariance = 23 + } + final lazy module val annot-printing$package: annot-printing$package = + new annot-printing$package() + final module class annot-printing$package() extends Object() { + this: annot-printing$package.type => + def x: Int @nowarn() @main @Xyz() @Foo @Bar("hello") = ??? + } +} + diff --git a/tests/printing/annot-printing.scala b/tests/printing/annot-printing.scala new file mode 100644 index 000000000000..7ef44b0190f3 --- /dev/null +++ b/tests/printing/annot-printing.scala @@ -0,0 +1,7 @@ +import scala.annotation.* + +class Foo() extends Annotation +class Bar(s: String) extends Annotation +class Xyz(i: Int = 23) extends Annotation + +def x: Int @nowarn @main @Xyz() @Foo @Bar("hello") = ??? diff --git a/tests/printing/dependent-annot.check b/tests/printing/dependent-annot.check new file mode 100644 index 000000000000..393d444d5350 --- /dev/null +++ b/tests/printing/dependent-annot.check @@ -0,0 +1,25 @@ +[[syntax trees at end of typer]] // tests/printing/dependent-annot.scala +package { + class C() extends Object() {} + class ann(x: Seq[Any] @Repeated) extends annotation.Annotation() { + private[this] val x: Seq[Any] @Repeated + } + final lazy module val dependent-annot$package: dependent-annot$package = + new dependent-annot$package() + final module class dependent-annot$package() extends Object() { + this: dependent-annot$package.type => + def f(y: C, z: C): Unit = + { + def g(): C @ann([y,z : Any]*) = ??? + val ac: + (C => Array[String]) + { + def apply(x: C): Array[String @ann([x : Any]*)] + } + = ??? 
+ val dc: Array[String] = ac.apply(g()) + () + } + } +} + diff --git a/tests/printing/dependent-annot.scala b/tests/printing/dependent-annot.scala new file mode 100644 index 000000000000..28f0f8bd59e6 --- /dev/null +++ b/tests/printing/dependent-annot.scala @@ -0,0 +1,7 @@ +class C +class ann(x: Any*) extends annotation.Annotation + +def f(y: C, z: C) = + def g(): C @ann(y, z) = ??? + val ac: ((x: C) => Array[String @ann(x)]) = ??? + val dc = ac(g()) diff --git a/tests/printing/i13306.check b/tests/printing/i13306.check new file mode 100644 index 000000000000..053d2f861cf3 --- /dev/null +++ b/tests/printing/i13306.check @@ -0,0 +1,14 @@ +[[syntax trees at end of typer]] // tests/printing/i13306.scala +package example { + class MyClass() extends Object() {} + class MembersContainer() extends Object() { + type MyType[T >: Nothing <: example.MyClass] = Comparable[T] + } + final lazy module val Exports: example.Exports = new example.Exports() + final module class Exports() extends Object() { this: example.Exports.type => + val instance: example.MembersContainer = new example.MembersContainer() + export example.Exports.instance.* + final type MyType[T <: example.MyClass] = Comparable[T] + } +} + diff --git a/tests/printing/i13306.scala b/tests/printing/i13306.scala new file mode 100644 index 000000000000..790f55018404 --- /dev/null +++ b/tests/printing/i13306.scala @@ -0,0 +1,12 @@ +package example + +class MyClass + +class MembersContainer { + type MyType[T <: MyClass] = Comparable[T] +} + +object Exports { + val instance = new MembersContainer + export instance.* +} diff --git a/tests/printing/i620.check b/tests/printing/i620.check index 1dedb6d375a7..3780e2cf3045 100644 --- a/tests/printing/i620.check +++ b/tests/printing/i620.check @@ -1,4 +1,4 @@ -result of tests/printing/i620.scala after typer: +[[syntax trees at end of typer]] // tests/printing/i620.scala package O { package O.A { class D() extends Object() { @@ -30,3 +30,4 @@ package O { } } } + diff --git 
a/tests/printing/untyped/annot-printing.check b/tests/printing/untyped/annot-printing.check new file mode 100644 index 000000000000..b9ff9670cadd --- /dev/null +++ b/tests/printing/untyped/annot-printing.check @@ -0,0 +1,9 @@ +[[syntax trees at end of parser]] // tests/printing/untyped/annot-printing.scala +package { + import scala.annotation.* + class Foo() extends Annotation {} + class Bar(private[this] val s: String) extends Annotation {} + class Xyz(private[this] val i: Int = 23) extends Annotation {} + def x: Int @nowarn @main @Xyz @Foo @Bar("hello") = ??? +} + diff --git a/tests/printing/untyped/annot-printing.scala b/tests/printing/untyped/annot-printing.scala new file mode 100644 index 000000000000..7ef44b0190f3 --- /dev/null +++ b/tests/printing/untyped/annot-printing.scala @@ -0,0 +1,7 @@ +import scala.annotation.* + +class Foo() extends Annotation +class Bar(s: String) extends Annotation +class Xyz(i: Int = 23) extends Annotation + +def x: Int @nowarn @main @Xyz() @Foo @Bar("hello") = ??? diff --git a/tests/printing/untyped/dependent-annot.check b/tests/printing/untyped/dependent-annot.check new file mode 100644 index 000000000000..9322f9286fbf --- /dev/null +++ b/tests/printing/untyped/dependent-annot.check @@ -0,0 +1,13 @@ +[[syntax trees at end of parser]] // tests/printing/untyped/dependent-annot.scala +package { + class C {} + class ann(private[this] val x: Any *) extends annotation.Annotation {} + def f(y: C, z: C) = + { + def g(): C @ann(y, z) = ??? + val ac: ((x: C) => Array[String @ann(x)]) = ??? + val dc = ac(g()) + + } +} + diff --git a/tests/printing/untyped/dependent-annot.scala b/tests/printing/untyped/dependent-annot.scala new file mode 100644 index 000000000000..28f0f8bd59e6 --- /dev/null +++ b/tests/printing/untyped/dependent-annot.scala @@ -0,0 +1,7 @@ +class C +class ann(x: Any*) extends annotation.Annotation + +def f(y: C, z: C) = + def g(): C @ann(y, z) = ??? + val ac: ((x: C) => Array[String @ann(x)]) = ??? 
+ val dc = ac(g()) diff --git a/tests/rewrites/i12340.check b/tests/rewrites/i12340.check new file mode 100644 index 000000000000..c6cb9af8bb57 --- /dev/null +++ b/tests/rewrites/i12340.check @@ -0,0 +1,17 @@ + +class C { + def f = 42 +} // end C + +def f(i: Int) = { + if i < 42 then + println(i) + end if + i +} // end f + +def g(i: Int) = + if i < 42 then + println(i) + end if +end g diff --git a/tests/rewrites/i12340.scala b/tests/rewrites/i12340.scala new file mode 100644 index 000000000000..bf907ef9f276 --- /dev/null +++ b/tests/rewrites/i12340.scala @@ -0,0 +1,17 @@ + +class C: + def f = 42 +end C + +def f(i: Int) = + if i < 42 then + println(i) + end if + i +end f + +def g(i: Int) = + if i < 42 then + println(i) + end if +end g diff --git a/tests/run-custom-args/Xmacro-settings/compileTimeEnv.check b/tests/run-custom-args/Xmacro-settings/compileTimeEnv.check new file mode 100644 index 000000000000..61075d3bb6aa --- /dev/null +++ b/tests/run-custom-args/Xmacro-settings/compileTimeEnv.check @@ -0,0 +1,6 @@ +I'm a info msg +I'm a warn msg +a = [] +b = [1] +c.b.a = [x.y.z=1] +wat is not defined diff --git a/tests/run-custom-args/Xmacro-settings/compileTimeEnv/Logging.scala b/tests/run-custom-args/Xmacro-settings/compileTimeEnv/Logging.scala new file mode 100644 index 000000000000..702097671924 --- /dev/null +++ b/tests/run-custom-args/Xmacro-settings/compileTimeEnv/Logging.scala @@ -0,0 +1,35 @@ +import scala.compiletime.* +import scala.quoted.* + + +object Logging { + + // Just use your imagination for now :) + private inline val Trace = 0 + private inline val Debug = 1 + private inline val Info = 2 + private inline val Warn = 3 + + private transparent inline def chosenThreshold: Int = ${ + choosenTresholdImpl + } + + + private def choosenTresholdImpl(using Quotes):Expr[Int] = + import quotes.reflect.* + MacroEnv.getInMacro("myLogger.level") match + case Some("TRACE") => Expr(Trace) + case Some("DEBUG") => Expr(Debug) + case Some("INFO") => Expr(Info) + case 
Some("WARN") => Expr(Warn) + case Some(x) => report.errorAndAbort("Unsupported logging level: " + x) + case None => Expr(Trace) + + private inline def log(inline lvl: Int, inline msg: String): Unit = + inline if lvl >= chosenThreshold then println(msg) + + inline def trace(inline msg: String): Unit = log(Trace, msg) + inline def debug(inline msg: String): Unit = log(Debug, msg) + inline def info (inline msg: String): Unit = log(Info , msg) + inline def warn (inline msg: String): Unit = log(Warn , msg) +} diff --git a/tests/run-custom-args/Xmacro-settings/compileTimeEnv/MacroEnv.scala b/tests/run-custom-args/Xmacro-settings/compileTimeEnv/MacroEnv.scala new file mode 100644 index 000000000000..bde3719396aa --- /dev/null +++ b/tests/run-custom-args/Xmacro-settings/compileTimeEnv/MacroEnv.scala @@ -0,0 +1,25 @@ +import scala.quoted.* + +object MacroEnv { + + transparent inline def get(inline key:String):Option[String] = ${ + getImpl('key) + } + + def getImpl(key:Expr[String])(using Quotes):Expr[Option[String]] = { + import quotes.reflect.* + val retval = getInMacro(key.valueOrAbort) + Expr(retval) + } + + def getInMacro(key:String)(using Quotes):Option[String] = { + import quotes.reflect.* + val keyEq = key + "=" + CompilationInfo.XmacroSettings.collectFirst{ + case v if v == key => "" + case v if v.startsWith(keyEq) => + v.substring(keyEq.length) + } + } + +} diff --git a/tests/run-custom-args/Xmacro-settings/compileTimeEnv/Test.scala b/tests/run-custom-args/Xmacro-settings/compileTimeEnv/Test.scala new file mode 100644 index 000000000000..7cd9b03b7bb6 --- /dev/null +++ b/tests/run-custom-args/Xmacro-settings/compileTimeEnv/Test.scala @@ -0,0 +1,30 @@ +import scala.compiletime.* + +object Test { + import Logging.* + + def main(args: Array[String]): Unit = { + runLog() + runBasic() + } + + def runLog(): Unit = { + trace("I'm a trace msg") + debug("I'm a debug msg") + info("I'm a info msg") + warn("I'm a warn msg") + } + + def runBasic(): Unit = { + printEnv("a") + 
printEnv("b") + printEnv("c.b.a") + printEnv("wat") + } + + inline def printEnv(inline k: String): Unit = + inline MacroEnv.get(k) match + case Some(v) => println(s"$k = [$v]") + case None => println(k + " is not defined") + +} diff --git a/tests/run-custom-args/Xmacro-settings/simple/M1.scala b/tests/run-custom-args/Xmacro-settings/simple/M1.scala new file mode 100644 index 000000000000..1210d1abe05e --- /dev/null +++ b/tests/run-custom-args/Xmacro-settings/simple/M1.scala @@ -0,0 +1,15 @@ +package x + +import scala.quoted.* + +object M: + + inline def settingsContains(inline x:String): Boolean = ${ + settingsContainsImpl('x) + } + + def settingsContainsImpl(x:Expr[String])(using Quotes): Expr[Boolean] = + import quotes.reflect.* + val v = x.valueOrAbort + val r = CompilationInfo.XmacroSettings.contains(v) + Expr(r) diff --git a/tests/run-custom-args/Xmacro-settings/simple/Test.scala b/tests/run-custom-args/Xmacro-settings/simple/Test.scala new file mode 100644 index 000000000000..97587362c835 --- /dev/null +++ b/tests/run-custom-args/Xmacro-settings/simple/Test.scala @@ -0,0 +1,10 @@ +import x.* + +object Test { + + def main(args: Array[String]):Unit = + assert(M.settingsContains("one")) + assert(!M.settingsContains("notwo")) + assert(M.settingsContains("two")) + +} diff --git a/tests/run-custom-args/Yread-comments/i12351/GetDocString_1.scala b/tests/run-custom-args/Yread-comments/i12351/GetDocString_1.scala new file mode 100644 index 000000000000..5bd3cb1c1f53 --- /dev/null +++ b/tests/run-custom-args/Yread-comments/i12351/GetDocString_1.scala @@ -0,0 +1,11 @@ +import scala.quoted.* + +/** HELLO */ +case class Data(test: Boolean) + +inline def getDocString[T]: Option[String] = ${ getDocStringImpl[T] } + +private def getDocStringImpl[T : Type](using Quotes): Expr[Option[String]] = { + import quotes.reflect.* + Expr(TypeRepr.of[T].typeSymbol.docstring) +} diff --git a/tests/run-custom-args/Yread-comments/i12351/Test_2.scala 
b/tests/run-custom-args/Yread-comments/i12351/Test_2.scala new file mode 100644 index 000000000000..5afd97061411 --- /dev/null +++ b/tests/run-custom-args/Yread-comments/i12351/Test_2.scala @@ -0,0 +1,4 @@ +@main def Test(): Unit = { + println(getDocString[Data]) + assert(getDocString[Data].nonEmpty) +} diff --git a/tests/run-custom-args/Yread-comments/i12352/Macro.scala b/tests/run-custom-args/Yread-comments/i12352/Macro.scala new file mode 100644 index 000000000000..4b4807750dcc --- /dev/null +++ b/tests/run-custom-args/Yread-comments/i12352/Macro.scala @@ -0,0 +1,8 @@ +import scala.quoted.* + +inline def getDocString[T]: Option[String] = ${ getDocStringImpl[T] } + +private def getDocStringImpl[T : Type](using Quotes): Expr[Option[String]] = { + import quotes.reflect.* + Expr(TypeRepr.of[T].typeSymbol.docstring) +} diff --git a/tests/run-custom-args/Yread-comments/i12352/Main.scala b/tests/run-custom-args/Yread-comments/i12352/Main.scala new file mode 100644 index 000000000000..78d0906652b2 --- /dev/null +++ b/tests/run-custom-args/Yread-comments/i12352/Main.scala @@ -0,0 +1,5 @@ +@main def Test(): Unit = { + val res = getDocString[scala.quoted.Quotes] + println(res) + assert(res.nonEmpty) +} diff --git a/tests/run-custom-args/Yretain-trees/tasty-definitions-2.check b/tests/run-custom-args/Yretain-trees/tasty-definitions-2.check index 3c2b67edfde0..206dda1448d6 100644 --- a/tests/run-custom-args/Yretain-trees/tasty-definitions-2.check +++ b/tests/run-custom-args/Yretain-trees/tasty-definitions-2.check @@ -1,3 +1,3 @@ DefDef("foo", Nil, TypeIdent("Int"), Some(Apply(Select(Literal(IntConstant(1)), "+"), List(Literal(IntConstant(2)))))) ValDef("bar", TypeIdent("Int"), Some(Apply(Select(Literal(IntConstant(2)), "+"), List(Literal(IntConstant(3)))))) -Bind("x", Ident("_")) +Bind("x", Wildcard()) diff --git a/tests/run-custom-args/Yretain-trees/tasty-definitions-3.check b/tests/run-custom-args/Yretain-trees/tasty-definitions-3.check index 3c2b67edfde0..206dda1448d6 
100644 --- a/tests/run-custom-args/Yretain-trees/tasty-definitions-3.check +++ b/tests/run-custom-args/Yretain-trees/tasty-definitions-3.check @@ -1,3 +1,3 @@ DefDef("foo", Nil, TypeIdent("Int"), Some(Apply(Select(Literal(IntConstant(1)), "+"), List(Literal(IntConstant(2)))))) ValDef("bar", TypeIdent("Int"), Some(Apply(Select(Literal(IntConstant(2)), "+"), List(Literal(IntConstant(3)))))) -Bind("x", Ident("_")) +Bind("x", Wildcard()) diff --git a/tests/run-custom-args/erased/i11996.scala b/tests/run-custom-args/erased/i11996.scala new file mode 100644 index 000000000000..050d36370ef0 --- /dev/null +++ b/tests/run-custom-args/erased/i11996.scala @@ -0,0 +1,23 @@ +final class UnivEq[A] + +object UnivEq: + erased def force[A]: UnivEq[A] = + compiletime.erasedValue + +extension [A](a: A) + inline def ==*[B >: A](b: B)(using erased UnivEq[B]): Boolean = a == b + inline def !=*[B >: A](b: B)(using erased UnivEq[B]): Boolean = a != b + +case class I(i: Int) + +@main def Test = { + def test[A](a: A, b: A): Unit = { + erased given UnivEq[A] = UnivEq.force[A] + println(a ==* a) + println(a !=* b) + } + println("Test starting...") + test(I(1), I(2)) // error + test(1, 2) + test(true, false) +} diff --git a/tests/run-custom-args/fatal-warnings/i11050.scala b/tests/run-custom-args/fatal-warnings/i11050.scala new file mode 100644 index 000000000000..f0bdd23031fa --- /dev/null +++ b/tests/run-custom-args/fatal-warnings/i11050.scala @@ -0,0 +1,141 @@ +import scala.compiletime.* +import scala.deriving.* + +object OriginalReport: + sealed trait TreeValue + sealed trait SubLevel extends TreeValue + case class Leaf1(value: String) extends TreeValue + case class Leaf2(value: Int) extends SubLevel + +// Variants from the initial failure in akka.event.LogEvent +object FromAkkaCB: + sealed trait A + sealed trait B extends A + sealed trait C extends A + case class D() extends B, C + case class E() extends C, B + +object FromAkkaCB2: + sealed trait A + sealed trait N extends A + case class 
B() extends A + case class C() extends A, N + +object FromAkkaCB3: + sealed trait A + case class B() extends A + case class C() extends A + class D extends C // ignored pattern: class extending a case class + +object NoUnreachableWarnings: + sealed trait Top + object Top + + final case class MiddleA() extends Top with Bottom + final case class MiddleB() extends Top with Bottom + final case class MiddleC() extends Top with Bottom + + sealed trait Bottom extends Top + +object FromAkkaCB4: + sealed trait LogEvent + object LogEvent + case class Error() extends LogEvent + class Error2() extends Error() with LogEventWithMarker // ignored pattern + case class Warning() extends LogEvent + sealed trait LogEventWithMarker extends LogEvent // must be defined late + +object FromAkkaCB4simpler: + sealed trait LogEvent + object LogEvent + case class Error() extends LogEvent + class Error2() extends LogEventWithMarker // not a case class + case class Warning() extends LogEvent + sealed trait LogEventWithMarker extends LogEvent + +object Test: + def main(args: Array[String]): Unit = + testOriginalReport() + testFromAkkaCB() + testFromAkkaCB2() + end main + + def testOriginalReport() = + import OriginalReport._ + val m = summon[Mirror.SumOf[TreeValue]] + given Show[TreeValue] = Show.derived[TreeValue] + val leaf1 = Leaf1("1") + val leaf2 = Leaf2(2) + + assertEq(List(leaf1, leaf2).map(m.ordinal), List(1, 0)) + assertShow[TreeValue](leaf1, "[1] Leaf1(value = \"1\")") + assertShow[TreeValue](leaf2, "[0] [0] Leaf2(value = 2)") + end testOriginalReport + + def testFromAkkaCB() = + import FromAkkaCB._ + val m = summon[Mirror.SumOf[A]] + given Show[A] = Show.derived[A] + val d = D() + val e = E() + + assertEq(List(d, e).map(m.ordinal), List(0, 0)) + assertShow[A](d, "[0] [0] D") + assertShow[A](e, "[0] [1] E") + end testFromAkkaCB + + def testFromAkkaCB2() = + import FromAkkaCB2._ + val m = summon[Mirror.SumOf[A]] + val n = summon[Mirror.SumOf[N]] + given Show[A] = Show.derived[A] + val b 
= B() + val c = C() + + assertEq(List(b, c).map(m.ordinal), List(1, 0)) + assertShow[A](b, "[1] B") + assertShow[A](c, "[0] [0] C") + end testFromAkkaCB2 + + def assertEq[A](obt: A, exp: A) = assert(obt == exp, s"$obt != $exp (obtained != expected)") + def assertShow[A: Show](x: A, s: String) = assertEq(Show.show(x), s) +end Test + +trait Show[-T]: + def show(x: T): String + +object Show: + given Show[Int] with { def show(x: Int) = s"$x" } + given Show[Char] with { def show(x: Char) = s"'$x'" } + given Show[String] with { def show(x: String) = s"$"$x$"" } + + inline def show[T](x: T): String = summonInline[Show[T]].show(x) + + transparent inline def derived[T](implicit ev: Mirror.Of[T]): Show[T] = new { + def show(x: T): String = inline ev match { + case m: Mirror.ProductOf[T] => showProduct(x.asInstanceOf[Product], m) + case m: Mirror.SumOf[T] => showCases[m.MirroredElemTypes](0)(x, m.ordinal(x)) + } + } + + transparent inline def showProduct[T](x: Product, m: Mirror.ProductOf[T]): String = + constValue[m.MirroredLabel] + showElems[m.MirroredElemTypes, m.MirroredElemLabels](0, Nil)(x) + + transparent inline def showElems[Elems <: Tuple, Labels <: Tuple](n: Int, elems: List[String])(x: Product): String = + inline (erasedValue[Labels], erasedValue[Elems]) match { + case _: (label *: labels, elem *: elems) => + val value = show(x.productElement(n).asInstanceOf[elem]) + showElems[elems, labels](n + 1, s"${constValue[label]} = $value" :: elems)(x) + case _: (EmptyTuple, EmptyTuple) => + if elems.isEmpty then "" else elems.mkString(s"(", ", ", ")") + } + + transparent inline def showCases[Alts <: Tuple](n: Int)(x: Any, ord: Int): String = + inline erasedValue[Alts] match { + case _: (alt *: alts) => + if (ord == n) summonFrom { + case m: Mirror.Of[`alt`] => s"[$ord] " + derived[alt](using m).show(x.asInstanceOf[alt]) + } else showCases[alts](n + 1)(x, ord) + case _: EmptyTuple => throw new MatchError(x) + } +end Show diff --git 
a/tests/run-custom-args/generic-tuples.scala b/tests/run-custom-args/generic-tuples.scala index f134b9d864bb..71aa13427493 100644 --- a/tests/run-custom-args/generic-tuples.scala +++ b/tests/run-custom-args/generic-tuples.scala @@ -7,7 +7,7 @@ class HNil extends Tuple case object HNil extends HNil trait Pair[H, T <: Tuple] { - erased inline def size = ??? + erased inline def size = compiletime.erasedValue } } diff --git a/tests/run-custom-args/phantom-OnHList.scala b/tests/run-custom-args/phantom-OnHList.scala index 7c1ba18bbec4..81eddfe0d4a3 100644 --- a/tests/run-custom-args/phantom-OnHList.scala +++ b/tests/run-custom-args/phantom-OnHList.scala @@ -88,6 +88,6 @@ object Appender { object PhantomAppender { type Aux[L1 <: HList, L2 <: HList, O <: HList] - implicit erased def caseHNil[L <: HList]: Aux[HNil, L, L] = ??? - implicit erased def caseHCons[H, T <: HList, L <: HList, O <: HList] (using erased p: Aux[T, L, O]): Aux[H :: T, L, H :: O] = ??? + implicit erased def caseHNil[L <: HList]: Aux[HNil, L, L] = compiletime.erasedValue + implicit erased def caseHCons[H, T <: HList, L <: HList, O <: HList] (using erased p: Aux[T, L, O]): Aux[H :: T, L, H :: O] = compiletime.erasedValue } diff --git a/tests/run-custom-args/tasty-inspector/i13352.scala b/tests/run-custom-args/tasty-inspector/i13352.scala new file mode 100644 index 000000000000..d5b319a7e548 --- /dev/null +++ b/tests/run-custom-args/tasty-inspector/i13352.scala @@ -0,0 +1,30 @@ +import scala.quoted.* +import scala.tasty.inspector.* + +@main def Test = { + // Artefact of the current test infrastructure + // TODO improve infrastructure to avoid needing this code on each test + val classpath = dotty.tools.dotc.util.ClasspathFromClassloader(this.getClass.getClassLoader).split(java.io.File.pathSeparator).find(_.contains("runWithCompiler")).get + val allTastyFiles = dotty.tools.io.Path(classpath).walkFilter(_.extension == "tasty").map(_.toString).toList + val tastyFiles = 
allTastyFiles.filter(_.contains("CanEqual2")) + + TastyInspector.inspectTastyFiles(tastyFiles)(new MyInspector) +} + +class MyInspector extends Inspector: + + override def inspect(using Quotes)(tastys: List[Tasty[quotes.type]]): Unit = + import quotes.reflect.* + class Traverser extends TreeTraverser: + override def traverseTree(tree: Tree)(owner: Symbol) = + if tree.pos.startLine < 100 then + super.traverseTree(tree)(owner) + end Traverser + + val traverser = new Traverser + tastys.foreach { tasty => + traverser.traverseTree(tasty.ast)(tasty.ast.symbol) + } + + +class CanEqual2[T] diff --git a/tests/run-custom-args/typeclass-derivation2.scala b/tests/run-custom-args/typeclass-derivation2.scala index 228547bcccc1..90744d2f7ab6 100644 --- a/tests/run-custom-args/typeclass-derivation2.scala +++ b/tests/run-custom-args/typeclass-derivation2.scala @@ -119,7 +119,7 @@ object TypeLevel { type Subtype[t] = Type[_, t] type Supertype[t] = Type[t, _] type Exactly[t] = Type[t, t] - erased def typeOf[T]: Type[T, T] = ??? + erased def typeOf[T]: Type[T, T] = compiletime.erasedValue } // An algebraic datatype diff --git a/tests/run-custom-args/typeclass-derivation2c.scala b/tests/run-custom-args/typeclass-derivation2c.scala index ddf4d62e0498..f7eee2a3fafe 100644 --- a/tests/run-custom-args/typeclass-derivation2c.scala +++ b/tests/run-custom-args/typeclass-derivation2c.scala @@ -24,12 +24,12 @@ object Deriving { /** The number of cases in the sum. * Implemented by an inline method in concrete subclasses. */ - erased def numberOfCases: Int = ??? + erased def numberOfCases: Int = compiletime.erasedValue /** The Generic representations of the sum's alternatives. * Implemented by an inline method in concrete subclasses. */ - erased def alternative(n: Int): Generic[_ <: T] = ??? 
+ erased def alternative(n: Int): Generic[_ <: T] = compiletime.erasedValue } /** The Generic for a product type */ diff --git a/tests/run-custom-args/typelevel-defaultValue.scala b/tests/run-custom-args/typelevel-defaultValue.scala index 7c3b0af46d25..a171bf8a153c 100644 --- a/tests/run-custom-args/typelevel-defaultValue.scala +++ b/tests/run-custom-args/typelevel-defaultValue.scala @@ -1,6 +1,6 @@ object compiletime { - erased def erasedValue[T]: T = ??? + erased def erasedValue[T]: T = compiletime.erasedValue } object Test extends App { diff --git a/tests/run-deep-subtype/Tuple-append.check b/tests/run-deep-subtype/Tuple-append.check new file mode 100644 index 000000000000..96c717f2aef0 --- /dev/null +++ b/tests/run-deep-subtype/Tuple-append.check @@ -0,0 +1,76 @@ +(0) +(0,1) +(0,1,2) +(0,1,2,3) +(0,1,2,3,4) +(0,1,2,3,4,5) +(0,1,2,3,4,5,6) +(0,1,2,3,4,5,6,7) +(0,1,2,3,4,5,6,7,8) +(0,1,2,3,4,5,6,7,8,9) +(0,1,2,3,4,5,6,7,8,9,10) +(0,1,2,3,4,5,6,7,8,9,10,11) +(0,1,2,3,4,5,6,7,8,9,10,11,12) +(0,1,2,3,4,5,6,7,8,9,10,11,12,13) +(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14) +(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15) +(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16) +(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17) +(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18) +(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19) +(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20) +(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21) +(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22) +(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23) +(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24) +(0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25) +(1) +(1,2) +(1,2,3) +(1,2,3,4) +(1,2,3,4,5) +(1,2,3,4,5,6) +(1,2,3,4,5,6,7) +(1,2,3,4,5,6,7,8) +(1,2,3,4,5,6,7,8,9) +(1,2,3,4,5,6,7,8,9,10) +(1,2,3,4,5,6,7,8,9,10,11) +(1,2,3,4,5,6,7,8,9,10,11,12) +(1,2,3,4,5,6,7,8,9,10,11,12,13) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15) 
+(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25) +(1) +(1,2) +(1,2,3) +(1,2,3,4) +(1,2,3,4,5) +(1,2,3,4,5,6) +(1,2,3,4,5,6,7) +(1,2,3,4,5,6,7,8) +(1,2,3,4,5,6,7,8,9) +(1,2,3,4,5,6,7,8,9,10) +(1,2,3,4,5,6,7,8,9,10,11) +(1,2,3,4,5,6,7,8,9,10,11,12) +(1,2,3,4,5,6,7,8,9,10,11,12,13) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25) diff --git a/tests/run-deep-subtype/Tuple-append.scala b/tests/run-deep-subtype/Tuple-append.scala new file mode 100644 index 000000000000..d913b4c98e1d --- /dev/null +++ b/tests/run-deep-subtype/Tuple-append.scala @@ -0,0 +1,65 @@ +import scala.reflect.ClassTag + +object Test { + def main(args: Array[String]): Unit = { + def testArray[T: ClassTag](n: Int, elem: Int => T): Unit = { + val t: Tuple = Tuple.fromArray(Array.tabulate(n)(elem)) + println(t :* n) + } + + for (i <- 0 to 25) + testArray(i, j => j) + + println(Tuple() :* 1) + println(Tuple1(1) :* 2) + println((1, 2) :* 
3) + println((1, 2, 3) :* 4) + println((1, 2, 3, 4) :* 5) + println((1, 2, 3, 4, 5) :* 6) + println((1, 2, 3, 4, 5, 6) :* 7) + println((1, 2, 3, 4, 5, 6, 7) :* 8) + println((1, 2, 3, 4, 5, 6, 7, 8) :* 9) + println((1, 2, 3, 4, 5, 6, 7, 8, 9) :* 10) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10) :* 11) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11) :* 12) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12) :* 13) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13) :* 14) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14) :* 15) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15) :* 16) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16) :* 17) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17) :* 18) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18) :* 19) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19) :* 20) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20) :* 21) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21) :* 22) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22) :* 23) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23) :* 24) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24) :* 25) + + println(EmptyTuple :* 1) + println((1 *: Tuple()) :* 2) + println((1 *: 2 *: Tuple()) :* 3) + println((1 *: 2 *: 3 *: Tuple()) :* 4) + println((1 *: 2 *: 3 *: 4 *: Tuple()) :* 5) + println((1 *: 2 *: 3 *: 4 *: 5 *: Tuple()) :* 6) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: Tuple()) :* 7) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: Tuple()) :* 8) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: Tuple()) :* 9) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: Tuple()) :* 10) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: Tuple()) :* 11) + println((1 *: 2 *: 3 *: 4 
*: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: Tuple()) :* 12) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: Tuple()) :* 13) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: Tuple()) :* 14) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: Tuple()) :* 15) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: Tuple()) :* 16) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: Tuple()) :* 17) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: Tuple()) :* 18) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: Tuple()) :* 19) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: Tuple()) :* 20) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: Tuple()) :* 21) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: Tuple()) :* 22) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: Tuple()) :* 23) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: 23 *: Tuple()) :* 24) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: 23 *: 24 *: Tuple()) :* 25) + } +} diff --git a/tests/run-deep-subtype/Tuple-init.check b/tests/run-deep-subtype/Tuple-init.check new file mode 100644 index 000000000000..31c15bbd4e76 --- /dev/null +++ b/tests/run-deep-subtype/Tuple-init.check @@ -0,0 +1,76 @@ +() +(0) +(0,0) +(0,0,1) +(0,0,1,2) +(0,0,1,2,3) +(0,0,1,2,3,4) 
+(0,0,1,2,3,4,5) +(0,0,1,2,3,4,5,6) +(0,0,1,2,3,4,5,6,7) +(0,0,1,2,3,4,5,6,7,8) +(0,0,1,2,3,4,5,6,7,8,9) +(0,0,1,2,3,4,5,6,7,8,9,10) +(0,0,1,2,3,4,5,6,7,8,9,10,11) +(0,0,1,2,3,4,5,6,7,8,9,10,11,12) +(0,0,1,2,3,4,5,6,7,8,9,10,11,12,13) +(0,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14) +(0,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15) +(0,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16) +(0,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17) +(0,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18) +(0,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19) +(0,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20) +(0,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21) +(0,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22) +(0,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23) +() +(1) +(1,2) +(1,2,3) +(1,2,3,4) +(1,2,3,4,5) +(1,2,3,4,5,6) +(1,2,3,4,5,6,7) +(1,2,3,4,5,6,7,8) +(1,2,3,4,5,6,7,8,9) +(1,2,3,4,5,6,7,8,9,10) +(1,2,3,4,5,6,7,8,9,10,11) +(1,2,3,4,5,6,7,8,9,10,11,12) +(1,2,3,4,5,6,7,8,9,10,11,12,13) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24) +() +(1) +(1,2) +(1,2,3) +(1,2,3,4) +(1,2,3,4,5) +(1,2,3,4,5,6) +(1,2,3,4,5,6,7) +(1,2,3,4,5,6,7,8) +(1,2,3,4,5,6,7,8,9) +(1,2,3,4,5,6,7,8,9,10) +(1,2,3,4,5,6,7,8,9,10,11) +(1,2,3,4,5,6,7,8,9,10,11,12) +(1,2,3,4,5,6,7,8,9,10,11,12,13) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17) 
+(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23) +(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24) diff --git a/tests/run-deep-subtype/Tuple-init.scala b/tests/run-deep-subtype/Tuple-init.scala new file mode 100644 index 000000000000..286c84a17cf7 --- /dev/null +++ b/tests/run-deep-subtype/Tuple-init.scala @@ -0,0 +1,65 @@ +import scala.reflect.ClassTag + +object Test { + def main(args: Array[String]): Unit = { + def testArray[T: ClassTag](n: Int, elem: Int => T): Unit = { + val t: Int *: Tuple = 0 *: Tuple.fromArray(Array.tabulate(n)(elem)) + println(t.init) + } + + for (i <- 0 to 25) + testArray(i, j => j) + + println(Tuple1(1).init) + println((1, 2).init) + println((1, 2, 3).init) + println((1, 2, 3, 4).init) + println((1, 2, 3, 4, 5).init) + println((1, 2, 3, 4, 5, 6).init) + println((1, 2, 3, 4, 5, 6, 7).init) + println((1, 2, 3, 4, 5, 6, 7, 8).init) + println((1, 2, 3, 4, 5, 6, 7, 8, 9).init) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10).init) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11).init) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12).init) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13).init) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14).init) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15).init) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16).init) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17).init) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18).init) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19).init) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20).init) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 
10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21).init) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22).init) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23).init) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24).init) + println((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25).init) + + println((1 *: Tuple()).init) + println((1 *: 2 *: Tuple()).init) + println((1 *: 2 *: 3 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 
6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: 23 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: 23 *: 24 *: Tuple()).init) + println((1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: 23 *: 24 *: 25 *: Tuple()).init) + } +} diff --git a/tests/run-deep-subtype/Tuple-last.check b/tests/run-deep-subtype/Tuple-last.check new file mode 100644 index 000000000000..cc750f5e721c --- /dev/null +++ b/tests/run-deep-subtype/Tuple-last.check @@ -0,0 +1,26 @@ +0 +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 \ No newline at end of file diff --git a/tests/run-deep-subtype/Tuple-last.scala b/tests/run-deep-subtype/Tuple-last.scala new file mode 100644 index 000000000000..d4bad1719b64 --- /dev/null +++ b/tests/run-deep-subtype/Tuple-last.scala @@ -0,0 +1,65 @@ +import scala.reflect.ClassTag + +object Test { + def main(args: Array[String]): Unit = { + def printArray[T: ClassTag](n: Int, elem: Int => T): Unit = { + val t: Int *: Tuple = 0 *: Tuple.fromArray(Array.tabulate(n)(elem)) + println(t.last) + } + + for (i <- 0 to 25) + printArray(i, j => j) + + assert(1 == Tuple1(1).last) + assert(2 == (1, 2).last) + assert(3 == (1, 2, 3).last) + assert(4 == (1, 2, 3, 4).last) + assert(5 == (1, 2, 3, 4, 5).last) + assert(6 == (1, 2, 3, 4, 5, 6).last) + assert(7 == (1, 2, 3, 4, 5, 6, 7).last) + assert(8 
== (1, 2, 3, 4, 5, 6, 7, 8).last) + assert(9 == (1, 2, 3, 4, 5, 6, 7, 8, 9).last) + assert(10 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10).last) + assert(11 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11).last) + assert(12 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12).last) + assert(13 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13).last) + assert(14 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14).last) + assert(15 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15).last) + assert(16 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16).last) + assert(17 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17).last) + assert(18 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18).last) + assert(19 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19).last) + assert(20 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20).last) + assert(21 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21).last) + assert(22 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22).last) + assert(23 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23).last) + assert(24 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24).last) + assert(25 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25).last) + + assert(1 == (1 *: Tuple()).last) + assert(2 == (1 *: 2 *: Tuple()).last) + assert(3 == (1 *: 2 *: 3 *: Tuple()).last) + assert(4 == (1 *: 2 *: 3 *: 4 *: Tuple()).last) + assert(5 == (1 *: 2 *: 3 *: 4 *: 5 *: Tuple()).last) + assert(6 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: Tuple()).last) + assert(7 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: Tuple()).last) + assert(8 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: Tuple()).last) + assert(9 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: Tuple()).last) + assert(10 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: Tuple()).last) + 
assert(11 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: Tuple()).last) + assert(12 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: Tuple()).last) + assert(13 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: Tuple()).last) + assert(14 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: Tuple()).last) + assert(15 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: Tuple()).last) + assert(16 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: Tuple()).last) + assert(17 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: Tuple()).last) + assert(18 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: Tuple()).last) + assert(19 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: Tuple()).last) + assert(20 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: Tuple()).last) + assert(21 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: Tuple()).last) + assert(22 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: Tuple()).last) + assert(23 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: 23 *: Tuple()).last) + assert(24 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: 23 *: 24 *: Tuple()).last) + assert(25 == (1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: 23 *: 24 *: 25 *: Tuple()).last) + } +} diff --git a/tests/run/tuple-drop.scala 
b/tests/run-deep-subtype/tuple-drop.scala similarity index 100% rename from tests/run/tuple-drop.scala rename to tests/run-deep-subtype/tuple-drop.scala diff --git a/tests/run/tuple-take.scala b/tests/run-deep-subtype/tuple-take.scala similarity index 100% rename from tests/run/tuple-take.scala rename to tests/run-deep-subtype/tuple-take.scala diff --git a/tests/run-macros/TypeRepr-isTupleN/Macro_1.scala b/tests/run-macros/TypeRepr-isTupleN/Macro_1.scala new file mode 100644 index 000000000000..37f8c7e32dab --- /dev/null +++ b/tests/run-macros/TypeRepr-isTupleN/Macro_1.scala @@ -0,0 +1,8 @@ +import scala.quoted.* + +inline def isTupleN[T]: Boolean = ${ isTupleNImpl[T] } + +private def isTupleNImpl[T: Type](using Quotes): Expr[Boolean] = { + import quotes.reflect.* + Expr(TypeRepr.of[T].isTupleN) +} diff --git a/tests/run-macros/TypeRepr-isTupleN/Test_2.scala b/tests/run-macros/TypeRepr-isTupleN/Test_2.scala new file mode 100644 index 000000000000..12e15d567b78 --- /dev/null +++ b/tests/run-macros/TypeRepr-isTupleN/Test_2.scala @@ -0,0 +1,38 @@ +@main def Test = { + assert(isTupleN[Tuple1[Int]]) + assert(isTupleN[(Int, Int)]) + assert(isTupleN[(Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int, Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int, Int, Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, 
Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)]) + assert(isTupleN[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)]) + + type Tup = (Int, Int) + assert(isTupleN[Tup]) + + assert(!isTupleN[(Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)]) // No tuple 23 + assert(!isTupleN[Tuple]) + assert(!isTupleN[EmptyTuple]) + assert(!isTupleN[NonEmptyTuple]) + assert(!isTupleN[Int *: Tuple]) + + assert(!isTupleN[Any]) + assert(!isTupleN[Int]) + assert(!isTupleN[Object]) + assert(!isTupleN[Nothing]) +} diff --git a/tests/run-macros/annot-arg-value-in-java.check b/tests/run-macros/annot-arg-value-in-java.check new file mode 100644 index 000000000000..d49aaf91ae6a --- /dev/null +++ b/tests/run-macros/annot-arg-value-in-java.check @@ -0,0 +1,11 @@ +J: +new java.lang.SuppressWarnings(value = "a") +new java.lang.SuppressWarnings(value = "b") +new java.lang.SuppressWarnings(value = _root_.scala.Array.apply[java.lang.String]("c", "d")(scala.reflect.ClassTag.apply[java.lang.String](classOf[java.lang.String]))) +JOtherTypes: +new Annot(value = 1, _, _) +new Annot(value = -2, _, _) +new Annot(_, m = false, _) +new Annot(_, m = true, _) +new Annot(_, _, n = 1.1) +new Annot(_, _, n = -2.1) \ No newline at end of file diff --git 
a/tests/run-macros/annot-arg-value-in-java/AnnoMacro.scala b/tests/run-macros/annot-arg-value-in-java/AnnoMacro.scala new file mode 100644 index 000000000000..7528653ef903 --- /dev/null +++ b/tests/run-macros/annot-arg-value-in-java/AnnoMacro.scala @@ -0,0 +1,11 @@ +import scala.quoted.* + +inline def showAnnots(inline c: String): Unit = ${ showAnnotsImpl('c) } + +def showAnnotsImpl(c: Expr[String])(using Quotes): Expr[Unit] = + import quotes.reflect.* + val al = Expr(Symbol.requiredClass(c.valueOrError).declaredMethods.flatMap(_.annotations.map(_.show))) + '{ + println($c + ":") + $al.foreach(println) + } \ No newline at end of file diff --git a/tests/run-macros/annot-arg-value-in-java/Annot.java b/tests/run-macros/annot-arg-value-in-java/Annot.java new file mode 100644 index 000000000000..2764bf8c8324 --- /dev/null +++ b/tests/run-macros/annot-arg-value-in-java/Annot.java @@ -0,0 +1,5 @@ +public @interface Annot { + int value() default 0; + boolean m() default false; + double n() default 0; +} \ No newline at end of file diff --git a/tests/run-macros/annot-arg-value-in-java/J.java b/tests/run-macros/annot-arg-value-in-java/J.java new file mode 100644 index 000000000000..d1234e080258 --- /dev/null +++ b/tests/run-macros/annot-arg-value-in-java/J.java @@ -0,0 +1,23 @@ +public class J { + @SuppressWarnings(value = "a") + public void f1() {} + @SuppressWarnings("b") + public void f2() {} + @SuppressWarnings({"c", "d"}) + public void f3() {} +} + +class JOtherTypes { + @Annot(1) + public void f1() {} + @Annot(-2) + public void f2() {} + @Annot(m = false) + public void f3() {} + @Annot(m = true) + public void f4() {} + @Annot(n = 1.1) + public void f5() {} + @Annot(n = -2.1) + public void f6() {} +} \ No newline at end of file diff --git a/tests/run-macros/annot-arg-value-in-java/S.scala b/tests/run-macros/annot-arg-value-in-java/S.scala new file mode 100644 index 000000000000..8b4369df97b1 --- /dev/null +++ b/tests/run-macros/annot-arg-value-in-java/S.scala @@ -0,0 
+1,5 @@ +// Display annotation arguments in Java + +@main def Test = + showAnnots("J") + showAnnots("JOtherTypes") diff --git a/tests/run-macros/enum-nat-macro/Macros_2.scala b/tests/run-macros/enum-nat-macro/Macros_2.scala index c533888718ba..a30cfa5729bc 100644 --- a/tests/run-macros/enum-nat-macro/Macros_2.scala +++ b/tests/run-macros/enum-nat-macro/Macros_2.scala @@ -25,6 +25,6 @@ import Nat.* case 0 => acc case n => inner[Succ[N]](n - 1, '{Succ($acc)}) - val i = int.valueOrError + val i = int.valueOrAbort require(i >= 0) inner[Zero.type](i, '{Zero}) diff --git a/tests/run-macros/f-interpolator-tests.check b/tests/run-macros/f-interpolator-tests.check index be5091b8ef39..4f2464fadae7 100644 --- a/tests/run-macros/f-interpolator-tests.check +++ b/tests/run-macros/f-interpolator-tests.check @@ -1,6 +1,7 @@ integer: 5 string: l 5, 6, hello +3.14 rounds to 3 5 6 Bob is 1 years old diff --git a/tests/run-macros/f-interpolator-tests.scala b/tests/run-macros/f-interpolator-tests.scala index ad712948a0d7..8c59ae19a187 100755 --- a/tests/run-macros/f-interpolator-tests.scala +++ b/tests/run-macros/f-interpolator-tests.scala @@ -1,15 +1,15 @@ -/** - * These tests test all the possible formats the f interpolator has to deal with. - * The tests are sorted by argument category as the arguments are on https://docs.oracle.com/javase/6/docs/api/java/util/Formatter.html#detail - * - * - * Some tests come from https://github.com/lampepfl/dotty/pull/3894/files - */ +/** These tests test all the possible formats the f interpolator has to deal with. 
+ * + * The tests are sorted by argument category as the arguments are on https://docs.oracle.com/javase/6/docs/api/java/util/Formatter.html#detail + * + * Some tests come from https://github.com/lampepfl/dotty/pull/3894/files + */ object Test { def main(args: Array[String]) = { println(f"integer: ${5}%d") println(f"string: ${"l"}%s") println(f"${5}%s, ${6}%d, ${"hello"}%s") + println(f"${3.14}%.2f rounds to ${3}%d") val x = 5 println(f"$x%d") @@ -22,6 +22,7 @@ object Test { dateArgsTests specificLiteralsTests argumentsTests + unitTests } def multilineTests = { @@ -198,5 +199,209 @@ object Test { def argumentsTests = { println(f"${"a"}%s ${"b"}%s % "false", + f"${b_true}%b" -> "true", + + f"${null}%b" -> "false", + f"${false}%b" -> "false", + f"${true}%b" -> "true", + f"${true && false}%b" -> "false", + f"${java.lang.Boolean.valueOf(false)}%b" -> "false", + f"${java.lang.Boolean.valueOf(true)}%b" -> "true", + + f"${null}%B" -> "FALSE", + f"${false}%B" -> "FALSE", + f"${true}%B" -> "TRUE", + f"${java.lang.Boolean.valueOf(false)}%B" -> "FALSE", + f"${java.lang.Boolean.valueOf(true)}%B" -> "TRUE", + + f"${"true"}%b" -> "true", + f"${"false"}%b"-> "false", + + // 'h' | 'H' (category: general) + // ----------------------------- + f"${null}%h" -> "null", + f"${f_zero}%h" -> "0", + f"${f_zero_-}%h" -> "80000000", + f"${s}%h" -> "4c01926", + + f"${null}%H" -> "NULL", + f"${s}%H" -> "4C01926", + + // 's' | 'S' (category: general) + // ----------------------------- + f"${null}%s" -> "null", + f"${null}%S" -> "NULL", + f"${s}%s" -> "Scala", + f"${s}%S" -> "SCALA", + f"${5}" -> "5", + f"${i}" -> "42", + f"${Symbol("foo")}" -> "Symbol(foo)", + + f"${Thread.State.NEW}" -> "NEW", + + // 'c' | 'C' (category: character) + // ------------------------------- + f"${120:Char}%c" -> "x", + f"${120:Byte}%c" -> "x", + f"${120:Short}%c" -> "x", + f"${120:Int}%c" -> "x", + f"${java.lang.Character.valueOf('x')}%c" -> "x", + f"${java.lang.Byte.valueOf(120:Byte)}%c" -> "x", + 
f"${java.lang.Short.valueOf(120:Short)}%c" -> "x", + f"${java.lang.Integer.valueOf(120)}%c" -> "x", + + f"${'x' : java.lang.Character}%c" -> "x", + f"${(120:Byte) : java.lang.Byte}%c" -> "x", + f"${(120:Short) : java.lang.Short}%c" -> "x", + f"${120 : java.lang.Integer}%c" -> "x", + + f"${"Scala"}%c" -> "S", + + // 'd' | 'o' | 'x' | 'X' (category: integral) + // ------------------------------------------ + f"${120:Byte}%d" -> "120", + f"${120:Short}%d" -> "120", + f"${120:Int}%d" -> "120", + f"${120:Long}%d" -> "120", + f"${60 * 2}%d" -> "120", + f"${java.lang.Byte.valueOf(120:Byte)}%d" -> "120", + f"${java.lang.Short.valueOf(120:Short)}%d" -> "120", + f"${java.lang.Integer.valueOf(120)}%d" -> "120", + f"${java.lang.Long.valueOf(120)}%d" -> "120", + f"${120 : java.lang.Integer}%d" -> "120", + f"${120 : java.lang.Long}%d" -> "120", + f"${BigInt(120)}%d" -> "120", + + f"${new java.math.BigInteger("120")}%d" -> "120", + + f"${4}%#10X" -> " 0X4", + + f"She is ${fff}%#s feet tall." -> "She is 4 feet tall.", + + f"Just want to say ${"hello, world"}%#s..." 
-> "Just want to say hello, world...", + + //{ implicit val strToShort: Conversion[String, Short] = java.lang.Short.parseShort ; f"${"120"}%d" } -> "120", + //{ implicit val strToInt = (s: String) => 42 ; f"${"120"}%d" } -> "42", + + // 'e' | 'E' | 'g' | 'G' | 'f' | 'a' | 'A' (category: floating point) + // ------------------------------------------------------------------ + f"${3.4f}%e" -> locally"3.400000e+00", + f"${3.4}%e" -> locally"3.400000e+00", + f"${3.4f : java.lang.Float}%e" -> locally"3.400000e+00", + f"${3.4 : java.lang.Double}%e" -> locally"3.400000e+00", + + f"${BigDecimal(3.4)}%e" -> locally"3.400000e+00", + + f"${new java.math.BigDecimal(3.4)}%e" -> locally"3.400000e+00", + + f"${3}%e" -> locally"3.000000e+00", + f"${3L}%e" -> locally"3.000000e+00", + + // 't' | 'T' (category: date/time) + // ------------------------------- + f"${cal}%TD" -> "05/26/12", + f"${cal.getTime}%TD" -> "05/26/12", + f"${cal.getTime.getTime}%TD" -> "05/26/12", + f"""${"1234"}%TD""" -> "05/26/12", + + // literals and arg indexes + f"%%" -> "%", + f" mind%n------%nmatter" -> + """| mind + |------ + |matter""".stripMargin.linesIterator.mkString(System.lineSeparator), + f"${i}%d % "42 42 9", + f"${7}%d % "7 7 9", + f"${7}%d %2$$d ${9}%d" -> "7 9 9", + + f"${null}%d % "null FALSE", + + f"${5: Any}" -> "5", + f"${5}%s% "55", + f"${3.14}%s,% locally"3.14,${"3.140000"}", + + f"z" -> "z" + ) + + for ((f, s) <- ss) assertEquals(s, f) + end `f interpolator baseline` + + def fIf = + val res = f"${if true then 2.5 else 2.5}%.2f" + val expected = locally"2.50" + assertEquals(expected, res) + + def fIfNot = + val res = f"${if false then 2.5 else 3.5}%.2f" + val expected = locally"3.50" + assertEquals(expected, res) + + // in Scala 2, [A >: Any] forced not to convert 3 to 3.0; Scala 3 harmonics should also respect lower bound. 
+ def fHeteroArgs() = + val res = f"${3.14}%.2f rounds to ${3}%d" + val expected = locally"${"3.14"} rounds to 3" + assertEquals(expected, res) } +object StringContextTestUtils: + private val decimalSeparator: Char = new DecimalFormat().getDecimalFormatSymbols().getDecimalSeparator() + private val numberPattern = """(\d+)\.(\d+.*)""".r + private def applyProperLocale(number: String): String = + val numberPattern(intPart, fractionalPartAndSuffix) = number + s"$intPart$decimalSeparator$fractionalPartAndSuffix" + + extension (sc: StringContext) + // Use this String interpolator to avoid problems with a locale-dependent decimal mark. + def locally(numbers: String*): String = + val numbersWithCorrectLocale = numbers.map(applyProperLocale) + sc.s(numbersWithCorrectLocale: _*) + + // Handles cases like locally"3.14" - it's prettier than locally"${"3.14"}". + def locally(): String = sc.parts.map(applyProperLocale).mkString diff --git a/tests/run-macros/from-type.check b/tests/run-macros/from-type.check index 9a5fe95f29aa..d8bff3300be2 100644 --- a/tests/run-macros/from-type.check +++ b/tests/run-macros/from-type.check @@ -1,7 +1,5 @@ Some(true) Some(false) -Some(1) -Some(2) Some(3) Some(4) Some(5) diff --git a/tests/run-macros/from-type/Test_2.scala b/tests/run-macros/from-type/Test_2.scala index 272ea7b5dac8..c14b164299f6 100644 --- a/tests/run-macros/from-type/Test_2.scala +++ b/tests/run-macros/from-type/Test_2.scala @@ -2,8 +2,9 @@ @main def Test: Unit = testValueOfType[true] testValueOfType[false] - testValueOfByte[1] - testValueOfShort[2] + // TODO support Byte and short literal types + // testValueOfType[1b] + // testValueOfType[2s] testValueOfType[3] testValueOfType[4] testValueOfType[5L] @@ -29,6 +30,3 @@ testValueOfType[Null] testValueOfType[Any] testValueOfType[Some[1]] - -transparent inline def testValueOfByte[B <: Byte] = testValueOfType[B] -transparent inline def testValueOfShort[S <: Short] = testValueOfType[S] diff --git 
a/tests/run-macros/gestalt-type-toolbox-reflect/Macro_1.scala b/tests/run-macros/gestalt-type-toolbox-reflect/Macro_1.scala index 1d085d3a2c15..5f73f7c84386 100644 --- a/tests/run-macros/gestalt-type-toolbox-reflect/Macro_1.scala +++ b/tests/run-macros/gestalt-type-toolbox-reflect/Macro_1.scala @@ -44,7 +44,7 @@ object TypeToolbox { inline def fieldIn[T](inline mem: String): String = ${fieldInImpl[T]('mem)} private def fieldInImpl[T: Type](mem: Expr[String])(using Quotes) : Expr[String] = { import quotes.reflect.* - val field = TypeTree.of[T].symbol.declaredField(mem.valueOrError) + val field = TypeTree.of[T].symbol.declaredField(mem.valueOrAbort) Expr(if field.isNoSymbol then "" else field.name) } @@ -58,7 +58,7 @@ object TypeToolbox { inline def methodIn[T](inline mem: String): Seq[String] = ${methodInImpl[T]('mem)} private def methodInImpl[T: Type](mem: Expr[String])(using Quotes) : Expr[Seq[String]] = { import quotes.reflect.* - Expr(TypeTree.of[T].symbol.declaredMethod(mem.valueOrError).map(_.name)) + Expr(TypeTree.of[T].symbol.declaredMethod(mem.valueOrAbort).map(_.name)) } inline def methodsIn[T]: Seq[String] = ${methodsInImpl[T]} @@ -70,7 +70,7 @@ object TypeToolbox { inline def method[T](inline mem: String): Seq[String] = ${methodImpl[T]('mem)} private def methodImpl[T: Type](mem: Expr[String])(using Quotes) : Expr[Seq[String]] = { import quotes.reflect.* - Expr(TypeTree.of[T].symbol.memberMethod(mem.valueOrError).map(_.name)) + Expr(TypeTree.of[T].symbol.memberMethod(mem.valueOrAbort).map(_.name)) } inline def methods[T]: Seq[String] = ${methodsImpl[T]} diff --git a/tests/run-macros/i10914a/Macro_1.scala b/tests/run-macros/i10914a/Macro_1.scala index 0a1b8088d04c..0fa34291ee14 100644 --- a/tests/run-macros/i10914a/Macro_1.scala +++ b/tests/run-macros/i10914a/Macro_1.scala @@ -24,7 +24,7 @@ object Dsl { import quotes.reflect.* val inputs = c match case '{ Container($list) } => - list.valueOrError + list.valueOrAbort case _ => report.throwError("Cannot 
Extract List from Container") '{ Entity(${Expr(inputs.head.value)}) } } diff --git a/tests/run-macros/i10914b/Macro_1.scala b/tests/run-macros/i10914b/Macro_1.scala index 680f189a3566..c5b375031846 100644 --- a/tests/run-macros/i10914b/Macro_1.scala +++ b/tests/run-macros/i10914b/Macro_1.scala @@ -13,7 +13,7 @@ object Dsl { //println("Getting Input: " + Printer.TreeStructure.show(c.asTerm)) val entExpr = c match case '{ Input($ent) } => ent - case _ => report.throwError("Cannot Extract Entity from Input") + case _ => report.errorAndAbort("Cannot Extract Entity from Input") '{ Container(List($entExpr)) } @@ -28,7 +28,7 @@ object Dsl { import quotes.reflect.* val inputs = c match case '{ Container($list) } => - list.valueOrError - case _ => report.throwError("Cannot Extract List from Container") + list.valueOrAbort + case _ => report.errorAndAbort("Cannot Extract List from Container") '{ Entity(${Expr(inputs.head.value)}) } } diff --git a/tests/run-macros/i11161/Macro_1.scala b/tests/run-macros/i11161/Macro_1.scala index c189eaac46d3..9918234d4659 100644 --- a/tests/run-macros/i11161/Macro_1.scala +++ b/tests/run-macros/i11161/Macro_1.scala @@ -8,4 +8,4 @@ private def showTypeImpl[T: Type](using Quotes): Expr[String] = case Some(ct) => '{ $ct.runtimeClass.getName } case None => import quotes.reflect._ - report.throwError(s"Unable to find a ClassTag for type ${Type.show[T]}", Position.ofMacroExpansion) + report.errorAndAbort(s"Unable to find a ClassTag for type ${Type.show[T]}", Position.ofMacroExpansion) diff --git a/tests/run-macros/i11856/Test_1.scala b/tests/run-macros/i11856/Test_1.scala index 2b31f3a4a570..0ec8952863fc 100644 --- a/tests/run-macros/i11856/Test_1.scala +++ b/tests/run-macros/i11856/Test_1.scala @@ -5,8 +5,8 @@ object Str: ${ evalConcat('a, 'b) } def evalConcat(expra: Expr[String], exprb: Expr[String])(using Quotes): Expr[String] = - val a = expra.valueOrError - val b = exprb.valueOrError + val a = expra.valueOrAbort + val b = exprb.valueOrAbort 
Expr(a ++ b) object I: @@ -14,6 +14,6 @@ object I: ${ evalConcat('a, 'b) } def evalConcat(expra: Expr[Int], exprb: Expr[Int])(using Quotes): Expr[Int] = - val a = expra.valueOrError - val b = exprb.valueOrError + val a = expra.valueOrAbort + val b = exprb.valueOrAbort Expr(a + b) \ No newline at end of file diff --git a/tests/run-macros/i12163.check b/tests/run-macros/i12163.check new file mode 100644 index 000000000000..c24232089f0f --- /dev/null +++ b/tests/run-macros/i12163.check @@ -0,0 +1,4 @@ +Test +Type +ABC +No label matched: sdofjsdifj diff --git a/tests/run-macros/i12163/Macro_1.scala b/tests/run-macros/i12163/Macro_1.scala new file mode 100644 index 000000000000..8625bd8ad608 --- /dev/null +++ b/tests/run-macros/i12163/Macro_1.scala @@ -0,0 +1,31 @@ +import scala.quoted.* +import scala.util.matching.Regex + +inline def showLabel(inline label: String): String = + ${ showLabelExpr('label) } + + +private def showLabelExpr(label: Expr[String])(using Quotes): Expr[String] = { + val suggestRegex: Regex = "(Type)([a-zA-Z]+)(Mapping)([a-zA-Z]+)".r + val docRegex: Regex = "(Test)(Mapping)([a-zA-Z]+)".r + val simpleRegex: Regex = "([a-zA-Z]+)(Mapping)([a-zA-Z]+)".r + + label.value match { + case Some(docRegex(doc, _, _)) => + Expr(doc) + + case Some(suggestRegex(suggest, suggestType, _, _)) => + Expr(suggest) + + case Some(simpleRegex(docType, _, _)) => + Expr(docType) + + case Some(value) => + Expr(s"No label matched: $value") + // quotes.reflect.report.throwError(s"No label matched: $value") + + case None => + Expr(s"Expected label to be a know string but was: ${label.show}") + // quotes.reflect.report.throwError(s"Expected label to be a know string but was: ${label.show}") + } +} diff --git a/tests/run-macros/i12163/Test_2.scala b/tests/run-macros/i12163/Test_2.scala new file mode 100644 index 000000000000..3af6e91cc489 --- /dev/null +++ b/tests/run-macros/i12163/Test_2.scala @@ -0,0 +1,6 @@ + +@main def Test(): Unit = + 
println(showLabel("TestMappingDocument")) + println(showLabel("TypeABCMappingDocument")) + println(showLabel("ABCMappingDocument")) + println(showLabel("sdofjsdifj")) \ No newline at end of file diff --git a/tests/run-macros/i12188/Macro_1.scala b/tests/run-macros/i12188/Macro_1.scala new file mode 100644 index 000000000000..619581802e35 --- /dev/null +++ b/tests/run-macros/i12188/Macro_1.scala @@ -0,0 +1,23 @@ +import scala.quoted.* + +object MatchTest { + inline def test[T](inline obj: T): String = ${testImpl('obj)} + + def testImpl[T](objExpr: Expr[T])(using qctx: Quotes, t: Type[T]): Expr[String] = { + import qctx.reflect.* + + val obj = objExpr.asTerm + val cases = obj.tpe.typeSymbol.children.map { child => + val subtype = TypeIdent(child) + val bind = Symbol.newBind(Symbol.spliceOwner, "c", Flags.EmptyFlags, subtype.tpe) + CaseDef(Bind(bind, Typed(Ref(bind), subtype)), None, Literal(StringConstant(subtype.show))) + } ::: { + CaseDef(Wildcard(), None, Literal(StringConstant("default"))) + } :: Nil + val bind = Symbol.newBind(Symbol.spliceOwner, "o", Flags.EmptyFlags, obj.tpe) + val result = Match(obj, cases) + val code = result.show(using Printer.TreeAnsiCode) + // println(code) + result.asExprOf[String] + } +} \ No newline at end of file diff --git a/tests/run-macros/i12188/Test_2.scala b/tests/run-macros/i12188/Test_2.scala new file mode 100644 index 000000000000..a3d0f0de58fb --- /dev/null +++ b/tests/run-macros/i12188/Test_2.scala @@ -0,0 +1,8 @@ +sealed trait P +case class PC1(a: String) extends P +case class PC2(b: Int) extends P + +@main def Test = + println(MatchTest.test(PC1("ab"): P)) + println(MatchTest.test(PC2(10): P)) + println(MatchTest.test(null: P)) diff --git a/tests/run-macros/i12392.check b/tests/run-macros/i12392.check new file mode 100644 index 000000000000..54c7f5d06c3f --- /dev/null +++ b/tests/run-macros/i12392.check @@ -0,0 +1 @@ +scala.Option[scala.Predef.String] to scala.Option[scala.Int] diff --git 
a/tests/run-macros/i12392/Macros_1.scala b/tests/run-macros/i12392/Macros_1.scala new file mode 100644 index 000000000000..f604cf815716 --- /dev/null +++ b/tests/run-macros/i12392/Macros_1.scala @@ -0,0 +1,24 @@ +import scala.quoted.* + +inline def testSubst: Unit = ${ testSubstImpl } + +def testSubstImpl(using Quotes): Expr[Unit] = { + import quotes.reflect.* + + val intTpe = TypeRepr.of[Int] + val strOptTpe = TypeRepr.of[Option[String]] + + val tpeArgs: List[TypeRepr] = strOptTpe match { + case AppliedType(_, args) => args + case _ => List.empty[TypeRepr] + } + + val intOptTpe = strOptTpe.substituteTypes( + tpeArgs.map(_.typeSymbol), List(intTpe)) + + val repr = s"${strOptTpe.show} to ${intOptTpe.show}" + + '{ + println(${Expr(repr)}) + } +} diff --git a/tests/run-macros/i12392/Test_2.scala b/tests/run-macros/i12392/Test_2.scala new file mode 100644 index 000000000000..6fb9577c8ef9 --- /dev/null +++ b/tests/run-macros/i12392/Test_2.scala @@ -0,0 +1 @@ +@main def Test = testSubst diff --git a/tests/run-macros/i12417/Macros_1.scala b/tests/run-macros/i12417/Macros_1.scala new file mode 100644 index 000000000000..e961ae6040a1 --- /dev/null +++ b/tests/run-macros/i12417/Macros_1.scala @@ -0,0 +1,13 @@ +import scala.deriving.Mirror +import scala.compiletime.{constValue, error} +import scala.quoted.* + +object TestMacro { + inline def test1[CASE_CLASS <: Product](using m: Mirror.ProductOf[CASE_CLASS]): String = + ${ code('m) } + + def code[CASE_CLASS <: Product: Type](m: Expr[Mirror.ProductOf[CASE_CLASS]])(using Quotes): Expr[String] = + m match + case '{ type t <: Tuple; $_ : Mirror { type MirroredElemLabels = `t` } } => + Expr(Type.valueOfTuple[t].toString) +} diff --git a/tests/run-macros/i12417/Test_2.scala b/tests/run-macros/i12417/Test_2.scala new file mode 100644 index 000000000000..0ebe6d377ba0 --- /dev/null +++ b/tests/run-macros/i12417/Test_2.scala @@ -0,0 +1,7 @@ +import scala.deriving.Mirror +import scala.compiletime.{constValue, error} + +object Test 
extends App { + case class A(x: String, y: Int) + assert(TestMacro.test1[A] == "Some((x,y))") +} diff --git a/tests/run-macros/i12417b.check b/tests/run-macros/i12417b.check new file mode 100644 index 000000000000..ac68840f16bf --- /dev/null +++ b/tests/run-macros/i12417b.check @@ -0,0 +1,6 @@ +Some(()) +Some((1)) +Some((1,2)) +Some((1,2,3)) +Some((1,2,3,4)) +Some((1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26)) diff --git a/tests/run-macros/i12417b/Macros_1.scala b/tests/run-macros/i12417b/Macros_1.scala new file mode 100644 index 000000000000..2b531e446bc1 --- /dev/null +++ b/tests/run-macros/i12417b/Macros_1.scala @@ -0,0 +1,18 @@ +import scala.deriving.Mirror +import scala.compiletime.{constValue, error} +import scala.quoted.* + +object TestMacro { + inline def test1: Unit = + ${ code() } + + def code()(using Quotes): Expr[Unit] = + '{ + println(${Expr(Type.valueOfTuple[EmptyTuple].toString)}) + println(${Expr(Type.valueOfTuple[1 *: EmptyTuple].toString)}) + println(${Expr(Type.valueOfTuple[(1, 2)].toString)}) + println(${Expr(Type.valueOfTuple[(1, 2, 3)].toString)}) + println(${Expr(Type.valueOfTuple[(1, 2, 3, 4)].toString)}) + println(${Expr(Type.valueOfTuple[(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26)].toString)}) + } +} diff --git a/tests/run-macros/i12417b/Test_2.scala b/tests/run-macros/i12417b/Test_2.scala new file mode 100644 index 000000000000..182c2c0e2f8a --- /dev/null +++ b/tests/run-macros/i12417b/Test_2.scala @@ -0,0 +1 @@ +@main def Test = TestMacro.test1 diff --git a/tests/run-macros/i13033.check b/tests/run-macros/i13033.check new file mode 100644 index 000000000000..d7e52d192eb1 --- /dev/null +++ b/tests/run-macros/i13033.check @@ -0,0 +1,3 @@ +0 +1 +-1 diff --git a/tests/run-macros/i13033/Macro_1.scala b/tests/run-macros/i13033/Macro_1.scala new file mode 100644 index 000000000000..8f2c040bad5b --- /dev/null +++ b/tests/run-macros/i13033/Macro_1.scala @@ -0,0 +1,17 @@ 
+import scala.quoted.* + +object Macro: + def nanImpl(using Quotes): Expr[Any] = + '{ Double.NaN } match + case '{ Double.NaN } => '{0} + inline def nan: Any = ${nanImpl} + + def positiveImpl(using Quotes): Expr[Any] = + '{ 0.0 } match + case '{ 0.0 } => '{1} + inline def positive: Any = ${positiveImpl} + + def negativeImpl(using Quotes): Expr[Any] = + '{ -0.0 } match + case '{ -0.0 } => '{-1} + inline def negative: Any = ${negativeImpl} diff --git a/tests/run-macros/i13033/Test_2.scala b/tests/run-macros/i13033/Test_2.scala new file mode 100644 index 000000000000..f972c8f67555 --- /dev/null +++ b/tests/run-macros/i13033/Test_2.scala @@ -0,0 +1,4 @@ +@main def Test(): Unit = + println(Macro.nan) + println(Macro.positive) + println(Macro.negative) diff --git a/tests/run-macros/i13183.check b/tests/run-macros/i13183.check new file mode 100644 index 000000000000..49dac665435e --- /dev/null +++ b/tests/run-macros/i13183.check @@ -0,0 +1,5 @@ +"\u001b" +"\u0000\u0001\u0003" +"ABC" +"\u0080\u0081翿" +"\t\n\r👋👌🥳" diff --git a/tests/run-macros/i13183/Macro_1.scala b/tests/run-macros/i13183/Macro_1.scala new file mode 100644 index 000000000000..4400a4497cb8 --- /dev/null +++ b/tests/run-macros/i13183/Macro_1.scala @@ -0,0 +1,5 @@ +import scala.quoted.* + +object Macro_1: + inline def stringLiteral(inline s: String): String = ${showExpr('s)} + def showExpr(s: Expr[?])(using Quotes): Expr[String] = Expr(s.show.toString) diff --git a/tests/run-macros/i13183/Test_2.scala b/tests/run-macros/i13183/Test_2.scala new file mode 100644 index 000000000000..ae5897eb95b0 --- /dev/null +++ b/tests/run-macros/i13183/Test_2.scala @@ -0,0 +1,6 @@ +@main def Test = + println(Macro_1.stringLiteral("\u001b")) // "\u001b" + println(Macro_1.stringLiteral("\u0000\u0001\u0003")) // "\u0000\u0001\u0003" + println(Macro_1.stringLiteral("A\u0042C")) // "ABC" + println(Macro_1.stringLiteral("\u0080\u0081\u7fff")) // "\u0080\u0081翿" + println(Macro_1.stringLiteral("\t\n\r👋👌🥳")) // "\t\n\r👋👌🥳" diff --git 
a/tests/run-macros/i13230.check b/tests/run-macros/i13230.check new file mode 100644 index 000000000000..01b246fb33f6 --- /dev/null +++ b/tests/run-macros/i13230.check @@ -0,0 +1 @@ +List((A,E), (B,E)) diff --git a/tests/run-macros/i13230/Macros_1.scala b/tests/run-macros/i13230/Macros_1.scala new file mode 100644 index 000000000000..424358e83b1a --- /dev/null +++ b/tests/run-macros/i13230/Macros_1.scala @@ -0,0 +1,12 @@ +import scala.quoted.* + +enum E: + case A, B + +inline def showEnumChildren = ${ showEnumChildrenExpr } + +def showEnumChildrenExpr(using Quotes) = + import quotes.reflect.* + val repr = TypeRepr.of[E] + Expr(TypeRepr.of[E].classSymbol.get.children.map(sym => (sym.name, repr.memberType(sym).show))) + diff --git a/tests/run-macros/i13230/Test_2.scala b/tests/run-macros/i13230/Test_2.scala new file mode 100644 index 000000000000..23f582ac9c94 --- /dev/null +++ b/tests/run-macros/i13230/Test_2.scala @@ -0,0 +1 @@ +@main def Test = println(showEnumChildren) diff --git a/tests/run-macros/i13947.check b/tests/run-macros/i13947.check new file mode 100644 index 000000000000..674afdfa11eb --- /dev/null +++ b/tests/run-macros/i13947.check @@ -0,0 +1,3 @@ +[] +[scala.Predef.String] +[scala.Int, scala.Float, scala.Long] diff --git a/tests/run-macros/i13947/Macro_1.scala b/tests/run-macros/i13947/Macro_1.scala new file mode 100644 index 000000000000..49a4971e553c --- /dev/null +++ b/tests/run-macros/i13947/Macro_1.scala @@ -0,0 +1,12 @@ +import scala.quoted.* + +inline def printTypeParams[A]: Unit = ${ printTypeParamsImpl[A] } + +def printTypeParamsImpl[A: Type](using Quotes): Expr[Unit] = { + import quotes.reflect.* + + val targs: List[TypeRepr] = TypeRepr.of[A].typeArgs + val debug = targs.map(_.show).mkString("[", ", ", "]") + + '{ println(${Expr(debug)}) } +} diff --git a/tests/run-macros/i13947/Test_2.scala b/tests/run-macros/i13947/Test_2.scala new file mode 100644 index 000000000000..009193cf12b8 --- /dev/null +++ b/tests/run-macros/i13947/Test_2.scala 
@@ -0,0 +1,4 @@ +@main def Test: Unit = + printTypeParams[scala.util.Random] + printTypeParams[Option[String]] + printTypeParams[Function2[Int, Float, Long]] diff --git a/tests/run-macros/i4734/Macro_1.scala b/tests/run-macros/i4734/Macro_1.scala index 30b12d2fce36..39d2457af9d5 100644 --- a/tests/run-macros/i4734/Macro_1.scala +++ b/tests/run-macros/i4734/Macro_1.scala @@ -6,7 +6,7 @@ object Macros { ${ unrolledForeachImpl('seq, 'f, 'unrollSize) } def unrolledForeachImpl(seq: Expr[IndexedSeq[Int]], f: Expr[Int => Unit], unrollSizeExpr: Expr[Int]) (using Quotes): Expr[Unit] = - unrolledForeachImpl(seq, f, unrollSizeExpr.valueOrError) + unrolledForeachImpl(seq, f, unrollSizeExpr.valueOrAbort) def unrolledForeachImpl(seq: Expr[IndexedSeq[Int]], f: Expr[Int => Unit], unrollSize: Int)(using Quotes): Expr[Unit] = '{ val size = ($seq).length diff --git a/tests/run-macros/i4735/Macro_1.scala b/tests/run-macros/i4735/Macro_1.scala index 54cdcb574c91..de96230c7a6c 100644 --- a/tests/run-macros/i4735/Macro_1.scala +++ b/tests/run-macros/i4735/Macro_1.scala @@ -14,7 +14,7 @@ object Macro { while (i < size) { println(" start loop") ${ - for (j <- new UnrolledRange(0, unrollSize.valueOrError)) '{ + for (j <- new UnrolledRange(0, unrollSize.valueOrAbort)) '{ val element = ($seq)(i + ${Expr(j)}) ${Expr.betaReduce('{$f(element)})} // or `($f)(element)` if `f` should not be inlined } diff --git a/tests/run-macros/i4803/Macro_1.scala b/tests/run-macros/i4803/Macro_1.scala index 9ff32194c708..cac39ac4640d 100644 --- a/tests/run-macros/i4803/Macro_1.scala +++ b/tests/run-macros/i4803/Macro_1.scala @@ -2,7 +2,7 @@ import scala.quoted.* object PowerMacro { def powerCode(x: Expr[Double], n: Expr[Long]) (using Quotes): Expr[Double] = - powerCode(x, n.valueOrError) + powerCode(x, n.valueOrAbort) def powerCode(x: Expr[Double], n: Long) (using Quotes): Expr[Double] = if (n == 0) '{1.0} diff --git a/tests/run-macros/i4803b/Macro_1.scala b/tests/run-macros/i4803b/Macro_1.scala index 
ab82cb100dc3..af782a153b73 100644 --- a/tests/run-macros/i4803b/Macro_1.scala +++ b/tests/run-macros/i4803b/Macro_1.scala @@ -2,7 +2,7 @@ import scala.quoted.* object PowerMacro { def powerCode(x: Expr[Double], n: Expr[Long]) (using Quotes): Expr[Double] = - powerCode(x, n.valueOrError) + powerCode(x, n.valueOrAbort) def powerCode(x: Expr[Double], n: Long) (using Quotes): Expr[Double] = if (n == 0) '{1.0} diff --git a/tests/run-macros/i4803c/Macro_1.scala b/tests/run-macros/i4803c/Macro_1.scala index 2058a0bc4b3b..eb4d25bdfaea 100644 --- a/tests/run-macros/i4803c/Macro_1.scala +++ b/tests/run-macros/i4803c/Macro_1.scala @@ -2,7 +2,7 @@ import scala.quoted.* object PowerMacro { def powerCode(x: Expr[Double], n: Expr[Long]) (using Quotes): Expr[Double] = - powerCode(x, n.valueOrError) + powerCode(x, n.valueOrAbort) def powerCode(x: Expr[Double], n: Long) (using Quotes): Expr[Double] = if (n == 0) '{1.0} diff --git a/tests/run-macros/i5188a/Macro_1.scala b/tests/run-macros/i5188a/Macro_1.scala index 252f5245c9ea..04782ac8edd8 100644 --- a/tests/run-macros/i5188a/Macro_1.scala +++ b/tests/run-macros/i5188a/Macro_1.scala @@ -2,5 +2,5 @@ import scala.quoted.* object Lib { inline def sum(inline args: Int*): Int = ${ impl('args) } - def impl(args: Expr[Seq[Int]]) (using Quotes): Expr[Int] = Expr(args.valueOrError.sum) + def impl(args: Expr[Seq[Int]]) (using Quotes): Expr[Int] = Expr(args.valueOrAbort.sum) } diff --git a/tests/run-macros/i6201/macro_1.scala b/tests/run-macros/i6201/macro_1.scala index 2b87f431f3d9..ca1fecde3c02 100644 --- a/tests/run-macros/i6201/macro_1.scala +++ b/tests/run-macros/i6201/macro_1.scala @@ -4,10 +4,10 @@ extension (inline x: String) inline def strip: String = ${ stripImpl('x) } def stripImpl(x: Expr[String])(using Quotes) : Expr[String] = - Expr(x.valueOrError.stripMargin) + Expr(x.valueOrAbort.stripMargin) inline def isHello(inline x: String): Boolean = ${ isHelloImpl('x) } def isHelloImpl(x: Expr[String])(using Quotes) : Expr[Boolean] = - 
if (x.valueOrError == "hello") Expr(true) else Expr(false) + if (x.valueOrAbort == "hello") Expr(true) else Expr(false) diff --git a/tests/run-macros/i6765-c/Macro_1.scala b/tests/run-macros/i6765-c/Macro_1.scala index c3401a7f53da..73fe899ed1a5 100644 --- a/tests/run-macros/i6765-c/Macro_1.scala +++ b/tests/run-macros/i6765-c/Macro_1.scala @@ -3,6 +3,6 @@ import scala.quoted.* inline def foo(inline n: Int) = ${fooImpl('n)} def fooImpl(n: Expr[Int])(using Quotes) = { - val res = Expr.ofList(List.tabulate(n.valueOrError)(i => Expr("#" + i))) + val res = Expr.ofList(List.tabulate(n.valueOrAbort)(i => Expr("#" + i))) '{ ${Expr(res.show)} + "\n" + $res.toString + "\n" } } diff --git a/tests/run-macros/i8671/Macro_1.scala b/tests/run-macros/i8671/Macro_1.scala index c00d901a66e3..2a7af715e2dc 100644 --- a/tests/run-macros/i8671/Macro_1.scala +++ b/tests/run-macros/i8671/Macro_1.scala @@ -19,10 +19,10 @@ object FileName { case Right(fn) => '{FileName.unsafe(${Expr(fn.name)})} // Or `Expr(fn)` if there is a `ToExpr[FileName]` case Left(_) => - report.throwError(s"$s is not a valid file name! It must not contain a /", fileName) + report.errorAndAbort(s"$s is not a valid file name! It must not contain a /", fileName) } case _ => - report.throwError(s"$fileName is not a valid file name. It must be a literal string", fileName) + report.errorAndAbort(s"$fileName is not a valid file name. 
It must be a literal string", fileName) } } diff --git a/tests/run-macros/inline-macro-staged-interpreter/Macro_1.scala b/tests/run-macros/inline-macro-staged-interpreter/Macro_1.scala index d8cf1c788851..148ca05f685e 100644 --- a/tests/run-macros/inline-macro-staged-interpreter/Macro_1.scala +++ b/tests/run-macros/inline-macro-staged-interpreter/Macro_1.scala @@ -7,7 +7,7 @@ object E { inline def eval[T](inline x: E[T]): T = ${ impl('x) } def impl[T: Type](expr: Expr[E[T]]) (using Quotes): Expr[T] = - expr.valueOrError.lift + expr.valueOrAbort.lift implicit def ev1[T: Type]: FromExpr[E[T]] = new FromExpr { // TODO use type class derivation def unapply(x: Expr[E[T]])(using Quotes) = (x match { diff --git a/tests/run-macros/inline-option/Macro_1.scala b/tests/run-macros/inline-option/Macro_1.scala index 4021658bc17a..abdedc679a5c 100644 --- a/tests/run-macros/inline-option/Macro_1.scala +++ b/tests/run-macros/inline-option/Macro_1.scala @@ -3,11 +3,11 @@ import scala.quoted.* object Macros { - def impl(opt: Expr[Option[Int]]) (using Quotes): Expr[Int] = opt.valueOrError match { + def impl(opt: Expr[Option[Int]]) (using Quotes): Expr[Int] = opt.valueOrAbort match { case Some(i) => Expr(i) case None => '{-1} } - def impl2(opt: Expr[Option[Option[Int]]]) (using Quotes): Expr[Int] = impl(Expr(opt.valueOrError.flatten)) + def impl2(opt: Expr[Option[Option[Int]]]) (using Quotes): Expr[Int] = impl(Expr(opt.valueOrAbort.flatten)) } diff --git a/tests/run-macros/inline-tuples-1/Macro_1.scala b/tests/run-macros/inline-tuples-1/Macro_1.scala index 031fb01cabe4..e0ce4f20419a 100644 --- a/tests/run-macros/inline-tuples-1/Macro_1.scala +++ b/tests/run-macros/inline-tuples-1/Macro_1.scala @@ -2,26 +2,26 @@ import scala.quoted.* object Macros { - def tup1(tup: Expr[Tuple1[Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup2(tup: Expr[Tuple2[Int, Int]]) (using Quotes): Expr[Int] = 
Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup3(tup: Expr[Tuple3[Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup4(tup: Expr[Tuple4[Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup5(tup: Expr[Tuple5[Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup6(tup: Expr[Tuple6[Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup7(tup: Expr[Tuple7[Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup8(tup: Expr[Tuple8[Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup9(tup: Expr[Tuple9[Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup10(tup: Expr[Tuple10[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup11(tup: Expr[Tuple11[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup12(tup: Expr[Tuple12[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup13(tup: Expr[Tuple13[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup14(tup: Expr[Tuple14[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, 
Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup15(tup: Expr[Tuple15[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup16(tup: Expr[Tuple16[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup17(tup: Expr[Tuple17[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup18(tup: Expr[Tuple18[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup19(tup: Expr[Tuple19[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup20(tup: Expr[Tuple20[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup21(tup: Expr[Tuple21[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) - def tup22(tup: Expr[Tuple22[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError.productIterator.map(_.asInstanceOf[Int]).sum) + def tup1(tup: Expr[Tuple1[Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + 
def tup2(tup: Expr[Tuple2[Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup3(tup: Expr[Tuple3[Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup4(tup: Expr[Tuple4[Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup5(tup: Expr[Tuple5[Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup6(tup: Expr[Tuple6[Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup7(tup: Expr[Tuple7[Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup8(tup: Expr[Tuple8[Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup9(tup: Expr[Tuple9[Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup10(tup: Expr[Tuple10[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup11(tup: Expr[Tuple11[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup12(tup: Expr[Tuple12[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup13(tup: Expr[Tuple13[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup14(tup: 
Expr[Tuple14[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup15(tup: Expr[Tuple15[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup16(tup: Expr[Tuple16[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup17(tup: Expr[Tuple17[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup18(tup: Expr[Tuple18[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup19(tup: Expr[Tuple19[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup20(tup: Expr[Tuple20[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup21(tup: Expr[Tuple21[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) + def tup22(tup: Expr[Tuple22[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort.productIterator.map(_.asInstanceOf[Int]).sum) } diff --git 
a/tests/run-macros/inline-tuples-2/Macro_1.scala b/tests/run-macros/inline-tuples-2/Macro_1.scala index 7027a1f2680a..263ae5b8f645 100644 --- a/tests/run-macros/inline-tuples-2/Macro_1.scala +++ b/tests/run-macros/inline-tuples-2/Macro_1.scala @@ -3,8 +3,8 @@ import scala.quoted.* object Macros { - def impl(tup: Expr[Tuple1[Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrError._1) + def impl(tup: Expr[Tuple1[Int]]) (using Quotes): Expr[Int] = Expr(tup.valueOrAbort._1) - def impl2(tup: Expr[Tuple1[Tuple1[Int]]]) (using Quotes): Expr[Int] = impl(Expr(tup.valueOrError._1)) + def impl2(tup: Expr[Tuple1[Tuple1[Int]]]) (using Quotes): Expr[Int] = impl(Expr(tup.valueOrAbort._1)) } diff --git a/tests/run-macros/inline-varargs-1/Macro_1.scala b/tests/run-macros/inline-varargs-1/Macro_1.scala index ed58d1479059..22580c3c42c2 100644 --- a/tests/run-macros/inline-varargs-1/Macro_1.scala +++ b/tests/run-macros/inline-varargs-1/Macro_1.scala @@ -2,5 +2,5 @@ import scala.quoted.* object Macros { - def sum(nums: Expr[Int]*) (using Quotes): Expr[Int] = Expr(nums.map(_.valueOrError).sum) + def sum(nums: Expr[Int]*) (using Quotes): Expr[Int] = Expr(nums.map(_.valueOrAbort).sum) } diff --git a/tests/run-macros/power-macro/Macro_1.scala b/tests/run-macros/power-macro/Macro_1.scala index 57d468fd7dc1..abb22c4f2c96 100644 --- a/tests/run-macros/power-macro/Macro_1.scala +++ b/tests/run-macros/power-macro/Macro_1.scala @@ -4,7 +4,7 @@ import scala.quoted.* inline def power(x: Double, inline n: Int) = ${ powerCode1('x, 'n) } private def powerCode1(using Quotes)(x: Expr[Double], n: Expr[Int]): Expr[Double] = - powerCode(x, n.valueOrError) + powerCode(x, n.valueOrAbort) private def powerCode(using Quotes)(x: Expr[Double], n: Int): Expr[Double] = if (n == 0) Expr(1.0) diff --git a/tests/run-macros/quote-and-splice/Macros_1.scala b/tests/run-macros/quote-and-splice/Macros_1.scala index e799318eac12..91c4f586d0e2 100644 --- a/tests/run-macros/quote-and-splice/Macros_1.scala +++ 
b/tests/run-macros/quote-and-splice/Macros_1.scala @@ -6,7 +6,7 @@ object Macros { def macro1Impl (using Quotes)= '{3} inline def macro2(inline p: Boolean) = ${ macro2Impl('p) } - def macro2Impl(p: Expr[Boolean]) (using Quotes)= if (p.valueOrError) '{3} else '{4} + def macro2Impl(p: Expr[Boolean]) (using Quotes)= if (p.valueOrAbort) '{3} else '{4} inline def macro3(n: Int) = ${ macro3Impl('n) } def macro3Impl(p: Expr[Int]) (using Quotes)= '{ 2 + $p } @@ -20,7 +20,7 @@ object Macros { inline def power(inline n: Int, x: Double) = ${ powerCode('n, 'x) } def powerCode(n: Expr[Int], x: Expr[Double]) (using Quotes): Expr[Double] = - powerCode(n.valueOrError, x) + powerCode(n.valueOrAbort, x) def powerCode(n: Int, x: Expr[Double]) (using Quotes): Expr[Double] = if (n == 0) '{1.0} diff --git a/tests/run-macros/quote-matcher-power/Macro_1.scala b/tests/run-macros/quote-matcher-power/Macro_1.scala index da21736282df..194744bcf85f 100644 --- a/tests/run-macros/quote-matcher-power/Macro_1.scala +++ b/tests/run-macros/quote-matcher-power/Macro_1.scala @@ -4,7 +4,7 @@ import scala.quoted.* object Macros { def power_s(x: Expr[Double], n: Expr[Int]) (using Quotes): Expr[Double] = - power_s(x, n.valueOrError) + power_s(x, n.valueOrAbort) def power_s(x: Expr[Double], n: Int) (using Quotes): Expr[Double] = if (n == 0) '{1.0} diff --git a/tests/run-macros/quote-matcher-string-interpolator-2/quoted_1.scala b/tests/run-macros/quote-matcher-string-interpolator-2/quoted_1.scala index f425cd25ae6d..f42935c5f6df 100644 --- a/tests/run-macros/quote-matcher-string-interpolator-2/quoted_1.scala +++ b/tests/run-macros/quote-matcher-string-interpolator-2/quoted_1.scala @@ -9,8 +9,8 @@ object Macros { private def impl(self: Expr[StringContext], args: Expr[Seq[String]])(using Quotes): Expr[String] = { (self, args) match { case ('{ StringContext(${Varargs(parts)}*) }, Varargs(args1)) => - val strParts = parts.map(_.valueOrError.reverse) - val strArgs = args1.map(_.valueOrError) + val strParts = 
parts.map(_.valueOrAbort.reverse) + val strArgs = args1.map(_.valueOrAbort) Expr(StringContext(strParts*).s(strArgs*)) case _ => ??? } diff --git a/tests/run-macros/quote-simple-macro/quoted_1.scala b/tests/run-macros/quote-simple-macro/quoted_1.scala index bd39612bf3ca..174164320fa0 100644 --- a/tests/run-macros/quote-simple-macro/quoted_1.scala +++ b/tests/run-macros/quote-simple-macro/quoted_1.scala @@ -2,5 +2,5 @@ import scala.quoted.* object Macros { inline def foo(inline i: Int, dummy: Int, j: Int): Int = ${ bar('i, 'j) } - def bar(x: Expr[Int], y: Expr[Int]) (using Quotes): Expr[Int] = '{ ${Expr(x.valueOrError)} + $y } + def bar(x: Expr[Int], y: Expr[Int]) (using Quotes): Expr[Int] = '{ ${Expr(x.valueOrAbort)} + $y } } diff --git a/tests/run-macros/quote-unrolled-foreach/quoted_1.scala b/tests/run-macros/quote-unrolled-foreach/quoted_1.scala index c84cd639bda7..bc974bce6d07 100644 --- a/tests/run-macros/quote-unrolled-foreach/quoted_1.scala +++ b/tests/run-macros/quote-unrolled-foreach/quoted_1.scala @@ -7,7 +7,7 @@ object Macro { ${unrolledForeachImpl('unrollSize, 'seq, 'f)} private def unrolledForeachImpl(unrollSizeExpr: Expr[Int], seq: Expr[Array[Int]], f: Expr[Int => Unit]) (using Quotes): Expr[Unit] = - unrolledForeachImpl(unrollSizeExpr.valueOrError, seq, f) + unrolledForeachImpl(unrollSizeExpr.valueOrAbort, seq, f) private def unrolledForeachImpl(unrollSize: Int, seq: Expr[Array[Int]], f: Expr[Int => Unit])(using Quotes): Expr[Unit] = '{ val size = $seq.length diff --git a/tests/run-macros/quote-whitebox/Macro_1.scala b/tests/run-macros/quote-whitebox/Macro_1.scala index a18fccbc166e..78b7c36ccce1 100644 --- a/tests/run-macros/quote-whitebox/Macro_1.scala +++ b/tests/run-macros/quote-whitebox/Macro_1.scala @@ -2,7 +2,7 @@ import scala.quoted.* object Macros { transparent inline def defaultOf(inline str: String): Any = ${ defaultOfImpl('str) } - def defaultOfImpl(str: Expr[String]) (using Quotes): Expr[Any] = str.valueOrError match { + def 
defaultOfImpl(str: Expr[String]) (using Quotes): Expr[Any] = str.valueOrAbort match { case "int" => '{1} case "string" => '{"a"} } diff --git a/tests/run-macros/quoted-ToExpr-derivation-macro/Derivation_1.scala b/tests/run-macros/quoted-ToExpr-derivation-macro/Derivation_1.scala index a080ad96109d..7ac7a341d6b1 100644 --- a/tests/run-macros/quoted-ToExpr-derivation-macro/Derivation_1.scala +++ b/tests/run-macros/quoted-ToExpr-derivation-macro/Derivation_1.scala @@ -62,7 +62,7 @@ object ToExprMaker { Expr.summon[T] match case Some(expr) => expr case None => - quotes.reflect.report.throwError(s"Could not find implicit ${Type.show[T]}") + quotes.reflect.report.errorAndAbort(s"Could not find implicit ${Type.show[T]}") private def switchExpr(scrutinee: Expr[Int], seq: List[Expr[ToExpr[_]]])(using Quotes): Expr[ToExpr[_]] = import quotes.reflect._ diff --git a/tests/run-macros/quoted-expr-block/quoted_1.scala b/tests/run-macros/quoted-expr-block/quoted_1.scala index 8d2403df4c10..910382c42de0 100644 --- a/tests/run-macros/quoted-expr-block/quoted_1.scala +++ b/tests/run-macros/quoted-expr-block/quoted_1.scala @@ -5,5 +5,5 @@ inline def replicate(inline times: Int, code: => Any) = ${replicateImpl('times, private def replicateImpl(times: Expr[Int], code: Expr[Any]) (using Quotes)= { @annotation.tailrec def loop(n: Int, accum: List[Expr[Any]]): List[Expr[Any]] = if (n > 0) loop(n - 1, code :: accum) else accum - Expr.block(loop(times.valueOrError, Nil), '{}) + Expr.block(loop(times.valueOrAbort, Nil), '{}) } diff --git a/tests/run-macros/quoted-liftable-derivation-macro-2/Derivation_1.scala b/tests/run-macros/quoted-liftable-derivation-macro-2/Derivation_1.scala index 7c199910652c..1b45835006c0 100644 --- a/tests/run-macros/quoted-liftable-derivation-macro-2/Derivation_1.scala +++ b/tests/run-macros/quoted-liftable-derivation-macro-2/Derivation_1.scala @@ -65,7 +65,7 @@ object Lft { Expr.summon[T] match case Some(expr) => expr case None => - 
quotes.reflect.report.throwError(s"Could not find implicit ${Type.show[T]}") + quotes.reflect.report.errorAndAbort(s"Could not find implicit ${Type.show[T]}") private def switchExpr(scrutinee: Expr[Int], seq: List[Expr[Lft[_]]])(using Quotes): Expr[Lft[_]] = import quotes.reflect._ diff --git a/tests/run-macros/quoted-liftable-derivation-macro/Derivation_1.scala b/tests/run-macros/quoted-liftable-derivation-macro/Derivation_1.scala index 059ebc16e446..8ecde7988f4e 100644 --- a/tests/run-macros/quoted-liftable-derivation-macro/Derivation_1.scala +++ b/tests/run-macros/quoted-liftable-derivation-macro/Derivation_1.scala @@ -52,7 +52,7 @@ object Lft { private def elemTypesLfts[X: Type](using Quotes): List[Expr[Lft[_]]] = Type.of[X] match case '[ head *: tail ] => - Expr.summon[Lft[head]].getOrElse(quotes.reflect.report.throwError(s"Could not find given Lft[${Type.show[head]}]")) :: elemTypesLfts[tail] + Expr.summon[Lft[head]].getOrElse(quotes.reflect.report.errorAndAbort(s"Could not find given Lft[${Type.show[head]}]")) :: elemTypesLfts[tail] case '[ EmptyTuple ] => Nil private def elemType[X: Type](ordinal: Int)(using Quotes): Type[_] = diff --git a/tests/run-macros/reflect-inline/assert_1.scala b/tests/run-macros/reflect-inline/assert_1.scala index 6c797be0fc85..95bf9dbf04b8 100644 --- a/tests/run-macros/reflect-inline/assert_1.scala +++ b/tests/run-macros/reflect-inline/assert_1.scala @@ -5,12 +5,12 @@ object api { ${ stripImpl('x) } private def stripImpl(x: Expr[String])(using Quotes): Expr[String] = - Expr(augmentString(x.valueOrError).stripMargin) + Expr(augmentString(x.valueOrAbort).stripMargin) inline def typeChecks(inline x: String): Boolean = ${ typeChecksImpl('{scala.compiletime.testing.typeChecks(x)}) } private def typeChecksImpl(b: Expr[Boolean])(using Quotes): Expr[Boolean] = { - if (b.valueOrError) Expr(true) else Expr(false) + if (b.valueOrAbort) Expr(true) else Expr(false) } } diff --git a/tests/run-macros/self.check b/tests/run-macros/self.check new 
file mode 100644 index 000000000000..925316ca5318 --- /dev/null +++ b/tests/run-macros/self.check @@ -0,0 +1,3 @@ +ExprType(TypeRef(TermRef(ThisType(TypeRef(NoPrefix,module class )),object scala),Int)) +MethodType(List(x), List(TypeRef(TermRef(TermRef(ThisType(TypeRef(NoPrefix,module class )),object scala),Predef),String)), TypeRef(TypeRef(ThisType(TypeRef(ThisType(TypeRef(NoPrefix,module class )),module class )),B),X)) +MethodType(List(x), List(TypeRef(TermRef(ThisType(TypeRef(NoPrefix,module class )),object scala),Int)), TypeRef(TypeRef(ThisType(TypeRef(ThisType(TypeRef(NoPrefix,module class )),module class )),B),X)) diff --git a/tests/run-macros/self/Macro_1.scala b/tests/run-macros/self/Macro_1.scala new file mode 100644 index 000000000000..98699f2ece14 --- /dev/null +++ b/tests/run-macros/self/Macro_1.scala @@ -0,0 +1,35 @@ +import scala.quoted.* + +trait A { + type X +} +trait B { self: A => + def foo(x: Int): X + def foo(x: String): X +} + +object Obj { + def foo: Int = 1 +} + +object Macros { + + inline def test(): String = ${ testImpl } + + private def testImpl(using Quotes) : Expr[String] = { + import quotes.reflect.* + val bTpe = TypeRepr.of[B] + val bSym = bTpe.classSymbol.get + val bMethSyms = bSym.methodMember("foo") // Used to throw a MissingType exception + val bMethTpes = bMethSyms.map(bTpe.memberType) + + // Make sure we didn't break member lookup on terms + val objTpe = TypeRepr.of[Obj.type] + val objSym = objTpe.termSymbol + val objMethSyms = objSym.methodMember("foo") + val objMethTpes = objMethSyms.map(objTpe.memberType) + + Expr((objMethTpes ++ bMethTpes).map(_.toString).sorted.mkString("\n")) + } + +} diff --git a/tests/run-macros/self/Test_2.scala b/tests/run-macros/self/Test_2.scala new file mode 100644 index 000000000000..f0a2a8a00ab8 --- /dev/null +++ b/tests/run-macros/self/Test_2.scala @@ -0,0 +1,8 @@ + +object Test { + + def main(args: Array[String]): Unit = { + println(Macros.test()) + } + +} diff --git 
a/tests/run-macros/simple-interpreter/Macro_1.scala b/tests/run-macros/simple-interpreter/Macro_1.scala index 98ce017e563c..324b03676185 100644 --- a/tests/run-macros/simple-interpreter/Macro_1.scala +++ b/tests/run-macros/simple-interpreter/Macro_1.scala @@ -34,7 +34,7 @@ object SchemaInterpreter { vSchema <- interpretSchema(vSchemaExpr) yield Schema.mapSchema(using kSchema, vSchema).asInstanceOf[Schema[T]] case _ => - None // could also hangle with `quotes.reflect.{error, throwError}` + None // could also hangle with `quotes.reflect.{error, errorAndAbort}` } } diff --git a/tests/run-macros/tasty-extractors-1.check b/tests/run-macros/tasty-extractors-1.check index b53e72c1f378..bebfa3d79dbc 100644 --- a/tests/run-macros/tasty-extractors-1.check +++ b/tests/run-macros/tasty-extractors-1.check @@ -40,37 +40,37 @@ OrType(ConstantType(IntConstant(1)), ConstantType(IntConstant(2))) Inlined(None, Nil, Match(Literal(StringConstant("a")), List(CaseDef(Literal(StringConstant("a")), None, Block(Nil, Literal(UnitConstant())))))) TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Unit") -Inlined(None, Nil, Match(Literal(StringConstant("b")), List(CaseDef(Bind("n", Ident("_")), None, Block(Nil, Literal(UnitConstant())))))) +Inlined(None, Nil, Match(Literal(StringConstant("b")), List(CaseDef(Bind("n", Wildcard()), None, Block(Nil, Literal(UnitConstant())))))) TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Unit") -Inlined(None, Nil, Match(Literal(StringConstant("c")), List(CaseDef(Bind("n", Typed(Ident("_"), TypeIdent("String"))), None, Block(Nil, Literal(UnitConstant())))))) +Inlined(None, Nil, Match(Literal(StringConstant("c")), List(CaseDef(Bind("n", Typed(Wildcard(), TypeIdent("String"))), None, Block(Nil, Literal(UnitConstant())))))) TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Unit") -Inlined(None, Nil, Match(Literal(StringConstant("e")), List(CaseDef(Ident("_"), None, Block(Nil, Literal(UnitConstant())))))) +Inlined(None, Nil, Match(Literal(StringConstant("e")), 
List(CaseDef(Wildcard(), None, Block(Nil, Literal(UnitConstant())))))) TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Unit") -Inlined(None, Nil, Match(Literal(StringConstant("f")), List(CaseDef(Typed(Ident("_"), TypeIdent("String")), None, Block(Nil, Literal(UnitConstant())))))) +Inlined(None, Nil, Match(Literal(StringConstant("f")), List(CaseDef(Typed(Wildcard(), TypeIdent("String")), None, Block(Nil, Literal(UnitConstant())))))) TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Unit") -Inlined(None, Nil, Match(Typed(Literal(StringConstant("g")), TypeIdent("Any")), List(CaseDef(Alternative(List(Typed(Ident("_"), TypeIdent("String")), Typed(Ident("_"), TypeIdent("Int")))), None, Block(Nil, Literal(UnitConstant())))))) +Inlined(None, Nil, Match(Typed(Literal(StringConstant("g")), TypeIdent("Any")), List(CaseDef(Alternatives(List(Typed(Wildcard(), TypeIdent("String")), Typed(Wildcard(), TypeIdent("Int")))), None, Block(Nil, Literal(UnitConstant())))))) TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Unit") -Inlined(None, Nil, Match(Literal(StringConstant("h")), List(CaseDef(Ident("_"), Some(Literal(BooleanConstant(false))), Block(Nil, Literal(UnitConstant())))))) +Inlined(None, Nil, Match(Literal(StringConstant("h")), List(CaseDef(Wildcard(), Some(Literal(BooleanConstant(false))), Block(Nil, Literal(UnitConstant())))))) TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Unit") -Inlined(None, Nil, Block(List(ValDef("a", Inferred(), Some(Literal(StringConstant("o"))))), Match(Literal(StringConstant("i")), List(CaseDef(Bind("a", Ident("_")), None, Block(Nil, Literal(UnitConstant()))))))) +Inlined(None, Nil, Block(List(ValDef("a", Inferred(), Some(Literal(StringConstant("o"))))), Match(Literal(StringConstant("i")), List(CaseDef(Bind("a", Wildcard()), None, Block(Nil, Literal(UnitConstant()))))))) TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Unit") -Inlined(None, Nil, Match(Ident("Nil"), List(CaseDef(Unapply(TypeApply(Select(Ident("List"), "unapplySeq"), 
List(Inferred())), Nil, List(Bind("a", Ident("_")), Bind("b", Ident("_")), Bind("c", Ident("_")))), None, Block(Nil, Literal(UnitConstant())))))) +Inlined(None, Nil, Match(Ident("Nil"), List(CaseDef(Unapply(TypeApply(Select(Ident("List"), "unapplySeq"), List(Inferred())), Nil, List(Bind("a", Wildcard()), Bind("b", Wildcard()), Bind("c", Wildcard()))), None, Block(Nil, Literal(UnitConstant())))))) TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Unit") -Inlined(None, Nil, Try(Literal(IntConstant(1)), List(CaseDef(Ident("_"), None, Block(Nil, Literal(UnitConstant())))), None)) +Inlined(None, Nil, Try(Literal(IntConstant(1)), List(CaseDef(Wildcard(), None, Block(Nil, Literal(UnitConstant())))), None)) OrType(ConstantType(IntConstant(1)), TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Unit")) Inlined(None, Nil, Try(Literal(IntConstant(2)), Nil, Some(Literal(UnitConstant())))) ConstantType(IntConstant(2)) -Inlined(None, Nil, Try(Literal(IntConstant(3)), List(CaseDef(Ident("_"), None, Block(Nil, Literal(UnitConstant())))), Some(Literal(UnitConstant())))) +Inlined(None, Nil, Try(Literal(IntConstant(3)), List(CaseDef(Wildcard(), None, Block(Nil, Literal(UnitConstant())))), Some(Literal(UnitConstant())))) OrType(ConstantType(IntConstant(3)), TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Unit")) Inlined(None, Nil, Literal(BooleanConstant(false))) diff --git a/tests/run-macros/tasty-extractors-2.check b/tests/run-macros/tasty-extractors-2.check index 1223a1fb71d9..5dd6af8d8b04 100644 --- a/tests/run-macros/tasty-extractors-2.check +++ b/tests/run-macros/tasty-extractors-2.check @@ -49,7 +49,7 @@ TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Unit") Inlined(None, Nil, Block(List(ClassDef("Foo", DefDef("", List(TermParamClause(Nil)), Inferred(), None), List(Apply(Select(New(Inferred()), ""), Nil)), None, List(DefDef("a", Nil, Inferred(), Some(Literal(IntConstant(0))))))), Literal(UnitConstant()))) TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Unit") -Inlined(None, 
Nil, Block(List(ClassDef("Foo", DefDef("", List(TermParamClause(Nil)), Inferred(), None), List(Apply(Select(New(Inferred()), ""), Nil), TypeSelect(Select(Ident("_root_"), "scala"), "Product"), TypeSelect(Select(Ident("_root_"), "scala"), "Serializable")), None, List(DefDef("hashCode", List(TermParamClause(Nil)), Inferred(), Some(Apply(Ident("_hashCode"), List(This(Some("Foo")))))), DefDef("equals", List(TermParamClause(List(ValDef("x$0", Inferred(), None)))), Inferred(), Some(Apply(Select(Apply(Select(This(Some("Foo")), "eq"), List(TypeApply(Select(Ident("x$0"), "$asInstanceOf$"), List(Inferred())))), "||"), List(Match(Ident("x$0"), List(CaseDef(Bind("x$0", Typed(Ident("_"), Inferred())), None, Apply(Select(Literal(BooleanConstant(true)), "&&"), List(Apply(Select(Ident("x$0"), "canEqual"), List(This(Some("Foo"))))))), CaseDef(Ident("_"), None, Literal(BooleanConstant(false))))))))), DefDef("toString", List(TermParamClause(Nil)), Inferred(), Some(Apply(Ident("_toString"), List(This(Some("Foo")))))), DefDef("canEqual", List(TermParamClause(List(ValDef("that", Inferred(), None)))), Inferred(), Some(TypeApply(Select(Ident("that"), "isInstanceOf"), List(Inferred())))), DefDef("productArity", Nil, Inferred(), Some(Literal(IntConstant(0)))), DefDef("productPrefix", Nil, Inferred(), Some(Literal(StringConstant("Foo")))), DefDef("productElement", List(TermParamClause(List(ValDef("n", Inferred(), None)))), Inferred(), Some(Match(Ident("n"), List(CaseDef(Ident("_"), None, Apply(Ident("throw"), List(Apply(Select(New(Inferred()), ""), List(Apply(Select(Ident("n"), "toString"), Nil)))))))))), DefDef("productElementName", List(TermParamClause(List(ValDef("n", Inferred(), None)))), Inferred(), Some(Match(Ident("n"), List(CaseDef(Ident("_"), None, Apply(Ident("throw"), List(Apply(Select(New(Inferred()), ""), List(Apply(Select(Ident("n"), "toString"), Nil)))))))))), DefDef("copy", List(TermParamClause(Nil)), Inferred(), Some(Apply(Select(New(Inferred()), ""), Nil))))), ValDef("Foo", 
TypeIdent("Foo$"), Some(Apply(Select(New(TypeIdent("Foo$")), ""), Nil))), ClassDef("Foo$", DefDef("", List(TermParamClause(Nil)), Inferred(), None), List(Apply(Select(New(Inferred()), ""), Nil), Inferred()), Some(ValDef("_", Singleton(Ident("Foo")), None)), List(DefDef("apply", List(TermParamClause(Nil)), Inferred(), Some(Apply(Select(New(Inferred()), ""), Nil))), DefDef("unapply", List(TermParamClause(List(ValDef("x$1", Inferred(), None)))), Singleton(Literal(BooleanConstant(true))), Some(Literal(BooleanConstant(true)))), DefDef("toString", Nil, Inferred(), Some(Literal(StringConstant("Foo")))), TypeDef("MirroredMonoType", TypeBoundsTree(Inferred(), Inferred())), DefDef("fromProduct", List(TermParamClause(List(ValDef("x$0", Inferred(), None)))), Inferred(), Some(Apply(Select(New(Inferred()), ""), Nil)))))), Literal(UnitConstant()))) +Inlined(None, Nil, Block(List(ClassDef("Foo", DefDef("", List(TermParamClause(Nil)), Inferred(), None), List(Apply(Select(New(Inferred()), ""), Nil), TypeSelect(Select(Ident("_root_"), "scala"), "Product"), TypeSelect(Select(Ident("_root_"), "scala"), "Serializable")), None, List(DefDef("hashCode", List(TermParamClause(Nil)), Inferred(), Some(Apply(Ident("_hashCode"), List(This(Some("Foo")))))), DefDef("equals", List(TermParamClause(List(ValDef("x$0", Inferred(), None)))), Inferred(), Some(Apply(Select(Apply(Select(This(Some("Foo")), "eq"), List(TypeApply(Select(Ident("x$0"), "$asInstanceOf$"), List(Inferred())))), "||"), List(Match(Ident("x$0"), List(CaseDef(Bind("x$0", Typed(Wildcard(), Inferred())), None, Apply(Select(Literal(BooleanConstant(true)), "&&"), List(Apply(Select(Ident("x$0"), "canEqual"), List(This(Some("Foo"))))))), CaseDef(Wildcard(), None, Literal(BooleanConstant(false))))))))), DefDef("toString", List(TermParamClause(Nil)), Inferred(), Some(Apply(Ident("_toString"), List(This(Some("Foo")))))), DefDef("canEqual", List(TermParamClause(List(ValDef("that", Inferred(), None)))), Inferred(), 
Some(TypeApply(Select(Ident("that"), "isInstanceOf"), List(Inferred())))), DefDef("productArity", Nil, Inferred(), Some(Literal(IntConstant(0)))), DefDef("productPrefix", Nil, Inferred(), Some(Literal(StringConstant("Foo")))), DefDef("productElement", List(TermParamClause(List(ValDef("n", Inferred(), None)))), Inferred(), Some(Match(Ident("n"), List(CaseDef(Wildcard(), None, Apply(Ident("throw"), List(Apply(Select(New(Inferred()), ""), List(Apply(Select(Ident("n"), "toString"), Nil)))))))))), DefDef("productElementName", List(TermParamClause(List(ValDef("n", Inferred(), None)))), Inferred(), Some(Match(Ident("n"), List(CaseDef(Wildcard(), None, Apply(Ident("throw"), List(Apply(Select(New(Inferred()), ""), List(Apply(Select(Ident("n"), "toString"), Nil)))))))))), DefDef("copy", List(TermParamClause(Nil)), Inferred(), Some(Apply(Select(New(Inferred()), ""), Nil))))), ValDef("Foo", TypeIdent("Foo$"), Some(Apply(Select(New(TypeIdent("Foo$")), ""), Nil))), ClassDef("Foo$", DefDef("", List(TermParamClause(Nil)), Inferred(), None), List(Apply(Select(New(Inferred()), ""), Nil), Inferred()), Some(ValDef("_", Singleton(Ident("Foo")), None)), List(DefDef("apply", List(TermParamClause(Nil)), Inferred(), Some(Apply(Select(New(Inferred()), ""), Nil))), DefDef("unapply", List(TermParamClause(List(ValDef("x$1", Inferred(), None)))), Singleton(Literal(BooleanConstant(true))), Some(Literal(BooleanConstant(true)))), DefDef("toString", Nil, Inferred(), Some(Literal(StringConstant("Foo")))), TypeDef("MirroredMonoType", TypeBoundsTree(Inferred(), Inferred())), DefDef("fromProduct", List(TermParamClause(List(ValDef("x$0", Inferred(), None)))), Inferred(), Some(Apply(Select(New(Inferred()), ""), Nil)))))), Literal(UnitConstant()))) TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Unit") Inlined(None, Nil, Block(List(ClassDef("Foo1", DefDef("", List(TermParamClause(List(ValDef("a", TypeIdent("Int"), None)))), Inferred(), None), List(Apply(Select(New(Inferred()), ""), Nil)), None, 
List(ValDef("a", Inferred(), None)))), Literal(UnitConstant()))) diff --git a/tests/run-macros/tasty-getfile-implicit-by-name-fun-context/Macro_1.scala b/tests/run-macros/tasty-getfile-implicit-by-name-fun-context/Macro_1.scala index bb478331aa64..e78f3b88fe87 100644 --- a/tests/run-macros/tasty-getfile-implicit-by-name-fun-context/Macro_1.scala +++ b/tests/run-macros/tasty-getfile-implicit-by-name-fun-context/Macro_1.scala @@ -9,6 +9,6 @@ object SourceFiles { def getThisFileImpl: Macro[String] = val q = quotes // Quotes is ByName and hence not stable (q stabilizes it) - Expr(q.reflect.SourceFile.current.jpath.getFileName.toString) + Expr(q.reflect.SourceFile.current.name) } diff --git a/tests/run-macros/tasty-macro-positions/quoted_1.scala b/tests/run-macros/tasty-macro-positions/quoted_1.scala index d53cd16088b9..c0765bf1cda3 100644 --- a/tests/run-macros/tasty-macro-positions/quoted_1.scala +++ b/tests/run-macros/tasty-macro-positions/quoted_1.scala @@ -30,6 +30,6 @@ object Macros { def posStr(using Quotes)(pos: quotes.reflect.Position): Expr[String] = { import quotes.reflect.* - Expr(s"${pos.sourceFile.jpath.getFileName.toString}:[${pos.start}..${pos.end}]") + Expr(s"${pos.sourceFile.name}:[${pos.start}..${pos.end}]") } } diff --git a/tests/run-macros/tasty-positioned/quoted_1.scala b/tests/run-macros/tasty-positioned/quoted_1.scala index e6664c6a1774..ff8a614a0ba9 100644 --- a/tests/run-macros/tasty-positioned/quoted_1.scala +++ b/tests/run-macros/tasty-positioned/quoted_1.scala @@ -13,7 +13,7 @@ object Positioned { import quotes.reflect.{Position as Pos, *} val pos = Pos.ofMacroExpansion - val path = Expr(pos.sourceFile.jpath.toString) + val path = Expr(pos.sourceFile.getJPath.get.toString) val start = Expr(pos.start) val end = Expr(pos.end) val startLine = Expr(pos.startLine) diff --git a/tests/run-macros/tasty-simplified.check b/tests/run-macros/tasty-simplified.check index 55e725e10f17..41ae9120aa03 100644 --- a/tests/run-macros/tasty-simplified.check +++ 
b/tests/run-macros/tasty-simplified.check @@ -1,4 +1,4 @@ -Functor[Const[scala.collection.immutable.List[Dummy]]] -Functor[Const[scala.Int]] -Functor[Const[Dummy]] -Functor[Const[scala.Option[Dummy]]] +Functor[[A >: scala.Nothing <: scala.Any] => scala.collection.immutable.List[A]] +Unapply[[F >: scala.Nothing <: [_$9 >: scala.Nothing <: scala.Any] => scala.Any] => Functor[F], Wrap[scala.Int]] +Unapply[[F >: scala.Nothing <: [_$9 >: scala.Nothing <: scala.Any] => scala.Any] => Functor[F], Wrap[Dummy]] +Functor[[A >: scala.Nothing <: scala.Any] => scala.Option[A]] diff --git a/tests/run-macros/tasty-string-interpolation-reporter-test/Macros_1.scala b/tests/run-macros/tasty-string-interpolation-reporter-test/Macros_1.scala index 14d43bd98657..219cd5cfbebf 100644 --- a/tests/run-macros/tasty-string-interpolation-reporter-test/Macros_1.scala +++ b/tests/run-macros/tasty-string-interpolation-reporter-test/Macros_1.scala @@ -52,7 +52,7 @@ object Macro { private def fooCore(parts: Seq[Expr[String]], args: Seq[Expr[Any]], reporter: Reporter)(using Quotes): Expr[String] = { for ((part, idx) <- parts.zipWithIndex) { - val v = part.valueOrError + val v = part.valueOrAbort if (v.contains("#")) reporter.errorOnPart("Cannot use #", idx) } diff --git a/tests/run-macros/type-show/Macro_1.scala b/tests/run-macros/type-show/Macro_1.scala index 57a74a3a24f9..3de9fe579244 100644 --- a/tests/run-macros/type-show/Macro_1.scala +++ b/tests/run-macros/type-show/Macro_1.scala @@ -1,7 +1,13 @@ import scala.quoted.* object TypeToolbox { - inline def show[A]: String = ${ showImpl[A] } - private def showImpl[A: Type](using Quotes) : Expr[String] = + inline def show[A <: AnyKind]: String = ${ showImpl[A] } + private def showImpl[A <: AnyKind: Type](using Quotes) : Expr[String] = Expr(Type.show[A]) + + inline def showStructure[A <: AnyKind]: String = ${ showStructureImpl[A] } + private def showStructureImpl[A <: AnyKind](using q: Quotes, a: Type[A]) : Expr[String] = { + import q.reflect._ + 
Expr(TypeRepr.of[A].show(using Printer.TypeReprStructure)) + } } diff --git a/tests/run-macros/type-show/Test_2.scala b/tests/run-macros/type-show/Test_2.scala index ee33205413f1..d741a426cd69 100644 --- a/tests/run-macros/type-show/Test_2.scala +++ b/tests/run-macros/type-show/Test_2.scala @@ -8,6 +8,11 @@ object Test { assert(show[Int] == "scala.Int") assert(show[Int => Int] == "scala.Function1[scala.Int, scala.Int]") assert(show[(Int, String)] == "scala.Tuple2[scala.Int, scala.Predef.String]") + assert(show[[X] =>> X match { case Int => Int }] == + """[X >: scala.Nothing <: scala.Any] => X match { + | case scala.Int => scala.Int + |}""".stripMargin) + assert(showStructure[[X] =>> X match { case Int => Int }] == """TypeLambda(List(X), List(TypeBounds(TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Nothing"), TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Any"))), MatchType(TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Any"), ParamRef(binder, 0), List(MatchCase(TypeRef(TermRef(ThisType(TypeRef(NoPrefix(), "")), "scala"), "Int"), TypeRef(TermRef(ThisType(TypeRef(NoPrefix(), "")), "scala"), "Int")))))""") // TODO: more complex types: // - implicit function types diff --git a/tests/run-macros/xml-interpolation-3/XmlQuote_1.scala b/tests/run-macros/xml-interpolation-3/XmlQuote_1.scala index 5f3505be3fe7..65d32d9bfbe8 100644 --- a/tests/run-macros/xml-interpolation-3/XmlQuote_1.scala +++ b/tests/run-macros/xml-interpolation-3/XmlQuote_1.scala @@ -12,7 +12,7 @@ object XmlQuote { } def impl(receiver: Expr[StringContext], args: Expr[Seq[Any]])(using Quotes): Expr[Xml] = { - val string = receiver.valueOrError.parts.mkString("??") + val string = receiver.valueOrAbort.parts.mkString("??") '{new Xml(${Expr(string)}, $args.toList)} } } diff --git a/tests/run-staging/i11162.scala b/tests/run-staging/i11162.scala new file mode 100644 index 000000000000..81db85f24274 --- /dev/null +++ b/tests/run-staging/i11162.scala @@ -0,0 +1,15 @@ +import scala.quoted.* + +object Test { 
+ + given staging.Compiler = staging.Compiler.make(getClass.getClassLoader) + + def main(args: Array[String]): Unit = + staging.run { + '{ foo() } + } + + inline def foo(): Unit = ${ fooExpr() } + + private def fooExpr()(using Quotes): Expr[Unit] = '{ println("foo") } +} diff --git a/tests/run-staging/i4730.scala b/tests/run-staging/i4730.scala index 4c3d9b23cb10..cc9b6d52fb9b 100644 --- a/tests/run-staging/i4730.scala +++ b/tests/run-staging/i4730.scala @@ -1,7 +1,8 @@ import scala.quoted.* import scala.quoted.staging.* -object Test { + +package i4730: given Compiler = Compiler.make(getClass.getClassLoader) def ret(using Quotes): Expr[Int => Int] = '{ (x: Int) => ${ @@ -9,16 +10,12 @@ object Test { Expr(z) } } - def main(args: Array[String]): Unit = { - scala.mytest.myTest() - } -} package scala { package mytest { def myTest()(using Compiler) = { try { - run(Test.ret).apply(10) + run(i4730.ret).apply(10) throw new Exception } catch { case ex: Exception if ex.getClass.getName == "scala.quoted.runtime.impl.ScopeException" => @@ -27,3 +24,9 @@ package scala { } } } +object Test { + import i4730.given + def main(args: Array[String]): Unit = { + scala.mytest.myTest() + } +} diff --git a/tests/run-staging/i6992/Macro_1.scala b/tests/run-staging/i6992/Macro_1.scala index 09cacfd9e1eb..d0a670c5827a 100644 --- a/tests/run-staging/i6992/Macro_1.scala +++ b/tests/run-staging/i6992/Macro_1.scala @@ -2,19 +2,20 @@ import scala.quoted.* import scala.quoted.staging.* +package macros: -object macros { - inline def mcr(x: => Any): Any = ${mcrImpl('x)} + object macros { + inline def mcr(x: => Any): Any = ${mcrImpl('x)} - class Foo { val x = 10 } + class Foo { val x = 10 } - def mcrImpl(body: Expr[Any])(using ctx: Quotes): Expr[Any] = - MyTest.mcrImpl(body) -} + def mcrImpl(body: Expr[Any])(using ctx: Quotes): Expr[Any] = + MyTest.mcrImpl(body) + } package scala { object MyTest { - import macros.* + import macros.macros.* given Compiler = Compiler.make(getClass.getClassLoader) diff 
--git a/tests/run-staging/i6992/Test_2.scala b/tests/run-staging/i6992/Test_2.scala index 01ce6977c72c..4e814dcc7de8 100644 --- a/tests/run-staging/i6992/Test_2.scala +++ b/tests/run-staging/i6992/Test_2.scala @@ -1,4 +1,4 @@ -import macros.* +import macros.macros.* object Test { val foo = new Foo diff --git a/tests/run/1938-2.scala b/tests/run/1938-2.scala index 32e4c4518b96..143a94705455 100644 --- a/tests/run/1938-2.scala +++ b/tests/run/1938-2.scala @@ -1,6 +1,6 @@ object ProdNonEmpty { def _1: Int = 0 - def _2: String = "???" // Slight variation with scalac: this test passes + def _2: String = "???" // Slight variation with nsc: this test passes // with ??? here. I think dotty behavior is fine // according to the spec given that methods involved // in pattern matching should be pure. diff --git a/tests/run/9416.scala b/tests/run/9416.scala new file mode 100644 index 000000000000..fbc110a948a3 --- /dev/null +++ b/tests/run/9416.scala @@ -0,0 +1,20 @@ +trait Base[D <: Domain] { + def f(pp: Extract[D]): Unit +} + +class Ext extends Base[ExtDomain] { + def f(pp: String) = println(pp.length) +} + +type Domain +type DomainImpl[T] <: Domain +type ExtDomain = DomainImpl[String] + +type Extract[X] = X match { case DomainImpl[t] => t } + +object Test { + def main(args: Array[String]): Unit = { + def f[M <: Domain](rc: Base[M], v: Extract[M]): Unit = rc.f(v) + f(new Ext, "foo") + } +} diff --git a/tests/run/StringConcat.check b/tests/run/StringConcat.check new file mode 100644 index 000000000000..f7c52a0ffece Binary files /dev/null and b/tests/run/StringConcat.check differ diff --git a/tests/run/StringConcat.scala b/tests/run/StringConcat.scala new file mode 100644 index 000000000000..774147ba1303 --- /dev/null +++ b/tests/run/StringConcat.scala @@ -0,0 +1,79 @@ +@main def Test() = { + + // This should generally obey 15.18.1. 
of the JLS (String Concatenation Operator +) + def concatenatingVariousTypes(): String = { + val str: String = "some string" + val sb: StringBuffer = new StringBuffer("some stringbuffer") + val cs: CharSequence = java.nio.CharBuffer.allocate(50).append("charsequence") + val i: Int = 123456789 + val s: Short = 345 + val b: Byte = 12 + val z: Boolean = true + val f: Float = 3.14f + val j: Long = 98762147483647L + val d: Double = 3.1415d + + "String " + str + "\n" + + "StringBuffer " + sb + "\n" + + "CharSequence " + cs + "\n" + + "Int " + i + "\n" + + "Short " + s + "\n" + + "Byte " + b + "\n" + + "Boolean " + z + "\n" + + "Float " + f + "\n" + + "Long " + j + "\n" + + "Double " + d + "\n" + } + // The characters `\u0001` and `\u0002` play a special role in `StringConcatFactory` + def concatenationInvolvingSpecialCharacters(): String = { + val s1 = "Qux" + val s2 = "Quux" + + s"Foo \u0001 $s1 Bar \u0002 $s2 Baz" + } + // Concatenation involving more than 200 elements + def largeConcatenation(): String = { + val s00 = "s00" + val s01 = "s01" + val s02 = "s02" + val s03 = "s03" + val s04 = "s04" + val s05 = "s05" + val s06 = "s06" + val s07 = "s07" + val s08 = "s08" + + // 24 rows follow + ((s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n") + + (s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + 
s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n")) + + ((s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n") + + (s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n" + + s00 + "," + s01 + "," + s02 + "," + s03 + "," + s04 + "," + s05 + "," + s06 + "," + s07 + "," + s08 + "\n")) + } + println("----------") + println(concatenatingVariousTypes()) + println("----------") + 
println(concatenationInvolvingSpecialCharacters()) + println("----------") + println(largeConcatenation()) + println("----------") +} diff --git a/tests/run/absoverride.scala b/tests/run/absoverride.scala index 0301498935a0..6e3c7318c487 100644 --- a/tests/run/absoverride.scala +++ b/tests/run/absoverride.scala @@ -34,7 +34,7 @@ class Iter2(s: String) extends StringIterator(s) with SyncIterator with LoggedIterator; object Test { def main(args: Array[String]): Unit = { - class Iter extends StringIterator(args(0)) with RichIterator with SyncIterator with LoggedIterator + class Iter extends StringIterator("jvm") with RichIterator with SyncIterator with LoggedIterator val iter = new Iter iter foreach Console.println } diff --git a/tests/run/array-erasure.scala b/tests/run/array-erasure.scala index 264fe46c36e5..1ce727337a4f 100644 --- a/tests/run/array-erasure.scala +++ b/tests/run/array-erasure.scala @@ -65,5 +65,11 @@ object Test { arr4(x) arr5(x) arr6(x) + + + val str: Any = "" + assert(!str.isInstanceOf[Array[?]]) + assert(!str.isInstanceOf[Array[Array[?]]]) + assert(!str.isInstanceOf[Array[? 
<: Array[?]]]) } } diff --git a/tests/run/boehm-berarducci.check b/tests/run/boehm-berarducci.check new file mode 100644 index 000000000000..03bc5c4710b0 --- /dev/null +++ b/tests/run/boehm-berarducci.check @@ -0,0 +1,8 @@ +List(1, 2) +List(2) +1 +false +List(1, 2) +List(2) +1 +false diff --git a/tests/run/boehm-berarducci.scala b/tests/run/boehm-berarducci.scala new file mode 100644 index 000000000000..fb75268199c7 --- /dev/null +++ b/tests/run/boehm-berarducci.scala @@ -0,0 +1,75 @@ +/* Boehm-Berarducci encoding of lists in polymorphic typed lambda calculus */ +type Op[T, C] = T => C => C +type List[T] = [C] => Op[T, C] => C => C + +def nil[T]: List[T] = + [C] => (op: Op[T, C]) => (s: C) => s + +def cons[T](hd: T, tl: List[T]): List[T] = + [C] => (op: Op[T, C]) => (s: C) => op(hd)(tl(op)(s)) + +/** A trait that can be instantiated with a list decomposition `ListView` */ +trait ListOps: + type ListView[T] + def decompose[T](xs: List[T]): ListView[T] + def fst[T](v: ListView[T]): T + def snd[T](v: ListView[T]): List[T] + def isPair[T](v: ListView[T]): Boolean + + // Some operations and tests that operate with the decomposition + def head[T](xs: List[T]): T = fst(decompose[T](xs)) + def tail[T](xs: List[T]): List[T] = snd(decompose[T](xs)) + def isEmpty[T](xs: List[T]): Boolean = !isPair(decompose[T](xs)) + + def toScalaList[T](xs: List[T]): scala.List[T] = + xs[scala.List[T]](h => t => h :: t)(Nil) + + def print[T](xs: List[T]): Unit = + println(toScalaList[T](xs)) + + def test() = + val xs: List[Int] = cons(1, cons(2, nil)) + print[Int](xs) + print[Int](tail(xs)) + println(head[Int](xs)) + println(isEmpty[Int](xs)) +end ListOps + +// A ListView based on regular Scala classes - options of pairs +object ListOps1 extends ListOps: + type ListView[T] = Option[(T, List[T])] + + def push[T](h: T, v: ListView[T]): ListView[T] = v match + case Some((h2, xs2)) => Some(h, cons[T](h2, xs2)) + case None => Some(h, nil[T]) + + def decompose[T](xs: List[T]): ListView[T] = + 
xs[Option[(T, List[T])]](h => c => push(h, c))(None) + + def fst[T](v: ListView[T]): T = v.get._1 + def snd[T](v: ListView[T]): List[T] = v.get._2 + def isPair[T](v: ListView[T]): Boolean = v.isDefined + +// A ListView based on (non-recursive) Church encodings in polymorphic lambda calculus +object ListOps2 extends ListOps: + type ListView[T] = [K] => (T => List[T] => K) => (() => K) => K + + def consView[T](x: T, xs: List[T]): ListView[T] = + [K] => (caseCons: T => List[T] => K) => (caseNil: () => K) => caseCons(x)(xs) + + def nilView[T]: ListView[T] = + [K] => (caseCons: T => List[T] => K) => (caseNil: () => K) => caseNil() + + def push[T](h: T)(c: ListView[T]): ListView[T] = + c[ListView[T]](h2 => xs2 => consView(h, cons[T](h2, xs2)))(() => consView(h, nil[T])) + + def decompose[T](xs: List[T]): ListView[T] = + xs[ListView[T]](push)(nilView) + + def fst[T](v: ListView[T]): T = v(hd => tl => hd)(() => ???) + def snd[T](v: ListView[T]): List[T] = v(hd => tl => tl)(() => ???) + def isPair[T](v: ListView[T]): Boolean = v(hd => tl => true)(() => false) + +@main def Test() = + ListOps1.test() + ListOps2.test() diff --git a/tests/run/enum-values.scala b/tests/run/enum-values.scala index b6ac3d2f9bce..ecc356c12b90 100644 --- a/tests/run/enum-values.scala +++ b/tests/run/enum-values.scala @@ -50,7 +50,7 @@ enum ClassOnly: // this should still generate the `ordinal` and `fromOrdinal` co s"$c does not `eq` companion.fromOrdinal(${c.ordinal}), got ${companion.fromOrdinal(c.ordinal)}") def notFromOrdinal[T <: AnyRef & reflect.Enum](companion: FromOrdinal[T], compare: T): Unit = - cantFind(companion, compare.ordinal) + cantFind(companion.asInstanceOf[FromOrdinal[Any]], compare.ordinal) def cantFind[T](companion: FromOrdinal[T], ordinal: Int): Unit = try diff --git a/tests/run/experimentalRun.scala b/tests/run/experimentalRun.scala new file mode 100644 index 000000000000..2d93c15c606f --- /dev/null +++ b/tests/run/experimentalRun.scala @@ -0,0 +1,6 @@ +import 
scala.annotation.experimental + +@experimental +def f = 3 + +@experimental @main def Test = f diff --git a/tests/run/exports.scala b/tests/run/exports.scala index 02862f2d0a39..2c03c52c25b4 100644 --- a/tests/run/exports.scala +++ b/tests/run/exports.scala @@ -7,7 +7,7 @@ object Test extends App { class Printer { def print() = println("printing") object cfg extends Config - given config: Config with {} + given config: Config() } class Scanner { diff --git a/tests/run/extension-specificity2.scala b/tests/run/extension-specificity2.scala new file mode 100644 index 000000000000..eeaad80a3687 --- /dev/null +++ b/tests/run/extension-specificity2.scala @@ -0,0 +1,37 @@ +trait Foo[F[_]]: + extension [A](fa: F[A]) + def foo[B](fb: F[B]): Int + +def test1 = + // Simplified from https://github.com/typelevel/spotted-leopards/issues/2 + given listFoo: Foo[List] with + extension [A](fa: List[A]) + def foo[B](fb: List[B]): Int = 1 + + given functionFoo[T]: Foo[[A] =>> T => A] with + extension [A](fa: T => A) + def foo[B](fb: T => B): Int = 2 + + val x = List(1, 2).foo(List(3, 4)) + assert(x == 1, x) + +def test2 = + // This test case would fail if we used `wildApprox` on the method types + // instead of using the correct typer state. 
+ trait Bar1[A]: + extension (x: A => A) def bar(y: A): Int + trait Bar2: + extension (x: Int => 1) def bar(y: Int): Int + + given bla1[T]: Bar1[T] with + extension (x: T => T) def bar(y: T): Int = 1 + given bla2: Bar2 with + extension (x: Int => 1) def bar(y: Int): Int = 2 + + val f: Int => 1 = x => 1 + val x = f.bar(1) + assert(x == 2, x) + +@main def Test = + test1 + test2 diff --git a/tests/run/extmethod-overload.scala b/tests/run/extmethod-overload.scala index c13a2a99aaa8..4a9fe125a8a9 100644 --- a/tests/run/extmethod-overload.scala +++ b/tests/run/extmethod-overload.scala @@ -61,7 +61,7 @@ object Test extends App { extension [T](xs: List[T]) def +++ (ys: List[T]): List[T] = xs ++ ys ++ ys extension [T](xs: List[T]) def +++ (ys: Iterator[T]): List[T] = xs ++ ys ++ ys } - given Bar: Foo with {} + given Bar: Foo() assert((1 |+| 2) == 3) assert((1 |+| "2") == 2) diff --git a/tests/run/extra-implicits.scala b/tests/run/extra-implicits.scala index 72787b92e9a4..62ff862c709f 100644 --- a/tests/run/extra-implicits.scala +++ b/tests/run/extra-implicits.scala @@ -1,8 +1,8 @@ case class A(x: String) case class B(x: String) -given a1: A("default") with {} -given b1: B("default") with {} +given a1: A("default") +given b1: B("default") val a2 = A("explicit") val b2 = B("explicit") diff --git a/tests/run/forwardCompat-nestedSumMirror/Lib1_1_r3.0.scala b/tests/run/forwardCompat-nestedSumMirror/Lib1_1_r3.0.scala new file mode 100644 index 000000000000..aaac31229228 --- /dev/null +++ b/tests/run/forwardCompat-nestedSumMirror/Lib1_1_r3.0.scala @@ -0,0 +1,8 @@ +// Adapted from i11050 + +sealed trait TreeValue + +sealed trait SubLevel extends TreeValue + +case class Leaf1(value: String) extends TreeValue +case class Leaf2(value: Int) extends SubLevel diff --git a/tests/run/forwardCompat-nestedSumMirror/Lib2_2_r3.0.scala b/tests/run/forwardCompat-nestedSumMirror/Lib2_2_r3.0.scala new file mode 100644 index 000000000000..833fe13bb71e --- /dev/null +++ 
b/tests/run/forwardCompat-nestedSumMirror/Lib2_2_r3.0.scala @@ -0,0 +1,3 @@ +import scala.deriving._ + +val treeValueMirror = summon[Mirror.Of[TreeValue]] \ No newline at end of file diff --git a/tests/run/forwardCompat-nestedSumMirror/Test_3_c3.0.2.scala b/tests/run/forwardCompat-nestedSumMirror/Test_3_c3.0.2.scala new file mode 100644 index 000000000000..13438d173ffb --- /dev/null +++ b/tests/run/forwardCompat-nestedSumMirror/Test_3_c3.0.2.scala @@ -0,0 +1,3 @@ +object Test: + def main(args: Array[String]): Unit = + println(treeValueMirror) diff --git a/tests/run/forwardCompat-refinedGivens/Lib_1_r3.0.scala b/tests/run/forwardCompat-refinedGivens/Lib_1_r3.0.scala new file mode 100644 index 000000000000..6da2077b0086 --- /dev/null +++ b/tests/run/forwardCompat-refinedGivens/Lib_1_r3.0.scala @@ -0,0 +1,16 @@ +// Adapted from i12949 + +object Catch22: + trait TC[V] + object TC: + export Hodor.TC.given + +object Hodor: + object TC: + import Catch22.TC + given fromString[V <: String]: TC[V] = new TC[V] {} + transparent inline given fromDouble[V <: Double]: TC[V] = + new TC[V]: + type Out = Double + given fromInt[V <: Int]: TC[V] with + type Out = Int \ No newline at end of file diff --git a/tests/run/forwardCompat-refinedGivens/Test_2_c3.0.2.scala b/tests/run/forwardCompat-refinedGivens/Test_2_c3.0.2.scala new file mode 100644 index 000000000000..35f2b594881d --- /dev/null +++ b/tests/run/forwardCompat-refinedGivens/Test_2_c3.0.2.scala @@ -0,0 +1,7 @@ +// Adapted from i12949 + +object Test: + def main(args: Array[String]): Unit = + summon[Catch22.TC["hi"]] + summon[Catch22.TC[7.7]] + summon[Catch22.TC[1]] diff --git a/tests/run/forwardCompat-strictEquals/Equality_1_r3.0.scala b/tests/run/forwardCompat-strictEquals/Equality_1_r3.0.scala new file mode 100644 index 000000000000..538b85075bf9 --- /dev/null +++ b/tests/run/forwardCompat-strictEquals/Equality_1_r3.0.scala @@ -0,0 +1,5 @@ +// Instances of CanEqual are erased during compilation so their absence at runtime 
should not cause a crash + +import scala.language.strictEquality + +def emptyTupleEquality = EmptyTuple == EmptyTuple diff --git a/tests/run/forwardCompat-strictEquals/Test_2_c3.0.2.scala b/tests/run/forwardCompat-strictEquals/Test_2_c3.0.2.scala new file mode 100644 index 000000000000..1d1d811d0a9b --- /dev/null +++ b/tests/run/forwardCompat-strictEquals/Test_2_c3.0.2.scala @@ -0,0 +1,3 @@ +object Test: + def main(args: Array[String]): Unit = + println(emptyTupleEquality) diff --git a/tests/run/forwardCompat-unusedImport/Imports_1_r3.0.scala b/tests/run/forwardCompat-unusedImport/Imports_1_r3.0.scala new file mode 100644 index 000000000000..7946a9c9e2cc --- /dev/null +++ b/tests/run/forwardCompat-unusedImport/Imports_1_r3.0.scala @@ -0,0 +1,14 @@ +object A: + import scala.quoted.Type.valueOfTuple + +object B: + import scala.quoted.Type.* + +object C: + import Tuple.canEqualTuple + +object D: + import Tuple.given + +object E: + import Selectable.WithoutPreciseParameterTypes diff --git a/tests/run/forwardCompat-unusedImport/Test_2_c3.0.2.scala b/tests/run/forwardCompat-unusedImport/Test_2_c3.0.2.scala new file mode 100644 index 000000000000..c670ace4e768 --- /dev/null +++ b/tests/run/forwardCompat-unusedImport/Test_2_c3.0.2.scala @@ -0,0 +1,7 @@ +object Test: + def main(args: Array[String]): Unit = + println(A) + println(B) + println(C) + println(D) + println(E) diff --git a/tests/run/genericTupleMembers.scala b/tests/run/genericTupleMembers.scala new file mode 100644 index 000000000000..1b6de1486a05 --- /dev/null +++ b/tests/run/genericTupleMembers.scala @@ -0,0 +1,46 @@ + +@main def Test: Unit = + val tup1 = 1 *: EmptyTuple + val tup2 = 1 *: 2 *: EmptyTuple + val tup3 = 1 *: 2 *: 3 *: EmptyTuple + val tup4 = 1 *: 2 *: 3 *: 4 *: EmptyTuple + val tup5 = 1 *: 2 *: 3 *: 4 *: 5 *: EmptyTuple + val tup22 = 1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: 21 *: 22 *: EmptyTuple + + tup1._1 + + tup2._1 + 
tup2._2 + tup2.swap + + tup3._1 + tup3._2 + tup3._3 + + tup4._1 + tup4._2 + tup4._3 + tup4._4 + + tup22._1 + tup22._2 + tup22._3 + tup22._4 + tup22._5 + tup22._6 + tup22._7 + tup22._8 + tup22._9 + tup22._10 + tup22._11 + tup22._12 + tup22._13 + tup22._14 + tup22._15 + tup22._16 + tup22._17 + tup22._18 + tup22._19 + tup22._20 + tup22._21 + tup22._22 diff --git a/tests/run/given-eta.scala b/tests/run/given-eta.scala index 1eaf6fd16c49..a01f1c441018 100644 --- a/tests/run/given-eta.scala +++ b/tests/run/given-eta.scala @@ -8,7 +8,7 @@ def f(x: Int)(using c: C) (y: Int) = x + c.x + y def g(x: Int)(using d: D) (y: d.T): d.T = d.trans(y) @main def Test = - given C(1) with {} + given C(1) val x = f assert(x(2)(3) == 6) diff --git a/tests/run/i10047.check b/tests/run/i10047.check new file mode 100644 index 000000000000..8b200126cd1e --- /dev/null +++ b/tests/run/i10047.check @@ -0,0 +1 @@ +slash diff --git a/tests/run/i10047.scala b/tests/run/i10047.scala new file mode 100644 index 000000000000..60e1a4319e0f --- /dev/null +++ b/tests/run/i10047.scala @@ -0,0 +1,15 @@ +object wrapper { + val / = "slash" +} + +object foo { + implicit val postfixOps: scala.languageFeature.postfixOps = scala.language.postfixOps + val / = wrapper./ +} + +object Test { + def main(args: Array[String]): Unit = { + val wd = foo./ + println(wd) + } +} diff --git a/tests/run/i10178.scala b/tests/run/i10178.scala index 2225a154fcd0..335de0b0d986 100644 --- a/tests/run/i10178.scala +++ b/tests/run/i10178.scala @@ -3,3 +3,42 @@ x <- Option(23) given Int = x do assert(summon[Int] == 23) + + for + y <- Option("ok") + q @ given String = y + do assert(summon[String] == "ok") + + for + z <- Option("key" -> true) + (q @ given String, u @ given Boolean) = z + do + assert(summon[String] == "key") + assert(summon[Boolean] == true) + + for + w <- Option("no" -> false) + (given String, given Boolean) = w + do + assert(summon[String] == "no") + assert(summon[Boolean] == false) + + for + given Int <- Option(23) + do 
assert(summon[Int] == 23) + + for + q @ given String <- Option("ok") + do assert(summon[String] == "ok") + + for + (q @ given String, u @ given Boolean) <- Option("key" -> true) + do + assert(summon[String] == "key") + assert(summon[Boolean] == true) + + for + (given String, given Boolean) <- Option("no" -> false) + do + assert(summon[String] == "no") + assert(summon[Boolean] == false) diff --git a/tests/run/i10889.scala b/tests/run/i10889.scala new file mode 100644 index 000000000000..130b6d2d9636 --- /dev/null +++ b/tests/run/i10889.scala @@ -0,0 +1,25 @@ +import scala.annotation.tailrec +import scala.util.chaining.given + +object Test { + class Ctx + type Op[A] = Ctx ?=> A + + var min = Int.MaxValue + var max = 0 + def stk = new Throwable().getStackTrace.length + + @tailrec def f[A](n: Int)(op: Op[A]): A = + val depth = stk + min = min.min(depth) + max = max.max(depth) + given Ctx = Ctx() + if (n > 0) f(n-1)(op) + else op + + def g(ctx: Ctx) = stk + + def main(args: Array[String]): Unit = + val extra = 3 + f(10)(Ctx ?=> g(summon[Ctx])).tap(res => assert(res <= max + extra, s"min $min, max $max, ran g at $res")) +} \ No newline at end of file diff --git a/tests/run/i11008.check b/tests/run/i11008.check new file mode 100644 index 000000000000..3be25c0dc8dd --- /dev/null +++ b/tests/run/i11008.check @@ -0,0 +1,2 @@ +hello +hello and 10 diff --git a/tests/run/i11008.scala b/tests/run/i11008.scala new file mode 100644 index 000000000000..dfe6be288d7a --- /dev/null +++ b/tests/run/i11008.scala @@ -0,0 +1,13 @@ +object A: + def unapply(s: String): String *: EmptyTuple = Tuple1(s) + +object B: + def unapply(s:String): String *: Int *: EmptyTuple = Tuple2(s, 10) + +@main def Test = + "hello" match + case A(x) => + println(x) + "hello" match + case B(x, y) => + println(s"$x and $y") diff --git a/tests/run/i11008b.check b/tests/run/i11008b.check new file mode 100644 index 000000000000..c0c7fc09b324 --- /dev/null +++ b/tests/run/i11008b.check @@ -0,0 +1 @@ +Many 3 
characters List(f, o, o) \ No newline at end of file diff --git a/tests/run/i11008b.scala b/tests/run/i11008b.scala new file mode 100644 index 000000000000..f5c8a69c7b48 --- /dev/null +++ b/tests/run/i11008b.scala @@ -0,0 +1,7 @@ +object Foo: + def unapplySeq(x: String): Int *: Seq[String] *: EmptyTuple = (x.length, x.toList.map(_.toString)) + +@main def Test = + "foo" match + case Foo(1, c) => println("One character " + c) + case Foo(x, xs*) => println(s"Many $x characters $xs") diff --git a/tests/run/i11045.scala b/tests/run/i11045.scala new file mode 100644 index 000000000000..2885c7608e60 --- /dev/null +++ b/tests/run/i11045.scala @@ -0,0 +1,5 @@ +abstract class Foo(x: Any) +class Boom(var x: Unit, y: Unit) extends Foo((x: Int) => x) // was error: super constructor cannot be passed a self reference +@main def Test = + Boom((), ()) + diff --git a/tests/run/i11542.scala b/tests/run/i11542.scala index 90c8471bdf03..32e940e745d0 100644 --- a/tests/run/i11542.scala +++ b/tests/run/i11542.scala @@ -2,7 +2,7 @@ object demo { trait Reader[A] - given Reader[Int] with {} + given Reader[Int]() inline def summonReader[T <: Tuple]: List[Reader[_]] = inline compiletime.erasedValue[T] match { case _: EmptyTuple => Nil diff --git a/tests/run/i11938.scala b/tests/run/i11938.scala new file mode 100644 index 000000000000..ac39ecfc7f61 --- /dev/null +++ b/tests/run/i11938.scala @@ -0,0 +1,22 @@ +import java.util.function.Function + +object Test { + def foo[V](v: V): Int = 1 + def foo[U](fn: Function[Int, U]): Int = 2 + + def foo2(a: Int => Int): Int = 1 + def foo2(a: PartialFunction[Int, Int]): Int = 2 + + def main(args: Array[String]): Unit = { + assert(foo((x: Int) => x) == 2) + val jf: Function[Int, Int] = x => x + assert(foo(jf) == 2) + + assert(foo2(x => x) == 1) + val f: Int => Int = x => x + assert(foo2(f) == 1) + assert(foo2({ case x if x % 2 == 0 => x }) == 2) + val pf: PartialFunction[Int, Int] = { case x if x % 2 == 0 => x } + assert(foo2(pf) == 2) + } +} diff --git 
a/tests/run/i12170.scala b/tests/run/i12170.scala new file mode 100644 index 000000000000..df5811863343 --- /dev/null +++ b/tests/run/i12170.scala @@ -0,0 +1,13 @@ +import scala.compiletime.error + +object BadFilters: + inline def withFilter(f: Int => Boolean): BadFilters.type = error("Unexpected withFilter call") + def foreach(f: Int => Unit): Unit = f(42) + +@main def Test = + for + x: Int <- BadFilters + do println(x) + for + given Int <- BadFilters + do println(summon[Int]) diff --git a/tests/run/i12194.check b/tests/run/i12194.check new file mode 100644 index 000000000000..9e90577cbd4f --- /dev/null +++ b/tests/run/i12194.check @@ -0,0 +1 @@ +List(foo, bar) diff --git a/tests/run/i12204/A_1.scala b/tests/run/i12204/A_1.scala new file mode 100644 index 000000000000..95b519fa066a --- /dev/null +++ b/tests/run/i12204/A_1.scala @@ -0,0 +1,3 @@ +object A { + def intARRAY_131(x: Array[String] with Array[Int]): Unit = {} +} diff --git a/tests/run/i12204/B_2.java b/tests/run/i12204/B_2.java new file mode 100644 index 000000000000..283e63a8974b --- /dev/null +++ b/tests/run/i12204/B_2.java @@ -0,0 +1,5 @@ +public class B_2 { + public static void test() { + A.intARRAY_131(null); // shouldn't throw a NoSuchMethodError + } +} diff --git a/tests/run/i12204/Test_3.scala b/tests/run/i12204/Test_3.scala new file mode 100644 index 000000000000..51ec04559ca7 --- /dev/null +++ b/tests/run/i12204/Test_3.scala @@ -0,0 +1,5 @@ +object Test { + def main(args: Array[String]): Unit = { + B_2.test() + } +} diff --git a/tests/run/i1240.scala b/tests/run/i1240.scala index 7092d91314e4..b425c4f55ff2 100644 --- a/tests/run/i1240.scala +++ b/tests/run/i1240.scala @@ -19,7 +19,9 @@ object Test { // In Java, this gives an error like this: // methods foo(A) from C[D] and foo(String) from C[D] are inherited with the same signature // But the analogous example with `b1` compiles OK in Java. 
- assert(b2.foo(new D) == "T foo") + assert(b2.foo(new D) == "D foo") + // Here we get "D foo" since a bridge method for foo(x: D) was inserted + // in the anonymous class of b2. } } diff --git a/tests/run/i12597.scala b/tests/run/i12597.scala new file mode 100644 index 000000000000..73cc4185d74e --- /dev/null +++ b/tests/run/i12597.scala @@ -0,0 +1,5 @@ +@main def Test = + val a: IArray[Int] = IArray(2) + val b: IArray[Any] = a + val c = IArray.genericWrapArray(b).toArray + c(0) = "" diff --git a/tests/run/i12656.scala b/tests/run/i12656.scala new file mode 100644 index 000000000000..ee22988ac771 --- /dev/null +++ b/tests/run/i12656.scala @@ -0,0 +1,16 @@ +transparent inline def expectCompileError( + inline code: String, + expectedMsg: String +) = + val errors = compiletime.testing.typeCheckErrors(code) + assert(errors.head.message == expectedMsg, (errors.head.message, expectedMsg)) + +transparent inline def expectTypeCheck( + inline code: String, +) : Boolean = compiletime.testing.typeChecks(code) + +@main def Test = + assert(!expectTypeCheck("""compiletime.error("some error")""")) + assert(expectTypeCheck("""1 + 1""")) + expectCompileError("""compiletime.error("some error")""", "some error") + diff --git a/tests/run/i12729.scala b/tests/run/i12729.scala new file mode 100644 index 000000000000..d7683fe72a05 --- /dev/null +++ b/tests/run/i12729.scala @@ -0,0 +1,3 @@ +object Test: + val `` = "hello!" 
+ def main(args: Array[String]): Unit = println(``) diff --git a/tests/run/i12753.check b/tests/run/i12753.check new file mode 100644 index 000000000000..e599ced53b63 --- /dev/null +++ b/tests/run/i12753.check @@ -0,0 +1,28 @@ +1 +Dbr +1 +1 +2 +1 +1 +1 +1 +2 +1 +1 +synthetic public static C D.foo(int) +public static D D.foo(int) +public static D D.t() +synthetic public static java.lang.Object D.bar() +public static java.lang.String D.bar() +public static int O.a() +public static int O.b() +public static int O.c() +public static int O.d() +public static int O.i() +public static int O.j() +public static int O.k() +public static int O.l() +synthetic public static void O.T$_setter_$a_$eq(int) +public static void O.b_$eq(int) +public static void O.j_$eq(int) diff --git a/tests/run/i12753/C.scala b/tests/run/i12753/C.scala new file mode 100644 index 000000000000..1b62940d6ffb --- /dev/null +++ b/tests/run/i12753/C.scala @@ -0,0 +1,34 @@ +trait C[This <: C[This]] + +trait COps[This <: C[This]] { + def t: This + def foo(x: Int): This = t + def bar: Object = "Cbr" +} + +class D extends C[D] { + def x = 1 +} +object D extends COps[D] { + def t = new D + override def foo(x: Int): D = super.foo(x) + override def bar: String = "Dbr" +} + +trait T { + val a = 1 + var b = 1 + lazy val c = 1 + def d = 1 + + val i: Int + var j: Int + lazy val k: Int = 1 + def l: Int +} +object O extends T { + val i: Int = 1 + var j: Int = 1 + override lazy val k: Int = 1 + def l: Int = 1 +} diff --git a/tests/run/i12753/Test.java b/tests/run/i12753/Test.java new file mode 100644 index 000000000000..2a0bd9480b9c --- /dev/null +++ b/tests/run/i12753/Test.java @@ -0,0 +1,36 @@ +public class Test { + public static void s(Object s) { + System.out.println(s); + } + + public static void statics(Class c) { + java.lang.reflect.Method[] ms = c.getDeclaredMethods(); + java.util.Arrays.sort(ms, (a, b) -> a.toString().compareTo(b.toString())); + for (java.lang.reflect.Method a : ms) { + if 
(java.lang.reflect.Modifier.isStatic(a.getModifiers())) + s((a.isSynthetic() ? "synthetic " : "") + a); + } + } + + public static void main(String[] args) { + s(D.foo(1).x()); + s(D.bar().trim()); + + s(O.a()); + s(O.b()); + O.b_$eq(2); + s(O.b()); + s(O.c()); + s(O.d()); + + s(O.i()); + s(O.j()); + O.j_$eq(2); + s(O.j()); + s(O.k()); + s(O.l()); + + statics(D.class); + statics(O.class); + } +} diff --git a/tests/run/i12759.scala b/tests/run/i12759.scala new file mode 100644 index 000000000000..2c5fcded46ac --- /dev/null +++ b/tests/run/i12759.scala @@ -0,0 +1,15 @@ +package example { + + trait Foo { + val (a2, a3) = ("", "") + val (x1, x2, x3) = ("", "", "") + } + + class A extends Foo +} + +object Test { + def main(args: Array[String]): Unit = { + new example.A + } +} diff --git a/tests/run/i12759b.scala b/tests/run/i12759b.scala new file mode 100644 index 000000000000..c7480575f86b --- /dev/null +++ b/tests/run/i12759b.scala @@ -0,0 +1,39 @@ +package example { + + class A1 + class A2 + class A3 + + class TypeClass[A](val value: A) + + object TypeClass { + def apply[A](implicit a: TypeClass[A]): TypeClass[A] = a + + def get2[X1: TypeClass, X2: TypeClass]: (X1, X2) = { + (TypeClass[X1].value, TypeClass[X2].value) + } + + def get3[X1: TypeClass, X2: TypeClass, X3: TypeClass]: (X1, X2, X3) = { + (TypeClass[X1].value, TypeClass[X2].value, TypeClass[X3].value) + } + + implicit def a1: TypeClass[A1] = new TypeClass[A1](new A1) + implicit def a2: TypeClass[A2] = new TypeClass[A2](new A2) + implicit def a3: TypeClass[A3] = new TypeClass[A3](new A3) + } + + trait Foo { + + val (a2, a3) = TypeClass.get2[A2, A3] + + val (x1, x2, x3) = TypeClass.get3[A1, A2, A3] + + } + +} + +object Test { + def main(args: Array[String]): Unit = { + new example.Foo {} + } +} \ No newline at end of file diff --git a/tests/run/i12796/A_1.scala b/tests/run/i12796/A_1.scala new file mode 100644 index 000000000000..46290e942335 --- /dev/null +++ b/tests/run/i12796/A_1.scala @@ -0,0 +1,4 @@ 
+object A: + type Timeframe = "1m" | "2m" | "1H" + + def test(input: String) = input.isInstanceOf[Timeframe] diff --git a/tests/run/i12796/Test_2.scala b/tests/run/i12796/Test_2.scala new file mode 100644 index 000000000000..8274f8804063 --- /dev/null +++ b/tests/run/i12796/Test_2.scala @@ -0,0 +1,3 @@ +@main def Test = + val x = "1" + assert(A.test(x + "m")) diff --git a/tests/run/i12828a.scala b/tests/run/i12828a.scala new file mode 100644 index 000000000000..459af156980c --- /dev/null +++ b/tests/run/i12828a.scala @@ -0,0 +1,12 @@ +trait Foo[A] { + def foo(x: A): Unit = () +} +trait Bar[A] extends Foo[A] { + def foo(x: A with String): Unit = println(x.toUpperCase) +} +object Baz extends Bar[Int] // was: error: Baz inherits conflicting members, now like Scala 2 + // Scala 2 compiles and runs + +object Test { + def main(args: Array[String]) = Baz.foo(42) +} diff --git a/tests/run/i12828b.scala b/tests/run/i12828b.scala new file mode 100644 index 000000000000..d1cd3e087a36 --- /dev/null +++ b/tests/run/i12828b.scala @@ -0,0 +1,12 @@ +class Foo[A] { + def foo(x: A): Unit = () +} +class Bar[A] extends Foo[A] { + def foo(x: A with String): Unit = println(x.toUpperCase) +} +object Baz extends Bar[Int] // was error: Baz inherits conflicting members, now like Scalac + // Scala 2 compiles and runs + +object Test { + def main(args: Array[String]) = Baz.foo(42) +} diff --git a/tests/run/i12829.check b/tests/run/i12829.check new file mode 100644 index 000000000000..5d8a5566f0e8 --- /dev/null +++ b/tests/run/i12829.check @@ -0,0 +1,2 @@ +abc +abc diff --git a/tests/run/i12829.scala b/tests/run/i12829.scala new file mode 100644 index 000000000000..cf691792e0c3 --- /dev/null +++ b/tests/run/i12829.scala @@ -0,0 +1,19 @@ +object Test: + + class Foo: + var buf = "" + + def add(v: String): Unit = buf += v + + inline def += (inline v: String): this.type = {add(v); this } + + + def main(sa: Array[String]): Unit = + + var fooVar = new Foo + fooVar += "a" += "b" += "c" + 
println(fooVar.buf) // Prints: abc + + val fooVal = new Foo + fooVal += "a" += "b" += "c" + println(fooVal.buf) // Printed: c // Expected abc diff --git a/tests/run/i12914.check b/tests/run/i12914.check new file mode 100644 index 000000000000..9fe584d62718 --- /dev/null +++ b/tests/run/i12914.check @@ -0,0 +1,8 @@ +ASD +asd +ASD +asd +ASD +asd +aSdaSdaSd +aSdaSdaSd diff --git a/tests/run/i12914.scala b/tests/run/i12914.scala new file mode 100644 index 000000000000..c60937916149 --- /dev/null +++ b/tests/run/i12914.scala @@ -0,0 +1,27 @@ + +class opq: + opaque type Str = java.lang.String + object Str: + def apply(s: String): Str = s + inline def lower(s: Str): String = s.toLowerCase + extension (s: Str) + transparent inline def upper: String = s.toUpperCase + inline def concat(xs: List[Str]): Str = String(xs.flatten.toArray) + transparent inline def concat2(xs: List[Str]): Str = String(xs.flatten.toArray) + + +@main def Test = + val opq = new opq() + import opq.* + val a: Str = Str("aSd") + println(a.upper) + println(opq.lower(a)) + def b: Str = Str("aSd") + println(b.upper) + println(opq.lower(b)) + def c(): Str = Str("aSd") + println(c().upper) + println(opq.lower(c())) + println(opq.concat(List(a, b, c()))) + println(opq.concat2(List(a, b, c()))) + diff --git a/tests/run/i12976.scala b/tests/run/i12976.scala new file mode 100644 index 000000000000..a14a18885f1e --- /dev/null +++ b/tests/run/i12976.scala @@ -0,0 +1,39 @@ + +case class A(s: String) + +class B { + def b1[X](str: String): String = str + + def b2[X](str: String): X = null.asInstanceOf[X] +} + +object Test { + + def main(args: Array[String]): Unit = { + val a = A("aaa") + val b = new B + + // no error + a match { + case A(s) => + b.b1(s) + } + + // no error if add explicit type param + a match { + case A(s) => + b.b2[Boolean](s) + } + + // scala.MatchError: A(aaa) + try + a match { + case A(s) => + b.b2(s) + } + assert(false) + catch case ex: NullPointerException => + () // OK + } + +} \ No newline at 
end of file diff --git a/tests/run/i13087.scala b/tests/run/i13087.scala new file mode 100644 index 000000000000..74eaa0e69e85 --- /dev/null +++ b/tests/run/i13087.scala @@ -0,0 +1,38 @@ +import scala.collection.mutable.Builder + +class DDD[S,T,A] + +trait NN[S, T, A, K[_], +D <: DDD[Set[S],T,K[A]]] +class NNN[S, T, K[_], A] extends NN[S, T, A, K, DDD[Set[S],T,K[A]]] + +object NN { + def newBuilder[S, T, A, K[_]]: + NNbuilder[S, T, K, A, DDD[Set[S],T,K[A]], NN[S,T,A,K,?], Unit] = + new NNNbuilder[S, T, K, A] +} + +// Remove the type parameter E, hardcoding in E := Unit, and the issue +// goes away. +trait NNbuilder + [S, T, K[_], A, +D <: DDD[Set[S],T,K[A]], +N <: NN[S,T,A,K,D], E] + extends Builder[Unit, N] { + def clear(): Unit = throw new UnsupportedOperationException() + final def addOne(builder: E): this.type = this +} + +// Unfold this class defn, and the issue goes away +abstract class AbstractNNNbuilder + [S, T, K[_], A, +D <: DDD[Set[S],T,K[A]], +N <: NN[S,T,A,K,D], E] + extends NNbuilder[S,T,K,A,D,N,E] + +class NNNbuilder[S, T, K[_], A] extends AbstractNNNbuilder[ + S, T, K, A, DDD[Set[S], T, K[A]], NNN[S, T, K, A], Unit +] { + override def result(): NNN[S, T, K, A] = new NNN[S, T, K, A] +} + +@main def Test: Unit = { + val builder = NN.newBuilder[String, Char, Int, Set] + builder += () + builder.result() +} \ No newline at end of file diff --git a/tests/run/i13087a.scala b/tests/run/i13087a.scala new file mode 100644 index 000000000000..f1513583e8fb --- /dev/null +++ b/tests/run/i13087a.scala @@ -0,0 +1,17 @@ +trait SimpleTrait[T] { + def myMethod(t: T): Int + def doIt(t: T): Unit = { + myMethod(t) // java.lang.AbstractMethodError: Method BadClass.myMethod(Ljava/lang/Object;)I is abstract + } +} + +abstract class SimpleClass[T] extends SimpleTrait[T] { + def myMethod(t: String): Int = 5 +} + +class BadClass extends SimpleClass[String] + +object Test { + def main(args: Array[String]): Unit = + (new BadClass).doIt("foobar") +} \ No newline at end of file 
diff --git a/tests/run/i13096.scala b/tests/run/i13096.scala new file mode 100644 index 000000000000..5afb0d0dc1b3 --- /dev/null +++ b/tests/run/i13096.scala @@ -0,0 +1,10 @@ +class C1 { + private class C2 + new C2 match { + case c: C2 => + } +} + +object Test extends App { + new C1 +} diff --git a/tests/run/i13131.scala b/tests/run/i13131.scala new file mode 100644 index 000000000000..0c9661ffa75a --- /dev/null +++ b/tests/run/i13131.scala @@ -0,0 +1,4 @@ +@main def Test = + val `ct_[[I` = reflect.classTag[Array[Array[Int]] | Array[Array[Int]]] + val arrArrInt = Array(Array(1)) + assert(`ct_[[I`.runtimeClass == arrArrInt.getClass) diff --git a/tests/run/i13181.scala b/tests/run/i13181.scala new file mode 100644 index 000000000000..a6693b3f68e4 --- /dev/null +++ b/tests/run/i13181.scala @@ -0,0 +1 @@ +@main def Test = assert(scala.compiletime.codeOf(1+2) == "1.+(2)") diff --git a/tests/run/i13183.check b/tests/run/i13183.check new file mode 100644 index 000000000000..2dc88a12f2d4 --- /dev/null +++ b/tests/run/i13183.check @@ -0,0 +1 @@ +"\u001b" diff --git a/tests/run/i13183.scala b/tests/run/i13183.scala new file mode 100644 index 000000000000..128f42b162f1 --- /dev/null +++ b/tests/run/i13183.scala @@ -0,0 +1,2 @@ +@main def Test(): Unit = + println(scala.compiletime.codeOf("\u001b")) diff --git a/tests/run/i13228.check b/tests/run/i13228.check new file mode 100644 index 000000000000..3cdefad74555 --- /dev/null +++ b/tests/run/i13228.check @@ -0,0 +1,5 @@ +RegisteredUser(Id,User(Name)) +false +false +false +false diff --git a/tests/run/i13228.scala b/tests/run/i13228.scala new file mode 100644 index 000000000000..e1b0dcc4fb46 --- /dev/null +++ b/tests/run/i13228.scala @@ -0,0 +1,13 @@ +case class User(name: String) + +case class RegisteredUser(id: String, data: User) { + export data.* +} + +@main def Test() = + println(RegisteredUser("Id", User("Name"))) // RegisteredUser(Name) + println(RegisteredUser("Id", User("Name")).canEqual(User("Name"))) // True + // The 
rest works as expected + println(RegisteredUser("Id", User("Name")) == User("Name")) // False + println(RegisteredUser("Id", User("Name")).hashCode == User("Name").hashCode) // False + println(RegisteredUser("Id", User("Name")).productArity == User("Name").productArity) // False diff --git a/tests/run/i13252a.scala b/tests/run/i13252a.scala new file mode 100644 index 000000000000..d12706f62a95 --- /dev/null +++ b/tests/run/i13252a.scala @@ -0,0 +1,12 @@ +object Test: + + class Star + + trait Foo: + @annotation.targetName("star") + val * : Star = new Star + + object Bar extends Foo + + def main(sa: Array[String]): Unit = + Bar.* \ No newline at end of file diff --git a/tests/run/i13252b/Foo_1.scala b/tests/run/i13252b/Foo_1.scala new file mode 100644 index 000000000000..7e7551e14f4c --- /dev/null +++ b/tests/run/i13252b/Foo_1.scala @@ -0,0 +1,5 @@ +class Star + +trait Foo: + @annotation.targetName("star") + val * : Star = new Star diff --git a/tests/run/i13252b/Test_2.scala b/tests/run/i13252b/Test_2.scala new file mode 100644 index 000000000000..b4d382d2cc1f --- /dev/null +++ b/tests/run/i13252b/Test_2.scala @@ -0,0 +1,6 @@ +object Test: + + object Bar extends Foo + + def main(sa: Array[String]): Unit = + Bar.* \ No newline at end of file diff --git a/tests/run/i13304.check b/tests/run/i13304.check new file mode 100644 index 000000000000..257cc5642cb1 --- /dev/null +++ b/tests/run/i13304.check @@ -0,0 +1 @@ +foo diff --git a/tests/run/i13304.scala b/tests/run/i13304.scala new file mode 100644 index 000000000000..f60359b82d47 --- /dev/null +++ b/tests/run/i13304.scala @@ -0,0 +1,13 @@ +trait Zero[F[_]]: + def zero[A]: F[A] + +given Zero[List] with + def zero[A] = List.empty[A] + +given Zero[Option] with + def zero[A] = Option.empty[A] + + +@main def Test = + val test = [F[_]] => (f: Zero[F]) ?=> [G[_]] => (g: Zero[G]) ?=> println("foo") + test[List][Option] \ No newline at end of file diff --git a/tests/run/i13358.check b/tests/run/i13358.check new file mode 100644 
index 000000000000..d5425d4bc05f --- /dev/null +++ b/tests/run/i13358.check @@ -0,0 +1,8 @@ +1 +1 +1 +1 +2 +2 +2 +2 diff --git a/tests/run/i13358.scala b/tests/run/i13358.scala new file mode 100644 index 000000000000..c469e5373f54 --- /dev/null +++ b/tests/run/i13358.scala @@ -0,0 +1,22 @@ +object Test: + var cnt = 0 + + trait Foo: + lazy val foo1 = {cnt+=1; cnt} + + @scala.annotation.targetName("fooTwo") + lazy val foo2 = {cnt+=1; cnt} + + object Bar extends Foo + + def main(sa: Array[String]): Unit = + + println(Bar.foo1) // Prints 1 + println(Bar.foo1) // Prints 1 + println(Bar.foo1) // Prints 1 + println(Bar.foo1) // Prints 1 + + println(Bar.foo2) // Prints 2 + println(Bar.foo2) // Prints 3 EXPECTED 2 + println(Bar.foo2) // Prints 4 EXPECTED 2 + println(Bar.foo2) // Prints 5 EXPECTED 2 \ No newline at end of file diff --git a/tests/run/i13433.check b/tests/run/i13433.check new file mode 100644 index 000000000000..88223478e837 --- /dev/null +++ b/tests/run/i13433.check @@ -0,0 +1,4 @@ +Some(abc) +Some(abc) +None +None diff --git a/tests/run/i13433.scala b/tests/run/i13433.scala new file mode 100644 index 000000000000..fe634a5ac14a --- /dev/null +++ b/tests/run/i13433.scala @@ -0,0 +1,28 @@ +import scala.reflect.TypeTest + +type Matcher[A] = A match { case String => String } + +def patternMatch[A](a: Any)(using tt: TypeTest[Any, Matcher[A]]): Option[Matcher[A]] = { + // type T = RDF.Triple[Rdf] + a match { + case res: Matcher[A] => Some(res) + case _ => None + } +} + +def patternMatchWithAlias[A](a: Any)(using tt: TypeTest[Any, Matcher[A]]): Option[Matcher[A]] = { + type T = Matcher[A] + a match { + case res: T => Some(res) + case _ => None + } +} + + +@main def Test = { + println(patternMatch[String]("abc")) + println(patternMatchWithAlias[String]("abc")) + + println(patternMatch[String](1)) + println(patternMatchWithAlias[String](1)) +} diff --git a/tests/run/i13433b.check b/tests/run/i13433b.check new file mode 100644 index 000000000000..88223478e837 --- 
/dev/null +++ b/tests/run/i13433b.check @@ -0,0 +1,4 @@ +Some(abc) +Some(abc) +None +None diff --git a/tests/run/i13433b.scala b/tests/run/i13433b.scala new file mode 100644 index 000000000000..914530b91fd1 --- /dev/null +++ b/tests/run/i13433b.scala @@ -0,0 +1,28 @@ +import scala.reflect.ClassTag + +type Matcher[A] = A match { case String => String } + +def patternMatch[A](a: Any)(using tt: ClassTag[Matcher[A]]): Option[Matcher[A]] = { + // type T = RDF.Triple[Rdf] + a match { + case res: Matcher[A] => Some(res) + case _ => None + } +} + +def patternMatchWithAlias[A](a: Any)(using tt: ClassTag[Matcher[A]]): Option[Matcher[A]] = { + type T = Matcher[A] + a match { + case res: T => Some(res) + case _ => None + } +} + + +@main def Test = { + println(patternMatch[String]("abc")) + println(patternMatchWithAlias[String]("abc")) + + println(patternMatch[String](1)) + println(patternMatchWithAlias[String](1)) +} diff --git a/tests/run/i13490.min.scala b/tests/run/i13490.min.scala new file mode 100644 index 000000000000..4f9c19764a5d --- /dev/null +++ b/tests/run/i13490.min.scala @@ -0,0 +1,13 @@ +object MyTypes: + enum MyEnum: + case Foo + case Bar + +object MyApi: + export MyTypes.* + +object MyUse: + import MyApi.MyEnum.Foo + def foo = Foo + +@main def Test = assert(MyUse.foo.toString == "Foo") diff --git a/tests/run/i13630.check b/tests/run/i13630.check new file mode 100644 index 000000000000..64bf9740e617 --- /dev/null +++ b/tests/run/i13630.check @@ -0,0 +1 @@ +it worked diff --git a/tests/run/i13630.scala b/tests/run/i13630.scala new file mode 100644 index 000000000000..64f260262a8e --- /dev/null +++ b/tests/run/i13630.scala @@ -0,0 +1,9 @@ +class ClassWithLambda(sup: () => Long) +class ClassWithVar(var msg: String) extends ClassWithLambda(() => 1) + +object Test: + val _ = new ClassWithVar("foo") + + def main(args: Array[String]): Unit = { + println("it worked") + } \ No newline at end of file diff --git a/tests/run/i13691.scala b/tests/run/i13691.scala new file 
mode 100644 index 000000000000..224656d87923 --- /dev/null +++ b/tests/run/i13691.scala @@ -0,0 +1,53 @@ +import language.experimental.erasedDefinitions + +erased class CanThrow[-E <: Exception] +erased class Foo +class Bar + +object unsafeExceptions: + given canThrowAny: CanThrow[Exception] = new CanThrow + +object test1: + trait Decoder[+T]: + def apply(): T + + def deco: Decoder[CanThrow[Exception] ?=> Int] = new Decoder[CanThrow[Exception] ?=> Int]: + def apply(): CanThrow[Exception] ?=> Int = 1 + +object test2: + trait Decoder[+T]: + def apply(): T + + def deco: Decoder[(CanThrow[Exception], Foo) ?=> Int] = new Decoder[(CanThrow[Exception], Foo) ?=> Int]: + def apply(): (CanThrow[Exception], Foo) ?=> Int = 1 + +object test3: + trait Decoder[+T]: + def apply(): T + + def deco: Decoder[CanThrow[Exception] ?=> Foo ?=> Int] = new Decoder[CanThrow[Exception] ?=> Foo ?=> Int]: + def apply(): CanThrow[Exception] ?=> Foo ?=> Int = 1 + +object test4: + trait Decoder[+T]: + def apply(): T + + def deco: Decoder[CanThrow[Exception] ?=> Bar ?=> Int] = new Decoder[CanThrow[Exception] ?=> Bar ?=> Int]: + def apply(): CanThrow[Exception] ?=> Bar ?=> Int = 1 + +object test5: + trait Decoder[+T]: + def apply(): T + + def deco: Decoder[Bar ?=> CanThrow[Exception] ?=> Int] = new Decoder[Bar ?=> CanThrow[Exception] ?=> Int]: + def apply(): Bar ?=> CanThrow[Exception] ?=> Int = 1 + +@main def Test(): Unit = + import unsafeExceptions.canThrowAny + given Foo = Foo() + given Bar = Bar() + test1.deco.apply().apply + test2.deco.apply().apply + test3.deco.apply().apply + test4.deco.apply().apply + test5.deco.apply().apply diff --git a/tests/run/i13703.scala b/tests/run/i13703.scala new file mode 100644 index 000000000000..0483e4ca7dc2 --- /dev/null +++ b/tests/run/i13703.scala @@ -0,0 +1,5 @@ +trait Foo extends reflect.Selectable + +@main def Test: Unit = + val f = new Foo { var i: Int = 0 } + f.i = 1 \ No newline at end of file diff --git a/tests/run/i13747.scala 
b/tests/run/i13747.scala new file mode 100644 index 000000000000..2167523bd754 --- /dev/null +++ b/tests/run/i13747.scala @@ -0,0 +1,17 @@ +var res = "" +trait Bar: + def +(that: Bar): Bar = new Plus(this, that) + transparent inline def -(that: Bar): Bar = new Minus(this, that) + +class LHS extends Bar {res += "LHS "} +class RHS extends Bar {res += "RHS "} + +class Plus(lhs: Bar, rhs: Bar) extends Bar {res += "op"} +class Minus(lhs: Bar, rhs: Bar) extends Bar {res += "op"} + +@main def Test = + val pls = new LHS + new RHS + val plsRes = res + res = "" + val min = new LHS - new RHS + assert(plsRes == res) \ No newline at end of file diff --git a/tests/run/i13862.scala b/tests/run/i13862.scala new file mode 100644 index 000000000000..4577f65e50d9 --- /dev/null +++ b/tests/run/i13862.scala @@ -0,0 +1,11 @@ +trait Foo(val num: Int) // a trait with a parameter stored in a val + +class Bar(num: Int) extends Foo(num): // an extending class with a parameter of the same name + def bar = this.num // implicitly creates another num in Bar + +@main def Test = Bar(123) + +class Bar2(n: Int) extends Foo(n): // an extending class with a parameter of the same name + private val num = n + def bar = this.num // implicitly creates another num in Bar + diff --git a/tests/run/i13950.scala b/tests/run/i13950.scala new file mode 100644 index 000000000000..b8f93129beb8 --- /dev/null +++ b/tests/run/i13950.scala @@ -0,0 +1,11 @@ +def example(x: Any & ([V] => V => Int)) = + x[Int](1) +def example2(x: (Any & ([V] => V => Int)) @unchecked) = + x[Int](1) +def example3[S <: Any & ([V] => V => Int)](x: S) = + x[Int](1) + +@main def Test = + example([A] => (x: A) => 1) + example2([A] => (x: A) => 1) + example3([A] => (x: A) => 1) diff --git a/tests/run/i13961a.scala b/tests/run/i13961a.scala new file mode 100644 index 000000000000..9d49ab30b0a4 --- /dev/null +++ b/tests/run/i13961a.scala @@ -0,0 +1,11 @@ +import language.experimental.saferExceptions + +trait Decoder[+T]: + def apply(): T + +given 
Decoder[Int throws Exception] = new Decoder[Int throws Exception]: + def apply(): Int throws Exception = 1 + +@main def Test(): Unit = + import unsafeExceptions.canThrowAny + summon[Decoder[Int throws Exception]]() \ No newline at end of file diff --git a/tests/run/i13968.scala b/tests/run/i13968.scala new file mode 100644 index 000000000000..8e63d57ecf1b --- /dev/null +++ b/tests/run/i13968.scala @@ -0,0 +1,26 @@ +object Bar { + def unapply(x: Any): Option[Int *: Int *: EmptyTuple] = Some(1 *: 2 *: Tuple()) +} + +object Bar23 { + def unapply(x: Any): Option[ + Int *: Int *: Int *: Int *: Int *: Int *: Int *: Int *: Int *: Int *: + Int *: Int *: Int *: Int *: Int *: Int *: Int *: Int *: Int *: Int *: + Int *: Int *: Int *: EmptyTuple + ] = Some( + 1 *: 2 *: 3 *: 4 *: 5 *: 6 *: 7 *: 8 *: 9 *: 10 *: + 11 *: 12 *: 13 *: 14 *: 15 *: 16 *: 17 *: 18 *: 19 *: 20 *: + 21 *: 22 *: 23 *: Tuple() + ) +} + +@main def Test() = + "" match + case Bar((a, b)) => assert(a == 1 && b == 2, (a, b)) + + "" match + case Bar23(( + u1, u2, u3, u4, u5, u6, u7, u8, u9, u10, + u11, u12, u13, u14, u15, u16, u17, u18, u19, u20, + u21, u22, u23 + )) => assert(u1 == 1 && u23 == 23, (u1, u23)) diff --git a/tests/run/i14020.check b/tests/run/i14020.check new file mode 100644 index 000000000000..304f6c16f72a --- /dev/null +++ b/tests/run/i14020.check @@ -0,0 +1,7 @@ +Hello you +Hello John +Hello you +Hello you +Hello John +Hello you +bark: Woof! diff --git a/tests/run/i14020.scala b/tests/run/i14020.scala new file mode 100644 index 000000000000..86123874c034 --- /dev/null +++ b/tests/run/i14020.scala @@ -0,0 +1,39 @@ +class A: + def greeting(name: String = "you") = s"Hello $name" + +class A2: + inline def greeting(name: String = "you") = s"Hello $name" + +class B: + val a = A() + export a.* + +class C: + val a = A2() + export a.greeting + +@main def Test = + val b = B() + + println(b.a.greeting()) // works + println(b.greeting("John")) // works + println(b.greeting()) // nope ! 
+ + val c = C() + + println(c.a.greeting()) // works + println(c.greeting("John")) // works + println(c.greeting()) // nope ! + + val w = Wolf() + import w.given + + println(summon[String]) // error: I found: w.bark(/* missing */summon[String]) + + +class Dog: + given bark(using msg: String = "Woof!"): String = s"bark: $msg" + +class Wolf: + private val dog = Dog() + export dog.given // needs to be `export dog.{given, *}` to export the default arguments diff --git a/tests/run/i14083.scala b/tests/run/i14083.scala new file mode 100644 index 000000000000..4014e474ad45 --- /dev/null +++ b/tests/run/i14083.scala @@ -0,0 +1,13 @@ +class Outer { + class Inner +} +@main def Test = + assert(classOf[Outer#Inner] + .getConstructors.head + .getParameters.head + .isSynthetic) + assert( + classOf[Outer#Inner] + .getDeclaredFields + .filter(_.getName == "$outer") + .exists(_.isSynthetic)) diff --git a/tests/run/i14198.scala b/tests/run/i14198.scala new file mode 100644 index 000000000000..94ac07ee5d84 --- /dev/null +++ b/tests/run/i14198.scala @@ -0,0 +1,60 @@ + +import java.lang.ref.WeakReference +import java.util.concurrent.atomic.AtomicReference + +final class Mark + +object Test: + + def main(args: Array[String]): Unit = + myTest + trying + + final def myAssert(cond: => Boolean): Unit = assert(cond) + + def terminally(cond: => Boolean): Unit = + System.gc() + var n = 10 + while (n > 0 && !cond) + do + System.gc() + Thread.`yield`() + //print(".") + n -= 1 + assert(cond) + + def myTest: Unit = + val ref = new AtomicReference[WeakReference[AnyRef]] + var mark: AnyRef = null + assert(ref.compareAndSet(null, WeakReference(Mark()))) + mark = ref.get().get() + myAssert(mark ne null) // in theory this could fail, but it isn't + mark = null + terminally(ref.get().get() == null) + + def trying: Unit = + def ignore[A]: (Throwable => A) = _ => null.asInstanceOf[A] + var i: Int = 21 + var s: String = "hello" + var r: WeakReference[String] = null + def f(n: => Int) = n + n + 1 + def g(x: 
=> String) = + r = WeakReference(x + "/" + x) + r.get() + i = try f(i) catch ignore + s = try g(s) catch ignore + assert(s == "hello/hello") + assert(r.get() == "hello/hello") + s = null + terminally(r.get() == null) + s = "bye" + s = try g(s) catch ignore + assert(s == "bye/bye") + assert(r.get() == "bye/bye") + s = null.asInstanceOf[String] + terminally(r.get() == null) + @volatile var z: String = "whoa" + z = try g(z) catch ignore + assert(r.get() == "whoa/whoa") + z = null + terminally(r.get() == null) diff --git a/tests/run/i14215.scala b/tests/run/i14215.scala new file mode 100644 index 000000000000..d732337ce0f2 --- /dev/null +++ b/tests/run/i14215.scala @@ -0,0 +1,12 @@ +def f[T <: Tuple2[Int, Int]](tup: T): T = tup + +@main def Test: Unit = + (1, 2)._1 + f((1, 2))._1 + + (1 *: 2 *: EmptyTuple)._1 + f(1 *: 2 *: EmptyTuple)._1 + f[Int *: Int *: EmptyTuple](1 *: 2 *: EmptyTuple)._1 + + f[Int *: Int *: EmptyTuple]((1, 2))._1 + f[Tuple2[Int, Int]](1 *: 2 *: EmptyTuple)._1 diff --git a/tests/run/i2567.scala b/tests/run/i2567.scala index 7ab93aaab4f1..c9625aeb33d1 100644 --- a/tests/run/i2567.scala +++ b/tests/run/i2567.scala @@ -1,6 +1,6 @@ class TC -given tc: TC with {} +given tc: TC() class Foo(using TC) { println("hi") diff --git a/tests/run/i2738.check b/tests/run/i2738.check index b54b3d840cf0..5954c1afefc9 100644 --- a/tests/run/i2738.check +++ b/tests/run/i2738.check @@ -1,4 +1,4 @@ -Test$qux$1$ Test$qux$2$ +Test$qux$4$ bar$1 bar$2 diff --git a/tests/run/i2964.check b/tests/run/i2964.check index 748b2a47faf1..ac42be082812 100644 --- a/tests/run/i2964.check +++ b/tests/run/i2964.check @@ -1,4 +1,4 @@ class Foo$$anon$1 -class Bar$$anon$1 class Bar$$anon$2 -class Baz$$anon$1 +class Bar$$anon$3 +class Baz$$anon$4 diff --git a/tests/run/i2964b.check b/tests/run/i2964b.check index 748b2a47faf1..ac42be082812 100644 --- a/tests/run/i2964b.check +++ b/tests/run/i2964b.check @@ -1,4 +1,4 @@ class Foo$$anon$1 -class Bar$$anon$1 class Bar$$anon$2 -class Baz$$anon$1 
+class Bar$$anon$3 +class Baz$$anon$4 diff --git a/tests/run/i2964c.check b/tests/run/i2964c.check index f7cc0f12df69..729b24dcb9f3 100644 --- a/tests/run/i2964c.check +++ b/tests/run/i2964c.check @@ -1,4 +1,4 @@ class Foo$Inner$1 -class Bar$Inner$1 class Bar$Inner$2 -class Baz$Inner$1 +class Bar$Inner$3 +class Baz$Inner$4 diff --git a/tests/run/i2964d.check b/tests/run/i2964d.check index f1a0e8bb51c4..32a719a2630c 100644 --- a/tests/run/i2964d.check +++ b/tests/run/i2964d.check @@ -1,4 +1,4 @@ -class Foo$Inner$1$ -class Bar$Inner$1$ -class Bar$Inner$2$ -class Baz$Inner$1$ +class Foo$Inner$2$ +class Bar$Inner$4$ +class Bar$Inner$6$ +class Baz$Inner$8$ diff --git a/tests/run/i2964e.check b/tests/run/i2964e.check index a42348b97b00..ccffda6bc06b 100644 --- a/tests/run/i2964e.check +++ b/tests/run/i2964e.check @@ -1,4 +1,4 @@ class foo.bar.Foo$$anon$1 class foo.bar.Foo$$anon$2 -class foo.Foo$$anon$1 -class Foo$$anon$1 +class foo.Foo$$anon$3 +class Foo$$anon$4 diff --git a/tests/run/i3000b.check b/tests/run/i3000b.check index c5980c545f33..605021c9b2c0 100644 --- a/tests/run/i3000b.check +++ b/tests/run/i3000b.check @@ -1,2 +1,2 @@ Foo$$anon$1 -bar.Bar$$anon$1 +bar.Bar$$anon$2 diff --git a/tests/run/i3006.check b/tests/run/i3006.check index 8f4e653138b7..a2d3b474408c 100644 --- a/tests/run/i3006.check +++ b/tests/run/i3006.check @@ -1,5 +1,5 @@ f$1 f$2 f$3 -f$1 -f$2 +f$4 +f$5 diff --git a/tests/run/i3006b.check b/tests/run/i3006b.check index 3698829ef115..ec64184dc80c 100644 --- a/tests/run/i3006b.check +++ b/tests/run/i3006b.check @@ -1,3 +1,3 @@ bar$1 bar$2 -bar$1 +bar$3 diff --git a/tests/run/i4241.scala b/tests/run/i4241.scala new file mode 100644 index 000000000000..c55cb5be475f --- /dev/null +++ b/tests/run/i4241.scala @@ -0,0 +1,24 @@ +object Test extends App { + val a: PartialFunction[Int, Int] = { case x => x } + val b: PartialFunction[Int, Int] = x => x match { case 1 => 1; case 2 => 2 } + val c: PartialFunction[Int, Int] = x => { x match { case 1 => 1 } } + 
val d: PartialFunction[Int, Int] = x => { { x match { case 1 => 1 } } } + + val e: PartialFunction[Int, Int] = x => { println("foo"); x match { case 1 => 1 } } + val f: PartialFunction[Int, Int] = x => x + val g: PartialFunction[Int, String] = { x => x.toString } + val h: PartialFunction[Int, String] = _.toString + assert(a.isDefinedAt(2)) + assert(b.isDefinedAt(2)) + assert(!b.isDefinedAt(3)) + assert(c.isDefinedAt(1)) + assert(!c.isDefinedAt(2)) + assert(d.isDefinedAt(1)) + assert(!d.isDefinedAt(2)) + assert(e.isDefinedAt(2)) + assert(f.isDefinedAt(2)) + assert(g.isDefinedAt(2)) + assert(h.isDefinedAt(2)) +} + + diff --git a/tests/run/i6996.check b/tests/run/i6996.check index a4387987de57..3e37e529877c 100644 --- a/tests/run/i6996.check +++ b/tests/run/i6996.check @@ -1,3 +1,3 @@ an `a` -false -not `a` +true +an `a` diff --git a/tests/run/i8396.scala b/tests/run/i8396.scala index 3e44e3898dc5..3f5965d17c9b 100644 --- a/tests/run/i8396.scala +++ b/tests/run/i8396.scala @@ -6,9 +6,9 @@ object Prefix: type UpperBoundedType <: String type FullyBoundedType >: String <: String - given A: Show[AbstractType] with {} - given B: Show[UpperBoundedType] with {} - given C: Show[FullyBoundedType] with {} + given A: Show[AbstractType]() + given B: Show[UpperBoundedType]() + given C: Show[FullyBoundedType]() @main def Test = summon[Show[Prefix.AbstractType]] diff --git a/tests/neg/i8861.scala b/tests/run/i8861.scala similarity index 77% rename from tests/neg/i8861.scala rename to tests/run/i8861.scala index 87f1884f6155..e1e802a5c72b 100644 --- a/tests/neg/i8861.scala +++ b/tests/run/i8861.scala @@ -18,14 +18,16 @@ object Test { int = vi => vi.i : vi.A, str = vs => vs.t : vs.A ) + // Used to infer `c.visit[Int & M)]` and error out in the second lambda, + // now infers `c.visit[(Int & M | String & M)]` def minimalFail[M](c: Container { type A = M }): M = c.visit( int = vi => vi.i : vi.A, - str = vs => vs.t : vs.A // error + str = vs => vs.t : vs.A ) def main(args: Array[String]): 
Unit = { val e: Container { type A = String } = new StrV println(minimalOk(e)) // this one prints "hello" - println(minimalFail(e)) // this one fails with ClassCastException: class java.lang.String cannot be cast to class java.lang.Integer + println(minimalFail(e)) // used to fail with ClassCastException, now prints "hello" } -} \ No newline at end of file +} diff --git a/tests/run/i9482.scala b/tests/run/i9482.scala new file mode 100644 index 000000000000..425a9b333d29 --- /dev/null +++ b/tests/run/i9482.scala @@ -0,0 +1,83 @@ +import scala.reflect.{OptManifest, ClassTag} + +object Ref { + + object Sentinel + + def makeWithArr[A: OptManifest]: String = optManifest[A] match { + case m: ClassTag[_] => m.newArray(0).asInstanceOf[AnyRef] match { + // these can be reordered, so long as Unit comes before AnyRef + case _: Array[Boolean] => "bool" + case _: Array[Byte] => "byte" + case _: Array[Short] => "short" + case _: Array[Char] => "char" + case _: Array[Int] => "int" + case _: Array[Float] => "float" + case _: Array[Long] => "long" + case _: Array[Double] => "double" + case _: Array[Unit] => "unit" + case a: Array[AnyRef] => a.getClass.getComponentType.getName + } + case _ => "" + } + + def make[A: OptManifest]: String = optManifest[A] match { + case m: ClassTag[a] => m match { + case ClassTag.Boolean => "bool" + case ClassTag.Byte => "byte" + case ClassTag.Short => "short" + case ClassTag.Char => "char" + case ClassTag.Int => "int" + case ClassTag.Float => "float" + case ClassTag.Long => "long" + case ClassTag.Double => "double" + case ClassTag.Unit => "unit" + case ClassTag.Any => "any" + case ClassTag.AnyVal => "anyval" + case ClassTag.Object => "anyref" + case _ => m.runtimeClass.getName + } + case NoManifest => "" + } + +} + +import Ref.* + +def baz[A] = Ref.makeWithArr[A] +def qux[A] = Ref.make[A] + +@main def Test = { + + assert(Ref.makeWithArr[Boolean] == "bool") + assert(Ref.makeWithArr[Byte] == "byte") + assert(Ref.makeWithArr[Short] == "short") + 
assert(Ref.makeWithArr[Char] == "char") + assert(Ref.makeWithArr[Int] == "int") + assert(Ref.makeWithArr[Float] == "float") + assert(Ref.makeWithArr[Long] == "long") + assert(Ref.makeWithArr[Double] == "double") + assert(Ref.makeWithArr[Unit] == "unit") + assert(Ref.makeWithArr["abc"] == "java.lang.String") + assert(Ref.makeWithArr[Null] == "") + assert(Ref.makeWithArr[Nothing] == "") + assert(baz[Int] == "") + + assert(Ref.make[Boolean] == "bool") + assert(Ref.make[Byte] == "byte") + assert(Ref.make[Short] == "short") + assert(Ref.make[Char] == "char") + assert(Ref.make[Int] == "int") + assert(Ref.make[Float] == "float") + assert(Ref.make[Long] == "long") + assert(Ref.make[Double] == "double") + assert(Ref.make[Unit] == "unit") + assert(Ref.make[Any] == "any") + assert(Ref.make[AnyVal] == "anyval") + assert(Ref.make[AnyRef] == "anyref") + assert(Ref.make["abc"] == "java.lang.String") + assert(Ref.make[Null] == "") + assert(Ref.make[Nothing] == "") + assert(qux[Int] == "") + +} diff --git a/tests/run/i9939.check b/tests/run/i9939.check new file mode 100644 index 000000000000..7040250edc75 --- /dev/null +++ b/tests/run/i9939.check @@ -0,0 +1,2 @@ +[3.0, 42] +[3.0, 42.0] diff --git a/tests/run/i9939.scala b/tests/run/i9939.scala new file mode 100644 index 000000000000..84ae5c2723c8 --- /dev/null +++ b/tests/run/i9939.scala @@ -0,0 +1,6 @@ +def nonHarmonized[A >: Any](args: A*): String = args.mkString("[", ", ", "]") +def harmonized[A >: Any](args: A*): List[A] = args.toList + +@main def Test = + println(nonHarmonized(3.0, 42)) + println(harmonized(3.0, 42).mkString("[", ", ", "]")) diff --git a/tests/run/implicit-alias.scala b/tests/run/implicit-alias.scala index 1a30a3d22915..64117c3fc977 100644 --- a/tests/run/implicit-alias.scala +++ b/tests/run/implicit-alias.scala @@ -6,7 +6,7 @@ object Test extends App { class TC1 - given TC1 with {} + given TC1() class TV(val tc: TC) extends AnyVal diff --git a/tests/run/implicit-disambiguation.scala 
b/tests/run/implicit-disambiguation.scala index ad6f9a9bedc6..98ac89f8addf 100644 --- a/tests/run/implicit-disambiguation.scala +++ b/tests/run/implicit-disambiguation.scala @@ -14,7 +14,7 @@ object M { } } object Test extends App { - given b: B with {} - given c: C with {} + given b: B() + given c: C() println(M.f) } diff --git a/tests/run/implicit-specifity.scala b/tests/run/implicit-specifity.scala index c92ee5f9acef..51fa02d91cfd 100644 --- a/tests/run/implicit-specifity.scala +++ b/tests/run/implicit-specifity.scala @@ -20,12 +20,12 @@ object Generic2 { class SubGen extends Generic object SubGen { - given SubGen with {} + given SubGen() } object Contextual { trait Context - given ctx: Context with {} + given ctx: Context() given showGen[T](using Generic): Show[T] = new Show[T](2) diff --git a/tests/run/implied-divergence.scala b/tests/run/implied-divergence.scala index 791df6dbd5bd..ecf5fd81dfd8 100644 --- a/tests/run/implied-divergence.scala +++ b/tests/run/implied-divergence.scala @@ -2,11 +2,11 @@ // recursions. 
case class E(x: E | Null) -given e: E(null) with {} +given e: E(null) object Test extends App { - given f(using e: E): E(e) with {} + given f(using e: E): E(e) assert(summon[E].toString == "E(E(null))") diff --git a/tests/run/implied-for.scala b/tests/run/implied-for.scala index 252bac528712..c7789ce570e4 100644 --- a/tests/run/implied-for.scala +++ b/tests/run/implied-for.scala @@ -6,10 +6,10 @@ object A { class C extends T class D[T] - given b: B with {} - given c: C with {} - given t: T with {} - given d: D[Int] with {} + given b: B() + given c: C() + given t: T() + given d: D[Int]() } object Test extends App { @@ -29,11 +29,11 @@ class ExecutionContext class Monoid[T] object Instances { - given intOrd: Ordering[Int] with {} + given intOrd: Ordering[Int]() - given listOrd[T](using Ordering[T]): Ordering[List[T]] with {} - given ec: ExecutionContext with {} - given im: Monoid[Int] with {} + given listOrd[T](using Ordering[T]): Ordering[List[T]]() + given ec: ExecutionContext() + given im: Monoid[Int]() } object Test2 { diff --git a/tests/run/implied-priority.scala b/tests/run/implied-priority.scala index 44cd7d56504c..0822fae6778f 100644 --- a/tests/run/implied-priority.scala +++ b/tests/run/implied-priority.scala @@ -11,18 +11,18 @@ class Arg[T] // An argument that we use as a given for some given instances bel * Traditional scheme: prioritize with location in class hierarchy */ class LowPriorityImplicits { - given t1[T]: E[T]("low") with {} + given t1[T]: E[T]("low") } object NormalImplicits extends LowPriorityImplicits { - given t2[T](using Arg[T]): E[T]("norm") with {} + given t2[T](using Arg[T]): E[T]("norm") } def test1 = { import NormalImplicits.given assert(summon[E[String]].str == "low") // No Arg available, so only t1 applies - { given Arg[String] with {} + { given Arg[String]() assert(summon[E[String]].str == "norm") // Arg available, t2 takes priority } } @@ -31,22 +31,22 @@ def test1 = { */ object Priority { class Low - object Low { given Low with {} 
} + object Low { given Low() } class High extends Low - object High { given High with {} } + object High { given High() } } object Impl2 { - given t1[T](using Priority.Low): E[T]("low") with {} - given t2[T](using Priority.High)(using Arg[T]): E[T]("norm") with {} + given t1[T](using Priority.Low): E[T]("low") + given t2[T](using Priority.High)(using Arg[T]): E[T]("norm") } def test2 = { import Impl2.given assert(summon[E[String]].str == "low") // No Arg available, so only t1 applies - { given Arg[String] with {} + { given Arg[String]() assert(summon[E[String]].str == "norm") // Arg available, t2 takes priority } } @@ -60,14 +60,14 @@ def test2 = { * an alternative without implicit arguments would override all of them. */ object Impl2a { - given t3[T]: E[T]("hi") with {} + given t3[T]: E[T]("hi") } def test2a = { import Impl2.given import Impl2a.given - given Arg[String] with {} + given Arg[String]() assert(summon[E[String]].str == "hi") } @@ -75,13 +75,13 @@ def test2a = { * result type of the given instance, e.g. like this: */ object Impl3 { - given t1[T]: E[T]("low") with {} + given t1[T]: E[T]("low") } object Override { trait HighestPriority // A marker trait to indicate a higher priority - given over[T]: E[T]("hi") with HighestPriority with {} + given over[T]: E[T]("hi") with HighestPriority() } def test3 = { @@ -101,9 +101,9 @@ def test3 = { * with a default argument. 
*/ object Impl4 { - given t1: E[String]("string") with {} + given t1: E[String]("string") - given t2[T](using Arg[T]): E[T]("generic") with {} + given t2[T](using Arg[T]): E[T]("generic") } object fallback4 { @@ -116,7 +116,7 @@ def test4 = { assert(withFallback[String].str == "string") // t1 is applicable assert(withFallback[Int].str == "fallback") // No applicable instances, pick the default - { given Arg[Int] with {} + { given Arg[Int]() assert(withFallback[Int].str == "generic") // t2 is applicable } } @@ -146,7 +146,7 @@ def test5 = { assert(summon[E[String]].str == "string") // t1 is applicable assert(summon[E[Int]].str == "fallback") // No applicable instances, pick the default - { given Arg[Int] with {} + { given Arg[Int]() assert(summon[E[Int]].str == "generic") // t2 is applicable } } diff --git a/tests/run/implied-specifity-2.scala b/tests/run/implied-specifity-2.scala index a787531c1089..2676b33cda97 100644 --- a/tests/run/implied-specifity-2.scala +++ b/tests/run/implied-specifity-2.scala @@ -1,58 +1,58 @@ class Low object Low { - given low: Low with {} + given low: Low() } class Medium extends Low object Medium { - given medium: Medium with {} + given medium: Medium() } class High extends Medium object High { - given high: High with {} + given high: High() } class Foo[T](val i: Int) object Foo { def apply[T](using fooT: Foo[T]): Int = fooT.i - given foo[T](using Low): Foo[T](0) with {} - given foobar[T](using Low): Foo[Bar[T]](1) with {} - given foobarbaz(using Low): Foo[Bar[Baz]](2) with {} + given foo[T](using Low): Foo[T](0) + given foobar[T](using Low): Foo[Bar[T]](1) + given foobarbaz(using Low): Foo[Bar[Baz]](2) } class Bar[T] object Bar { - given foobar[T](using Medium): Foo[Bar[T]](3) with {} - given foobarbaz(using Medium): Foo[Bar[Baz]](4) with {} + given foobar[T](using Medium): Foo[Bar[T]](3) + given foobarbaz(using Medium): Foo[Bar[Baz]](4) } class Baz object Baz { - given baz(using High): Foo[Bar[Baz]](5) with {} + given baz(using High): 
Foo[Bar[Baz]](5) } class Arg -given Arg with {} +given Arg() class Bam(val str: String) -given lo(using Low): Bam("lo") with {} +given lo(using Low): Bam("lo") -given hi(using High)(using Arg): Bam("hi") with {} +given hi(using High)(using Arg): Bam("hi") class Bam2(val str: String) -given lo2(using Low): Bam2("lo") with {} +given lo2(using Low): Bam2("lo") -given mid2(using High)(using Arg): Bam2("mid") with {} +given mid2(using High)(using Arg): Bam2("mid") -given hi2: Bam2("hi") with {} +given hi2: Bam2("hi") class Arg2 class Red(val str: String) -given normal(using Arg2): Red("normal") with {} +given normal(using Arg2): Red("normal") -given reduced(using ev: Arg2 | Low): Red("reduced") with {} +given reduced(using ev: Arg2 | Low): Red("reduced") object Test extends App { assert(Foo[Int] == 0) @@ -62,7 +62,7 @@ object Test extends App { assert(summon[Bam2].str == "hi") assert(summon[Red].str == "reduced") - { given Arg2 with {} + { given Arg2() assert(summon[Red].str == "normal") } } \ No newline at end of file diff --git a/tests/run/inline-override-num.scala b/tests/run/inline-override-num.scala index 06f0ae91c0f8..b4a44e2b3a8d 100644 --- a/tests/run/inline-override-num.scala +++ b/tests/run/inline-override-num.scala @@ -6,7 +6,7 @@ object Num { class IntNum extends Num[Int] { inline def plus(x: Int, y: Int): Int = x + y } - given IntNum with {} + given IntNum() extension [T](inline x: T)(using inline num: Num[T]) { inline def +(inline y: T): T = num.plus(x, y) diff --git a/tests/run/java-intersection.check b/tests/run/java-intersection.check new file mode 100644 index 000000000000..51c75c6b280b --- /dev/null +++ b/tests/run/java-intersection.check @@ -0,0 +1,6 @@ +1 +2 +1 +Sub1 +2 +Sub2 diff --git a/tests/run/java-intersection/A_1.java b/tests/run/java-intersection/A_1.java index ebe834a74cce..f54862e8adc2 100644 --- a/tests/run/java-intersection/A_1.java +++ b/tests/run/java-intersection/A_1.java @@ -1,3 +1,11 @@ +import java.io.Serializable; + public class 
A_1 { - public void foo(T x) {} + public void foo(T x) { + System.out.println("1"); + } + + public void foo(T x) { + System.out.println("2"); + } } diff --git a/tests/run/java-intersection/Test_2.scala b/tests/run/java-intersection/Test_2.scala index cbc39988340a..a3d7b11fce12 100644 --- a/tests/run/java-intersection/Test_2.scala +++ b/tests/run/java-intersection/Test_2.scala @@ -1,7 +1,26 @@ +import java.io.Serializable + +class Sub extends A_1 { + override def foo[T <: Object & Serializable](x: T) = + super.foo(x) + println("Sub1") + + override def foo[T <: Cloneable & Serializable](x: T) = + super.foo(x) + println("Sub2") +} + object Test { def main(args: Array[String]): Unit = { + val x: Object & Serializable = new Serializable {} + val y: Cloneable & Serializable = new Cloneable with java.io.Serializable {} + val a = new A_1 - val x: java.io.Serializable = new java.io.Serializable {} a.foo(x) + a.foo(y) + + val s = new Sub + s.foo(x) + s.foo(y) } } diff --git a/tests/run/java-varargs-3/A.java b/tests/run/java-varargs-3/A.java new file mode 100644 index 000000000000..046592bb020e --- /dev/null +++ b/tests/run/java-varargs-3/A.java @@ -0,0 +1,14 @@ +class A { + public void gen(S... args) { + } + + public void gen2(S... args) { + } +} +class B { + public void gen(S... args) { + } + + public void gen2(S... 
args) { + } +} diff --git a/tests/run/java-varargs-3/Test.scala b/tests/run/java-varargs-3/Test.scala new file mode 100644 index 000000000000..4c00e491a6ad --- /dev/null +++ b/tests/run/java-varargs-3/Test.scala @@ -0,0 +1,19 @@ +object Test { + def main(args: Array[String]): Unit = { + val ai = new A[Int] + ai.gen(1) + ai.gen2(1) + ai.gen(Array(1)*) + ai.gen2(Array(1)*) + ai.gen(Seq(1)*) + ai.gen2(Seq(1)*) + + val b = new B[String] + b.gen("") + b.gen2("") + b.gen(Array("")*) + b.gen2(Array("")*) + b.gen(Seq("")*) + b.gen2(Seq("")*) + } +} diff --git a/tests/run/liftedTry.scala b/tests/run/liftedTry.scala index ff9af98eca32..0a158b5a0ba2 100644 --- a/tests/run/liftedTry.scala +++ b/tests/run/liftedTry.scala @@ -16,7 +16,7 @@ object Test { assert(x == 1) assert(foo(2) == 2) assert(foo(try raise(3) catch handle) == 3) - Tr.foo + assert(Tr.foo == 3) } } @@ -24,7 +24,26 @@ object Tr { def fun(a: Int => Unit) = a(2) def foo: Int = { var s = 1 - s = try {fun(s = _); 3} catch{ case ex: Throwable => val x = 4; s = x; 5 } + s = try {fun(s = _); 3} catch { case ex: Throwable => val x = 4; s = x; 5 } s } } + +/* was: +Caused by: java.lang.VerifyError: Inconsistent stackmap frames at branch target 33 +Exception Details: + Location: + Tr$.foo()I @30: goto + Reason: + Current frame's stack size doesn't match stackmap. + Current Frame: + bci: @30 + flags: { } + locals: { 'Tr$', 'scala/runtime/IntRef', 'java/lang/Throwable', integer } + stack: { integer } + Stackmap Frame: + bci: @33 + flags: { } + locals: { 'Tr$', 'scala/runtime/IntRef' } + stack: { top, integer } + */ diff --git a/tests/run/manifest-summoning.scala b/tests/run/manifest-summoning.scala new file mode 100644 index 000000000000..dcee63801863 --- /dev/null +++ b/tests/run/manifest-summoning.scala @@ -0,0 +1,101 @@ +import scala.reflect.{classTag, ClassTag, NoManifest} + +@main def Test: Unit = + + /* ====== no manifest available ====== */ + + locally { + noManifest[Array[? 
<: Int]] // available as a manifest + noManifest[Array[? <: String]] // available as a manifest + noManifest[Array[Nothing]] + noManifest[Array[Null]] + noManifest[Nothing] + noManifest[Null] + } + + /* ====== ClassTag and OptManifest have the same runtime class and same equality ======= */ + + locally { + interopOpt[List[Int]] + interopOpt[List[? <: Int]] + } + + /* ====== Test some OptManifest have the same runtime class and are equal ======= */ + + locally { + sameClassEqualOpt[List[Int], List[? <: Int]] // not equal for full manifests + sameClassEqualOpt[List[Int], List[String]] // not equal for full manifests + } + + /* ============================================================================= */ + // The following tests rely on <:< being correct, i.e. `equals` on Manifest // + // uses `<:<` underneath. // + /* ============================================================================= */ + + /* ====== Test some Manifest have the same runtime class and are equal ======= */ + + locally { + trait A + trait B {def b: Int} + trait C {def c: Int} + trait D {def d: Int} + class fooAnnot extends scala.annotation.StaticAnnotation + + type SomeRefinedType = + ((B {def b: 0} & C) & ((A @fooAnnot) & D {def d: 2})) {def c: 1} + + sameClassEqualMan[Array[? <: String], Array[String]] + sameClassEqualMan[SomeRefinedType, A] + } + + + /* ====== Test some Manifest have the same runtime class but are not equal ======= */ + + locally { + sameClassNonEqualMan[List[Int], List[? 
<: Int]] + sameClassNonEqualMan[List[Int], List[String]] + } + + /* ====== Test that some Manifest have the same runtime class, are not equal, but are `<:<` ======= */ + + locally { + class A + class B extends A + + sameClassSub[List[Int], List[AnyVal]] + sameClassSub[List[Unit], List[AnyVal]] + sameClassSub[List[B], List[A]] + sameClassSub[Array[List[B]], Array[List[A]]] + } + +end Test + +def noManifest[A: OptManifest] = + assert(optManifest[A] eq NoManifest) + +def interopOpt[A: ClassTag: OptManifest] = + assert(classTag[A] == optManifest[A]) + optManifest[A] match + case optA: ClassTag[_] => + assert(classTag[A].runtimeClass == optA.runtimeClass) + +def sameClassEqualOpt[A: OptManifest, B: OptManifest] = + assert(optManifest[A] == optManifest[B]) + (optManifest[A], optManifest[B]) match + case (a: ClassTag[_], b: ClassTag[_]) => + assert(a.runtimeClass == b.runtimeClass) + +def sameClassMan[A: Manifest, B: Manifest] = + assert(manifest[A].runtimeClass == manifest[B].runtimeClass) + +def sameClassEqualMan[A: Manifest, B: Manifest] = + sameClassMan[A, B] + assert(manifest[A] == manifest[B]) + +def sameClassNonEqualMan[A: Manifest, B: Manifest] = + sameClassMan[A, B] + assert(manifest[A] != manifest[B]) + +def sameClassSub[A: Manifest, B: Manifest] = + sameClassNonEqualMan[A, B] + assert(manifest[A] <:< manifest[B]) diff --git a/tests/run/opaque-inline/EmailAddress.scala b/tests/run/opaque-inline/EmailAddress.scala new file mode 100644 index 000000000000..de5036a0ca77 --- /dev/null +++ b/tests/run/opaque-inline/EmailAddress.scala @@ -0,0 +1,10 @@ +import scala.quoted.* + +opaque type EmailAddress = String +object EmailAddress extends EmailAddressOps[EmailAddress]: + + given (using s: ToExpr[String]): ToExpr[EmailAddress] = s + + def parse(s: String): Either[String, EmailAddress] = + if (s contains "@") Right(s) + else Left("No @ symbol") diff --git a/tests/run/opaque-inline/EmailAddressOps.scala b/tests/run/opaque-inline/EmailAddressOps.scala new file mode 100644 
index 000000000000..217c36dd6724 --- /dev/null +++ b/tests/run/opaque-inline/EmailAddressOps.scala @@ -0,0 +1,33 @@ +import scala.quoted.* + +trait EmailAddressOps[EmailAddressTransparent <: String]: + + inline def apply(inline s: String): EmailAddress = + ${ EmailAddressOps.applyImpl('s) } + + private val pattern = java.util.regex.Pattern.compile("([^@]*)@([^@]*)") + + extension (value: EmailAddressTransparent) + inline def localPart: String = + val matcher = pattern.matcher(value: String) + matcher.matches + matcher.group(1) + inline def domainPart: String = + val matcher = pattern.matcher(value: String) + matcher.matches + matcher.group(2) + +object EmailAddressOps { + def applyImpl(expr: Expr[String])(using Quotes): Expr[EmailAddress] = + import quotes.reflect.* + expr.asTerm match + case Inlined(_, _, Literal(StringConstant(s))) => + EmailAddress.parse(s) match + case Right(email) => Expr(email) + case Left(err) => + report.error(s"Not a valid email address: $err", expr) + '{???} + case _ => + report.error(s"Not a constant", expr) + '{???} +} \ No newline at end of file diff --git a/tests/run/opaque-inline/Test.scala b/tests/run/opaque-inline/Test.scala new file mode 100644 index 000000000000..c228e361e5a9 --- /dev/null +++ b/tests/run/opaque-inline/Test.scala @@ -0,0 +1,2 @@ +@main def Test = + (new TestEmail).run diff --git a/tests/run/opaque-inline/TestEmail.scala b/tests/run/opaque-inline/TestEmail.scala new file mode 100644 index 000000000000..13121f59e1d6 --- /dev/null +++ b/tests/run/opaque-inline/TestEmail.scala @@ -0,0 +1,7 @@ +class TestEmail { + def getDomain(e: EmailAddress): String = e.domainPart + + def run: Unit = + val em = EmailAddress("a@b.c") + assert(getDomain(em) == "b.c") +} diff --git a/tests/run/opaque-inline2.scala b/tests/run/opaque-inline2.scala new file mode 100644 index 000000000000..fda5f6756827 --- /dev/null +++ b/tests/run/opaque-inline2.scala @@ -0,0 +1,28 @@ + +import compiletime.* + +object refined: + opaque type Positive = 
Int + + object Positive extends PositiveFactory + + trait PositiveFactory: + inline def apply(inline value: Int): Positive = + inline if value < 0 then error(codeOf(value) + " is not positive.") + else value + + transparent inline def safe(value: Int): Positive | IllegalArgumentException = + if value < 0 then IllegalArgumentException(s"$value is not positive") + else value: Positive + +@main def Test: Unit = + import refined.* + val eight = Positive(8) + // val negative = Positive(-1) // This correctly produces a compile error "-1 is not positive." + // val random = Positive(scala.util.Random.nextInt()) // This correctly produces a compile error about being unable to inline the method call + val random = Positive.safe(scala.util.Random.nextInt()) + val safeNegative = Positive.safe(-1) + val safeFive = Positive.safe(5) + println(eight) + println(random) + println(safeFive) \ No newline at end of file diff --git a/tests/run/overloading-specifity.scala b/tests/run/overloading-specifity.scala index 905e45ace938..b826b3320d1e 100644 --- a/tests/run/overloading-specifity.scala +++ b/tests/run/overloading-specifity.scala @@ -11,7 +11,7 @@ object Generic { object Test extends App { trait Context - given ctx: Context with {} + given ctx: Context() object b { def foo[T](implicit gen: Generic): Show[T] = new Show[T](1) diff --git a/tests/run/poly-kinded-derives.scala b/tests/run/poly-kinded-derives.scala index 73ad7dc1957a..0d3c7e9f8ffd 100644 --- a/tests/run/poly-kinded-derives.scala +++ b/tests/run/poly-kinded-derives.scala @@ -4,10 +4,10 @@ object Test extends App { { trait Show[T] object Show { - given Show[Int] with {} - given [T](using st: Show[T]): Show[Tuple1[T]] with {} - given t2[T, U](using st: Show[T], su: Show[U]): Show[(T, U)] with {} - given t3 [T, U, V](using st: Show[T], su: Show[U], sv: Show[V]): Show[(T, U, V)] with {} + given Show[Int]() + given [T](using st: Show[T]): Show[Tuple1[T]]() + given t2[T, U](using st: Show[T], su: Show[U]): Show[(T, U)]() + 
given t3 [T, U, V](using st: Show[T], su: Show[U], sv: Show[V]): Show[(T, U, V)]() def derived[T](using m: Mirror.Of[T], r: Show[m.MirroredElemTypes]): Show[T] = new Show[T] {} } @@ -22,10 +22,10 @@ object Test extends App { { trait Functor[F[_]] object Functor { - given [C]: Functor[[T] =>> C] with {} - given Functor[[T] =>> Tuple1[T]] with {} - given t2 [T]: Functor[[U] =>> (T, U)] with {} - given t3 [T, U]: Functor[[V] =>> (T, U, V)] with {} + given [C]: Functor[[T] =>> C]() + given Functor[[T] =>> Tuple1[T]]() + given t2 [T]: Functor[[U] =>> (T, U)]() + given t3 [T, U]: Functor[[V] =>> (T, U, V)]() def derived[F[_]](using m: Mirror { type MirroredType[X] = F[X] ; type MirroredElemTypes[_] }, r: Functor[m.MirroredElemTypes]): Functor[F] = new Functor[F] {} } @@ -40,8 +40,8 @@ object Test extends App { { trait FunctorK[F[_[_]]] object FunctorK { - given [C]: FunctorK[[F[_]] =>> C] with {} - given [T]: FunctorK[[F[_]] =>> Tuple1[F[T]]] with {} + given [C]: FunctorK[[F[_]] =>> C]() + given [T]: FunctorK[[F[_]] =>> Tuple1[F[T]]]() def derived[F[_[_]]](using m: Mirror { type MirroredType[X[_]] = F[X] ; type MirroredElemTypes[_[_]] }, r: FunctorK[m.MirroredElemTypes]): FunctorK[F] = new FunctorK[F] {} } @@ -56,10 +56,10 @@ object Test extends App { { trait Bifunctor[F[_, _]] object Bifunctor { - given [C]: Bifunctor[[T, U] =>> C] with {} - given Bifunctor[[T, U] =>> Tuple1[U]] with {} - given t2: Bifunctor[[T, U] =>> (T, U)] with {} - given t3 [T]: Bifunctor[[U, V] =>> (T, U, V)] with {} + given [C]: Bifunctor[[T, U] =>> C]() + given Bifunctor[[T, U] =>> Tuple1[U]]() + given t2: Bifunctor[[T, U] =>> (T, U)]() + given t3 [T]: Bifunctor[[U, V] =>> (T, U, V)]() def derived[F[_, _]](using m: Mirror { type MirroredType[X, Y] = F[X, Y] ; type MirroredElemTypes[_, _] }, r: Bifunctor[m.MirroredElemTypes]): Bifunctor[F] = ??? 
} diff --git a/tests/run/polymorphic-functions.scala b/tests/run/polymorphic-functions.scala index e723fa34cf79..d059484d60cd 100644 --- a/tests/run/polymorphic-functions.scala +++ b/tests/run/polymorphic-functions.scala @@ -95,4 +95,6 @@ object Test extends App { // Parens handling val tt1: [T] => (T => T) = [T] => (x: T) => x val tt2: [T] => T => T = [T] => ((x: T) => x) + val tt3: [T] => T => T = [T] => { (x: T) => x } + val tt4: [T] => T => T = [T] => (x: T) => { x } } diff --git a/tests/run/productElementName.check b/tests/run/productElementName.check index a7499b7c1e5d..6bad7ad80814 100644 --- a/tests/run/productElementName.check +++ b/tests/run/productElementName.check @@ -9,6 +9,6 @@ AuxiliaryConstructor(a=Susan, b=42) OverloadedApply(a=Susan, b=123) DefinesProductElementName(foo=Susan, foo=42) InheritsProductElementName(a=Susan, b=42) -InheritsProductElementName_Override(overriden=Susan, overriden=42) +InheritsProductElementName_Override(overridden=Susan, overridden=42) InheritsProductElementName_Override_SelfType(a=Susan, b=42) PrivateMembers(a=10, b=20, c=30, d=40, e=50, f=60) diff --git a/tests/run/productElementName.scala b/tests/run/productElementName.scala index b98ac96cdfa9..ff9a2e4dac33 100644 --- a/tests/run/productElementName.scala +++ b/tests/run/productElementName.scala @@ -30,17 +30,17 @@ case class DefinesProductElementName(a: String, b: Int) { } trait A { - def productElementName(n: Int): String = "overriden" + def productElementName(n: Int): String = "overridden" } case class InheritsProductElementName(a: String, b: Int) extends A trait B extends Product { - override def productElementName(n: Int): String = "overriden" + override def productElementName(n: Int): String = "overridden" } case class InheritsProductElementName_Override(a: String, b: Int) extends B trait C { self: Product => - override def productElementName(n: Int): String = "overriden" + override def productElementName(n: Int): String = "overridden" } case class 
InheritsProductElementName_Override_SelfType(a: String, b: Int) extends C diff --git a/tests/run/safeThrowsStrawman.scala b/tests/run/safeThrowsStrawman.scala index 8ddb594b787a..973c9d8f5137 100644 --- a/tests/run/safeThrowsStrawman.scala +++ b/tests/run/safeThrowsStrawman.scala @@ -3,29 +3,29 @@ import language.experimental.erasedDefinitions object scalax: erased class CanThrow[-E <: Exception] - infix type throws[R, +E <: Exception] = CanThrow[E] ?=> R + infix type raises[R, +E <: Exception] = CanThrow[E] ?=> R class Fail extends Exception - def raise[E <: Exception](e: E): Nothing throws E = throw e + def raise[E <: Exception](e: E): Nothing raises E = throw e import scalax._ -def foo(x: Boolean): Int throws Fail = +def foo(x: Boolean): Int raises Fail = if x then 1 else raise(Fail()) def bar(x: Boolean)(using CanThrow[Fail]): Int = foo(x) -def baz: Int throws Exception = foo(false) +def baz: Int raises Exception = foo(false) @main def Test = try - given CanThrow[Fail] = ??? + given CanThrow[Fail] = new CanThrow println(foo(true)) println(foo(false)) catch case ex: Fail => println("failed") try - given CanThrow[Exception] = ??? 
+ given CanThrow[Exception] = new CanThrow println(baz) catch case ex: Fail => println("failed") diff --git a/tests/run/safeThrowsStrawman2.scala b/tests/run/safeThrowsStrawman2.scala index 1fa38257c177..1c84d84babc7 100644 --- a/tests/run/safeThrowsStrawman2.scala +++ b/tests/run/safeThrowsStrawman2.scala @@ -3,22 +3,22 @@ import language.experimental.erasedDefinitions object scalax: erased class CanThrow[-E <: Exception] - infix type throws[R, +E <: Exception] = CanThrow[E] ?=> R + infix type raises[R, +E <: Exception] = CanThrow[E] ?=> R class Fail extends Exception - def raise[E <: Exception](e: E): Nothing throws E = throw e + def raise[E <: Exception](e: E): Nothing raises E = throw e private class Result[T]: var value: T = scala.compiletime.uninitialized - def try1[R, E <: Exception](body: => R throws E)(c: E => Unit): R = + def try1[R, E <: Exception](body: => R raises E)(c: E => Unit): R = try2(body)(c) {} - def try2[R, E <: Exception](body: => R throws E)(c: E => Unit)(f: => Unit): R = + def try2[R, E <: Exception](body: => R raises E)(c: E => Unit)(f: => Unit): R = val res = new Result[R] try - given CanThrow[E] = ??? 
+ given CanThrow[E] = new CanThrow res.value = body catch c.asInstanceOf[Throwable => Unit] finally f @@ -30,11 +30,11 @@ object scalax: import scalax._ -def foo(x: Boolean): Int throws Fail = +def foo(x: Boolean): Int raises Fail = if x then 1 else raise(Fail()) def bar(x: Boolean)(using CanThrow[Fail]): Int = foo(x) -def baz: Int throws Exception = foo(false) +def baz: Int raises Exception = foo(false) @main def Test = try1 { diff --git a/tests/run/saferExceptions.scala b/tests/run/saferExceptions.scala new file mode 100644 index 000000000000..c28509ddf7be --- /dev/null +++ b/tests/run/saferExceptions.scala @@ -0,0 +1,34 @@ +import language.experimental.saferExceptions + +class Fail extends Exception + +def foo(x: Int) = + try x match + case 1 => throw AssertionError() + case 2 => throw Fail() + case 3 => throw java.io.IOException() + case 4 => throw Exception() + case 5 => throw Throwable() + case _ => 0 + catch + case ex: AssertionError => 1 + case ex: Fail => 2 + case ex: java.io.IOException => 3 + case ex: Exception => 4 + case ex: Throwable => 5 + +def bar(x: Int): Int throws Exception = + x match + case 1 => throw AssertionError() + case 2 => throw Fail() + case 3 => throw java.io.IOException() + case 4 => throw Exception() + case _ => 0 + +@main def Test = + assert(foo(1) + foo(2) + foo(3) + foo(4) + foo(5) + foo(6) == 15) + import unsafeExceptions.canThrowAny + val x = + try bar(2) + catch case ex: Fail => 3 // OK + assert(x == 3) diff --git a/tests/run/singleton-ops-flags.scala b/tests/run/singleton-ops-flags.scala index 667c43251b07..3b8d3c21aaca 100644 --- a/tests/run/singleton-ops-flags.scala +++ b/tests/run/singleton-ops-flags.scala @@ -7,8 +7,7 @@ package example { final val EmptyFlags = baseFlags final val Erased = EmptyFlags.next - final val Internal = Erased.next - final val Inline = Internal.next + final val Inline = Erased.next final val InlineProxy = Inline.next final val Opaque = InlineProxy.next final val Scala2x = Opaque.next @@ -26,7 +25,6 
@@ package example { if s == EmptyFlags then "EmptyFlags" else s.toSingletonSets[LastFlag].map ( [n <: Int] => (flag: SingletonFlagSet[n]) => flag match { case Erased => "Erased" - case Internal => "Internal" case Inline => "Inline" case InlineProxy => "InlineProxy" case Opaque => "Opaque" diff --git a/tests/run/splice-position.check b/tests/run/splice-position.check new file mode 100644 index 000000000000..904f7c9c2efd --- /dev/null +++ b/tests/run/splice-position.check @@ -0,0 +1,2 @@ +Test$.main(Test.scala:3) +Test$.main(Test.scala:4) diff --git a/tests/run/splice-position/Test.scala b/tests/run/splice-position/Test.scala new file mode 100644 index 000000000000..0bb354cf0115 --- /dev/null +++ b/tests/run/splice-position/Test.scala @@ -0,0 +1,4 @@ +object Test: + def main(args: Array[String]) = + try assertTrue(1 == 2) catch e => println(e.getStackTrace()(0)) + try assertTrue(1 == 3) catch e => println(e.getStackTrace()(0)) diff --git a/tests/run/splice-position/macros.scala b/tests/run/splice-position/macros.scala new file mode 100644 index 000000000000..b575a460b6e5 --- /dev/null +++ b/tests/run/splice-position/macros.scala @@ -0,0 +1,7 @@ +import scala.quoted.{Quotes, Expr, quotes} + +inline def assertTrue(cond: Boolean) = + ${ assertTrueImpl('cond) } + +def assertTrueImpl(cond: Expr[Boolean])(using Quotes) = + '{ if (!$cond) throw new Error(${'{""}}) } diff --git a/tests/run/string-switch-defaults-null.check b/tests/run/string-switch-defaults-null.check new file mode 100644 index 000000000000..4bbcfcf56827 --- /dev/null +++ b/tests/run/string-switch-defaults-null.check @@ -0,0 +1,2 @@ +2 +-1 diff --git a/tests/run/string-switch-defaults-null.scala b/tests/run/string-switch-defaults-null.scala new file mode 100644 index 000000000000..9fc4ce235a2d --- /dev/null +++ b/tests/run/string-switch-defaults-null.scala @@ -0,0 +1,16 @@ +import annotation.switch + +object Test { + def test(s: String): Int = { + (s : @switch) match { + case "1" => 0 + case null => -1 + 
case _ => s.toInt + } + } + + def main(args: Array[String]): Unit = { + println(test("2")) + println(test(null)) + } +} diff --git a/tests/run/string-switch.check b/tests/run/string-switch.check new file mode 100644 index 000000000000..7ab6b33ec0ae --- /dev/null +++ b/tests/run/string-switch.check @@ -0,0 +1,29 @@ +fido Success(dog) +garfield Success(cat) +wanda Success(fish) +henry Success(horse) +felix Failure(scala.MatchError: felix (of class java.lang.String)) +deuteronomy Success(cat) +===== +AaAa 2031744 Success(1) +BBBB 2031744 Success(2) +BBAa 2031744 Failure(scala.MatchError: BBAa (of class java.lang.String)) +cCCc 3015872 Success(3) +ddDd 3077408 Success(4) +EEee 2125120 Failure(scala.MatchError: EEee (of class java.lang.String)) +===== +A Success(()) +X Failure(scala.MatchError: X (of class java.lang.String)) +===== + Success(3) +null Success(2) +7 Failure(scala.MatchError: 7 (of class java.lang.String)) +===== +pig Success(1) +dog Success(2) +===== +Ea 2236 Success(1) +FB 2236 Success(2) +cC 3136 Success(3) +xx 3840 Success(4) +null 0 Success(4) diff --git a/tests/run/string-switch.scala b/tests/run/string-switch.scala new file mode 100644 index 000000000000..6a1522b416d9 --- /dev/null +++ b/tests/run/string-switch.scala @@ -0,0 +1,69 @@ +// scalac: -Werror +import annotation.switch +import util.Try + +object Test extends App { + + def species(name: String) = (name.toLowerCase : @switch) match { + case "fido" => "dog" + case "garfield" | "deuteronomy" => "cat" + case "wanda" => "fish" + case "henry" => "horse" + } + List("fido", "garfield", "wanda", "henry", "felix", "deuteronomy").foreach { n => println(s"$n ${Try(species(n))}") } + + println("=====") + + def collide(in: String) = (in : @switch) match { + case "AaAa" => 1 + case "BBBB" => 2 + case "cCCc" => 3 + case x if x == "ddDd" => 4 + } + List("AaAa", "BBBB", "BBAa", "cCCc", "ddDd", "EEee").foreach { s => + println(s"$s ${s.##} ${Try(collide(s))}") + } + + println("=====") + + def unitary(in: 
String) = (in : @switch) match { + case "A" => + case x => throw new MatchError(x) + } + List("A","X").foreach { s => + println(s"$s ${Try(unitary(s))}") + } + + println("=====") + + def nullFun(in: String) = (in : @switch) match { + case "1" => 1 + case null => 2 + case "" => 3 + } + List("", null, "7").foreach { s => + println(s"$s ${Try(nullFun(s))}") + } + + println("=====") + + def default(in: String) = (in : @switch) match { + case "pig" => 1 + case _ => 2 + } + List("pig","dog").foreach { s => + println(s"$s ${Try(default(s))}") + } + + println("=====") + + def onceOnly(in: Iterator[String]) = (in.next() : @switch) match { + case "Ea" => 1 + case "FB" => 2 //collision with above + case "cC" => 3 + case _ => 4 + } + List("Ea", "FB", "cC", "xx", null).foreach { s => + println(s"$s ${s.##} ${Try(onceOnly(Iterator(s)))}") + } +} diff --git a/tests/run/t13418.check b/tests/run/t13418.check new file mode 100644 index 000000000000..7614df8ec464 --- /dev/null +++ b/tests/run/t13418.check @@ -0,0 +1,3 @@ +ok +ok +ok diff --git a/tests/run/t13418.scala b/tests/run/t13418.scala new file mode 100644 index 000000000000..344bae8a274b --- /dev/null +++ b/tests/run/t13418.scala @@ -0,0 +1,26 @@ +class A { class B } +val a1 = new A +val a2 = new A +val b: Any = new a1.B + +class X { class Y[Q] } +val x1 = new X +val x2 = new X +val y: Any = new x1.Y[Int] +type Foo = [Q] =>> x2.Y[Q] +type Bar = [Q] =>> x1.Y[Q] + +@main def Test() = { + b match { + case _: a2.B => println("wrong") + case _: a1.B => println("ok") + } + y match { + case _: x2.Y[_] => println("wrong") + case _: x1.Y[_] => println("ok") + } + y match { + case _: Foo[_] => println("wrong") + case _: Bar[_] => println("ok") + } +} diff --git a/tests/run/t6138-2.check b/tests/run/t6138-2.check new file mode 100644 index 000000000000..473ecde25dba --- /dev/null +++ b/tests/run/t6138-2.check @@ -0,0 +1 @@ +Foo$Bar was instantiated! 
diff --git a/tests/run/t6138-2/JavaClass.java b/tests/run/t6138-2/JavaClass.java new file mode 100644 index 000000000000..9774c05a0d91 --- /dev/null +++ b/tests/run/t6138-2/JavaClass.java @@ -0,0 +1,4 @@ +public class JavaClass { + // This is defined in ScalaClass + public static final Foo.Bar bar = new Foo.Bar(); +} \ No newline at end of file diff --git a/tests/run/t6138-2/ScalaClass.scala b/tests/run/t6138-2/ScalaClass.scala new file mode 100644 index 000000000000..0528133cbf2c --- /dev/null +++ b/tests/run/t6138-2/ScalaClass.scala @@ -0,0 +1,18 @@ +/* Similar to t10490 -- but defines `Foo` in the object. + * Placing this test within t10490 makes it work without a fix, that's why it's independent. + * Note that this was already working, we add it to make sure we don't regress + */ + +class Foo +object Foo { + class Bar { + override def toString: String = "Foo$Bar was instantiated!" + } +} + +object Test { + def main(args: Array[String]): Unit = { + // JavaClass is the user of the Scala defined classes + println(JavaClass.bar) + } +} \ No newline at end of file diff --git a/tests/run/t6138/JavaClass.java b/tests/run/t6138/JavaClass.java new file mode 100644 index 000000000000..08b9e0bd55d4 --- /dev/null +++ b/tests/run/t6138/JavaClass.java @@ -0,0 +1,4 @@ +public class JavaClass { + // This is defined in ScalaClass + public static final Foo.Bar bar = (new Foo()).new Bar(); +} \ No newline at end of file diff --git a/tests/run/t6138/ScalaClass.scala b/tests/run/t6138/ScalaClass.scala new file mode 100644 index 000000000000..da3c682b5033 --- /dev/null +++ b/tests/run/t6138/ScalaClass.scala @@ -0,0 +1,13 @@ +class Foo { + class Bar { + override def toString: String = "Foo$Bar was instantiated!" 
+ } +} + +object Test { + def main(args: Array[String]): Unit = { + // JavaClass is the user of the Scala defined classes + println(JavaClass.bar) + //println(JavaClass.baz) + } +} \ No newline at end of file diff --git a/tests/run/t6238.check b/tests/run/t6238.check new file mode 100644 index 000000000000..473ecde25dba --- /dev/null +++ b/tests/run/t6238.check @@ -0,0 +1 @@ +Foo$Bar was instantiated! diff --git a/tests/run/t6476.check b/tests/run/t6476.check index 69bf68978177..e2a080bcf6dc 100644 --- a/tests/run/t6476.check +++ b/tests/run/t6476.check @@ -1,13 +1,18 @@ "Hello", Alice "Hello", Alice + +"Hello", Alice +"Hello", Alice + \"Hello\", Alice \"Hello\", Alice -\"Hello\", Alice -\"Hello\", Alice + \TILT\ -\\TILT\\ -\\TILT\\ \TILT\ \\TILT\\ + +\TILT\ +\TILT\ \\TILT\\ + \TILT\ diff --git a/tests/run/t6476.scala b/tests/run/t6476.scala index a04645065a2a..25a1d5f03ec1 100644 --- a/tests/run/t6476.scala +++ b/tests/run/t6476.scala @@ -3,21 +3,21 @@ object Test { val person = "Alice" println(s"\"Hello\", $person") println(s"""\"Hello\", $person""") - + println() println(f"\"Hello\", $person") println(f"""\"Hello\", $person""") - + println() println(raw"\"Hello\", $person") println(raw"""\"Hello\", $person""") - + println() println(s"\\TILT\\") println(f"\\TILT\\") println(raw"\\TILT\\") - + println() println(s"""\\TILT\\""") println(f"""\\TILT\\""") println(raw"""\\TILT\\""") - + println() println(raw"""\TILT\""") } } diff --git a/tests/run/tagless.scala b/tests/run/tagless.scala index 5333c4e30995..5abc32d84578 100644 --- a/tests/run/tagless.scala +++ b/tests/run/tagless.scala @@ -103,7 +103,7 @@ object Test extends App { private class Exc(msg: String) extends Exception(msg) def _throw(msg: String)(using CanThrow): Nothing = throw new Exc(msg) def _try[T](op: Maybe[T])(handler: String => T): T = { - given CanThrow with {} + given CanThrow() try op catch { case ex: Exception => handler(ex.getMessage) diff --git a/tests/run/tailrec-return.check 
b/tests/run/tailrec-return.check new file mode 100644 index 000000000000..361e76d8a285 --- /dev/null +++ b/tests/run/tailrec-return.check @@ -0,0 +1,7 @@ +6 +false +true +false +true +Ada Lovelace, Alan Turing +List(9, 10) diff --git a/tests/run/tailrec-return.scala b/tests/run/tailrec-return.scala new file mode 100644 index 000000000000..aa760960403d --- /dev/null +++ b/tests/run/tailrec-return.scala @@ -0,0 +1,66 @@ +object Test: + + @annotation.tailrec + def sum(n: Int, acc: Int = 0): Int = + if n != 0 then return sum(n - 1, acc + n) + acc + + @annotation.tailrec + def isEven(n: Int): Boolean = + if n != 0 && n != 1 then return isEven(n - 2) + if n == 1 then return false + true + + @annotation.tailrec + def isEvenApply(n: Int): Boolean = + // Return inside an `Apply.fun` + ( + if n != 0 && n != 1 then return isEvenApply(n - 2) + else if n == 1 then return false + else (x: Boolean) => x + )(true) + + @annotation.tailrec + def isEvenWhile(n: Int): Boolean = + // Return inside a `WhileDo.cond` + while( + if n != 0 && n != 1 then return isEvenWhile(n - 2) + else if n == 1 then return false + else true + ) {} + true + + @annotation.tailrec + def isEvenReturn(n: Int): Boolean = + // Return inside a `Return` + return + if n != 0 && n != 1 then return isEvenReturn(n - 2) + else if n == 1 then return false + else true + + @annotation.tailrec + def names(l: List[(String, String) | Null], acc: List[String] = Nil): List[String] = + l match + case Nil => acc.reverse + case x :: xs => + if x == null then return names(xs, acc) + + val displayName = x._1 + " " + x._2 + names(xs, displayName :: acc) + + def nonTail(l: List[Int]): List[Int] = + l match + case Nil => Nil + case x :: xs => + // The call to nonTail should *not* be eliminated + (x + 1) :: nonTail(xs) + + + def main(args: Array[String]): Unit = + println(sum(3)) + println(isEven(5)) + println(isEvenApply(6)) + println(isEvenWhile(7)) + println(isEvenReturn(8)) + println(names(List(("Ada", "Lovelace"), null, ("Alan", 
"Turing"))).mkString(", ")) + println(nonTail(List(8, 9))) diff --git a/tests/run/trailingCommas/trailingCommas.check b/tests/run/trailingCommas.check similarity index 100% rename from tests/run/trailingCommas/trailingCommas.check rename to tests/run/trailingCommas.check diff --git a/tests/run/trailingCommas/trailingCommas.scala b/tests/run/trailingCommas.scala similarity index 100% rename from tests/run/trailingCommas/trailingCommas.scala rename to tests/run/trailingCommas.scala diff --git a/tests/run/trait-static-forwarder.check b/tests/run/trait-static-forwarder.check new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/tests/run/trait-static-forwarder.check @@ -0,0 +1 @@ +42 diff --git a/tests/run/trait-static-forwarder/Test.java b/tests/run/trait-static-forwarder/Test.java new file mode 100644 index 000000000000..89012c016209 --- /dev/null +++ b/tests/run/trait-static-forwarder/Test.java @@ -0,0 +1,5 @@ +public final class Test { + public static void main(String... 
args) { + System.out.println(T.foo()); + } +} diff --git a/tests/run/trait-static-forwarder/forwarders.scala b/tests/run/trait-static-forwarder/forwarders.scala new file mode 100644 index 000000000000..d6ee9a081d02 --- /dev/null +++ b/tests/run/trait-static-forwarder/forwarders.scala @@ -0,0 +1,5 @@ +trait T + +object T { + def foo = 42 +} diff --git a/tests/semanticdb/expect/Advanced.expect.scala b/tests/semanticdb/expect/Advanced.expect.scala index d52b8cecf668..0764c11c4dd2 100644 --- a/tests/semanticdb/expect/Advanced.expect.scala +++ b/tests/semanticdb/expect/Advanced.expect.scala @@ -11,22 +11,24 @@ class C/*<-advanced::C#*/[T/*<-advanced::C#[T]*/] { class Structural/*<-advanced::Structural#*/ { def s1/*<-advanced::Structural#s1().*/: { val x/*<-local0*/: Int/*->scala::Int#*/ } = ???/*->scala::Predef.`???`().*/ - def s2/*<-advanced::Structural#s2().*/: { val x/*<-local1*/: Int/*->scala::Int#*/ } = new { val x/*<-local3*/: Int/*->scala::Int#*/ = ???/*->scala::Predef.`???`().*/ } - def s3/*<-advanced::Structural#s3().*/: { def m/*<-local4*/(x/*<-local5*/: Int/*->scala::Int#*/): Int/*->scala::Int#*/ } = new { def m/*<-local7*/(x/*<-local8*/: Int/*->scala::Int#*/): Int/*->scala::Int#*/ = ???/*->scala::Predef.`???`().*/ } + def s2/*<-advanced::Structural#s2().*/: { val x/*<-local1*/: Int/*->scala::Int#*/ } = new { val x/*<-local2*/: Int/*->scala::Int#*/ = ???/*->scala::Predef.`???`().*/ } + def s3/*<-advanced::Structural#s3().*/: { def m/*<-local6*/(x/*<-local5*/: Int/*->scala::Int#*/): Int/*->scala::Int#*/ } = new { def m/*<-local8*/(x/*<-local7*/: Int/*->scala::Int#*/): Int/*->scala::Int#*/ = ???/*->scala::Predef.`???`().*/ } + def s4/*<-advanced::Structural#s4().*/(a/*<-advanced::Structural#s4().(a)*/: Int/*->scala::Int#*/): { val x/*<-local11*/: Int/*->scala::Int#*/ } = ???/*->scala::Predef.`???`().*/ } class Wildcards/*<-advanced::Wildcards#*/ { def e1/*<-advanced::Wildcards#e1().*/: List/*->scala::package.List#*/[_] = ???/*->scala::Predef.`???`().*/ + def 
e2/*<-advanced::Wildcards#e2().*/: List/*->scala::package.List#*/[_ <: Int/*->scala::Int#*/] = ???/*->scala::Predef.`???`().*/ } object Test/*<-advanced::Test.*/ { val s/*<-advanced::Test.s.*/ = new Structural/*->advanced::Structural#*/ val s1/*<-advanced::Test.s1.*/ = s/*->advanced::Test.s.*/.s1/*->advanced::Structural#s1().*/ - val s1x/*<-advanced::Test.s1x.*/ = /*->scala::reflect::Selectable.reflectiveSelectable().*/s/*->advanced::Test.s.*/.s1/*->advanced::Structural#s1().*//*->scala::reflect::Selectable#selectDynamic().*/.x + val s1x/*<-advanced::Test.s1x.*/ = s/*->advanced::Test.s.*/.s1/*->advanced::Structural#s1().*/.x val s2/*<-advanced::Test.s2.*/ = s/*->advanced::Test.s.*/.s2/*->advanced::Structural#s2().*/ - val s2x/*<-advanced::Test.s2x.*/ = /*->scala::reflect::Selectable.reflectiveSelectable().*/s/*->advanced::Test.s.*/.s2/*->advanced::Structural#s2().*//*->scala::reflect::Selectable#selectDynamic().*/.x + val s2x/*<-advanced::Test.s2x.*/ = s/*->advanced::Test.s.*/.s2/*->advanced::Structural#s2().*/.x val s3/*<-advanced::Test.s3.*/ = s/*->advanced::Test.s.*/.s3/*->advanced::Structural#s3().*/ - val s3x/*<-advanced::Test.s3x.*/ = /*->scala::reflect::Selectable.reflectiveSelectable().*/s/*->advanced::Test.s.*/.s3/*->advanced::Structural#s3().*//*->scala::reflect::Selectable#applyDynamic().*/.m(???/*->scala::Predef.`???`().*/) + val s3x/*<-advanced::Test.s3x.*/ = s/*->advanced::Test.s.*/.s3/*->advanced::Structural#s3().*/.m(???/*->scala::Predef.`???`().*/) val e/*<-advanced::Test.e.*/ = new Wildcards/*->advanced::Wildcards#*/ val e1/*<-advanced::Test.e1.*/ = e/*->advanced::Test.e.*/.e1/*->advanced::Wildcards#e1().*/ @@ -34,9 +36,15 @@ object Test/*<-advanced::Test.*/ { { (???/*->scala::Predef.`???`().*/ : Any/*->scala::Any#*/) match { - case e3/*<-local9*/: List/*->scala::package.List#*/[_] => - val e3x/*<-local10*/ = e3/*->local9*/.head/*->scala::collection::IterableOps#head().*/ + case e3/*<-local12*/: List/*->scala::package.List#*/[_] => + val 
e3x/*<-local14*/ = e3/*->local12*/.head/*->scala::collection::IterableOps#head().*/ () } } } + + +// Curried Type Application +class HKClass/*<-advanced::HKClass#*/[F/*<-advanced::HKClass#[F]*/ <: [T/*<-advanced::HKClass#``().[F][T]*/] =>> [U/*<-advanced::HKClass#``().[F][U]*/] =>> (U/*->advanced::HKClass#``().[F][U]*/, T/*->advanced::HKClass#``().[F][T]*/)] { + def foo/*<-advanced::HKClass#foo().*/[T/*<-advanced::HKClass#foo().[T]*/,U/*<-advanced::HKClass#foo().[U]*/](x/*<-advanced::HKClass#foo().(x)*/: F/*->advanced::HKClass#[F]*/[T/*->advanced::HKClass#foo().[T]*/][U/*->advanced::HKClass#foo().[U]*/]): String/*->scala::Predef.String#*/ = x/*->advanced::HKClass#foo().(x)*/.toString/*->scala::Tuple2#toString().*/() +} diff --git a/tests/semanticdb/expect/Advanced.scala b/tests/semanticdb/expect/Advanced.scala index 3e14a46cb194..e0439d911c36 100644 --- a/tests/semanticdb/expect/Advanced.scala +++ b/tests/semanticdb/expect/Advanced.scala @@ -13,10 +13,12 @@ class Structural { def s1: { val x: Int } = ??? def s2: { val x: Int } = new { val x: Int = ??? } def s3: { def m(x: Int): Int } = new { def m(x: Int): Int = ??? } + def s4(a: Int): { val x: Int } = ??? } class Wildcards { def e1: List[_] = ??? + def e2: List[_ <: Int] = ??? 
} object Test { @@ -40,3 +42,9 @@ object Test { } } } + + +// Curried Type Application +class HKClass[F <: [T] =>> [U] =>> (U, T)] { + def foo[T,U](x: F[T][U]): String = x.toString() +} diff --git a/tests/semanticdb/expect/Annotations.expect.scala b/tests/semanticdb/expect/Annotations.expect.scala index fb89c0e94abc..2422839c8307 100644 --- a/tests/semanticdb/expect/Annotations.expect.scala +++ b/tests/semanticdb/expect/Annotations.expect.scala @@ -21,7 +21,10 @@ class Annotations/*<-annot::Annotations#*/[@TypeParameterAnnotation/*->com::java class B/*<-annot::B#*/ @ConstructorAnnotation/*->com::javacp::annot::ConstructorAnnotation#*/()(x/*<-annot::B#x.*/: Int/*->scala::Int#*/) { @ConstructorAnnotation/*->com::javacp::annot::ConstructorAnnotation#*/ - def this()/*<-annot::B#``(+1).*/ = this(42) + def this/*<-annot::B#``(+1).*/() = this(42) + + @throws/*->scala::throws#*/[Exception/*->scala::package.Exception#*/] + def throwing/*<-annot::B#throwing().*/ = throw new Exception/*->scala::package.Exception#*/("") } @ObjectAnnotation/*->com::javacp::annot::ObjectAnnotation#*/ diff --git a/tests/semanticdb/expect/Annotations.scala b/tests/semanticdb/expect/Annotations.scala index 176ced6487e8..0556a2299b80 100644 --- a/tests/semanticdb/expect/Annotations.scala +++ b/tests/semanticdb/expect/Annotations.scala @@ -22,6 +22,9 @@ class Annotations[@TypeParameterAnnotation T](@ParameterAnnotation x: T) { self: class B @ConstructorAnnotation()(x: Int) { @ConstructorAnnotation def this() = this(42) + + @throws[Exception] + def throwing = throw new Exception("") } @ObjectAnnotation diff --git a/tests/semanticdb/expect/AnonymousGiven.expect.scala b/tests/semanticdb/expect/AnonymousGiven.expect.scala index 68800202b6c1..c28b9d5e770d 100644 --- a/tests/semanticdb/expect/AnonymousGiven.expect.scala +++ b/tests/semanticdb/expect/AnonymousGiven.expect.scala @@ -2,4 +2,4 @@ package angiven trait Foo/*<-angiven::Foo#*/ -/*<-angiven::AnonymousGiven$package.*/def 
bar/*<-angiven::AnonymousGiven$package.bar().*/(using Foo/*->angiven::Foo#*/) = 42 \ No newline at end of file +def bar/*<-angiven::AnonymousGiven$package.bar().*/(using Foo/*->angiven::Foo#*/) = 42 \ No newline at end of file diff --git a/tests/semanticdb/expect/Classes.expect.scala b/tests/semanticdb/expect/Classes.expect.scala index af092703c250..a4015d7aa150 100644 --- a/tests/semanticdb/expect/Classes.expect.scala +++ b/tests/semanticdb/expect/Classes.expect.scala @@ -1,8 +1,8 @@ package classes import scala.language/*->scala::language.*/.experimental/*->scala::language.experimental.*/.macros/*->scala::language.experimental.macros.*/ -class C1/*<-classes::C1#*/(val x1/*<-classes::C1#x1.*/: Int/*->scala::Int#*/) extends AnyVal/*->scala::AnyVal#*//*->scala::AnyVal#``().*/ +class C1/*<-classes::C1#*/(val x1/*<-classes::C1#x1.*/: Int/*->scala::Int#*/) extends AnyVal/*->scala::AnyVal#*/ -class C2/*<-classes::C2#*/(val x2/*<-classes::C2#x2.*/: Int/*->scala::Int#*/) extends AnyVal/*->scala::AnyVal#*//*->scala::AnyVal#``().*/ +class C2/*<-classes::C2#*/(val x2/*<-classes::C2#x2.*/: Int/*->scala::Int#*/) extends AnyVal/*->scala::AnyVal#*/ object C2/*<-classes::C2.*/ case class C3/*<-classes::C3#*/(x/*<-classes::C3#x.*/: Int/*->scala::Int#*/) @@ -47,10 +47,10 @@ class C12/*<-classes::C12#*/ { object N/*<-classes::N.*/ { val anonClass/*<-classes::N.anonClass.*/ = new C7/*->classes::C7#*/(42) { - val local/*<-local1*/ = ???/*->scala::Predef.`???`().*/ + val local/*<-local0*/ = ???/*->scala::Predef.`???`().*/ } - val anonFun/*<-classes::N.anonFun.*/ = List/*->scala::package.List.*//*->scala::collection::IterableFactory#apply().*/(1).map/*->scala::collection::immutable::List#map().*/ { i/*<-local2*/ => - val local/*<-local3*/ = 2 - local/*->local3*/ +/*->scala::Int#`+`(+4).*/ 2 + val anonFun/*<-classes::N.anonFun.*/ = List/*->scala::package.List.*/(1).map/*->scala::collection::immutable::List#map().*/ { i/*<-local3*/ => + val local/*<-local4*/ = 2 + local/*->local4*/ 
+/*->scala::Int#`+`(+4).*/ 2 } } diff --git a/tests/semanticdb/expect/EndMarkers.expect.scala b/tests/semanticdb/expect/EndMarkers.expect.scala new file mode 100644 index 000000000000..7ace3ceae22f --- /dev/null +++ b/tests/semanticdb/expect/EndMarkers.expect.scala @@ -0,0 +1,59 @@ +package endmarkers: + + class MultiCtor/*<-endmarkers::MultiCtor#*/(val i/*<-endmarkers::MultiCtor#i.*/: Int/*->scala::Int#*/): + def this/*<-endmarkers::MultiCtor#``(+1).*/() = + this(23) + end this/*->endmarkers::MultiCtor#``(+1).*/ + end MultiCtor/*->endmarkers::MultiCtor#*/ + + def topLevelMethod/*<-endmarkers::EndMarkers$package.topLevelMethod().*/: String/*->scala::Predef.String#*/ = + "hello" + end topLevelMethod/*->endmarkers::EndMarkers$package.topLevelMethod().*/ + + val topLevelVal/*<-endmarkers::EndMarkers$package.topLevelVal.*/: Int/*->scala::Int#*/ = + 23 + end topLevelVal/*->endmarkers::EndMarkers$package.topLevelVal.*/ + + var topLevelVar/*<-endmarkers::EndMarkers$package.topLevelVar().*/: String/*->scala::Predef.String#*/ = + "" + end topLevelVar/*->endmarkers::EndMarkers$package.topLevelVar().*/ + + class Container/*<-endmarkers::Container#*/: + + def foo/*<-endmarkers::Container#foo().*/ = + (1,2,3) + end foo/*->endmarkers::Container#foo().*/ + + val bar/*<-endmarkers::Container#bar.*/ = + (4,5,6) + end bar/*->endmarkers::Container#bar.*/ + + var baz/*<-endmarkers::Container#baz().*/ = + 15 + end baz/*->endmarkers::Container#baz().*/ + + end Container/*->endmarkers::Container#*/ + + def topLevelWithLocals/*<-endmarkers::EndMarkers$package.topLevelWithLocals().*/: Unit/*->scala::Unit#*/ = + + val localVal/*<-local0*/ = + 37 + end localVal/*->local0*/ + + var localVar/*<-local1*/ = + 43 + end localVar/*->local1*/ + + def localDef/*<-local2*/ = + 97 + end localDef/*->local2*/ + + end topLevelWithLocals/*->endmarkers::EndMarkers$package.topLevelWithLocals().*/ + + object TestObj/*<-endmarkers::TestObj.*/: + + def foo/*<-endmarkers::TestObj.foo().*/ = 23 + + end 
TestObj/*->endmarkers::TestObj.*/ + +end endmarkers diff --git a/tests/semanticdb/expect/EndMarkers.scala b/tests/semanticdb/expect/EndMarkers.scala new file mode 100644 index 000000000000..c7ae04898ebd --- /dev/null +++ b/tests/semanticdb/expect/EndMarkers.scala @@ -0,0 +1,59 @@ +package endmarkers: + + class MultiCtor(val i: Int): + def this() = + this(23) + end this + end MultiCtor + + def topLevelMethod: String = + "hello" + end topLevelMethod + + val topLevelVal: Int = + 23 + end topLevelVal + + var topLevelVar: String = + "" + end topLevelVar + + class Container: + + def foo = + (1,2,3) + end foo + + val bar = + (4,5,6) + end bar + + var baz = + 15 + end baz + + end Container + + def topLevelWithLocals: Unit = + + val localVal = + 37 + end localVal + + var localVar = + 43 + end localVar + + def localDef = + 97 + end localDef + + end topLevelWithLocals + + object TestObj: + + def foo = 23 + + end TestObj + +end endmarkers diff --git a/tests/semanticdb/expect/EndMarkers2.expect.scala b/tests/semanticdb/expect/EndMarkers2.expect.scala new file mode 100644 index 000000000000..61b8f8093a8e --- /dev/null +++ b/tests/semanticdb/expect/EndMarkers2.expect.scala @@ -0,0 +1,7 @@ +package object endmarkers2/*<-endmarkers2::package.*/: + + type Foo/*<-endmarkers2::package.Foo#*/ = + Unit/*->scala::Unit#*/ + end Foo/*->endmarkers2::package.Foo#*/ + +end endmarkers2/*->endmarkers2::package.*/ diff --git a/tests/semanticdb/expect/EndMarkers2.scala b/tests/semanticdb/expect/EndMarkers2.scala new file mode 100644 index 000000000000..377696ca8a79 --- /dev/null +++ b/tests/semanticdb/expect/EndMarkers2.scala @@ -0,0 +1,7 @@ +package object endmarkers2: + + type Foo = + Unit + end Foo + +end endmarkers2 diff --git a/tests/semanticdb/expect/Enums.expect.scala b/tests/semanticdb/expect/Enums.expect.scala index 8d99d98fc6da..3e1dc9087db7 100644 --- a/tests/semanticdb/expect/Enums.expect.scala +++ b/tests/semanticdb/expect/Enums.expect.scala @@ -8,7 +8,7 @@ object 
Enums/*<-_empty_::Enums.*/: enum Directions/*<-_empty_::Enums.Directions#*/: case North/*<-_empty_::Enums.Directions.North.*/, East/*<-_empty_::Enums.Directions.East.*/, South/*<-_empty_::Enums.Directions.South.*/, West/*<-_empty_::Enums.Directions.West.*/ - enum Suits/*<-_empty_::Enums.Suits#*/ derives /*->scala::CanEqual.derived.*/CanEqual: + enum Suits/*<-_empty_::Enums.Suits#*/ derives CanEqual: case Hearts/*<-_empty_::Enums.Suits.Hearts.*/, Spades/*<-_empty_::Enums.Suits.Spades.*/, Clubs/*<-_empty_::Enums.Suits.Clubs.*/, Diamonds/*<-_empty_::Enums.Suits.Diamonds.*/ object Suits/*<-_empty_::Enums.Suits.*/: @@ -47,14 +47,14 @@ object Enums/*<-_empty_::Enums.*/: case Refl/*<-_empty_::Enums.`<:<`.Refl#*/[C/*<-_empty_::Enums.`<:<`.Refl#[C]*/]() extends (C/*->_empty_::Enums.`<:<`.Refl#[C]*/ <:_empty_::Enums.`<:<`#*/ C/*->_empty_::Enums.`<:<`.Refl#[C]*/) object <:_empty_::Enums.`<:<`.`given_<:<_T_T`().[T]*/ <:_empty_::Enums.`<:<`#*/ T/*->_empty_::Enums.`<:<`.`given_<:<_T_T`().[T]*/) = Refl/*->_empty_::Enums.`<:<`.Refl.*//*->_empty_::Enums.`<:<`.Refl.apply().*/() + given [T/*<-_empty_::Enums.`<:<`.`given_<:<_T_T`().[T]*/]: (T/*->_empty_::Enums.`<:<`.`given_<:<_T_T`().[T]*/ <:_empty_::Enums.`<:<`#*/ T/*->_empty_::Enums.`<:<`.`given_<:<_T_T`().[T]*/) = Refl/*->_empty_::Enums.`<:<`.Refl.*/() extension [A/*<-_empty_::Enums.unwrap().[A]*/, B/*<-_empty_::Enums.unwrap().[B]*/](opt/*<-_empty_::Enums.unwrap().(opt)*/: Option/*->scala::Option#*/[A/*->_empty_::Enums.unwrap().[A]*/]) def unwrap/*<-_empty_::Enums.unwrap().*/(using ev/*<-_empty_::Enums.unwrap().(ev)*/: A/*->_empty_::Enums.unwrap().[A]*/ <:_empty_::Enums.`<:<`#*/ Option/*->scala::Option#*/[B/*->_empty_::Enums.unwrap().[B]*/]): Option/*->scala::Option#*/[B/*->_empty_::Enums.unwrap().[B]*/] = ev/*->_empty_::Enums.unwrap().(ev)*/ match - case Refl/*->_empty_::Enums.`<:<`.Refl.*//*->_empty_::Enums.`<:<`.Refl.unapply().*/() => 
opt/*->_empty_::Enums.unwrap().(opt)*/.flatMap/*->scala::Option#flatMap().*/(identity/*->scala::Predef.identity().*//*->local0*/[Option/*->scala::Option#*/[B/*->_empty_::Enums.unwrap().[B]*/]]) + case Refl/*->_empty_::Enums.`<:<`.Refl.*/() => opt/*->_empty_::Enums.unwrap().(opt)*/.flatMap/*->scala::Option#flatMap().*/(identity/*->scala::Predef.identity().*/[Option/*->scala::Option#*/[B/*->_empty_::Enums.unwrap().[B]*/]]) - val some1/*<-_empty_::Enums.some1.*/ = Some/*->scala::Some.*//*->scala::Some.apply().*/(Some/*->scala::Some.*//*->scala::Some.apply().*/(1)).unwrap/*->_empty_::Enums.unwrap().*//*->_empty_::Enums.`<:<`.`given_<:<_T_T`().*/ + val some1/*<-_empty_::Enums.some1.*/ = Some/*->scala::Some.*/(Some/*->scala::Some.*/(1)).unwrap/*->_empty_::Enums.unwrap().*/ - enum Planet/*<-_empty_::Enums.Planet#*/(mass/*<-_empty_::Enums.Planet#mass.*/: Double/*->scala::Double#*/, radius/*<-_empty_::Enums.Planet#radius.*/: Double/*->scala::Double#*/) extends Enum/*->java::lang::Enum#*/[Planet/*->_empty_::Enums.Planet#*/]/*->java::lang::Enum#``().*/: + enum Planet/*<-_empty_::Enums.Planet#*/(mass/*<-_empty_::Enums.Planet#mass.*/: Double/*->scala::Double#*/, radius/*<-_empty_::Enums.Planet#radius.*/: Double/*->scala::Double#*/) extends Enum/*->java::lang::Enum#*/[Planet/*->_empty_::Enums.Planet#*/]: private final val G/*<-_empty_::Enums.Planet#G.*/ = 6.67300E-11 def surfaceGravity/*<-_empty_::Enums.Planet#surfaceGravity().*/ = G/*->_empty_::Enums.Planet#G.*/ */*->scala::Double#`*`(+6).*/ mass/*->_empty_::Enums.Planet#mass.*/ //*->scala::Double#`::`(+6).*/ (radius/*->_empty_::Enums.Planet#radius.*/ */*->scala::Double#`*`(+6).*/ radius/*->_empty_::Enums.Planet#radius.*/) def surfaceWeight/*<-_empty_::Enums.Planet#surfaceWeight().*/(otherMass/*<-_empty_::Enums.Planet#surfaceWeight().(otherMass)*/: Double/*->scala::Double#*/) = otherMass/*->_empty_::Enums.Planet#surfaceWeight().(otherMass)*/ */*->scala::Double#`*`(+6).*/ 
surfaceGravity/*->_empty_::Enums.Planet#surfaceGravity().*/ diff --git a/tests/semanticdb/expect/EtaExpansion.expect.scala b/tests/semanticdb/expect/EtaExpansion.expect.scala index 55207ffa5ff2..76a99f66ad1b 100644 --- a/tests/semanticdb/expect/EtaExpansion.expect.scala +++ b/tests/semanticdb/expect/EtaExpansion.expect.scala @@ -1,6 +1,6 @@ package example class EtaExpansion/*<-example::EtaExpansion#*/ { - Some/*->scala::Some.*//*->scala::Some.apply().*/(1).map/*->scala::Option#map().*/(identity/*->scala::Predef.identity().*//*->local0*/) - List/*->scala::package.List.*//*->scala::collection::IterableFactory#apply().*/(1).foldLeft/*->scala::collection::LinearSeqOps#foldLeft().*/("")(_ +/*->java::lang::String#`+`().*/ _) + Some/*->scala::Some.*/(1).map/*->scala::Option#map().*/(identity/*->scala::Predef.identity().*/) + List/*->scala::package.List.*/(1).foldLeft/*->scala::collection::LinearSeqOps#foldLeft().*/("")(_ +/*->java::lang::String#`+`().*/ _) } diff --git a/tests/semanticdb/expect/Example.expect.scala b/tests/semanticdb/expect/Example.expect.scala index 8ee4cedeae41..5a6513ce1645 100644 --- a/tests/semanticdb/expect/Example.expect.scala +++ b/tests/semanticdb/expect/Example.expect.scala @@ -3,9 +3,9 @@ package example import scala.concurrent.Future/*->scala::concurrent::Future.*//*->scala::concurrent::Future#*/ object Example/*<-example::Example.*/ { self/*<-local0*/ => - new scala.collection.mutable.Stack/*->scala::collection::mutable::Stack#*/[Int/*->scala::Int#*/]/*->scala::collection::mutable::Stack#``(+1).*/() + new scala.collection.mutable.Stack/*->scala::collection::mutable::Stack#*/[Int/*->scala::Int#*/]() def main/*<-example::Example.main().*/(args/*<-example::Example.main().(args)*/: Array/*->scala::Array#*/[String/*->scala::Predef.String#*/]): Unit/*->scala::Unit#*/ = { println/*->scala::Predef.println(+1).*/(1) } - val x/*<-example::Example.x.*/ = 
scala.reflect.classTag/*->scala::reflect::package.classTag().*/[Int/*->scala::Int#*/]/*->scala::reflect::ClassTag.apply().*/ + val x/*<-example::Example.x.*/ = scala.reflect.classTag/*->scala::reflect::package.classTag().*/[Int/*->scala::Int#*/] } diff --git a/tests/semanticdb/expect/Extension.expect.scala b/tests/semanticdb/expect/Extension.expect.scala index d96dcd4c87c8..b40e965d4885 100644 --- a/tests/semanticdb/expect/Extension.expect.scala +++ b/tests/semanticdb/expect/Extension.expect.scala @@ -1,9 +1,18 @@ package ext -/*<-ext::Extension$package.*/extension (s/*<-ext::Extension$package.foo().(s)*//*<-ext::Extension$package.`#*#`().(s)*/: String/*->scala::Predef.String#*/) +extension (s/*<-ext::Extension$package.foo().(s)*//*<-ext::Extension$package.`#*#`().(s)*/: String/*->scala::Predef.String#*/) def foo/*<-ext::Extension$package.foo().*/: Int/*->scala::Int#*/ = 42 - def #*#/*<-ext::Extension$package.`#*#`().*/ (i/*<-ext::Extension$package.`#*#`().(i)*/: Int/*->scala::Int#*/): (String/*->scala::Predef.String#*/, Int/*->scala::Int#*/) = (/*->scala::Tuple2.apply().*/s/*->ext::Extension$package.`#*#`().(s)*/, i/*->ext::Extension$package.`#*#`().(i)*/) + def #*#/*<-ext::Extension$package.`#*#`().*/ (i/*<-ext::Extension$package.`#*#`().(i)*/: Int/*->scala::Int#*/): (String/*->scala::Predef.String#*/, Int/*->scala::Int#*/) = (s/*->ext::Extension$package.`#*#`().(s)*/, i/*->ext::Extension$package.`#*#`().(i)*/) val a/*<-ext::Extension$package.a.*/ = "asd".foo/*->ext::Extension$package.foo().*/ -val c/*<-ext::Extension$package.c.*/ = "foo" #*#/*->ext::Extension$package.`#*#`().*/ 23 \ No newline at end of file +val c/*<-ext::Extension$package.c.*/ = "foo" #*#/*->ext::Extension$package.`#*#`().*/ 23 + +trait Read/*<-ext::Read#*/[+T/*<-ext::Read#[T]*/]: + def fromString/*<-ext::Read#fromString().*/(s/*<-ext::Read#fromString().(s)*/: String/*->scala::Predef.String#*/): Option/*->scala::Option#*/[T/*->ext::Read#[T]*/] + +extension 
(s/*<-ext::Extension$package.readInto().(s)*/: String/*->scala::Predef.String#*/) + def readInto/*<-ext::Extension$package.readInto().*/[T/*<-ext::Extension$package.readInto().[T]*/](using Read/*->ext::Read#*/[T/*->ext::Extension$package.readInto().[T]*/]): Option/*->scala::Option#*/[T/*->ext::Extension$package.readInto().[T]*/] = summon/*->scala::Predef.summon().*/[Read/*->ext::Read#*/[T/*->ext::Extension$package.readInto().[T]*/]].fromString/*->ext::Read#fromString().*/(s/*->ext::Extension$package.readInto().(s)*/) + +trait Functor/*<-ext::Functor#*/[F/*<-ext::Functor#[F]*/[_]]: + extension [T/*<-ext::Functor#map().[T]*/](t/*<-ext::Functor#map().(t)*/: F/*->ext::Functor#[F]*/[T/*->ext::Functor#map().[T]*/]) def map/*<-ext::Functor#map().*/[U/*<-ext::Functor#map().[U]*/](f/*<-ext::Functor#map().(f)*/: T/*->ext::Functor#map().[T]*/ => U/*->ext::Functor#map().[U]*/): F/*->ext::Functor#[F]*/[U/*->ext::Functor#map().[U]*/] diff --git a/tests/semanticdb/expect/Extension.scala b/tests/semanticdb/expect/Extension.scala index 5b9c3e5f21ab..c204b1ff7fcc 100644 --- a/tests/semanticdb/expect/Extension.scala +++ b/tests/semanticdb/expect/Extension.scala @@ -6,4 +6,13 @@ extension (s: String) val a = "asd".foo -val c = "foo" #*# 23 \ No newline at end of file +val c = "foo" #*# 23 + +trait Read[+T]: + def fromString(s: String): Option[T] + +extension (s: String) + def readInto[T](using Read[T]): Option[T] = summon[Read[T]].fromString(s) + +trait Functor[F[_]]: + extension [T](t: F[T]) def map[U](f: T => U): F[U] diff --git a/tests/semanticdb/expect/ForComprehension.expect.scala b/tests/semanticdb/expect/ForComprehension.expect.scala index c4d3340bc84c..815b7a93518d 100644 --- a/tests/semanticdb/expect/ForComprehension.expect.scala +++ b/tests/semanticdb/expect/ForComprehension.expect.scala @@ -2,39 +2,39 @@ package example class ForComprehension/*<-example::ForComprehension#*/ { for { - a/*<-local0*/ <- 
List/*->scala::package.List.*//*->scala::collection::IterableFactory#apply().*/(1)/*->scala::collection::immutable::List#flatMap().*/ - b/*<-local1*//*->scala::Tuple2.apply().*//*->local1*//*->local3*//*->scala::Tuple2.unapply().*/ <- List/*->scala::package.List.*//*->scala::collection::IterableFactory#apply().*/(1)/*->scala::collection::IterableOps#withFilter().*/ - if b/*->local1*/ >/*->scala::Int#`>`(+3).*/ 1/*->scala::collection::WithFilter#map().*/ - c/*<-local2*//*->local2*/ = a/*->local0*/ +/*->scala::Int#`+`(+4).*/ b/*->local1*//*->scala::collection::immutable::List#map().*/ - } yield (/*->scala::Tuple3.apply().*/a/*->local0*/, b/*->local1*/, c/*->local2*/) + a/*<-local0*/ <- List/*->scala::package.List.*/(1) + b/*<-local1*//*->local1*/ <- List/*->scala::package.List.*/(1) + if b/*->local1*/ >/*->scala::Int#`>`(+3).*/ 1 + c/*<-local2*//*->local2*/ = a/*->local0*/ +/*->scala::Int#`+`(+4).*/ b/*->local1*/ + } yield (a/*->local0*/, b/*->local1*/, c/*->local2*/) for { - a/*<-local4*/ <- List/*->scala::package.List.*//*->scala::collection::IterableFactory#apply().*/(1)/*->scala::collection::immutable::List#flatMap().*/ - b/*<-local5*/ <- List/*->scala::package.List.*//*->scala::collection::IterableFactory#apply().*/(a/*->local4*/)/*->scala::collection::IterableOps#withFilter().*/ + a/*<-local4*/ <- List/*->scala::package.List.*/(1) + b/*<-local5*/ <- List/*->scala::package.List.*/(a/*->local4*/) if ( - /*->scala::Tuple2.apply().*/a/*->local4*/, + a/*->local4*/, b/*->local5*/ - ) ==/*->scala::Any#`==`().*/ (/*->scala::Tuple2.apply().*/1, 2)/*->scala::collection::WithFilter#flatMap().*/ - /*->local6*//*->scala::Tuple2.unapply().*/( - /*->scala::Tuple2.unapply().*/c/*<-local7*/, + ) ==/*->scala::Any#`==`().*/ (1, 2) + ( + c/*<-local7*/, d/*<-local8*/ - ) <- List/*->scala::package.List.*//*->scala::collection::IterableFactory#apply().*/((/*->scala::Tuple2.apply().*/a/*->local4*/, 
b/*->local5*/))/*->scala::collection::WithFilter#withFilter().*//*->scala::collection::IterableOps#withFilter().*/ + ) <- List/*->scala::package.List.*/((a/*->local4*/, b/*->local5*/)) if ( - /*->scala::Tuple4.apply().*/a/*->local4*/, + a/*->local4*/, b/*->local5*/, c/*->local7*/, d/*->local8*/ - ) ==/*->scala::Any#`==`().*/ (/*->scala::Tuple4.apply().*/1, 2, 3, 4)/*->scala::collection::WithFilter#map().*/ - e/*<-local9*//*->scala::Tuple2.apply().*//*->local9*/ = ( - /*->scala::Tuple4.apply().*/a/*->local4*/, + ) ==/*->scala::Any#`==`().*/ (1, 2, 3, 4) + e/*<-local9*//*->local9*/ = ( + a/*->local4*/, b/*->local5*/, c/*->local7*/, d/*->local8*/ - )/*->scala::collection::IterableOps#withFilter().*/ - if e/*->local9*/ ==/*->scala::Any#`==`().*/ (/*->scala::Tuple4.apply().*/1, 2, 3, 4)/*->scala::collection::WithFilter#flatMap().*/ - f/*<-local10*/ <- List/*->scala::package.List.*//*->scala::collection::IterableFactory#apply().*/(e/*->local9*/)/*->scala::collection::immutable::List#map().*/ + ) + if e/*->local9*/ ==/*->scala::Any#`==`().*/ (1, 2, 3, 4) + f/*<-local10*/ <- List/*->scala::package.List.*/(e/*->local9*/) } yield { ( - /*->scala::Tuple6.apply().*/a/*->local4*/, + a/*->local4*/, b/*->local5*/, c/*->local7*/, d/*->local8*/, diff --git a/tests/semanticdb/expect/Givens.expect.scala b/tests/semanticdb/expect/Givens.expect.scala index 35ef1d7c3cf9..5d7a9f5dc798 100644 --- a/tests/semanticdb/expect/Givens.expect.scala +++ b/tests/semanticdb/expect/Givens.expect.scala @@ -4,11 +4,11 @@ package b object Givens/*<-a::b::Givens.*/: extension [A/*<-a::b::Givens.sayHello().[A]*/](any/*<-a::b::Givens.sayHello().(any)*/: A/*->a::b::Givens.sayHello().[A]*/) - def sayHello/*<-a::b::Givens.sayHello().*/ = s"/*->scala::StringContext.apply().*/Hello, I am $any/*->a::b::Givens.sayHello().(any)*/"/*->scala::StringContext#s().*/ + def sayHello/*<-a::b::Givens.sayHello().*/ = s"Hello, I am $any/*->a::b::Givens.sayHello().(any)*/"/*->scala::StringContext#s().*/ extension 
[B/*<-a::b::Givens.sayGoodbye().[B]*//*<-a::b::Givens.saySoLong().[B]*/](any/*<-a::b::Givens.sayGoodbye().(any)*//*<-a::b::Givens.saySoLong().(any)*/: B/*->a::b::Givens.sayGoodbye().[B]*//*->a::b::Givens.saySoLong().[B]*/) - def sayGoodbye/*<-a::b::Givens.sayGoodbye().*/ = s"/*->scala::StringContext.apply().*/Goodbye, from $any/*->a::b::Givens.sayGoodbye().(any)*/"/*->scala::StringContext#s().*/ - def saySoLong/*<-a::b::Givens.saySoLong().*/ = s"/*->scala::StringContext.apply().*/So Long, from $any/*->a::b::Givens.saySoLong().(any)*/"/*->scala::StringContext#s().*/ + def sayGoodbye/*<-a::b::Givens.sayGoodbye().*/ = s"Goodbye, from $any/*->a::b::Givens.sayGoodbye().(any)*/"/*->scala::StringContext#s().*/ + def saySoLong/*<-a::b::Givens.saySoLong().*/ = s"So Long, from $any/*->a::b::Givens.saySoLong().(any)*/"/*->scala::StringContext#s().*/ val hello1/*<-a::b::Givens.hello1.*/ = 1.sayHello/*->a::b::Givens.sayHello().*/ val goodbye1/*<-a::b::Givens.goodbye1.*/ = 1.sayGoodbye/*->a::b::Givens.sayGoodbye().*/ @@ -18,7 +18,7 @@ object Givens/*<-a::b::Givens.*/: def empty/*<-a::b::Givens.Monoid#empty().*/: A/*->a::b::Givens.Monoid#[A]*/ extension (x/*<-a::b::Givens.Monoid#combine().(x)*/: A/*->a::b::Givens.Monoid#[A]*/) def combine/*<-a::b::Givens.Monoid#combine().*/(y/*<-a::b::Givens.Monoid#combine().(y)*/: A/*->a::b::Givens.Monoid#[A]*/): A/*->a::b::Givens.Monoid#[A]*/ - given Monoid[String] with/*<-a::b::Givens.given_Monoid_String.*//*->a::b::Givens.Monoid#*//*->scala::Predef.String#*/ + given Monoid/*->a::b::Givens.Monoid#*/[String/*->scala::Predef.String#*/] with def empty/*<-a::b::Givens.given_Monoid_String.empty().*/ = "" extension (x/*<-a::b::Givens.given_Monoid_String.combine().(x)*/: String/*->scala::Predef.String#*/) def combine/*<-a::b::Givens.given_Monoid_String.combine().*/(y/*<-a::b::Givens.given_Monoid_String.combine().(y)*/: String/*->scala::Predef.String#*/) = x/*->a::b::Givens.given_Monoid_String.combine().(x)*/ +/*->java::lang::String#`+`().*/ 
y/*->a::b::Givens.given_Monoid_String.combine().(y)*/ diff --git a/tests/semanticdb/expect/ImplicitConversion.expect.scala b/tests/semanticdb/expect/ImplicitConversion.expect.scala index e5309ca26467..635ffb4d94c9 100644 --- a/tests/semanticdb/expect/ImplicitConversion.expect.scala +++ b/tests/semanticdb/expect/ImplicitConversion.expect.scala @@ -9,29 +9,29 @@ class ImplicitConversion/*<-example::ImplicitConversion#*/ { ): Int/*->scala::Int#*/ = 42 val message/*<-example::ImplicitConversion#message.*/ = "" val number/*<-example::ImplicitConversion#number.*/ = 42 - val tuple/*<-example::ImplicitConversion#tuple.*/ = (/*->scala::Tuple2.apply().*/1, 2) + val tuple/*<-example::ImplicitConversion#tuple.*/ = (1, 2) val char/*<-example::ImplicitConversion#char.*/: Char/*->scala::Char#*/ = 'a' // extension methods - /*->scala::Predef.augmentString().*/message/*->example::ImplicitConversion#message.*/ + message/*->example::ImplicitConversion#message.*/ .stripSuffix/*->scala::collection::StringOps#stripSuffix().*/("h") - /*->example::ImplicitConversion.newAny2stringadd().*/tuple/*->example::ImplicitConversion#tuple.*/ +/*->example::ImplicitConversion.newAny2stringadd#`+`().*/ "Hello" + tuple/*->example::ImplicitConversion#tuple.*/ +/*->example::ImplicitConversion.newAny2stringadd#`+`().*/ "Hello" // implicit conversions - val x/*<-example::ImplicitConversion#x.*/: Int/*->scala::Int#*/ = /*->example::ImplicitConversion#string2Number().*/message/*->example::ImplicitConversion#message.*/ + val x/*<-example::ImplicitConversion#x.*/: Int/*->scala::Int#*/ = message/*->example::ImplicitConversion#message.*/ // interpolators - s"/*->scala::StringContext.apply().*/Hello $message/*->example::ImplicitConversion#message.*/ $number/*->example::ImplicitConversion#number.*/"/*->scala::StringContext#s().*/ - /*->scala::Predef.augmentString().*/s"""/*->scala::StringContext.apply().*/Hello + s"Hello $message/*->example::ImplicitConversion#message.*/ 
$number/*->example::ImplicitConversion#number.*/"/*->scala::StringContext#s().*/ + s"""Hello |$message/*->example::ImplicitConversion#message.*/ |$number/*->example::ImplicitConversion#number.*/"""/*->scala::StringContext#s().*/.stripMargin/*->scala::collection::StringOps#stripMargin(+1).*/ - val a/*<-example::ImplicitConversion#a.*/: Int/*->scala::Int#*/ = /*->scala::Char.char2int().*/char/*->example::ImplicitConversion#char.*/ - val b/*<-example::ImplicitConversion#b.*/: Long/*->scala::Long#*/ = /*->scala::Char.char2long().*/char/*->example::ImplicitConversion#char.*/ + val a/*<-example::ImplicitConversion#a.*/: Int/*->scala::Int#*/ = char/*->example::ImplicitConversion#char.*/ + val b/*<-example::ImplicitConversion#b.*/: Long/*->scala::Long#*/ = char/*->example::ImplicitConversion#char.*/ } object ImplicitConversion/*<-example::ImplicitConversion.*/ { - implicit final class newAny2stringadd/*<-example::ImplicitConversion.newAny2stringadd#*/[A/*<-example::ImplicitConversion.newAny2stringadd#[A]*/](private val self/*<-example::ImplicitConversion.newAny2stringadd#self.*/: A/*->example::ImplicitConversion.newAny2stringadd#[A]*/) extends AnyVal/*->scala::AnyVal#*//*->scala::AnyVal#``().*/ { + implicit final class newAny2stringadd/*<-example::ImplicitConversion.newAny2stringadd#*/[A/*<-example::ImplicitConversion.newAny2stringadd#[A]*/](private val self/*<-example::ImplicitConversion.newAny2stringadd#self.*/: A/*->example::ImplicitConversion.newAny2stringadd#[A]*/) extends AnyVal/*->scala::AnyVal#*/ { def +/*<-example::ImplicitConversion.newAny2stringadd#`+`().*/(other/*<-example::ImplicitConversion.newAny2stringadd#`+`().(other)*/: String/*->scala::Predef.String#*/): String/*->scala::Predef.String#*/ = String/*->java::lang::String#*/.valueOf/*->java::lang::String#valueOf().*/(self/*->example::ImplicitConversion.newAny2stringadd#self.*/) +/*->java::lang::String#`+`().*/ other/*->example::ImplicitConversion.newAny2stringadd#`+`().(other)*/ } } diff --git 
a/tests/semanticdb/expect/Imports.expect.scala b/tests/semanticdb/expect/Imports.expect.scala index 87f7d4cb768f..420545fc02c9 100644 --- a/tests/semanticdb/expect/Imports.expect.scala +++ b/tests/semanticdb/expect/Imports.expect.scala @@ -1 +1,4 @@ import scala.util.control.NonFatal/*->scala::util::control::NonFatal.*/ +import scala.collection.immutable.{HashMap/*->scala::collection::immutable::HashMap.*//*->scala::collection::immutable::HashMap#*/ as HM} + +val m/*<-_empty_::Imports$package.m.*/: HM/*->scala::collection::immutable::HashMap#*/[Int/*->scala::Int#*/, Int/*->scala::Int#*/] = HM/*->scala::collection::immutable::HashMap.*/[Int/*->scala::Int#*/, Int/*->scala::Int#*/]() diff --git a/tests/semanticdb/expect/Imports.scala b/tests/semanticdb/expect/Imports.scala index e6a2bef1a5c4..611c008ec20c 100644 --- a/tests/semanticdb/expect/Imports.scala +++ b/tests/semanticdb/expect/Imports.scala @@ -1 +1,4 @@ import scala.util.control.NonFatal +import scala.collection.immutable.{HashMap as HM} + +val m: HM[Int, Int] = HM[Int, Int]() diff --git a/tests/semanticdb/expect/InstrumentTyper.expect.scala b/tests/semanticdb/expect/InstrumentTyper.expect.scala index fdce47af0a20..5ed05aa56192 100644 --- a/tests/semanticdb/expect/InstrumentTyper.expect.scala +++ b/tests/semanticdb/expect/InstrumentTyper.expect.scala @@ -6,7 +6,7 @@ import scala.language/*->scala::language.*/.higherKinds/*->scala::language.highe import types.Test/*->types::Test.*/.* class InstrumentTyper/*<-example::InstrumentTyper#*/ { self/*<-local0*/: AnyRef/*->scala::AnyRef#*/ => - def all/*<-example::InstrumentTyper#all().*/ = List/*->scala::package.List.*//*->scala::collection::IterableFactory#apply().*/( + def all/*<-example::InstrumentTyper#all().*/ = List/*->scala::package.List.*/( Literal/*->types::Test.Literal.*/.int/*->types::Test.Literal.int.*/, Literal/*->types::Test.Literal.*/.long/*->types::Test.Literal.long.*/, Literal/*->types::Test.Literal.*/.float/*->types::Test.Literal.float.*/, @@ -18,7 
+18,7 @@ class InstrumentTyper/*<-example::InstrumentTyper#*/ { self/*<-local0*/: AnyRef/ Literal/*->types::Test.Literal.*/.unit/*->types::Test.Literal.unit.*/, Literal/*->types::Test.Literal.*/.javaEnum/*->types::Test.Literal.javaEnum.*/, Literal/*->types::Test.Literal.*/.clazzOf/*->types::Test.Literal.clazzOf.*/, - List/*->scala::package.List.*//*->scala::collection::IterableFactory#apply().*/() + List/*->scala::package.List.*/() ) type AnnotatedType/*<-example::InstrumentTyper#AnnotatedType#*/ = Int/*->scala::Int#*/ @param/*->scala::annotation::meta::param#*/ def singletonType/*<-example::InstrumentTyper#singletonType().*/(x/*<-example::InstrumentTyper#singletonType().(x)*/: Predef/*->scala::Predef.*/.type) = ???/*->scala::Predef.`???`().*/ diff --git a/tests/semanticdb/expect/InventedNames.expect.scala b/tests/semanticdb/expect/InventedNames.expect.scala index 5ff2f20213bd..7c5b008209c2 100644 --- a/tests/semanticdb/expect/InventedNames.expect.scala +++ b/tests/semanticdb/expect/InventedNames.expect.scala @@ -11,16 +11,16 @@ trait Z/*<-givens::Z#*/[T/*<-givens::Z#[T]*/]: -/*<-givens::InventedNames$package.*/given intValue/*<-givens::InventedNames$package.intValue.*/: Int/*->scala::Int#*/ = 4 -given /*<-givens::InventedNames$package.given_String.*/String/*->scala::Predef.String#*/ = "str" -given /*<-givens::InventedNames$package.given_Double().*/(using Int/*->scala::Int#*/): Double/*->scala::Double#*/ = 4.0 -given /*<-givens::InventedNames$package.given_List_T().*/[T/*<-givens::InventedNames$package.given_List_T().[T]*/]: List/*->scala::package.List#*/[T/*->givens::InventedNames$package.given_List_T().[T]*/] = Nil/*->scala::package.Nil.*/ +given intValue/*<-givens::InventedNames$package.intValue.*/: Int/*->scala::Int#*/ = 4 +given String/*->scala::Predef.String#*/ = "str" +given (using Int/*->scala::Int#*/): Double/*->scala::Double#*/ = 4.0 +given [T/*<-givens::InventedNames$package.given_List_T().[T]*/]: 
List/*->scala::package.List#*/[T/*->givens::InventedNames$package.given_List_T().[T]*/] = Nil/*->scala::package.Nil.*/ given given_Char/*<-givens::InventedNames$package.given_Char.*/: Char/*->scala::Char#*/ = '?' given `given_Float/*<-givens::InventedNames$package.given_Float.*/`: Float/*->scala::Float#*/ = 3.0 given `* */*<-givens::InventedNames$package.`* *`.*/`: Long/*->scala::Long#*/ = 5 -given X with -/*<-givens::InventedNames$package.given_X.*//*->givens::X#*/ def doX/*<-givens::InventedNames$package.given_X.doX().*/ = 7 +given X/*->givens::X#*/ with + def doX/*<-givens::InventedNames$package.given_X.doX().*/ = 7 given (using X/*->givens::X#*/): Y/*->givens::Y#*/ with def doY/*<-givens::InventedNames$package.given_Y#doY().*/ = "7" @@ -32,11 +32,11 @@ given [T/*<-givens::InventedNames$package.given_Z_T#[T]*/]: Z/*->givens::Z#*/[T/ val a/*<-givens::InventedNames$package.a.*/ = intValue/*->givens::InventedNames$package.intValue.*/ val b/*<-givens::InventedNames$package.b.*/ = given_String/*->givens::InventedNames$package.given_String.*/ -val c/*<-givens::InventedNames$package.c.*/ = given_Double/*->givens::InventedNames$package.given_Double().*//*->givens::InventedNames$package.intValue.*/ +val c/*<-givens::InventedNames$package.c.*/ = given_Double/*->givens::InventedNames$package.given_Double().*/ val d/*<-givens::InventedNames$package.d.*/ = given_List_T/*->givens::InventedNames$package.given_List_T().*/[Int/*->scala::Int#*/] val e/*<-givens::InventedNames$package.e.*/ = given_Char/*->givens::InventedNames$package.given_Char.*/ val f/*<-givens::InventedNames$package.f.*/ = given_Float/*->givens::InventedNames$package.given_Float.*/ val g/*<-givens::InventedNames$package.g.*/ = `* *`/*->givens::InventedNames$package.`* *`.*/ val x/*<-givens::InventedNames$package.x.*/ = given_X/*->givens::InventedNames$package.given_X.*/ -val y/*<-givens::InventedNames$package.y.*/ = 
given_Y/*->givens::InventedNames$package.given_Y().*//*->givens::InventedNames$package.given_X.*/ -val z/*<-givens::InventedNames$package.z.*/ = given_Z_T/*->givens::InventedNames$package.given_Z_T().*/[String/*->scala::Predef.String#*/] \ No newline at end of file +val y/*<-givens::InventedNames$package.y.*/ = given_Y/*->givens::InventedNames$package.given_Y().*/ +val z/*<-givens::InventedNames$package.z.*/ = given_Z_T/*->givens::InventedNames$package.given_Z_T().*/[String/*->scala::Predef.String#*/] diff --git a/tests/semanticdb/expect/InventedNames.scala b/tests/semanticdb/expect/InventedNames.scala index 3858f22c8331..42c14c90e370 100644 --- a/tests/semanticdb/expect/InventedNames.scala +++ b/tests/semanticdb/expect/InventedNames.scala @@ -39,4 +39,4 @@ val f = given_Float val g = `* *` val x = given_X val y = given_Y -val z = given_Z_T[String] \ No newline at end of file +val z = given_Z_T[String] diff --git a/tests/semanticdb/expect/Issue1749.expect.scala b/tests/semanticdb/expect/Issue1749.expect.scala index b047b771e9ea..19fe1a14ad36 100644 --- a/tests/semanticdb/expect/Issue1749.expect.scala +++ b/tests/semanticdb/expect/Issue1749.expect.scala @@ -6,11 +6,11 @@ import scala.math.Ordered/*->scala::math::Ordered.*/.orderingToOrdered/*->scala: class Issue1749/*<-example::Issue1749#*/ { val x1/*<-example::Issue1749#x1.*/ = 42 val x2/*<-example::Issue1749#x2.*/ = 42 - /*->scala::math::Ordered.orderingToOrdered().*/(/*->scala::Tuple2.apply().*/x1/*->example::Issue1749#x1.*/, x1/*->example::Issue1749#x1.*/)/*->scala::math::Ordering.Tuple2().*//*->scala::math::Ordering.Int.*/ - .compare/*->scala::math::Ordered#compare().*/((/*->scala::Tuple2.apply().*/x2/*->example::Issue1749#x2.*/, x2/*->example::Issue1749#x2.*/)) + (x1/*->example::Issue1749#x1.*/, x1/*->example::Issue1749#x1.*/) + .compare/*->scala::math::Ordered#compare().*/((x2/*->example::Issue1749#x2.*/, x2/*->example::Issue1749#x2.*/)) } class Issue1854/*<-example::Issue1854#*/ { val 
map/*<-example::Issue1854#map.*/ = collection.mutable.Map/*->scala::collection::mutable::Map.*/.empty/*->scala::collection::MapFactory.Delegate#empty().*/[String/*->scala::Predef.String#*/, String/*->scala::Predef.String#*/] - map/*->example::Issue1854#map.*//*->scala::collection::mutable::MapOps#update().*/("a") = "b" + map/*->example::Issue1854#map.*/("a") = "b" } diff --git a/tests/semanticdb/expect/Local.expect.scala b/tests/semanticdb/expect/Local.expect.scala index 6c1aa8440b23..e6dfc04f5422 100644 --- a/tests/semanticdb/expect/Local.expect.scala +++ b/tests/semanticdb/expect/Local.expect.scala @@ -2,7 +2,7 @@ package example class Local/*<-example::Local#*/ { def a/*<-example::Local#a().*/() = { - def id/*<-local0*/[A/*<-local1*/](a/*<-local2*/: A/*->local1*/): A/*->local1*/ = a/*->local2*/ - id/*->local0*/(1) + def id/*<-local2*/[A/*<-local0*/](a/*<-local1*/: A/*->local0*/): A/*->local0*/ = a/*->local1*/ + id/*->local2*/(1) } } diff --git a/tests/semanticdb/expect/Locals.expect.scala b/tests/semanticdb/expect/Locals.expect.scala index 7a8fdf0dd432..8cc14786f7ed 100644 --- a/tests/semanticdb/expect/Locals.expect.scala +++ b/tests/semanticdb/expect/Locals.expect.scala @@ -3,6 +3,6 @@ package locals object Test/*<-locals::Test.*/ { val xs/*<-locals::Test.xs.*/ = { val x/*<-local0*/ = 42 - List/*->scala::package.List.*//*->scala::collection::IterableFactory#apply().*/(x/*->local0*/) + List/*->scala::package.List.*/(x/*->local0*/) } } diff --git a/tests/semanticdb/expect/MetacJava.expect.scala b/tests/semanticdb/expect/MetacJava.expect.scala index f95f82d787eb..08e787ce4b8a 100644 --- a/tests/semanticdb/expect/MetacJava.expect.scala +++ b/tests/semanticdb/expect/MetacJava.expect.scala @@ -4,16 +4,16 @@ import com.javacp class MetacJava/*<-example::MetacJava#*/ { javacp.MetacJava/*->com::javacp::MetacJava#*/.StaticInner/*->com::javacp::MetacJava#StaticInner#*/.isStatic/*->com::javacp::MetacJava#StaticInner#isStatic().*/() - new 
javacp.MetacJava/*->com::javacp::MetacJava#*/.StaticInner/*->com::javacp::MetacJava#StaticInner#*//*->com::javacp::MetacJava#StaticInner#``().*/().isNotStatic/*->com::javacp::MetacJava#StaticInner#isNotStatic().*/() - val inner/*<-example::MetacJava#inner.*/ = new javacp.MetacJava/*->com::javacp::MetacJava#*//*->com::javacp::MetacJava#``().*/() - val overload1/*<-example::MetacJava#overload1.*/ = new inner/*->example::MetacJava#inner.*/.Overload1/*->com::javacp::MetacJava#Overload1#*//*->com::javacp::MetacJava#Overload1#``().*/() - val overload2/*<-example::MetacJava#overload2.*/ = new inner/*->example::MetacJava#inner.*/.Overload2/*->com::javacp::MetacJava#Overload2#*//*->com::javacp::MetacJava#Overload2#``().*/() - inner/*->example::MetacJava#inner.*/.overload/*->com::javacp::MetacJava#overload().*/(new overload1/*->example::MetacJava#overload1.*/.A/*->com::javacp::MetacJava#Overload1#A#*//*->com::javacp::MetacJava#Overload1#A#``().*/()) - inner/*->example::MetacJava#inner.*/.overload/*->com::javacp::MetacJava#overload(+1).*/(new overload2/*->example::MetacJava#overload2.*/.A/*->com::javacp::MetacJava#Overload2#A#*//*->com::javacp::MetacJava#Overload2#A#``().*/()) - val staticInner/*<-example::MetacJava#staticInner.*/ = new javacp.MetacJava/*->com::javacp::MetacJava#*/.StaticInner/*->com::javacp::MetacJava#StaticInner#*//*->com::javacp::MetacJava#StaticInner#``().*/() - val nonStatic/*<-example::MetacJava#nonStatic.*/ = new staticInner/*->example::MetacJava#staticInner.*/.NonStatic/*->com::javacp::MetacJava#StaticInner#NonStatic#*//*->com::javacp::MetacJava#StaticInner#NonStatic#``().*/() + new javacp.MetacJava/*->com::javacp::MetacJava#*/.StaticInner/*->com::javacp::MetacJava#StaticInner#*/().isNotStatic/*->com::javacp::MetacJava#StaticInner#isNotStatic().*/() + val inner/*<-example::MetacJava#inner.*/ = new javacp.MetacJava/*->com::javacp::MetacJava#*/() + val overload1/*<-example::MetacJava#overload1.*/ = new 
inner/*->example::MetacJava#inner.*/.Overload1/*->com::javacp::MetacJava#Overload1#*/() + val overload2/*<-example::MetacJava#overload2.*/ = new inner/*->example::MetacJava#inner.*/.Overload2/*->com::javacp::MetacJava#Overload2#*/() + inner/*->example::MetacJava#inner.*/.overload/*->com::javacp::MetacJava#overload().*/(new overload1/*->example::MetacJava#overload1.*/.A/*->com::javacp::MetacJava#Overload1#A#*/()) + inner/*->example::MetacJava#inner.*/.overload/*->com::javacp::MetacJava#overload(+1).*/(new overload2/*->example::MetacJava#overload2.*/.A/*->com::javacp::MetacJava#Overload2#A#*/()) + val staticInner/*<-example::MetacJava#staticInner.*/ = new javacp.MetacJava/*->com::javacp::MetacJava#*/.StaticInner/*->com::javacp::MetacJava#StaticInner#*/() + val nonStatic/*<-example::MetacJava#nonStatic.*/ = new staticInner/*->example::MetacJava#staticInner.*/.NonStatic/*->com::javacp::MetacJava#StaticInner#NonStatic#*/() nonStatic/*->example::MetacJava#nonStatic.*/.method/*->com::javacp::MetacJava#StaticInner#NonStatic#method().*/(nonStatic/*->example::MetacJava#nonStatic.*/) - javacp.MetacJava/*->com::javacp::MetacJava#*/.overload/*->com::javacp::MetacJava#overload(+2).*/(new javacp.MetacJava/*->com::javacp::MetacJava#*/.Overload3/*->com::javacp::MetacJava#Overload3#*/.A/*->com::javacp::MetacJava#Overload3#A#*//*->com::javacp::MetacJava#Overload3#A#``().*/()) + javacp.MetacJava/*->com::javacp::MetacJava#*/.overload/*->com::javacp::MetacJava#overload(+2).*/(new javacp.MetacJava/*->com::javacp::MetacJava#*/.Overload3/*->com::javacp::MetacJava#Overload3#*/.A/*->com::javacp::MetacJava#Overload3#A#*/()) val interface/*<-example::MetacJava#interface.*/: javacp.Interface/*->com::javacp::Interface#*/ = null val coin/*<-example::MetacJava#coin.*/: javacp.Coin/*->com::javacp::Coin#*/ = javacp.Coin/*->com::javacp::Coin#*/.PENNY/*->com::javacp::Coin#PENNY.*/ val entry/*<-example::MetacJava#entry.*/: 
java.util.Map/*->java::util::Map#*/.Entry/*->java::util::Map#Entry#*/[Int/*->scala::Int#*/, Int/*->scala::Int#*/] = null diff --git a/tests/semanticdb/expect/MethodUsages.expect.scala b/tests/semanticdb/expect/MethodUsages.expect.scala index d202e21ec851..e6963465c97e 100644 --- a/tests/semanticdb/expect/MethodUsages.expect.scala +++ b/tests/semanticdb/expect/MethodUsages.expect.scala @@ -1,7 +1,7 @@ package example class MethodUsages/*<-example::MethodUsages#*/ { - val m/*<-example::MethodUsages#m.*/ = new Methods/*->example::Methods#*/[Int/*->scala::Int#*/]/*->example::Methods#``().*/ + val m/*<-example::MethodUsages#m.*/ = new Methods/*->example::Methods#*/[Int/*->scala::Int#*/] m/*->example::MethodUsages#m.*/.m1/*->example::Methods#m1().*/ m/*->example::MethodUsages#m.*/.m2/*->example::Methods#m2().*/() m/*->example::MethodUsages#m.*/.m3/*->example::Methods#m3().*/(0) @@ -9,9 +9,9 @@ class MethodUsages/*<-example::MethodUsages#*/ { m/*->example::MethodUsages#m.*/.m5/*->example::Methods#m5().*/("") m/*->example::MethodUsages#m.*/.m5/*->example::Methods#m5(+1).*/(0) m/*->example::MethodUsages#m.*/.m6/*->example::Methods#m6().*/(0) - m/*->example::MethodUsages#m.*/.m6/*->example::Methods#m6(+1).*/(new m/*->example::MethodUsages#m.*/.List/*->example::Methods#List#*/[Int/*->scala::Int#*/]/*->example::Methods#List#``().*/) + m/*->example::MethodUsages#m.*/.m6/*->example::Methods#m6(+1).*/(new m/*->example::MethodUsages#m.*/.List/*->example::Methods#List#*/[Int/*->scala::Int#*/]) m/*->example::MethodUsages#m.*/.m6/*->example::Methods#m6(+2).*/(Nil/*->scala::package.Nil.*/) - m/*->example::MethodUsages#m.*/.m7/*->example::Methods#m7().*/(m/*->example::MethodUsages#m.*/, new m/*->example::MethodUsages#m.*/.List/*->example::Methods#List#*/[Int/*->scala::Int#*/]/*->example::Methods#List#``().*/)/*->scala::math::Ordering.Int.*/ + m/*->example::MethodUsages#m.*/.m7/*->example::Methods#m7().*/(m/*->example::MethodUsages#m.*/, new 
m/*->example::MethodUsages#m.*/.List/*->example::Methods#List#*/[Int/*->scala::Int#*/]) m/*->example::MethodUsages#m.*/.`m8().`/*->example::Methods#`m8().`().*/() m/*->example::MethodUsages#m.*/.m9/*->example::Methods#m9().*/(null) m/*->example::MethodUsages#m.*/.m10/*->example::Methods#m10().*/(null) diff --git a/tests/semanticdb/expect/Methods.expect.scala b/tests/semanticdb/expect/Methods.expect.scala index 8190bf0308d1..f34c657b2f6d 100644 --- a/tests/semanticdb/expect/Methods.expect.scala +++ b/tests/semanticdb/expect/Methods.expect.scala @@ -15,7 +15,7 @@ class Methods/*<-example::Methods#*/[T/*<-example::Methods#[T]*/] { def m6/*<-example::Methods#m6().*/(x/*<-example::Methods#m6().(x)*/: Int/*->scala::Int#*/) = ???/*->scala::Predef.`???`().*/ def m6/*<-example::Methods#m6(+1).*/(x/*<-example::Methods#m6(+1).(x)*/: List/*->example::Methods#List#*/[T/*->example::Methods#[T]*/]) = ???/*->scala::Predef.`???`().*/ def m6/*<-example::Methods#m6(+2).*/(x/*<-example::Methods#m6(+2).(x)*/: scala.List/*->scala::package.List#*/[T/*->example::Methods#[T]*/]) = ???/*->scala::Predef.`???`().*/ - def m7/*<-example::Methods#m7().*/[U/*<-example::Methods#m7().[U]*/: Ordering/*->scala::math::Ordering#*//*->example::Methods#m7().[U]*/](c/*<-example::Methods#m7().(c)*/: Methods/*->example::Methods#*/[T/*->example::Methods#[T]*/], l/*<-example::Methods#m7().(l)*/: List/*->example::Methods#List#*/[U/*->example::Methods#m7().[U]*/]) = ???/*->scala::Predef.`???`().*/ + def m7/*<-example::Methods#m7().*/[U/*<-example::Methods#m7().[U]*//*<-example::Methods#m7().(evidence$1)*/: Ordering/*->scala::math::Ordering#*/](c/*<-example::Methods#m7().(c)*/: Methods/*->example::Methods#*/[T/*->example::Methods#[T]*/], l/*<-example::Methods#m7().(l)*/: List/*->example::Methods#List#*/[U/*->example::Methods#m7().[U]*/]) = ???/*->scala::Predef.`???`().*/ def `m8()./*<-example::Methods#`m8().`().*/`() = ???/*->scala::Predef.`???`().*/ class `m9()./*<-example::Methods#`m9().`#*/` def 
m9/*<-example::Methods#m9().*/(x/*<-example::Methods#m9().(x)*/: `m9().`/*->example::Methods#`m9().`#*/) = ???/*->scala::Predef.`???`().*/ @@ -24,6 +24,7 @@ class Methods/*<-example::Methods#*/[T/*<-example::Methods#[T]*/] { def m11/*<-example::Methods#m11(+1).*/(x/*<-example::Methods#m11(+1).(x)*/: Example/*->example::Example.*/.type) = ???/*->scala::Predef.`???`().*/ def m12a/*<-example::Methods#m12a().*/(x/*<-example::Methods#m12a().(x)*/: {}) = ???/*->scala::Predef.`???`().*/ def m12b/*<-example::Methods#m12b().*/(x/*<-example::Methods#m12b().(x)*/: { val x/*<-local0*/: Int/*->scala::Int#*/ }) = ???/*->scala::Predef.`???`().*/ + def m12c/*<-example::Methods#m12c().*/(x/*<-example::Methods#m12c().(x)*/: { val x/*<-local1*/: Int/*->scala::Int#*/; def y/*<-local2*/: Int/*->scala::Int#*/ }) = ???/*->scala::Predef.`???`().*/ def m13/*<-example::Methods#m13().*/(x/*<-example::Methods#m13().(x)*/: Int/*->scala::Int#*/ @unchecked/*->scala::unchecked#*/) = ???/*->scala::Predef.`???`().*/ def m15/*<-example::Methods#m15().*/(x/*<-example::Methods#m15().(x)*/: => Int/*->scala::Int#*/) = ???/*->scala::Predef.`???`().*/ def m16/*<-example::Methods#m16().*/(x/*<-example::Methods#m16().(x)*/: Int/*->scala::Int#*/*) = ???/*->scala::Predef.`???`().*/ diff --git a/tests/semanticdb/expect/Methods.scala b/tests/semanticdb/expect/Methods.scala index d588d3df2e55..f07dea11752d 100644 --- a/tests/semanticdb/expect/Methods.scala +++ b/tests/semanticdb/expect/Methods.scala @@ -24,6 +24,7 @@ class Methods[T] { def m11(x: Example.type) = ??? def m12a(x: {}) = ??? def m12b(x: { val x: Int }) = ??? + def m12c(x: { val x: Int; def y: Int }) = ??? def m13(x: Int @unchecked) = ??? def m15(x: => Int) = ??? def m16(x: Int*) = ??? 
diff --git a/tests/semanticdb/expect/NamedApplyBlock.expect.scala b/tests/semanticdb/expect/NamedApplyBlock.expect.scala index db1311082bad..f47393b0322a 100644 --- a/tests/semanticdb/expect/NamedApplyBlock.expect.scala +++ b/tests/semanticdb/expect/NamedApplyBlock.expect.scala @@ -4,11 +4,11 @@ object NamedApplyBlockMethods/*<-example::NamedApplyBlockMethods.*/ { val local/*<-example::NamedApplyBlockMethods.local.*/ = 1 def foo/*<-example::NamedApplyBlockMethods.foo().*/(a/*<-example::NamedApplyBlockMethods.foo().(a)*/: Int/*->scala::Int#*/ = 1, b/*<-example::NamedApplyBlockMethods.foo().(b)*/: Int/*->scala::Int#*/ = 2, c/*<-example::NamedApplyBlockMethods.foo().(c)*/: Int/*->scala::Int#*/ = 3): Int/*->scala::Int#*/ = a/*->example::NamedApplyBlockMethods.foo().(a)*/ +/*->scala::Int#`+`(+4).*/ b/*->example::NamedApplyBlockMethods.foo().(b)*/ +/*->scala::Int#`+`(+4).*/ c/*->example::NamedApplyBlockMethods.foo().(c)*/ def baseCase/*<-example::NamedApplyBlockMethods.baseCase().*/ = foo/*->example::NamedApplyBlockMethods.foo().*/(local/*->example::NamedApplyBlockMethods.local.*/, c/*->example::NamedApplyBlockMethods.foo().(c)*/ = 3) - def recursive/*<-example::NamedApplyBlockMethods.recursive().*/ = foo/*->example::NamedApplyBlockMethods.foo().*//*->local1*/(local/*->example::NamedApplyBlockMethods.local.*/, c/*->example::NamedApplyBlockMethods.foo().(c)*/ = foo/*->example::NamedApplyBlockMethods.foo().*/(local/*->example::NamedApplyBlockMethods.local.*/, c/*->example::NamedApplyBlockMethods.foo().(c)*/ = 3)) + def recursive/*<-example::NamedApplyBlockMethods.recursive().*/ = foo/*->example::NamedApplyBlockMethods.foo().*/(local/*->example::NamedApplyBlockMethods.local.*/, c/*->example::NamedApplyBlockMethods.foo().(c)*/ = foo/*->example::NamedApplyBlockMethods.foo().*/(local/*->example::NamedApplyBlockMethods.local.*/, c/*->example::NamedApplyBlockMethods.foo().(c)*/ = 3)) } object 
NamedApplyBlockCaseClassConstruction/*<-example::NamedApplyBlockCaseClassConstruction.*/ { case class Msg/*<-example::NamedApplyBlockCaseClassConstruction.Msg#*/(body/*<-example::NamedApplyBlockCaseClassConstruction.Msg#body.*/: String/*->scala::Predef.String#*/, head/*<-example::NamedApplyBlockCaseClassConstruction.Msg#head.*/: String/*->scala::Predef.String#*/ = "default", tail/*<-example::NamedApplyBlockCaseClassConstruction.Msg#tail.*/: String/*->scala::Predef.String#*/) val bodyText/*<-example::NamedApplyBlockCaseClassConstruction.bodyText.*/ = "body" - val msg/*<-example::NamedApplyBlockCaseClassConstruction.msg.*/ = Msg/*->example::NamedApplyBlockCaseClassConstruction.Msg.*//*->example::NamedApplyBlockCaseClassConstruction.Msg.apply().*/(bodyText/*->example::NamedApplyBlockCaseClassConstruction.bodyText.*/, tail/*->example::NamedApplyBlockCaseClassConstruction.Msg.apply().(tail)*/ = "tail") + val msg/*<-example::NamedApplyBlockCaseClassConstruction.msg.*/ = Msg/*->example::NamedApplyBlockCaseClassConstruction.Msg.*/(bodyText/*->example::NamedApplyBlockCaseClassConstruction.bodyText.*/, tail/*->example::NamedApplyBlockCaseClassConstruction.Msg.apply().(tail)*/ = "tail") } diff --git a/tests/semanticdb/expect/NamedArguments.expect.scala b/tests/semanticdb/expect/NamedArguments.expect.scala index 1d478decc4b4..454a2b6d6281 100644 --- a/tests/semanticdb/expect/NamedArguments.expect.scala +++ b/tests/semanticdb/expect/NamedArguments.expect.scala @@ -2,6 +2,6 @@ package example class NamedArguments/*<-example::NamedArguments#*/ { case class User/*<-example::NamedArguments#User#*/(name/*<-example::NamedArguments#User#name.*/: String/*->scala::Predef.String#*/) - User/*->example::NamedArguments#User.*//*->example::NamedArguments#User.apply().*/(name/*->example::NamedArguments#User.apply().(name)*/ = "John") + User/*->example::NamedArguments#User.*/(name/*->example::NamedArguments#User.apply().(name)*/ = "John") 
User/*->example::NamedArguments#User.*/.apply/*->example::NamedArguments#User.apply().*/(name/*->example::NamedArguments#User.apply().(name)*/ = "John") } diff --git a/tests/semanticdb/expect/NewModifiers.expect.scala b/tests/semanticdb/expect/NewModifiers.expect.scala index bc0d1dcf71e7..cc93048214bb 100644 --- a/tests/semanticdb/expect/NewModifiers.expect.scala +++ b/tests/semanticdb/expect/NewModifiers.expect.scala @@ -2,3 +2,16 @@ object NewModifiers/*<-_empty_::NewModifiers.*/ { inline val foo/*<-_empty_::NewModifiers.foo.*/ = "foo" opaque type A/*<-_empty_::NewModifiers.A#*/ = Int/*->scala::Int#*/ } + +opaque type OpaqueB/*<-_empty_::NewModifiers$package.OpaqueB#*/ = Int/*->scala::Int#*/ + +class NewModifiersClass/*<-_empty_::NewModifiersClass#*/ { + opaque type C/*<-_empty_::NewModifiersClass#C#*/ = Int/*->scala::Int#*/ + class Nested/*<-_empty_::NewModifiersClass#Nested#*/ { + opaque type NestedOpaque/*<-_empty_::NewModifiersClass#Nested#NestedOpaque#*/ = Int/*->scala::Int#*/ + } +} + +trait NewModifiersTrait/*<-_empty_::NewModifiersTrait#*/ { + opaque type D/*<-_empty_::NewModifiersTrait#D#*/ = Int/*->scala::Int#*/ +} diff --git a/tests/semanticdb/expect/NewModifiers.scala b/tests/semanticdb/expect/NewModifiers.scala index 2d5e82551b02..07b47ad8fb02 100644 --- a/tests/semanticdb/expect/NewModifiers.scala +++ b/tests/semanticdb/expect/NewModifiers.scala @@ -2,3 +2,16 @@ object NewModifiers { inline val foo = "foo" opaque type A = Int } + +opaque type OpaqueB = Int + +class NewModifiersClass { + opaque type C = Int + class Nested { + opaque type NestedOpaque = Int + } +} + +trait NewModifiersTrait { + opaque type D = Int +} diff --git a/tests/semanticdb/expect/Prefixes.expect.scala b/tests/semanticdb/expect/Prefixes.expect.scala index 185e2d3530d8..857dcb28b600 100644 --- a/tests/semanticdb/expect/Prefixes.expect.scala +++ b/tests/semanticdb/expect/Prefixes.expect.scala @@ -24,7 +24,7 @@ object Test/*<-prefixes::Test.*/ { def m2/*<-prefixes::Test.m2().*/: 
c/*->prefixes::Test.c.*/.T/*->prefixes::C#T#*/ = ???/*->scala::Predef.`???`().*/ def k2/*<-prefixes::Test.k2().*/: c/*->prefixes::Test.c.*/.N/*->prefixes::C#N.*/.U/*->prefixes::C#N.U#*/ = ???/*->scala::Predef.`???`().*/ import c/*->prefixes::Test.c.*/.N/*->prefixes::C#N.*/.* - def k3/*<-prefixes::Test.k3().*/: U = ???/*->scala::Predef.`???`().*/ + def k3/*<-prefixes::Test.k3().*/: U/*->prefixes::C#N.U#*/ = ???/*->scala::Predef.`???`().*/ def n2/*<-prefixes::Test.n2().*/: M/*->prefixes::M.*/.T/*->prefixes::M.T#*/ = ???/*->scala::Predef.`???`().*/ diff --git a/tests/semanticdb/expect/RecOrRefined.expect.scala b/tests/semanticdb/expect/RecOrRefined.expect.scala new file mode 100644 index 000000000000..3c32d22506ef --- /dev/null +++ b/tests/semanticdb/expect/RecOrRefined.expect.scala @@ -0,0 +1,34 @@ +package example + +def m1/*<-example::RecOrRefined$package.m1().*/(a/*<-example::RecOrRefined$package.m1().(a)*/: Int/*->scala::Int#*/ { val x/*<-local4*/: Int/*->scala::Int#*/ }) = ???/*->scala::Predef.`???`().*/ +def m2/*<-example::RecOrRefined$package.m2().*/(x/*<-example::RecOrRefined$package.m2().(x)*/: { val x/*<-local5*/: Int/*->scala::Int#*/; def y/*<-local6*/: Int/*->scala::Int#*/ }) = ???/*->scala::Predef.`???`().*/ +def m3/*<-example::RecOrRefined$package.m3().*/(x/*<-example::RecOrRefined$package.m3().(x)*/: { val x/*<-local7*/: Int/*->scala::Int#*/; def y/*<-local8*/: Int/*->scala::Int#*/; type z/*<-local9*/ }) = ???/*->scala::Predef.`???`().*/ +trait PolyHolder/*<-example::PolyHolder#*/ { + def foo/*<-example::PolyHolder#foo().*/[T/*<-example::PolyHolder#foo().[T]*/](t/*<-example::PolyHolder#foo().(t)*/: T/*->example::PolyHolder#foo().[T]*/): Any/*->scala::Any#*/ +} + +def m4/*<-example::RecOrRefined$package.m4().*/(x/*<-example::RecOrRefined$package.m4().(x)*/: PolyHolder/*->example::PolyHolder#*/ { def foo/*<-local12*/[T/*<-local10*/](t/*<-local11*/: T/*->local10*/): T/*->local10*/ }) = ???/*->scala::Predef.`???`().*/ +def 
m5/*<-example::RecOrRefined$package.m5().*/[Z/*<-example::RecOrRefined$package.m5().[Z]*/](x/*<-example::RecOrRefined$package.m5().(x)*/: Int/*->scala::Int#*/): PolyHolder/*->example::PolyHolder#*/ { def foo/*<-local15*/[T/*<-local13*/](t/*<-local14*/: T/*->local13*/): T/*->local13*/ } = ???/*->scala::Predef.`???`().*/ + +type m6/*<-example::RecOrRefined$package.m6#*/ = [X/*<-example::RecOrRefined$package.m6#[X]*/] =>> PolyHolder/*->example::PolyHolder#*/ { def foo/*<-local18*/[T/*<-local16*/](t/*<-local17*/: T/*->local16*/): T/*->local16*/ } + +class Record/*<-example::Record#*/(elems/*<-example::Record#elems.*/: (String/*->scala::Predef.String#*/, Any/*->scala::Any#*/)*) extends Selectable/*->scala::Selectable#*/: + private val fields/*<-example::Record#fields.*/ = elems/*->example::Record#elems.*/.toMap/*->scala::collection::IterableOnceOps#toMap().*/ + def selectDynamic/*<-example::Record#selectDynamic().*/(name/*<-example::Record#selectDynamic().(name)*/: String/*->scala::Predef.String#*/): Any/*->scala::Any#*/ = fields/*->example::Record#fields.*/(name/*->example::Record#selectDynamic().(name)*/) + +type Person/*<-example::RecOrRefined$package.Person#*/ = Record/*->example::Record#*/ { + val name/*<-local19*/: String/*->scala::Predef.String#*/ + val age/*<-local20*/: Int/*->scala::Int#*/ +} + +// RecType +class C/*<-example::C#*/ { type T1/*<-example::C#T1#*/; type T2/*<-example::C#T2#*/ } +type C2/*<-example::RecOrRefined$package.C2#*/ = C/*->example::C#*/ { type T1/*<-local21*/; type T2/*<-local22*/ = T1/*->local21*/ } + +trait SpecialRefinement/*<-example::SpecialRefinement#*/ { + def pickOne/*<-example::SpecialRefinement#pickOne().*/[T/*<-example::SpecialRefinement#pickOne().[T]*/](as/*<-example::SpecialRefinement#pickOne().(as)*/: T/*->example::SpecialRefinement#pickOne().[T]*/*): Option/*->scala::Option#*/[Any/*->scala::Any#*/] +} + +class PickOneRefinement_1/*<-example::PickOneRefinement_1#*/[S/*<-example::PickOneRefinement_1#[S]*/ <: 
SpecialRefinement/*->example::SpecialRefinement#*/ { def pickOne/*<-local3*/[T/*<-local1*/](as/*<-local2*/: T/*->local1*/*): Option/*->scala::Option#*/[String/*->scala::Predef.String#*/] }] { + def run/*<-example::PickOneRefinement_1#run().*/(s/*<-example::PickOneRefinement_1#run().(s)*/: S/*->example::PickOneRefinement_1#[S]*/, as/*<-example::PickOneRefinement_1#run().(as)*/: String/*->scala::Predef.String#*/*): Option/*->scala::Option#*/[String/*->scala::Predef.String#*/] = s/*->example::PickOneRefinement_1#run().(s)*/.pickOne/*->example::SpecialRefinement#pickOne().*/(as/*->example::PickOneRefinement_1#run().(as)*/:_*) +} diff --git a/tests/semanticdb/expect/RecOrRefined.scala b/tests/semanticdb/expect/RecOrRefined.scala new file mode 100644 index 000000000000..06292435a91e --- /dev/null +++ b/tests/semanticdb/expect/RecOrRefined.scala @@ -0,0 +1,34 @@ +package example + +def m1(a: Int { val x: Int }) = ??? +def m2(x: { val x: Int; def y: Int }) = ??? +def m3(x: { val x: Int; def y: Int; type z }) = ??? +trait PolyHolder { + def foo[T](t: T): Any +} + +def m4(x: PolyHolder { def foo[T](t: T): T }) = ??? +def m5[Z](x: Int): PolyHolder { def foo[T](t: T): T } = ??? 
+ +type m6 = [X] =>> PolyHolder { def foo[T](t: T): T } + +class Record(elems: (String, Any)*) extends Selectable: + private val fields = elems.toMap + def selectDynamic(name: String): Any = fields(name) + +type Person = Record { + val name: String + val age: Int +} + +// RecType +class C { type T1; type T2 } +type C2 = C { type T1; type T2 = T1 } + +trait SpecialRefinement { + def pickOne[T](as: T*): Option[Any] +} + +class PickOneRefinement_1[S <: SpecialRefinement { def pickOne[T](as: T*): Option[String] }] { + def run(s: S, as: String*): Option[String] = s.pickOne(as:_*) +} diff --git a/tests/semanticdb/expect/RightAssociativeExtension.expect.scala b/tests/semanticdb/expect/RightAssociativeExtension.expect.scala index 38f81d0154ef..9d8271a43685 100644 --- a/tests/semanticdb/expect/RightAssociativeExtension.expect.scala +++ b/tests/semanticdb/expect/RightAssociativeExtension.expect.scala @@ -1,6 +1,6 @@ package ext -/*<-ext::RightAssociativeExtension$package.*/extension (s/*<-ext::RightAssociativeExtension$package.`:*:`().(s)*/: String/*->scala::Predef.String#*/) - def :*:/*<-ext::RightAssociativeExtension$package.`:*:`().*/ (i/*<-ext::RightAssociativeExtension$package.`:*:`().(i)*/: Int/*->scala::Int#*/): (String/*->scala::Predef.String#*/, Int/*->scala::Int#*/) = (/*->scala::Tuple2.apply().*/s/*->ext::RightAssociativeExtension$package.`:*:`().(s)*/, i/*->ext::RightAssociativeExtension$package.`:*:`().(i)*/) +extension (s/*<-ext::RightAssociativeExtension$package.`:*:`().(s)*/: String/*->scala::Predef.String#*/) + def :*:/*<-ext::RightAssociativeExtension$package.`:*:`().*/ (i/*<-ext::RightAssociativeExtension$package.`:*:`().(i)*/: Int/*->scala::Int#*/): (String/*->scala::Predef.String#*/, Int/*->scala::Int#*/) = (s/*->ext::RightAssociativeExtension$package.`:*:`().(s)*/, i/*->ext::RightAssociativeExtension$package.`:*:`().(i)*/) val b/*<-ext::RightAssociativeExtension$package.b.*/ = "foo" :*:/*->ext::RightAssociativeExtension$package.`:*:`().*/ 23 \ No 
newline at end of file diff --git a/tests/semanticdb/expect/Synthetic.expect.scala b/tests/semanticdb/expect/Synthetic.expect.scala index 0f95d9d0b01f..2672b8106785 100644 --- a/tests/semanticdb/expect/Synthetic.expect.scala +++ b/tests/semanticdb/expect/Synthetic.expect.scala @@ -3,48 +3,59 @@ package example import scala.language/*->scala::language.*/.implicitConversions/*->scala::language.implicitConversions.*/ class Synthetic/*<-example::Synthetic#*/ { - List/*->scala::package.List.*//*->scala::collection::IterableFactory#apply().*/(1).map/*->scala::collection::immutable::List#map().*/(_ +/*->scala::Int#`+`(+4).*/ 2) - /*->scala::Predef.intArrayOps().*/Array/*->scala::Array.*/.empty/*->scala::Array.empty().*/[Int/*->scala::Int#*/]/*->scala::reflect::ClassTag.apply().*/.headOption/*->scala::collection::ArrayOps#headOption().*/ - /*->scala::Predef.augmentString().*/"fooo".stripPrefix/*->scala::collection::StringOps#stripPrefix().*/("o") + List/*->scala::package.List.*/(1).map/*->scala::collection::immutable::List#map().*/(_ +/*->scala::Int#`+`(+4).*/ 2) + Array/*->scala::Array.*/.empty/*->scala::Array.empty().*/[Int/*->scala::Int#*/].headOption/*->scala::collection::ArrayOps#headOption().*/ + "fooo".stripPrefix/*->scala::collection::StringOps#stripPrefix().*/("o") // See https://github.com/scalameta/scalameta/issues/977 - val Name/*<-example::Synthetic#Name.*/ = /*->scala::Predef.augmentString().*/"name:(.*)".r/*->scala::collection::StringOps#r().*/ - val x/*<-example::Synthetic#x.*/ #::/*->scala::package.`#::`.*//*->scala::package.`#::`.unapply().*/ xs/*<-example::Synthetic#xs.*/ = LazyList/*->scala::package.LazyList.*//*->scala::collection::IterableFactory#apply().*/(1, 2) - val Name/*->example::Synthetic#Name.*//*->scala::util::matching::Regex#unapplySeq().*/(name/*<-example::Synthetic#name.*/) = "name:foo" - 1 #:: /*->scala::collection::immutable::LazyList.toDeferrer().*/2 #:: 
/*->scala::collection::immutable::LazyList.toDeferrer().*/LazyList/*->scala::package.LazyList.*/.empty/*->scala::collection::immutable::LazyList.empty().*//*->scala::collection::immutable::LazyList.Deferrer#`#::`().*/ + val Name/*<-example::Synthetic#Name.*/ = "name:(.*)".r/*->scala::collection::StringOps#r().*/ + val x/*<-example::Synthetic#x.*/ #::/*->scala::package.`#::`.*/ xs/*<-example::Synthetic#xs.*/ = LazyList/*->scala::package.LazyList.*/(1, 2) + val Name/*->example::Synthetic#Name.*/(name/*<-example::Synthetic#name.*/) = "name:foo" + 1 #:: 2 #:: LazyList/*->scala::package.LazyList.*/.empty/*->scala::collection::immutable::LazyList.empty().*/ - val a1/*<-example::Synthetic#a1.*/ #::/*->scala::package.`#::`.*//*->scala::package.`#::`.unapply().*/ a2/*<-example::Synthetic#a2.*/ #::/*->scala::package.`#::`.*//*->scala::package.`#::`.unapply().*/ as/*<-example::Synthetic#as.*/ = LazyList/*->scala::package.LazyList.*//*->scala::collection::IterableFactory#apply().*/(1, 2) + val a1/*<-example::Synthetic#a1.*/ #::/*->scala::package.`#::`.*/ a2/*<-example::Synthetic#a2.*/ #::/*->scala::package.`#::`.*/ as/*<-example::Synthetic#as.*/ = LazyList/*->scala::package.LazyList.*/(1, 2) - val lst/*<-example::Synthetic#lst.*/ = 1 #:: /*->scala::collection::immutable::LazyList.toDeferrer().*/2 #:: /*->scala::collection::immutable::LazyList.toDeferrer().*/LazyList/*->scala::package.LazyList.*/.empty/*->scala::collection::immutable::LazyList.empty().*//*->scala::collection::immutable::LazyList.Deferrer#`#::`().*/ + val lst/*<-example::Synthetic#lst.*/ = 1 #:: 2 #:: LazyList/*->scala::package.LazyList.*/.empty/*->scala::collection::immutable::LazyList.empty().*/ - for (x/*<-local0*/ <- /*->scala::LowPriorityImplicits#intWrapper().*/1 to/*->scala::runtime::RichInt#to().*/ 10/*->scala::collection::immutable::Range#foreach().*/; y/*<-local1*/ <- /*->scala::LowPriorityImplicits#intWrapper().*/0 until/*->scala::runtime::RichInt#until().*/ 
10/*->scala::collection::immutable::Range#foreach().*/) println/*->scala::Predef.println(+1).*/(/*->scala::Predef.ArrowAssoc().*/x/*->local0*/ ->/*->scala::Predef.ArrowAssoc#`->`().*/ x/*->local0*/) - for (i/*<-local2*/ <- /*->scala::LowPriorityImplicits#intWrapper().*/1 to/*->scala::runtime::RichInt#to().*/ 10/*->scala::collection::StrictOptimizedIterableOps#flatMap().*/; j/*<-local3*/ <- /*->scala::LowPriorityImplicits#intWrapper().*/0 until/*->scala::runtime::RichInt#until().*/ 10/*->scala::collection::immutable::Range#map().*/) yield (/*->scala::Tuple2.apply().*/i/*->local2*/, j/*->local3*/) - for (i/*<-local4*/ <- /*->scala::LowPriorityImplicits#intWrapper().*/1 to/*->scala::runtime::RichInt#to().*/ 10/*->scala::collection::StrictOptimizedIterableOps#flatMap().*/; j/*<-local5*/ <- /*->scala::LowPriorityImplicits#intWrapper().*/0 until/*->scala::runtime::RichInt#until().*/ 10/*->scala::collection::IterableOps#withFilter().*/ if i/*->local4*/ %/*->scala::Int#`%`(+3).*/ 2 ==/*->scala::Int#`==`(+3).*/ 0/*->scala::collection::WithFilter#map().*/) yield (/*->scala::Tuple2.apply().*/i/*->local4*/, j/*->local5*/) + for (x/*<-local0*/ <- 1 to/*->scala::runtime::RichInt#to().*/ 10; y/*<-local1*/ <- 0 until/*->scala::runtime::RichInt#until().*/ 10) println/*->scala::Predef.println(+1).*/(x/*->local0*/ ->/*->scala::Predef.ArrowAssoc#`->`().*/ x/*->local0*/) + for (i/*<-local2*/ <- 1 to/*->scala::runtime::RichInt#to().*/ 10; j/*<-local3*/ <- 0 until/*->scala::runtime::RichInt#until().*/ 10) yield (i/*->local2*/, j/*->local3*/) + for (i/*<-local4*/ <- 1 to/*->scala::runtime::RichInt#to().*/ 10; j/*<-local5*/ <- 0 until/*->scala::runtime::RichInt#until().*/ 10 if i/*->local4*/ %/*->scala::Int#`%`(+3).*/ 2 ==/*->scala::Int#`==`(+3).*/ 0) yield (i/*->local4*/, j/*->local5*/) object s/*<-example::Synthetic#s.*/ { def apply/*<-example::Synthetic#s.apply().*/() = 2 - s/*->example::Synthetic#s.apply().*/() + s() s.apply/*->example::Synthetic#s.apply().*/() case class 
Bar/*<-example::Synthetic#s.Bar#*/() - Bar/*->example::Synthetic#s.Bar.*//*->example::Synthetic#s.Bar.apply().*/() - null.asInstanceOf/*->scala::Any#asInstanceOf().*/[Int/*->scala::Int#*/ => Int/*->scala::Int#*/]/*->scala::Function1#apply().*/(2) + Bar/*->example::Synthetic#s.Bar.*/() + null.asInstanceOf/*->scala::Any#asInstanceOf().*/[Int/*->scala::Int#*/ => Int/*->scala::Int#*/](2) } - class J/*<-example::Synthetic#J#*/[T/*<-example::Synthetic#J#[T]*/: Manifest/*->scala::Predef.Manifest#*//*->example::Synthetic#J#[T]*/] { val arr/*<-example::Synthetic#J#arr.*/ = Array/*->scala::Array.*/.empty/*->scala::Array.empty().*/[T/*->example::Synthetic#J#[T]*/] } + class J/*<-example::Synthetic#J#*/[T/*<-example::Synthetic#J#[T]*//*<-example::Synthetic#J#evidence$1.*/: Manifest/*->scala::Predef.Manifest#*/] { val arr/*<-example::Synthetic#J#arr.*/ = Array/*->scala::Array.*/.empty/*->scala::Array.empty().*/[T/*->example::Synthetic#J#[T]*/] } class F/*<-example::Synthetic#F#*/ implicit val ordering/*<-example::Synthetic#ordering.*/: Ordering/*->scala::package.Ordering#*/[F/*->example::Synthetic#F#*/] = ???/*->scala::Predef.`???`().*/ - val f/*<-example::Synthetic#f.*/: Ordered/*->scala::package.Ordered#*/[F/*->example::Synthetic#F#*/] = /*->scala::math::Ordered.orderingToOrdered().*/new F/*->example::Synthetic#F#*//*->example::Synthetic#ordering.*/ + val f/*<-example::Synthetic#f.*/: Ordered/*->scala::package.Ordered#*/[F/*->example::Synthetic#F#*/] = new F/*->example::Synthetic#F#*/ import scala.concurrent.ExecutionContext/*->scala::concurrent::ExecutionContext.*/.Implicits/*->scala::concurrent::ExecutionContext.Implicits.*/.global/*->scala::concurrent::ExecutionContext.Implicits.global().*/ for { - a/*<-local6*/ <- scala.concurrent.Future/*->scala::concurrent::Future.*/.successful/*->scala::concurrent::Future.successful().*/(1)/*->scala::concurrent::Future#foreach().*/ - b/*<-local7*/ <- 
scala.concurrent.Future/*->scala::concurrent::Future.*/.successful/*->scala::concurrent::Future.successful().*/(2)/*->scala::concurrent::Future#foreach().*/ - } println/*->scala::Predef.println(+1).*/(a/*->local6*/)/*->scala::concurrent::ExecutionContext.Implicits.global().*/ + a/*<-local6*/ <- scala.concurrent.Future/*->scala::concurrent::Future.*/.successful/*->scala::concurrent::Future.successful().*/(1) + b/*<-local7*/ <- scala.concurrent.Future/*->scala::concurrent::Future.*/.successful/*->scala::concurrent::Future.successful().*/(2) + } println/*->scala::Predef.println(+1).*/(a/*->local6*/) for { - a/*<-local8*/ <- scala.concurrent.Future/*->scala::concurrent::Future.*/.successful/*->scala::concurrent::Future.successful().*/(1)/*->scala::concurrent::Future#flatMap().*/ - b/*<-local9*/ <- scala.concurrent.Future/*->scala::concurrent::Future.*/.successful/*->scala::concurrent::Future.successful().*/(2)/*->scala::concurrent::Future#withFilter().*/ - if a/*->local8*/ scala::Int#`<`(+3).*/ b/*->local9*//*->scala::concurrent::Future#map().*//*->scala::concurrent::ExecutionContext.Implicits.global().*/ - } yield a/*->local8*//*->scala::concurrent::ExecutionContext.Implicits.global().*/ - + a/*<-local8*/ <- scala.concurrent.Future/*->scala::concurrent::Future.*/.successful/*->scala::concurrent::Future.successful().*/(1) + b/*<-local9*/ <- scala.concurrent.Future/*->scala::concurrent::Future.*/.successful/*->scala::concurrent::Future.successful().*/(2) + if a/*->local8*/ scala::Int#`<`(+3).*/ b/*->local9*/ + } yield a/*->local8*/ + + object Contexts/*<-example::Synthetic#Contexts.*/ { + def foo/*<-example::Synthetic#Contexts.foo().*/(x/*<-example::Synthetic#Contexts.foo().(x)*/: Int/*->scala::Int#*/)(using Int/*->scala::Int#*/) = ???/*->scala::Predef.`???`().*/ + def m1/*<-example::Synthetic#Contexts.m1().*/(using Int/*->scala::Int#*/) = foo/*->example::Synthetic#Contexts.foo().*/(0) + def m2/*<-example::Synthetic#Contexts.m2().*/(using 
x/*<-example::Synthetic#Contexts.m2().(x)*/: Int/*->scala::Int#*/) = foo/*->example::Synthetic#Contexts.foo().*/(0) + def m3/*<-example::Synthetic#Contexts.m3().*/ = + given x/*<-local10*/: Int/*->scala::Int#*/ = 1 + foo/*->example::Synthetic#Contexts.foo().*/(x/*->local10*/) + def m4/*<-example::Synthetic#Contexts.m4().*/ = + given Int/*->scala::Int#*/ = 1 + foo/*->example::Synthetic#Contexts.foo().*/(0) + } } diff --git a/tests/semanticdb/expect/Synthetic.scala b/tests/semanticdb/expect/Synthetic.scala index 484e07098877..10d13e936468 100644 --- a/tests/semanticdb/expect/Synthetic.scala +++ b/tests/semanticdb/expect/Synthetic.scala @@ -47,4 +47,15 @@ class Synthetic { if a < b } yield a + object Contexts { + def foo(x: Int)(using Int) = ??? + def m1(using Int) = foo(0) + def m2(using x: Int) = foo(0) + def m3 = + given x: Int = 1 + foo(x) + def m4 = + given Int = 1 + foo(0) + } } diff --git a/tests/semanticdb/expect/Traits.expect.scala b/tests/semanticdb/expect/Traits.expect.scala index 193390b38826..4cb0d6d65b22 100644 --- a/tests/semanticdb/expect/Traits.expect.scala +++ b/tests/semanticdb/expect/Traits.expect.scala @@ -10,5 +10,5 @@ object U/*<-traits::U.*/ { } class C/*<-traits::C#*/ -trait V/*<-traits::V#*/ { self/*<-local1*/: C/*->traits::C#*/ => +trait V/*<-traits::V#*/ { self/*<-local2*/: C/*->traits::C#*/ => } diff --git a/tests/semanticdb/expect/ValPattern.expect.scala b/tests/semanticdb/expect/ValPattern.expect.scala index f89133f6ef5b..8eac1dc87d87 100644 --- a/tests/semanticdb/expect/ValPattern.expect.scala +++ b/tests/semanticdb/expect/ValPattern.expect.scala @@ -2,20 +2,20 @@ package example class ValPattern/*<-example::ValPattern#*/ { - val (left/*<-example::ValPattern#left.*/, right/*<-example::ValPattern#right.*/) = (/*->scala::Tuple2.apply().*/1, 2) - val Some/*->scala::Some.*//*->scala::Some.unapply().*/(number1/*<-example::ValPattern#number1.*/) = - Some/*->scala::Some.*//*->scala::Some.apply().*/(1) + val 
(left/*<-example::ValPattern#left.*/, right/*<-example::ValPattern#right.*/) = (1, 2) + val Some/*->scala::Some.*/(number1/*<-example::ValPattern#number1.*/) = + Some/*->scala::Some.*/(1) - val List/*->scala::package.List.*//*->scala::collection::SeqFactory#unapplySeq().*/(Some/*->scala::Some.*//*->scala::Some.unapply().*/(q1/*<-example::ValPattern#q1.*/), None/*->scala::None.*/: None/*->scala::None.*/.type, None/*->scala::None.*/) = ???/*->scala::Predef.`???`().*/ + val List/*->scala::package.List.*/(Some/*->scala::Some.*/(q1/*<-example::ValPattern#q1.*/), None/*->scala::None.*/: None/*->scala::None.*/.type, None/*->scala::None.*/) = ???/*->scala::Predef.`???`().*/ - var (leftVar/*<-example::ValPattern#leftVar().*/, rightVar/*<-example::ValPattern#rightVar().*/) = (/*->scala::Tuple2.apply().*/1, 2) - var Some/*->scala::Some.*//*->scala::Some.unapply().*/(number1Var/*<-example::ValPattern#number1Var().*/) = - Some/*->scala::Some.*//*->scala::Some.apply().*/(1) + var (leftVar/*<-example::ValPattern#leftVar().*/, rightVar/*<-example::ValPattern#rightVar().*/) = (1, 2) + var Some/*->scala::Some.*/(number1Var/*<-example::ValPattern#number1Var().*/) = + Some/*->scala::Some.*/(1) def app/*<-example::ValPattern#app().*/(): Unit/*->scala::Unit#*/ = { println/*->scala::Predef.println(+1).*/( ( - /*->scala::Tuple6.apply().*/number1/*->example::ValPattern#number1.*/, + number1/*->example::ValPattern#number1.*/, left/*->example::ValPattern#left.*/, right/*->example::ValPattern#right.*/, number1Var/*->example::ValPattern#number1Var().*/, @@ -24,16 +24,16 @@ class ValPattern/*<-example::ValPattern#*/ { ) ) locally/*->scala::Predef.locally().*/ { - val (left/*<-local0*/, right/*<-local1*/) = (/*->scala::Tuple2.apply().*/1, 2) - val Some/*->scala::Some.*//*->scala::Some.unapply().*/(number1/*<-local2*/) = - Some/*->scala::Some.*//*->scala::Some.apply().*/(1) + val (left/*<-local0*/, right/*<-local1*/) = (1, 2) + val Some/*->scala::Some.*/(number1/*<-local2*/) = + 
Some/*->scala::Some.*/(1) - var (leftVar/*<-local3*/, rightVar/*<-local4*/) = (/*->scala::Tuple2.apply().*/1, 2) - var Some/*->scala::Some.*//*->scala::Some.unapply().*/(number1Var/*<-local5*/) = - Some/*->scala::Some.*//*->scala::Some.apply().*/(1) + var (leftVar/*<-local3*/, rightVar/*<-local4*/) = (1, 2) + var Some/*->scala::Some.*/(number1Var/*<-local5*/) = + Some/*->scala::Some.*/(1) println/*->scala::Predef.println(+1).*/( ( - /*->scala::Tuple6.apply().*/number1/*->local2*/, + number1/*->local2*/, left/*->local0*/, right/*->local1*/, number1Var/*->local5*/, diff --git a/tests/semanticdb/expect/exports-example-Codec.expect.scala b/tests/semanticdb/expect/exports-example-Codec.expect.scala index b2afc55a75f0..712bb96fccc9 100644 --- a/tests/semanticdb/expect/exports-example-Codec.expect.scala +++ b/tests/semanticdb/expect/exports-example-Codec.expect.scala @@ -10,6 +10,6 @@ trait Encoder/*<-exports::example::Encoder#*/[-T/*<-exports::example::Encoder#[T trait Codec/*<-exports::example::Codec#*/[T/*<-exports::example::Codec#[T]*/](decode/*<-exports::example::Codec#decode.*/: Decoder/*->exports::example::Decoder#*/[T/*->exports::example::Codec#[T]*/], encode/*<-exports::example::Codec#encode.*/: Encoder/*->exports::example::Encoder#*/[T/*->exports::example::Codec#[T]*/]) extends Decoder/*->exports::example::Decoder#*/[T/*->exports::example::Codec#[T]*/] with Encoder/*->exports::example::Encoder#*/[T/*->exports::example::Codec#[T]*/] { - export decode/*->exports::example::Codec#decode.*//*->exports::example::Decoder#decode().*//*->exports::example::Codec#decode().(a)*/./*<-exports::example::Codec#decode().*/_ - export encode/*->exports::example::Codec#encode.*//*->exports::example::Encoder#encode().*//*->exports::example::Codec#encode().(t)*/./*<-exports::example::Codec#encode().*/_ + export decode/*->exports::example::Codec#decode.*/._ + export encode/*->exports::example::Codec#encode.*/._ } diff --git a/tests/semanticdb/expect/exports-package.expect.scala 
b/tests/semanticdb/expect/exports-package.expect.scala index 79c0f5b16e53..512b68dcc842 100644 --- a/tests/semanticdb/expect/exports-package.expect.scala +++ b/tests/semanticdb/expect/exports-package.expect.scala @@ -1,3 +1,3 @@ package exports -/*<-exports::`exports-package$package`.*/export example.{Decoder/*<-exports::`exports-package$package`.Decoder#*/, Encoder/*<-exports::`exports-package$package`.Encoder#*/, Codec/*<-exports::`exports-package$package`.Codec#*/} +export example.{Decoder/*<-exports::`exports-package$package`.Decoder#*/, Encoder/*<-exports::`exports-package$package`.Encoder#*/, Codec/*<-exports::`exports-package$package`.Codec#*/} diff --git a/tests/semanticdb/expect/i9727.expect.scala b/tests/semanticdb/expect/i9727.expect.scala index 017da086d4b5..4cbe6761906f 100644 --- a/tests/semanticdb/expect/i9727.expect.scala +++ b/tests/semanticdb/expect/i9727.expect.scala @@ -1,5 +1,5 @@ package i9727 class Test/*<-i9727::Test#*/(a/*<-i9727::Test#a.*/: Int/*->scala::Int#*/) -/*<-i9727::i9727$package.*/val a/*<-i9727::i9727$package.a.*/ = new Test/*->i9727::Test#*/(1) +val a/*<-i9727::i9727$package.a.*/ = new Test/*->i9727::Test#*/(1) val b/*<-i9727::i9727$package.b.*/ = new Test/*->i9727::Test#*/(2) diff --git a/tests/semanticdb/expect/i9782.expect.scala b/tests/semanticdb/expect/i9782.expect.scala new file mode 100644 index 000000000000..2b10a2f37d7f --- /dev/null +++ b/tests/semanticdb/expect/i9782.expect.scala @@ -0,0 +1,20 @@ +// LazyRef +trait Txn/*<-_empty_::Txn#*/[T/*<-_empty_::Txn#[T]*/ <: Txn/*->_empty_::Txn#*/[T/*->_empty_::Txn#[T]*/]] + +trait Elem/*<-_empty_::Elem#*/[T/*<-_empty_::Elem#[T]*/ <: Txn/*->_empty_::Txn#*/[T/*->_empty_::Elem#[T]*/]] + +trait Obj/*<-_empty_::Obj#*/[T/*<-_empty_::Obj#[T]*/ <: Txn/*->_empty_::Txn#*/[T/*->_empty_::Obj#[T]*/]] extends Elem/*->_empty_::Elem#*/[T/*->_empty_::Obj#[T]*/] + +trait Copy/*<-_empty_::Copy#*/[In/*<-_empty_::Copy#[In]*/ <: Txn/*->_empty_::Txn#*/[In/*->_empty_::Copy#[In]*/], 
Out/*<-_empty_::Copy#[Out]*/ <: Txn/*->_empty_::Txn#*/[Out/*->_empty_::Copy#[Out]*/]] { + def copyImpl/*<-_empty_::Copy#copyImpl().*/[Repr/*<-_empty_::Copy#copyImpl().[Repr]*/[~/*<-_empty_::Copy#copyImpl().[Repr][`~`]*/ <: Txn/*->_empty_::Txn#*/[~/*->_empty_::Copy#copyImpl().[Repr][`~`]*/]] <: Elem/*->_empty_::Elem#*/[~/*->_empty_::Copy#copyImpl().[Repr][`~`]*/]](in/*<-_empty_::Copy#copyImpl().(in)*/: Repr/*->_empty_::Copy#copyImpl().[Repr]*/[In/*->_empty_::Copy#[In]*/]): Repr/*->_empty_::Copy#copyImpl().[Repr]*/[Out/*->_empty_::Copy#[Out]*/] + + def apply/*<-_empty_::Copy#apply().*/[Repr/*<-_empty_::Copy#apply().[Repr]*/[~/*<-_empty_::Copy#apply().[Repr][`~`]*/ <: Txn/*->_empty_::Txn#*/[~/*->_empty_::Copy#apply().[Repr][`~`]*/]] <: Elem/*->_empty_::Elem#*/[~/*->_empty_::Copy#apply().[Repr][`~`]*/]](in/*<-_empty_::Copy#apply().(in)*/: Repr/*->_empty_::Copy#apply().[Repr]*/[In/*->_empty_::Copy#[In]*/]): Repr/*->_empty_::Copy#apply().[Repr]*/[Out/*->_empty_::Copy#[Out]*/] = { + val out/*<-local0*/ = copyImpl/*->_empty_::Copy#copyImpl().*/[Repr/*->_empty_::Copy#apply().[Repr]*/](in/*->_empty_::Copy#apply().(in)*/) + (in/*->_empty_::Copy#apply().(in)*/, out/*->local0*/) match { + case (inObj/*<-local1*/: Obj/*->_empty_::Obj#*/[In/*->_empty_::Copy#[In]*/], outObj/*<-local2*/: Obj/*->_empty_::Obj#*/[Out/*->_empty_::Copy#[Out]*/]) => // problem here + println/*->scala::Predef.println(+1).*/("copy the attributes") + case _ => + } + out/*->local0*/ + } +} diff --git a/tests/semanticdb/expect/i9782.scala b/tests/semanticdb/expect/i9782.scala new file mode 100644 index 000000000000..8004300491cd --- /dev/null +++ b/tests/semanticdb/expect/i9782.scala @@ -0,0 +1,20 @@ +// LazyRef +trait Txn[T <: Txn[T]] + +trait Elem[T <: Txn[T]] + +trait Obj[T <: Txn[T]] extends Elem[T] + +trait Copy[In <: Txn[In], Out <: Txn[Out]] { + def copyImpl[Repr[~ <: Txn[~]] <: Elem[~]](in: Repr[In]): Repr[Out] + + def apply[Repr[~ <: Txn[~]] <: Elem[~]](in: Repr[In]): Repr[Out] = { + val out = 
copyImpl[Repr](in) + (in, out) match { + case (inObj: Obj[In], outObj: Obj[Out]) => // problem here + println("copy the attributes") + case _ => + } + out + } +} diff --git a/tests/semanticdb/expect/inlinedefs.expect.scala b/tests/semanticdb/expect/inlinedefs.expect.scala index c0e1a8913c0a..390bec22e632 100644 --- a/tests/semanticdb/expect/inlinedefs.expect.scala +++ b/tests/semanticdb/expect/inlinedefs.expect.scala @@ -12,6 +12,6 @@ object FakePredef/*<-inlinedefs::FakePredef.*/: * sunt in culpa qui officia deserunt mollit anim id est laborum. */ transparent inline final def assert/*<-inlinedefs::FakePredef.assert().*/(inline assertion/*<-inlinedefs::FakePredef.assert().(assertion)*/: Boolean/*->scala::Boolean#*/): Unit/*->scala::Unit#*/ = { - if (!assertion/*->inlinedefs::FakePredef.assert().(assertion)*//*->scala::Boolean#`unary_!`().*/) - throw new java.lang.AssertionError/*->java::lang::AssertionError#*//*->java::lang::AssertionError#``(+2).*/("assertion failed") + if (!assertion/*->inlinedefs::FakePredef.assert().(assertion)*/) + throw new java.lang.AssertionError/*->java::lang::AssertionError#*/("assertion failed") } diff --git a/tests/semanticdb/expect/nullary.expect.scala b/tests/semanticdb/expect/nullary.expect.scala new file mode 100644 index 000000000000..3f0fdb1af7b0 --- /dev/null +++ b/tests/semanticdb/expect/nullary.expect.scala @@ -0,0 +1,20 @@ +abstract class NullaryTest/*<-_empty_::NullaryTest#*/[T/*<-_empty_::NullaryTest#[T]*/, m/*<-_empty_::NullaryTest#[m]*/[s/*<-_empty_::NullaryTest#``().[m][s]*/]] { + def nullary/*<-_empty_::NullaryTest#nullary().*/: String/*->scala::Predef.String#*/ = "a" + val x/*<-_empty_::NullaryTest#x.*/ = nullary/*->_empty_::NullaryTest#nullary().*/ + + def nullary2/*<-_empty_::NullaryTest#nullary2().*/: T/*->_empty_::NullaryTest#[T]*/ + val x2/*<-_empty_::NullaryTest#x2.*/ = nullary2/*->_empty_::NullaryTest#nullary2().*/ + + def nullary3/*<-_empty_::NullaryTest#nullary3().*/: 
m/*->_empty_::NullaryTest#[m]*/[T/*->_empty_::NullaryTest#[T]*/] + val x3/*<-_empty_::NullaryTest#x3.*/ = nullary3/*->_empty_::NullaryTest#nullary3().*/ +} + +class Concrete/*<-_empty_::Concrete#*/ extends NullaryTest/*->_empty_::NullaryTest#*/[Int/*->scala::Int#*/, List/*->scala::package.List#*/] { + def nullary2/*<-_empty_::Concrete#nullary2().*/ = 1 + def nullary3/*<-_empty_::Concrete#nullary3().*/ = List/*->scala::package.List.*/(1,2,3) +} + +object test/*<-_empty_::test.*/ { + (new Concrete/*->_empty_::Concrete#*/).nullary2/*->_empty_::Concrete#nullary2().*/ + (new Concrete/*->_empty_::Concrete#*/).nullary3/*->_empty_::Concrete#nullary3().*/ +} diff --git a/tests/semanticdb/expect/nullary.scala b/tests/semanticdb/expect/nullary.scala new file mode 100644 index 000000000000..d3b5b67ea53e --- /dev/null +++ b/tests/semanticdb/expect/nullary.scala @@ -0,0 +1,20 @@ +abstract class NullaryTest[T, m[s]] { + def nullary: String = "a" + val x = nullary + + def nullary2: T + val x2 = nullary2 + + def nullary3: m[T] + val x3 = nullary3 +} + +class Concrete extends NullaryTest[Int, List] { + def nullary2 = 1 + def nullary3 = List(1,2,3) +} + +object test { + (new Concrete).nullary2 + (new Concrete).nullary3 +} diff --git a/tests/semanticdb/expect/recursion.expect.scala b/tests/semanticdb/expect/recursion.expect.scala index aba74b1b032e..0454bd54eecc 100644 --- a/tests/semanticdb/expect/recursion.expect.scala +++ b/tests/semanticdb/expect/recursion.expect.scala @@ -3,23 +3,23 @@ package recursion object Nats/*<-recursion::Nats.*/ { sealed trait Nat/*<-recursion::Nats.Nat#*/ { - transparent inline def ++/*<-recursion::Nats.Nat#`++`().*/ : Succ/*->recursion::Nats.Succ#*/[this.type] = Succ/*->recursion::Nats.Succ.*//*->recursion::Nats.Succ.apply().*/(this) + transparent inline def ++/*<-recursion::Nats.Nat#`++`().*/ : Succ/*->recursion::Nats.Succ#*/[this.type] = Succ/*->recursion::Nats.Succ.*/(this) transparent inline def +/*<-recursion::Nats.Nat#`+`().*/(inline 
that/*<-recursion::Nats.Nat#`+`().(that)*/: Nat/*->recursion::Nats.Nat#*/): Nat/*->recursion::Nats.Nat#*/ = inline this match { case Zero/*->recursion::Nats.Zero.*/ => that/*->recursion::Nats.Nat#`+`().(that)*/ - case Succ/*->recursion::Nats.Succ.*//*->recursion::Nats.Succ.unapply().*//*->local0*/(p/*<-local1*/) => p/*->local1*/ +/*->recursion::Nats.Nat#`+`().*/ that/*->recursion::Nats.Nat#`+`().(that)*/.++/*->recursion::Nats.Nat#`++`().*/ + case Succ/*->recursion::Nats.Succ.*/(p/*<-local1*/) => p/*->local1*/ +/*->recursion::Nats.Nat#`+`().*/ that/*->recursion::Nats.Nat#`+`().(that)*/.++/*->recursion::Nats.Nat#`++`().*/ } } case object Zero/*<-recursion::Nats.Zero.*/ extends Nat/*->recursion::Nats.Nat#*/ - case class Succ/*<-recursion::Nats.Succ#*/[N/*<-recursion::Nats.Succ#[N]*/ <: Nat](p/*<-recursion::Nats.Succ#p.*/: N/*->recursion::Nats.Succ#[N]*/) extends Nat/*->recursion::Nats.Nat#*/ + case class Succ/*<-recursion::Nats.Succ#*/[N/*<-recursion::Nats.Succ#[N]*/ <: Nat/*->recursion::Nats.Nat#*/](p/*<-recursion::Nats.Succ#p.*/: N/*->recursion::Nats.Succ#[N]*/) extends Nat/*->recursion::Nats.Nat#*/ transparent inline def toIntg/*<-recursion::Nats.toIntg().*/(inline n/*<-recursion::Nats.toIntg().(n)*/: Nat/*->recursion::Nats.Nat#*/): Int/*->scala::Int#*/ = inline n/*->recursion::Nats.toIntg().(n)*/ match { case Zero/*->recursion::Nats.Zero.*/ => 0 - case Succ/*->recursion::Nats.Succ.*//*->recursion::Nats.Succ.unapply().*//*->local2*/(p/*<-local3*/) => toIntg/*->recursion::Nats.toIntg().*/(p/*->local3*/) +/*->scala::Int#`+`(+4).*/ 1 + case Succ/*->recursion::Nats.Succ.*/(p/*<-local3*/) => toIntg/*->recursion::Nats.toIntg().*/(p/*->local3*/) +/*->scala::Int#`+`(+4).*/ 1 } - val j31/*<-recursion::Nats.j31.*/ = toIntg/*->recursion::Nats.toIntg().*/(Zero/*->recursion::Nats.Zero.*/.++.++.++/*<-local4*//*->recursion::Nats.Zero.*//*->recursion::Nats.Nat#`++`().*/ + 
/*<-local5*//*->recursion::Nats.Nat#`++`().*/Zer/*<-local6*//*->recursion::Nats.Nat#`++`().*//*->recursion::Nats.Nat#`+`().*/o/*->recursion::Nats.Zero.*/.++/*->recursion::Nats.Nat#`++`().*/) + val j31/*<-recursion::Nats.j31.*/ = toIntg/*->recursion::Nats.toIntg().*/(Zero/*->recursion::Nats.Zero.*/.++/*->recursion::Nats.Nat#`++`().*/.++/*->recursion::Nats.Nat#`++`().*/.++/*->recursion::Nats.Nat#`++`().*/ +/*->recursion::Nats.Nat#`+`().*/ Zero/*->recursion::Nats.Zero.*/.++/*->recursion::Nats.Nat#`++`().*/) } diff --git a/tests/semanticdb/expect/semanticdb-Flags.expect.scala b/tests/semanticdb/expect/semanticdb-Flags.expect.scala index fe920433e231..99a7cbdddfb6 100644 --- a/tests/semanticdb/expect/semanticdb-Flags.expect.scala +++ b/tests/semanticdb/expect/semanticdb-Flags.expect.scala @@ -2,14 +2,14 @@ package flags import scala.language/*->scala::language.*/.experimental/*->scala::language.experimental.*/.macros/*->scala::language.experimental.macros.*/ -package object p { - p/*<-flags::p::package.*/rivate lazy val x/*<-flags::p::package.x.*/ = 1 +package object p/*<-flags::p::package.*/ { + private lazy val x/*<-flags::p::package.x.*/ = 1 protected implicit var y/*<-flags::p::package.y().*/: Int/*->scala::Int#*/ = 2 def z/*<-flags::p::package.z().*/(pp/*<-flags::p::package.z().(pp)*/: Int/*->scala::Int#*/) = 3 def m/*<-flags::p::package.m().*/[TT/*<-flags::p::package.m().[TT]*/]: Int/*->scala::Int#*/ = macro ???/*->scala::Predef.`???`().*/ abstract class C/*<-flags::p::package.C#*/[+T/*<-flags::p::package.C#[T]*/, -U/*<-flags::p::package.C#[U]*/, V/*<-flags::p::package.C#[V]*/](x/*<-flags::p::package.C#x.*/: T/*->flags::p::package.C#[T]*/, y/*<-flags::p::package.C#y.*/: U/*->flags::p::package.C#[U]*/, z/*<-flags::p::package.C#z.*/: V/*->flags::p::package.C#[V]*/) { - def this()/*<-flags::p::package.C#``(+1).*/ = this(???/*->scala::Predef.`???`().*/, ???/*->scala::Predef.`???`().*/, ???/*->scala::Predef.`???`().*/) - def 
this(t/*<-flags::p::package.C#``(+2).*//*<-flags::p::package.C#``(+2).(t)*/: T/*->flags::p::package.C#[T]*/) = this(t/*->flags::p::package.C#``(+2).(t)*/, ???/*->scala::Predef.`???`().*/, ???/*->scala::Predef.`???`().*/) + def this/*<-flags::p::package.C#``(+1).*/() = this(???/*->scala::Predef.`???`().*/, ???/*->scala::Predef.`???`().*/, ???/*->scala::Predef.`???`().*/) + def this/*<-flags::p::package.C#``(+2).*/(t/*<-flags::p::package.C#``(+2).(t)*/: T/*->flags::p::package.C#[T]*/) = this(t/*->flags::p::package.C#``(+2).(t)*/, ???/*->scala::Predef.`???`().*/, ???/*->scala::Predef.`???`().*/) def w/*<-flags::p::package.C#w().*/: Int/*->scala::Int#*/ } type T1/*<-flags::p::package.T1#*/ = Int/*->scala::Int#*/ @@ -21,7 +21,7 @@ package object p { sealed trait Z/*<-flags::p::package.Z#*/ class AA/*<-flags::p::package.AA#*/(x/*<-flags::p::package.AA#x.*/: Int/*->scala::Int#*/, val y/*<-flags::p::package.AA#y.*/: Int/*->scala::Int#*/, var z/*<-flags::p::package.AA#z().*/: Int/*->scala::Int#*/) class S/*<-flags::p::package.S#*/[@specialized/*->scala::specialized#*/ T/*<-flags::p::package.S#[T]*/] - val List/*->scala::package.List.*//*->scala::collection::SeqFactory#unapplySeq().*/(xs1/*<-flags::p::package.xs1.*/) = ???/*->scala::Predef.`???`().*/ - ???/*->scala::Predef.`???`().*/ match { case List/*->scala::package.List.*//*->scala::collection::SeqFactory#unapplySeq().*/(xs2/*<-local0*/) => ???/*->scala::Predef.`???`().*/ } + val List/*->scala::package.List.*/(xs1/*<-flags::p::package.xs1.*/) = ???/*->scala::Predef.`???`().*/ + ???/*->scala::Predef.`???`().*/ match { case List/*->scala::package.List.*/(xs2/*<-local0*/) => ???/*->scala::Predef.`???`().*/ } ???/*->scala::Predef.`???`().*/ match { case _: List/*->scala::package.List#*/[t/*<-local1*/] => ???/*->scala::Predef.`???`().*/ } } diff --git a/tests/semanticdb/expect/semanticdb-Types.expect.scala b/tests/semanticdb/expect/semanticdb-Types.expect.scala index 122579d42a9e..b39d7aad59af 100644 --- 
a/tests/semanticdb/expect/semanticdb-Types.expect.scala +++ b/tests/semanticdb/expect/semanticdb-Types.expect.scala @@ -62,9 +62,9 @@ object Test/*<-types::Test.*/ { val compoundType1/*<-types::Test.C#compoundType1.*/: { def k/*<-local0*/: Int/*->scala::Int#*/ } = ???/*->scala::Predef.`???`().*/ val compoundType2/*<-types::Test.C#compoundType2.*/: M/*->types::Test.M#*/ with N/*->types::Test.N#*/ = ???/*->scala::Predef.`???`().*/ val compoundType3/*<-types::Test.C#compoundType3.*/: M/*->types::Test.M#*/ with N/*->types::Test.N#*/ { def k/*<-local1*/: Int/*->scala::Int#*/ } = ???/*->scala::Predef.`???`().*/ - val compoundType4/*<-types::Test.C#compoundType4.*/ = new { def k/*<-local3*/: Int/*->scala::Int#*/ = ???/*->scala::Predef.`???`().*/ } + val compoundType4/*<-types::Test.C#compoundType4.*/ = new { def k/*<-local2*/: Int/*->scala::Int#*/ = ???/*->scala::Predef.`???`().*/ } val compoundType5/*<-types::Test.C#compoundType5.*/ = new M/*->types::Test.M#*/ with N/*->types::Test.N#*/ - val compoundType6/*<-types::Test.C#compoundType6.*/ = new M/*->types::Test.M#*/ with N/*->types::Test.N#*/ { def k/*<-local6*/: Int/*->scala::Int#*/ = ???/*->scala::Predef.`???`().*/ } + val compoundType6/*<-types::Test.C#compoundType6.*/ = new M/*->types::Test.M#*/ with N/*->types::Test.N#*/ { def k/*<-local7*/: Int/*->scala::Int#*/ = ???/*->scala::Predef.`???`().*/ } val annType1/*<-types::Test.C#annType1.*/: T/*->types::T#*/ @ann(42) = ???/*->scala::Predef.`???`().*/ val annType2/*<-types::Test.C#annType2.*/: T/*->types::T#*/ @ann1/*->types::ann1#*/ @ann2/*->types::ann2#*/ = ???/*->scala::Predef.`???`().*/ @@ -74,7 +74,7 @@ object Test/*<-types::Test.*/ { val existentialType4/*<-types::Test.C#existentialType4.*/ = Class/*->java::lang::Class#*/.forName/*->java::lang::Class#forName().*/("foo.Bar") def typeLambda1/*<-types::Test.C#typeLambda1().*/[M/*<-types::Test.C#typeLambda1().[M]*/[_]] = ???/*->scala::Predef.`???`().*/ - typeLambda1/*->types::Test.C#typeLambda1().*/[({ type 
L/*<-local7*/[T/*<-local8*/] = List/*->scala::package.List#*/[T/*->local8*/] })#L] + typeLambda1/*->types::Test.C#typeLambda1().*/[({ type L/*<-local11*/[T/*<-local10*/] = List/*->scala::package.List#*/[T/*->local10*/] })#L] object ClassInfoType1/*<-types::Test.C#ClassInfoType1.*/ class ClassInfoType2/*<-types::Test.C#ClassInfoType2#*/ extends B/*->types::B#*/ { def x/*<-types::Test.C#ClassInfoType2#x().*/ = 42 } diff --git a/tests/semanticdb/expect/semanticdb-extract.expect.scala b/tests/semanticdb/expect/semanticdb-extract.expect.scala index 02bd562a718b..a601b65218a9 100644 --- a/tests/semanticdb/expect/semanticdb-extract.expect.scala +++ b/tests/semanticdb/expect/semanticdb-extract.expect.scala @@ -9,7 +9,7 @@ object AnObject/*<-_empty_::AnObject.*/ { "".substring/*->java::lang::String#substring().*/(1) "".substring/*->java::lang::String#substring(+1).*/(1, 2) - List/*->scala::package.List.*//*->scala::collection::IterableFactory#apply().*/(1, 2) + List/*->scala::package.List.*/(1, 2) List/*->scala::package.List.*/.apply/*->scala::collection::IterableFactory#apply().*/() List/*->scala::package.List.*/.`apply`/*->scala::collection::IterableFactory#apply().*/() println/*->scala::Predef.println(+1).*/(1 +/*->scala::Int#`+`(+4).*/ 2) diff --git a/tests/semanticdb/expect/toplevel.expect.scala b/tests/semanticdb/expect/toplevel.expect.scala index a856d673e68c..65160afd7ee6 100644 --- a/tests/semanticdb/expect/toplevel.expect.scala +++ b/tests/semanticdb/expect/toplevel.expect.scala @@ -1,6 +1,8 @@ -/*<-_empty_::toplevel$package.*/inline val a/*<-_empty_::toplevel$package.a.*/ = "" +inline val a/*<-_empty_::toplevel$package.a.*/ = "" extension (x/*<-_empty_::toplevel$package.combine().(x)*/: Int/*->scala::Int#*/) def combine/*<-_empty_::toplevel$package.combine().*/ (y/*<-_empty_::toplevel$package.combine().(y)*/: Int/*->scala::Int#*/) = x/*->_empty_::toplevel$package.combine().(x)*/ +/*->scala::Int#`+`(+4).*/ y/*->_empty_::toplevel$package.combine().(y)*/ def 
combine/*<-_empty_::toplevel$package.combine(+1).*/(x/*<-_empty_::toplevel$package.combine(+1).(x)*/: Int/*->scala::Int#*/, y/*<-_empty_::toplevel$package.combine(+1).(y)*/: Int/*->scala::Int#*/, z/*<-_empty_::toplevel$package.combine(+1).(z)*/: Int/*->scala::Int#*/) = x/*->_empty_::toplevel$package.combine(+1).(x)*/ +/*->scala::Int#`+`(+4).*/ y/*->_empty_::toplevel$package.combine(+1).(y)*/ +/*->scala::Int#`+`(+4).*/ z/*->_empty_::toplevel$package.combine(+1).(z)*/ def combine/*<-_empty_::toplevel$package.combine(+2).*/ = 0 def foo/*<-_empty_::toplevel$package.foo().*/ = "foo" -/*<-_empty_::MyProgram#*//*->_empty_::toplevel$package.MyProgram().*//*->scala::util::CommandLineParser.parseArgument().*//*->_empty_::MyProgram#main().(args)*//*->scala::util::CommandLineParser.FromString.given_FromString_Int.*//*->scala::util::CommandLineParser.showError().*//*->local0*/@main/*->scala::main#*/ def MyProgram/*<-_empty_::toplevel$package.MyProgram().*/(times/*<-_empty_::toplevel$package.MyProgram().(times)*/: Int/*->scala::Int#*/): Unit/*->scala::Unit#*/ = (/*->scala::LowPriorityImplicits#intWrapper().*/1 to/*->scala::runtime::RichInt#to().*/ times/*->_empty_::toplevel$package.MyProgram().(times)*/) foreach/*->scala::collection::immutable::Range#foreach().*/ (_ => println/*->scala::Predef.println(+1).*/("hello")) +@main/*->scala::main#*/ def MyProgram/*<-_empty_::toplevel$package.MyProgram().*/(times/*<-_empty_::toplevel$package.MyProgram().(times)*/: Int/*->scala::Int#*/): Unit/*->scala::Unit#*/ = (1 to/*->scala::runtime::RichInt#to().*/ times/*->_empty_::toplevel$package.MyProgram().(times)*/) foreach/*->scala::collection::immutable::Range#foreach().*/ (_ => println/*->scala::Predef.println(+1).*/("hello")) +@main/*->scala::main#*/ def readInts/*<-_empty_::toplevel$package.readInts().*/(ints/*<-_empty_::toplevel$package.readInts().(ints)*/: Int/*->scala::Int#*/*): Unit/*->scala::Unit#*/ = 
println/*->scala::Predef.println(+1).*/(ints/*->_empty_::toplevel$package.readInts().(ints)*/.mkString/*->scala::collection::IterableOnceOps#mkString(+1).*/(",")) +def fooRef/*<-_empty_::toplevel$package.fooRef().*/ = toplevel$package/*->_empty_::toplevel$package.*/.foo/*->_empty_::toplevel$package.foo().*/ diff --git a/tests/semanticdb/expect/toplevel.scala b/tests/semanticdb/expect/toplevel.scala index 5dae4ccadd5e..c4340efcf212 100644 --- a/tests/semanticdb/expect/toplevel.scala +++ b/tests/semanticdb/expect/toplevel.scala @@ -4,3 +4,5 @@ def combine(x: Int, y: Int, z: Int) = x + y + z def combine = 0 def foo = "foo" @main def MyProgram(times: Int): Unit = (1 to times) foreach (_ => println("hello")) +@main def readInts(ints: Int*): Unit = println(ints.mkString(",")) +def fooRef = toplevel$package.foo diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index 5c741a5acf74..732e9b07aa85 100644 --- a/tests/semanticdb/metac.expect +++ b/tests/semanticdb/metac.expect @@ -7,23 +7,22 @@ Uri => Access.scala Text => empty Language => Scala Symbols => 9 entries -Occurrences => 19 entries +Occurrences => 18 entries Symbols: -example/Access# => class Access -example/Access#``(). => primary ctor -example/Access#m1(). => method m1 -example/Access#m2(). => method m2 -example/Access#m3(). => method m3 -example/Access#m4(). => method m4 -example/Access#m5(). => method m5 -example/Access#m6(). => method m6 -example/Access#m7(). => method m7 +example/Access# => class Access extends Object { self: Access => +8 decls } +example/Access#``(). => primary ctor (): Access +example/Access#m1(). => private[this] method m1 => Nothing +example/Access#m2(). => private[this] method m2 => Nothing +example/Access#m3(). => private[example/Access#] method m3 => Nothing +example/Access#m4(). => protected method m4 => Nothing +example/Access#m5(). => protected[this] method m5 => Nothing +example/Access#m6(). => protected[example/] method m6 => Nothing +example/Access#m7(). 
=> method m7 => Nothing Occurrences: [0:8..0:15): example <- example/ [2:6..2:12): Access <- example/Access# -[3:2..3:2): <- example/Access#``(). [3:14..3:16): m1 <- example/Access#m1(). [3:19..3:22): ??? -> scala/Predef.`???`(). [4:20..4:22): m2 <- example/Access#m2(). @@ -49,44 +48,59 @@ Schema => SemanticDB v4 Uri => Advanced.scala Text => empty Language => Scala -Symbols => 34 entries -Occurrences => 90 entries +Symbols => 48 entries +Occurrences => 105 entries +Synthetics => 3 entries Symbols: -advanced/C# => class C -advanced/C#[T] => typeparam T -advanced/C#``(). => primary ctor -advanced/C#t(). => method t -advanced/Structural# => class Structural -advanced/Structural#``(). => primary ctor -advanced/Structural#s1(). => method s1 -advanced/Structural#s2(). => method s2 -advanced/Structural#s3(). => method s3 -advanced/Test. => final object Test -advanced/Test.e. => val method e -advanced/Test.e1. => val method e1 -advanced/Test.e1x. => val method e1x -advanced/Test.s. => val method s -advanced/Test.s1. => val method s1 -advanced/Test.s1x. => val method s1x -advanced/Test.s2. => val method s2 -advanced/Test.s2x. => val method s2x -advanced/Test.s3. => val method s3 -advanced/Test.s3x. => val method s3x -advanced/Wildcards# => class Wildcards -advanced/Wildcards#``(). => primary ctor -advanced/Wildcards#e1(). => method e1 -local0 => abstract val method x -local1 => abstract val method x -local2 => final class $anon -local3 => val method x -local4 => abstract method m -local5 => param x -local6 => final class $anon -local7 => method m -local8 => param x -local9 => val local e3 -local10 => val local e3x +advanced/C# => class C [typeparam T ] extends Object { self: C[T] => +3 decls } +advanced/C#[T] => typeparam T +advanced/C#``(). => primary ctor [typeparam T ](): C[T] +advanced/C#t(). 
=> method t => T +advanced/HKClass# => class HKClass [typeparam F [typeparam T ] <: ] extends Object { self: HKClass[F] => +3 decls } +advanced/HKClass#[F] => typeparam F [typeparam T ] <: +advanced/HKClass#[F][T] => typeparam T +advanced/HKClass#``(). => primary ctor [typeparam F [typeparam T ] <: ](): HKClass[F] +advanced/HKClass#``().[F][T] => typeparam T +advanced/HKClass#``().[F][U] => typeparam U +advanced/HKClass#foo(). => method foo [typeparam T , typeparam U ](param x: F[T, U]): String +advanced/HKClass#foo().(x) => param x: F[T, U] +advanced/HKClass#foo().[T] => typeparam T +advanced/HKClass#foo().[U] => typeparam U +advanced/Structural# => class Structural extends Object { self: Structural => +5 decls } +advanced/Structural#``(). => primary ctor (): Structural +advanced/Structural#s1(). => method s1 => Object { abstract val method x Int } +advanced/Structural#s2(). => method s2 => Object { abstract val method x Int } +advanced/Structural#s3(). => method s3 => Object { abstract method m (param x: Int): Int } +advanced/Structural#s4(). => method s4 (param a: Int): Object { abstract val method x Int } +advanced/Structural#s4().(a) => param a: Int +advanced/Test. => final object Test extends Object { self: Test.type => +11 decls } +advanced/Test.e. => val method e Wildcards +advanced/Test.e1. => val method e1 List[_] forSome { type _ } +advanced/Test.e1x. => val method e1x Any +advanced/Test.s. => val method s Structural +advanced/Test.s1. => val method s1 Object { abstract val method x Int } +advanced/Test.s1x. => val method s1x Int +advanced/Test.s2. => val method s2 Object { abstract val method x Int } +advanced/Test.s2x. => val method s2x Int +advanced/Test.s3. => val method s3 Object { abstract method m (param x: Int): Int } +advanced/Test.s3x. => val method s3x Int +advanced/Wildcards# => class Wildcards extends Object { self: Wildcards => +3 decls } +advanced/Wildcards#``(). => primary ctor (): Wildcards +advanced/Wildcards#e1(). 
=> method e1 => List[_] forSome { type _ } +advanced/Wildcards#e2(). => method e2 => List[_] forSome { type _ <: Int } +local0 => abstract val method x Int +local1 => abstract val method x Int +local2 => val method x Int +local3 => final class $anon extends Object { self: $anon => +2 decls } +local5 => param x: Int +local6 => abstract method m (param x: Int): Int +local7 => param x: Int +local8 => method m (param x: Int): Int +local9 => final class $anon extends Object { self: $anon => +2 decls } +local11 => abstract val method x Int +local12 => val local e3: List[local13] +local14 => val local e3x: local13 Occurrences: [0:8..0:16): advanced <- advanced/ @@ -101,13 +115,11 @@ Occurrences: [5:21..5:31): Selectable -> scala/reflect/Selectable. [5:32..5:52): reflectiveSelectable -> scala/reflect/Selectable.reflectiveSelectable(). [7:6..7:7): C <- advanced/C# -[7:7..7:7): <- advanced/C#``(). [7:8..7:9): T <- advanced/C#[T] [8:6..8:7): t <- advanced/C#t(). [8:9..8:10): T -> advanced/C#[T] [8:13..8:16): ??? -> scala/Predef.`???`(). [11:6..11:16): Structural <- advanced/Structural# -[12:2..12:2): <- advanced/Structural#``(). [12:6..12:8): s1 <- advanced/Structural#s1(). [12:16..12:17): x <- local0 [12:19..12:22): Int -> scala/Int# @@ -115,70 +127,92 @@ Occurrences: [13:6..13:8): s2 <- advanced/Structural#s2(). [13:16..13:17): x <- local1 [13:19..13:22): Int -> scala/Int# -[13:37..13:38): x <- local3 +[13:37..13:38): x <- local2 [13:40..13:43): Int -> scala/Int# [13:46..13:49): ??? -> scala/Predef.`???`(). [14:6..14:8): s3 <- advanced/Structural#s3(). -[14:16..14:17): m <- local4 +[14:16..14:17): m <- local6 [14:18..14:19): x <- local5 [14:21..14:24): Int -> scala/Int# [14:27..14:30): Int -> scala/Int# -[14:45..14:46): m <- local7 -[14:47..14:48): x <- local8 +[14:45..14:46): m <- local8 +[14:47..14:48): x <- local7 [14:50..14:53): Int -> scala/Int# [14:56..14:59): Int -> scala/Int# [14:62..14:65): ??? -> scala/Predef.`???`(). 
-[17:6..17:15): Wildcards <- advanced/Wildcards# -[18:2..18:2): <- advanced/Wildcards#``(). -[18:6..18:8): e1 <- advanced/Wildcards#e1(). -[18:10..18:14): List -> scala/package.List# -[18:20..18:23): ??? -> scala/Predef.`???`(). -[21:7..21:11): Test <- advanced/Test. -[22:6..22:7): s <- advanced/Test.s. -[22:14..22:24): Structural -> advanced/Structural# -[22:24..22:24): -> advanced/Structural#``(). -[23:6..23:8): s1 <- advanced/Test.s1. -[23:11..23:12): s -> advanced/Test.s. -[23:13..23:15): s1 -> advanced/Structural#s1(). -[24:6..24:9): s1x <- advanced/Test.s1x. -[24:12..24:12): -> scala/reflect/Selectable.reflectiveSelectable(). -[24:12..24:13): s -> advanced/Test.s. -[24:14..24:16): s1 -> advanced/Structural#s1(). -[24:16..24:16): -> scala/reflect/Selectable#selectDynamic(). -[25:6..25:8): s2 <- advanced/Test.s2. +[15:6..15:8): s4 <- advanced/Structural#s4(). +[15:9..15:10): a <- advanced/Structural#s4().(a) +[15:12..15:15): Int -> scala/Int# +[15:24..15:25): x <- local11 +[15:27..15:30): Int -> scala/Int# +[15:35..15:38): ??? -> scala/Predef.`???`(). +[18:6..18:15): Wildcards <- advanced/Wildcards# +[19:6..19:8): e1 <- advanced/Wildcards#e1(). +[19:10..19:14): List -> scala/package.List# +[19:20..19:23): ??? -> scala/Predef.`???`(). +[20:6..20:8): e2 <- advanced/Wildcards#e2(). +[20:10..20:14): List -> scala/package.List# +[20:20..20:23): Int -> scala/Int# +[20:27..20:30): ??? -> scala/Predef.`???`(). +[23:7..23:11): Test <- advanced/Test. +[24:6..24:7): s <- advanced/Test.s. +[24:14..24:24): Structural -> advanced/Structural# +[25:6..25:8): s1 <- advanced/Test.s1. [25:11..25:12): s -> advanced/Test.s. -[25:13..25:15): s2 -> advanced/Structural#s2(). -[26:6..26:9): s2x <- advanced/Test.s2x. -[26:12..26:12): -> scala/reflect/Selectable.reflectiveSelectable(). +[25:13..25:15): s1 -> advanced/Structural#s1(). +[26:6..26:9): s1x <- advanced/Test.s1x. [26:12..26:13): s -> advanced/Test.s. -[26:14..26:16): s2 -> advanced/Structural#s2(). 
-[26:16..26:16): -> scala/reflect/Selectable#selectDynamic(). -[27:6..27:8): s3 <- advanced/Test.s3. +[26:14..26:16): s1 -> advanced/Structural#s1(). +[27:6..27:8): s2 <- advanced/Test.s2. [27:11..27:12): s -> advanced/Test.s. -[27:13..27:15): s3 -> advanced/Structural#s3(). -[28:6..28:9): s3x <- advanced/Test.s3x. -[28:12..28:12): -> scala/reflect/Selectable.reflectiveSelectable(). +[27:13..27:15): s2 -> advanced/Structural#s2(). +[28:6..28:9): s2x <- advanced/Test.s2x. [28:12..28:13): s -> advanced/Test.s. -[28:14..28:16): s3 -> advanced/Structural#s3(). -[28:16..28:16): -> scala/reflect/Selectable#applyDynamic(). -[28:19..28:22): ??? -> scala/Predef.`???`(). -[30:6..30:7): e <- advanced/Test.e. -[30:14..30:23): Wildcards -> advanced/Wildcards# -[30:23..30:23): -> advanced/Wildcards#``(). -[31:6..31:8): e1 <- advanced/Test.e1. -[31:11..31:12): e -> advanced/Test.e. -[31:13..31:15): e1 -> advanced/Wildcards#e1(). -[32:6..32:9): e1x <- advanced/Test.e1x. -[32:12..32:13): e -> advanced/Test.e. -[32:14..32:16): e1 -> advanced/Wildcards#e1(). -[32:17..32:21): head -> scala/collection/IterableOps#head(). -[35:5..35:8): ??? -> scala/Predef.`???`(). -[35:11..35:14): Any -> scala/Any# -[36:11..36:13): e3 <- local9 -[36:15..36:19): List -> scala/package.List# -[37:12..37:15): e3x <- local10 -[37:18..37:20): e3 -> local9 -[37:21..37:25): head -> scala/collection/IterableOps#head(). +[28:14..28:16): s2 -> advanced/Structural#s2(). +[29:6..29:8): s3 <- advanced/Test.s3. +[29:11..29:12): s -> advanced/Test.s. +[29:13..29:15): s3 -> advanced/Structural#s3(). +[30:6..30:9): s3x <- advanced/Test.s3x. +[30:12..30:13): s -> advanced/Test.s. +[30:14..30:16): s3 -> advanced/Structural#s3(). +[30:19..30:22): ??? -> scala/Predef.`???`(). +[32:6..32:7): e <- advanced/Test.e. +[32:14..32:23): Wildcards -> advanced/Wildcards# +[33:6..33:8): e1 <- advanced/Test.e1. +[33:11..33:12): e -> advanced/Test.e. +[33:13..33:15): e1 -> advanced/Wildcards#e1(). 
+[34:6..34:9): e1x <- advanced/Test.e1x. +[34:12..34:13): e -> advanced/Test.e. +[34:14..34:16): e1 -> advanced/Wildcards#e1(). +[34:17..34:21): head -> scala/collection/IterableOps#head(). +[37:5..37:8): ??? -> scala/Predef.`???`(). +[37:11..37:14): Any -> scala/Any# +[38:11..38:13): e3 <- local12 +[38:15..38:19): List -> scala/package.List# +[39:12..39:15): e3x <- local14 +[39:18..39:20): e3 -> local12 +[39:21..39:25): head -> scala/collection/IterableOps#head(). +[47:6..47:13): HKClass <- advanced/HKClass# +[47:14..47:15): F <- advanced/HKClass#[F] +[47:20..47:21): T <- advanced/HKClass#``().[F][T] +[47:28..47:29): U <- advanced/HKClass#``().[F][U] +[47:36..47:37): U -> advanced/HKClass#``().[F][U] +[47:39..47:40): T -> advanced/HKClass#``().[F][T] +[48:6..48:9): foo <- advanced/HKClass#foo(). +[48:10..48:11): T <- advanced/HKClass#foo().[T] +[48:12..48:13): U <- advanced/HKClass#foo().[U] +[48:15..48:16): x <- advanced/HKClass#foo().(x) +[48:18..48:19): F -> advanced/HKClass#[F] +[48:20..48:21): T -> advanced/HKClass#foo().[T] +[48:23..48:24): U -> advanced/HKClass#foo().[U] +[48:28..48:34): String -> scala/Predef.String# +[48:37..48:38): x -> advanced/HKClass#foo().(x) +[48:39..48:47): toString -> scala/Tuple2#toString(). + +Synthetics: +[26:12..26:16):s.s1 => reflectiveSelectable(*) +[28:12..28:16):s.s2 => reflectiveSelectable(*) +[30:12..30:16):s.s3 => reflectiveSelectable(*) expect/Annotations.scala ------------------------ @@ -188,32 +222,34 @@ Schema => SemanticDB v4 Uri => Annotations.scala Text => empty Language => Scala -Symbols => 22 entries +Symbols => 23 entries Occurrences => 52 entries +Synthetics => 2 entries Symbols: -annot/Alias. => final object Alias -annot/Alias.A# => type A -annot/Annotations# => class Annotations -annot/Annotations#S# => type S -annot/Annotations#[T] => typeparam T -annot/Annotations#``(). => primary ctor -annot/Annotations#``().(x) => param x -annot/Annotations#field. => val method field -annot/Annotations#method(). 
=> method method -annot/Annotations#x. => val method x -annot/B# => class B -annot/B#``(). => primary ctor -annot/B#``().(x) => param x -annot/B#``(+1). => ctor -annot/B#x. => val method x -annot/M. => final object M -annot/M.m(). => macro m -annot/M.m().[TT] => typeparam TT -annot/T# => trait T -annot/T#``(). => primary ctor -local0 => selfparam self -local1 => val local local +annot/Alias. => final object Alias extends Object { self: Alias.type => +2 decls } +annot/Alias.A# => type A = ClassAnnotation @param +annot/Annotations# => @ClassAnnotation class Annotations [@TypeParameterAnnotation typeparam T ] extends Object { self: AnyRef & Annotations[T] => +6 decls } +annot/Annotations#S# => @TypeAnnotation type S +annot/Annotations#[T] => @TypeParameterAnnotation typeparam T +annot/Annotations#``(). => primary ctor [@TypeParameterAnnotation typeparam T ](@ParameterAnnotation param x: T): Annotations[T] +annot/Annotations#``().(x) => @ParameterAnnotation param x: T +annot/Annotations#field. => @FieldAnnotation val method field Int +annot/Annotations#method(). => @MethodAnnotation method method => Int +annot/Annotations#x. => @ParameterAnnotation private[this] val method x T +annot/B# => class B extends Object { self: B => +4 decls } +annot/B#``(). => @ConstructorAnnotation primary ctor (param x: Int): B +annot/B#``().(x) => param x: Int +annot/B#``(+1). => @ConstructorAnnotation ctor (): B +annot/B#throwing(). => @throws[Exception] method throwing => Nothing +annot/B#x. => private[this] val method x Int +annot/M. => @ObjectAnnotation final object M extends Object { self: M.type => +1 decls } +annot/M.m(). => @MacroAnnotation macro m [typeparam TT ]: Int +annot/M.m().[TT] => typeparam TT +annot/T# => @TraitAnnotation trait T extends Object { self: T => +1 decls } +annot/T#``(). 
=> primary ctor (): T +local0 => selfparam self: AnyRef +local1 => @LocalAnnotation val local local: Int Occurrences: [0:8..0:13): annot <- annot/ @@ -229,7 +265,6 @@ Occurrences: [4:35..4:41): macros -> scala/language.experimental.macros. [6:1..6:16): ClassAnnotation -> com/javacp/annot/ClassAnnotation# [7:6..7:17): Annotations <- annot/Annotations# -[7:17..7:17): <- annot/Annotations#``(). [7:19..7:42): TypeParameterAnnotation -> com/javacp/annot/TypeParameterAnnotation# [7:43..7:44): T <- annot/Annotations#[T] [7:47..7:66): ParameterAnnotation -> com/javacp/annot/ParameterAnnotation# @@ -247,27 +282,32 @@ Occurrences: [17:3..17:17): TypeAnnotation -> com/javacp/annot/TypeAnnotation# [18:7..18:8): S <- annot/Annotations#S# [21:6..21:7): B <- annot/B# -[21:7..21:7): <- annot/B#``(). [21:9..21:30): ConstructorAnnotation -> com/javacp/annot/ConstructorAnnotation# [21:33..21:34): x <- annot/B#x. [21:36..21:39): Int -> scala/Int# [22:3..22:24): ConstructorAnnotation -> com/javacp/annot/ConstructorAnnotation# -[23:6..23:12): <- annot/B#``(+1). -[23:20..23:20): -> annot/B#``(). -[26:1..26:17): ObjectAnnotation -> com/javacp/annot/ObjectAnnotation# -[27:7..27:8): M <- annot/M. -[28:3..28:18): MacroAnnotation -> com/javacp/annot/MacroAnnotation# -[29:6..29:7): m <- annot/M.m(). -[29:8..29:10): TT <- annot/M.m().[TT] -[29:13..29:16): Int -> scala/Int# -[29:25..29:28): ??? -> scala/Predef.`???`(). -[32:0..32:0): <- annot/T#``(). -[32:1..32:16): TraitAnnotation -> com/javacp/annot/TraitAnnotation# -[33:6..33:7): T <- annot/T# -[35:7..35:12): Alias <- annot/Alias. -[36:7..36:8): A <- annot/Alias.A# -[36:11..36:26): ClassAnnotation -> com/javacp/annot/ClassAnnotation# -[36:28..36:33): param -> scala/annotation/meta/param# +[23:6..23:10): <- annot/B#``(+1). +[25:3..25:9): throws -> scala/throws# +[25:10..25:19): Exception -> scala/package.Exception# +[26:6..26:14): throwing <- annot/B#throwing(). 
+[26:27..26:36): Exception -> scala/package.Exception# +[29:1..29:17): ObjectAnnotation -> com/javacp/annot/ObjectAnnotation# +[30:7..30:8): M <- annot/M. +[31:3..31:18): MacroAnnotation -> com/javacp/annot/MacroAnnotation# +[32:6..32:7): m <- annot/M.m(). +[32:8..32:10): TT <- annot/M.m().[TT] +[32:13..32:16): Int -> scala/Int# +[32:25..32:28): ??? -> scala/Predef.`???`(). +[35:1..35:16): TraitAnnotation -> com/javacp/annot/TraitAnnotation# +[36:6..36:7): T <- annot/T# +[38:7..38:12): Alias <- annot/Alias. +[39:7..39:8): A <- annot/Alias.A# +[39:11..39:26): ClassAnnotation -> com/javacp/annot/ClassAnnotation# +[39:28..39:33): param -> scala/annotation/meta/param# + +Synthetics: +[25:2..25:20):@throws[Exception] => *[Exception] +[25:2..25:20):@throws[Exception] => *[Exception] expect/Anonymous.scala ---------------------- @@ -277,23 +317,25 @@ Schema => SemanticDB v4 Uri => Anonymous.scala Text => empty Language => Scala -Symbols => 13 entries -Occurrences => 32 entries +Symbols => 14 entries +Occurrences => 30 entries +Synthetics => 2 entries Symbols: -example/Anonymous# => class Anonymous -example/Anonymous#Foo# => trait Foo -example/Anonymous#Foo#``(). => primary ctor -example/Anonymous#``(). => primary ctor -example/Anonymous#foo. => val method foo -example/Anonymous#locally(). => method locally -example/Anonymous#locally().(x) => param x -example/Anonymous#locally().[A] => typeparam A -example/Anonymous#m1(). => method m1 -example/Anonymous#m1().[T] => typeparam T -example/Anonymous#m2(). => method m2 -local0 => val local x -local1 => final class $anon +example/Anonymous# => class Anonymous extends Object { self: Anonymous & Anonymous => +6 decls } +example/Anonymous#Foo# => trait Foo extends Object { self: Foo => +1 decls } +example/Anonymous#Foo#``(). => primary ctor (): Foo +example/Anonymous#``(). => primary ctor (): Anonymous +example/Anonymous#foo. => val method foo Foo +example/Anonymous#locally(). 
=> method locally [typeparam A ](param x: A): A +example/Anonymous#locally().(x) => param x: A +example/Anonymous#locally().[A] => typeparam A +example/Anonymous#m1(). => method m1 [typeparam T [type _ ]]: Nothing +example/Anonymous#m1().[T] => typeparam T [type _ ] +example/Anonymous#m1().[T][_] => type _ +example/Anonymous#m2(). => method m2 => Map[_, List[_] forSome { type _ }] forSome { type _ } +local0 => val local x: Function1[Int, Int] +local1 => final class $anon extends Object with Foo { self: $anon => +1 decls } Occurrences: [0:8..0:15): example <- example/ @@ -301,7 +343,6 @@ Occurrences: [1:13..1:21): language -> scala/language. [1:22..1:33): higherKinds -> scala/language.higherKinds. [3:6..3:15): Anonymous <- example/Anonymous# -[4:2..4:2): <- example/Anonymous#``(). [4:8..4:17): Anonymous -> example/Anonymous# [6:6..6:13): locally <- example/Anonymous#locally(). [6:14..6:15): A <- example/Anonymous#locally().[A] @@ -324,11 +365,14 @@ Occurrences: [14:11..14:14): Int -> scala/Int# [14:18..14:21): Int -> scala/Int# [14:29..14:32): ??? -> scala/Predef.`???`(). -[17:2..17:2): <- example/Anonymous#Foo#``(). [17:8..17:11): Foo <- example/Anonymous#Foo# [18:6..18:9): foo <- example/Anonymous#foo. [18:16..18:19): Foo -> example/Anonymous#Foo# +Synthetics: +[10:2..10:9):locally => *[Unit] +[13:2..13:9):locally => *[Unit] + expect/AnonymousGiven.scala --------------------------- @@ -338,20 +382,18 @@ Uri => AnonymousGiven.scala Text => empty Language => Scala Symbols => 5 entries -Occurrences => 6 entries +Occurrences => 4 entries Symbols: -angiven/AnonymousGiven$package. => final package object angiven -angiven/AnonymousGiven$package.bar(). => method bar -angiven/AnonymousGiven$package.bar().(x$1) => implicit param x$1 -angiven/Foo# => trait Foo -angiven/Foo#``(). => primary ctor +angiven/AnonymousGiven$package. => final package object angiven extends Object { self: angiven.type => +2 decls } +angiven/AnonymousGiven$package.bar(). 
=> method bar (implicit given param x$1: Foo): Int +angiven/AnonymousGiven$package.bar().(x$1) => implicit given param x$1: Foo +angiven/Foo# => trait Foo extends Object { self: Foo => +1 decls } +angiven/Foo#``(). => primary ctor (): Foo Occurrences: [0:8..0:15): angiven <- angiven/ -[2:0..2:0): <- angiven/Foo#``(). [2:6..2:9): Foo <- angiven/Foo# -[4:0..4:0): <- angiven/AnonymousGiven$package. [4:4..4:7): bar <- angiven/AnonymousGiven$package.bar(). [4:14..4:17): Foo -> angiven/Foo# @@ -364,118 +406,119 @@ Uri => Classes.scala Text => empty Language => Scala Symbols => 109 entries -Occurrences => 130 entries +Occurrences => 113 entries +Synthetics => 2 entries Symbols: -classes/C1# => final class C1 -classes/C1#``(). => primary ctor -classes/C1#``().(x1) => val param x1 -classes/C1#x1. => val method x1 -classes/C1. => final object C1 -classes/C2# => final class C2 -classes/C2#``(). => primary ctor -classes/C2#``().(x2) => val param x2 -classes/C2#x2. => val method x2 -classes/C2. => final object C2 -classes/C3# => case class C3 -classes/C3#_1(). => method _1 -classes/C3#``(). => primary ctor -classes/C3#``().(x) => val param x -classes/C3#copy$default$1(). => method copy$default$1 -classes/C3#copy(). => method copy -classes/C3#copy().(x) => param x -classes/C3#x. => val method x -classes/C3. => final object C3 -classes/C3.apply(). => method apply -classes/C3.apply().(x) => param x -classes/C3.toString(). => method toString -classes/C3.unapply(). => method unapply -classes/C3.unapply().(x$1) => param x$1 -classes/C4# => case class C4 -classes/C4#_1(). => method _1 -classes/C4#``(). => primary ctor -classes/C4#``().(x) => val param x -classes/C4#copy$default$1(). => method copy$default$1 -classes/C4#copy(). => method copy -classes/C4#copy().(x) => param x -classes/C4#x. => val method x -classes/C4. => final object C4 -classes/C4.apply(). => method apply -classes/C4.apply().(x) => param x -classes/C4.toString(). => method toString -classes/C4.unapply(). 
=> method unapply -classes/C4.unapply().(x$1) => param x$1 -classes/C6# => case class C6 -classes/C6#_1(). => method _1 -classes/C6#``(). => primary ctor -classes/C6#``().(x) => param x -classes/C6#copy$default$1(). => method copy$default$1 -classes/C6#copy(). => method copy -classes/C6#copy().(x) => param x -classes/C6#x. => val method x -classes/C6. => final object C6 -classes/C6.apply(). => method apply -classes/C6.apply().(x) => param x -classes/C6.toString(). => method toString -classes/C6.unapply(). => method unapply -classes/C6.unapply().(x$1) => param x$1 -classes/C7# => class C7 -classes/C7#``(). => primary ctor -classes/C7#``().(x) => param x -classes/C7#x. => val method x -classes/C8# => class C8 -classes/C8#``(). => primary ctor -classes/C8#``().(x) => param x -classes/C8#x. => val method x -classes/C9# => class C9 -classes/C9#``(). => primary ctor -classes/C9#``().(x) => param x -classes/C9#x(). => var method x -classes/C10# => class C10 -classes/C10#``(). => primary ctor -classes/C10#``().(s) => param s -classes/C10#s. => val method s -classes/C11# => class C11 -classes/C11#``(). => primary ctor -classes/C11#foo(). => macro foo -classes/C11#foo(). => macro foo -classes/C12# => class C12 -classes/C12#Context# => class Context -classes/C12#Context#Expr# => type Expr -classes/C12#Context#Expr#[T] => typeparam T -classes/C12#Context#``(). => primary ctor -classes/C12#``(). => primary ctor -classes/C12#foo1(). => macro foo1 -classes/C12#foo1(). => method foo1 -classes/C12#foo1().(x) => param x -classes/C12#foo1().(x) => param x -classes/C12#foo1Impl(). => method foo1Impl -classes/C12#foo1Impl().(context) => param context -classes/C12#foo1Impl().(x) => param x -classes/C12#foo2(). => macro foo2 -classes/C12#foo2(). => method foo2 -classes/C12#foo2().(x) => param x -classes/C12#foo2().(x) => param x -classes/C12#foo2().(y) => param y -classes/C12#foo2().(y) => param y -classes/C12#foo2Impl(). 
=> method foo2Impl -classes/C12#foo2Impl().(context) => param context -classes/C12#foo2Impl().(x) => param x -classes/C12#foo2Impl().(y) => param y -classes/M. => final object M -classes/M.C5# => class C5 -classes/M.C5#``(). => primary ctor -classes/M.C5#``().(x) => param x -classes/M.C5#x. => val method x -classes/M.C5(). => final implicit method C5 -classes/M.C5().(x) => param x -classes/N. => final object N -classes/N.anonClass. => val method anonClass -classes/N.anonFun. => val method anonFun -local0 => final class $anon -local1 => val method local -local2 => param i -local3 => val local local +classes/C1# => final class C1 extends AnyVal { self: C1 => +2 decls } +classes/C1#``(). => primary ctor (val param x1: Int): C1 +classes/C1#``().(x1) => val param x1: Int +classes/C1#x1. => val method x1 Int +classes/C1. => final object C1 extends Object { self: C1.type => +2 decls } +classes/C2# => final class C2 extends AnyVal { self: C2 => +2 decls } +classes/C2#``(). => primary ctor (val param x2: Int): C2 +classes/C2#``().(x2) => val param x2: Int +classes/C2#x2. => val method x2 Int +classes/C2. => final object C2 extends Object { self: C2.type => +2 decls } +classes/C3# => case class C3 extends Object with Product with Serializable { self: C3 => +5 decls } +classes/C3#_1(). => method _1 => Int +classes/C3#``(). => primary ctor (val param x: Int): C3 +classes/C3#``().(x) => val param x: Int +classes/C3#copy$default$1(). => method copy$default$1 => Int @uncheckedVariance +classes/C3#copy(). => method copy (param x: Int): C3 +classes/C3#copy().(x) => param x: Int +classes/C3#x. => val method x Int +classes/C3. => final object C3 extends Object { self: C3.type => +4 decls } +classes/C3.apply(). => method apply (param x: Int): C3 +classes/C3.apply().(x) => param x: Int +classes/C3.toString(). => method toString => String <: scala/Any#toString(). +classes/C3.unapply(). 
=> method unapply (param x$1: C3): C3 +classes/C3.unapply().(x$1) => param x$1: C3 +classes/C4# => case class C4 extends Object with Product with Serializable { self: C4 => +5 decls } +classes/C4#_1(). => method _1 => Int +classes/C4#``(). => primary ctor (val param x: Int): C4 +classes/C4#``().(x) => val param x: Int +classes/C4#copy$default$1(). => method copy$default$1 => Int @uncheckedVariance +classes/C4#copy(). => method copy (param x: Int): C4 +classes/C4#copy().(x) => param x: Int +classes/C4#x. => val method x Int +classes/C4. => final object C4 extends Object { self: C4.type => +4 decls } +classes/C4.apply(). => method apply (param x: Int): C4 +classes/C4.apply().(x) => param x: Int +classes/C4.toString(). => method toString => String <: scala/Any#toString(). +classes/C4.unapply(). => method unapply (param x$1: C4): C4 +classes/C4.unapply().(x$1) => param x$1: C4 +classes/C6# => case class C6 extends Object with Product with Serializable { self: C6 => +5 decls } +classes/C6#_1(). => method _1 => Int +classes/C6#``(). => primary ctor (param x: Int): C6 +classes/C6#``().(x) => param x: Int +classes/C6#copy$default$1(). => method copy$default$1 => Int @uncheckedVariance +classes/C6#copy(). => method copy (param x: Int): C6 +classes/C6#copy().(x) => param x: Int +classes/C6#x. => private val method x Int +classes/C6. => final object C6 extends Object { self: C6.type => +4 decls } +classes/C6.apply(). => method apply (param x: Int): C6 +classes/C6.apply().(x) => param x: Int +classes/C6.toString(). => method toString => String <: scala/Any#toString(). +classes/C6.unapply(). => method unapply (param x$1: C6): C6 +classes/C6.unapply().(x$1) => param x$1: C6 +classes/C7# => class C7 extends Object { self: C7 => +2 decls } +classes/C7#``(). => primary ctor (param x: Int): C7 +classes/C7#``().(x) => param x: Int +classes/C7#x. => private[this] val method x Int +classes/C8# => class C8 extends Object { self: C8 => +2 decls } +classes/C8#``(). 
=> primary ctor (param x: Int): C8 +classes/C8#``().(x) => param x: Int +classes/C8#x. => private[this] val method x Int +classes/C9# => class C9 extends Object { self: C9 => +2 decls } +classes/C9#``(). => primary ctor (param x: Int): C9 +classes/C9#``().(x) => param x: Int +classes/C9#x(). => private[this] var method x Int +classes/C10# => class C10 extends Object { self: C10 => +2 decls } +classes/C10#``(). => primary ctor (param s: => String): C10 +classes/C10#``().(s) => param s: => String +classes/C10#s. => private[this] val method s => String +classes/C11# => class C11 extends Object { self: C11 => +2 decls } +classes/C11#``(). => primary ctor (): C11 +classes/C11#foo(). => macro foo => Int +classes/C11#foo(). => inline macro foo => Int +classes/C12# => class C12 extends Object { self: C12 => +8 decls } +classes/C12#Context# => class Context extends Object { self: Context => +2 decls } +classes/C12#Context#Expr# => type Expr [typeparam T ] +classes/C12#Context#Expr#[T] => typeparam T +classes/C12#Context#``(). => primary ctor (): Context +classes/C12#``(). => primary ctor (): C12 +classes/C12#foo1(). => macro foo1 (param x: Int): Int +classes/C12#foo1(). => method foo1 (param x: Int): Int +classes/C12#foo1().(x) => param x: Int +classes/C12#foo1().(x) => param x: Int +classes/C12#foo1Impl(). => method foo1Impl (param context: Context)(param x: context.Expr[Int]): context.Expr[Int] +classes/C12#foo1Impl().(context) => param context: Context +classes/C12#foo1Impl().(x) => param x: context.Expr[Int] +classes/C12#foo2(). => macro foo2 (param x: Int, param y: String): Int +classes/C12#foo2(). => method foo2 (param x: Int, param y: String): Int +classes/C12#foo2().(x) => param x: Int +classes/C12#foo2().(x) => param x: Int +classes/C12#foo2().(y) => param y: String +classes/C12#foo2().(y) => param y: String +classes/C12#foo2Impl(). 
=> method foo2Impl (param context: Context)(param x: context.Expr[Int], param y: context.Expr[String]): context.Expr[Int] +classes/C12#foo2Impl().(context) => param context: Context +classes/C12#foo2Impl().(x) => param x: context.Expr[Int] +classes/C12#foo2Impl().(y) => param y: context.Expr[String] +classes/M. => final object M extends Object { self: M.type => +3 decls } +classes/M.C5# => implicit class C5 extends Object { self: C5 => +2 decls } +classes/M.C5#``(). => primary ctor (param x: Int): C5 +classes/M.C5#``().(x) => param x: Int +classes/M.C5#x. => private[this] val method x Int +classes/M.C5(). => final implicit method C5 (param x: Int): C5 +classes/M.C5().(x) => param x: Int +classes/N. => final object N extends Object { self: N.type => +3 decls } +classes/N.anonClass. => val method anonClass C7 +classes/N.anonFun. => val method anonFun List[Int] +local0 => val method local Nothing +local1 => final class $anon extends C7 { self: $anon => +2 decls } +local3 => param i: Int +local4 => val local local: Int Occurrences: [0:8..0:15): classes <- classes/ @@ -484,54 +527,41 @@ Occurrences: [1:22..1:34): experimental -> scala/language.experimental. [1:35..1:41): macros -> scala/language.experimental.macros. [2:6..2:8): C1 <- classes/C1# -[2:8..2:8): <- classes/C1#``(). [2:13..2:15): x1 <- classes/C1#x1. [2:17..2:20): Int -> scala/Int# [2:30..2:36): AnyVal -> scala/AnyVal# -[2:36..2:36): -> scala/AnyVal#``(). [4:6..4:8): C2 <- classes/C2# -[4:8..4:8): <- classes/C2#``(). [4:13..4:15): x2 <- classes/C2#x2. [4:17..4:20): Int -> scala/Int# [4:30..4:36): AnyVal -> scala/AnyVal# -[4:36..4:36): -> scala/AnyVal#``(). [5:7..5:9): C2 <- classes/C2. [7:11..7:13): C3 <- classes/C3# -[7:13..7:13): <- classes/C3#``(). [7:14..7:15): x <- classes/C3#x. [7:17..7:20): Int -> scala/Int# [9:11..9:13): C4 <- classes/C4# -[9:13..9:13): <- classes/C4#``(). [9:14..9:15): x <- classes/C4#x. [9:17..9:20): Int -> scala/Int# [10:7..10:9): C4 <- classes/C4. [12:7..12:8): M <- classes/M. 
[13:17..13:19): C5 <- classes/M.C5# -[13:19..13:19): <- classes/M.C5#``(). [13:20..13:21): x <- classes/M.C5#x. [13:23..13:26): Int -> scala/Int# [16:11..16:13): C6 <- classes/C6# -[16:13..16:13): <- classes/C6#``(). [16:26..16:27): x <- classes/C6#x. [16:29..16:32): Int -> scala/Int# [18:6..18:8): C7 <- classes/C7# -[18:8..18:8): <- classes/C7#``(). [18:9..18:10): x <- classes/C7#x. [18:12..18:15): Int -> scala/Int# [20:6..20:8): C8 <- classes/C8# -[20:8..20:8): <- classes/C8#``(). [20:27..20:28): x <- classes/C8#x. [20:30..20:33): Int -> scala/Int# [22:6..22:8): C9 <- classes/C9# -[22:8..22:8): <- classes/C9#``(). [22:27..22:28): x <- classes/C9#x(). [22:30..22:33): Int -> scala/Int# [24:6..24:9): C10 <- classes/C10# -[24:9..24:9): <- classes/C10#``(). [24:10..24:11): s <- classes/C10#s. [24:16..24:22): String -> scala/Predef.String# [26:6..26:9): C11 <- classes/C11# -[27:2..27:2): <- classes/C11#``(). [27:6..27:9): foo <- classes/C11#foo(). [27:11..27:14): Int -> scala/Int# [27:23..27:26): ??? -> scala/Predef.`???`(). @@ -539,9 +569,7 @@ Occurrences: [28:18..28:21): Int -> scala/Int# [28:24..28:27): ??? -> scala/Predef.`???`(). [31:6..31:9): C12 <- classes/C12# -[33:2..33:2): <- classes/C12#``(). [33:8..33:15): Context <- classes/C12#Context# -[34:4..34:4): <- classes/C12#Context#``(). [34:9..34:13): Expr <- classes/C12#Context#Expr# [34:14..34:15): T <- classes/C12#Context#Expr#[T] [36:6..36:10): foo1 <- classes/C12#foo1(). @@ -597,18 +625,20 @@ Occurrences: [47:7..47:8): N <- classes/N. [48:6..48:15): anonClass <- classes/N.anonClass. [48:22..48:24): C7 -> classes/C7# -[48:24..48:24): -> classes/C7#``(). -[49:8..49:13): local <- local1 +[49:8..49:13): local <- local0 [49:16..49:19): ??? -> scala/Predef.`???`(). [51:6..51:13): anonFun <- classes/N.anonFun. [51:16..51:20): List -> scala/package.List. -[51:20..51:20): -> scala/collection/IterableFactory#apply(). [51:24..51:27): map -> scala/collection/immutable/List#map(). 
-[51:30..51:31): i <- local2 -[52:8..52:13): local <- local3 -[53:4..53:9): local -> local3 +[51:30..51:31): i <- local3 +[52:8..52:13): local <- local4 +[53:4..53:9): local -> local4 [53:10..53:11): + -> scala/Int#`+`(+4). +Synthetics: +[51:16..51:27):List(1).map => *[Int] +[51:16..51:20):List => *.apply[Int] + expect/Empty.scala ------------------ @@ -618,24 +648,22 @@ Uri => Empty.scala Text => empty Language => Scala Symbols => 6 entries -Occurrences => 10 entries +Occurrences => 8 entries Symbols: -_empty_/A# => class A -_empty_/A#``(). => primary ctor -_empty_/A#b(). => method b -_empty_/B# => class B -_empty_/B#``(). => primary ctor -_empty_/B#a(). => method a +_empty_/A# => class A extends Object { self: A => +2 decls } +_empty_/A#``(). => primary ctor (): A +_empty_/A#b(). => method b => B +_empty_/B# => class B extends Object { self: B => +2 decls } +_empty_/B#``(). => primary ctor (): B +_empty_/B#a(). => method a => A Occurrences: [0:6..0:7): A <- _empty_/A# -[1:2..1:2): <- _empty_/A#``(). [1:6..1:7): b <- _empty_/A#b(). [1:9..1:10): B -> _empty_/B# [1:13..1:16): ??? -> scala/Predef.`???`(). [4:6..4:7): B <- _empty_/B# -[5:2..5:2): <- _empty_/B#``(). [5:6..5:7): a <- _empty_/B#a(). [5:9..5:10): A -> _empty_/A# [5:13..5:16): ??? -> scala/Predef.`???`(). @@ -652,12 +680,110 @@ Symbols => 1 entries Occurrences => 2 entries Symbols: -example/EmptyObject. => final object EmptyObject +example/EmptyObject. => final object EmptyObject extends Object { self: EmptyObject.type => +1 decls } Occurrences: [0:8..0:15): example <- example/ [2:7..2:18): EmptyObject <- example/EmptyObject. +expect/EndMarkers.scala +----------------------- + +Summary: +Schema => SemanticDB v4 +Uri => EndMarkers.scala +Text => empty +Language => Scala +Symbols => 24 entries +Occurrences => 37 entries + +Symbols: +endmarkers/Container# => class Container extends Object { self: Container => +5 decls } +endmarkers/Container#``(). => primary ctor (): Container +endmarkers/Container#`baz_=`(). 
=> var method baz_= (param x$1: Int): Unit +endmarkers/Container#`baz_=`().(x$1) => param x$1: Int +endmarkers/Container#bar. => val method bar Tuple3[Int, Int, Int] +endmarkers/Container#baz(). => var method baz Int +endmarkers/Container#foo(). => method foo => Tuple3[Int, Int, Int] +endmarkers/EndMarkers$package. => final package object endmarkers extends Object { self: endmarkers.type => +6 decls } +endmarkers/EndMarkers$package.`topLevelVar_=`(). => var method topLevelVar_= (param x$1: String): Unit +endmarkers/EndMarkers$package.`topLevelVar_=`().(x$1) => param x$1: String +endmarkers/EndMarkers$package.topLevelMethod(). => method topLevelMethod => String +endmarkers/EndMarkers$package.topLevelVal. => val method topLevelVal Int +endmarkers/EndMarkers$package.topLevelVar(). => var method topLevelVar String +endmarkers/EndMarkers$package.topLevelWithLocals(). => method topLevelWithLocals => Unit +endmarkers/MultiCtor# => class MultiCtor extends Object { self: MultiCtor => +3 decls } +endmarkers/MultiCtor#``(). => primary ctor (val param i: Int): MultiCtor +endmarkers/MultiCtor#``().(i) => val param i: Int +endmarkers/MultiCtor#``(+1). => ctor (): MultiCtor +endmarkers/MultiCtor#i. => val method i Int +endmarkers/TestObj. => final object TestObj extends Object { self: TestObj.type => +2 decls } +endmarkers/TestObj.foo(). => method foo => Int +local0 => val local localVal: Int +local1 => var local localVar: Int +local2 => local localDef: => Int + +Occurrences: +[0:8..0:18): endmarkers <- endmarkers/ +[2:8..2:17): MultiCtor <- endmarkers/MultiCtor# +[2:22..2:23): i <- endmarkers/MultiCtor#i. +[2:25..2:28): Int -> scala/Int# +[3:8..3:12): <- endmarkers/MultiCtor#``(+1). +[5:8..5:12): this -> endmarkers/MultiCtor#``(+1). +[6:6..6:15): MultiCtor -> endmarkers/MultiCtor# +[8:6..8:20): topLevelMethod <- endmarkers/EndMarkers$package.topLevelMethod(). 
+[8:22..8:28): String -> scala/Predef.String# +[10:6..10:20): topLevelMethod -> endmarkers/EndMarkers$package.topLevelMethod(). +[12:6..12:17): topLevelVal <- endmarkers/EndMarkers$package.topLevelVal. +[12:19..12:22): Int -> scala/Int# +[14:6..14:17): topLevelVal -> endmarkers/EndMarkers$package.topLevelVal. +[16:6..16:17): topLevelVar <- endmarkers/EndMarkers$package.topLevelVar(). +[16:19..16:25): String -> scala/Predef.String# +[18:6..18:17): topLevelVar -> endmarkers/EndMarkers$package.topLevelVar(). +[20:8..20:17): Container <- endmarkers/Container# +[22:8..22:11): foo <- endmarkers/Container#foo(). +[24:8..24:11): foo -> endmarkers/Container#foo(). +[26:8..26:11): bar <- endmarkers/Container#bar. +[28:8..28:11): bar -> endmarkers/Container#bar. +[30:8..30:11): baz <- endmarkers/Container#baz(). +[32:8..32:11): baz -> endmarkers/Container#baz(). +[34:6..34:15): Container -> endmarkers/Container# +[36:6..36:24): topLevelWithLocals <- endmarkers/EndMarkers$package.topLevelWithLocals(). +[36:26..36:30): Unit -> scala/Unit# +[38:8..38:16): localVal <- local0 +[40:8..40:16): localVal -> local0 +[42:8..42:16): localVar <- local1 +[44:8..44:16): localVar -> local1 +[46:8..46:16): localDef <- local2 +[48:8..48:16): localDef -> local2 +[50:6..50:24): topLevelWithLocals -> endmarkers/EndMarkers$package.topLevelWithLocals(). +[52:9..52:16): TestObj <- endmarkers/TestObj. +[54:8..54:11): foo <- endmarkers/TestObj.foo(). +[56:6..56:13): TestObj -> endmarkers/TestObj. +[58:4..58:14): endmarkers -> endmarkers/ + +expect/EndMarkers2.scala +------------------------ + +Summary: +Schema => SemanticDB v4 +Uri => EndMarkers2.scala +Text => empty +Language => Scala +Symbols => 2 entries +Occurrences => 5 entries + +Symbols: +endmarkers2/package. => final package object endmarkers2 extends Object { self: endmarkers2.type => +2 decls } +endmarkers2/package.Foo# => type Foo = Unit + +Occurrences: +[0:15..0:26): endmarkers2 <- endmarkers2/package. 
+[2:7..2:10): Foo <- endmarkers2/package.Foo# +[3:4..3:8): Unit -> scala/Unit# +[4:6..4:9): Foo -> endmarkers2/package.Foo# +[6:4..6:15): endmarkers2 -> endmarkers2/package. + expect/EnumVal.scala -------------------- @@ -667,48 +793,43 @@ Uri => EnumVal.scala Text => empty Language => Scala Symbols => 16 entries -Occurrences => 21 entries +Occurrences => 16 entries Symbols: -enumVal/A# => trait A -enumVal/A#``(). => primary ctor -enumVal/Color# => abstract sealed enum class Color -enumVal/Color#``(). => primary ctor -enumVal/Color#``().(rgb) => val param rgb -enumVal/Color#rgb. => val method rgb -enumVal/Color. => final object Color -enumVal/Color.$values. => val method $values -enumVal/Color.Blue. => case val static enum method Blue -enumVal/Color.Green. => case val static enum method Green -enumVal/Color.Red. => case val static enum method Red -enumVal/Color.fromOrdinal(). => method fromOrdinal -enumVal/Color.fromOrdinal().(ordinal) => param ordinal -enumVal/Color.valueOf(). => method valueOf -enumVal/Color.valueOf().($name) => param $name -enumVal/Color.values(). => method values +enumVal/A# => trait A extends Object { self: A => +1 decls } +enumVal/A#``(). => primary ctor (): A +enumVal/Color# => abstract sealed enum class Color extends Object with Enum { self: Color => +2 decls } +enumVal/Color#``(). => primary ctor (val param rgb: Int): Color +enumVal/Color#``().(rgb) => val param rgb: Int +enumVal/Color#rgb. => val method rgb Int +enumVal/Color. => final object Color extends Object { self: Color.type => +8 decls } +enumVal/Color.$values. => private[this] val method $values Array[Color] +enumVal/Color.Blue. => case val static enum method Blue Color +enumVal/Color.Green. => case val static enum method Green Color & A +enumVal/Color.Red. => case val static enum method Red Color +enumVal/Color.fromOrdinal(). => method fromOrdinal (param ordinal: Int): Color +enumVal/Color.fromOrdinal().(ordinal) => param ordinal: Int +enumVal/Color.valueOf(). 
=> method valueOf (param $name: String): Color +enumVal/Color.valueOf().($name) => param $name: String +enumVal/Color.values(). => method values => Array[Color] Occurrences: [0:8..0:15): enumVal <- enumVal/ [2:7..2:12): scala -> scala/ [2:13..2:20): runtime -> scala/runtime/ [2:21..2:30): EnumValue -> scala/runtime/EnumValue. -[5:0..5:0): <- enumVal/A#``(). [5:6..5:7): A <- enumVal/A# [7:5..7:10): Color <- enumVal/Color# -[7:10..7:10): <- enumVal/Color#``(). [7:15..7:18): rgb <- enumVal/Color#rgb. [7:20..7:23): Int -> scala/Int# [8:7..8:10): Red <- enumVal/Color.Red. [8:21..8:26): Color -> enumVal/Color# -[8:26..8:26): -> enumVal/Color#``(). [8:42..8:51): EnumValue -> scala/runtime/EnumValue# [9:7..9:12): Green <- enumVal/Color.Green. [9:21..9:26): Color -> enumVal/Color# -[9:26..9:26): -> enumVal/Color#``(). [9:42..9:43): A -> enumVal/A# [10:7..10:11): Blue <- enumVal/Color.Blue. [10:21..10:26): Color -> enumVal/Color# -[10:26..10:26): -> enumVal/Color#``(). expect/Enums.scala ------------------ @@ -719,210 +840,207 @@ Uri => Enums.scala Text => empty Language => Scala Symbols => 181 entries -Occurrences => 184 entries +Occurrences => 148 entries +Synthetics => 10 entries Symbols: -_empty_/Enums. => final object Enums -_empty_/Enums.Coin# => abstract sealed enum class Coin -_empty_/Enums.Coin#``(). => primary ctor -_empty_/Enums.Coin#``().(value) => param value -_empty_/Enums.Coin#value. => val method value -_empty_/Enums.Coin. => final object Coin -_empty_/Enums.Coin.$values. => val method $values -_empty_/Enums.Coin.Dime. => case val static enum method Dime -_empty_/Enums.Coin.Dollar. => case val static enum method Dollar -_empty_/Enums.Coin.Nickel. => case val static enum method Nickel -_empty_/Enums.Coin.Penny. => case val static enum method Penny -_empty_/Enums.Coin.Quarter. => case val static enum method Quarter -_empty_/Enums.Coin.fromOrdinal(). => method fromOrdinal -_empty_/Enums.Coin.fromOrdinal().(ordinal) => param ordinal -_empty_/Enums.Coin.valueOf(). 
=> method valueOf -_empty_/Enums.Coin.valueOf().($name) => param $name -_empty_/Enums.Coin.values(). => method values -_empty_/Enums.Colour# => abstract sealed enum class Colour -_empty_/Enums.Colour#``(). => primary ctor -_empty_/Enums.Colour. => final object Colour -_empty_/Enums.Colour.$new(). => method $new -_empty_/Enums.Colour.$new().($name) => param $name -_empty_/Enums.Colour.$new().(_$ordinal) => param _$ordinal -_empty_/Enums.Colour.$values. => val method $values -_empty_/Enums.Colour.Blue. => case val static enum method Blue -_empty_/Enums.Colour.Green. => case val static enum method Green -_empty_/Enums.Colour.Red. => case val static enum method Red -_empty_/Enums.Colour.fromOrdinal(). => method fromOrdinal -_empty_/Enums.Colour.fromOrdinal().(ordinal) => param ordinal -_empty_/Enums.Colour.valueOf(). => method valueOf -_empty_/Enums.Colour.valueOf().($name) => param $name -_empty_/Enums.Colour.values(). => method values -_empty_/Enums.Directions# => abstract sealed enum class Directions -_empty_/Enums.Directions#``(). => primary ctor -_empty_/Enums.Directions. => final object Directions -_empty_/Enums.Directions.$new(). => method $new -_empty_/Enums.Directions.$new().($name) => param $name -_empty_/Enums.Directions.$new().(_$ordinal) => param _$ordinal -_empty_/Enums.Directions.$values. => val method $values -_empty_/Enums.Directions.East. => case val static enum method East -_empty_/Enums.Directions.North. => case val static enum method North -_empty_/Enums.Directions.South. => case val static enum method South -_empty_/Enums.Directions.West. => case val static enum method West -_empty_/Enums.Directions.fromOrdinal(). => method fromOrdinal -_empty_/Enums.Directions.fromOrdinal().(ordinal) => param ordinal -_empty_/Enums.Directions.valueOf(). => method valueOf -_empty_/Enums.Directions.valueOf().($name) => param $name -_empty_/Enums.Directions.values(). 
=> method values -_empty_/Enums.Maybe# => abstract sealed enum class Maybe -_empty_/Enums.Maybe#[A] => covariant typeparam A -_empty_/Enums.Maybe#``(). => primary ctor -_empty_/Enums.Maybe. => final object Maybe -_empty_/Enums.Maybe.Just# => final case enum class Just -_empty_/Enums.Maybe.Just#[A] => covariant typeparam A -_empty_/Enums.Maybe.Just#_1(). => method _1 -_empty_/Enums.Maybe.Just#``(). => primary ctor -_empty_/Enums.Maybe.Just#``().(value) => val param value -_empty_/Enums.Maybe.Just#copy$default$1(). => method copy$default$1 -_empty_/Enums.Maybe.Just#copy$default$1().[A] => typeparam A -_empty_/Enums.Maybe.Just#copy(). => method copy -_empty_/Enums.Maybe.Just#copy().(value) => param value -_empty_/Enums.Maybe.Just#copy().[A] => typeparam A -_empty_/Enums.Maybe.Just#ordinal(). => method ordinal -_empty_/Enums.Maybe.Just#value. => val method value -_empty_/Enums.Maybe.Just. => final object Just -_empty_/Enums.Maybe.Just.apply(). => method apply -_empty_/Enums.Maybe.Just.apply().(value) => param value -_empty_/Enums.Maybe.Just.apply().[A] => typeparam A -_empty_/Enums.Maybe.Just.toString(). => method toString -_empty_/Enums.Maybe.Just.unapply(). => method unapply -_empty_/Enums.Maybe.Just.unapply().(x$1) => param x$1 -_empty_/Enums.Maybe.Just.unapply().[A] => typeparam A -_empty_/Enums.Maybe.None. => case val static enum method None -_empty_/Enums.Maybe.fromOrdinal(). => method fromOrdinal -_empty_/Enums.Maybe.fromOrdinal().(ordinal) => param ordinal -_empty_/Enums.Planet# => abstract sealed enum class Planet -_empty_/Enums.Planet#G. => final val method G -_empty_/Enums.Planet#``(). => primary ctor -_empty_/Enums.Planet#``().(mass) => param mass -_empty_/Enums.Planet#``().(radius) => param radius -_empty_/Enums.Planet#mass. => val method mass -_empty_/Enums.Planet#radius. => val method radius -_empty_/Enums.Planet#surfaceGravity(). => method surfaceGravity -_empty_/Enums.Planet#surfaceWeight(). 
=> method surfaceWeight -_empty_/Enums.Planet#surfaceWeight().(otherMass) => param otherMass -_empty_/Enums.Planet. => final object Planet -_empty_/Enums.Planet.$values. => val method $values -_empty_/Enums.Planet.Earth. => case val static enum method Earth -_empty_/Enums.Planet.Jupiter. => case val static enum method Jupiter -_empty_/Enums.Planet.Mars. => case val static enum method Mars -_empty_/Enums.Planet.Mercury. => case val static enum method Mercury -_empty_/Enums.Planet.Neptune. => case val static enum method Neptune -_empty_/Enums.Planet.Saturn. => case val static enum method Saturn -_empty_/Enums.Planet.Uranus. => case val static enum method Uranus -_empty_/Enums.Planet.Venus. => case val static enum method Venus -_empty_/Enums.Planet.fromOrdinal(). => method fromOrdinal -_empty_/Enums.Planet.fromOrdinal().(ordinal) => param ordinal -_empty_/Enums.Planet.valueOf(). => method valueOf -_empty_/Enums.Planet.valueOf().($name) => param $name -_empty_/Enums.Planet.values(). => method values -_empty_/Enums.Suits# => abstract sealed enum class Suits -_empty_/Enums.Suits#``(). => primary ctor -_empty_/Enums.Suits. => final object Suits -_empty_/Enums.Suits.$new(). => method $new -_empty_/Enums.Suits.$new().($name) => param $name -_empty_/Enums.Suits.$new().(_$ordinal) => param _$ordinal -_empty_/Enums.Suits.$values. => val method $values -_empty_/Enums.Suits.Clubs. => case val static enum method Clubs -_empty_/Enums.Suits.Diamonds. => case val static enum method Diamonds -_empty_/Enums.Suits.Hearts. => case val static enum method Hearts -_empty_/Enums.Suits.Spades. => case val static enum method Spades -_empty_/Enums.Suits.derived$CanEqual. => implicit lazy val method derived$CanEqual -_empty_/Enums.Suits.fromOrdinal(). => method fromOrdinal -_empty_/Enums.Suits.fromOrdinal().(ordinal) => param ordinal -_empty_/Enums.Suits.isBlack(). => method isBlack -_empty_/Enums.Suits.isBlack().(suit) => param suit -_empty_/Enums.Suits.isRed(). 
=> method isRed -_empty_/Enums.Suits.isRed().(suit) => param suit -_empty_/Enums.Suits.valueOf(). => method valueOf -_empty_/Enums.Suits.valueOf().($name) => param $name -_empty_/Enums.Suits.values(). => method values -_empty_/Enums.Tag# => abstract sealed enum class Tag -_empty_/Enums.Tag#[A] => typeparam A -_empty_/Enums.Tag#``(). => primary ctor -_empty_/Enums.Tag. => final object Tag -_empty_/Enums.Tag.$values. => val method $values -_empty_/Enums.Tag.BooleanTag. => case val static enum method BooleanTag -_empty_/Enums.Tag.IntTag. => case val static enum method IntTag -_empty_/Enums.Tag.fromOrdinal(). => method fromOrdinal -_empty_/Enums.Tag.fromOrdinal().(ordinal) => param ordinal -_empty_/Enums.Tag.valueOf(). => method valueOf -_empty_/Enums.Tag.valueOf().($name) => param $name -_empty_/Enums.Tag.values(). => method values -_empty_/Enums.WeekDays# => abstract sealed enum class WeekDays -_empty_/Enums.WeekDays#``(). => primary ctor -_empty_/Enums.WeekDays. => final object WeekDays -_empty_/Enums.WeekDays.$new(). => method $new -_empty_/Enums.WeekDays.$new().($name) => param $name -_empty_/Enums.WeekDays.$new().(_$ordinal) => param _$ordinal -_empty_/Enums.WeekDays.$values. => val method $values -_empty_/Enums.WeekDays.Friday. => case val static enum method Friday -_empty_/Enums.WeekDays.Monday. => case val static enum method Monday -_empty_/Enums.WeekDays.Saturday. => case val static enum method Saturday -_empty_/Enums.WeekDays.Sunday. => case val static enum method Sunday -_empty_/Enums.WeekDays.Thursday. => case val static enum method Thursday -_empty_/Enums.WeekDays.Tuesday. => case val static enum method Tuesday -_empty_/Enums.WeekDays.Wednesday. => case val static enum method Wednesday -_empty_/Enums.WeekDays.fromOrdinal(). => method fromOrdinal -_empty_/Enums.WeekDays.fromOrdinal().(ordinal) => param ordinal -_empty_/Enums.WeekDays.valueOf(). => method valueOf -_empty_/Enums.WeekDays.valueOf().($name) => param $name -_empty_/Enums.WeekDays.values(). 
=> method values -_empty_/Enums.`<:<`# => abstract sealed enum class <:< -_empty_/Enums.`<:<`#[A] => contravariant typeparam A -_empty_/Enums.`<:<`#[B] => typeparam B -_empty_/Enums.`<:<`#``(). => primary ctor -_empty_/Enums.`<:<`. => final object <:< -_empty_/Enums.`<:<`.Refl# => final case enum class Refl -_empty_/Enums.`<:<`.Refl#[C] => typeparam C -_empty_/Enums.`<:<`.Refl#``(). => primary ctor -_empty_/Enums.`<:<`.Refl#copy(). => method copy -_empty_/Enums.`<:<`.Refl#copy().[C] => typeparam C -_empty_/Enums.`<:<`.Refl#ordinal(). => method ordinal -_empty_/Enums.`<:<`.Refl. => final object Refl -_empty_/Enums.`<:<`.Refl.apply(). => method apply -_empty_/Enums.`<:<`.Refl.apply().[C] => typeparam C -_empty_/Enums.`<:<`.Refl.toString(). => method toString -_empty_/Enums.`<:<`.Refl.unapply(). => method unapply -_empty_/Enums.`<:<`.Refl.unapply().(x$1) => param x$1 -_empty_/Enums.`<:<`.Refl.unapply().[C] => typeparam C -_empty_/Enums.`<:<`.`given_<:<_T_T`(). => final implicit method given_<:<_T_T -_empty_/Enums.`<:<`.`given_<:<_T_T`().[T] => typeparam T -_empty_/Enums.`<:<`.fromOrdinal(). => method fromOrdinal -_empty_/Enums.`<:<`.fromOrdinal().(ordinal) => param ordinal -_empty_/Enums.some1. => val method some1 -_empty_/Enums.unwrap(). => method unwrap -_empty_/Enums.unwrap().(ev) => implicit param ev -_empty_/Enums.unwrap().(opt) => param opt -_empty_/Enums.unwrap().[A] => typeparam A -_empty_/Enums.unwrap().[B] => typeparam B -local0 => param x +_empty_/Enums. => final object Enums extends Object { self: Enums.type => +30 decls } +_empty_/Enums.Coin# => abstract sealed enum class Coin extends Object with Enum { self: Coin => +2 decls } +_empty_/Enums.Coin#``(). => primary ctor (param value: Int): Coin +_empty_/Enums.Coin#``().(value) => param value: Int +_empty_/Enums.Coin#value. => private[this] val method value Int +_empty_/Enums.Coin. => final object Coin extends Object { self: Coin.type => +10 decls } +_empty_/Enums.Coin.$values. 
=> private[this] val method $values Array[Coin] +_empty_/Enums.Coin.Dime. => case val static enum method Dime Coin +_empty_/Enums.Coin.Dollar. => case val static enum method Dollar Coin +_empty_/Enums.Coin.Nickel. => case val static enum method Nickel Coin +_empty_/Enums.Coin.Penny. => case val static enum method Penny Coin +_empty_/Enums.Coin.Quarter. => case val static enum method Quarter Coin +_empty_/Enums.Coin.fromOrdinal(). => method fromOrdinal (param ordinal: Int): Coin +_empty_/Enums.Coin.fromOrdinal().(ordinal) => param ordinal: Int +_empty_/Enums.Coin.valueOf(). => method valueOf (param $name: String): Coin +_empty_/Enums.Coin.valueOf().($name) => param $name: String +_empty_/Enums.Coin.values(). => method values => Array[Coin] +_empty_/Enums.Colour# => abstract sealed enum class Colour extends Object with Enum { self: Colour => +1 decls } +_empty_/Enums.Colour#``(). => primary ctor (): Colour +_empty_/Enums.Colour. => final object Colour extends Object { self: Colour.type => +9 decls } +_empty_/Enums.Colour.$new(). => private[this] method $new (param _$ordinal: Int, param $name: String): Colour +_empty_/Enums.Colour.$new().($name) => param $name: String +_empty_/Enums.Colour.$new().(_$ordinal) => param _$ordinal: Int +_empty_/Enums.Colour.$values. => private[this] val method $values Array[Colour] +_empty_/Enums.Colour.Blue. => case val static enum method Blue Colour +_empty_/Enums.Colour.Green. => case val static enum method Green Colour +_empty_/Enums.Colour.Red. => case val static enum method Red Colour +_empty_/Enums.Colour.fromOrdinal(). => method fromOrdinal (param ordinal: Int): Colour +_empty_/Enums.Colour.fromOrdinal().(ordinal) => param ordinal: Int +_empty_/Enums.Colour.valueOf(). => method valueOf (param $name: String): Colour +_empty_/Enums.Colour.valueOf().($name) => param $name: String +_empty_/Enums.Colour.values(). 
=> method values => Array[Colour] +_empty_/Enums.Directions# => abstract sealed enum class Directions extends Object with Enum { self: Directions => +1 decls } +_empty_/Enums.Directions#``(). => primary ctor (): Directions +_empty_/Enums.Directions. => final object Directions extends Object { self: Directions.type => +10 decls } +_empty_/Enums.Directions.$new(). => private[this] method $new (param _$ordinal: Int, param $name: String): Directions +_empty_/Enums.Directions.$new().($name) => param $name: String +_empty_/Enums.Directions.$new().(_$ordinal) => param _$ordinal: Int +_empty_/Enums.Directions.$values. => private[this] val method $values Array[Directions] +_empty_/Enums.Directions.East. => case val static enum method East Directions +_empty_/Enums.Directions.North. => case val static enum method North Directions +_empty_/Enums.Directions.South. => case val static enum method South Directions +_empty_/Enums.Directions.West. => case val static enum method West Directions +_empty_/Enums.Directions.fromOrdinal(). => method fromOrdinal (param ordinal: Int): Directions +_empty_/Enums.Directions.fromOrdinal().(ordinal) => param ordinal: Int +_empty_/Enums.Directions.valueOf(). => method valueOf (param $name: String): Directions +_empty_/Enums.Directions.valueOf().($name) => param $name: String +_empty_/Enums.Directions.values(). => method values => Array[Directions] +_empty_/Enums.Maybe# => abstract sealed enum class Maybe [covariant typeparam A ] extends Object with Enum { self: Maybe[A] => +2 decls } +_empty_/Enums.Maybe#[A] => covariant typeparam A +_empty_/Enums.Maybe#``(). => primary ctor [covariant typeparam A ](): Maybe[A] +_empty_/Enums.Maybe. => final object Maybe extends Object { self: Maybe.type => +6 decls } +_empty_/Enums.Maybe.Just# => final case enum class Just [covariant typeparam A ] extends Maybe[A] { self: Just[A] => +7 decls } +_empty_/Enums.Maybe.Just#[A] => covariant typeparam A +_empty_/Enums.Maybe.Just#_1(). 
=> method _1 => A +_empty_/Enums.Maybe.Just#``(). => primary ctor [covariant typeparam A ](val param value: A): Just[A] +_empty_/Enums.Maybe.Just#``().(value) => val param value: A +_empty_/Enums.Maybe.Just#copy$default$1(). => method copy$default$1 [covariant typeparam A ]: A +_empty_/Enums.Maybe.Just#copy$default$1().[A] => typeparam A +_empty_/Enums.Maybe.Just#copy(). => method copy [covariant typeparam A ](param value: A): Just[A] +_empty_/Enums.Maybe.Just#copy().(value) => param value: A +_empty_/Enums.Maybe.Just#copy().[A] => typeparam A +_empty_/Enums.Maybe.Just#ordinal(). => method ordinal => Int <: scala/reflect/Enum#ordinal(). +_empty_/Enums.Maybe.Just#value. => val method value A +_empty_/Enums.Maybe.Just. => final object Just extends Object { self: Just.type => +4 decls } +_empty_/Enums.Maybe.Just.apply(). => method apply [typeparam A ](param value: A): Just[A] +_empty_/Enums.Maybe.Just.apply().(value) => param value: A +_empty_/Enums.Maybe.Just.apply().[A] => typeparam A +_empty_/Enums.Maybe.Just.toString(). => method toString => String <: scala/Any#toString(). +_empty_/Enums.Maybe.Just.unapply(). => method unapply [typeparam A ](param x$1: Just[A]): Just[A] +_empty_/Enums.Maybe.Just.unapply().(x$1) => param x$1: Just[A] +_empty_/Enums.Maybe.Just.unapply().[A] => typeparam A +_empty_/Enums.Maybe.None. => case val static enum method None Maybe[Nothing] +_empty_/Enums.Maybe.fromOrdinal(). => method fromOrdinal (param ordinal: Int): Maybe[_] forSome { type _ } +_empty_/Enums.Maybe.fromOrdinal().(ordinal) => param ordinal: Int +_empty_/Enums.Planet# => abstract sealed enum class Planet extends Enum[Planet] with Enum { self: Planet => +6 decls } +_empty_/Enums.Planet#G. => private[this] final val method G 6.673E-11 +_empty_/Enums.Planet#``(). => primary ctor (param mass: Double, param radius: Double): Planet +_empty_/Enums.Planet#``().(mass) => param mass: Double +_empty_/Enums.Planet#``().(radius) => param radius: Double +_empty_/Enums.Planet#mass. 
=> private[this] val method mass Double +_empty_/Enums.Planet#radius. => private[this] val method radius Double +_empty_/Enums.Planet#surfaceGravity(). => method surfaceGravity => Double +_empty_/Enums.Planet#surfaceWeight(). => method surfaceWeight (param otherMass: Double): Double +_empty_/Enums.Planet#surfaceWeight().(otherMass) => param otherMass: Double +_empty_/Enums.Planet. => final object Planet extends Object { self: Planet.type => +13 decls } +_empty_/Enums.Planet.$values. => private[this] val method $values Array[Planet] +_empty_/Enums.Planet.Earth. => case val static enum method Earth Planet +_empty_/Enums.Planet.Jupiter. => case val static enum method Jupiter Planet +_empty_/Enums.Planet.Mars. => case val static enum method Mars Planet +_empty_/Enums.Planet.Mercury. => case val static enum method Mercury Planet +_empty_/Enums.Planet.Neptune. => case val static enum method Neptune Planet +_empty_/Enums.Planet.Saturn. => case val static enum method Saturn Planet +_empty_/Enums.Planet.Uranus. => case val static enum method Uranus Planet +_empty_/Enums.Planet.Venus. => case val static enum method Venus Planet +_empty_/Enums.Planet.fromOrdinal(). => method fromOrdinal (param ordinal: Int): Planet +_empty_/Enums.Planet.fromOrdinal().(ordinal) => param ordinal: Int +_empty_/Enums.Planet.valueOf(). => method valueOf (param $name: String): Planet +_empty_/Enums.Planet.valueOf().($name) => param $name: String +_empty_/Enums.Planet.values(). => method values => Array[Planet] +_empty_/Enums.Suits# => abstract sealed enum class Suits extends Object with Enum { self: Suits => +1 decls } +_empty_/Enums.Suits#``(). => primary ctor (): Suits +_empty_/Enums.Suits. => final object Suits extends Object { self: Suits.type => +13 decls } +_empty_/Enums.Suits.$new(). 
=> private[this] method $new (param _$ordinal: Int, param $name: String): Suits +_empty_/Enums.Suits.$new().($name) => param $name: String +_empty_/Enums.Suits.$new().(_$ordinal) => param _$ordinal: Int +_empty_/Enums.Suits.$values. => private[this] val method $values Array[Suits] +_empty_/Enums.Suits.Clubs. => case val static enum method Clubs Suits +_empty_/Enums.Suits.Diamonds. => case val static enum method Diamonds Suits +_empty_/Enums.Suits.Hearts. => case val static enum method Hearts Suits +_empty_/Enums.Suits.Spades. => case val static enum method Spades Suits +_empty_/Enums.Suits.derived$CanEqual. => implicit lazy val given method derived$CanEqual CanEqual[Suits, Suits] +_empty_/Enums.Suits.fromOrdinal(). => method fromOrdinal (param ordinal: Int): Suits +_empty_/Enums.Suits.fromOrdinal().(ordinal) => param ordinal: Int +_empty_/Enums.Suits.isBlack(). => method isBlack (param suit: Suits): Boolean +_empty_/Enums.Suits.isBlack().(suit) => param suit: Suits +_empty_/Enums.Suits.isRed(). => method isRed (param suit: Suits): Boolean +_empty_/Enums.Suits.isRed().(suit) => param suit: Suits +_empty_/Enums.Suits.valueOf(). => method valueOf (param $name: String): Suits +_empty_/Enums.Suits.valueOf().($name) => param $name: String +_empty_/Enums.Suits.values(). => method values => Array[Suits] +_empty_/Enums.Tag# => abstract sealed enum class Tag [typeparam A ] extends Object with Enum { self: Tag[A] => +2 decls } +_empty_/Enums.Tag#[A] => typeparam A +_empty_/Enums.Tag#``(). => primary ctor [typeparam A ](): Tag[A] +_empty_/Enums.Tag. => final object Tag extends Object { self: Tag.type => +7 decls } +_empty_/Enums.Tag.$values. => private[this] val method $values Array[Tag[_] forSome { type _ }] +_empty_/Enums.Tag.BooleanTag. => case val static enum method BooleanTag Tag[Boolean] +_empty_/Enums.Tag.IntTag. => case val static enum method IntTag Tag[Int] +_empty_/Enums.Tag.fromOrdinal(). 
=> method fromOrdinal (param ordinal: Int): Tag[_] forSome { type _ } +_empty_/Enums.Tag.fromOrdinal().(ordinal) => param ordinal: Int +_empty_/Enums.Tag.valueOf(). => method valueOf (param $name: String): Tag[_] forSome { type _ >: Int & Boolean <: Int | Boolean } +_empty_/Enums.Tag.valueOf().($name) => param $name: String +_empty_/Enums.Tag.values(). => method values => Array[Tag[_] forSome { type _ }] +_empty_/Enums.WeekDays# => abstract sealed enum class WeekDays extends Object with Enum { self: WeekDays => +1 decls } +_empty_/Enums.WeekDays#``(). => primary ctor (): WeekDays +_empty_/Enums.WeekDays. => final object WeekDays extends Object { self: WeekDays.type => +13 decls } +_empty_/Enums.WeekDays.$new(). => private[this] method $new (param _$ordinal: Int, param $name: String): WeekDays +_empty_/Enums.WeekDays.$new().($name) => param $name: String +_empty_/Enums.WeekDays.$new().(_$ordinal) => param _$ordinal: Int +_empty_/Enums.WeekDays.$values. => private[this] val method $values Array[WeekDays] +_empty_/Enums.WeekDays.Friday. => case val static enum method Friday WeekDays +_empty_/Enums.WeekDays.Monday. => case val static enum method Monday WeekDays +_empty_/Enums.WeekDays.Saturday. => case val static enum method Saturday WeekDays +_empty_/Enums.WeekDays.Sunday. => case val static enum method Sunday WeekDays +_empty_/Enums.WeekDays.Thursday. => case val static enum method Thursday WeekDays +_empty_/Enums.WeekDays.Tuesday. => case val static enum method Tuesday WeekDays +_empty_/Enums.WeekDays.Wednesday. => case val static enum method Wednesday WeekDays +_empty_/Enums.WeekDays.fromOrdinal(). => method fromOrdinal (param ordinal: Int): WeekDays +_empty_/Enums.WeekDays.fromOrdinal().(ordinal) => param ordinal: Int +_empty_/Enums.WeekDays.valueOf(). => method valueOf (param $name: String): WeekDays +_empty_/Enums.WeekDays.valueOf().($name) => param $name: String +_empty_/Enums.WeekDays.values(). 
=> method values => Array[WeekDays] +_empty_/Enums.`<:<`# => abstract sealed enum class <:< [contravariant typeparam A , typeparam B ] extends Object with Enum { self: <:<[A, B] => +3 decls } +_empty_/Enums.`<:<`#[A] => contravariant typeparam A +_empty_/Enums.`<:<`#[B] => typeparam B +_empty_/Enums.`<:<`#``(). => primary ctor [contravariant typeparam A , typeparam B ](): <:<[A, B] +_empty_/Enums.`<:<`. => final object <:< extends Object { self: <:<.type => +6 decls } +_empty_/Enums.`<:<`.Refl# => final case enum class Refl [typeparam C ] extends <:<[C, C] { self: Refl[C] => +4 decls } +_empty_/Enums.`<:<`.Refl#[C] => typeparam C +_empty_/Enums.`<:<`.Refl#``(). => primary ctor [typeparam C ](): Refl[C] +_empty_/Enums.`<:<`.Refl#copy(). => method copy [typeparam C ](): Refl[C] +_empty_/Enums.`<:<`.Refl#copy().[C] => typeparam C +_empty_/Enums.`<:<`.Refl#ordinal(). => method ordinal => Int <: scala/reflect/Enum#ordinal(). +_empty_/Enums.`<:<`.Refl. => final object Refl extends Object { self: Refl.type => +4 decls } +_empty_/Enums.`<:<`.Refl.apply(). => method apply [typeparam C ](): Refl[C] +_empty_/Enums.`<:<`.Refl.apply().[C] => typeparam C +_empty_/Enums.`<:<`.Refl.toString(). => method toString => String <: scala/Any#toString(). +_empty_/Enums.`<:<`.Refl.unapply(). => method unapply [typeparam C ](param x$1: Refl[C]): true +_empty_/Enums.`<:<`.Refl.unapply().(x$1) => param x$1: Refl[C] +_empty_/Enums.`<:<`.Refl.unapply().[C] => typeparam C +_empty_/Enums.`<:<`.`given_<:<_T_T`(). => final implicit given method given_<:<_T_T [typeparam T ]: <:<[T, T] +_empty_/Enums.`<:<`.`given_<:<_T_T`().[T] => typeparam T +_empty_/Enums.`<:<`.fromOrdinal(). => method fromOrdinal (param ordinal: Int): <:<[_, _] forSome { type _ ; type _ } +_empty_/Enums.`<:<`.fromOrdinal().(ordinal) => param ordinal: Int +_empty_/Enums.some1. => val method some1 Option[Int] +_empty_/Enums.unwrap(). 
=> method unwrap [typeparam A , typeparam B ](param opt: Option[A])(implicit given param ev: <:<[A, Option[B]]): Option[B] +_empty_/Enums.unwrap().(ev) => implicit given param ev: <:<[A, Option[B]] +_empty_/Enums.unwrap().(opt) => param opt: Option[A] +_empty_/Enums.unwrap().[A] => typeparam A +_empty_/Enums.unwrap().[B] => typeparam B +local0 => param x: Option[B] Occurrences: [0:7..0:12): Enums <- _empty_/Enums. [1:9..1:12): <:< -> _empty_/Enums.`<:<`. [3:7..3:13): Colour <- _empty_/Enums.Colour# -[4:4..4:4): <- _empty_/Enums.Colour#``(). [4:11..4:17): Colour -> _empty_/Enums.Colour. [4:18..4:21): Red -> _empty_/Enums.Colour.Red. [5:9..5:12): Red <- _empty_/Enums.Colour.Red. [5:14..5:19): Green <- _empty_/Enums.Colour.Green. [5:21..5:25): Blue <- _empty_/Enums.Colour.Blue. [7:7..7:17): Directions <- _empty_/Enums.Directions# -[8:4..8:4): <- _empty_/Enums.Directions#``(). [8:9..8:14): North <- _empty_/Enums.Directions.North. [8:16..8:20): East <- _empty_/Enums.Directions.East. [8:22..8:27): South <- _empty_/Enums.Directions.South. [8:29..8:33): West <- _empty_/Enums.Directions.West. [10:7..10:12): Suits <- _empty_/Enums.Suits# -[10:21..10:21): <- _empty_/Enums.Suits#``(). -[10:21..10:21): -> scala/CanEqual.derived. [11:9..11:15): Hearts <- _empty_/Enums.Suits.Hearts. [11:17..11:23): Spades <- _empty_/Enums.Suits.Spades. [11:25..11:30): Clubs <- _empty_/Enums.Suits.Clubs. @@ -947,7 +1065,6 @@ Occurrences: [18:11..18:17): Spades -> _empty_/Enums.Suits.Spades. [18:20..18:25): Clubs -> _empty_/Enums.Suits.Clubs. [21:7..21:15): WeekDays <- _empty_/Enums.WeekDays# -[22:4..22:4): <- _empty_/Enums.WeekDays#``(). [22:9..22:15): Monday <- _empty_/Enums.WeekDays.Monday. [23:9..23:16): Tuesday <- _empty_/Enums.WeekDays.Tuesday. [24:9..24:18): Wednesday <- _empty_/Enums.WeekDays.Wednesday. @@ -956,62 +1073,46 @@ Occurrences: [27:9..27:17): Saturday <- _empty_/Enums.WeekDays.Saturday. [28:9..28:15): Sunday <- _empty_/Enums.WeekDays.Sunday. 
[30:7..30:11): Coin <- _empty_/Enums.Coin# -[30:11..30:11): <- _empty_/Enums.Coin#``(). [30:12..30:17): value <- _empty_/Enums.Coin#value. [30:19..30:22): Int -> scala/Int# [31:9..31:14): Penny <- _empty_/Enums.Coin.Penny. [31:26..31:30): Coin -> _empty_/Enums.Coin# -[31:30..31:30): -> _empty_/Enums.Coin#``(). [32:9..32:15): Nickel <- _empty_/Enums.Coin.Nickel. [32:26..32:30): Coin -> _empty_/Enums.Coin# -[32:30..32:30): -> _empty_/Enums.Coin#``(). [33:9..33:13): Dime <- _empty_/Enums.Coin.Dime. [33:26..33:30): Coin -> _empty_/Enums.Coin# -[33:30..33:30): -> _empty_/Enums.Coin#``(). [34:9..34:16): Quarter <- _empty_/Enums.Coin.Quarter. [34:26..34:30): Coin -> _empty_/Enums.Coin# -[34:30..34:30): -> _empty_/Enums.Coin#``(). [35:9..35:15): Dollar <- _empty_/Enums.Coin.Dollar. [35:26..35:30): Coin -> _empty_/Enums.Coin# -[35:30..35:30): -> _empty_/Enums.Coin#``(). [37:7..37:12): Maybe <- _empty_/Enums.Maybe# -[37:12..37:12): <- _empty_/Enums.Maybe#``(). [37:14..37:15): A <- _empty_/Enums.Maybe#[A] [38:9..38:13): Just <- _empty_/Enums.Maybe.Just# -[38:13..38:13): <- _empty_/Enums.Maybe.Just#``(). [38:14..38:19): value <- _empty_/Enums.Maybe.Just#value. [38:21..38:22): A -> _empty_/Enums.Maybe.Just#[A] [39:9..39:13): None <- _empty_/Enums.Maybe.None. [41:7..41:10): Tag <- _empty_/Enums.Tag# -[41:10..41:10): <- _empty_/Enums.Tag#``(). [41:11..41:12): A <- _empty_/Enums.Tag#[A] [42:9..42:15): IntTag <- _empty_/Enums.Tag.IntTag. [42:24..42:27): Tag -> _empty_/Enums.Tag# [42:28..42:31): Int -> scala/Int# -[42:32..42:32): -> _empty_/Enums.Tag#``(). [43:9..43:19): BooleanTag <- _empty_/Enums.Tag.BooleanTag. [43:28..43:31): Tag -> _empty_/Enums.Tag# [43:32..43:39): Boolean -> scala/Boolean# -[43:40..43:40): -> _empty_/Enums.Tag#``(). [45:7..45:10): <:< <- _empty_/Enums.`<:<`# -[45:10..45:10): <- _empty_/Enums.`<:<`#``(). 
[45:12..45:13): A <- _empty_/Enums.`<:<`#[A] [45:15..45:16): B <- _empty_/Enums.`<:<`#[B] [46:9..46:13): Refl <- _empty_/Enums.`<:<`.Refl# -[46:13..46:13): <- _empty_/Enums.`<:<`.Refl#``(). [46:14..46:15): C <- _empty_/Enums.`<:<`.Refl#[C] [46:28..46:29): C -> _empty_/Enums.`<:<`.Refl#[C] [46:30..46:33): <:< -> _empty_/Enums.`<:<`# [46:34..46:35): C -> _empty_/Enums.`<:<`.Refl#[C] -[46:35..46:35): -> _empty_/Enums.`<:<`#``(). [48:9..48:12): <:< <- _empty_/Enums.`<:<`. -[49:10..49:10): <- _empty_/Enums.`<:<`.`given_<:<_T_T`(). [49:11..49:12): T <- _empty_/Enums.`<:<`.`given_<:<_T_T`().[T] [49:16..49:17): T -> _empty_/Enums.`<:<`.`given_<:<_T_T`().[T] [49:18..49:21): <:< -> _empty_/Enums.`<:<`# [49:22..49:23): T -> _empty_/Enums.`<:<`.`given_<:<_T_T`().[T] [49:27..49:31): Refl -> _empty_/Enums.`<:<`.Refl. -[49:31..49:31): -> _empty_/Enums.`<:<`.Refl.apply(). [51:13..51:14): A <- _empty_/Enums.unwrap().[A] [51:16..51:17): B <- _empty_/Enums.unwrap().[B] [51:19..51:22): opt <- _empty_/Enums.unwrap().(opt) @@ -1027,29 +1128,22 @@ Occurrences: [51:81..51:82): B -> _empty_/Enums.unwrap().[B] [51:86..51:88): ev -> _empty_/Enums.unwrap().(ev) [52:9..52:13): Refl -> _empty_/Enums.`<:<`.Refl. -[52:13..52:13): -> _empty_/Enums.`<:<`.Refl.unapply(). [52:19..52:22): opt -> _empty_/Enums.unwrap().(opt) [52:23..52:30): flatMap -> scala/Option#flatMap(). [52:31..52:39): identity -> scala/Predef.identity(). -[52:31..52:31): -> local0 [52:40..52:46): Option -> scala/Option# [52:47..52:48): B -> _empty_/Enums.unwrap().[B] [54:6..54:11): some1 <- _empty_/Enums.some1. [54:14..54:18): Some -> scala/Some. -[54:18..54:18): -> scala/Some.apply(). [54:19..54:23): Some -> scala/Some. -[54:23..54:23): -> scala/Some.apply(). [54:28..54:34): unwrap -> _empty_/Enums.unwrap(). -[54:34..54:34): -> _empty_/Enums.`<:<`.`given_<:<_T_T`(). [56:7..56:13): Planet <- _empty_/Enums.Planet# -[56:13..56:13): <- _empty_/Enums.Planet#``(). [56:14..56:18): mass <- _empty_/Enums.Planet#mass. 
[56:20..56:26): Double -> scala/Double# [56:28..56:34): radius <- _empty_/Enums.Planet#radius. [56:36..56:42): Double -> scala/Double# [56:52..56:56): Enum -> java/lang/Enum# [56:57..56:63): Planet -> _empty_/Enums.Planet# -[56:64..56:64): -> java/lang/Enum#``(). [57:22..57:23): G <- _empty_/Enums.Planet#G. [58:8..58:22): surfaceGravity <- _empty_/Enums.Planet#surfaceGravity(). [58:25..58:26): G -> _empty_/Enums.Planet#G. @@ -1067,28 +1161,32 @@ Occurrences: [59:55..59:69): surfaceGravity -> _empty_/Enums.Planet#surfaceGravity(). [61:9..61:16): Mercury <- _empty_/Enums.Planet.Mercury. [61:25..61:31): Planet -> _empty_/Enums.Planet# -[61:31..61:31): -> _empty_/Enums.Planet#``(). [62:9..62:14): Venus <- _empty_/Enums.Planet.Venus. [62:25..62:31): Planet -> _empty_/Enums.Planet# -[62:31..62:31): -> _empty_/Enums.Planet#``(). [63:9..63:14): Earth <- _empty_/Enums.Planet.Earth. [63:25..63:31): Planet -> _empty_/Enums.Planet# -[63:31..63:31): -> _empty_/Enums.Planet#``(). [64:9..64:13): Mars <- _empty_/Enums.Planet.Mars. [64:25..64:31): Planet -> _empty_/Enums.Planet# -[64:31..64:31): -> _empty_/Enums.Planet#``(). [65:9..65:16): Jupiter <- _empty_/Enums.Planet.Jupiter. [65:25..65:31): Planet -> _empty_/Enums.Planet# -[65:31..65:31): -> _empty_/Enums.Planet#``(). [66:9..66:15): Saturn <- _empty_/Enums.Planet.Saturn. [66:25..66:31): Planet -> _empty_/Enums.Planet# -[66:31..66:31): -> _empty_/Enums.Planet#``(). [67:9..67:15): Uranus <- _empty_/Enums.Planet.Uranus. [67:25..67:31): Planet -> _empty_/Enums.Planet# -[67:31..67:31): -> _empty_/Enums.Planet#``(). [68:9..68:16): Neptune <- _empty_/Enums.Planet.Neptune. [68:25..68:31): Planet -> _empty_/Enums.Planet# -[68:31..68:31): -> _empty_/Enums.Planet#``(). 
+ +Synthetics: +[46:28..46:35):C <:< C => *[C, C] +[49:27..49:31):Refl => *.apply[T] +[52:9..52:13):Refl => *.unapply[Option[B]] +[52:19..52:30):opt.flatMap => *[B] +[52:31..52:50):identity[Option[B]] => *[Function1[A, Option[B]]] +[54:14..54:18):Some => *.apply[Some[Int]] +[54:14..54:34):Some(Some(1)).unwrap => *(given_<:<_T_T[Option[Int]]) +[54:19..54:23):Some => *.apply[Int] +[54:28..54:34):unwrap => *[Some[Int], Int] +[56:52..56:64):Enum[Planet] => *[Planet] expect/EtaExpansion.scala ------------------------- @@ -1099,27 +1197,31 @@ Uri => EtaExpansion.scala Text => empty Language => Scala Symbols => 3 entries -Occurrences => 12 entries +Occurrences => 8 entries +Synthetics => 5 entries Symbols: -example/EtaExpansion# => class EtaExpansion -example/EtaExpansion#``(). => primary ctor -local0 => param x +example/EtaExpansion# => class EtaExpansion extends Object { self: EtaExpansion => +1 decls } +example/EtaExpansion#``(). => primary ctor (): EtaExpansion +local0 => param x: Int Occurrences: [0:8..0:15): example <- example/ [2:6..2:18): EtaExpansion <- example/EtaExpansion# -[3:2..3:2): <- example/EtaExpansion#``(). [3:2..3:6): Some -> scala/Some. -[3:6..3:6): -> scala/Some.apply(). [3:10..3:13): map -> scala/Option#map(). [3:14..3:22): identity -> scala/Predef.identity(). -[3:14..3:14): -> local0 [4:2..4:6): List -> scala/package.List. -[4:6..4:6): -> scala/collection/IterableFactory#apply(). [4:10..4:18): foldLeft -> scala/collection/LinearSeqOps#foldLeft(). [4:25..4:26): + -> java/lang/String#`+`(). +Synthetics: +[3:2..3:13):Some(1).map => *[Int] +[3:2..3:6):Some => *.apply[Int] +[3:14..3:22):identity => *[Int] +[4:2..4:18):List(1).foldLeft => *[String] +[4:2..4:6):List => *.apply[Int] + expect/Example.scala -------------------- @@ -1129,14 +1231,14 @@ Uri => Example.scala Text => empty Language => Scala Symbols => 5 entries -Occurrences => 25 entries +Occurrences => 23 entries Symbols: -example/Example. => final object Example -example/Example.main(). 
=> method main -example/Example.main().(args) => param args -example/Example.x. => val method x -local0 => selfparam self +example/Example. => final object Example extends Object { self: Example.type => +3 decls } +example/Example.main(). => method main (param args: Array[String]): Unit +example/Example.main().(args) => param args: Array[String] +example/Example.x. => val method x ClassTag[Int] +local0 => selfparam self: Example.type Occurrences: [0:8..0:15): example <- example/ @@ -1151,7 +1253,6 @@ Occurrences: [5:23..5:30): mutable -> scala/collection/mutable/ [5:31..5:36): Stack -> scala/collection/mutable/Stack# [5:37..5:40): Int -> scala/Int# -[5:41..5:41): -> scala/collection/mutable/Stack#``(+1). [6:6..6:10): main <- example/Example.main(). [6:11..6:15): args <- example/Example.main().(args) [6:17..6:22): Array -> scala/Array# @@ -1163,7 +1264,6 @@ Occurrences: [9:16..9:23): reflect -> scala/reflect/ [9:24..9:32): classTag -> scala/reflect/package.classTag(). [9:33..9:36): Int -> scala/Int# -[9:37..9:37): -> scala/reflect/ClassTag.apply(). expect/Extension.scala ---------------------- @@ -1173,22 +1273,40 @@ Schema => SemanticDB v4 Uri => Extension.scala Text => empty Language => Scala -Symbols => 8 entries -Occurrences => 19 entries +Symbols => 26 entries +Occurrences => 50 entries +Synthetics => 1 entries Symbols: -ext/Extension$package. => final package object ext -ext/Extension$package.`#*#`(). => method #*# -ext/Extension$package.`#*#`().(i) => param i -ext/Extension$package.`#*#`().(s) => param s -ext/Extension$package.a. => val method a -ext/Extension$package.c. => val method c -ext/Extension$package.foo(). => method foo -ext/Extension$package.foo().(s) => param s +ext/Extension$package. => final package object ext extends Object { self: ext.type => +6 decls } +ext/Extension$package.`#*#`(). 
=> method #*# (param s: String)(param i: Int): Tuple2[String, Int] +ext/Extension$package.`#*#`().(i) => param i: Int +ext/Extension$package.`#*#`().(s) => param s: String +ext/Extension$package.a. => val method a Int +ext/Extension$package.c. => val method c Tuple2[String, Int] +ext/Extension$package.foo(). => method foo (param s: String): Int +ext/Extension$package.foo().(s) => param s: String +ext/Extension$package.readInto(). => method readInto [typeparam T ](param s: String)(implicit given param x$2: Read[T]): Option[T] +ext/Extension$package.readInto().(s) => param s: String +ext/Extension$package.readInto().(x$2) => implicit given param x$2: Read[T] +ext/Extension$package.readInto().[T] => typeparam T +ext/Functor# => trait Functor [typeparam F [type _ ]] extends Object { self: Functor[F] => +3 decls } +ext/Functor#[F] => typeparam F [type _ ] +ext/Functor#[F][_] => type _ +ext/Functor#``(). => primary ctor [typeparam F [type _ ]](): Functor[F] +ext/Functor#map(). => abstract method map [typeparam T , typeparam U ](param t: F[T])(param f: Function1[T, U]): F[U] +ext/Functor#map().(f) => param f: Function1[T, U] +ext/Functor#map().(t) => param t: F[T] +ext/Functor#map().[T] => typeparam T +ext/Functor#map().[U] => typeparam U +ext/Read# => trait Read [covariant typeparam T ] extends Object { self: Read[T] => +3 decls } +ext/Read#[T] => covariant typeparam T +ext/Read#``(). => primary ctor [covariant typeparam T ](): Read[T] +ext/Read#fromString(). => abstract method fromString (param s: String): Option[T] +ext/Read#fromString().(s) => param s: String Occurrences: [0:8..0:11): ext <- ext/ -[2:0..2:0): <- ext/Extension$package. [2:11..2:12): s <- ext/Extension$package.foo().(s) [2:11..2:12): s <- ext/Extension$package.`#*#`().(s) [2:14..2:20): String -> scala/Predef.String# @@ -1199,13 +1317,48 @@ Occurrences: [4:14..4:17): Int -> scala/Int# [4:21..4:27): String -> scala/Predef.String# [4:29..4:32): Int -> scala/Int# -[4:37..4:37): -> scala/Tuple2.apply(). 
[4:37..4:38): s -> ext/Extension$package.`#*#`().(s) [4:40..4:41): i -> ext/Extension$package.`#*#`().(i) [6:4..6:5): a <- ext/Extension$package.a. [6:14..6:17): foo -> ext/Extension$package.foo(). [8:4..8:5): c <- ext/Extension$package.c. [8:14..8:17): #*# -> ext/Extension$package.`#*#`(). +[10:6..10:10): Read <- ext/Read# +[10:12..10:13): T <- ext/Read#[T] +[11:6..11:16): fromString <- ext/Read#fromString(). +[11:17..11:18): s <- ext/Read#fromString().(s) +[11:20..11:26): String -> scala/Predef.String# +[11:29..11:35): Option -> scala/Option# +[11:36..11:37): T -> ext/Read#[T] +[13:11..13:12): s <- ext/Extension$package.readInto().(s) +[13:14..13:20): String -> scala/Predef.String# +[14:6..14:14): readInto <- ext/Extension$package.readInto(). +[14:15..14:16): T <- ext/Extension$package.readInto().[T] +[14:24..14:28): Read -> ext/Read# +[14:29..14:30): T -> ext/Extension$package.readInto().[T] +[14:34..14:40): Option -> scala/Option# +[14:41..14:42): T -> ext/Extension$package.readInto().[T] +[14:46..14:52): summon -> scala/Predef.summon(). +[14:53..14:57): Read -> ext/Read# +[14:58..14:59): T -> ext/Extension$package.readInto().[T] +[14:62..14:72): fromString -> ext/Read#fromString(). +[14:73..14:74): s -> ext/Extension$package.readInto().(s) +[16:6..16:13): Functor <- ext/Functor# +[16:14..16:15): F <- ext/Functor#[F] +[17:13..17:14): T <- ext/Functor#map().[T] +[17:16..17:17): t <- ext/Functor#map().(t) +[17:19..17:20): F -> ext/Functor#[F] +[17:21..17:22): T -> ext/Functor#map().[T] +[17:29..17:32): map <- ext/Functor#map(). 
+[17:33..17:34): U <- ext/Functor#map().[U] +[17:36..17:37): f <- ext/Functor#map().(f) +[17:39..17:40): T -> ext/Functor#map().[T] +[17:44..17:45): U -> ext/Functor#map().[U] +[17:48..17:49): F -> ext/Functor#[F] +[17:50..17:51): U -> ext/Functor#map().[U] + +Synthetics: +[14:46..14:61):summon[Read[T]] => *(x$2) expect/ForComprehension.scala ----------------------------- @@ -1216,106 +1369,71 @@ Uri => ForComprehension.scala Text => empty Language => Scala Symbols => 13 entries -Occurrences => 88 entries +Occurrences => 52 entries +Synthetics => 6 entries Symbols: -example/ForComprehension# => class ForComprehension -example/ForComprehension#``(). => primary ctor -local0 => param a -local1 => param b -local2 => val local c -local3 => param x$1 -local4 => param a -local5 => param b -local6 => param x$1 -local7 => val local c -local8 => val local d -local9 => val local e -local10 => param f +example/ForComprehension# => class ForComprehension extends Object { self: ForComprehension => +1 decls } +example/ForComprehension#``(). => primary ctor (): ForComprehension +local0 => param a: Int +local1 => param b: Int +local2 => val local c: Int +local3 => param x$1: Tuple2[Int, Int] +local4 => param a: Int +local5 => param b: Int +local6 => param x$1: Tuple2[Int, Int] +local7 => val local c: Int +local8 => val local d: Int +local9 => val local e: Tuple4[Int, Int, Int, Int] +local10 => param f: Tuple4[Int, Int, Int, Int] Occurrences: [0:8..0:15): example <- example/ [2:6..2:22): ForComprehension <- example/ForComprehension# -[3:2..3:2): <- example/ForComprehension#``(). [4:4..4:5): a <- local0 [4:9..4:13): List -> scala/package.List. -[4:13..4:13): -> scala/collection/IterableFactory#apply(). -[4:16..4:16): -> scala/collection/immutable/List#flatMap(). [5:4..5:5): b <- local1 -[5:4..5:4): -> scala/Tuple2.apply(). [5:4..5:5): b -> local1 -[5:4..5:4): -> local3 -[5:4..5:4): -> scala/Tuple2.unapply(). [5:9..5:13): List -> scala/package.List. 
-[5:13..5:13): -> scala/collection/IterableFactory#apply(). -[5:16..5:16): -> scala/collection/IterableOps#withFilter(). [6:7..6:8): b -> local1 [6:9..6:10): > -> scala/Int#`>`(+3). -[6:12..6:12): -> scala/collection/WithFilter#map(). [7:4..7:5): c <- local2 [7:4..7:5): c -> local2 [7:8..7:9): a -> local0 [7:10..7:11): + -> scala/Int#`+`(+4). [7:12..7:13): b -> local1 -[7:13..7:13): -> scala/collection/immutable/List#map(). -[8:11..8:11): -> scala/Tuple3.apply(). [8:11..8:12): a -> local0 [8:14..8:15): b -> local1 [8:17..8:18): c -> local2 [10:4..10:5): a <- local4 [10:9..10:13): List -> scala/package.List. -[10:13..10:13): -> scala/collection/IterableFactory#apply(). -[10:16..10:16): -> scala/collection/immutable/List#flatMap(). [11:4..11:5): b <- local5 [11:9..11:13): List -> scala/package.List. -[11:13..11:13): -> scala/collection/IterableFactory#apply(). [11:14..11:15): a -> local4 -[11:16..11:16): -> scala/collection/IterableOps#withFilter(). -[13:6..13:6): -> scala/Tuple2.apply(). [13:6..13:7): a -> local4 [14:6..14:7): b -> local5 [15:6..15:8): == -> scala/Any#`==`(). -[15:10..15:10): -> scala/Tuple2.apply(). -[15:15..15:15): -> scala/collection/WithFilter#flatMap(). -[16:4..16:4): -> local6 -[16:4..16:4): -> scala/Tuple2.unapply(). -[17:6..17:6): -> scala/Tuple2.unapply(). [17:6..17:7): c <- local7 [18:6..18:7): d <- local8 [19:9..19:13): List -> scala/package.List. -[19:13..19:13): -> scala/collection/IterableFactory#apply(). -[19:15..19:15): -> scala/Tuple2.apply(). [19:15..19:16): a -> local4 [19:18..19:19): b -> local5 -[19:21..19:21): -> scala/collection/WithFilter#withFilter(). -[19:21..19:21): -> scala/collection/IterableOps#withFilter(). -[21:6..21:6): -> scala/Tuple4.apply(). [21:6..21:7): a -> local4 [22:6..22:7): b -> local5 [23:6..23:7): c -> local7 [24:6..24:7): d -> local8 [25:6..25:8): == -> scala/Any#`==`(). -[25:10..25:10): -> scala/Tuple4.apply(). -[25:21..25:21): -> scala/collection/WithFilter#map(). 
[26:4..26:5): e <- local9 -[26:4..26:4): -> scala/Tuple2.apply(). [26:4..26:5): e -> local9 -[27:6..27:6): -> scala/Tuple4.apply(). [27:6..27:7): a -> local4 [28:6..28:7): b -> local5 [29:6..29:7): c -> local7 [30:6..30:7): d -> local8 -[31:5..31:5): -> scala/collection/IterableOps#withFilter(). [32:7..32:8): e -> local9 [32:9..32:11): == -> scala/Any#`==`(). -[32:13..32:13): -> scala/Tuple4.apply(). -[32:24..32:24): -> scala/collection/WithFilter#flatMap(). [33:4..33:5): f <- local10 [33:9..33:13): List -> scala/package.List. -[33:13..33:13): -> scala/collection/IterableFactory#apply(). [33:14..33:15): e -> local9 -[33:16..33:16): -> scala/collection/immutable/List#map(). -[36:6..36:6): -> scala/Tuple6.apply(). [36:6..36:7): a -> local4 [37:6..37:7): b -> local5 [38:6..38:7): c -> local7 @@ -1323,6 +1441,14 @@ Occurrences: [40:6..40:7): e -> local9 [41:6..41:7): f -> local10 +Synthetics: +[4:9..4:13):List => *.apply[Int] +[5:9..5:13):List => *.apply[Int] +[10:9..10:13):List => *.apply[Int] +[11:9..11:13):List => *.apply[Int] +[19:9..19:13):List => *.apply[Tuple2[Int, Int]] +[33:9..33:13):List => *.apply[Tuple4[Int, Int, Int, Int]] + expect/Givens.scala ------------------- @@ -1332,38 +1458,39 @@ Uri => Givens.scala Text => empty Language => Scala Symbols => 29 entries -Occurrences => 70 entries +Occurrences => 65 entries +Synthetics => 3 entries Symbols: -a/b/Givens. => final object Givens -a/b/Givens.Monoid# => trait Monoid -a/b/Givens.Monoid#[A] => typeparam A -a/b/Givens.Monoid#``(). => primary ctor -a/b/Givens.Monoid#combine(). => abstract method combine -a/b/Givens.Monoid#combine().(x) => param x -a/b/Givens.Monoid#combine().(y) => param y -a/b/Givens.Monoid#empty(). => abstract method empty -a/b/Givens.foo(). => method foo -a/b/Givens.foo().(A) => implicit param A -a/b/Givens.foo().[A] => typeparam A -a/b/Givens.given_Monoid_String. => final implicit object given_Monoid_String -a/b/Givens.given_Monoid_String.combine(). 
=> method combine -a/b/Givens.given_Monoid_String.combine().(x) => param x -a/b/Givens.given_Monoid_String.combine().(y) => param y -a/b/Givens.given_Monoid_String.empty(). => method empty -a/b/Givens.goodbye1. => val method goodbye1 -a/b/Givens.hello1. => val method hello1 -a/b/Givens.int2String(). => final implicit macro int2String -a/b/Givens.sayGoodbye(). => method sayGoodbye -a/b/Givens.sayGoodbye().(any) => param any -a/b/Givens.sayGoodbye().[B] => typeparam B -a/b/Givens.sayHello(). => method sayHello -a/b/Givens.sayHello().(any) => param any -a/b/Givens.sayHello().[A] => typeparam A -a/b/Givens.saySoLong(). => method saySoLong -a/b/Givens.saySoLong().(any) => param any -a/b/Givens.saySoLong().[B] => typeparam B -a/b/Givens.soLong1. => val method soLong1 +a/b/Givens. => final object Givens extends Object { self: Givens.type => +12 decls } +a/b/Givens.Monoid# => trait Monoid [typeparam A ] extends Object { self: Monoid[A] => +4 decls } +a/b/Givens.Monoid#[A] => typeparam A +a/b/Givens.Monoid#``(). => primary ctor [typeparam A ](): Monoid[A] +a/b/Givens.Monoid#combine(). => abstract method combine (param x: A)(param y: A): A +a/b/Givens.Monoid#combine().(x) => param x: A +a/b/Givens.Monoid#combine().(y) => param y: A +a/b/Givens.Monoid#empty(). => abstract method empty => A +a/b/Givens.foo(). => method foo [typeparam A ](implicit given param A: Monoid[A]): A +a/b/Givens.foo().(A) => implicit given param A: Monoid[A] +a/b/Givens.foo().[A] => typeparam A +a/b/Givens.given_Monoid_String. => final implicit given object given_Monoid_String extends Object with Monoid[String] { self: given_Monoid_String.type => +3 decls } +a/b/Givens.given_Monoid_String.combine(). => method combine (param x: String)(param y: String): String <: a/b/Givens.Monoid#combine(). +a/b/Givens.given_Monoid_String.combine().(x) => param x: String +a/b/Givens.given_Monoid_String.combine().(y) => param y: String +a/b/Givens.given_Monoid_String.empty(). 
=> method empty => String <: a/b/Givens.Monoid#empty(). +a/b/Givens.goodbye1. => val method goodbye1 String +a/b/Givens.hello1. => val method hello1 String +a/b/Givens.int2String(). => final implicit given inline macro int2String => Conversion[Int, String] +a/b/Givens.sayGoodbye(). => method sayGoodbye [typeparam B ](param any: B): String +a/b/Givens.sayGoodbye().(any) => param any: B +a/b/Givens.sayGoodbye().[B] => typeparam B +a/b/Givens.sayHello(). => method sayHello [typeparam A ](param any: A): String +a/b/Givens.sayHello().(any) => param any: A +a/b/Givens.sayHello().[A] => typeparam A +a/b/Givens.saySoLong(). => method saySoLong [typeparam B ](param any: B): String +a/b/Givens.saySoLong().(any) => param any: B +a/b/Givens.saySoLong().[B] => typeparam B +a/b/Givens.soLong1. => val method soLong1 String Occurrences: [0:8..0:9): a <- a/ @@ -1373,7 +1500,6 @@ Occurrences: [5:16..5:19): any <- a/b/Givens.sayHello().(any) [5:21..5:22): A -> a/b/Givens.sayHello().[A] [6:8..6:16): sayHello <- a/b/Givens.sayHello(). -[6:21..6:21): -> scala/StringContext.apply(). [6:34..6:37): any -> a/b/Givens.sayHello().(any) [6:37..6:38): " -> scala/StringContext#s(). [8:13..8:14): B <- a/b/Givens.sayGoodbye().[B] @@ -1383,11 +1509,9 @@ Occurrences: [8:21..8:22): B -> a/b/Givens.sayGoodbye().[B] [8:21..8:22): B -> a/b/Givens.saySoLong().[B] [9:8..9:18): sayGoodbye <- a/b/Givens.sayGoodbye(). -[9:23..9:23): -> scala/StringContext.apply(). [9:38..9:41): any -> a/b/Givens.sayGoodbye().(any) [9:41..9:42): " -> scala/StringContext#s(). [10:8..10:17): saySoLong <- a/b/Givens.saySoLong(). -[10:22..10:22): -> scala/StringContext.apply(). [10:37..10:40): any -> a/b/Givens.saySoLong().(any) [10:40..10:41): " -> scala/StringContext#s(). [12:6..12:12): hello1 <- a/b/Givens.hello1. @@ -1397,7 +1521,6 @@ Occurrences: [14:6..14:13): soLong1 <- a/b/Givens.soLong1. [14:18..14:27): saySoLong -> a/b/Givens.saySoLong(). 
[16:8..16:14): Monoid <- a/b/Givens.Monoid# -[16:14..16:14): <- a/b/Givens.Monoid#``(). [16:15..16:16): A <- a/b/Givens.Monoid#[A] [17:8..17:13): empty <- a/b/Givens.Monoid#empty(). [17:15..17:16): A -> a/b/Givens.Monoid#[A] @@ -1407,7 +1530,6 @@ Occurrences: [18:33..18:34): y <- a/b/Givens.Monoid#combine().(y) [18:36..18:37): A -> a/b/Givens.Monoid#[A] [18:40..18:41): A -> a/b/Givens.Monoid#[A] -[20:8..20:27): Monoid[String] with <- a/b/Givens.given_Monoid_String. [20:8..20:14): Monoid -> a/b/Givens.Monoid# [20:15..20:21): String -> scala/Predef.String# [21:8..21:13): empty <- a/b/Givens.given_Monoid_String.empty(). @@ -1437,6 +1559,11 @@ Occurrences: [26:57..26:58): A -> a/b/Givens.foo().(A) [26:59..26:64): empty -> a/b/Givens.Monoid#empty(). +Synthetics: +[12:17..12:25):sayHello => *[Int] +[13:19..13:29):sayGoodbye => *[Int] +[14:18..14:27):saySoLong => *[Int] + expect/ImplicitConversion.scala ------------------------------- @@ -1446,32 +1573,33 @@ Uri => ImplicitConversion.scala Text => empty Language => Scala Symbols => 23 entries -Occurrences => 62 entries +Occurrences => 50 entries +Synthetics => 6 entries Symbols: -example/ImplicitConversion# => class ImplicitConversion -example/ImplicitConversion#``(). => primary ctor -example/ImplicitConversion#a. => val method a -example/ImplicitConversion#b. => val method b -example/ImplicitConversion#char. => val method char -example/ImplicitConversion#message. => val method message -example/ImplicitConversion#number. => val method number -example/ImplicitConversion#string2Number(). => implicit method string2Number -example/ImplicitConversion#string2Number().(string) => param string -example/ImplicitConversion#tuple. => val method tuple -example/ImplicitConversion#x. => val method x -example/ImplicitConversion. 
=> final object ImplicitConversion -example/ImplicitConversion.newAny2stringadd# => final class newAny2stringadd -example/ImplicitConversion.newAny2stringadd#[A] => typeparam A -example/ImplicitConversion.newAny2stringadd#`+`(). => method + -example/ImplicitConversion.newAny2stringadd#`+`().(other) => param other -example/ImplicitConversion.newAny2stringadd#``(). => primary ctor -example/ImplicitConversion.newAny2stringadd#``().(self) => param self -example/ImplicitConversion.newAny2stringadd#self. => val method self -example/ImplicitConversion.newAny2stringadd(). => final implicit method newAny2stringadd -example/ImplicitConversion.newAny2stringadd().(self) => param self -example/ImplicitConversion.newAny2stringadd().[A] => typeparam A -example/ImplicitConversion.newAny2stringadd. => final object newAny2stringadd +example/ImplicitConversion# => class ImplicitConversion extends Object { self: ImplicitConversion => +9 decls } +example/ImplicitConversion#``(). => primary ctor (): ImplicitConversion +example/ImplicitConversion#a. => val method a Int +example/ImplicitConversion#b. => val method b Long +example/ImplicitConversion#char. => val method char Char +example/ImplicitConversion#message. => val method message String +example/ImplicitConversion#number. => val method number Int +example/ImplicitConversion#string2Number(). => implicit method string2Number (param string: String): Int +example/ImplicitConversion#string2Number().(string) => param string: String +example/ImplicitConversion#tuple. => val method tuple Tuple2[Int, Int] +example/ImplicitConversion#x. => val method x Int +example/ImplicitConversion. 
=> final object ImplicitConversion extends Object { self: ImplicitConversion.type => +6 decls } +example/ImplicitConversion.newAny2stringadd# => final implicit class newAny2stringadd [typeparam A ] extends AnyVal { self: newAny2stringadd[A] => +4 decls } +example/ImplicitConversion.newAny2stringadd#[A] => typeparam A +example/ImplicitConversion.newAny2stringadd#`+`(). => method + (param other: String): String +example/ImplicitConversion.newAny2stringadd#`+`().(other) => param other: String +example/ImplicitConversion.newAny2stringadd#``(). => primary ctor [typeparam A ](param self: A): newAny2stringadd[A] +example/ImplicitConversion.newAny2stringadd#``().(self) => param self: A +example/ImplicitConversion.newAny2stringadd#self. => private val method self A +example/ImplicitConversion.newAny2stringadd(). => final implicit method newAny2stringadd [typeparam A ](param self: A): newAny2stringadd[A] +example/ImplicitConversion.newAny2stringadd().(self) => param self: A +example/ImplicitConversion.newAny2stringadd().[A] => typeparam A +example/ImplicitConversion.newAny2stringadd. => final object newAny2stringadd extends Object { self: newAny2stringadd.type => +2 decls } Occurrences: [0:8..0:15): example <- example/ @@ -1479,7 +1607,6 @@ Occurrences: [2:13..2:21): language -> scala/language. [2:22..2:41): implicitConversions -> scala/language.implicitConversions. [4:6..4:24): ImplicitConversion <- example/ImplicitConversion# -[5:2..5:2): <- example/ImplicitConversion#``(). [5:9..5:27): ImplicitConversion -> example/ImplicitConversion. [6:15..6:28): string2Number <- example/ImplicitConversion#string2Number(). [7:6..7:12): string <- example/ImplicitConversion#string2Number().(string) @@ -1488,45 +1615,34 @@ Occurrences: [9:6..9:13): message <- example/ImplicitConversion#message. [10:6..10:12): number <- example/ImplicitConversion#number. [11:6..11:11): tuple <- example/ImplicitConversion#tuple. -[11:15..11:15): -> scala/Tuple2.apply(). 
[12:6..12:10): char <- example/ImplicitConversion#char. [12:12..12:16): Char -> scala/Char# -[15:2..15:2): -> scala/Predef.augmentString(). [15:2..15:9): message -> example/ImplicitConversion#message. [16:5..16:16): stripSuffix -> scala/collection/StringOps#stripSuffix(). -[17:2..17:2): -> example/ImplicitConversion.newAny2stringadd(). [17:2..17:7): tuple -> example/ImplicitConversion#tuple. [17:8..17:9): + -> example/ImplicitConversion.newAny2stringadd#`+`(). [20:6..20:7): x <- example/ImplicitConversion#x. [20:9..20:12): Int -> scala/Int# -[20:15..20:15): -> example/ImplicitConversion#string2Number(). [20:15..20:22): message -> example/ImplicitConversion#message. -[23:4..23:4): -> scala/StringContext.apply(). [23:11..23:18): message -> example/ImplicitConversion#message. [23:20..23:26): number -> example/ImplicitConversion#number. [23:26..23:27): " -> scala/StringContext#s(). -[24:2..24:2): -> scala/Predef.augmentString(). -[24:6..24:6): -> scala/StringContext.apply(). [25:7..25:14): message -> example/ImplicitConversion#message. [26:7..26:13): number -> example/ImplicitConversion#number. [26:15..26:16): " -> scala/StringContext#s(). [26:17..26:28): stripMargin -> scala/collection/StringOps#stripMargin(+1). [28:6..28:7): a <- example/ImplicitConversion#a. [28:9..28:12): Int -> scala/Int# -[28:15..28:15): -> scala/Char.char2int(). [28:15..28:19): char -> example/ImplicitConversion#char. [29:6..29:7): b <- example/ImplicitConversion#b. [29:9..29:13): Long -> scala/Long# -[29:16..29:16): -> scala/Char.char2long(). [29:16..29:20): char -> example/ImplicitConversion#char. [32:7..32:25): ImplicitConversion <- example/ImplicitConversion. [33:23..33:39): newAny2stringadd <- example/ImplicitConversion.newAny2stringadd# -[33:39..33:39): <- example/ImplicitConversion.newAny2stringadd#``(). [33:40..33:41): A <- example/ImplicitConversion.newAny2stringadd#[A] [33:55..33:59): self <- example/ImplicitConversion.newAny2stringadd#self. 
[33:61..33:62): A -> example/ImplicitConversion.newAny2stringadd#[A] [33:72..33:78): AnyVal -> scala/AnyVal# -[33:78..33:78): -> scala/AnyVal#``(). [34:8..34:9): + <- example/ImplicitConversion.newAny2stringadd#`+`(). [34:10..34:15): other <- example/ImplicitConversion.newAny2stringadd#`+`().(other) [34:17..34:23): String -> scala/Predef.String# @@ -1537,6 +1653,16 @@ Occurrences: [34:56..34:57): + -> java/lang/String#`+`(). [34:58..34:63): other -> example/ImplicitConversion.newAny2stringadd#`+`().(other) +Synthetics: +[15:2..15:9):message => augmentString(*) +[17:2..17:7):tuple => newAny2stringadd[Tuple2[Int, Int]](*) +[20:15..20:22):message => string2Number(*) +[24:2..26:16):s"""Hello + |$message + |$number""" => augmentString(*) +[28:15..28:19):char => char2int(*) +[29:16..29:20):char => char2long(*) + expect/Imports.scala -------------------- @@ -1545,16 +1671,30 @@ Schema => SemanticDB v4 Uri => Imports.scala Text => empty Language => Scala -Symbols => 0 entries -Occurrences => 4 entries +Symbols => 2 entries +Occurrences => 16 entries Symbols: +_empty_/Imports$package. => final package object _empty_ extends Object { self: _empty_.type => +2 decls } +_empty_/Imports$package.m. => val method m HashMap[Int, Int] Occurrences: [0:7..0:12): scala -> scala/ [0:13..0:17): util -> scala/util/ [0:18..0:25): control -> scala/util/control/ [0:26..0:34): NonFatal -> scala/util/control/NonFatal. +[1:7..1:12): scala -> scala/ +[1:13..1:23): collection -> scala/collection/ +[1:24..1:33): immutable -> scala/collection/immutable/ +[1:35..1:42): HashMap -> scala/collection/immutable/HashMap. +[1:35..1:42): HashMap -> scala/collection/immutable/HashMap# +[3:4..3:5): m <- _empty_/Imports$package.m. +[3:7..3:9): HM -> scala/collection/immutable/HashMap# +[3:10..3:13): Int -> scala/Int# +[3:15..3:18): Int -> scala/Int# +[3:22..3:24): HM -> scala/collection/immutable/HashMap. 
+[3:25..3:28): Int -> scala/Int# +[3:30..3:33): Int -> scala/Int# expect/InstrumentTyper.scala ---------------------------- @@ -1565,17 +1705,18 @@ Uri => InstrumentTyper.scala Text => empty Language => Scala Symbols => 8 entries -Occurrences => 55 entries +Occurrences => 52 entries +Synthetics => 2 entries Symbols: -example/InstrumentTyper# => class InstrumentTyper -example/InstrumentTyper#AnnotatedType# => type AnnotatedType -example/InstrumentTyper#``(). => primary ctor -example/InstrumentTyper#all(). => method all -example/InstrumentTyper#clazzOf. => final val method clazzOf -example/InstrumentTyper#singletonType(). => method singletonType -example/InstrumentTyper#singletonType().(x) => param x -local0 => selfparam self +example/InstrumentTyper# => class InstrumentTyper extends Object { self: AnyRef & InstrumentTyper => +5 decls } +example/InstrumentTyper#AnnotatedType# => type AnnotatedType = Int @param +example/InstrumentTyper#``(). => primary ctor (): InstrumentTyper +example/InstrumentTyper#all(). => method all => List[Matchable] +example/InstrumentTyper#clazzOf. => final val method clazzOf Option[Int] +example/InstrumentTyper#singletonType(). => method singletonType (param x: Predef.type): Nothing +example/InstrumentTyper#singletonType().(x) => param x: Predef.type +local0 => selfparam self: AnyRef Occurrences: [0:8..0:15): example <- example/ @@ -1592,12 +1733,10 @@ Occurrences: [5:7..5:12): types -> types/ [5:13..5:17): Test -> types/Test. [7:6..7:21): InstrumentTyper <- example/InstrumentTyper# -[7:24..7:24): <- example/InstrumentTyper#``(). [7:24..7:28): self <- local0 [7:30..7:36): AnyRef -> scala/AnyRef# [8:6..8:9): all <- example/InstrumentTyper#all(). [8:12..8:16): List -> scala/package.List. -[8:16..8:16): -> scala/collection/IterableFactory#apply(). [9:4..9:11): Literal -> types/Test.Literal. [9:12..9:15): int -> types/Test.Literal.int. [10:4..10:11): Literal -> types/Test.Literal. 
@@ -1621,7 +1760,6 @@ Occurrences: [19:4..19:11): Literal -> types/Test.Literal. [19:12..19:19): clazzOf -> types/Test.Literal.clazzOf. [20:4..20:8): List -> scala/package.List. -[20:8..20:8): -> scala/collection/IterableFactory#apply(). [22:7..22:20): AnnotatedType <- example/InstrumentTyper#AnnotatedType# [22:23..22:26): Int -> scala/Int# [22:28..22:33): param -> scala/annotation/meta/param# @@ -1634,6 +1772,10 @@ Occurrences: [24:30..24:36): Option -> scala/Option# [24:37..24:40): Int -> scala/Int# +Synthetics: +[8:12..8:16):List => *.apply[Matchable] +[20:4..20:8):List => *.apply[Nothing] + expect/InventedNames.scala -------------------------- @@ -1643,80 +1785,74 @@ Uri => InventedNames.scala Text => empty Language => Scala Symbols => 45 entries -Occurrences => 73 entries +Occurrences => 61 entries +Synthetics => 3 entries Symbols: -givens/InventedNames$package. => final package object givens -givens/InventedNames$package.`* *`. => final implicit lazy val method * * -givens/InventedNames$package.a. => val method a -givens/InventedNames$package.b. => val method b -givens/InventedNames$package.c. => val method c -givens/InventedNames$package.d. => val method d -givens/InventedNames$package.e. => val method e -givens/InventedNames$package.f. => val method f -givens/InventedNames$package.g. => val method g -givens/InventedNames$package.given_Char. => final implicit lazy val method given_Char -givens/InventedNames$package.given_Double(). => final implicit method given_Double -givens/InventedNames$package.given_Double().(x$1) => implicit param x$1 -givens/InventedNames$package.given_Float. => final implicit lazy val method given_Float -givens/InventedNames$package.given_List_T(). => final implicit method given_List_T -givens/InventedNames$package.given_List_T().[T] => typeparam T -givens/InventedNames$package.given_String. => final implicit lazy val method given_String -givens/InventedNames$package.given_X. 
=> final implicit object given_X -givens/InventedNames$package.given_X.doX(). => method doX -givens/InventedNames$package.given_Y# => class given_Y -givens/InventedNames$package.given_Y#``(). => primary ctor -givens/InventedNames$package.given_Y#``().(x$1) => implicit val param x$1 -givens/InventedNames$package.given_Y#doY(). => method doY -givens/InventedNames$package.given_Y#x$1. => implicit val method x$1 -givens/InventedNames$package.given_Y(). => final implicit method given_Y -givens/InventedNames$package.given_Y().(x$1) => implicit param x$1 -givens/InventedNames$package.given_Z_T# => class given_Z_T -givens/InventedNames$package.given_Z_T#[T] => typeparam T -givens/InventedNames$package.given_Z_T#``(). => primary ctor -givens/InventedNames$package.given_Z_T#doZ(). => method doZ -givens/InventedNames$package.given_Z_T(). => final implicit method given_Z_T -givens/InventedNames$package.given_Z_T().[T] => typeparam T -givens/InventedNames$package.intValue. => final implicit lazy val method intValue -givens/InventedNames$package.x. => val method x -givens/InventedNames$package.y. => val method y -givens/InventedNames$package.z. => val method z -givens/X# => trait X -givens/X#``(). => primary ctor -givens/X#doX(). => abstract method doX -givens/Y# => trait Y -givens/Y#``(). => primary ctor -givens/Y#doY(). => abstract method doY -givens/Z# => trait Z -givens/Z#[T] => typeparam T -givens/Z#``(). => primary ctor -givens/Z#doZ(). => abstract method doZ +givens/InventedNames$package. => final package object givens extends Object { self: givens.type => +24 decls } +givens/InventedNames$package.`* *`. => final implicit lazy val given method * * Long +givens/InventedNames$package.a. => val method a Int +givens/InventedNames$package.b. => val method b String +givens/InventedNames$package.c. => val method c Double +givens/InventedNames$package.d. => val method d List[Int] +givens/InventedNames$package.e. => val method e Char +givens/InventedNames$package.f. 
=> val method f Float +givens/InventedNames$package.g. => val method g Long +givens/InventedNames$package.given_Char. => final implicit lazy val given method given_Char Char +givens/InventedNames$package.given_Double(). => final implicit given method given_Double (implicit given param x$1: Int): Double +givens/InventedNames$package.given_Double().(x$1) => implicit given param x$1: Int +givens/InventedNames$package.given_Float. => final implicit lazy val given method given_Float Float +givens/InventedNames$package.given_List_T(). => final implicit given method given_List_T [typeparam T ]: List[T] +givens/InventedNames$package.given_List_T().[T] => typeparam T +givens/InventedNames$package.given_String. => final implicit lazy val given method given_String String +givens/InventedNames$package.given_X. => final implicit given object given_X extends Object with X { self: given_X.type => +2 decls } +givens/InventedNames$package.given_X.doX(). => method doX => Int <: givens/X#doX(). +givens/InventedNames$package.given_Y# => implicit given class given_Y extends Object with Y { self: given_Y => +3 decls } +givens/InventedNames$package.given_Y#``(). => primary ctor ()(implicit val given param x$1: X): given_Y +givens/InventedNames$package.given_Y#``().(x$1) => implicit val given param x$1: X +givens/InventedNames$package.given_Y#doY(). => method doY => String <: givens/Y#doY(). +givens/InventedNames$package.given_Y#x$1. => protected implicit val given method x$1 X +givens/InventedNames$package.given_Y(). => final implicit given method given_Y (implicit given param x$1: X): given_Y +givens/InventedNames$package.given_Y().(x$1) => implicit given param x$1: X +givens/InventedNames$package.given_Z_T# => implicit given class given_Z_T [typeparam T ] extends Object with Z[T] { self: given_Z_T[T] => +3 decls } +givens/InventedNames$package.given_Z_T#[T] => typeparam T +givens/InventedNames$package.given_Z_T#``(). 
=> primary ctor [typeparam T ](): given_Z_T[T] +givens/InventedNames$package.given_Z_T#doZ(). => method doZ => List[T] <: givens/Z#doZ(). +givens/InventedNames$package.given_Z_T(). => final implicit given method given_Z_T [typeparam T ]: given_Z_T[T] +givens/InventedNames$package.given_Z_T().[T] => typeparam T +givens/InventedNames$package.intValue. => final implicit lazy val given method intValue Int +givens/InventedNames$package.x. => val method x given_X.type +givens/InventedNames$package.y. => val method y given_Y +givens/InventedNames$package.z. => val method z given_Z_T[String] +givens/X# => trait X extends Object { self: X => +2 decls } +givens/X#``(). => primary ctor (): X +givens/X#doX(). => abstract method doX => Int +givens/Y# => trait Y extends Object { self: Y => +2 decls } +givens/Y#``(). => primary ctor (): Y +givens/Y#doY(). => abstract method doY => String +givens/Z# => trait Z [typeparam T ] extends Object { self: Z[T] => +3 decls } +givens/Z#[T] => typeparam T +givens/Z#``(). => primary ctor [typeparam T ](): Z[T] +givens/Z#doZ(). => abstract method doZ => List[T] Occurrences: [0:8..0:14): givens <- givens/ [2:6..2:7): X <- givens/X# -[3:2..3:2): <- givens/X#``(). [3:6..3:9): doX <- givens/X#doX(). [3:11..3:14): Int -> scala/Int# [5:6..5:7): Y <- givens/Y# -[6:2..6:2): <- givens/Y#``(). [6:6..6:9): doY <- givens/Y#doY(). [6:11..6:17): String -> scala/Predef.String# [8:6..8:7): Z <- givens/Z# -[8:7..8:7): <- givens/Z#``(). [8:8..8:9): T <- givens/Z#[T] [9:6..9:9): doZ <- givens/Z#doZ(). [9:11..9:15): List -> scala/package.List# [9:16..9:17): T -> givens/Z#[T] -[13:0..13:0): <- givens/InventedNames$package. [13:6..13:14): intValue <- givens/InventedNames$package.intValue. [13:16..13:19): Int -> scala/Int# -[14:6..14:6): <- givens/InventedNames$package.given_String. [14:6..14:12): String -> scala/Predef.String# -[15:6..15:6): <- givens/InventedNames$package.given_Double(). 
[15:13..15:16): Int -> scala/Int# [15:19..15:25): Double -> scala/Double# -[16:6..16:6): <- givens/InventedNames$package.given_List_T(). [16:7..16:8): T <- givens/InventedNames$package.given_List_T().[T] [16:11..16:15): List -> scala/package.List# [16:16..16:17): T -> givens/InventedNames$package.given_List_T().[T] @@ -1727,14 +1863,11 @@ Occurrences: [18:21..18:26): Float -> scala/Float# [19:7..19:10): * * <- givens/InventedNames$package.`* *`. [19:13..19:17): Long -> scala/Long# -[21:6..22:0): <- givens/InventedNames$package.given_X. [21:6..21:7): X -> givens/X# [22:6..22:9): doX <- givens/InventedNames$package.given_X.doX(). -[24:13..24:13): <- givens/InventedNames$package.given_Y#``(). [24:13..24:14): X -> givens/X# [24:17..24:18): Y -> givens/Y# [25:6..25:9): doY <- givens/InventedNames$package.given_Y#doY(). -[27:7..27:7): <- givens/InventedNames$package.given_Z_T#``(). [27:7..27:8): T <- givens/InventedNames$package.given_Z_T#[T] [27:11..27:12): Z -> givens/Z# [27:13..27:14): T -> givens/InventedNames$package.given_Z_T#[T] @@ -1748,7 +1881,6 @@ Occurrences: [33:8..33:20): given_String -> givens/InventedNames$package.given_String. [34:4..34:5): c <- givens/InventedNames$package.c. [34:8..34:20): given_Double -> givens/InventedNames$package.given_Double(). -[34:20..34:20): -> givens/InventedNames$package.intValue. [35:4..35:5): d <- givens/InventedNames$package.d. [35:8..35:20): given_List_T -> givens/InventedNames$package.given_List_T(). [35:21..35:24): Int -> scala/Int# @@ -1762,11 +1894,15 @@ Occurrences: [39:8..39:15): given_X -> givens/InventedNames$package.given_X. [40:4..40:5): y <- givens/InventedNames$package.y. [40:8..40:15): given_Y -> givens/InventedNames$package.given_Y(). -[40:15..40:15): -> givens/InventedNames$package.given_X. [41:4..41:5): z <- givens/InventedNames$package.z. [41:8..41:17): given_Z_T -> givens/InventedNames$package.given_Z_T(). 
[41:18..41:24): String -> scala/Predef.String# +Synthetics: +[24:0..24:0): => *(x$1) +[34:8..34:20):given_Double => *(intValue) +[40:8..40:15):given_Y => *(given_X) + expect/Issue1749.scala ---------------------- @@ -1776,16 +1912,17 @@ Uri => Issue1749.scala Text => empty Language => Scala Symbols => 7 entries -Occurrences => 30 entries +Occurrences => 22 entries +Synthetics => 3 entries Symbols: -example/Issue1749# => class Issue1749 -example/Issue1749#``(). => primary ctor -example/Issue1749#x1. => val method x1 -example/Issue1749#x2. => val method x2 -example/Issue1854# => class Issue1854 -example/Issue1854#``(). => primary ctor -example/Issue1854#map. => val method map +example/Issue1749# => class Issue1749 extends Object { self: Issue1749 => +3 decls } +example/Issue1749#``(). => primary ctor (): Issue1749 +example/Issue1749#x1. => val method x1 Int +example/Issue1749#x2. => val method x2 Int +example/Issue1854# => class Issue1854 extends Object { self: Issue1854 => +2 decls } +example/Issue1854#``(). => primary ctor (): Issue1854 +example/Issue1854#map. => val method map Map[String, String] Occurrences: [1:8..1:15): example <- example/ @@ -1794,21 +1931,14 @@ Occurrences: [3:18..3:25): Ordered -> scala/math/Ordered. [3:26..3:43): orderingToOrdered -> scala/math/Ordered.orderingToOrdered(). [5:6..5:15): Issue1749 <- example/Issue1749# -[6:2..6:2): <- example/Issue1749#``(). [6:6..6:8): x1 <- example/Issue1749#x1. [7:6..7:8): x2 <- example/Issue1749#x2. -[8:2..8:2): -> scala/math/Ordered.orderingToOrdered(). -[8:3..8:3): -> scala/Tuple2.apply(). [8:3..8:5): x1 -> example/Issue1749#x1. [8:7..8:9): x1 -> example/Issue1749#x1. -[8:10..8:10): -> scala/math/Ordering.Tuple2(). -[8:10..8:10): -> scala/math/Ordering.Int. [9:5..9:12): compare -> scala/math/Ordered#compare(). -[9:14..9:14): -> scala/Tuple2.apply(). [9:14..9:16): x2 -> example/Issue1749#x2. [9:18..9:20): x2 -> example/Issue1749#x2. 
[12:6..12:15): Issue1854 <- example/Issue1854# -[13:2..13:2): <- example/Issue1854#``(). [13:6..13:9): map <- example/Issue1854#map. [13:12..13:22): collection -> scala/collection/ [13:23..13:30): mutable -> scala/collection/mutable/ @@ -1817,7 +1947,11 @@ Occurrences: [13:41..13:47): String -> scala/Predef.String# [13:49..13:55): String -> scala/Predef.String# [14:2..14:5): map -> example/Issue1854#map. -[14:5..14:5): -> scala/collection/mutable/MapOps#update(). + +Synthetics: +[8:2..8:10):(x1, x1) => orderingToOrdered[Tuple2[Int, Int]](*) +[8:2..8:10):(x1, x1) => *(Tuple2(Int, Int)) +[8:10..8:10): => *(Int, Int) expect/Local.scala ------------------ @@ -1828,28 +1962,31 @@ Uri => Local.scala Text => empty Language => Scala Symbols => 6 entries -Occurrences => 11 entries +Occurrences => 10 entries +Synthetics => 1 entries Symbols: -example/Local# => class Local -example/Local#``(). => primary ctor -example/Local#a(). => method a -local0 => local id -local1 => typeparam A -local2 => param a +example/Local# => class Local extends Object { self: Local => +2 decls } +example/Local#``(). => primary ctor (): Local +example/Local#a(). => method a (): Int +local0 => typeparam A +local1 => param a: A +local2 => local id: [typeparam A ](param a: A): A Occurrences: [0:8..0:15): example <- example/ [2:6..2:11): Local <- example/Local# -[3:2..3:2): <- example/Local#``(). [3:6..3:7): a <- example/Local#a(). 
-[4:8..4:10): id <- local0 -[4:11..4:12): A <- local1 -[4:14..4:15): a <- local2 -[4:17..4:18): A -> local1 -[4:21..4:22): A -> local1 -[4:25..4:26): a -> local2 -[5:4..5:6): id -> local0 +[4:8..4:10): id <- local2 +[4:11..4:12): A <- local0 +[4:14..4:15): a <- local1 +[4:17..4:18): A -> local0 +[4:21..4:22): A -> local0 +[4:25..4:26): a -> local1 +[5:4..5:6): id -> local2 + +Synthetics: +[5:4..5:6):id => *[Int] expect/Locals.scala ------------------- @@ -1860,12 +1997,13 @@ Uri => Locals.scala Text => empty Language => Scala Symbols => 3 entries -Occurrences => 7 entries +Occurrences => 6 entries +Synthetics => 1 entries Symbols: -local0 => val local x -locals/Test. => final object Test -locals/Test.xs. => val method xs +local0 => val local x: Int +locals/Test. => final object Test extends Object { self: Test.type => +2 decls } +locals/Test.xs. => val method xs List[Int] Occurrences: [0:8..0:14): locals <- locals/ @@ -1873,9 +2011,11 @@ Occurrences: [3:6..3:8): xs <- locals/Test.xs. [4:8..4:9): x <- local0 [5:4..5:8): List -> scala/package.List. -[5:8..5:8): -> scala/collection/IterableFactory#apply(). [5:9..5:10): x -> local0 +Synthetics: +[5:4..5:8):List => *.apply[Int] + expect/MetacJava.scala ---------------------- @@ -1885,26 +2025,25 @@ Uri => MetacJava.scala Text => empty Language => Scala Symbols => 10 entries -Occurrences => 72 entries +Occurrences => 62 entries Symbols: -example/MetacJava# => class MetacJava -example/MetacJava#``(). => primary ctor -example/MetacJava#coin. => val method coin -example/MetacJava#entry. => val method entry -example/MetacJava#inner. => val method inner -example/MetacJava#interface. => val method interface -example/MetacJava#nonStatic. => val method nonStatic -example/MetacJava#overload1. => val method overload1 -example/MetacJava#overload2. => val method overload2 -example/MetacJava#staticInner. 
=> val method staticInner +example/MetacJava# => class MetacJava extends Object { self: MetacJava => +9 decls } +example/MetacJava#``(). => primary ctor (): MetacJava +example/MetacJava#coin. => val method coin Coin +example/MetacJava#entry. => val method entry Entry[Int, Int] +example/MetacJava#inner. => val method inner MetacJava +example/MetacJava#interface. => val method interface Interface +example/MetacJava#nonStatic. => val method nonStatic staticInner.NonStatic +example/MetacJava#overload1. => val method overload1 inner.Overload1 +example/MetacJava#overload2. => val method overload2 inner.Overload2 +example/MetacJava#staticInner. => val method staticInner StaticInner Occurrences: [0:8..0:15): example <- example/ [2:7..2:10): com -> com/ [2:11..2:17): javacp -> com/javacp/ [4:6..4:15): MetacJava <- example/MetacJava# -[5:2..5:2): <- example/MetacJava#``(). [5:2..5:8): javacp -> com/javacp/ [5:9..5:18): MetacJava -> com/javacp/MetacJava# [5:19..5:30): StaticInner -> com/javacp/MetacJava#StaticInner# @@ -1912,39 +2051,31 @@ Occurrences: [6:6..6:12): javacp -> com/javacp/ [6:13..6:22): MetacJava -> com/javacp/MetacJava# [6:23..6:34): StaticInner -> com/javacp/MetacJava#StaticInner# -[6:34..6:34): -> com/javacp/MetacJava#StaticInner#``(). [6:37..6:48): isNotStatic -> com/javacp/MetacJava#StaticInner#isNotStatic(). [7:6..7:11): inner <- example/MetacJava#inner. [7:18..7:24): javacp -> com/javacp/ [7:25..7:34): MetacJava -> com/javacp/MetacJava# -[7:34..7:34): -> com/javacp/MetacJava#``(). [8:6..8:15): overload1 <- example/MetacJava#overload1. [8:22..8:27): inner -> example/MetacJava#inner. [8:28..8:37): Overload1 -> com/javacp/MetacJava#Overload1# -[8:37..8:37): -> com/javacp/MetacJava#Overload1#``(). [9:6..9:15): overload2 <- example/MetacJava#overload2. [9:22..9:27): inner -> example/MetacJava#inner. [9:28..9:37): Overload2 -> com/javacp/MetacJava#Overload2# -[9:37..9:37): -> com/javacp/MetacJava#Overload2#``(). [10:2..10:7): inner -> example/MetacJava#inner. 
[10:8..10:16): overload -> com/javacp/MetacJava#overload(). [10:21..10:30): overload1 -> example/MetacJava#overload1. [10:31..10:32): A -> com/javacp/MetacJava#Overload1#A# -[10:32..10:32): -> com/javacp/MetacJava#Overload1#A#``(). [11:2..11:7): inner -> example/MetacJava#inner. [11:8..11:16): overload -> com/javacp/MetacJava#overload(+1). [11:21..11:30): overload2 -> example/MetacJava#overload2. [11:31..11:32): A -> com/javacp/MetacJava#Overload2#A# -[11:32..11:32): -> com/javacp/MetacJava#Overload2#A#``(). [12:6..12:17): staticInner <- example/MetacJava#staticInner. [12:24..12:30): javacp -> com/javacp/ [12:31..12:40): MetacJava -> com/javacp/MetacJava# [12:41..12:52): StaticInner -> com/javacp/MetacJava#StaticInner# -[12:52..12:52): -> com/javacp/MetacJava#StaticInner#``(). [13:6..13:15): nonStatic <- example/MetacJava#nonStatic. [13:22..13:33): staticInner -> example/MetacJava#staticInner. [13:34..13:43): NonStatic -> com/javacp/MetacJava#StaticInner#NonStatic# -[13:43..13:43): -> com/javacp/MetacJava#StaticInner#NonStatic#``(). [14:2..14:11): nonStatic -> example/MetacJava#nonStatic. [14:12..14:18): method -> com/javacp/MetacJava#StaticInner#NonStatic#method(). [14:19..14:28): nonStatic -> example/MetacJava#nonStatic. @@ -1955,7 +2086,6 @@ Occurrences: [15:39..15:48): MetacJava -> com/javacp/MetacJava# [15:49..15:58): Overload3 -> com/javacp/MetacJava#Overload3# [15:59..15:60): A -> com/javacp/MetacJava#Overload3#A# -[15:60..15:60): -> com/javacp/MetacJava#Overload3#A#``(). [16:6..16:15): interface <- example/MetacJava#interface. [16:17..16:23): javacp -> com/javacp/ [16:24..16:33): Interface -> com/javacp/Interface# @@ -1982,21 +2112,20 @@ Uri => MethodUsages.scala Text => empty Language => Scala Symbols => 3 entries -Occurrences => 85 entries +Occurrences => 80 entries +Synthetics => 2 entries Symbols: -example/MethodUsages# => class MethodUsages -example/MethodUsages#``(). => primary ctor -example/MethodUsages#m. 
=> val method m +example/MethodUsages# => class MethodUsages extends Object { self: MethodUsages => +2 decls } +example/MethodUsages#``(). => primary ctor (): MethodUsages +example/MethodUsages#m. => val method m Methods[Int] Occurrences: [0:8..0:15): example <- example/ [2:6..2:18): MethodUsages <- example/MethodUsages# -[3:2..3:2): <- example/MethodUsages#``(). [3:6..3:7): m <- example/MethodUsages#m. [3:14..3:21): Methods -> example/Methods# [3:22..3:25): Int -> scala/Int# -[3:26..3:26): -> example/Methods#``(). [4:2..4:3): m -> example/MethodUsages#m. [4:4..4:6): m1 -> example/Methods#m1(). [5:2..5:3): m -> example/MethodUsages#m. @@ -2016,7 +2145,6 @@ Occurrences: [11:11..11:12): m -> example/MethodUsages#m. [11:13..11:17): List -> example/Methods#List# [11:18..11:21): Int -> scala/Int# -[11:22..11:22): -> example/Methods#List#``(). [12:2..12:3): m -> example/MethodUsages#m. [12:4..12:6): m6 -> example/Methods#m6(+2). [12:7..12:10): Nil -> scala/package.Nil. @@ -2026,8 +2154,6 @@ Occurrences: [13:14..13:15): m -> example/MethodUsages#m. [13:16..13:20): List -> example/Methods#List# [13:21..13:24): Int -> scala/Int# -[13:25..13:25): -> example/Methods#List#``(). -[13:26..13:26): -> scala/math/Ordering.Int. [14:2..14:3): m -> example/MethodUsages#m. [14:4..14:11): `m8().` -> example/Methods#`m8().`(). [15:2..15:3): m -> example/MethodUsages#m. @@ -2076,6 +2202,10 @@ Occurrences: [34:4..34:7): m20 -> example/Methods#m20(+2). [34:8..34:9): m -> example/Methods#m17.m(). 
+Synthetics: +[13:2..13:6):m.m7 => *[Int] +[13:2..13:26):m.m7(m, new m.List[Int]) => *(Int) + expect/Methods.scala -------------------- @@ -2084,87 +2214,92 @@ Schema => SemanticDB v4 Uri => Methods.scala Text => empty Language => Scala -Symbols => 77 entries -Occurrences => 149 entries +Symbols => 82 entries +Occurrences => 153 entries Symbols: -example/Methods# => class Methods -example/Methods#AList# => type AList -example/Methods#AList#[T] => typeparam T -example/Methods#List# => class List -example/Methods#List#[T] => typeparam T -example/Methods#List#``(). => primary ctor -example/Methods#[T] => typeparam T -example/Methods#``(). => primary ctor -example/Methods#`m8().`(). => method m8(). -example/Methods#`m9().`# => class m9(). -example/Methods#`m9().`#``(). => primary ctor -example/Methods#`m20_=`(). => var method m20_= -example/Methods#`m20_=`().(x$1) => param x$1 -example/Methods#m1(). => method m1 -example/Methods#m2(). => method m2 -example/Methods#m3(). => method m3 -example/Methods#m3().(x) => param x -example/Methods#m4(). => method m4 -example/Methods#m4().(x) => param x -example/Methods#m4().(y) => param y -example/Methods#m5(). => method m5 -example/Methods#m5().(x) => param x -example/Methods#m5(+1). => method m5 -example/Methods#m5(+1).(x) => param x -example/Methods#m6(). => method m6 -example/Methods#m6().(x) => param x -example/Methods#m6(+1). => method m6 -example/Methods#m6(+1).(x) => param x -example/Methods#m6(+2). => method m6 -example/Methods#m6(+2).(x) => param x -example/Methods#m7(). => method m7 -example/Methods#m7().(c) => param c -example/Methods#m7().(l) => param l -example/Methods#m7().[U] => typeparam U -example/Methods#m9(). => method m9 -example/Methods#m9().(x) => param x -example/Methods#m10(). => method m10 -example/Methods#m10().(x) => param x -example/Methods#m11(). => method m11 -example/Methods#m11().(x) => param x -example/Methods#m11(+1). => method m11 -example/Methods#m11(+1).(x) => param x -example/Methods#m12a(). 
=> method m12a -example/Methods#m12a().(x) => param x -example/Methods#m12b(). => method m12b -example/Methods#m12b().(x) => param x -example/Methods#m13(). => method m13 -example/Methods#m13().(x) => param x -example/Methods#m15(). => method m15 -example/Methods#m15().(x) => param x -example/Methods#m16(). => method m16 -example/Methods#m16().(x) => param x -example/Methods#m17(). => method m17 -example/Methods#m17().(a) => param a -example/Methods#m17(+1). => method m17 -example/Methods#m17(+1).(b) => param b -example/Methods#m17. => final object m17 -example/Methods#m17.m(). => method m -example/Methods#m18(). => method m18 -example/Methods#m18().(a) => param a -example/Methods#m18(+1). => method m18 -example/Methods#m18(+1).(b) => param b -example/Methods#m18. => val method m18 -example/Methods#m19$default$2(). => method m19$default$2 -example/Methods#m19$default$3(). => method m19$default$3 -example/Methods#m19$default$3().(x) => param x -example/Methods#m19$default$3().(y) => param y -example/Methods#m19(). => method m19 -example/Methods#m19().(x) => param x -example/Methods#m19().(y) => param y -example/Methods#m19().(z) => param z -example/Methods#m20(). => method m20 -example/Methods#m20().(a) => param a -example/Methods#m20(+1). => method m20 -example/Methods#m20(+1).(b) => param b -example/Methods#m20(+2). => var method m20 -local0 => abstract val method x +example/Methods# => class Methods [typeparam T ] extends Object { self: Methods[T] => +44 decls } +example/Methods#AList# => type AList [typeparam T ] = List[T] +example/Methods#AList#[T] => typeparam T +example/Methods#List# => class List [typeparam T ] extends Object { self: List[T] => +2 decls } +example/Methods#List#[T] => typeparam T +example/Methods#List#``(). => primary ctor [typeparam T ](): List[T] +example/Methods#[T] => typeparam T +example/Methods#``(). => primary ctor [typeparam T ](): Methods[T] +example/Methods#`m8().`(). => method m8(). 
(): Nothing +example/Methods#`m9().`# => class m9(). extends Object { self: m9(). => +1 decls } +example/Methods#`m9().`#``(). => primary ctor (): m9(). +example/Methods#`m20_=`(). => var method m20_= (param x$1: m17.type): Unit +example/Methods#`m20_=`().(x$1) => param x$1: m17.type +example/Methods#m1(). => method m1 => Nothing +example/Methods#m2(). => method m2 (): Nothing +example/Methods#m3(). => method m3 (param x: Int): Nothing +example/Methods#m3().(x) => param x: Int +example/Methods#m4(). => method m4 (param x: Int)(param y: Int): Nothing +example/Methods#m4().(x) => param x: Int +example/Methods#m4().(y) => param y: Int +example/Methods#m5(). => method m5 (param x: String): Nothing +example/Methods#m5().(x) => param x: String +example/Methods#m5(+1). => method m5 (param x: Int): Nothing +example/Methods#m5(+1).(x) => param x: Int +example/Methods#m6(). => method m6 (param x: Int): Nothing +example/Methods#m6().(x) => param x: Int +example/Methods#m6(+1). => method m6 (param x: List[T]): Nothing +example/Methods#m6(+1).(x) => param x: List[T] +example/Methods#m6(+2). => method m6 (param x: List[T]): Nothing +example/Methods#m6(+2).(x) => param x: List[T] +example/Methods#m7(). => method m7 [typeparam U ](param c: Methods[T], param l: List[U])(implicit param evidence$1: Ordering[U]): Nothing +example/Methods#m7().(c) => param c: Methods[T] +example/Methods#m7().(evidence$1) => implicit param evidence$1: Ordering[U] +example/Methods#m7().(l) => param l: List[U] +example/Methods#m7().[U] => typeparam U +example/Methods#m9(). => method m9 (param x: m9().): Nothing +example/Methods#m9().(x) => param x: m9(). +example/Methods#m10(). => method m10 (param x: List[T]): Nothing +example/Methods#m10().(x) => param x: List[T] +example/Methods#m11(). => method m11 (param x: Predef.type): Nothing +example/Methods#m11().(x) => param x: Predef.type +example/Methods#m11(+1). 
=> method m11 (param x: Example.type): Nothing +example/Methods#m11(+1).(x) => param x: Example.type +example/Methods#m12a(). => method m12a (param x: Object): Nothing +example/Methods#m12a().(x) => param x: Object +example/Methods#m12b(). => method m12b (param x: Object { abstract val method x Int }): Nothing +example/Methods#m12b().(x) => param x: Object { abstract val method x Int } +example/Methods#m12c(). => method m12c (param x: Object { abstract method y => Int; abstract val method x Int }): Nothing +example/Methods#m12c().(x) => param x: Object { abstract method y => Int; abstract val method x Int } +example/Methods#m13(). => method m13 (param x: Int @unchecked): Nothing +example/Methods#m13().(x) => param x: Int @unchecked +example/Methods#m15(). => method m15 (param x: => Int): Nothing +example/Methods#m15().(x) => param x: => Int +example/Methods#m16(). => method m16 (param x: Int*): Nothing +example/Methods#m16().(x) => param x: Int* +example/Methods#m17(). => method m17 (param a: Int): Nothing +example/Methods#m17().(a) => param a: Int +example/Methods#m17(+1). => method m17 (param b: String): Nothing +example/Methods#m17(+1).(b) => param b: String +example/Methods#m17. => final object m17 extends Object { self: m17.type => +2 decls } +example/Methods#m17.m(). => method m (): Nothing +example/Methods#m18(). => method m18 (param a: Int): Nothing +example/Methods#m18().(a) => param a: Int +example/Methods#m18(+1). => method m18 (param b: String): Nothing +example/Methods#m18(+1).(b) => param b: String +example/Methods#m18. => val method m18 m17.type +example/Methods#m19$default$2(). => method m19$default$2 => Int @uncheckedVariance +example/Methods#m19$default$3(). => method m19$default$3 (param x: Int, param y: Int): Int @uncheckedVariance +example/Methods#m19$default$3().(x) => param x: Int +example/Methods#m19$default$3().(y) => param y: Int +example/Methods#m19(). 
=> method m19 (param x: Int, param y: Int)(param z: Int): Nothing +example/Methods#m19().(x) => param x: Int +example/Methods#m19().(y) => param y: Int +example/Methods#m19().(z) => param z: Int +example/Methods#m20(). => method m20 (param a: Int): Nothing +example/Methods#m20().(a) => param a: Int +example/Methods#m20(+1). => method m20 (param b: String): Nothing +example/Methods#m20(+1).(b) => param b: String +example/Methods#m20(+2). => var method m20 m17.type +local0 => abstract val method x Int +local1 => abstract val method x Int +local2 => abstract method y => Int Occurrences: [0:8..0:15): example <- example/ @@ -2176,10 +2311,8 @@ Occurrences: [3:13..3:21): language -> scala/language. [3:22..3:34): existentials -> scala/language.existentials. [5:6..5:13): Methods <- example/Methods# -[5:13..5:13): <- example/Methods#``(). [5:14..5:15): T <- example/Methods#[T] [6:8..6:12): List <- example/Methods#List# -[6:12..6:12): <- example/Methods#List#``(). [6:13..6:14): T <- example/Methods#List#[T] [7:7..7:12): AList <- example/Methods#AList# [7:13..7:14): T <- example/Methods#AList#[T] @@ -2224,8 +2357,8 @@ Occurrences: [16:29..16:32): ??? -> scala/Predef.`???`(). [17:6..17:8): m7 <- example/Methods#m7(). [17:9..17:10): U <- example/Methods#m7().[U] +[17:10..17:10): <- example/Methods#m7().(evidence$1) [17:12..17:20): Ordering -> scala/math/Ordering# -[17:20..17:20): -> example/Methods#m7().[U] [17:22..17:23): c <- example/Methods#m7().(c) [17:25..17:32): Methods -> example/Methods# [17:33..17:34): T -> example/Methods#[T] @@ -2235,7 +2368,6 @@ Occurrences: [17:51..17:54): ??? -> scala/Predef.`???`(). [18:7..18:12): m8(). <- example/Methods#`m8().`(). [18:18..18:21): ??? -> scala/Predef.`???`(). -[19:2..19:2): <- example/Methods#`m9().`#``(). [19:9..19:14): m9(). <- example/Methods#`m9().`# [20:6..20:8): m9 <- example/Methods#m9(). 
[20:9..20:10): x <- example/Methods#m9().(x) @@ -2262,60 +2394,67 @@ Occurrences: [25:20..25:21): x <- local0 [25:23..25:26): Int -> scala/Int# [25:32..25:35): ??? -> scala/Predef.`???`(). -[26:6..26:9): m13 <- example/Methods#m13(). -[26:10..26:11): x <- example/Methods#m13().(x) -[26:13..26:16): Int -> scala/Int# -[26:18..26:27): unchecked -> scala/unchecked# -[26:31..26:34): ??? -> scala/Predef.`???`(). -[27:6..27:9): m15 <- example/Methods#m15(). -[27:10..27:11): x <- example/Methods#m15().(x) -[27:16..27:19): Int -> scala/Int# -[27:23..27:26): ??? -> scala/Predef.`???`(). -[28:6..28:9): m16 <- example/Methods#m16(). -[28:10..28:11): x <- example/Methods#m16().(x) -[28:13..28:16): Int -> scala/Int# -[28:21..28:24): ??? -> scala/Predef.`???`(). -[29:9..29:12): m17 <- example/Methods#m17. -[29:19..29:20): m <- example/Methods#m17.m(). -[29:25..29:28): ??? -> scala/Predef.`???`(). -[30:6..30:9): m17 <- example/Methods#m17(). -[30:10..30:11): a <- example/Methods#m17().(a) -[30:13..30:16): Int -> scala/Int# -[30:20..30:23): ??? -> scala/Predef.`???`(). -[31:6..31:9): m17 <- example/Methods#m17(+1). -[31:10..31:11): b <- example/Methods#m17(+1).(b) -[31:13..31:19): String -> scala/Predef.String# -[31:23..31:26): ??? -> scala/Predef.`???`(). -[32:6..32:9): m18 <- example/Methods#m18. -[32:12..32:15): m17 -> example/Methods#m17. -[33:6..33:9): m18 <- example/Methods#m18(). -[33:10..33:11): a <- example/Methods#m18().(a) -[33:13..33:16): Int -> scala/Int# -[33:20..33:23): ??? -> scala/Predef.`???`(). -[34:6..34:9): m18 <- example/Methods#m18(+1). -[34:10..34:11): b <- example/Methods#m18(+1).(b) -[34:13..34:19): String -> scala/Predef.String# -[34:23..34:26): ??? -> scala/Predef.`???`(). -[35:6..35:9): m19 <- example/Methods#m19(). 
-[35:10..35:11): x <- example/Methods#m19().(x) -[35:10..35:11): x <- example/Methods#m19$default$3().(x) -[35:13..35:16): Int -> scala/Int# -[35:18..35:19): y <- example/Methods#m19().(y) -[35:18..35:19): y <- example/Methods#m19$default$3().(y) -[35:21..35:24): Int -> scala/Int# -[35:30..35:31): z <- example/Methods#m19().(z) -[35:33..35:36): Int -> scala/Int# -[35:44..35:47): ??? -> scala/Predef.`???`(). -[36:6..36:9): m20 <- example/Methods#m20(). -[36:10..36:11): a <- example/Methods#m20().(a) +[26:6..26:10): m12c <- example/Methods#m12c(). +[26:11..26:12): x <- example/Methods#m12c().(x) +[26:20..26:21): x <- local1 +[26:23..26:26): Int -> scala/Int# +[26:32..26:33): y <- local2 +[26:35..26:38): Int -> scala/Int# +[26:44..26:47): ??? -> scala/Predef.`???`(). +[27:6..27:9): m13 <- example/Methods#m13(). +[27:10..27:11): x <- example/Methods#m13().(x) +[27:13..27:16): Int -> scala/Int# +[27:18..27:27): unchecked -> scala/unchecked# +[27:31..27:34): ??? -> scala/Predef.`???`(). +[28:6..28:9): m15 <- example/Methods#m15(). +[28:10..28:11): x <- example/Methods#m15().(x) +[28:16..28:19): Int -> scala/Int# +[28:23..28:26): ??? -> scala/Predef.`???`(). +[29:6..29:9): m16 <- example/Methods#m16(). +[29:10..29:11): x <- example/Methods#m16().(x) +[29:13..29:16): Int -> scala/Int# +[29:21..29:24): ??? -> scala/Predef.`???`(). +[30:9..30:12): m17 <- example/Methods#m17. +[30:19..30:20): m <- example/Methods#m17.m(). +[30:25..30:28): ??? -> scala/Predef.`???`(). +[31:6..31:9): m17 <- example/Methods#m17(). +[31:10..31:11): a <- example/Methods#m17().(a) +[31:13..31:16): Int -> scala/Int# +[31:20..31:23): ??? -> scala/Predef.`???`(). +[32:6..32:9): m17 <- example/Methods#m17(+1). +[32:10..32:11): b <- example/Methods#m17(+1).(b) +[32:13..32:19): String -> scala/Predef.String# +[32:23..32:26): ??? -> scala/Predef.`???`(). +[33:6..33:9): m18 <- example/Methods#m18. +[33:12..33:15): m17 -> example/Methods#m17. +[34:6..34:9): m18 <- example/Methods#m18(). 
+[34:10..34:11): a <- example/Methods#m18().(a) +[34:13..34:16): Int -> scala/Int# +[34:20..34:23): ??? -> scala/Predef.`???`(). +[35:6..35:9): m18 <- example/Methods#m18(+1). +[35:10..35:11): b <- example/Methods#m18(+1).(b) +[35:13..35:19): String -> scala/Predef.String# +[35:23..35:26): ??? -> scala/Predef.`???`(). +[36:6..36:9): m19 <- example/Methods#m19(). +[36:10..36:11): x <- example/Methods#m19().(x) +[36:10..36:11): x <- example/Methods#m19$default$3().(x) [36:13..36:16): Int -> scala/Int# -[36:20..36:23): ??? -> scala/Predef.`???`(). -[37:6..37:9): m20 <- example/Methods#m20(+1). -[37:10..37:11): b <- example/Methods#m20(+1).(b) -[37:13..37:19): String -> scala/Predef.String# -[37:23..37:26): ??? -> scala/Predef.`???`(). -[38:6..38:9): m20 <- example/Methods#m20(+2). -[38:12..38:15): m17 -> example/Methods#m17. +[36:18..36:19): y <- example/Methods#m19().(y) +[36:18..36:19): y <- example/Methods#m19$default$3().(y) +[36:21..36:24): Int -> scala/Int# +[36:30..36:31): z <- example/Methods#m19().(z) +[36:33..36:36): Int -> scala/Int# +[36:44..36:47): ??? -> scala/Predef.`???`(). +[37:6..37:9): m20 <- example/Methods#m20(). +[37:10..37:11): a <- example/Methods#m20().(a) +[37:13..37:16): Int -> scala/Int# +[37:20..37:23): ??? -> scala/Predef.`???`(). +[38:6..38:9): m20 <- example/Methods#m20(+1). +[38:10..38:11): b <- example/Methods#m20(+1).(b) +[38:13..38:19): String -> scala/Predef.String# +[38:23..38:26): ??? -> scala/Predef.`???`(). +[39:6..39:9): m20 <- example/Methods#m20(+2). +[39:12..39:15): m17 -> example/Methods#m17. expect/NamedApplyBlock.scala ---------------------------- @@ -2326,52 +2465,52 @@ Uri => NamedApplyBlock.scala Text => empty Language => Scala Symbols => 43 entries -Occurrences => 43 entries +Occurrences => 40 entries Symbols: -example/NamedApplyBlockCaseClassConstruction. 
=> final object NamedApplyBlockCaseClassConstruction -example/NamedApplyBlockCaseClassConstruction.Msg# => case class Msg -example/NamedApplyBlockCaseClassConstruction.Msg#_1(). => method _1 -example/NamedApplyBlockCaseClassConstruction.Msg#_2(). => method _2 -example/NamedApplyBlockCaseClassConstruction.Msg#_3(). => method _3 -example/NamedApplyBlockCaseClassConstruction.Msg#``(). => primary ctor -example/NamedApplyBlockCaseClassConstruction.Msg#``().(body) => val param body -example/NamedApplyBlockCaseClassConstruction.Msg#``().(head) => val param head -example/NamedApplyBlockCaseClassConstruction.Msg#``().(tail) => val param tail -example/NamedApplyBlockCaseClassConstruction.Msg#body. => val method body -example/NamedApplyBlockCaseClassConstruction.Msg#copy$default$1(). => method copy$default$1 -example/NamedApplyBlockCaseClassConstruction.Msg#copy$default$2(). => method copy$default$2 -example/NamedApplyBlockCaseClassConstruction.Msg#copy$default$3(). => method copy$default$3 -example/NamedApplyBlockCaseClassConstruction.Msg#copy(). => method copy -example/NamedApplyBlockCaseClassConstruction.Msg#copy().(body) => param body -example/NamedApplyBlockCaseClassConstruction.Msg#copy().(head) => param head -example/NamedApplyBlockCaseClassConstruction.Msg#copy().(tail) => param tail -example/NamedApplyBlockCaseClassConstruction.Msg#head. => val method head -example/NamedApplyBlockCaseClassConstruction.Msg#tail. => val method tail -example/NamedApplyBlockCaseClassConstruction.Msg. => final object Msg -example/NamedApplyBlockCaseClassConstruction.Msg.$lessinit$greater$default$2(). => method $lessinit$greater$default$2 -example/NamedApplyBlockCaseClassConstruction.Msg.apply(). 
=> method apply -example/NamedApplyBlockCaseClassConstruction.Msg.apply().(body) => param body -example/NamedApplyBlockCaseClassConstruction.Msg.apply().(head) => param head -example/NamedApplyBlockCaseClassConstruction.Msg.apply().(tail) => param tail -example/NamedApplyBlockCaseClassConstruction.Msg.toString(). => method toString -example/NamedApplyBlockCaseClassConstruction.Msg.unapply(). => method unapply -example/NamedApplyBlockCaseClassConstruction.Msg.unapply().(x$1) => param x$1 -example/NamedApplyBlockCaseClassConstruction.bodyText. => val method bodyText -example/NamedApplyBlockCaseClassConstruction.msg. => val method msg -example/NamedApplyBlockMethods. => final object NamedApplyBlockMethods -example/NamedApplyBlockMethods.baseCase(). => method baseCase -example/NamedApplyBlockMethods.foo$default$1(). => method foo$default$1 -example/NamedApplyBlockMethods.foo$default$2(). => method foo$default$2 -example/NamedApplyBlockMethods.foo$default$3(). => method foo$default$3 -example/NamedApplyBlockMethods.foo(). => method foo -example/NamedApplyBlockMethods.foo().(a) => param a -example/NamedApplyBlockMethods.foo().(b) => param b -example/NamedApplyBlockMethods.foo().(c) => param c -example/NamedApplyBlockMethods.local. => val method local -example/NamedApplyBlockMethods.recursive(). => method recursive -local0 => val local c$1 -local1 => val local b$1 +example/NamedApplyBlockCaseClassConstruction. => final object NamedApplyBlockCaseClassConstruction extends Object { self: NamedApplyBlockCaseClassConstruction.type => +6 decls } +example/NamedApplyBlockCaseClassConstruction.Msg# => case class Msg extends Object with Product with Serializable { self: Msg => +11 decls } +example/NamedApplyBlockCaseClassConstruction.Msg#_1(). => method _1 => String +example/NamedApplyBlockCaseClassConstruction.Msg#_2(). => method _2 => String +example/NamedApplyBlockCaseClassConstruction.Msg#_3(). => method _3 => String +example/NamedApplyBlockCaseClassConstruction.Msg#``(). 
=> primary ctor (val param body: String, val param head: String, val param tail: String): Msg +example/NamedApplyBlockCaseClassConstruction.Msg#``().(body) => val param body: String +example/NamedApplyBlockCaseClassConstruction.Msg#``().(head) => val param head: String +example/NamedApplyBlockCaseClassConstruction.Msg#``().(tail) => val param tail: String +example/NamedApplyBlockCaseClassConstruction.Msg#body. => val method body String +example/NamedApplyBlockCaseClassConstruction.Msg#copy$default$1(). => method copy$default$1 => String @uncheckedVariance +example/NamedApplyBlockCaseClassConstruction.Msg#copy$default$2(). => method copy$default$2 => String @uncheckedVariance +example/NamedApplyBlockCaseClassConstruction.Msg#copy$default$3(). => method copy$default$3 => String @uncheckedVariance +example/NamedApplyBlockCaseClassConstruction.Msg#copy(). => method copy (param body: String, param head: String, param tail: String): Msg +example/NamedApplyBlockCaseClassConstruction.Msg#copy().(body) => param body: String +example/NamedApplyBlockCaseClassConstruction.Msg#copy().(head) => param head: String +example/NamedApplyBlockCaseClassConstruction.Msg#copy().(tail) => param tail: String +example/NamedApplyBlockCaseClassConstruction.Msg#head. => val method head String +example/NamedApplyBlockCaseClassConstruction.Msg#tail. => val method tail String +example/NamedApplyBlockCaseClassConstruction.Msg. => final object Msg extends Object { self: Msg.type => +5 decls } +example/NamedApplyBlockCaseClassConstruction.Msg.$lessinit$greater$default$2(). => method $lessinit$greater$default$2 => String @uncheckedVariance +example/NamedApplyBlockCaseClassConstruction.Msg.apply(). 
=> method apply (param body: String, param head: String, param tail: String): Msg +example/NamedApplyBlockCaseClassConstruction.Msg.apply().(body) => param body: String +example/NamedApplyBlockCaseClassConstruction.Msg.apply().(head) => param head: String +example/NamedApplyBlockCaseClassConstruction.Msg.apply().(tail) => param tail: String +example/NamedApplyBlockCaseClassConstruction.Msg.toString(). => method toString => String <: scala/Any#toString(). +example/NamedApplyBlockCaseClassConstruction.Msg.unapply(). => method unapply (param x$1: Msg): Msg +example/NamedApplyBlockCaseClassConstruction.Msg.unapply().(x$1) => param x$1: Msg +example/NamedApplyBlockCaseClassConstruction.bodyText. => val method bodyText String +example/NamedApplyBlockCaseClassConstruction.msg. => val method msg Msg +example/NamedApplyBlockMethods. => final object NamedApplyBlockMethods extends Object { self: NamedApplyBlockMethods.type => +8 decls } +example/NamedApplyBlockMethods.baseCase(). => method baseCase => Int +example/NamedApplyBlockMethods.foo$default$1(). => method foo$default$1 => Int @uncheckedVariance +example/NamedApplyBlockMethods.foo$default$2(). => method foo$default$2 => Int @uncheckedVariance +example/NamedApplyBlockMethods.foo$default$3(). => method foo$default$3 => Int @uncheckedVariance +example/NamedApplyBlockMethods.foo(). => method foo (param a: Int, param b: Int, param c: Int): Int +example/NamedApplyBlockMethods.foo().(a) => param a: Int +example/NamedApplyBlockMethods.foo().(b) => param b: Int +example/NamedApplyBlockMethods.foo().(c) => param c: Int +example/NamedApplyBlockMethods.local. => val method local Int +example/NamedApplyBlockMethods.recursive(). 
=> method recursive => Int +local0 => val local c$1: Int +local1 => val local b$1: Int @uncheckedVariance Occurrences: [0:8..0:15): example <- example/ @@ -2396,7 +2535,6 @@ Occurrences: [5:28..5:29): c -> example/NamedApplyBlockMethods.foo().(c) [6:6..6:15): recursive <- example/NamedApplyBlockMethods.recursive(). [6:18..6:21): foo -> example/NamedApplyBlockMethods.foo(). -[6:18..6:18): -> local1 [6:22..6:27): local -> example/NamedApplyBlockMethods.local. [6:29..6:30): c -> example/NamedApplyBlockMethods.foo().(c) [6:33..6:36): foo -> example/NamedApplyBlockMethods.foo(). @@ -2404,7 +2542,6 @@ Occurrences: [6:44..6:45): c -> example/NamedApplyBlockMethods.foo().(c) [9:7..9:43): NamedApplyBlockCaseClassConstruction <- example/NamedApplyBlockCaseClassConstruction. [10:13..10:16): Msg <- example/NamedApplyBlockCaseClassConstruction.Msg# -[10:16..10:16): <- example/NamedApplyBlockCaseClassConstruction.Msg#``(). [10:17..10:21): body <- example/NamedApplyBlockCaseClassConstruction.Msg#body. [10:23..10:29): String -> scala/Predef.String# [10:31..10:35): head <- example/NamedApplyBlockCaseClassConstruction.Msg#head. @@ -2414,7 +2551,6 @@ Occurrences: [11:6..11:14): bodyText <- example/NamedApplyBlockCaseClassConstruction.bodyText. [12:6..12:9): msg <- example/NamedApplyBlockCaseClassConstruction.msg. [12:12..12:15): Msg -> example/NamedApplyBlockCaseClassConstruction.Msg. -[12:15..12:15): -> example/NamedApplyBlockCaseClassConstruction.Msg.apply(). [12:16..12:24): bodyText -> example/NamedApplyBlockCaseClassConstruction.bodyText. [12:26..12:30): tail -> example/NamedApplyBlockCaseClassConstruction.Msg.apply().(tail) @@ -2427,36 +2563,33 @@ Uri => NamedArguments.scala Text => empty Language => Scala Symbols => 16 entries -Occurrences => 13 entries +Occurrences => 10 entries Symbols: -example/NamedArguments# => class NamedArguments -example/NamedArguments#User# => case class User -example/NamedArguments#User#_1(). => method _1 -example/NamedArguments#User#``(). 
=> primary ctor -example/NamedArguments#User#``().(name) => val param name -example/NamedArguments#User#copy$default$1(). => method copy$default$1 -example/NamedArguments#User#copy(). => method copy -example/NamedArguments#User#copy().(name) => param name -example/NamedArguments#User#name. => val method name -example/NamedArguments#User. => final object User -example/NamedArguments#User.apply(). => method apply -example/NamedArguments#User.apply().(name) => param name -example/NamedArguments#User.toString(). => method toString -example/NamedArguments#User.unapply(). => method unapply -example/NamedArguments#User.unapply().(x$1) => param x$1 -example/NamedArguments#``(). => primary ctor +example/NamedArguments# => class NamedArguments extends Object { self: NamedArguments => +4 decls } +example/NamedArguments#User# => case class User extends Object with Product with Serializable { self: User => +5 decls } +example/NamedArguments#User#_1(). => method _1 => String +example/NamedArguments#User#``(). => primary ctor (val param name: String): User +example/NamedArguments#User#``().(name) => val param name: String +example/NamedArguments#User#copy$default$1(). => method copy$default$1 => String @uncheckedVariance +example/NamedArguments#User#copy(). => method copy (param name: String): User +example/NamedArguments#User#copy().(name) => param name: String +example/NamedArguments#User#name. => val method name String +example/NamedArguments#User. => final object User extends Object { self: User.type => +4 decls } +example/NamedArguments#User.apply(). => method apply (param name: String): User +example/NamedArguments#User.apply().(name) => param name: String +example/NamedArguments#User.toString(). => method toString => String <: scala/Any#toString(). +example/NamedArguments#User.unapply(). => method unapply (param x$1: User): User +example/NamedArguments#User.unapply().(x$1) => param x$1: User +example/NamedArguments#``(). 
=> primary ctor (): NamedArguments Occurrences: [0:8..0:15): example <- example/ [2:6..2:20): NamedArguments <- example/NamedArguments# -[3:2..3:2): <- example/NamedArguments#``(). [3:13..3:17): User <- example/NamedArguments#User# -[3:17..3:17): <- example/NamedArguments#User#``(). [3:18..3:22): name <- example/NamedArguments#User#name. [3:24..3:30): String -> scala/Predef.String# [4:2..4:6): User -> example/NamedArguments#User. -[4:6..4:6): -> example/NamedArguments#User.apply(). [4:7..4:11): name -> example/NamedArguments#User.apply().(name) [5:2..5:6): User -> example/NamedArguments#User. [5:7..5:12): apply -> example/NamedArguments#User.apply(). @@ -2470,19 +2603,41 @@ Schema => SemanticDB v4 Uri => NewModifiers.scala Text => empty Language => Scala -Symbols => 3 entries -Occurrences => 4 entries +Symbols => 14 entries +Occurrences => 15 entries Symbols: -_empty_/NewModifiers. => final object NewModifiers -_empty_/NewModifiers.A# => type A -_empty_/NewModifiers.foo. => val method foo +_empty_/NewModifiers$package. => final package object _empty_ extends Object { self: _empty_.type { opaque type OpaqueB } => +2 decls } +_empty_/NewModifiers$package.OpaqueB# => opaque type OpaqueB +_empty_/NewModifiers. => final object NewModifiers extends Object { self: NewModifiers.type { opaque type A } => +3 decls } +_empty_/NewModifiers.A# => opaque type A +_empty_/NewModifiers.foo. => val inline method foo "foo" +_empty_/NewModifiersClass# => opaque class NewModifiersClass extends Object { self: Any { opaque type C } & NewModifiersClass => +5 decls } +_empty_/NewModifiersClass#C# => opaque type C +_empty_/NewModifiersClass#Nested# => opaque class Nested extends Object { self: Any { opaque type NestedOpaque } & Nested => +2 decls } +_empty_/NewModifiersClass#Nested#NestedOpaque# => opaque type NestedOpaque +_empty_/NewModifiersClass#Nested#``(). => primary ctor (): Nested +_empty_/NewModifiersClass#``(). 
=> primary ctor (): NewModifiersClass +_empty_/NewModifiersTrait# => opaque trait NewModifiersTrait extends Object { self: Any { opaque type D } & NewModifiersTrait => +2 decls } +_empty_/NewModifiersTrait#D# => opaque type D +_empty_/NewModifiersTrait#``(). => primary ctor (): NewModifiersTrait Occurrences: [0:7..0:19): NewModifiers <- _empty_/NewModifiers. [1:13..1:16): foo <- _empty_/NewModifiers.foo. [2:14..2:15): A <- _empty_/NewModifiers.A# [2:18..2:21): Int -> scala/Int# +[5:12..5:19): OpaqueB <- _empty_/NewModifiers$package.OpaqueB# +[5:22..5:25): Int -> scala/Int# +[7:6..7:23): NewModifiersClass <- _empty_/NewModifiersClass# +[8:14..8:15): C <- _empty_/NewModifiersClass#C# +[8:18..8:21): Int -> scala/Int# +[9:8..9:14): Nested <- _empty_/NewModifiersClass#Nested# +[10:16..10:28): NestedOpaque <- _empty_/NewModifiersClass#Nested#NestedOpaque# +[10:31..10:34): Int -> scala/Int# +[14:6..14:23): NewModifiersTrait <- _empty_/NewModifiersTrait# +[15:14..15:15): D <- _empty_/NewModifiersTrait#D# +[15:18..15:21): Int -> scala/Int# expect/Objects.scala -------------------- @@ -2496,8 +2651,8 @@ Symbols => 2 entries Occurrences => 3 entries Symbols: -objects/X. => final object X -objects/X.Y. => final object Y +objects/X. => final object X extends Object { self: X.type => +3 decls } +objects/X.Y. => final object Y extends Object { self: Y.type => +1 decls } Occurrences: [0:8..0:15): objects <- objects/ @@ -2513,24 +2668,22 @@ Uri => Overrides.scala Text => empty Language => Scala Symbols => 6 entries -Occurrences => 10 entries +Occurrences => 8 entries Symbols: -example/A# => trait A -example/A#``(). => primary ctor -example/A#foo(). => abstract method foo -example/B# => class B -example/B#``(). => primary ctor -example/B#foo(). => method foo +example/A# => trait A extends Object { self: A => +2 decls } +example/A#``(). => primary ctor (): A +example/A#foo(). 
=> abstract method foo => Int +example/B# => class B extends Object with A { self: B => +2 decls } +example/B#``(). => primary ctor (): B +example/B#foo(). => method foo => Int <: example/A#foo(). Occurrences: [0:8..0:15): example <- example/ [2:6..2:7): A <- example/A# -[2:10..2:10): <- example/A#``(). [2:14..2:17): foo <- example/A#foo(). [2:19..2:22): Int -> scala/Int# [3:6..3:7): B <- example/B# -[3:7..3:7): <- example/B#``(). [3:18..3:19): A -> example/A# [3:26..3:29): foo <- example/B#foo(). [3:31..3:34): Int -> scala/Int# @@ -2544,33 +2697,32 @@ Uri => Prefixes.scala Text => empty Language => Scala Symbols => 19 entries -Occurrences => 49 entries +Occurrences => 48 entries Symbols: -prefixes/C# => class C -prefixes/C#N. => final object N -prefixes/C#N.U# => type U -prefixes/C#T# => type T -prefixes/C#``(). => primary ctor -prefixes/C#k1(). => method k1 -prefixes/C#m1(). => method m1 -prefixes/M. => final object M -prefixes/M.T# => type T -prefixes/M.n1(). => method n1 -prefixes/O. => final object O -prefixes/O.o1(). => method o1 -prefixes/Test. => final object Test -prefixes/Test.c. => val method c -prefixes/Test.k2(). => method k2 -prefixes/Test.k3(). => method k3 -prefixes/Test.m2(). => method m2 -prefixes/Test.n2(). => method n2 -prefixes/Test.n3(). => method n3 +prefixes/C# => class C extends Object { self: C => +6 decls } +prefixes/C#N. => final object N extends Object { self: N.type => +2 decls } +prefixes/C#N.U# => type U +prefixes/C#T# => type T +prefixes/C#``(). => primary ctor (): C +prefixes/C#k1(). => method k1 => U +prefixes/C#m1(). => method m1 => T +prefixes/M. => final object M extends Object { self: M.type => +3 decls } +prefixes/M.T# => type T +prefixes/M.n1(). => method n1 => T +prefixes/O. => final object O extends C { self: O.type => +2 decls } +prefixes/O.o1(). => method o1 => O.this.T +prefixes/Test. => final object Test extends Object { self: Test.type => +7 decls } +prefixes/Test.c. => val method c C +prefixes/Test.k2(). 
=> method k2 => c.N.U +prefixes/Test.k3(). => method k3 => c.N.U +prefixes/Test.m2(). => method m2 => c.T +prefixes/Test.n2(). => method n2 => T +prefixes/Test.n3(). => method n3 => T Occurrences: [0:8..0:16): prefixes <- prefixes/ [2:6..2:7): C <- prefixes/C# -[3:2..3:2): <- prefixes/C#``(). [3:7..3:8): T <- prefixes/C#T# [4:6..4:8): m1 <- prefixes/C#m1(). [4:10..4:11): T -> prefixes/C#T# @@ -2588,7 +2740,6 @@ Occurrences: [14:14..14:17): ??? -> scala/Predef.`???`(). [17:7..17:8): O <- prefixes/O. [17:17..17:18): C -> prefixes/C# -[17:18..17:18): -> prefixes/C#``(). [18:6..18:8): o1 <- prefixes/O.o1(). [18:10..18:11): T -> prefixes/C#T# [18:14..18:17): ??? -> scala/Predef.`???`(). @@ -2608,6 +2759,7 @@ Occurrences: [25:9..25:10): c -> prefixes/Test.c. [25:11..25:12): N -> prefixes/C#N. [26:6..26:8): k3 <- prefixes/Test.k3(). +[26:10..26:11): U -> prefixes/C#N.U# [26:14..26:17): ??? -> scala/Predef.`???`(). [28:6..28:8): n2 <- prefixes/Test.n2(). [28:10..28:11): M -> prefixes/M. @@ -2618,6 +2770,205 @@ Occurrences: [31:10..31:11): T -> prefixes/M.T# [31:14..31:17): ??? -> scala/Predef.`???`(). +expect/RecOrRefined.scala +------------------------- + +Summary: +Schema => SemanticDB v4 +Uri => RecOrRefined.scala +Text => empty +Language => Scala +Symbols => 68 entries +Occurrences => 110 entries +Synthetics => 3 entries + +Symbols: +example/C# => class C extends Object { self: C => +3 decls } +example/C#T1# => type T1 +example/C#T2# => type T2 +example/C#``(). 
=> primary ctor (): C +example/PickOneRefinement_1# => class PickOneRefinement_1 [typeparam S <: SpecialRefinement { abstract method pickOne [typeparam T ](param as: T*): Option[String] }] extends Object { self: PickOneRefinement_1[S] => +3 decls } +example/PickOneRefinement_1#[S] => typeparam S <: SpecialRefinement { abstract method pickOne [typeparam T ](param as: T*): Option[String] } +example/PickOneRefinement_1#[S](as) => param as: T* +example/PickOneRefinement_1#[S][T] => typeparam T +example/PickOneRefinement_1#``(). => primary ctor [typeparam S <: SpecialRefinement { abstract method pickOne [typeparam T ](param as: T*): Option[String] }](): PickOneRefinement_1[S] +example/PickOneRefinement_1#run(). => method run (param s: S, param as: String*): Option[String] +example/PickOneRefinement_1#run().(as) => param as: String* +example/PickOneRefinement_1#run().(s) => param s: S +example/PolyHolder# => trait PolyHolder extends Object { self: PolyHolder => +2 decls } +example/PolyHolder#``(). => primary ctor (): PolyHolder +example/PolyHolder#foo(). => abstract method foo [typeparam T ](param t: T): Any +example/PolyHolder#foo().(t) => param t: T +example/PolyHolder#foo().[T] => typeparam T +example/RecOrRefined$package. => final package object example extends Object { self: example.type => +9 decls } +example/RecOrRefined$package.C2# => type C2 = C { type T2 = T1 <: example/C#T2#; type T1 <: example/C#T1# } +example/RecOrRefined$package.Person# => type Person = Record { abstract val method age Int; abstract val method name String } +example/RecOrRefined$package.m1(). => method m1 (param a: Int { abstract val method x Int }): Nothing +example/RecOrRefined$package.m1().(a) => param a: Int { abstract val method x Int } +example/RecOrRefined$package.m2(). 
=> method m2 (param x: Object { abstract method y => Int; abstract val method x Int }): Nothing +example/RecOrRefined$package.m2().(x) => param x: Object { abstract method y => Int; abstract val method x Int } +example/RecOrRefined$package.m3(). => method m3 (param x: Object { type z ; abstract method y => Int; abstract val method x Int }): Nothing +example/RecOrRefined$package.m3().(x) => param x: Object { type z ; abstract method y => Int; abstract val method x Int } +example/RecOrRefined$package.m4(). => method m4 (param x: PolyHolder { abstract method foo [typeparam T ](param t: T): T <: example/PolyHolder#foo(). }): Nothing +example/RecOrRefined$package.m4().(x) => param x: PolyHolder { abstract method foo [typeparam T ](param t: T): T <: example/PolyHolder#foo(). } +example/RecOrRefined$package.m5(). => method m5 [typeparam Z ](param x: Int): PolyHolder { abstract method foo [typeparam T ](param t: T): T <: example/PolyHolder#foo(). } +example/RecOrRefined$package.m5().(x) => param x: Int +example/RecOrRefined$package.m5().[Z] => typeparam Z +example/RecOrRefined$package.m6# => type m6 [typeparam X ] = PolyHolder { abstract method foo [typeparam T ](param t: T): T <: example/PolyHolder#foo(). } +example/RecOrRefined$package.m6#[X] => typeparam X +example/Record# => class Record extends Object with Selectable { self: Record => +4 decls } +example/Record#``(). => primary ctor (param elems: Tuple2[String, Any]*): Record +example/Record#``().(elems) => param elems: Tuple2[String, Any]* +example/Record#elems. => private[this] val method elems Tuple2[String, Any]* +example/Record#fields. => private[this] val method fields Map[String, Any] +example/Record#selectDynamic(). => method selectDynamic (param name: String): Any +example/Record#selectDynamic().(name) => param name: String +example/SpecialRefinement# => trait SpecialRefinement extends Object { self: SpecialRefinement => +2 decls } +example/SpecialRefinement#``(). 
=> primary ctor (): SpecialRefinement +example/SpecialRefinement#pickOne(). => abstract method pickOne [typeparam T ](param as: T*): Option[Any] +example/SpecialRefinement#pickOne().(as) => param as: T* +example/SpecialRefinement#pickOne().[T] => typeparam T +local0 => abstract method pickOne [typeparam T ](param as: T*): Option[String] +local1 => typeparam T +local2 => param as: T* +local3 => abstract method pickOne [typeparam T ](param as: T*): Option[String] <: example/SpecialRefinement#pickOne(). +local4 => abstract val method x Int +local5 => abstract val method x Int +local6 => abstract method y => Int +local7 => abstract val method x Int +local8 => abstract method y => Int +local9 => type z +local10 => typeparam T +local11 => param t: T +local12 => abstract method foo [typeparam T ](param t: T): T <: example/PolyHolder#foo(). +local13 => typeparam T +local14 => param t: T +local15 => abstract method foo [typeparam T ](param t: T): T <: example/PolyHolder#foo(). +local16 => typeparam T +local17 => param t: T +local18 => abstract method foo [typeparam T ](param t: T): T <: example/PolyHolder#foo(). +local19 => abstract val method name String +local20 => abstract val method age Int +local21 => type T1 <: example/C#T1# +local22 => type T2 = T1 <: example/C#T2# + +Occurrences: +[0:8..0:15): example <- example/ +[2:4..2:6): m1 <- example/RecOrRefined$package.m1(). +[2:7..2:8): a <- example/RecOrRefined$package.m1().(a) +[2:10..2:13): Int -> scala/Int# +[2:20..2:21): x <- local4 +[2:23..2:26): Int -> scala/Int# +[2:32..2:35): ??? -> scala/Predef.`???`(). +[3:4..3:6): m2 <- example/RecOrRefined$package.m2(). +[3:7..3:8): x <- example/RecOrRefined$package.m2().(x) +[3:16..3:17): x <- local5 +[3:19..3:22): Int -> scala/Int# +[3:28..3:29): y <- local6 +[3:31..3:34): Int -> scala/Int# +[3:40..3:43): ??? -> scala/Predef.`???`(). +[4:4..4:6): m3 <- example/RecOrRefined$package.m3(). 
+[4:7..4:8): x <- example/RecOrRefined$package.m3().(x) +[4:16..4:17): x <- local7 +[4:19..4:22): Int -> scala/Int# +[4:28..4:29): y <- local8 +[4:31..4:34): Int -> scala/Int# +[4:41..4:42): z <- local9 +[4:48..4:51): ??? -> scala/Predef.`???`(). +[5:6..5:16): PolyHolder <- example/PolyHolder# +[6:6..6:9): foo <- example/PolyHolder#foo(). +[6:10..6:11): T <- example/PolyHolder#foo().[T] +[6:13..6:14): t <- example/PolyHolder#foo().(t) +[6:16..6:17): T -> example/PolyHolder#foo().[T] +[6:20..6:23): Any -> scala/Any# +[9:4..9:6): m4 <- example/RecOrRefined$package.m4(). +[9:7..9:8): x <- example/RecOrRefined$package.m4().(x) +[9:10..9:20): PolyHolder -> example/PolyHolder# +[9:27..9:30): foo <- local12 +[9:31..9:32): T <- local10 +[9:34..9:35): t <- local11 +[9:37..9:38): T -> local10 +[9:41..9:42): T -> local10 +[9:48..9:51): ??? -> scala/Predef.`???`(). +[10:4..10:6): m5 <- example/RecOrRefined$package.m5(). +[10:7..10:8): Z <- example/RecOrRefined$package.m5().[Z] +[10:10..10:11): x <- example/RecOrRefined$package.m5().(x) +[10:13..10:16): Int -> scala/Int# +[10:19..10:29): PolyHolder -> example/PolyHolder# +[10:36..10:39): foo <- local15 +[10:40..10:41): T <- local13 +[10:43..10:44): t <- local14 +[10:46..10:47): T -> local13 +[10:50..10:51): T -> local13 +[10:56..10:59): ??? -> scala/Predef.`???`(). +[12:5..12:7): m6 <- example/RecOrRefined$package.m6# +[12:11..12:12): X <- example/RecOrRefined$package.m6#[X] +[12:18..12:28): PolyHolder -> example/PolyHolder# +[12:35..12:38): foo <- local18 +[12:39..12:40): T <- local16 +[12:42..12:43): t <- local17 +[12:45..12:46): T -> local16 +[12:49..12:50): T -> local16 +[14:6..14:12): Record <- example/Record# +[14:13..14:18): elems <- example/Record#elems. +[14:21..14:27): String -> scala/Predef.String# +[14:29..14:32): Any -> scala/Any# +[14:44..14:54): Selectable -> scala/Selectable# +[15:14..15:20): fields <- example/Record#fields. +[15:23..15:28): elems -> example/Record#elems. 
+[15:29..15:34): toMap -> scala/collection/IterableOnceOps#toMap(). +[16:6..16:19): selectDynamic <- example/Record#selectDynamic(). +[16:20..16:24): name <- example/Record#selectDynamic().(name) +[16:26..16:32): String -> scala/Predef.String# +[16:35..16:38): Any -> scala/Any# +[16:41..16:47): fields -> example/Record#fields. +[16:48..16:52): name -> example/Record#selectDynamic().(name) +[18:5..18:11): Person <- example/RecOrRefined$package.Person# +[18:14..18:20): Record -> example/Record# +[19:6..19:10): name <- local19 +[19:12..19:18): String -> scala/Predef.String# +[20:6..20:9): age <- local20 +[20:11..20:14): Int -> scala/Int# +[24:6..24:7): C <- example/C# +[24:15..24:17): T1 <- example/C#T1# +[24:24..24:26): T2 <- example/C#T2# +[25:5..25:7): C2 <- example/RecOrRefined$package.C2# +[25:10..25:11): C -> example/C# +[25:19..25:21): T1 <- local21 +[25:28..25:30): T2 <- local22 +[25:33..25:35): T1 -> local21 +[27:6..27:23): SpecialRefinement <- example/SpecialRefinement# +[28:6..28:13): pickOne <- example/SpecialRefinement#pickOne(). +[28:14..28:15): T <- example/SpecialRefinement#pickOne().[T] +[28:17..28:19): as <- example/SpecialRefinement#pickOne().(as) +[28:21..28:22): T -> example/SpecialRefinement#pickOne().[T] +[28:26..28:32): Option -> scala/Option# +[28:33..28:36): Any -> scala/Any# +[31:6..31:25): PickOneRefinement_1 <- example/PickOneRefinement_1# +[31:26..31:27): S <- example/PickOneRefinement_1#[S] +[31:31..31:48): SpecialRefinement -> example/SpecialRefinement# +[31:55..31:62): pickOne <- local3 +[31:63..31:64): T <- local1 +[31:66..31:68): as <- local2 +[31:70..31:71): T -> local1 +[31:75..31:81): Option -> scala/Option# +[31:82..31:88): String -> scala/Predef.String# +[32:6..32:9): run <- example/PickOneRefinement_1#run(). 
+[32:10..32:11): s <- example/PickOneRefinement_1#run().(s) +[32:13..32:14): S -> example/PickOneRefinement_1#[S] +[32:16..32:18): as <- example/PickOneRefinement_1#run().(as) +[32:20..32:26): String -> scala/Predef.String# +[32:30..32:36): Option -> scala/Option# +[32:37..32:43): String -> scala/Predef.String# +[32:47..32:48): s -> example/PickOneRefinement_1#run().(s) +[32:49..32:56): pickOne -> example/SpecialRefinement#pickOne(). +[32:57..32:59): as -> example/PickOneRefinement_1#run().(as) + +Synthetics: +[15:23..15:34):elems.toMap => *[String, Any] +[15:23..15:34):elems.toMap => *(refl[Tuple2[String, Any]]) +[32:47..32:56):s.pickOne => *[String] + expect/RightAssociativeExtension.scala -------------------------------------- @@ -2627,18 +2978,17 @@ Uri => RightAssociativeExtension.scala Text => empty Language => Scala Symbols => 5 entries -Occurrences => 14 entries +Occurrences => 12 entries Symbols: -ext/RightAssociativeExtension$package. => final package object ext -ext/RightAssociativeExtension$package.`:*:`(). => method :*: -ext/RightAssociativeExtension$package.`:*:`().(i) => param i -ext/RightAssociativeExtension$package.`:*:`().(s) => param s -ext/RightAssociativeExtension$package.b. => val method b +ext/RightAssociativeExtension$package. => final package object ext extends Object { self: ext.type => +3 decls } +ext/RightAssociativeExtension$package.`:*:`(). => method :*: (param i: Int)(param s: String): Tuple2[String, Int] +ext/RightAssociativeExtension$package.`:*:`().(i) => param i: Int +ext/RightAssociativeExtension$package.`:*:`().(s) => param s: String +ext/RightAssociativeExtension$package.b. => val method b Tuple2[String, Int] Occurrences: [0:8..0:11): ext <- ext/ -[2:0..2:0): <- ext/RightAssociativeExtension$package. [2:11..2:12): s <- ext/RightAssociativeExtension$package.`:*:`().(s) [2:14..2:20): String -> scala/Predef.String# [3:6..3:9): :*: <- ext/RightAssociativeExtension$package.`:*:`(). 
@@ -2646,7 +2996,6 @@ Occurrences: [3:14..3:17): Int -> scala/Int# [3:21..3:27): String -> scala/Predef.String# [3:29..3:32): Int -> scala/Int# -[3:37..3:37): -> scala/Tuple2.apply(). [3:37..3:38): s -> ext/RightAssociativeExtension$package.`:*:`().(s) [3:40..3:41): i -> ext/RightAssociativeExtension$package.`:*:`().(i) [5:4..5:5): b <- ext/RightAssociativeExtension$package.b. @@ -2661,49 +3010,40 @@ Uri => Selfs.scala Text => empty Language => Scala Symbols => 13 entries -Occurrences => 26 entries +Occurrences => 17 entries Symbols: -local0 => selfparam self -local1 => selfparam self -local2 => selfparam self -selfs/B# => class B -selfs/B#``(). => primary ctor -selfs/C1# => class C1 -selfs/C1#``(). => primary ctor -selfs/C2# => class C2 -selfs/C2#``(). => primary ctor -selfs/C3# => class C3 -selfs/C3#``(). => primary ctor -selfs/C6# => class C6 -selfs/C6#``(). => primary ctor +local0 => selfparam self: C1 +local1 => selfparam self: B +local2 => selfparam self: B & C1 +selfs/B# => class B extends Object { self: B => +1 decls } +selfs/B#``(). => primary ctor (): B +selfs/C1# => class C1 extends B { self: C1 & C1 => +1 decls } +selfs/C1#``(). => primary ctor (): C1 +selfs/C2# => class C2 extends B { self: B & C2 => +1 decls } +selfs/C2#``(). => primary ctor (): C2 +selfs/C3# => class C3 extends B { self: B & C1 & C3 => +1 decls } +selfs/C3#``(). => primary ctor (): C3 +selfs/C6# => class C6 extends B { self: B & C6 => +1 decls } +selfs/C6#``(). => primary ctor (): C6 Occurrences: [0:8..0:13): selfs <- selfs/ -[2:0..2:0): <- selfs/B#``(). [2:6..2:7): B <- selfs/B# [4:6..4:8): C1 <- selfs/C1# -[4:17..4:17): <- selfs/C1#``(). [4:17..4:18): B -> selfs/B# -[4:18..4:18): -> selfs/B#``(). [4:21..4:25): self <- local0 [7:6..7:8): C2 <- selfs/C2# -[7:17..7:17): <- selfs/C2#``(). [7:17..7:18): B -> selfs/B# -[7:18..7:18): -> selfs/B#``(). [7:21..7:25): self <- local1 [7:27..7:28): B -> selfs/B# [10:6..10:8): C3 <- selfs/C3# -[10:17..10:17): <- selfs/C3#``(). 
[10:17..10:18): B -> selfs/B# -[10:18..10:18): -> selfs/B#``(). [10:21..10:25): self <- local2 [10:27..10:28): B -> selfs/B# [10:34..10:36): C1 -> selfs/C1# [13:6..13:8): C6 <- selfs/C6# -[13:17..13:17): <- selfs/C6#``(). [13:17..13:18): B -> selfs/B# -[13:18..13:18): -> selfs/B#``(). [13:27..13:28): B -> selfs/B# expect/Synthetic.scala @@ -2714,48 +3054,63 @@ Schema => SemanticDB v4 Uri => Synthetic.scala Text => empty Language => Scala -Symbols => 38 entries -Occurrences => 162 entries +Symbols => 52 entries +Occurrences => 132 entries +Synthetics => 36 entries Symbols: -example/Synthetic# => class Synthetic -example/Synthetic#F# => class F -example/Synthetic#F#``(). => primary ctor -example/Synthetic#J# => class J -example/Synthetic#J#[T] => typeparam T -example/Synthetic#J#``(). => primary ctor -example/Synthetic#J#arr. => val method arr -example/Synthetic#Name. => val method Name -example/Synthetic#``(). => primary ctor -example/Synthetic#a1. => val method a1 -example/Synthetic#a2. => val method a2 -example/Synthetic#as. => val method as -example/Synthetic#f. => val method f -example/Synthetic#lst. => val method lst -example/Synthetic#name. => val method name -example/Synthetic#ordering. => implicit val method ordering -example/Synthetic#s. => final object s -example/Synthetic#s.Bar# => case class Bar -example/Synthetic#s.Bar#``(). => primary ctor -example/Synthetic#s.Bar#copy(). => method copy -example/Synthetic#s.Bar. => final object Bar -example/Synthetic#s.Bar.apply(). => method apply -example/Synthetic#s.Bar.toString(). => method toString -example/Synthetic#s.Bar.unapply(). => method unapply -example/Synthetic#s.Bar.unapply().(x$1) => param x$1 -example/Synthetic#s.apply(). => method apply -example/Synthetic#x. => val method x -example/Synthetic#xs. 
=> val method xs -local0 => param x -local1 => param y -local2 => param i -local3 => param j -local4 => param i -local5 => param j -local6 => param a -local7 => param b -local8 => param a -local9 => param b +example/Synthetic# => class Synthetic extends Object { self: Synthetic => +23 decls } +example/Synthetic#Contexts. => final object Contexts extends Object { self: Contexts.type => +6 decls } +example/Synthetic#Contexts.foo(). => method foo (param x: Int)(implicit given param x$2: Int): Nothing +example/Synthetic#Contexts.foo().(x$2) => implicit given param x$2: Int +example/Synthetic#Contexts.foo().(x) => param x: Int +example/Synthetic#Contexts.m1(). => method m1 (implicit given param x$1: Int): Nothing +example/Synthetic#Contexts.m1().(x$1) => implicit given param x$1: Int +example/Synthetic#Contexts.m2(). => method m2 (implicit given param x: Int): Nothing +example/Synthetic#Contexts.m2().(x) => implicit given param x: Int +example/Synthetic#Contexts.m3(). => method m3 => Nothing +example/Synthetic#Contexts.m4(). => method m4 => Nothing +example/Synthetic#F# => class F extends Object { self: F => +1 decls } +example/Synthetic#F#``(). => primary ctor (): F +example/Synthetic#J# => class J [typeparam T ] extends Object { self: J[T] => +4 decls } +example/Synthetic#J#[T] => typeparam T +example/Synthetic#J#``(). => primary ctor [typeparam T ]()(implicit param evidence$1: Manifest[T]): J[T] +example/Synthetic#J#``().(evidence$1) => implicit param evidence$1: Manifest[T] +example/Synthetic#J#arr. => val method arr Array[T] +example/Synthetic#J#evidence$1. => private[this] implicit val method evidence$1 Manifest[T] +example/Synthetic#Name. => val method Name Regex +example/Synthetic#``(). => primary ctor (): Synthetic +example/Synthetic#a1. => val method a1 Int +example/Synthetic#a2. => val method a2 Int +example/Synthetic#as. => val method as LazyList[Int] +example/Synthetic#f. => val method f Ordered[F] +example/Synthetic#lst. 
=> val method lst LazyList[Int] +example/Synthetic#name. => val method name String +example/Synthetic#ordering. => implicit val method ordering Ordering[F] +example/Synthetic#s. => final object s extends Object { self: s.type => +5 decls } +example/Synthetic#s.Bar# => case class Bar extends Object with Product with Serializable { self: Bar => +2 decls } +example/Synthetic#s.Bar#``(). => primary ctor (): Bar +example/Synthetic#s.Bar#copy(). => method copy (): Bar +example/Synthetic#s.Bar. => final object Bar extends Object { self: Bar.type => +4 decls } +example/Synthetic#s.Bar.apply(). => method apply (): Bar +example/Synthetic#s.Bar.toString(). => method toString => String <: scala/Any#toString(). +example/Synthetic#s.Bar.unapply(). => method unapply (param x$1: Bar): true +example/Synthetic#s.Bar.unapply().(x$1) => param x$1: Bar +example/Synthetic#s.apply(). => method apply (): Int +example/Synthetic#x. => val method x Int +example/Synthetic#xs. => val method xs LazyList[Int] +local0 => param x: Int +local1 => param y: Int +local2 => param i: Int +local3 => param j: Int +local4 => param i: Int +local5 => param j: Int +local6 => param a: Int +local7 => param b: Int +local8 => param a: Int +local9 => param b: Int +local10 => final implicit lazy val given local x: Int +local11 => final implicit lazy val given local given_Int: Int Occurrences: [0:8..0:15): example <- example/ @@ -2763,112 +3118,72 @@ Occurrences: [2:13..2:21): language -> scala/language. [2:22..2:41): implicitConversions -> scala/language.implicitConversions. [4:6..4:15): Synthetic <- example/Synthetic# -[5:2..5:2): <- example/Synthetic#``(). [5:2..5:6): List -> scala/package.List. -[5:6..5:6): -> scala/collection/IterableFactory#apply(). [5:10..5:13): map -> scala/collection/immutable/List#map(). [5:16..5:17): + -> scala/Int#`+`(+4). -[6:2..6:2): -> scala/Predef.intArrayOps(). [6:2..6:7): Array -> scala/Array. [6:8..6:13): empty -> scala/Array.empty(). 
[6:14..6:17): Int -> scala/Int# -[6:18..6:18): -> scala/reflect/ClassTag.apply(). [6:19..6:29): headOption -> scala/collection/ArrayOps#headOption(). -[7:2..7:2): -> scala/Predef.augmentString(). [7:9..7:20): stripPrefix -> scala/collection/StringOps#stripPrefix(). [10:6..10:10): Name <- example/Synthetic#Name. -[10:13..10:13): -> scala/Predef.augmentString(). [10:25..10:26): r -> scala/collection/StringOps#r(). [11:6..11:7): x <- example/Synthetic#x. [11:8..11:11): #:: -> scala/package.`#::`. -[11:11..11:11): -> scala/package.`#::`.unapply(). [11:12..11:14): xs <- example/Synthetic#xs. [11:17..11:25): LazyList -> scala/package.LazyList. -[11:25..11:25): -> scala/collection/IterableFactory#apply(). [12:6..12:10): Name -> example/Synthetic#Name. -[12:10..12:10): -> scala/util/matching/Regex#unapplySeq(). [12:11..12:15): name <- example/Synthetic#name. -[13:8..13:8): -> scala/collection/immutable/LazyList.toDeferrer(). -[13:14..13:14): -> scala/collection/immutable/LazyList.toDeferrer(). [13:14..13:22): LazyList -> scala/package.LazyList. [13:23..13:28): empty -> scala/collection/immutable/LazyList.empty(). -[13:28..13:28): -> scala/collection/immutable/LazyList.Deferrer#`#::`(). [15:6..15:8): a1 <- example/Synthetic#a1. [15:9..15:12): #:: -> scala/package.`#::`. -[15:12..15:12): -> scala/package.`#::`.unapply(). [15:13..15:15): a2 <- example/Synthetic#a2. [15:16..15:19): #:: -> scala/package.`#::`. -[15:19..15:19): -> scala/package.`#::`.unapply(). [15:20..15:22): as <- example/Synthetic#as. [15:25..15:33): LazyList -> scala/package.LazyList. -[15:33..15:33): -> scala/collection/IterableFactory#apply(). [17:6..17:9): lst <- example/Synthetic#lst. -[17:18..17:18): -> scala/collection/immutable/LazyList.toDeferrer(). -[17:24..17:24): -> scala/collection/immutable/LazyList.toDeferrer(). [17:24..17:32): LazyList -> scala/package.LazyList. [17:33..17:38): empty -> scala/collection/immutable/LazyList.empty(). 
-[17:38..17:38): -> scala/collection/immutable/LazyList.Deferrer#`#::`(). [19:7..19:8): x <- local0 -[19:12..19:12): -> scala/LowPriorityImplicits#intWrapper(). [19:14..19:16): to -> scala/runtime/RichInt#to(). -[19:19..19:19): -> scala/collection/immutable/Range#foreach(). [19:21..19:22): y <- local1 -[19:26..19:26): -> scala/LowPriorityImplicits#intWrapper(). [19:28..19:33): until -> scala/runtime/RichInt#until(). -[19:36..19:36): -> scala/collection/immutable/Range#foreach(). [19:38..19:45): println -> scala/Predef.println(+1). -[19:46..19:46): -> scala/Predef.ArrowAssoc(). [19:46..19:47): x -> local0 [19:48..19:50): -> -> scala/Predef.ArrowAssoc#`->`(). [19:51..19:52): x -> local0 [20:7..20:8): i <- local2 -[20:12..20:12): -> scala/LowPriorityImplicits#intWrapper(). [20:14..20:16): to -> scala/runtime/RichInt#to(). -[20:19..20:19): -> scala/collection/StrictOptimizedIterableOps#flatMap(). [20:21..20:22): j <- local3 -[20:26..20:26): -> scala/LowPriorityImplicits#intWrapper(). [20:28..20:33): until -> scala/runtime/RichInt#until(). -[20:36..20:36): -> scala/collection/immutable/Range#map(). -[20:45..20:45): -> scala/Tuple2.apply(). [20:45..20:46): i -> local2 [20:48..20:49): j -> local3 [21:7..21:8): i <- local4 -[21:12..21:12): -> scala/LowPriorityImplicits#intWrapper(). [21:14..21:16): to -> scala/runtime/RichInt#to(). -[21:19..21:19): -> scala/collection/StrictOptimizedIterableOps#flatMap(). [21:21..21:22): j <- local5 -[21:26..21:26): -> scala/LowPriorityImplicits#intWrapper(). [21:28..21:33): until -> scala/runtime/RichInt#until(). -[21:36..21:36): -> scala/collection/IterableOps#withFilter(). [21:40..21:41): i -> local4 [21:42..21:43): % -> scala/Int#`%`(+3). [21:46..21:48): == -> scala/Int#`==`(+3). -[21:50..21:50): -> scala/collection/WithFilter#map(). -[21:59..21:59): -> scala/Tuple2.apply(). [21:59..21:60): i -> local4 [21:62..21:63): j -> local5 [23:9..23:10): s <- example/Synthetic#s. [24:8..24:13): apply <- example/Synthetic#s.apply(). 
-[25:5..25:5): -> example/Synthetic#s.apply(). [26:6..26:11): apply -> example/Synthetic#s.apply(). [27:15..27:18): Bar <- example/Synthetic#s.Bar# -[27:18..27:18): <- example/Synthetic#s.Bar#``(). [28:4..28:7): Bar -> example/Synthetic#s.Bar. -[28:7..28:7): -> example/Synthetic#s.Bar.apply(). [29:9..29:21): asInstanceOf -> scala/Any#asInstanceOf(). [29:22..29:25): Int -> scala/Int# [29:29..29:32): Int -> scala/Int# -[29:33..29:33): -> scala/Function1#apply(). [32:8..32:9): J <- example/Synthetic#J# -[32:9..32:9): <- example/Synthetic#J#``(). [32:10..32:11): T <- example/Synthetic#J#[T] +[32:11..32:11): <- example/Synthetic#J#evidence$1. [32:13..32:21): Manifest -> scala/Predef.Manifest# -[32:21..32:21): -> example/Synthetic#J#[T] [32:29..32:32): arr <- example/Synthetic#J#arr. [32:35..32:40): Array -> scala/Array. [32:41..32:46): empty -> scala/Array.empty(). [32:47..32:48): T -> example/Synthetic#J#[T] -[34:2..34:2): <- example/Synthetic#F#``(). [34:8..34:9): F <- example/Synthetic#F# [35:15..35:23): ordering <- example/Synthetic#ordering. [35:25..35:33): Ordering -> scala/package.Ordering# @@ -2877,10 +3192,7 @@ Occurrences: [36:6..36:7): f <- example/Synthetic#f. [36:9..36:16): Ordered -> scala/package.Ordered# [36:17..36:18): F -> example/Synthetic#F# -[36:22..36:22): -> scala/math/Ordered.orderingToOrdered(). [36:26..36:27): F -> example/Synthetic#F# -[36:27..36:27): -> example/Synthetic#F#``(). -[36:27..36:27): -> example/Synthetic#ordering. [38:9..38:14): scala -> scala/ [38:15..38:25): concurrent -> scala/concurrent/ [38:26..38:42): ExecutionContext -> scala/concurrent/ExecutionContext. @@ -2891,35 +3203,86 @@ Occurrences: [40:15..40:25): concurrent -> scala/concurrent/ [40:26..40:32): Future -> scala/concurrent/Future. [40:33..40:43): successful -> scala/concurrent/Future.successful(). -[40:46..40:46): -> scala/concurrent/Future#foreach(). 
[41:4..41:5): b <- local7 [41:9..41:14): scala -> scala/ [41:15..41:25): concurrent -> scala/concurrent/ [41:26..41:32): Future -> scala/concurrent/Future. [41:33..41:43): successful -> scala/concurrent/Future.successful(). -[41:46..41:46): -> scala/concurrent/Future#foreach(). [42:4..42:11): println -> scala/Predef.println(+1). [42:12..42:13): a -> local6 -[42:14..42:14): -> scala/concurrent/ExecutionContext.Implicits.global(). [44:4..44:5): a <- local8 [44:9..44:14): scala -> scala/ [44:15..44:25): concurrent -> scala/concurrent/ [44:26..44:32): Future -> scala/concurrent/Future. [44:33..44:43): successful -> scala/concurrent/Future.successful(). -[44:46..44:46): -> scala/concurrent/Future#flatMap(). [45:4..45:5): b <- local9 [45:9..45:14): scala -> scala/ [45:15..45:25): concurrent -> scala/concurrent/ [45:26..45:32): Future -> scala/concurrent/Future. [45:33..45:43): successful -> scala/concurrent/Future.successful(). -[45:46..45:46): -> scala/concurrent/Future#withFilter(). [46:7..46:8): a -> local8 [46:9..46:10): < -> scala/Int#`<`(+3). [46:11..46:12): b -> local9 -[46:12..46:12): -> scala/concurrent/Future#map(). -[46:12..46:12): -> scala/concurrent/ExecutionContext.Implicits.global(). [47:10..47:11): a -> local8 -[47:11..47:11): -> scala/concurrent/ExecutionContext.Implicits.global(). +[49:9..49:17): Contexts <- example/Synthetic#Contexts. +[50:8..50:11): foo <- example/Synthetic#Contexts.foo(). +[50:12..50:13): x <- example/Synthetic#Contexts.foo().(x) +[50:15..50:18): Int -> scala/Int# +[50:26..50:29): Int -> scala/Int# +[50:33..50:36): ??? -> scala/Predef.`???`(). +[51:8..51:10): m1 <- example/Synthetic#Contexts.m1(). +[51:17..51:20): Int -> scala/Int# +[51:24..51:27): foo -> example/Synthetic#Contexts.foo(). +[52:8..52:10): m2 <- example/Synthetic#Contexts.m2(). +[52:17..52:18): x <- example/Synthetic#Contexts.m2().(x) +[52:20..52:23): Int -> scala/Int# +[52:27..52:30): foo -> example/Synthetic#Contexts.foo(). 
+[53:8..53:10): m3 <- example/Synthetic#Contexts.m3(). +[54:12..54:13): x <- local10 +[54:15..54:18): Int -> scala/Int# +[55:6..55:9): foo -> example/Synthetic#Contexts.foo(). +[55:10..55:11): x -> local10 +[56:8..56:10): m4 <- example/Synthetic#Contexts.m4(). +[57:12..57:15): Int -> scala/Int# +[58:6..58:9): foo -> example/Synthetic#Contexts.foo(). + +Synthetics: +[5:2..5:13):List(1).map => *[Int] +[5:2..5:6):List => *.apply[Int] +[6:2..6:18):Array.empty[Int] => intArrayOps(*) +[7:2..7:8):"fooo" => augmentString(*) +[10:13..10:24):"name:(.*)" => augmentString(*) +[11:17..11:25):LazyList => *.apply[Int] +[13:4..13:28):#:: 2 #:: LazyList.empty => *[Int] +[13:8..13:28):2 #:: LazyList.empty => toDeferrer[Int](*) +[13:10..13:28):#:: LazyList.empty => *[Int] +[13:14..13:28):LazyList.empty => toDeferrer[Nothing](*) +[13:14..13:28):LazyList.empty => *[Nothing] +[15:25..15:33):LazyList => *.apply[Int] +[17:14..17:38):#:: 2 #:: LazyList.empty => *[Int] +[17:18..17:38):2 #:: LazyList.empty => toDeferrer[Int](*) +[17:20..17:38):#:: LazyList.empty => *[Int] +[17:24..17:38):LazyList.empty => toDeferrer[Nothing](*) +[17:24..17:38):LazyList.empty => *[Nothing] +[19:12..19:13):1 => intWrapper(*) +[19:26..19:27):0 => intWrapper(*) +[19:46..19:50):x -> => *[Int] +[19:46..19:47):x => ArrowAssoc[Int](*) +[20:12..20:13):1 => intWrapper(*) +[20:26..20:27):0 => intWrapper(*) +[21:12..21:13):1 => intWrapper(*) +[21:26..21:27):0 => intWrapper(*) +[32:35..32:49):Array.empty[T] => *(evidence$1) +[36:22..36:27):new F => orderingToOrdered[F](*) +[36:22..36:27):new F => *(ordering) +[40:9..40:43):scala.concurrent.Future.successful => *[Int] +[41:9..41:43):scala.concurrent.Future.successful => *[Int] +[44:9..44:43):scala.concurrent.Future.successful => *[Int] +[45:9..45:43):scala.concurrent.Future.successful => *[Int] +[51:24..51:30):foo(0) => *(x$1) +[52:27..52:33):foo(0) => *(x) +[55:6..55:12):foo(x) => *(x) +[58:6..58:12):foo(0) => *(given_Int) expect/Traits.scala ------------------- @@ 
-2930,39 +3293,35 @@ Uri => Traits.scala Text => empty Language => Scala Symbols => 13 entries -Occurrences => 16 entries +Occurrences => 12 entries Symbols: -local0 => final class $anon -local1 => selfparam self -traits/C# => class C -traits/C#``(). => primary ctor -traits/T# => trait T -traits/T#``(). => primary ctor -traits/T#x(). => method x -traits/U# => sealed trait U -traits/U#``(). => primary ctor -traits/U. => final object U -traits/U.u(). => method u -traits/V# => trait V -traits/V#``(). => primary ctor +local0 => final class $anon extends Object with U { self: $anon => +1 decls } +local2 => selfparam self: C +traits/C# => class C extends Object { self: C => +1 decls } +traits/C#``(). => primary ctor (): C +traits/T# => trait T extends Object { self: T => +2 decls } +traits/T#``(). => primary ctor (): T +traits/T#x(). => method x => Int +traits/U# => sealed trait U extends Object { self: U => +1 decls } +traits/U#``(). => primary ctor (): U +traits/U. => final object U extends Object { self: U.type => +2 decls } +traits/U.u(). => method u => U +traits/V# => trait V extends Object { self: C & V => +1 decls } +traits/V#``(). => primary ctor (): V Occurrences: [0:8..0:14): traits <- traits/ [2:6..2:7): T <- traits/T# -[3:2..3:2): <- traits/T#``(). [3:6..3:7): x <- traits/T#x(). -[6:0..6:0): <- traits/U#``(). [6:13..6:14): U <- traits/U# [7:7..7:8): U <- traits/U. [8:6..8:7): u <- traits/U.u(). [8:9..8:10): U -> traits/U# [8:17..8:18): U -> traits/U# -[11:0..11:0): <- traits/C#``(). [11:6..11:7): C <- traits/C# [12:6..12:7): V <- traits/V# -[12:10..12:10): <- traits/V#``(). -[12:10..12:14): self <- local1 +[12:10..12:14): self <- local2 [12:16..12:17): C -> traits/C# expect/ValPattern.scala @@ -2974,48 +3333,43 @@ Uri => ValPattern.scala Text => empty Language => Scala Symbols => 22 entries -Occurrences => 63 entries +Occurrences => 46 entries +Synthetics => 7 entries Symbols: -example/ValPattern# => class ValPattern -example/ValPattern#``(). 
=> primary ctor -example/ValPattern#`leftVar_=`(). => var method leftVar_= -example/ValPattern#`leftVar_=`().(x$1) => param x$1 -example/ValPattern#`number1Var_=`(). => var method number1Var_= -example/ValPattern#`number1Var_=`().(x$1) => param x$1 -example/ValPattern#`rightVar_=`(). => var method rightVar_= -example/ValPattern#`rightVar_=`().(x$1) => param x$1 -example/ValPattern#app(). => method app -example/ValPattern#left. => val method left -example/ValPattern#leftVar(). => var method leftVar -example/ValPattern#number1. => val method number1 -example/ValPattern#number1Var(). => var method number1Var -example/ValPattern#q1. => val method q1 -example/ValPattern#right. => val method right -example/ValPattern#rightVar(). => var method rightVar -local0 => val local left -local1 => val local right -local2 => val local number1 -local3 => var local leftVar -local4 => var local rightVar -local5 => var local number1Var +example/ValPattern# => class ValPattern extends Object { self: ValPattern => +14 decls } +example/ValPattern#``(). => primary ctor (): ValPattern +example/ValPattern#`leftVar_=`(). => var method leftVar_= (param x$1: Int): Unit +example/ValPattern#`leftVar_=`().(x$1) => param x$1: Int +example/ValPattern#`number1Var_=`(). => var method number1Var_= (param x$1: Int): Unit +example/ValPattern#`number1Var_=`().(x$1) => param x$1: Int +example/ValPattern#`rightVar_=`(). => var method rightVar_= (param x$1: Int): Unit +example/ValPattern#`rightVar_=`().(x$1) => param x$1: Int +example/ValPattern#app(). => method app (): Unit +example/ValPattern#left. => val method left Int +example/ValPattern#leftVar(). => var method leftVar Int +example/ValPattern#number1. => val method number1 Int +example/ValPattern#number1Var(). => var method number1Var Int +example/ValPattern#q1. => val method q1 Nothing +example/ValPattern#right. => val method right Int +example/ValPattern#rightVar(). 
=> var method rightVar Int +local0 => val local left: Int +local1 => val local right: Int +local2 => val local number1: Int +local3 => var local leftVar: Int +local4 => var local rightVar: Int +local5 => var local number1Var: Int Occurrences: [0:8..0:15): example <- example/ [2:6..2:16): ValPattern <- example/ValPattern# -[4:2..4:2): <- example/ValPattern#``(). [4:7..4:11): left <- example/ValPattern#left. [4:13..4:18): right <- example/ValPattern#right. -[4:23..4:23): -> scala/Tuple2.apply(). [5:6..5:10): Some -> scala/Some. -[5:10..5:10): -> scala/Some.unapply(). [5:11..5:18): number1 <- example/ValPattern#number1. [6:4..6:8): Some -> scala/Some. -[6:8..6:8): -> scala/Some.apply(). [8:6..8:10): List -> scala/package.List. -[8:10..8:10): -> scala/collection/SeqFactory#unapplySeq(). [8:11..8:15): Some -> scala/Some. -[8:15..8:15): -> scala/Some.unapply(). [8:16..8:18): q1 <- example/ValPattern#q1. [8:21..8:25): None -> scala/None. [8:27..8:31): None -> scala/None. @@ -3023,16 +3377,12 @@ Occurrences: [8:46..8:49): ??? -> scala/Predef.`???`(). [10:7..10:14): leftVar <- example/ValPattern#leftVar(). [10:16..10:24): rightVar <- example/ValPattern#rightVar(). -[10:29..10:29): -> scala/Tuple2.apply(). [11:6..11:10): Some -> scala/Some. -[11:10..11:10): -> scala/Some.unapply(). [11:11..11:21): number1Var <- example/ValPattern#number1Var(). [12:4..12:8): Some -> scala/Some. -[12:8..12:8): -> scala/Some.apply(). [14:6..14:9): app <- example/ValPattern#app(). [14:13..14:17): Unit -> scala/Unit# [15:4..15:11): println -> scala/Predef.println(+1). -[17:8..17:8): -> scala/Tuple6.apply(). [17:8..17:15): number1 -> example/ValPattern#number1. [18:8..18:12): left -> example/ValPattern#left. [19:8..19:13): right -> example/ValPattern#right. @@ -3042,22 +3392,15 @@ Occurrences: [25:4..25:11): locally -> scala/Predef.locally(). [26:11..26:15): left <- local0 [26:17..26:22): right <- local1 -[26:27..26:27): -> scala/Tuple2.apply(). [27:10..27:14): Some -> scala/Some. 
-[27:14..27:14): -> scala/Some.unapply(). [27:15..27:22): number1 <- local2 [28:8..28:12): Some -> scala/Some. -[28:12..28:12): -> scala/Some.apply(). [30:11..30:18): leftVar <- local3 [30:20..30:28): rightVar <- local4 -[30:33..30:33): -> scala/Tuple2.apply(). [31:10..31:14): Some -> scala/Some. -[31:14..31:14): -> scala/Some.unapply(). [31:15..31:25): number1Var <- local5 [32:8..32:12): Some -> scala/Some. -[32:12..32:12): -> scala/Some.apply(). [33:6..33:13): println -> scala/Predef.println(+1). -[35:10..35:10): -> scala/Tuple6.apply(). [35:10..35:17): number1 -> local2 [36:10..36:14): left -> local0 [37:10..37:15): right -> local1 @@ -3065,6 +3408,15 @@ Occurrences: [39:10..39:17): leftVar -> local3 [40:10..40:18): rightVar -> local4 +Synthetics: +[6:4..6:8):Some => *.apply[Int] +[8:6..8:10):List => *.unapplySeq[Nothing] +[8:11..8:15):Some => *.unapply[Nothing] +[12:4..12:8):Some => *.apply[Int] +[25:4..25:11):locally => *[Unit] +[28:8..28:12):Some => *.apply[Int] +[32:8..32:12):Some => *.apply[Int] + expect/Vals.scala ----------------- @@ -3074,56 +3426,55 @@ Uri => Vals.scala Text => empty Language => Scala Symbols => 42 entries -Occurrences => 129 entries +Occurrences => 128 entries Symbols: -example/ValUsages. => final object ValUsages -example/ValUsages.v. => val method v -example/Vals# => abstract class Vals -example/Vals#_explicitSetter(). => var method _explicitSetter -example/Vals#``(). => primary ctor -example/Vals#``().(p) => param p -example/Vals#``().(xp) => val param xp -example/Vals#``().(yp) => var param yp -example/Vals#`explicitSetter_=`(). => method explicitSetter_= -example/Vals#`explicitSetter_=`().(x) => param x -example/Vals#`yam_=`(). => var method yam_= -example/Vals#`yam_=`().(x$1) => param x$1 -example/Vals#`yfm_=`(). => final var method yfm_= -example/Vals#`yfm_=`().(x$1) => param x$1 -example/Vals#`yim_=`(). => var method yim_= -example/Vals#`yim_=`().(x$1) => param x$1 -example/Vals#`ym_=`(). 
=> var method ym_= -example/Vals#`ym_=`().(x$1) => param x$1 -example/Vals#`yp_=`(). => var method yp_= -example/Vals#`yp_=`().(x$1) => param x$1 -example/Vals#explicitSetter(). => method explicitSetter -example/Vals#m(). => method m -example/Vals#p. => val method p -example/Vals#xam. => abstract val method xam -example/Vals#xfm. => final val method xfm -example/Vals#xim. => implicit val method xim -example/Vals#xlm. => val method xlm -example/Vals#xm. => val method xm -example/Vals#xp. => val method xp -example/Vals#xzlm. => lazy val method xzlm -example/Vals#xzm. => lazy val method xzm -example/Vals#yam(). => abstract var method yam -example/Vals#yfm(). => final var method yfm -example/Vals#yim(). => implicit var method yim -example/Vals#ylm(). => var method ylm -example/Vals#ym(). => var method ym -example/Vals#yp(). => var method yp -local0 => val local xl -local1 => lazy val local xzl -local2 => implicit val local xil -local3 => var local yl -local4 => implicit var local yil +example/ValUsages. => final object ValUsages extends Object { self: ValUsages.type => +2 decls } +example/ValUsages.v. => val method v Vals +example/Vals# => abstract class Vals extends Object { self: Vals => +25 decls } +example/Vals#_explicitSetter(). => private[this] var method _explicitSetter Int +example/Vals#``(). => primary ctor (param p: Int, val param xp: Int, var param yp: Int): Vals +example/Vals#``().(p) => param p: Int +example/Vals#``().(xp) => val param xp: Int +example/Vals#``().(yp) => var param yp: Int +example/Vals#`explicitSetter_=`(). => method explicitSetter_= (param x: Int): Unit +example/Vals#`explicitSetter_=`().(x) => param x: Int +example/Vals#`yam_=`(). => var method yam_= (param x$1: Int): Unit +example/Vals#`yam_=`().(x$1) => param x$1: Int +example/Vals#`yfm_=`(). => final var method yfm_= (param x$1: Int): Unit +example/Vals#`yfm_=`().(x$1) => param x$1: Int +example/Vals#`yim_=`(). 
=> var method yim_= (param x$1: Int): Unit +example/Vals#`yim_=`().(x$1) => param x$1: Int +example/Vals#`ym_=`(). => var method ym_= (param x$1: Int): Unit +example/Vals#`ym_=`().(x$1) => param x$1: Int +example/Vals#`yp_=`(). => var method yp_= (param x$1: Int): Unit +example/Vals#`yp_=`().(x$1) => param x$1: Int +example/Vals#explicitSetter(). => method explicitSetter => Int +example/Vals#m(). => method m => Unit +example/Vals#p. => private[this] val method p Int +example/Vals#xam. => abstract val method xam Int +example/Vals#xfm. => final val method xfm Int +example/Vals#xim. => implicit val method xim Int +example/Vals#xlm. => private[this] val method xlm Int +example/Vals#xm. => val method xm Int +example/Vals#xp. => val method xp Int +example/Vals#xzlm. => private[this] lazy val method xzlm Int +example/Vals#xzm. => lazy val method xzm Int +example/Vals#yam(). => abstract var method yam Int +example/Vals#yfm(). => final var method yfm Int +example/Vals#yim(). => implicit var method yim Int +example/Vals#ylm(). => private[this] var method ylm Int +example/Vals#ym(). => var method ym Int +example/Vals#yp(). => var method yp Int +local0 => val local xl: Int +local1 => lazy val local xzl: Int +local2 => implicit val local xil: Int +local3 => var local yl: Int +local4 => implicit var local yil: Int Occurrences: [0:8..0:15): example <- example/ [2:15..2:19): Vals <- example/Vals# -[2:19..2:19): <- example/Vals#``(). [2:20..2:21): p <- example/Vals#p. [2:23..2:26): Int -> scala/Int# [2:32..2:34): xp <- example/Vals#xp. @@ -3260,20 +3611,19 @@ Uri => Vararg.scala Text => empty Language => Scala Symbols => 6 entries -Occurrences => 11 entries +Occurrences => 10 entries Symbols: -example/Vararg# => class Vararg -example/Vararg#``(). => primary ctor -example/Vararg#add1(). => method add1 -example/Vararg#add1().(a) => param a -example/Vararg#add2(). 
=> method add2 -example/Vararg#add2().(a) => param a +example/Vararg# => class Vararg extends Object { self: Vararg => +3 decls } +example/Vararg#``(). => primary ctor (): Vararg +example/Vararg#add1(). => method add1 (param a: Int*): Unit +example/Vararg#add1().(a) => param a: Int* +example/Vararg#add2(). => method add2 (param a: Seq[Int]*): Unit +example/Vararg#add2().(a) => param a: Seq[Int]* Occurrences: [0:8..0:15): example <- example/ [2:6..2:12): Vararg <- example/Vararg# -[3:2..3:2): <- example/Vararg#``(). [3:6..3:10): add1 <- example/Vararg#add1(). [3:11..3:12): a <- example/Vararg#add1().(a) [3:14..3:17): Int -> scala/Int# @@ -3292,36 +3642,35 @@ Uri => exports-example-Codec.scala Text => empty Language => Scala Symbols => 21 entries -Occurrences => 39 entries +Occurrences => 30 entries Symbols: -exports/example/Codec# => trait Codec -exports/example/Codec#[T] => typeparam T -exports/example/Codec#``(). => primary ctor -exports/example/Codec#``().(decode) => param decode -exports/example/Codec#``().(encode) => param encode -exports/example/Codec#decode(). => final method decode -exports/example/Codec#decode().(a) => param a -exports/example/Codec#decode. => val method decode -exports/example/Codec#encode(). => final method encode -exports/example/Codec#encode().(t) => param t -exports/example/Codec#encode. => val method encode -exports/example/Decoder# => trait Decoder -exports/example/Decoder#[T] => covariant typeparam T -exports/example/Decoder#``(). => primary ctor -exports/example/Decoder#decode(). => abstract method decode -exports/example/Decoder#decode().(a) => param a -exports/example/Encoder# => trait Encoder -exports/example/Encoder#[T] => contravariant typeparam T -exports/example/Encoder#``(). => primary ctor -exports/example/Encoder#encode(). 
=> abstract method encode -exports/example/Encoder#encode().(t) => param t +exports/example/Codec# => trait Codec [typeparam T ] extends Object with Decoder[T] with Encoder[T] { self: Codec[T] => +6 decls } +exports/example/Codec#[T] => typeparam T +exports/example/Codec#``(). => primary ctor [typeparam T ](param decode: Decoder[T], param encode: Encoder[T]): Codec[T] +exports/example/Codec#``().(decode) => param decode: Decoder[T] +exports/example/Codec#``().(encode) => param encode: Encoder[T] +exports/example/Codec#decode(). => final method decode (param a: Array[Byte]): T <: exports/example/Decoder#decode(). +exports/example/Codec#decode().(a) => param a: Array[Byte] +exports/example/Codec#decode. => private[this] val method decode Decoder[T] +exports/example/Codec#encode(). => final method encode (param t: T): Array[Byte] <: exports/example/Encoder#encode(). +exports/example/Codec#encode().(t) => param t: T +exports/example/Codec#encode. => private[this] val method encode Encoder[T] +exports/example/Decoder# => trait Decoder [covariant typeparam T ] extends Object { self: Decoder[T] => +3 decls } +exports/example/Decoder#[T] => covariant typeparam T +exports/example/Decoder#``(). => primary ctor [covariant typeparam T ](): Decoder[T] +exports/example/Decoder#decode(). => abstract method decode (param a: Array[Byte]): T +exports/example/Decoder#decode().(a) => param a: Array[Byte] +exports/example/Encoder# => trait Encoder [contravariant typeparam T ] extends Object { self: Encoder[T] => +3 decls } +exports/example/Encoder#[T] => contravariant typeparam T +exports/example/Encoder#``(). => primary ctor [contravariant typeparam T ](): Encoder[T] +exports/example/Encoder#encode(). => abstract method encode (param t: T): Array[Byte] +exports/example/Encoder#encode().(t) => param t: T Occurrences: [0:8..0:15): exports -> exports/ [0:16..0:23): example <- exports/example/ [2:6..2:13): Decoder <- exports/example/Decoder# -[2:13..2:13): <- exports/example/Decoder#``(). 
[2:15..2:16): T <- exports/example/Decoder#[T] [3:6..3:12): decode <- exports/example/Decoder#decode(). [3:13..3:14): a <- exports/example/Decoder#decode().(a) @@ -3329,7 +3678,6 @@ Occurrences: [3:22..3:26): Byte -> scala/Byte# [3:30..3:31): T -> exports/example/Decoder#[T] [6:6..6:13): Encoder <- exports/example/Encoder# -[6:13..6:13): <- exports/example/Encoder#``(). [6:15..6:16): T <- exports/example/Encoder#[T] [7:6..7:12): encode <- exports/example/Encoder#encode(). [7:13..7:14): t <- exports/example/Encoder#encode().(t) @@ -3337,7 +3685,6 @@ Occurrences: [7:20..7:25): Array -> scala/Array# [7:26..7:30): Byte -> scala/Byte# [10:6..10:11): Codec <- exports/example/Codec# -[10:11..10:11): <- exports/example/Codec#``(). [10:12..10:13): T <- exports/example/Codec#[T] [10:15..10:21): decode <- exports/example/Codec#decode. [10:23..10:30): Decoder -> exports/example/Decoder# @@ -3350,13 +3697,7 @@ Occurrences: [11:26..11:33): Encoder -> exports/example/Encoder# [11:34..11:35): T -> exports/example/Codec#[T] [12:9..12:15): decode -> exports/example/Codec#decode. -[12:15..12:15): -> exports/example/Decoder#decode(). -[12:15..12:15): -> exports/example/Codec#decode().(a) -[12:16..12:16): <- exports/example/Codec#decode(). [13:9..13:15): encode -> exports/example/Codec#encode. -[13:15..13:15): -> exports/example/Encoder#encode(). -[13:15..13:15): -> exports/example/Codec#encode().(t) -[13:16..13:16): <- exports/example/Codec#encode(). expect/exports-package.scala ---------------------------- @@ -3366,18 +3707,20 @@ Schema => SemanticDB v4 Uri => exports-package.scala Text => empty Language => Scala -Symbols => 4 entries -Occurrences => 6 entries +Symbols => 7 entries +Occurrences => 5 entries Symbols: -exports/`exports-package$package`. 
=> final package object exports -exports/`exports-package$package`.Codec# => final type Codec -exports/`exports-package$package`.Decoder# => final type Decoder -exports/`exports-package$package`.Encoder# => final type Encoder +exports/`exports-package$package`. => final package object exports extends Object { self: exports.type => +4 decls } +exports/`exports-package$package`.Codec# => final type Codec [typeparam T ] = Codec[T] +exports/`exports-package$package`.Codec#[T] => typeparam T +exports/`exports-package$package`.Decoder# => final type Decoder [typeparam T ] = Decoder[T] +exports/`exports-package$package`.Decoder#[T] => typeparam T +exports/`exports-package$package`.Encoder# => final type Encoder [typeparam T ] = Encoder[T] +exports/`exports-package$package`.Encoder#[T] => typeparam T Occurrences: [0:8..0:15): exports <- exports/ -[2:0..2:0): <- exports/`exports-package$package`. [2:7..2:14): example -> exports/example/ [2:16..2:23): Decoder <- exports/`exports-package$package`.Decoder# [2:25..2:32): Encoder <- exports/`exports-package$package`.Encoder# @@ -3392,15 +3735,14 @@ Uri => filename%20with%20spaces.scala Text => empty Language => Scala Symbols => 2 entries -Occurrences => 3 entries +Occurrences => 2 entries Symbols: -example/FilenameWithSpaces# => class FilenameWithSpaces -example/FilenameWithSpaces#``(). => primary ctor +example/FilenameWithSpaces# => class FilenameWithSpaces extends Object { self: FilenameWithSpaces => +1 decls } +example/FilenameWithSpaces#``(). => primary ctor (): FilenameWithSpaces Occurrences: [0:8..0:15): example <- example/ -[2:0..2:0): <- example/FilenameWithSpaces#``(). [2:6..2:24): FilenameWithSpaces <- example/FilenameWithSpaces# expect/i9727.scala @@ -3412,30 +3754,124 @@ Uri => i9727.scala Text => empty Language => Scala Symbols => 7 entries -Occurrences => 12 entries +Occurrences => 8 entries Symbols: -i9727/Test# => class Test -i9727/Test#``(). => primary ctor -i9727/Test#``().(a) => param a -i9727/Test#a. 
=> val method a -i9727/i9727$package. => final package object i9727 -i9727/i9727$package.a. => val method a -i9727/i9727$package.b. => val method b +i9727/Test# => class Test extends Object { self: Test => +2 decls } +i9727/Test#``(). => primary ctor (param a: Int): Test +i9727/Test#``().(a) => param a: Int +i9727/Test#a. => private[this] val method a Int +i9727/i9727$package. => final package object i9727 extends Object { self: i9727.type => +3 decls } +i9727/i9727$package.a. => val method a Test +i9727/i9727$package.b. => val method b Test Occurrences: [0:8..0:13): i9727 <- i9727/ [2:6..2:10): Test <- i9727/Test# -[2:10..2:10): <- i9727/Test#``(). [2:11..2:12): a <- i9727/Test#a. [2:14..2:17): Int -> scala/Int# -[3:0..3:0): <- i9727/i9727$package. [3:4..3:5): a <- i9727/i9727$package.a. [3:12..3:16): Test -> i9727/Test# -[3:16..3:16): -> i9727/Test#``(). [4:4..4:5): b <- i9727/i9727$package.b. [4:12..4:16): Test -> i9727/Test# -[4:16..4:16): -> i9727/Test#``(). + +expect/i9782.scala +------------------ + +Summary: +Schema => SemanticDB v4 +Uri => i9782.scala +Text => empty +Language => Scala +Symbols => 24 entries +Occurrences => 59 entries + +Symbols: +_empty_/Copy# => trait Copy [typeparam In <: Txn[In], typeparam Out <: Txn[Out]] extends Object { self: Copy[In, Out] => +5 decls } +_empty_/Copy#[In] => typeparam In <: Txn[In] +_empty_/Copy#[Out] => typeparam Out <: Txn[Out] +_empty_/Copy#``(). => primary ctor [typeparam In <: Txn[In], typeparam Out <: Txn[Out]](): Copy[In, Out] +_empty_/Copy#apply(). => method apply [typeparam Repr [typeparam ~ <: Txn[~]] <: Elem[~]](param in: Repr[In]): Repr[Out] +_empty_/Copy#apply().(in) => param in: Repr[In] +_empty_/Copy#apply().[Repr] => typeparam Repr [typeparam ~ <: Txn[~]] <: Elem[~] +_empty_/Copy#apply().[Repr][`~`] => typeparam ~ <: Txn[~] +_empty_/Copy#copyImpl(). 
=> abstract method copyImpl [typeparam Repr [typeparam ~ <: Txn[~]] <: Elem[~]](param in: Repr[In]): Repr[Out] +_empty_/Copy#copyImpl().(in) => param in: Repr[In] +_empty_/Copy#copyImpl().[Repr] => typeparam Repr [typeparam ~ <: Txn[~]] <: Elem[~] +_empty_/Copy#copyImpl().[Repr][`~`] => typeparam ~ <: Txn[~] +_empty_/Elem# => trait Elem [typeparam T <: Txn[T]] extends Object { self: Elem[T] => +2 decls } +_empty_/Elem#[T] => typeparam T <: Txn[T] +_empty_/Elem#``(). => primary ctor [typeparam T <: Txn[T]](): Elem[T] +_empty_/Obj# => trait Obj [typeparam T <: Txn[T]] extends Object with Elem[T] { self: Obj[T] => +2 decls } +_empty_/Obj#[T] => typeparam T <: Txn[T] +_empty_/Obj#``(). => primary ctor [typeparam T <: Txn[T]](): Obj[T] +_empty_/Txn# => trait Txn [typeparam T <: Txn[T]] extends Object { self: Txn[T] => +2 decls } +_empty_/Txn#[T] => typeparam T <: Txn[T] +_empty_/Txn#``(). => primary ctor [typeparam T <: Txn[T]](): Txn[T] +local0 => val local out: Repr[Out] +local1 => val local inObj: Repr[In] & Obj[In] +local2 => val local outObj: Repr[Out] & Obj[Out] + +Occurrences: +[1:6..1:9): Txn <- _empty_/Txn# +[1:10..1:11): T <- _empty_/Txn#[T] +[1:15..1:18): Txn -> _empty_/Txn# +[1:19..1:20): T -> _empty_/Txn#[T] +[3:6..3:10): Elem <- _empty_/Elem# +[3:11..3:12): T <- _empty_/Elem#[T] +[3:16..3:19): Txn -> _empty_/Txn# +[3:20..3:21): T -> _empty_/Elem#[T] +[5:6..5:9): Obj <- _empty_/Obj# +[5:10..5:11): T <- _empty_/Obj#[T] +[5:15..5:18): Txn -> _empty_/Txn# +[5:19..5:20): T -> _empty_/Obj#[T] +[5:31..5:35): Elem -> _empty_/Elem# +[5:36..5:37): T -> _empty_/Obj#[T] +[7:6..7:10): Copy <- _empty_/Copy# +[7:11..7:13): In <- _empty_/Copy#[In] +[7:17..7:20): Txn -> _empty_/Txn# +[7:21..7:23): In -> _empty_/Copy#[In] +[7:26..7:29): Out <- _empty_/Copy#[Out] +[7:33..7:36): Txn -> _empty_/Txn# +[7:37..7:40): Out -> _empty_/Copy#[Out] +[8:6..8:14): copyImpl <- _empty_/Copy#copyImpl(). 
+[8:15..8:19): Repr <- _empty_/Copy#copyImpl().[Repr] +[8:20..8:21): ~ <- _empty_/Copy#copyImpl().[Repr][`~`] +[8:25..8:28): Txn -> _empty_/Txn# +[8:29..8:30): ~ -> _empty_/Copy#copyImpl().[Repr][`~`] +[8:36..8:40): Elem -> _empty_/Elem# +[8:41..8:42): ~ -> _empty_/Copy#copyImpl().[Repr][`~`] +[8:45..8:47): in <- _empty_/Copy#copyImpl().(in) +[8:49..8:53): Repr -> _empty_/Copy#copyImpl().[Repr] +[8:54..8:56): In -> _empty_/Copy#[In] +[8:60..8:64): Repr -> _empty_/Copy#copyImpl().[Repr] +[8:65..8:68): Out -> _empty_/Copy#[Out] +[10:6..10:11): apply <- _empty_/Copy#apply(). +[10:12..10:16): Repr <- _empty_/Copy#apply().[Repr] +[10:17..10:18): ~ <- _empty_/Copy#apply().[Repr][`~`] +[10:22..10:25): Txn -> _empty_/Txn# +[10:26..10:27): ~ -> _empty_/Copy#apply().[Repr][`~`] +[10:33..10:37): Elem -> _empty_/Elem# +[10:38..10:39): ~ -> _empty_/Copy#apply().[Repr][`~`] +[10:42..10:44): in <- _empty_/Copy#apply().(in) +[10:46..10:50): Repr -> _empty_/Copy#apply().[Repr] +[10:51..10:53): In -> _empty_/Copy#[In] +[10:57..10:61): Repr -> _empty_/Copy#apply().[Repr] +[10:62..10:65): Out -> _empty_/Copy#[Out] +[11:8..11:11): out <- local0 +[11:14..11:22): copyImpl -> _empty_/Copy#copyImpl(). +[11:23..11:27): Repr -> _empty_/Copy#apply().[Repr] +[11:29..11:31): in -> _empty_/Copy#apply().(in) +[12:5..12:7): in -> _empty_/Copy#apply().(in) +[12:9..12:12): out -> local0 +[13:12..13:17): inObj <- local1 +[13:19..13:22): Obj -> _empty_/Obj# +[13:23..13:25): In -> _empty_/Copy#[In] +[13:28..13:34): outObj <- local2 +[13:36..13:39): Obj -> _empty_/Obj# +[13:40..13:43): Out -> _empty_/Copy#[Out] +[14:8..14:15): println -> scala/Predef.println(+1). +[17:4..17:7): out -> local0 expect/inlineconsume.scala -------------------------- @@ -3446,12 +3882,12 @@ Uri => inlineconsume.scala Text => empty Language => Scala Symbols => 3 entries -Occurrences => 9 entries +Occurrences => 8 entries Symbols: -inlineconsume/Foo# => class Foo -inlineconsume/Foo#``(). 
=> primary ctor -inlineconsume/Foo#test(). => method test +inlineconsume/Foo# => class Foo extends Object { self: Foo => +2 decls } +inlineconsume/Foo#``(). => primary ctor (): Foo +inlineconsume/Foo#test(). => method test => Unit Occurrences: [0:8..0:21): inlineconsume <- inlineconsume/ @@ -3459,7 +3895,6 @@ Occurrences: [2:18..2:28): FakePredef -> inlinedefs/FakePredef. [2:29..2:35): assert -> inlinedefs/FakePredef.assert(). [4:6..4:9): Foo <- inlineconsume/Foo# -[5:2..5:2): <- inlineconsume/Foo#``(). [5:6..5:10): test <- inlineconsume/Foo#test(). [5:13..5:19): assert -> inlinedefs/FakePredef.assert(). [5:22..5:23): > -> scala/Int#`>`(+3). @@ -3473,12 +3908,12 @@ Uri => inlinedefs.scala Text => empty Language => Scala Symbols => 3 entries -Occurrences => 12 entries +Occurrences => 10 entries Symbols: -inlinedefs/FakePredef. => final object FakePredef -inlinedefs/FakePredef.assert(). => final macro assert -inlinedefs/FakePredef.assert().(assertion) => param assertion +inlinedefs/FakePredef. => final object FakePredef extends Object { self: FakePredef.type => +2 decls } +inlinedefs/FakePredef.assert(). => final inline transparent macro assert (inline param assertion: Boolean): Unit +inlinedefs/FakePredef.assert().(assertion) => inline param assertion: Boolean Occurrences: [0:8..0:18): inlinedefs <- inlinedefs/ @@ -3488,11 +3923,9 @@ Occurrences: [13:56..13:63): Boolean -> scala/Boolean# [13:66..13:70): Unit -> scala/Unit# [14:9..14:18): assertion -> inlinedefs/FakePredef.assert().(assertion) -[14:18..14:18): -> scala/Boolean#`unary_!`(). [15:16..15:20): java -> java/ [15:21..15:25): lang -> java/lang/ [15:26..15:40): AssertionError -> java/lang/AssertionError# -[15:40..15:40): -> java/lang/AssertionError#``(+2). 
expect/local-file.scala ----------------------- @@ -3503,22 +3936,90 @@ Uri => local-file.scala Text => empty Language => Scala Symbols => 3 entries -Occurrences => 7 entries +Occurrences => 6 entries +Synthetics => 1 entries Symbols: -example/`local-file`# => class local-file -example/`local-file`#``(). => primary ctor -local0 => val local local +example/`local-file`# => class local-file extends Object { self: local-file => +1 decls } +example/`local-file`#``(). => primary ctor (): local-file +local0 => val local local: Int Occurrences: [0:8..0:15): example <- example/ [2:7..2:17): local-file <- example/`local-file`# -[3:2..3:2): <- example/`local-file`#``(). [3:2..3:9): locally -> scala/Predef.locally(). [4:8..4:13): local <- local0 [5:4..5:9): local -> local0 [5:10..5:11): + -> scala/Int#`+`(+4). +Synthetics: +[3:2..3:9):locally => *[Int] + +expect/nullary.scala +-------------------- + +Summary: +Schema => SemanticDB v4 +Uri => nullary.scala +Text => empty +Language => Scala +Symbols => 17 entries +Occurrences => 29 entries +Synthetics => 1 entries + +Symbols: +_empty_/Concrete# => class Concrete extends NullaryTest[Int, List] { self: Concrete => +3 decls } +_empty_/Concrete#``(). => primary ctor (): Concrete +_empty_/Concrete#nullary2(). => method nullary2 => Int <: _empty_/NullaryTest#nullary2(). +_empty_/Concrete#nullary3(). => method nullary3 => List[Int] <: _empty_/NullaryTest#nullary3(). +_empty_/NullaryTest# => abstract class NullaryTest [typeparam T , typeparam m [typeparam s ]] extends Object { self: NullaryTest[T, m] => +9 decls } +_empty_/NullaryTest#[T] => typeparam T +_empty_/NullaryTest#[m] => typeparam m [typeparam s ] +_empty_/NullaryTest#[m][s] => typeparam s +_empty_/NullaryTest#``(). => primary ctor [typeparam T , typeparam m [typeparam s ]](): NullaryTest[T, m] +_empty_/NullaryTest#``().[m][s] => typeparam s +_empty_/NullaryTest#nullary(). => method nullary => String +_empty_/NullaryTest#nullary2(). 
=> abstract method nullary2 => T +_empty_/NullaryTest#nullary3(). => abstract method nullary3 => m[T] +_empty_/NullaryTest#x. => val method x String +_empty_/NullaryTest#x2. => val method x2 T +_empty_/NullaryTest#x3. => val method x3 m[T] +_empty_/test. => final object test extends Object { self: test.type => +1 decls } + +Occurrences: +[0:15..0:26): NullaryTest <- _empty_/NullaryTest# +[0:27..0:28): T <- _empty_/NullaryTest#[T] +[0:30..0:31): m <- _empty_/NullaryTest#[m] +[0:32..0:33): s <- _empty_/NullaryTest#``().[m][s] +[1:6..1:13): nullary <- _empty_/NullaryTest#nullary(). +[1:15..1:21): String -> scala/Predef.String# +[2:6..2:7): x <- _empty_/NullaryTest#x. +[2:10..2:17): nullary -> _empty_/NullaryTest#nullary(). +[4:6..4:14): nullary2 <- _empty_/NullaryTest#nullary2(). +[4:16..4:17): T -> _empty_/NullaryTest#[T] +[5:6..5:8): x2 <- _empty_/NullaryTest#x2. +[5:11..5:19): nullary2 -> _empty_/NullaryTest#nullary2(). +[7:6..7:14): nullary3 <- _empty_/NullaryTest#nullary3(). +[7:16..7:17): m -> _empty_/NullaryTest#[m] +[7:18..7:19): T -> _empty_/NullaryTest#[T] +[8:6..8:8): x3 <- _empty_/NullaryTest#x3. +[8:11..8:19): nullary3 -> _empty_/NullaryTest#nullary3(). +[11:6..11:14): Concrete <- _empty_/Concrete# +[11:23..11:34): NullaryTest -> _empty_/NullaryTest# +[11:35..11:38): Int -> scala/Int# +[11:40..11:44): List -> scala/package.List# +[12:6..12:14): nullary2 <- _empty_/Concrete#nullary2(). +[13:6..13:14): nullary3 <- _empty_/Concrete#nullary3(). +[13:17..13:21): List -> scala/package.List. +[16:7..16:11): test <- _empty_/test. +[17:7..17:15): Concrete -> _empty_/Concrete# +[17:17..17:25): nullary2 -> _empty_/Concrete#nullary2(). +[18:7..18:15): Concrete -> _empty_/Concrete# +[18:17..18:25): nullary3 -> _empty_/Concrete#nullary3(). 
+ +Synthetics: +[13:17..13:21):List => *.apply[Int] + expect/recursion.scala ---------------------- @@ -3528,55 +4029,54 @@ Uri => recursion.scala Text => empty Language => Scala Symbols => 36 entries -Occurrences => 56 entries +Occurrences => 46 entries +Synthetics => 3 entries Symbols: -local0 => case val method N$1 -local1 => val local p -local2 => case val method N$2 -local3 => val local p -local4 => val local Nat_this -local5 => val local Nat_this -local6 => val local Nat_this -recursion/Nats. => final object Nats -recursion/Nats.Nat# => sealed trait Nat -recursion/Nats.Nat#`++`(). => macro ++ -recursion/Nats.Nat#`+`(). => macro + -recursion/Nats.Nat#`+`().(that) => param that -recursion/Nats.Nat#``(). => primary ctor -recursion/Nats.Succ# => case class Succ -recursion/Nats.Succ#[N] => typeparam N -recursion/Nats.Succ#_1(). => method _1 -recursion/Nats.Succ#``(). => primary ctor -recursion/Nats.Succ#``().(p) => val param p -recursion/Nats.Succ#copy$default$1(). => method copy$default$1 -recursion/Nats.Succ#copy$default$1().[N] => typeparam N -recursion/Nats.Succ#copy(). => method copy -recursion/Nats.Succ#copy().(p) => param p -recursion/Nats.Succ#copy().[N] => typeparam N -recursion/Nats.Succ#p. => val method p -recursion/Nats.Succ. => final object Succ -recursion/Nats.Succ.apply(). => method apply -recursion/Nats.Succ.apply().(p) => param p -recursion/Nats.Succ.apply().[N] => typeparam N -recursion/Nats.Succ.toString(). => method toString -recursion/Nats.Succ.unapply(). => method unapply -recursion/Nats.Succ.unapply().(x$1) => param x$1 -recursion/Nats.Succ.unapply().[N] => typeparam N -recursion/Nats.Zero. => final case object Zero -recursion/Nats.j31. => val method j31 -recursion/Nats.toIntg(). 
=> macro toIntg -recursion/Nats.toIntg().(n) => param n +local0 => case val method N$1 <: Nat +local1 => val local p: N$1 +local2 => case val method N$2 <: Nat +local3 => val local p: N$2 +local4 => val local Nat_this: Zero.type +local5 => val local Nat_this: Succ[Zero.type] +local6 => val local Nat_this: Succ[_] forSome { type _ <: Succ[Zero.type] } +recursion/Nats. => final object Nats extends Object { self: Nats.type => +9 decls } +recursion/Nats.Nat# => sealed trait Nat extends Object { self: Nat => +3 decls } +recursion/Nats.Nat#`++`(). => inline transparent macro ++ => Succ[Nat.this.type] +recursion/Nats.Nat#`+`(). => inline transparent macro + (inline param that: Nat): Nat +recursion/Nats.Nat#`+`().(that) => inline param that: Nat +recursion/Nats.Nat#``(). => primary ctor (): Nat +recursion/Nats.Succ# => case class Succ [typeparam N <: Nat] extends Object with Nat with Product with Serializable { self: Succ[N] => +6 decls } +recursion/Nats.Succ#[N] => typeparam N <: Nat +recursion/Nats.Succ#_1(). => method _1 => N +recursion/Nats.Succ#``(). => primary ctor [typeparam N <: Nat](val param p: N): Succ[N] +recursion/Nats.Succ#``().(p) => val param p: N +recursion/Nats.Succ#copy$default$1(). => method copy$default$1 [typeparam N <: Nat]: N +recursion/Nats.Succ#copy$default$1().[N] => typeparam N <: Nat +recursion/Nats.Succ#copy(). => method copy [typeparam N <: Nat](param p: N): Succ[N] +recursion/Nats.Succ#copy().(p) => param p: N +recursion/Nats.Succ#copy().[N] => typeparam N <: Nat +recursion/Nats.Succ#p. => val method p N +recursion/Nats.Succ. => final object Succ extends Object { self: Succ.type => +4 decls } +recursion/Nats.Succ.apply(). => method apply [typeparam N <: Nat](param p: N): Succ[N] +recursion/Nats.Succ.apply().(p) => param p: N +recursion/Nats.Succ.apply().[N] => typeparam N <: Nat +recursion/Nats.Succ.toString(). => method toString => String <: scala/Any#toString(). +recursion/Nats.Succ.unapply(). 
=> method unapply [typeparam N <: Nat](param x$1: Succ[N]): Succ[N] +recursion/Nats.Succ.unapply().(x$1) => param x$1: Succ[N] +recursion/Nats.Succ.unapply().[N] => typeparam N <: Nat +recursion/Nats.Zero. => final case object Zero extends Object with Nat with Product with Serializable { self: Zero.type => +1 decls } +recursion/Nats.j31. => val method j31 Int +recursion/Nats.toIntg(). => inline transparent macro toIntg (inline param n: Nat): Int +recursion/Nats.toIntg().(n) => inline param n: Nat Occurrences: [1:8..1:17): recursion <- recursion/ [3:7..3:11): Nats <- recursion/Nats. [4:15..4:18): Nat <- recursion/Nats.Nat# -[5:4..5:4): <- recursion/Nats.Nat#``(). [5:27..5:29): ++ <- recursion/Nats.Nat#`++`(). [5:32..5:36): Succ -> recursion/Nats.Succ# [5:50..5:54): Succ -> recursion/Nats.Succ. -[5:54..5:54): -> recursion/Nats.Succ.apply(). [7:27..7:28): + <- recursion/Nats.Nat#`+`(). [7:36..7:40): that <- recursion/Nats.Nat#`+`().(that) [7:42..7:45): Nat -> recursion/Nats.Nat# @@ -3584,8 +4084,6 @@ Occurrences: [9:13..9:17): Zero -> recursion/Nats.Zero. [9:24..9:28): that -> recursion/Nats.Nat#`+`().(that) [10:13..10:17): Succ -> recursion/Nats.Succ. -[10:17..10:17): -> recursion/Nats.Succ.unapply(). -[10:17..10:17): -> local0 [10:18..10:19): p <- local1 [10:24..10:25): p -> local1 [10:26..10:27): + -> recursion/Nats.Nat#`+`(). @@ -3594,8 +4092,8 @@ Occurrences: [14:14..14:18): Zero <- recursion/Nats.Zero. [14:27..14:30): Nat -> recursion/Nats.Nat# [15:13..15:17): Succ <- recursion/Nats.Succ# -[15:17..15:17): <- recursion/Nats.Succ#``(). [15:18..15:19): N <- recursion/Nats.Succ#[N] +[15:23..15:26): Nat -> recursion/Nats.Nat# [15:28..15:29): p <- recursion/Nats.Succ#p. [15:31..15:32): N -> recursion/Nats.Succ#[N] [15:42..15:45): Nat -> recursion/Nats.Nat# @@ -3606,8 +4104,6 @@ Occurrences: [18:11..18:12): n -> recursion/Nats.toIntg().(n) [19:11..19:15): Zero -> recursion/Nats.Zero. [20:11..20:15): Succ -> recursion/Nats.Succ. 
-[20:15..20:15): -> recursion/Nats.Succ.unapply(). -[20:15..20:15): -> local2 [20:16..20:17): p <- local3 [20:22..20:28): toIntg -> recursion/Nats.toIntg(). [20:29..20:30): p -> local3 @@ -3615,17 +4111,18 @@ Occurrences: [23:6..23:9): j31 <- recursion/Nats.j31. [23:12..23:18): toIntg -> recursion/Nats.toIntg(). [23:19..23:23): Zero -> recursion/Nats.Zero. -[23:24..23:32): ++.++.++ <- local4 -[23:24..23:24): -> recursion/Nats.Zero. [23:24..23:26): ++ -> recursion/Nats.Nat#`++`(). -[23:27..23:35): ++.++ + <- local5 [23:27..23:29): ++ -> recursion/Nats.Nat#`++`(). -[23:30..23:38): ++ + Zer <- local6 [23:30..23:32): ++ -> recursion/Nats.Nat#`++`(). [23:33..23:34): + -> recursion/Nats.Nat#`+`(). [23:35..23:39): Zero -> recursion/Nats.Zero. [23:40..23:42): ++ -> recursion/Nats.Nat#`++`(). +Synthetics: +[5:50..5:54):Succ => *.apply[Nat.this.type] +[10:13..10:17):Succ => *.unapply[N$1] +[20:11..20:15):Succ => *.unapply[N$2] + expect/semanticdb-Definitions.scala ----------------------------------- @@ -3635,19 +4132,19 @@ Uri => semanticdb-Definitions.scala Text => empty Language => Scala Symbols => 10 entries -Occurrences => 9 entries +Occurrences => 7 entries Symbols: -a/Definitions. => final object Definitions -a/Definitions.D# => class D -a/Definitions.D#``(). => primary ctor -a/Definitions.E# => trait E -a/Definitions.E#``(). => primary ctor -a/Definitions.`b_=`(). => var method b_= -a/Definitions.`b_=`().(x$1) => param x$1 -a/Definitions.a. => val method a -a/Definitions.b(). => var method b -a/Definitions.c(). => method c +a/Definitions. => final object Definitions extends Object { self: Definitions.type => +9 decls } +a/Definitions.D# => class D extends Object { self: D => +1 decls } +a/Definitions.D#``(). => primary ctor (): D +a/Definitions.E# => trait E extends Object { self: E => +1 decls } +a/Definitions.E#``(). => primary ctor (): E +a/Definitions.`b_=`(). => var method b_= (param x$1: Int): Unit +a/Definitions.`b_=`().(x$1) => param x$1: Int +a/Definitions.a. 
=> val method a Int +a/Definitions.b(). => var method b Int +a/Definitions.c(). => method c => Int Occurrences: [0:8..0:9): a <- a/ @@ -3655,9 +4152,7 @@ Occurrences: [2:6..2:7): a <- a/Definitions.a. [3:6..3:7): b <- a/Definitions.b(). [4:6..4:7): c <- a/Definitions.c(). -[5:2..5:2): <- a/Definitions.D#``(). [5:8..5:9): D <- a/Definitions.D# -[6:2..6:2): <- a/Definitions.E#``(). [6:8..6:9): E <- a/Definitions.E# expect/semanticdb-Flags.scala @@ -3669,59 +4164,60 @@ Uri => semanticdb-Flags.scala Text => empty Language => Scala Symbols => 50 entries -Occurrences => 82 entries +Occurrences => 73 entries +Synthetics => 2 entries Symbols: -flags/p/package. => final package object p -flags/p/package.AA# => class AA -flags/p/package.AA#``(). => primary ctor -flags/p/package.AA#``().(x) => param x -flags/p/package.AA#``().(y) => val param y -flags/p/package.AA#``().(z) => var param z -flags/p/package.AA#`z_=`(). => var method z_= -flags/p/package.AA#`z_=`().(x$1) => param x$1 -flags/p/package.AA#x. => val method x -flags/p/package.AA#y. => val method y -flags/p/package.AA#z(). => var method z -flags/p/package.C# => abstract class C -flags/p/package.C#[T] => covariant typeparam T -flags/p/package.C#[U] => contravariant typeparam U -flags/p/package.C#[V] => typeparam V -flags/p/package.C#``(). => primary ctor -flags/p/package.C#``().(x) => param x -flags/p/package.C#``().(y) => param y -flags/p/package.C#``().(z) => param z -flags/p/package.C#``(+1). => ctor -flags/p/package.C#``(+2). => ctor -flags/p/package.C#``(+2).(t) => param t -flags/p/package.C#w(). => abstract method w -flags/p/package.C#x. => val method x -flags/p/package.C#y. => val method y -flags/p/package.C#z. => val method z -flags/p/package.S# => class S -flags/p/package.S#[T] => typeparam T -flags/p/package.S#``(). 
=> primary ctor -flags/p/package.T1# => type T1 -flags/p/package.T2# => type T2 -flags/p/package.T2#[T] => typeparam T -flags/p/package.U# => type U -flags/p/package.V# => type V -flags/p/package.X. => final case object X -flags/p/package.Y# => final class Y -flags/p/package.Y#``(). => primary ctor -flags/p/package.Z# => sealed trait Z -flags/p/package.Z#``(). => primary ctor -flags/p/package.`y_=`(). => var method y_= -flags/p/package.`y_=`().(x$1) => param x$1 -flags/p/package.m(). => macro m -flags/p/package.m().[TT] => typeparam TT -flags/p/package.x. => lazy val method x -flags/p/package.xs1. => val method xs1 -flags/p/package.y(). => implicit var method y -flags/p/package.z(). => method z -flags/p/package.z().(pp) => param pp -local0 => val local xs2 -local1 => case val method t +flags/p/package. => final package object p extends Object { self: p.type => +23 decls } +flags/p/package.AA# => class AA extends Object { self: AA => +5 decls } +flags/p/package.AA#``(). => primary ctor (param x: Int, val param y: Int, var param z: Int): AA +flags/p/package.AA#``().(x) => param x: Int +flags/p/package.AA#``().(y) => val param y: Int +flags/p/package.AA#``().(z) => var param z: Int +flags/p/package.AA#`z_=`(). => var method z_= (param x$1: Int): Unit +flags/p/package.AA#`z_=`().(x$1) => param x$1: Int +flags/p/package.AA#x. => private[this] val method x Int +flags/p/package.AA#y. => val method y Int +flags/p/package.AA#z(). => var method z Int +flags/p/package.C# => abstract class C [covariant typeparam T , contravariant typeparam U , typeparam V ] extends Object { self: C[T, U, V] => +10 decls } +flags/p/package.C#[T] => covariant typeparam T +flags/p/package.C#[U] => contravariant typeparam U +flags/p/package.C#[V] => typeparam V +flags/p/package.C#``(). 
=> primary ctor [covariant typeparam T , contravariant typeparam U , typeparam V ](param x: T, param y: U, param z: V): C[T, U, V] +flags/p/package.C#``().(x) => param x: T +flags/p/package.C#``().(y) => param y: U +flags/p/package.C#``().(z) => param z: V +flags/p/package.C#``(+1). => ctor [covariant typeparam T , contravariant typeparam U , typeparam V ](): C[T, U, V] +flags/p/package.C#``(+2). => ctor [covariant typeparam T , contravariant typeparam U , typeparam V ](param t: T): C[T, U, V] +flags/p/package.C#``(+2).(t) => param t: T +flags/p/package.C#w(). => abstract method w => Int +flags/p/package.C#x. => private[this] val method x T +flags/p/package.C#y. => private[this] val method y U +flags/p/package.C#z. => private[this] val method z V +flags/p/package.S# => class S [@specialized typeparam T ] extends Object { self: S[T] => +2 decls } +flags/p/package.S#[T] => @specialized typeparam T +flags/p/package.S#``(). => primary ctor [@specialized typeparam T ](): S[T] +flags/p/package.T1# => type T1 = Int +flags/p/package.T2# => type T2 [typeparam T ] = S[T] +flags/p/package.T2#[T] => typeparam T +flags/p/package.U# => type U <: Int +flags/p/package.V# => type V >: Int +flags/p/package.X. => final case object X extends Object with Product with Serializable { self: X.type => +1 decls } +flags/p/package.Y# => final class Y extends Object { self: Y => +1 decls } +flags/p/package.Y#``(). => primary ctor (): Y +flags/p/package.Z# => sealed trait Z extends Object { self: Z => +1 decls } +flags/p/package.Z#``(). => primary ctor (): Z +flags/p/package.`y_=`(). => protected var method y_= (param x$1: Int): Unit +flags/p/package.`y_=`().(x$1) => param x$1: Int +flags/p/package.m(). => macro m [typeparam TT ]: Int +flags/p/package.m().[TT] => typeparam TT +flags/p/package.x. => private[flags/p/] lazy val method x Int +flags/p/package.xs1. => val method xs1 Nothing +flags/p/package.y(). => protected implicit var method y Int +flags/p/package.z(). 
=> method z (param pp: Int): Int +flags/p/package.z().(pp) => param pp: Int +local0 => val local xs2: Nothing +local1 => case val method t Occurrences: [0:8..0:13): flags <- flags/ @@ -3729,7 +4225,7 @@ Occurrences: [2:13..2:21): language -> scala/language. [2:22..2:34): experimental -> scala/language.experimental. [2:35..2:41): macros -> scala/language.experimental.macros. -[4:15..5:3): <- flags/p/package. +[4:15..4:16): p <- flags/p/package. [5:2..5:3): p -> flags/p/ [5:19..5:20): x <- flags/p/package.x. [6:25..6:26): y <- flags/p/package.y(). @@ -3742,7 +4238,6 @@ Occurrences: [8:13..8:16): Int -> scala/Int# [8:25..8:28): ??? -> scala/Predef.`???`(). [9:17..9:18): C <- flags/p/package.C# -[9:18..9:18): <- flags/p/package.C#``(). [9:20..9:21): T <- flags/p/package.C#[T] [9:24..9:25): U <- flags/p/package.C#[U] [9:27..9:28): V <- flags/p/package.C#[V] @@ -3752,15 +4247,13 @@ Occurrences: [9:39..9:40): U -> flags/p/package.C#[U] [9:42..9:43): z <- flags/p/package.C#z. [9:45..9:46): V -> flags/p/package.C#[V] -[10:8..10:14): <- flags/p/package.C#``(+1). -[10:22..10:22): -> flags/p/package.C#``(). +[10:8..10:12): <- flags/p/package.C#``(+1). [10:22..10:25): ??? -> scala/Predef.`???`(). [10:27..10:30): ??? -> scala/Predef.`???`(). [10:32..10:35): ??? -> scala/Predef.`???`(). -[11:8..11:14): <- flags/p/package.C#``(+2). +[11:8..11:12): <- flags/p/package.C#``(+2). [11:13..11:14): t <- flags/p/package.C#``(+2).(t) [11:16..11:17): T -> flags/p/package.C#[T] -[11:26..11:26): -> flags/p/package.C#``(). [11:26..11:27): t -> flags/p/package.C#``(+2).(t) [11:29..11:32): ??? -> scala/Predef.`???`(). [11:34..11:37): ??? -> scala/Predef.`???`(). @@ -3777,12 +4270,9 @@ Occurrences: [17:7..17:8): V <- flags/p/package.V# [17:12..17:15): Int -> scala/Int# [18:14..18:15): X <- flags/p/package.X. -[19:2..19:2): <- flags/p/package.Y#``(). [19:14..19:15): Y <- flags/p/package.Y# -[20:2..20:2): <- flags/p/package.Z#``(). 
[20:15..20:16): Z <- flags/p/package.Z# [21:8..21:10): AA <- flags/p/package.AA# -[21:10..21:10): <- flags/p/package.AA#``(). [21:11..21:12): x <- flags/p/package.AA#x. [21:14..21:17): Int -> scala/Int# [21:23..21:24): y <- flags/p/package.AA#y. @@ -3790,16 +4280,13 @@ Occurrences: [21:35..21:36): z <- flags/p/package.AA#z(). [21:38..21:41): Int -> scala/Int# [22:8..22:9): S <- flags/p/package.S# -[22:9..22:9): <- flags/p/package.S#``(). [22:11..22:22): specialized -> scala/specialized# [22:23..22:24): T <- flags/p/package.S#[T] [23:6..23:10): List -> scala/package.List. -[23:10..23:10): -> scala/collection/SeqFactory#unapplySeq(). [23:11..23:14): xs1 <- flags/p/package.xs1. [23:18..23:21): ??? -> scala/Predef.`???`(). [24:2..24:5): ??? -> scala/Predef.`???`(). [24:19..24:23): List -> scala/package.List. -[24:23..24:23): -> scala/collection/SeqFactory#unapplySeq(). [24:24..24:27): xs2 <- local0 [24:32..24:35): ??? -> scala/Predef.`???`(). [25:2..25:5): ??? -> scala/Predef.`???`(). @@ -3807,6 +4294,10 @@ Occurrences: [25:27..25:28): t <- local1 [25:33..25:36): ??? -> scala/Predef.`???`(). +Synthetics: +[23:6..23:10):List => *.unapplySeq[Nothing] +[24:19..24:23):List => *.unapplySeq[Nothing] + expect/semanticdb-Types.scala ----------------------------- @@ -3815,152 +4306,155 @@ Schema => SemanticDB v4 Uri => semanticdb-Types.scala Text => empty Language => Scala -Symbols => 142 entries -Occurrences => 250 entries +Symbols => 144 entries +Occurrences => 225 entries +Synthetics => 1 entries Symbols: -local0 => abstract method k -local1 => abstract method k -local2 => final class $anon -local3 => method k -local4 => final class $anon -local5 => final class $anon -local6 => method k -local7 => type L -local8 => typeparam T -types/B# => class B -types/B#``(). => primary ctor -types/C# => class C -types/C#``(). => primary ctor -types/Foo# => case class Foo -types/Foo#_1(). => method _1 -types/Foo#``(). 
=> primary ctor -types/Foo#``().(s) => val param s -types/Foo#copy$default$1(). => method copy$default$1 -types/Foo#copy(). => method copy -types/Foo#copy().(s) => param s -types/Foo#s. => val method s -types/Foo. => final object Foo -types/Foo.apply(). => method apply -types/Foo.apply().(s) => param s -types/Foo.toString(). => method toString -types/Foo.unapply(). => method unapply -types/Foo.unapply().(x$1) => param x$1 -types/Foo.x. => val method x -types/Foo.y. => val method y -types/P# => class P -types/P#C# => class C -types/P#C#``(). => primary ctor -types/P#X# => class X -types/P#X#``(). => primary ctor -types/P#``(). => primary ctor -types/P#x. => val method x -types/T# => class T -types/T#C# => class C -types/T#C#``(). => primary ctor -types/T#X# => class X -types/T#X#``(). => primary ctor -types/T#``(). => primary ctor -types/T#x. => val method x -types/Test. => final object Test -types/Test.C# => class C -types/Test.C#ByNameType. => final object ByNameType -types/Test.C#ByNameType.m1(). => method m1 -types/Test.C#ByNameType.m1().(x) => param x -types/Test.C#ClassInfoType1. => final object ClassInfoType1 -types/Test.C#ClassInfoType2# => class ClassInfoType2 -types/Test.C#ClassInfoType2#``(). => primary ctor -types/Test.C#ClassInfoType2#x(). => method x -types/Test.C#ClassInfoType3# => trait ClassInfoType3 -types/Test.C#ClassInfoType3#[T] => typeparam T -types/Test.C#ClassInfoType3#``(). => primary ctor -types/Test.C#Either. => val method Either -types/Test.C#MethodType. => final object MethodType -types/Test.C#MethodType.m3(). => method m3 -types/Test.C#MethodType.m4(). => method m4 -types/Test.C#MethodType.m5(). => method m5 -types/Test.C#MethodType.m5().(x) => param x -types/Test.C#MethodType.m6(). => method m6 -types/Test.C#MethodType.m6().(x) => param x -types/Test.C#MethodType.m6().[T] => typeparam T -types/Test.C#MethodType.x1(). => method x1 -types/Test.C#MethodType.x2(). 
=> method x2 -types/Test.C#RepeatedType# => case class RepeatedType -types/Test.C#RepeatedType#_1(). => method _1 -types/Test.C#RepeatedType#``(). => primary ctor -types/Test.C#RepeatedType#``().(s) => val param s -types/Test.C#RepeatedType#m1(). => method m1 -types/Test.C#RepeatedType#m1().(x) => param x -types/Test.C#RepeatedType#s. => val method s -types/Test.C#RepeatedType. => final object RepeatedType -types/Test.C#RepeatedType.apply(). => method apply -types/Test.C#RepeatedType.apply().(s) => param s -types/Test.C#RepeatedType.toString(). => method toString -types/Test.C#RepeatedType.unapplySeq(). => method unapplySeq -types/Test.C#RepeatedType.unapplySeq().(x$1) => param x$1 -types/Test.C#TypeType. => final object TypeType -types/Test.C#TypeType.T1# => type T1 -types/Test.C#TypeType.T4# => type T4 -types/Test.C#TypeType.T5# => type T5 -types/Test.C#TypeType.T5#[U] => typeparam U -types/Test.C#TypeType.m2(). => method m2 -types/Test.C#TypeType.m2().[T2] => typeparam T2 -types/Test.C#TypeType.m3(). => method m3 -types/Test.C#TypeType.m3().[M3] => typeparam M3 -types/Test.C#``(). => primary ctor -types/Test.C#annType1. => val method annType1 -types/Test.C#annType2. => val method annType2 -types/Test.C#compoundType1. => val method compoundType1 -types/Test.C#compoundType2. => val method compoundType2 -types/Test.C#compoundType3. => val method compoundType3 -types/Test.C#compoundType4. => val method compoundType4 -types/Test.C#compoundType5. => val method compoundType5 -types/Test.C#compoundType6. => val method compoundType6 -types/Test.C#existentialType2. => val method existentialType2 -types/Test.C#existentialType3. => val method existentialType3 -types/Test.C#existentialType4. => val method existentialType4 -types/Test.C#p. => val method p -types/Test.C#singleType1. => val method singleType1 -types/Test.C#singleType2. => val method singleType2 -types/Test.C#superType1. => val method superType1 -types/Test.C#superType2. 
=> val method superType2 -types/Test.C#superType3. => val method superType3 -types/Test.C#thisType1. => val method thisType1 -types/Test.C#thisType2. => val method thisType2 -types/Test.C#typeLambda1(). => method typeLambda1 -types/Test.C#typeLambda1().[M] => typeparam M -types/Test.C#typeRef1. => val method typeRef1 -types/Test.C#typeRef2. => val method typeRef2 -types/Test.C#typeRef3. => val method typeRef3 -types/Test.C#typeRef4. => val method typeRef4 -types/Test.C#x. => val method x -types/Test.Literal. => final object Literal -types/Test.Literal.bool. => final val method bool -types/Test.Literal.char. => final val method char -types/Test.Literal.clazzOf. => final val method clazzOf -types/Test.Literal.double. => final val method double -types/Test.Literal.float. => final val method float -types/Test.Literal.int. => final val method int -types/Test.Literal.javaEnum. => final val method javaEnum -types/Test.Literal.long. => final val method long -types/Test.Literal.nil. => final val method nil -types/Test.Literal.string. => final val method string -types/Test.Literal.unit. => final val method unit -types/Test.M# => class M -types/Test.M#``(). => primary ctor -types/Test.M#m(). => method m -types/Test.N# => trait N -types/Test.N#``(). => primary ctor -types/Test.N#n(). => method n -types/ann# => class ann -types/ann#[T] => typeparam T -types/ann#``(). => primary ctor -types/ann#``().(x) => param x -types/ann#x. => val method x -types/ann1# => class ann1 -types/ann1#``(). => primary ctor -types/ann2# => class ann2 -types/ann2#``(). 
=> primary ctor +local0 => abstract method k => Int +local1 => abstract method k => Int +local2 => method k => Int +local3 => final class $anon extends Object { self: $anon => +2 decls } +local5 => final class $anon extends M with N { self: $anon => +1 decls } +local7 => method k => Int +local8 => final class $anon extends M with N { self: $anon => +2 decls } +local10 => typeparam T +local11 => type L [typeparam T ] = List[T] +types/B# => class B extends Object { self: B => +1 decls } +types/B#``(). => primary ctor (): B +types/C# => class C extends Object { self: C => +1 decls } +types/C#``(). => primary ctor (): C +types/Foo# => case class Foo extends Object with Product with Serializable { self: Foo => +5 decls } +types/Foo#_1(). => method _1 => "abc" +types/Foo#``(). => primary ctor (val param s: "abc"): Foo +types/Foo#``().(s) => val param s: "abc" +types/Foo#copy$default$1(). => method copy$default$1 => "abc" @uncheckedVariance +types/Foo#copy(). => method copy (param s: "abc"): Foo +types/Foo#copy().(s) => param s: "abc" +types/Foo#s. => val method s "abc" +types/Foo. => final object Foo extends Object { self: Foo.type => +6 decls } +types/Foo.apply(). => method apply (param s: "abc"): Foo +types/Foo.apply().(s) => param s: "abc" +types/Foo.toString(). => method toString => String <: scala/Any#toString(). +types/Foo.unapply(). => method unapply (param x$1: Foo): Foo +types/Foo.unapply().(x$1) => param x$1: Foo +types/Foo.x. => val method x "abc" @deprecated +types/Foo.y. => val method y "abc" +types/P# => class P extends Object { self: P => +8 decls } +types/P#C# => class C extends Object { self: C => +1 decls } +types/P#C#``(). => primary ctor (): C +types/P#X# => class X extends Object { self: X => +1 decls } +types/P#X#``(). => primary ctor (): X +types/P#``(). => primary ctor (): P +types/P#x. => val method x X +types/T# => class T extends Object { self: T => +8 decls } +types/T#C# => class C extends Object { self: C => +1 decls } +types/T#C#``(). 
=> primary ctor (): C +types/T#X# => class X extends Object { self: X => +1 decls } +types/T#X#``(). => primary ctor (): X +types/T#``(). => primary ctor (): T +types/T#x. => val method x X +types/Test. => final object Test extends Object { self: Test.type => +10 decls } +types/Test.C# => class C extends M { self: C => +42 decls } +types/Test.C#ByNameType. => final object ByNameType extends Object { self: ByNameType.type => +2 decls } +types/Test.C#ByNameType.m1(). => method m1 (param x: => Int): Int +types/Test.C#ByNameType.m1().(x) => param x: => Int +types/Test.C#ClassInfoType1. => final object ClassInfoType1 extends Object { self: ClassInfoType1.type => +1 decls } +types/Test.C#ClassInfoType2# => class ClassInfoType2 extends B { self: ClassInfoType2 => +2 decls } +types/Test.C#ClassInfoType2#``(). => primary ctor (): ClassInfoType2 +types/Test.C#ClassInfoType2#x(). => method x => Int +types/Test.C#ClassInfoType3# => trait ClassInfoType3 [typeparam T ] extends Object { self: ClassInfoType3[T] => +2 decls } +types/Test.C#ClassInfoType3#[T] => typeparam T +types/Test.C#ClassInfoType3#``(). => primary ctor [typeparam T ](): ClassInfoType3[T] +types/Test.C#Either. => val method Either Either.type +types/Test.C#MethodType. => final object MethodType extends Object { self: MethodType.type => +7 decls } +types/Test.C#MethodType.m3(). => method m3 => Int +types/Test.C#MethodType.m4(). => method m4 (): Int +types/Test.C#MethodType.m5(). => method m5 (param x: Int): Int +types/Test.C#MethodType.m5().(x) => param x: Int +types/Test.C#MethodType.m6(). => method m6 [typeparam T ](param x: T): T +types/Test.C#MethodType.m6().(x) => param x: T +types/Test.C#MethodType.m6().[T] => typeparam T +types/Test.C#MethodType.x1(). => method x1 => Int +types/Test.C#MethodType.x2(). => method x2 => Int +types/Test.C#RepeatedType# => case class RepeatedType extends Object with Product with Serializable { self: RepeatedType => +4 decls } +types/Test.C#RepeatedType#_1(). 
=> method _1 => String* +types/Test.C#RepeatedType#``(). => primary ctor (val param s: String*): RepeatedType +types/Test.C#RepeatedType#``().(s) => val param s: String* +types/Test.C#RepeatedType#m1(). => method m1 (param x: Int*): Int +types/Test.C#RepeatedType#m1().(x) => param x: Int* +types/Test.C#RepeatedType#s. => val method s String* +types/Test.C#RepeatedType. => final object RepeatedType extends Object { self: RepeatedType.type => +4 decls } +types/Test.C#RepeatedType.apply(). => method apply (param s: String*): RepeatedType +types/Test.C#RepeatedType.apply().(s) => param s: String* +types/Test.C#RepeatedType.toString(). => method toString => String <: scala/Any#toString(). +types/Test.C#RepeatedType.unapplySeq(). => method unapplySeq (param x$1: RepeatedType): RepeatedType +types/Test.C#RepeatedType.unapplySeq().(x$1) => param x$1: RepeatedType +types/Test.C#TypeType. => final object TypeType extends Object { self: TypeType.type => +6 decls } +types/Test.C#TypeType.T1# => type T1 +types/Test.C#TypeType.T4# => type T4 = C +types/Test.C#TypeType.T5# => type T5 [typeparam U ] = U +types/Test.C#TypeType.T5#[U] => typeparam U +types/Test.C#TypeType.m2(). => method m2 [typeparam T2 = C]: Nothing +types/Test.C#TypeType.m2().[T2] => typeparam T2 = C +types/Test.C#TypeType.m3(). => method m3 [typeparam M3 [type _ ]]: Nothing +types/Test.C#TypeType.m3().[M3] => typeparam M3 [type _ ] +types/Test.C#TypeType.m3().[M3][_] => type _ +types/Test.C#``(). => primary ctor (): C +types/Test.C#annType1. => val method annType1 T @ann[T] +types/Test.C#annType2. => val method annType2 T @ann1 @ann2 +types/Test.C#compoundType1. => val method compoundType1 Object { abstract method k => Int } +types/Test.C#compoundType2. => val method compoundType2 M & N +types/Test.C#compoundType3. => val method compoundType3 M & N { abstract method k => Int } +types/Test.C#compoundType4. => val method compoundType4 Object +types/Test.C#compoundType5. 
=> val method compoundType5 M & N +types/Test.C#compoundType6. => val method compoundType6 M & N +types/Test.C#existentialType2. => val method existentialType2 List[_] forSome { type _ } +types/Test.C#existentialType3. => val method existentialType3 Class[_] forSome { type _ } +types/Test.C#existentialType4. => val method existentialType4 Class[_] forSome { type _ } +types/Test.C#p. => val method p P +types/Test.C#singleType1. => val method singleType1 x.type +types/Test.C#singleType2. => val method singleType2 p.x.type +types/Test.C#superType1. => val method superType1 Int +types/Test.C#superType2. => val method superType2 Int +types/Test.C#superType3. => val method superType3 Int +types/Test.C#thisType1. => val method thisType1 C.this.type +types/Test.C#thisType2. => val method thisType2 C.this.type +types/Test.C#typeLambda1(). => method typeLambda1 [typeparam M [type _ ]]: Nothing +types/Test.C#typeLambda1().[M] => typeparam M [type _ ] +types/Test.C#typeLambda1().[M][_] => type _ +types/Test.C#typeRef1. => val method typeRef1 C +types/Test.C#typeRef2. => val method typeRef2 p.C +types/Test.C#typeRef3. => val method typeRef3 T#C +types/Test.C#typeRef4. => val method typeRef4 List[Int] +types/Test.C#x. => val method x p.X +types/Test.Literal. => final object Literal extends Object { self: Literal.type => +12 decls } +types/Test.Literal.bool. => final val method bool true +types/Test.Literal.char. => final val method char 'a' +types/Test.Literal.clazzOf. => final val method clazzOf Option[Int] +types/Test.Literal.double. => final val method double 2.0 +types/Test.Literal.float. => final val method float 1.0f +types/Test.Literal.int. => final val method int 1 +types/Test.Literal.javaEnum. => final val method javaEnum LinkOption +types/Test.Literal.long. => final val method long 1L +types/Test.Literal.nil. => final val method nil Null +types/Test.Literal.string. => final val method string "a" +types/Test.Literal.unit. 
=> final val method unit Unit +types/Test.M# => class M extends Object { self: M => +2 decls } +types/Test.M#``(). => primary ctor (): M +types/Test.M#m(). => method m => Int +types/Test.N# => trait N extends Object { self: N => +2 decls } +types/Test.N#``(). => primary ctor (): N +types/Test.N#n(). => method n => Int +types/ann# => class ann [typeparam T ] extends Annotation with StaticAnnotation { self: ann[T] => +3 decls } +types/ann#[T] => typeparam T +types/ann#``(). => primary ctor [typeparam T ](param x: T): ann[T] +types/ann#``().(x) => param x: T +types/ann#x. => private[this] val method x T +types/ann1# => class ann1 extends Annotation with StaticAnnotation { self: ann1 => +1 decls } +types/ann1#``(). => primary ctor (): ann1 +types/ann2# => class ann2 extends Annotation with StaticAnnotation { self: ann2 => +1 decls } +types/ann2#``(). => primary ctor (): ann2 Occurrences: [0:8..0:13): types <- types/ @@ -3971,7 +4465,6 @@ Occurrences: [3:13..3:21): language -> scala/language. [3:22..3:33): higherKinds -> scala/language.higherKinds. [5:6..5:9): ann <- types/ann# -[5:9..5:9): <- types/ann#``(). [5:10..5:11): T <- types/ann#[T] [5:13..5:14): x <- types/ann#x. [5:16..5:17): T -> types/ann#[T] @@ -3979,39 +4472,26 @@ Occurrences: [5:33..5:43): annotation -> scala/annotation/ [5:44..5:60): StaticAnnotation -> scala/annotation/StaticAnnotation# [6:6..6:10): ann1 <- types/ann1# -[6:19..6:19): <- types/ann1#``(). [6:19..6:24): scala -> scala/ [6:25..6:35): annotation -> scala/annotation/ [6:36..6:52): StaticAnnotation -> scala/annotation/StaticAnnotation# [7:6..7:10): ann2 <- types/ann2# -[7:19..7:19): <- types/ann2#``(). [7:19..7:24): scala -> scala/ [7:25..7:35): annotation -> scala/annotation/ [7:36..7:52): StaticAnnotation -> scala/annotation/StaticAnnotation# -[9:0..9:0): <- types/B#``(). [9:6..9:7): B <- types/B# -[11:0..11:0): <- types/C#``(). [11:6..11:7): C <- types/C# [13:6..13:7): P <- types/P# -[14:2..14:2): <- types/P#``(). 
-[14:2..14:2): <- types/P#C#``(). [14:8..14:9): C <- types/P#C# -[15:2..15:2): <- types/P#X#``(). [15:8..15:9): X <- types/P#X# [16:6..16:7): x <- types/P#x. [16:14..16:15): X -> types/P#X# -[16:15..16:15): -> types/P#X#``(). [19:6..19:7): T <- types/T# -[20:2..20:2): <- types/T#``(). -[20:2..20:2): <- types/T#C#``(). [20:8..20:9): C <- types/T#C# -[21:2..21:2): <- types/T#X#``(). [21:8..21:9): X <- types/T#X# [22:6..22:7): x <- types/T#x. [22:14..22:15): X -> types/T#X# -[22:15..22:15): -> types/T#X#``(). [25:11..25:14): Foo <- types/Foo# -[25:14..25:14): <- types/Foo#``(). [25:15..25:16): s <- types/Foo#s. [27:7..27:10): Foo <- types/Foo. [28:6..28:7): x <- types/Foo.x. @@ -4020,22 +4500,17 @@ Occurrences: [29:17..29:18): x -> types/Foo.x. [32:7..32:11): Test <- types/Test. [33:8..33:9): M <- types/Test.M# -[34:4..34:4): <- types/Test.M#``(). [34:8..34:9): m <- types/Test.M#m(). [34:11..34:14): Int -> scala/Int# [34:17..34:20): ??? -> scala/Predef.`???`(). [37:8..37:9): N <- types/Test.N# -[38:4..38:4): <- types/Test.N#``(). [38:8..38:9): n <- types/Test.N#n(). [38:11..38:14): Int -> scala/Int# [38:17..38:20): ??? -> scala/Predef.`???`(). [41:8..41:9): C <- types/Test.C# -[41:18..41:18): <- types/Test.C#``(). [41:18..41:19): M -> types/Test.M# -[41:19..41:19): -> types/Test.M#``(). [42:8..42:9): p <- types/Test.C#p. [42:16..42:17): P -> types/P# -[42:17..42:17): -> types/P#``(). [43:8..43:9): x <- types/Test.C#x. [43:12..43:13): p -> types/Test.C#p. [43:14..43:15): x -> types/P#x. @@ -4090,18 +4565,16 @@ Occurrences: [63:41..63:44): Int -> scala/Int# [63:49..63:52): ??? -> scala/Predef.`???`(). [64:8..64:21): compoundType4 <- types/Test.C#compoundType4. -[64:34..64:35): k <- local3 +[64:34..64:35): k <- local2 [64:37..64:40): Int -> scala/Int# [64:43..64:46): ??? -> scala/Predef.`???`(). [65:8..65:21): compoundType5 <- types/Test.C#compoundType5. [65:28..65:29): M -> types/Test.M# -[65:29..65:29): -> types/Test.M#``(). 
[65:35..65:36): N -> types/Test.N# [66:8..66:21): compoundType6 <- types/Test.C#compoundType6. [66:28..66:29): M -> types/Test.M# -[66:29..66:29): -> types/Test.M#``(). [66:35..66:36): N -> types/Test.N# -[66:43..66:44): k <- local6 +[66:43..66:44): k <- local7 [66:46..66:49): Int -> scala/Int# [66:52..66:55): ??? -> scala/Predef.`???`(). [68:8..68:16): annType1 <- types/Test.C#annType1. @@ -4125,18 +4598,15 @@ Occurrences: [75:20..75:21): M <- types/Test.C#typeLambda1().[M] [75:28..75:31): ??? -> scala/Predef.`???`(). [76:4..76:15): typeLambda1 -> types/Test.C#typeLambda1(). -[76:24..76:25): L <- local7 -[76:26..76:27): T <- local8 +[76:24..76:25): L <- local11 +[76:26..76:27): T <- local10 [76:31..76:35): List -> scala/package.List# -[76:36..76:37): T -> local8 +[76:36..76:37): T -> local10 [78:11..78:25): ClassInfoType1 <- types/Test.C#ClassInfoType1. [79:10..79:24): ClassInfoType2 <- types/Test.C#ClassInfoType2# -[79:33..79:33): <- types/Test.C#ClassInfoType2#``(). [79:33..79:34): B -> types/B# -[79:34..79:34): -> types/B#``(). [79:41..79:42): x <- types/Test.C#ClassInfoType2#x(). [80:10..80:24): ClassInfoType3 <- types/Test.C#ClassInfoType3# -[80:24..80:24): <- types/Test.C#ClassInfoType3#``(). [80:25..80:26): T <- types/Test.C#ClassInfoType3#[T] [82:11..82:21): MethodType <- types/Test.C#MethodType. [83:10..83:12): x1 <- types/Test.C#MethodType.x1(). @@ -4169,7 +4639,6 @@ Occurrences: [92:25..92:28): Int -> scala/Int# [92:31..92:34): ??? -> scala/Predef.`???`(). [95:15..95:27): RepeatedType <- types/Test.C#RepeatedType# -[95:27..95:27): <- types/Test.C#RepeatedType#``(). [95:28..95:29): s <- types/Test.C#RepeatedType#s. [95:31..95:37): String -> scala/Predef.String# [96:10..96:12): m1 <- types/Test.C#RepeatedType#m1(). 
@@ -4214,6 +4683,9 @@ Occurrences: [119:32..119:38): Option -> scala/Option# [119:39..119:42): Int -> scala/Int# +Synthetics: +[68:20..68:24):@ann => *[Int] + expect/semanticdb-extract.scala ------------------------------- @@ -4223,27 +4695,28 @@ Uri => semanticdb-extract.scala Text => empty Language => Scala Symbols => 18 entries -Occurrences => 22 entries +Occurrences => 20 entries +Synthetics => 3 entries Symbols: -_empty_/AnObject. => final object AnObject -_empty_/AnObject.Foo# => case class Foo -_empty_/AnObject.Foo#_1(). => method _1 -_empty_/AnObject.Foo#``(). => primary ctor -_empty_/AnObject.Foo#``().(x) => val param x -_empty_/AnObject.Foo#copy$default$1(). => method copy$default$1 -_empty_/AnObject.Foo#copy(). => method copy -_empty_/AnObject.Foo#copy().(x) => param x -_empty_/AnObject.Foo#x. => val method x -_empty_/AnObject.Foo. => final object Foo -_empty_/AnObject.Foo.apply(). => method apply -_empty_/AnObject.Foo.apply().(x) => param x -_empty_/AnObject.Foo.toString(). => method toString -_empty_/AnObject.Foo.unapply(). => method unapply -_empty_/AnObject.Foo.unapply().(x$1) => param x$1 -_empty_/AnObject.foo(). => method foo -_empty_/AnObject.foo().(x) => param x -_empty_/AnObject.foo(+1). => method foo +_empty_/AnObject. => final object AnObject extends Object { self: AnObject.type => +6 decls } +_empty_/AnObject.Foo# => case class Foo extends Object with Product with Serializable { self: Foo => +5 decls } +_empty_/AnObject.Foo#_1(). => method _1 => Int +_empty_/AnObject.Foo#``(). => primary ctor (val param x: Int): Foo +_empty_/AnObject.Foo#``().(x) => val param x: Int +_empty_/AnObject.Foo#copy$default$1(). => method copy$default$1 => Int @uncheckedVariance +_empty_/AnObject.Foo#copy(). => method copy (param x: Int): Foo +_empty_/AnObject.Foo#copy().(x) => param x: Int +_empty_/AnObject.Foo#x. => val method x Int +_empty_/AnObject.Foo. => final object Foo extends Object { self: Foo.type => +4 decls } +_empty_/AnObject.Foo.apply(). 
=> method apply (param x: Int): Foo +_empty_/AnObject.Foo.apply().(x) => param x: Int +_empty_/AnObject.Foo.toString(). => method toString => String <: scala/Any#toString(). +_empty_/AnObject.Foo.unapply(). => method unapply (param x$1: Foo): Foo +_empty_/AnObject.Foo.unapply().(x$1) => param x$1: Foo +_empty_/AnObject.foo(). => method foo (param x: Int): Unit +_empty_/AnObject.foo().(x) => param x: Int +_empty_/AnObject.foo(+1). => method foo (): Unit Occurrences: [0:7..0:15): AnObject <- _empty_/AnObject. @@ -4257,7 +4730,6 @@ Occurrences: [8:5..8:14): substring -> java/lang/String#substring(). [9:5..9:14): substring -> java/lang/String#substring(+1). [11:2..11:6): List -> scala/package.List. -[11:6..11:6): -> scala/collection/IterableFactory#apply(). [12:2..12:6): List -> scala/package.List. [12:7..12:12): apply -> scala/collection/IterableFactory#apply(). [13:2..13:6): List -> scala/package.List. @@ -4265,10 +4737,14 @@ Occurrences: [14:2..14:9): println -> scala/Predef.println(+1). [14:12..14:13): + -> scala/Int#`+`(+4). [16:13..16:16): Foo <- _empty_/AnObject.Foo# -[16:16..16:16): <- _empty_/AnObject.Foo#``(). [16:17..16:18): x <- _empty_/AnObject.Foo#x. [16:20..16:23): Int -> scala/Int# +Synthetics: +[11:2..11:6):List => *.apply[Int] +[12:2..12:12):List.apply => *[Nothing] +[13:2..13:14):List.`apply` => *[Nothing] + expect/toplevel.scala --------------------- @@ -4278,30 +4754,30 @@ Uri => toplevel.scala Text => empty Language => Scala Symbols => 18 entries -Occurrences => 42 entries +Occurrences => 43 entries +Synthetics => 2 entries Symbols: -_empty_/MyProgram# => final class MyProgram -_empty_/MyProgram#``(). => primary ctor -_empty_/MyProgram#main(). => static method main -_empty_/MyProgram#main().(args) => param args -_empty_/toplevel$package. => final package object _empty_ -_empty_/toplevel$package.MyProgram(). => method MyProgram -_empty_/toplevel$package.MyProgram().(times) => param times -_empty_/toplevel$package.a. 
=> val method a -_empty_/toplevel$package.combine(). => method combine -_empty_/toplevel$package.combine().(x) => param x -_empty_/toplevel$package.combine().(y) => param y -_empty_/toplevel$package.combine(+1). => method combine -_empty_/toplevel$package.combine(+1).(x) => param x -_empty_/toplevel$package.combine(+1).(y) => param y -_empty_/toplevel$package.combine(+1).(z) => param z -_empty_/toplevel$package.combine(+2). => method combine -_empty_/toplevel$package.foo(). => method foo -local0 => val local error +_empty_/MyProgram# => final class MyProgram extends Object { self: MyProgram => +2 decls } +_empty_/readInts# => final class readInts extends Object { self: readInts => +2 decls } +_empty_/toplevel$package. => final package object _empty_ extends Object { self: _empty_.type => +9 decls } +_empty_/toplevel$package.MyProgram(). => @main method MyProgram (param times: Int): Unit +_empty_/toplevel$package.MyProgram().(times) => param times: Int +_empty_/toplevel$package.a. => val inline method a "" +_empty_/toplevel$package.combine(). => method combine (param x: Int)(param y: Int): Int +_empty_/toplevel$package.combine().(x) => param x: Int +_empty_/toplevel$package.combine().(y) => param y: Int +_empty_/toplevel$package.combine(+1). => method combine (param x: Int, param y: Int, param z: Int): Int +_empty_/toplevel$package.combine(+1).(x) => param x: Int +_empty_/toplevel$package.combine(+1).(y) => param y: Int +_empty_/toplevel$package.combine(+1).(z) => param z: Int +_empty_/toplevel$package.combine(+2). => method combine => Int +_empty_/toplevel$package.foo(). => method foo => String +_empty_/toplevel$package.fooRef(). => method fooRef => String +_empty_/toplevel$package.readInts(). => @main method readInts (param ints: Int*): Unit +_empty_/toplevel$package.readInts().(ints) => param ints: Int* Occurrences: -[0:0..0:0): <- _empty_/toplevel$package. [0:11..0:12): a <- _empty_/toplevel$package.a. 
[1:11..1:12): x <- _empty_/toplevel$package.combine().(x) [1:14..1:17): Int -> scala/Int# @@ -4325,22 +4801,28 @@ Occurrences: [2:46..2:47): z -> _empty_/toplevel$package.combine(+1).(z) [3:4..3:11): combine <- _empty_/toplevel$package.combine(+2). [4:4..4:7): foo <- _empty_/toplevel$package.foo(). -[5:0..5:0): <- _empty_/MyProgram# -[5:0..5:0): <- _empty_/MyProgram#``(). -[5:0..5:0): -> _empty_/toplevel$package.MyProgram(). -[5:0..5:0): -> scala/util/CommandLineParser.parseArgument(). -[5:0..5:0): -> _empty_/MyProgram#main().(args) -[5:0..5:0): -> scala/util/CommandLineParser.FromString.given_FromString_Int. -[5:0..5:0): -> scala/util/CommandLineParser.showError(). -[5:0..5:0): -> local0 [5:1..5:5): main -> scala/main# [5:10..5:19): MyProgram <- _empty_/toplevel$package.MyProgram(). [5:20..5:25): times <- _empty_/toplevel$package.MyProgram().(times) [5:27..5:30): Int -> scala/Int# [5:33..5:37): Unit -> scala/Unit# -[5:41..5:41): -> scala/LowPriorityImplicits#intWrapper(). [5:43..5:45): to -> scala/runtime/RichInt#to(). [5:46..5:51): times -> _empty_/toplevel$package.MyProgram().(times) [5:53..5:60): foreach -> scala/collection/immutable/Range#foreach(). [5:67..5:74): println -> scala/Predef.println(+1). +[6:1..6:5): main -> scala/main# +[6:10..6:18): readInts <- _empty_/toplevel$package.readInts(). +[6:19..6:23): ints <- _empty_/toplevel$package.readInts().(ints) +[6:25..6:28): Int -> scala/Int# +[6:32..6:36): Unit -> scala/Unit# +[6:39..6:46): println -> scala/Predef.println(+1). +[6:47..6:51): ints -> _empty_/toplevel$package.readInts().(ints) +[6:52..6:60): mkString -> scala/collection/IterableOnceOps#mkString(+1). +[7:4..7:10): fooRef <- _empty_/toplevel$package.fooRef(). +[7:13..7:29): toplevel$package -> _empty_/toplevel$package. +[7:30..7:33): foo -> _empty_/toplevel$package.foo(). 
+ +Synthetics: +[5:40..5:60):(1 to times) foreach => *[Unit] +[5:41..5:42):1 => intWrapper(*) diff --git a/tests/sjs-junit/test/org/scalajs/testsuite/compiler/RegressionTestScala3.scala b/tests/sjs-junit/test/org/scalajs/testsuite/compiler/RegressionTestScala3.scala index 4a7ccad3cc5a..08aa26726dc7 100644 --- a/tests/sjs-junit/test/org/scalajs/testsuite/compiler/RegressionTestScala3.scala +++ b/tests/sjs-junit/test/org/scalajs/testsuite/compiler/RegressionTestScala3.scala @@ -27,6 +27,38 @@ class RegressionTestScala3 { assertEquals("foo", new RangeErrorIssue11592("foo").message) assertEquals("", new RangeErrorIssue11592().message) } + + @Test def testNonJVMCharsInClosureParametersIssue12507(): Unit = { + def foo(`[-3, 3]`: Int): Int => Int = { x => + `[-3, 3]` + } + + assertEquals(5, foo(5)(4)) + } + + @Test def defaultAccessorBridgesIssue12572(): Unit = { + new MyPromiseIssue12572[Int](5) + } + + @Test def desugarIdentCrashIssue13221(): Unit = { + assertEquals(1, X_Issue13221.I.i) + assertEquals(1, X_Issue13221.blah) + } + + @Test def primitivePlusStringThatIsATermRefIssue13518(): Unit = { + def charPlusString(x: String): String = 'a' + x + assertEquals("abc", charPlusString("bc")) + + def intPlusString(x: String): String = 5 + x + assertEquals("5bc", intPlusString("bc")) + } + + @Test def defaultParamsInModuleDefWithBridgesIssue13860(): Unit = { + import Issue13860._ + + assertEquals(0L, Foo.bar().x) + assertEquals(5L, Foo.bar(5L).x) + } } object RegressionTestScala3 { @@ -45,6 +77,48 @@ object RegressionTestScala3 { class RangeErrorIssue11592(msg: String = js.native) extends js.Object { val message: String = js.native } + + class MyPromiseIssue12572[T](t: T) extends js.Promise[T]((resolve, reject) => resolve(t)) { + override def `then`[S]( + onFulfilled: js.Function1[T, S | js.Thenable[S]], + onRejected: js.UndefOr[js.Function1[scala.Any, S | js.Thenable[S]]] = js.undefined): js.Promise[S] = { + ??? 
+ } + + override def `then`[S >: T]( + onFulfilled: Unit, + onRejected: js.UndefOr[js.Function1[scala.Any, S | js.Thenable[S]]]): js.Promise[S] = { + ??? + } + + override def `catch`[S >: T]( + onRejected: js.UndefOr[js.Function1[scala.Any, S | js.Thenable[S]]] = js.undefined): js.Promise[S] = { + ??? + } + } + + object X_Issue13221 extends Y_Issue13221 { + object I { + def i = 1 + } + } + + abstract class Y_Issue13221 { self: X_Issue13221.type => + import I._ + def blah = i + } + + object Issue13860 { + class Foo(var x: Long) + + trait Companion[A] { + def bar(x: Long = 0): A + } + + object Foo extends Companion[Foo] { + def bar(x: Long = 0): Foo = new Foo(x) + } + } } // This class needs to be at the top-level, not in an object, to reproduce the issue diff --git a/tests/sjs-junit/test/org/scalajs/testsuite/jsinterop/ExportedJSNativeMembersScala3.scala b/tests/sjs-junit/test/org/scalajs/testsuite/jsinterop/ExportedJSNativeMembersScala3.scala new file mode 100644 index 000000000000..79d78d929578 --- /dev/null +++ b/tests/sjs-junit/test/org/scalajs/testsuite/jsinterop/ExportedJSNativeMembersScala3.scala @@ -0,0 +1,161 @@ +package org.scalajs.testsuite.jsinterop + +import org.junit.Assert.* +import org.junit.Test + +import scala.scalajs.js +import scala.scalajs.js.annotation.* + +object ExportedJSNativeMembersScala3: + + object A { + + @js.native + trait FooModule extends js.Any { self: Foo.type => + val foo: String + } + + @js.native + @JSGlobal("Foo_GlobalThatWillBeExported") + val Foo: FooModule = js.native + + @js.native + @JSGlobal("Bar_GlobalThatWillBeExported") + object Bar extends js.Any { + val bar: Int = js.native + } + + @js.native + @JSGlobal("Baz_GlobalThatWillBeExported") + final class Baz(var baz: String) extends js.Object + + @js.native + @JSGlobal("QuxHolder_GlobalThatWillBeExported") + final class QuxHolder(val qux: String) extends js.Object + + @js.native + @JSGlobal("QuxHolderHolder_GlobalThatWillBeExported") + final class QuxHolderHolder(val 
quxHolder: QuxHolder) extends js.Object { + val qux: quxHolder.qux.type = js.native + } + + @js.native // structurally equivalent to QuxHolderHolder, but a trait + trait QuxHolderHolderTrait(val quxHolder: QuxHolder) extends js.Any { + val qux: quxHolder.qux.type + } + + @js.native + @JSGlobal("quxxInstance_GlobalThatWillBeExported") + val quxxInstance: QuxHolderHolderTrait = js.native + + @js.native + @JSGlobal("addOne_GlobalThatWillBeExported") + def addOne(i: Int): Int = js.native + + } + + object B extends js.Object { + export A.FooModule // trait (native) + export A.Foo // val (native) + export A.Bar // object (native) + export A.Baz // class (native) + export A.QuxHolder // class (native) + export A.QuxHolderHolder // class (native) + export A.QuxHolderHolderTrait // trait (native) + export A.quxxInstance // val (native) + export A.addOne // def (native) + } + + final class C extends js.Object { + export A.FooModule // trait (native) + export A.Foo // val (native) + export A.Bar // object (native) + export A.Baz // class (native) + export A.QuxHolder // class (native) + export A.QuxHolderHolder // class (native) + export A.QuxHolderHolderTrait // trait (native) + export A.quxxInstance // val (native) + export A.addOne // def (native) + } + +class ExportedJSNativeMembersScala3: + import ExportedJSNativeMembersScala3.* + + @Test def forward_top_level_JS_var_with_export(): Unit = { + js.eval(""" + var Foo_GlobalThatWillBeExported = { + foo: "foo" + } + var Bar_GlobalThatWillBeExported = { + bar: 23 + } + function Baz_GlobalThatWillBeExported(baz) { + this.baz = baz + } + function QuxHolder_GlobalThatWillBeExported(qux) { + this.qux = qux + } + function QuxHolderHolder_GlobalThatWillBeExported(quxHolder) { + this.quxHolder = quxHolder; + this.qux = quxHolder.qux; + } + var quxxInstance_GlobalThatWillBeExported = ( + new QuxHolderHolder_GlobalThatWillBeExported( + new QuxHolder_GlobalThatWillBeExported("quxxInstance") + ) + ) + function 
addOne_GlobalThatWillBeExported(i) { + return i + 1; + } + """) + + val C = ExportedJSNativeMembersScala3.C() + + assertEquals("foo", A.Foo.foo) + assertEquals("foo", B.Foo.foo) + assertEquals("foo", C.Foo.foo) + + assertEquals(23, A.Bar.bar) + assertEquals(23, B.Bar.bar) + assertEquals(23, C.Bar.bar) + + val abaz = A.Baz("abaz1") + assertEquals("abaz1", abaz.baz) + abaz.baz = "abaz2" + assertEquals("abaz2", abaz.baz) + + val bbaz = B.Baz("bbaz1") + assertEquals("bbaz1", bbaz.baz) + bbaz.baz = "bbaz2" + assertEquals("bbaz2", bbaz.baz) + + val cbaz = C.Baz("cbaz1") + assertEquals("cbaz1", cbaz.baz) + cbaz.baz = "cbaz2" + assertEquals("cbaz2", cbaz.baz) + + val quxHolderHolderA = A.QuxHolderHolder(A.QuxHolder("quxHolderHolderA")) + assertEquals("quxHolderHolderA", quxHolderHolderA.qux) + assertEquals("quxHolderHolderA", quxHolderHolderA.quxHolder.qux) + + val quxHolderHolderB = B.QuxHolderHolder(B.QuxHolder("quxHolderHolderB")) + assertEquals("quxHolderHolderB", quxHolderHolderB.qux) + assertEquals("quxHolderHolderB", quxHolderHolderB.quxHolder.qux) + + val quxHolderHolderC = C.QuxHolderHolder(C.QuxHolder("quxHolderHolderC")) + assertEquals("quxHolderHolderC", quxHolderHolderC.qux) + assertEquals("quxHolderHolderC", quxHolderHolderC.quxHolder.qux) + + assertEquals("quxxInstance", A.quxxInstance.qux) + assertEquals("quxxInstance", A.quxxInstance.quxHolder.qux) + assertEquals("quxxInstance", B.quxxInstance.qux) + assertEquals("quxxInstance", B.quxxInstance.quxHolder.qux) + assertEquals("quxxInstance", C.quxxInstance.qux) + assertEquals("quxxInstance", C.quxxInstance.quxHolder.qux) + + assertEquals(2, A.addOne(1)) + assertEquals(3, B.addOne(2)) + assertEquals(4, C.addOne(3)) + } + +end ExportedJSNativeMembersScala3 diff --git a/tests/sjs-junit/test/org/scalajs/testsuite/jsinterop/PromiseMock.scala b/tests/sjs-junit/test/org/scalajs/testsuite/jsinterop/PromiseMock.scala deleted file mode 100644 index 6764cdc7ac83..000000000000 --- 
a/tests/sjs-junit/test/org/scalajs/testsuite/jsinterop/PromiseMock.scala +++ /dev/null @@ -1,260 +0,0 @@ -/* - * Scala.js (https://www.scala-js.org/) - * - * Copyright EPFL. - * - * Licensed under Apache License 2.0 - * (https://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. - */ - -package org.scalajs.testsuite.jsinterop - -import scala.scalajs.js -import scala.scalajs.js.annotation._ -import scala.scalajs.js.| - -import js.Thenable - -object PromiseMock { - - @noinline - def withMockedPromise[A](body: (() => Unit) => A): A = { - val global = org.scalajs.testsuite.utils.JSUtils.globalObject - - val oldPromise = - if (global.hasOwnProperty("Promise").asInstanceOf[Boolean]) Some(global.Promise) - else None - - global.Promise = js.constructorOf[MockPromise[_]] - try { - body(MockPromise.processQueue _) - } finally { - oldPromise.fold { - js.special.delete(global, "Promise") - } { old => - global.Promise = old - } - } - } - - @noinline - def withMockedPromiseIfExists[A](body: (Option[() => Unit]) => A): A = { - val global = org.scalajs.testsuite.utils.JSUtils.globalObject - - val oldPromise = global.Promise - - if (js.isUndefined(oldPromise)) { - body(None) - } else { - global.Promise = js.constructorOf[MockPromise[_]] - try { - body(Some(MockPromise.processQueue _)) - } finally { - global.Promise = oldPromise - } - } - } - - private object MockPromise { - private val queue = js.Array[js.Function0[Any]]() - - @JSExportStatic - def resolve[A](value: A | js.Thenable[A]): MockPromise[A] = { - new MockPromise[A]({ - (resolve: js.Function1[A | js.Thenable[A], _], - reject: js.Function1[Any, _]) => - resolve(value) - }) - } - - @JSExportStatic - def reject(reason: Any): MockPromise[Nothing] = { - new MockPromise[Nothing]({ - (resolve: js.Function1[Nothing | js.Thenable[Nothing], _], - reject: js.Function1[Any, _]) => - reject(reason) - }) - } - - def enqueue(f: 
js.Function0[Any]): Unit = - queue.push(f) - - def processQueue(): Unit = { - while (queue.nonEmpty) - queue.shift()() - } - - private sealed abstract class State[+A] - - private case object Pending extends State[Nothing] - private case class Fulfilled[+A](value: A) extends State[A] - private case class Rejected(reason: Any) extends State[Nothing] - - private def isNotAnObject(x: Any): Boolean = x match { - case null | () | _:Double | _:Boolean | _:String => true - case _ => false - } - - private def isCallable(x: Any): Boolean = - js.typeOf(x.asInstanceOf[js.Any]) == "function" - - private def throwAny(e: Any): Nothing = { - throw (e match { - case th: Throwable => th - case _ => js.JavaScriptException(e) - }) - } - - private def tryCatchAny[A](tryBody: => A)(catchBody: Any => A): A = { - try { - tryBody - } catch { - case th: Throwable => - catchBody(th match { - case js.JavaScriptException(e) => e - case _ => th - }) - } - } - } - - private class MockPromise[+A]( - executor: js.Function2[js.Function1[A | Thenable[A], _], js.Function1[scala.Any, _], _]) - extends js.Object with js.Thenable[A] { - - import MockPromise._ - - private[this] var state: State[A] = Pending - - private[this] var fulfillReactions = js.Array[js.Function1[A, Any]]() - private[this] var rejectReactions = js.Array[js.Function1[Any, Any]]() - - init(executor) - - // 25.4.3.1 Promise(executor) - private[this] def init( - executor: js.Function2[js.Function1[A | Thenable[A], _], js.Function1[scala.Any, _], _]) = { - tryCatchAny[Unit] { - executor(resolve _, reject _) - } { e => - reject(e) - } - } - - private[this] def fulfill(value: A): Unit = { - assert(state == Pending) - state = Fulfilled(value) - clearAndTriggerReactions(fulfillReactions, value) - } - - private[this] def clearAndTriggerReactions[A]( - reactions: js.Array[js.Function1[A, Any]], - argument: A): Unit = { - - assert(state != Pending) - - fulfillReactions = null - rejectReactions = null - - for (reaction <- reactions) - 
enqueue(() => reaction(argument)) - } - - // 25.4.1.3.2 Promise Resolve Functions - private[this] def resolve(resolution: A | Thenable[A]): Unit = { - if (state == Pending) { - if (resolution.asInstanceOf[AnyRef] eq this) { - reject(new js.TypeError("Self resolution")) - } else if (isNotAnObject(resolution)) { - fulfill(resolution.asInstanceOf[A]) - } else { - tryCatchAny { - val thenAction = resolution.asInstanceOf[js.Dynamic].`then` - if (!isCallable(thenAction)) { - fulfill(resolution.asInstanceOf[A]) - } else { - val thenable = resolution.asInstanceOf[Thenable[A]] - val thenActionFun = thenAction.asInstanceOf[js.Function] - enqueue(() => promiseResolveThenableJob(thenable, thenActionFun)) - } - } { e => - reject(e) - } - } - } - } - - // 25.4.2.2 PromiseResolveThenableJob - private[this] def promiseResolveThenableJob(thenable: Thenable[A], - thenAction: js.Function): Unit = { - thenAction.call(thenable, resolve _, reject _) - } - - // 25.4.1.3.1 Promise Reject Functions - private[this] def reject(reason: Any): Unit = { - if (state == Pending) { - state = Rejected(reason) - clearAndTriggerReactions(rejectReactions, reason) - } - } - - // 25.4.5.3 Promise.prototype.then - def `then`[B]( - onFulfilled: js.Function1[A, B | Thenable[B]], - onRejected: js.UndefOr[js.Function1[scala.Any, B | Thenable[B]]]): MockPromise[B] = { - - new MockPromise[B]( - { (innerResolve: js.Function1[B | Thenable[B], _], - innerReject: js.Function1[scala.Any, _]) => - - def doFulfilled(value: A): Unit = { - tryCatchAny[Unit] { - innerResolve(onFulfilled(value)) - } { e => - innerReject(e) - } - } - - def doRejected(reason: Any): Unit = { - tryCatchAny[Unit] { - onRejected.fold[Unit] { - innerReject(reason) - } { onRejectedFun => - innerResolve(onRejectedFun(reason)) - } - } { e => - innerReject(e) - } - } - - state match { - case Pending => - fulfillReactions += doFulfilled _ - rejectReactions += doRejected _ - - case Fulfilled(value) => - enqueue(() => doFulfilled(value)) - - case 
Rejected(reason) => - enqueue(() => doRejected(reason)) - } - } - ) - } - - def `then`[B >: A]( - onFulfilled: Unit, - onRejected: js.UndefOr[js.Function1[scala.Any, B | Thenable[B]]]): MockPromise[B] = { - `then`((x: A) => (x: B | Thenable[B]), onRejected) - } - - // 25.4.5.1 Promise.prototype.catch - def `catch`[B >: A]( - onRejected: js.UndefOr[js.Function1[scala.Any, B | Thenable[B]]]): MockPromise[B] = { - `then`((), onRejected) - } - } -} diff --git a/tests/untried/neg/t5702-neg-bad-and-wild.check b/tests/untried/neg/t5702-neg-bad-and-wild.check deleted file mode 100644 index a52136dbf889..000000000000 --- a/tests/untried/neg/t5702-neg-bad-and-wild.check +++ /dev/null @@ -1,28 +0,0 @@ -t5702-neg-bad-and-wild.scala:10: error: bad simple pattern: bad use of _* (a sequence pattern must be the last pattern) - case List(1, _*,) => // bad use of _* (a sequence pattern must be the last pattern) - ^ -t5702-neg-bad-and-wild.scala:10: error: illegal start of simple pattern - case List(1, _*,) => // bad use of _* (a sequence pattern must be the last pattern) - ^ -t5702-neg-bad-and-wild.scala:12: error: illegal start of simple pattern - case List(1, _*3,) => // illegal start of simple pattern - ^ -t5702-neg-bad-and-wild.scala:14: error: bad simple pattern: use _* to match a sequence - case List(1, x*) => // use _* to match a sequence - ^ -t5702-neg-bad-and-wild.scala:15: error: bad simple pattern: trailing * is not a valid pattern - case List(x*, 1) => // trailing * is not a valid pattern - ^ -t5702-neg-bad-and-wild.scala:16: error: bad simple pattern: trailing * is not a valid pattern - case (1, x*) => // trailing * is not a valid pattern - ^ -t5702-neg-bad-and-wild.scala:17: error: bad simple pattern: bad use of _* (sequence pattern not allowed) - case (1, x@_*) => // bad use of _* (sequence pattern not allowed) - ^ -t5702-neg-bad-and-wild.scala:23: error: bad simple pattern: bad use of _* (a sequence pattern must be the last pattern) - val K(ns @ _*, x) = k // bad use 
of _* (a sequence pattern must be the last pattern) - ^ -t5702-neg-bad-and-wild.scala:24: error: bad simple pattern: bad use of _* (sequence pattern not allowed) - val (b, _ * ) = (5,6) // bad use of _* (sequence pattern not allowed) - ^ -9 errors found diff --git a/tests/untried/neg/t5702-neg-bad-brace.check b/tests/untried/neg/t5702-neg-bad-brace.check deleted file mode 100644 index 503f7d95edc1..000000000000 --- a/tests/untried/neg/t5702-neg-bad-brace.check +++ /dev/null @@ -1,10 +0,0 @@ -t5702-neg-bad-brace.scala:14: error: Unmatched closing brace '}' ignored here - case List(1, _*} => - ^ -t5702-neg-bad-brace.scala:14: error: illegal start of simple pattern - case List(1, _*} => - ^ -t5702-neg-bad-brace.scala:15: error: ')' expected but '}' found. - } - ^ -three errors found diff --git a/tests/untried/neg/t5702-neg-bad-brace.scala b/tests/untried/neg/t5702-neg-bad-brace.scala deleted file mode 100644 index 16a341cf8c17..000000000000 --- a/tests/untried/neg/t5702-neg-bad-brace.scala +++ /dev/null @@ -1,17 +0,0 @@ - -object Test { - - def main(args: Array[String]) { - val is = List(1,2,3) - - is match { -// the erroneous brace is ignored, so we can't halt on it. -// maybe brace healing can detect overlapping unmatched (...} -// In this case, the fix emits an extra error: -// t5702-neg-bad-brace.scala:10: error: Unmatched closing brace '}' ignored here -// t5702-neg-bad-brace.scala:10: error: illegal start of simple pattern (i.e., =>) -// t5702-neg-bad-brace.scala:11: error: ')' expected but '}' found. - case List(1, _*} => - } - } -}