diff --git a/.github/Dockerfile b/.github/Dockerfile new file mode 100644 index 000000000000..d56ec6a59f2d --- /dev/null +++ b/.github/Dockerfile @@ -0,0 +1,25 @@ +FROM ubuntu:22.04 + +# The default locale is "POSIX" which is just ASCII. +ENV LANG C.UTF-8 +ENV DEBIAN_FRONTEND noninteractive +ENV TZ Europe/Zurich + +# Add packages to image, set default JDK version +RUN apt-get update && \ + apt-get upgrade -y && \ + # Use a PPA to get Java 17 + apt-get install -y software-properties-common && add-apt-repository ppa:openjdk-r/ppa && \ + apt-get install -y bash curl git ssh htop nano vim-tiny zile \ + openjdk-8-jdk-headless \ + openjdk-17-jdk-headless \ + openjdk-21-jdk-headless && \ + (curl -fsSL https://deb.nodesource.com/setup_18.x | bash -) && \ + apt-get install -y nodejs + + +# Install sbt +ENV SBT_HOME /usr/local/sbt +ENV PATH ${SBT_HOME}/bin:${PATH} +ENV SBT_VERSION 1.9.0 +RUN curl -sL "https://github.com/sbt/sbt/releases/download/v$SBT_VERSION/sbt-$SBT_VERSION.tgz" | gunzip | tar -x -C /usr/local \ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 99602049f5ec..f9cb18a0ad00 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,7 +1,10 @@ version: 2 updates: - - - package-ecosystem: "github-actions" + - package-ecosystem: github-actions directory: "/" schedule: - interval: "weekly" + interval: weekly + assignees: + - hamzaremmal + reviewers: + - hamzaremmal diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index cd5c1d717990..2747830fb7d6 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -141,7 +141,8 @@ jobs: - name: Cmd Tests run: | - ./project/scripts/sbt ";dist/pack; scala3-bootstrapped/compile; scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/*; scala3-compiler-bootstrapped/scala3CompilerCoursierTest:test" + ./project/scripts/buildScalaBinary + ./project/scripts/sbt ";scala3-bootstrapped/compile ;scala3-bootstrapped/test ;sbt-test/scripted 
scala2-compat/* ;scala3-compiler-bootstrapped/scala3CompilerCoursierTest:test" ./project/scripts/cmdTests ./project/scripts/bootstrappedOnlyCmdTests @@ -230,7 +231,7 @@ jobs: shell: cmd - name: build binary - run: sbt "dist/pack" & bash -version + run: sbt "dist-win-x86_64/pack" & bash -version shell: cmd - name: cygwin tests @@ -269,8 +270,12 @@ jobs: - name: Git Checkout uses: actions/checkout@v4 + - name: build binary + run: sbt "dist-win-x86_64/pack" + shell: cmd + - name: Test - run: sbt ";dist/pack ;scala3-bootstrapped/compile ;scala3-bootstrapped/test" + run: sbt ";scala3-bootstrapped/compile ;scala3-bootstrapped/test" shell: cmd - name: Scala.js Test @@ -596,7 +601,8 @@ jobs: - name: Test run: | - ./project/scripts/sbt ";dist/pack ;scala3-bootstrapped/compile ;scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/*" + ./project/scripts/buildScalaBinary + ./project/scripts/sbt ";scala3-bootstrapped/compile ;scala3-bootstrapped/test ;sbt-test/scripted scala2-compat/*" ./project/scripts/cmdTests ./project/scripts/bootstrappedOnlyCmdTests @@ -613,6 +619,9 @@ jobs: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt - ${{ github.workspace }}/../../cache/ivy:/root/.ivy2/cache - ${{ github.workspace }}/../../cache/general:/root/.cache + strategy: + matrix: + branch: [main, lts-3.3] needs: [test_non_bootstrapped, test, mima, community_build_a, community_build_b, community_build_c, test_sbt, test_java8] if: "(github.event_name == 'schedule' || github.event_name == 'workflow_dispatch') && github.repository == 'scala/scala3'" env: @@ -641,6 +650,8 @@ jobs: - name: Git Checkout uses: actions/checkout@v4 + with: + ref: ${{ matrix.branch }} - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true @@ -707,7 +718,7 @@ jobs: ./project/scripts/genDocs -doc-snapshot - name: Deploy Website to dotty-website - uses: peaceiris/actions-gh-pages@v3 + uses: peaceiris/actions-gh-pages@v4 with: personal_token: ${{ 
env.DOTTY_WEBSITE_BOT_TOKEN }} publish_dir: docs/_site @@ -758,13 +769,35 @@ jobs: - name: Add SBT proxy repositories run: cp -vf .github/workflows/repositories /root/.sbt/ ; true - - - name: Prepare Release - run: | + # Extract the release tag + - name: Extract the release tag + run : echo "RELEASE_TAG=${GITHUB_REF#*refs/tags/}" >> $GITHUB_ENV + # BUILD THE SDKs + - name: Build and pack the SDK (universal) + run : | ./project/scripts/sbt dist/packArchive sha256sum dist/target/scala3-* > dist/target/sha256sum.txt - echo "RELEASE_TAG=${GITHUB_REF#*refs/tags/}" >> $GITHUB_ENV - + - name: Build and pack the SDK (linux x86-64) + run : | + ./project/scripts/sbt dist-linux-x86_64/packArchive + sha256sum dist/linux-x86_64/target/scala3-* > dist/linux-x86_64/target/sha256sum.txt + - name: Build and pack the SDK (linux aarch64) + run : | + ./project/scripts/sbt dist-linux-aarch64/packArchive + sha256sum dist/linux-aarch64/target/scala3-* > dist/linux-aarch64/target/sha256sum.txt + - name: Build and pack the SDK (mac x86-64) + run : | + ./project/scripts/sbt dist-mac-x86_64/packArchive + sha256sum dist/mac-x86_64/target/scala3-* > dist/mac-x86_64/target/sha256sum.txt + - name: Build and pack the SDK (mac aarch64) + run : | + ./project/scripts/sbt dist-mac-aarch64/packArchive + sha256sum dist/mac-aarch64/target/scala3-* > dist/mac-aarch64/target/sha256sum.txt + - name: Build and pack the SDK (win x86-64) + run : | + ./project/scripts/sbt dist-win-x86_64/packArchive + sha256sum dist/win-x86_64/target/scala3-* > dist/win-x86_64/target/sha256sum.txt + # Create the GitHub release - name: Create GitHub Release id: create_gh_release uses: actions/create-release@latest @@ -777,7 +810,7 @@ jobs: draft: true prerelease: ${{ contains(env.RELEASE_TAG, '-') }} - - name: Upload zip archive to GitHub Release + - name: Upload zip archive to GitHub Release (universal) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -786,8 +819,7 @@ jobs: asset_path: 
./dist/target/scala3-${{ env.RELEASE_TAG }}.zip asset_name: scala3-${{ env.RELEASE_TAG }}.zip asset_content_type: application/zip - - - name: Upload tar.gz archive to GitHub Release + - name: Upload tar.gz archive to GitHub Release (universal) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -797,7 +829,103 @@ jobs: asset_name: scala3-${{ env.RELEASE_TAG }}.tar.gz asset_content_type: application/gzip - - name: Upload SHA256 sum of the release artefacts to GitHub Release + - name: Upload zip archive to GitHub Release (linux x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip + asset_content_type: application/zip + - name: Upload tar.gz archive to GitHub Release (linux x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz + asset_content_type: application/gzip + + - name: Upload zip archive to GitHub Release (linux aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip + asset_content_type: application/zip + - name: Upload tar.gz archive to GitHub Release (linux aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ 
steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz + asset_content_type: application/gzip + + - name: Upload zip archive to GitHub Release (mac x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.zip + asset_content_type: application/zip + - name: Upload tar.gz archive to GitHub Release (mac x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz + asset_content_type: application/gzip + + - name: Upload zip archive to GitHub Release (mac aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip + asset_content_type: application/zip + - name: Upload tar.gz archive to GitHub Release (mac aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz + asset_content_type: application/gzip + + - name: Upload zip archive 
to GitHub Release (win x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/win-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip + asset_content_type: application/zip + - name: Upload tar.gz archive to GitHub Release (win x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/win-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz + asset_content_type: application/gzip + + + - name: Upload SHA256 sum of the release artefacts to GitHub Release (universal) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -807,6 +935,56 @@ jobs: asset_name: sha256sum.txt asset_content_type: text/plain + - name: Upload SHA256 sum of the release artefacts to GitHub Release (linux x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-x86_64/target/sha256sum.txt + asset_name: sha256sum-x86_64-pc-linux.txt + asset_content_type: text/plain + + - name: Upload SHA256 sum of the release artefacts to GitHub Release (linux aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/linux-aarch64/target/sha256sum.txt + asset_name: sha256sum-aarch64-pc-linux.txt + asset_content_type: text/plain + + - name: Upload SHA256 sum of the release artefacts to GitHub Release (mac x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ 
secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-x86_64/target/sha256sum.txt + asset_name: sha256sum-x86_64-apple-darwin.txt + asset_content_type: text/plain + + - name: Upload SHA256 sum of the release artefacts to GitHub Release (mac aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/mac-aarch64/target/sha256sum.txt + asset_name: sha256sum-aarch64-apple-darwin.txt + asset_content_type: text/plain + + - name: Upload SHA256 sum of the release artefacts to GitHub Release (win x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./dist/win-x86_64/target/sha256sum.txt + asset_name: sha256sum-x86_64-pc-win32.txt + asset_content_type: text/plain + - name: Publish Release run: ./project/scripts/sbtPublish ";project scala3-bootstrapped ;publishSigned ;sonatypeBundleRelease" diff --git a/.github/workflows/launchers.yml b/.github/workflows/launchers.yml new file mode 100644 index 000000000000..818e3b72b06b --- /dev/null +++ b/.github/workflows/launchers.yml @@ -0,0 +1,96 @@ +name: Test CLI Launchers on all the platforms +on: + pull_request: + workflow_dispatch: + +jobs: + linux-x86_64: + name: Deploy and Test on Linux x64 architecture + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up JDK 17 + uses: actions/setup-java@v4 + with: + java-version: '17' + distribution: 'temurin' + cache: 'sbt' + - name: Build and test launcher command + run: ./project/scripts/native-integration/bashTests + env: + LAUNCHER_EXPECTED_PROJECT: "dist-linux-x86_64" + + linux-aarch64: + name: Deploy and Test on Linux ARM64 architecture + runs-on: macos-latest + if: ${{ false }} + steps: + - uses: actions/checkout@v4 + - name: Set up 
JDK 17 + uses: actions/setup-java@v4 + with: + java-version: '17' + distribution: 'temurin' + cache: 'sbt' + # https://github.com/actions/runner-images/issues/9369 + - name: Install sbt + run: brew install sbt + - name: Build and test launcher command + run: ./project/scripts/native-integration/bashTests + env: + LAUNCHER_EXPECTED_PROJECT: "dist-linux-aarch64" + + mac-x86_64: + name: Deploy and Test on Mac x64 architecture + runs-on: macos-13 + steps: + - uses: actions/checkout@v4 + - name: Set up JDK 17 + uses: actions/setup-java@v4 + with: + java-version: '17' + distribution: 'temurin' + cache: 'sbt' + # https://github.com/actions/runner-images/issues/9369 + - name: Install sbt + run: brew install sbt + - name: Build and test launcher command + run: ./project/scripts/native-integration/bashTests + env: + LAUNCHER_EXPECTED_PROJECT: "dist-mac-x86_64" + + mac-aarch64: + name: Deploy and Test on Mac ARM64 architecture + runs-on: macos-latest + steps: + - uses: actions/checkout@v4 + - name: Set up JDK 17 + uses: actions/setup-java@v4 + with: + java-version: '17' + distribution: 'temurin' + cache: 'sbt' + # https://github.com/actions/runner-images/issues/9369 + - name: Install sbt + run: brew install sbt + - name: Build and test launcher command + run: ./project/scripts/native-integration/bashTests + env: + LAUNCHER_EXPECTED_PROJECT: "dist-mac-aarch64" + + win-x86_64: + name: Deploy and Test on Windows x64 architecture + runs-on: windows-latest + steps: + - uses: actions/checkout@v4 + - name: Set up JDK 17 + uses: actions/setup-java@v4 + with: + java-version: '17' + distribution: 'temurin' + cache: 'sbt' + - name: Build the launcher command + run: sbt "dist-win-x86_64/pack" + - name: Run the launcher command tests + run: './project/scripts/native-integration/winTests.bat' + shell: cmd diff --git a/.github/workflows/lts-backport.yaml b/.github/workflows/lts-backport.yaml index 7e8564a634c4..9c3405235b31 100644 --- a/.github/workflows/lts-backport.yaml +++ 
b/.github/workflows/lts-backport.yaml @@ -15,7 +15,7 @@ jobs: with: fetch-depth: 0 - uses: coursier/cache-action@v6 - - uses: VirtusLab/scala-cli-setup@v1.2.0 + - uses: VirtusLab/scala-cli-setup@v1.3.0 - run: scala-cli ./project/scripts/addToBackportingProject.scala -- ${{ github.sha }} env: GRAPHQL_API_TOKEN: ${{ secrets.GRAPHQL_API_TOKEN }} diff --git a/.github/workflows/publish-sdkman.yml b/.github/workflows/publish-sdkman.yml new file mode 100644 index 000000000000..6f10ac128b6e --- /dev/null +++ b/.github/workflows/publish-sdkman.yml @@ -0,0 +1,67 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO PUBLISH SCALA TO SDKMAN! ### +### HOW TO USE: ### +### - THE RELEASE WORKFLOW SHOULD CALL THIS WORKFLOW ### +### - IT WILL PUBLISH TO SDKMAN! THE BINARIES TO EACH SUPPORTED PLATFORM AND A UNIVERSAL JAR ### +### - IT CHANGES THE DEFAULT VERSION IN SDKMAN! ### +### ### +### NOTE: ### +### - WE SHOULD KEEP IN SYNC THE NAME OF THE ARCHIVES WITH THE ACTUAL BUILD ### +### - WE SHOULD KEEP IN SYNC THE URL OF THE RELEASE ### +################################################################################################### + + +name: Publish Scala to SDKMAN! +run-name: Publish Scala ${{ inputs.version }} to SDKMAN! 
+ +on: + workflow_call: + inputs: + version: + required: true + type: string + secrets: + CONSUMER-KEY: + required: true + CONSUMER-TOKEN: + required: true + +env: + RELEASE-URL: 'https://github.com/scala/scala3/releases/download/${{ inputs.version }}' + +jobs: + publish: + runs-on: ubuntu-latest + strategy: + matrix: + include: + - platform: LINUX_64 + archive : 'scala3-${{ inputs.version }}-x86_64-pc-linux.zip' + - platform: LINUX_ARM64 + archive : 'scala3-${{ inputs.version }}-aarch64-pc-linux.zip' + - platform: MAC_OSX + archive : 'scala3-${{ inputs.version }}-x86_64-apple-darwin.zip' + - platform: MAC_ARM64 + archive : 'scala3-${{ inputs.version }}-aarch64-apple-darwin.zip' + - platform: WINDOWS_64 + archive : 'scala3-${{ inputs.version }}-x86_64-pc-win32.zip' + steps: + - uses: hamzaremmal/sdkman-release-action@4cb6c8cf99cfdf0ed5de586d6b38500558737e65 + with: + CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} + CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} + CANDIDATE : scala + VERSION : ${{ inputs.version }} + URL : '${{ env.RELEASE-URL }}/${{ matrix.archive }}' + PLATFORM : ${{ matrix.platform }} + + default: + runs-on: ubuntu-latest + needs: publish + steps: + - uses: hamzaremmal/sdkman-default-action@f312ff69dec7c4f83b060c3df90df7ed19e2d70e + with: + CONSUMER-KEY : ${{ secrets.CONSUMER-KEY }} + CONSUMER-TOKEN : ${{ secrets.CONSUMER-TOKEN }} + CANDIDATE : scala + VERSION : ${{ inputs.version }} diff --git a/.github/workflows/releases.yml b/.github/workflows/releases.yml index dde8b0372d52..4b75dd1b737d 100644 --- a/.github/workflows/releases.yml +++ b/.github/workflows/releases.yml @@ -1,32 +1,33 @@ -name: Releases +################################################################################################### +### OFFICIAL RELEASE WORKFLOW ### +### HOW TO USE: ### +### - THIS WORKFLOW WILL NEED TO BE TRIGGERED MANUALLY ### +### ### +### NOTE: ### +### - THIS WORKFLOW SHOULD ONLY BE RUN ON STABLE RELEASES ### +### - IT ASSUMES THAT THE PRE-RELEASE WORKFLOW 
WAS PREVIOUSLY EXECUTED ### +### ### +################################################################################################### + +name: Official release of Scala +run-name: Official release of Scala ${{ inputs.version }} + on: workflow_dispatch: - -permissions: - contents: read + inputs: + version: + description: 'The version to officially release' + required: true + type: string jobs: - publish_release: - runs-on: [self-hosted, Linux] - container: - image: lampepfl/dotty:2021-03-22 - options: --cpu-shares 4096 - - env: - SDKMAN_KEY: ${{ secrets.SDKMAN_KEY }} - SDKMAN_TOKEN: ${{ secrets.SDKMAN_TOKEN }} - - steps: - - name: Reset existing repo - run: | - git config --global --add safe.directory /__w/dotty/dotty - git -c "http.https://github.com/.extraheader=" fetch --recurse-submodules=no "https://github.com/lampepfl/dotty" && git reset --hard FETCH_HEAD || true - - - name: Cleanup - run: .github/workflows/cleanup.sh - - - name: Git Checkout - uses: actions/checkout@v4 - - - name: Publish to SDKMAN - run: .github/workflows/scripts/publish-sdkman.sh + # TODO: ADD JOB TO SWITCH THE GITHUB RELEASE FROM DRAFT TO LATEST + publish-sdkman: + uses: ./.github/workflows/publish-sdkman.yml + with: + version: ${{ inputs.version }} + secrets: + CONSUMER-KEY: ${{ secrets.SDKMAN_KEY }} + CONSUMER-TOKEN: ${{ secrets.SDKMAN_TOKEN }} + + # TODO: ADD RELEASE WORKFLOW TO CHOCOLATEY AND OTHER PACKAGE MANAGERS HERE \ No newline at end of file diff --git a/.github/workflows/scripts/publish-sdkman.sh b/.github/workflows/scripts/publish-sdkman.sh deleted file mode 100755 index f959c426e9d8..000000000000 --- a/.github/workflows/scripts/publish-sdkman.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env bash - -# This is script for publishing scala on SDKMAN. -# Script resolves the latest stable version of scala and then send REST request to SDKMAN Vendor API. -# It's releasing and announcing the release of scala on SDKMAN. 
-# -# Requirement: -# - the latest stable version of scala should be available in github artifacts - -set -u - -# latest stable dotty version -DOTTY_VERSION=$(curl -s https://api.github.com/repos/scala/scala3/releases/latest | grep '"tag_name":' | sed -E 's/.*"([^"]+)".*/\1/') -DOTTY_URL="https://github.com/scala/scala3/releases/download/$DOTTY_VERSION/scala3-$DOTTY_VERSION.zip" - -# checking if dotty version is available -if ! curl --output /dev/null --silent --head --fail "$DOTTY_URL"; then - echo "URL doesn't exist: $DOTTY_URL" - exit 1 -fi - -# Release a new Candidate Version -curl --silent --show-error --fail \ - -X POST \ - -H "Consumer-Key: $SDKMAN_KEY" \ - -H "Consumer-Token: $SDKMAN_TOKEN" \ - -H "Content-Type: application/json" \ - -H "Accept: application/json" \ - -d '{"candidate": "scala", "version": "'"$DOTTY_VERSION"'", "url": "'"$DOTTY_URL"'"}' \ - https://vendors.sdkman.io/release - -if [[ $? -ne 0 ]]; then - echo "Fail sending POST request to releasing scala on SDKMAN." - exit 1 -fi - -# Set DOTTY_VERSION as Default for Candidate -curl --silent --show-error --fail \ - -X PUT \ - -H "Consumer-Key: $SDKMAN_KEY" \ - -H "Consumer-Token: $SDKMAN_TOKEN" \ - -H "Content-Type: application/json" \ - -H "Accept: application/json" \ - -d '{"candidate": "scala", "version": "'"$DOTTY_VERSION"'"}' \ - https://vendors.sdkman.io/default - -if [[ $? -ne 0 ]]; then - echo "Fail sending PUT request to announcing the release of scala on SDKMAN." 
- exit 1 -fi diff --git a/.github/workflows/spec.yml b/.github/workflows/spec.yml index 94b99e81e044..a639c80bbda9 100644 --- a/.github/workflows/spec.yml +++ b/.github/workflows/spec.yml @@ -45,7 +45,7 @@ jobs: env: USER_FOR_TEST: ${{ secrets.SPEC_DEPLOY_USER }} if: ${{ env.USER_FOR_TEST != '' }} - uses: burnett01/rsync-deployments@7.0.0 + uses: burnett01/rsync-deployments@7.0.1 with: switches: -rzv path: docs/_spec/_site/ diff --git a/README.md b/README.md index 6c3212f0676b..7a2bda3f8073 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ Dotty ===== -[![Dotty CI](https://github.com/scala/scala3/workflows/Dotty/badge.svg?branch=master)](https://github.com/scala/scala3/actions?query=branch%3Amain) +[![Dotty CI](https://github.com/scala/scala3/workflows/Dotty/badge.svg?branch=main)](https://github.com/scala/scala3/actions?query=branch%3Amain) [![Join the chat at https://discord.com/invite/scala](https://img.shields.io/discord/632150470000902164)](https://discord.com/invite/scala) * [Documentation](https://docs.scala-lang.org/scala3/) diff --git a/bin/common b/bin/common index 7d3aa7148265..37b2ebd1ff93 100755 --- a/bin/common +++ b/bin/common @@ -9,15 +9,18 @@ target="$1" shift # Mutates $@ by deleting the first element ($1) +# set the $DIST_PROJECT and $DIST_DIR variables +source "$ROOT/bin/common-platform" + # Marker file used to obtain the date of latest call to sbt-back -version="$ROOT/dist/target/pack/VERSION" +version="$ROOT/$DIST_DIR/target/pack/VERSION" # Create the target if absent or if file changed in ROOT/compiler new_files="$(find "$ROOT/compiler" \( -iname "*.scala" -o -iname "*.java" \) -newer "$version" 2> /dev/null)" if [ ! -f "$version" ] || [ ! -z "$new_files" ]; then echo "Building Dotty..." 
- (cd $ROOT && sbt "dist/pack") + (cd $ROOT && sbt "$DIST_PROJECT/pack") fi -"$target" "$@" +"$ROOT/$DIST_DIR/target/pack/bin/$target" "$@" diff --git a/bin/common-platform b/bin/common-platform new file mode 100755 index 000000000000..648e0195e7e6 --- /dev/null +++ b/bin/common-platform @@ -0,0 +1,63 @@ +#!/usr/bin/env bash + +unset cygwin mingw msys darwin + +# COLUMNS is used together with command line option '-pageWidth'. +if command -v tput >/dev/null 2>&1; then + export COLUMNS="$(tput -Tdumb cols)" +fi + +case "`uname`" in + CYGWIN*) cygwin=true + ;; + MINGW*) mingw=true + ;; + MSYS*) msys=true + ;; + Darwin*) darwin=true + ;; +esac + +unset DIST_PROJECT DIST_DIR + +if [[ ${cygwin-} || ${mingw-} || ${msys-} ]]; then + DIST_PROJECT="dist-win-x86_64" + DIST_DIR="dist/win-x86_64" +else + # OS and arch logic taken from https://github.com/VirtusLab/scala-cli/blob/main/scala-cli.sh + unset arch ARCH_NORM + arch=$(uname -m) + if [[ "$arch" == "aarch64" ]] || [[ "$arch" == "x86_64" ]]; then + ARCH_NORM="$arch" + elif [[ "$arch" == "amd64" ]]; then + ARCH_NORM="x86_64" + elif [[ "$arch" == "arm64" ]]; then + ARCH_NORM="aarch64" + else + ARCH_NORM="unknown" + fi + + if [ "$(expr substr $(uname -s) 1 5 2>/dev/null)" == "Linux" ]; then + if [[ "$ARCH_NORM" == "unknown" ]]; then + echo >&2 "unknown Linux CPU architecture, defaulting to JVM launcher" + DIST_PROJECT="dist" + DIST_DIR="dist" + else + DIST_PROJECT="dist-linux-$ARCH_NORM" + DIST_DIR="dist/linux-$ARCH_NORM" + fi + elif [ "$(uname)" == "Darwin" ]; then + if [[ "$ARCH_NORM" == "unknown" ]]; then + echo >&2 "unknown Darwin CPU architecture, defaulting to JVM launcher" + DIST_PROJECT="dist" + DIST_DIR="dist" + else + DIST_PROJECT="dist-mac-$ARCH_NORM" + DIST_DIR="dist/mac-$ARCH_NORM" + fi + else + echo >&2 "unknown OS, defaulting to JVM launcher" + DIST_PROJECT="dist" + DIST_DIR="dist" + fi +fi diff --git a/bin/scala b/bin/scala index 66ec9a5774c7..e87c4391806b 100755 --- a/bin/scala +++ b/bin/scala @@ -2,4 +2,37 
@@ ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/.." -"$ROOT/bin/common" "$ROOT/dist/target/pack/bin/scala" "$@" +scala_args() { + + declare -a CLI_ARGS + declare -a SCRIPT_ARGS + declare DISABLE_BLOOP=1 + + while (( "$#" )); do + case "$1" in + "--") + shift + SCRIPT_ARGS+=("--") + SCRIPT_ARGS+=("$@") + break + ;; + "clean" | "version" | "--version" | "-version" | "help" | "--help" | "-help") + CLI_ARGS+=("$1") + DISABLE_BLOOP=0 # clean command should not add --offline --server=false + shift + ;; + *) + CLI_ARGS+=("$1") + shift + ;; + esac + done + + if [ $DISABLE_BLOOP -eq 1 ]; then + CLI_ARGS+=("--offline" "--server=false") + fi + + echo "--power ${CLI_ARGS[@]} ${SCRIPT_ARGS[@]}" +} + +"$ROOT/bin/common" "scala" $(scala_args "$@") diff --git a/bin/scalac b/bin/scalac index faeb48d92d87..d141b9a6c6bb 100755 --- a/bin/scalac +++ b/bin/scalac @@ -2,4 +2,4 @@ ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/.." -"$ROOT/bin/common" "$ROOT/dist/target/pack/bin/scalac" "$@" +"$ROOT/bin/common" "scalac" "$@" diff --git a/bin/scaladoc b/bin/scaladoc index 11a754c6579f..02decabb9ae3 100755 --- a/bin/scaladoc +++ b/bin/scaladoc @@ -2,4 +2,4 @@ ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/.." -"$ROOT/bin/common" "$ROOT/dist/target/pack/bin/scaladoc" "$@" +"$ROOT/bin/common" "scaladoc" "$@" diff --git a/bin/test/TestScripts.scala b/bin/test/TestScripts.scala index bada140580fc..4a2fd9a05c83 100644 --- a/bin/test/TestScripts.scala +++ b/bin/test/TestScripts.scala @@ -57,7 +57,7 @@ class TestScripts { s"bin/scalac script did not run properly. 
Output:$lineSep$dotcOutput" ) - val (retDotr, dotrOutput) = executeScript("./bin/scala HelloWorld") + val (retDotr, dotrOutput) = executeScript("./bin/scala -M HelloWorld") assert( retDotr == 0 && dotrOutput == "hello world\n", s"Running hello world exited with status: $retDotr and output: $dotrOutput" diff --git a/build.sbt b/build.sbt index 1bc74e5e23fb..f357044c91ca 100644 --- a/build.sbt +++ b/build.sbt @@ -28,6 +28,11 @@ val `scaladoc-js-main` = Build.`scaladoc-js-main` val `scaladoc-js-contributors` = Build.`scaladoc-js-contributors` val `scala3-bench-run` = Build.`scala3-bench-run` val dist = Build.dist +val `dist-mac-x86_64` = Build.`dist-mac-x86_64` +val `dist-mac-aarch64` = Build.`dist-mac-aarch64` +val `dist-win-x86_64` = Build.`dist-win-x86_64` +val `dist-linux-x86_64` = Build.`dist-linux-x86_64` +val `dist-linux-aarch64` = Build.`dist-linux-aarch64` val `community-build` = Build.`community-build` val `sbt-community-build` = Build.`sbt-community-build` val `scala3-presentation-compiler` = Build.`scala3-presentation-compiler` diff --git a/changelogs/3.5.0-RC1.md b/changelogs/3.5.0-RC1.md new file mode 100644 index 000000000000..4cbc2aa1d668 --- /dev/null +++ b/changelogs/3.5.0-RC1.md @@ -0,0 +1,254 @@ +# Highlights of the release + +- Bundle scala-cli in scala command (For RC1 requires JVM 17, further RCs will use native launchers) +- Introduce Best Effort compilation options [#17582](https://github.com/lampepfl/dotty/pull/17582) +- Add support for Pipelined builds [#18880](https://github.com/lampepfl/dotty/pull/18880) +- Add support for `var` in refinements [#19982](https://github.com/lampepfl/dotty/pull/19982) +- Implement SIP-42 - Support for binary integer literals [#19405](https://github.com/lampepfl/dotty/pull/19405) + +# Other changes and fixes + +## Backend + +- Fix Closure span assignment in makeClosure [#15841](https://github.com/lampepfl/dotty/pull/15841) + +## Default parameters + +- Fix default args lookup for given classes 
[#20256](https://github.com/lampepfl/dotty/pull/20256) +- Fix implicit search failure reporting [#20261](https://github.com/lampepfl/dotty/pull/20261) + +## Derivation + +- Fix infinite loop in Mirror synthesis of unreducible match type [#20133](https://github.com/lampepfl/dotty/pull/20133) + +## Desugaring + +- Add explanation to checkCaseClassInheritanceInvariant error msg [#20141](https://github.com/lampepfl/dotty/pull/20141) + +## Exports + +- Add annotations in parameters for exports [#20140](https://github.com/lampepfl/dotty/pull/20140) +- Fix isAliasType [#20195](https://github.com/lampepfl/dotty/pull/20195) + +## Implicits + +- Fix implicitNotFound message for type aliases [#19343](https://github.com/lampepfl/dotty/pull/19343) +- Normalize types before collecting parts determining implicit scope [#20077](https://github.com/lampepfl/dotty/pull/20077) +- Better error diagnostics under -explain-cyclic [#20251](https://github.com/lampepfl/dotty/pull/20251) +- Update unreducible match types error reporting [#19954](https://github.com/lampepfl/dotty/pull/19954) +- Improve ConstraintHandling of SkolemTypes [#20175](https://github.com/lampepfl/dotty/pull/20175) + +## Incremental Compilation + +- Retain default parameters with `export` [#20167](https://github.com/lampepfl/dotty/pull/20167) + +## Inline + +- Fix by-name parameter in beta-reduction [#20096](https://github.com/lampepfl/dotty/pull/20096) +- Add warning for anonymous inline classes (#16723) [#20291](https://github.com/lampepfl/dotty/pull/20291) +- Avoid conversion of `Unit` type into `()` term [#20295](https://github.com/lampepfl/dotty/pull/20295) +- Type desugared `transparent inline def unapply` call in the correct mode [#20108](https://github.com/lampepfl/dotty/pull/20108) +- Regression: fix compilation performance on Windows [#20193](https://github.com/lampepfl/dotty/pull/20193) +- Fix inline match on blocks with multiple statements [#20125](https://github.com/lampepfl/dotty/pull/20125) +- Inline 
`unapply`s in the inlining phase [#19382](https://github.com/lampepfl/dotty/pull/19382) +- Fix outerSelect in Inliner [#20313](https://github.com/lampepfl/dotty/pull/20313) + +## Linting + +- Fix #20146: attach the original name if there is an import selection for an indent [#20163](https://github.com/lampepfl/dotty/pull/20163) +- Add regression test for issue 18632 [#20308](https://github.com/lampepfl/dotty/pull/20308) + +## Match Types + +- Make aliases of `MatchAlias`es normal `TypeAlias`es [#19871](https://github.com/lampepfl/dotty/pull/19871) +- Fix #19746: Do not follow param term refs in `isConcrete`. [#20015](https://github.com/lampepfl/dotty/pull/20015) +- Do match type reduction atPhaseNoLater than ElimOpaque [#20017](https://github.com/lampepfl/dotty/pull/20017) +- Do not flag match types as `Deferred` and amend #20077 [#20147](https://github.com/lampepfl/dotty/pull/20147) +- Always use baseType when constraining patternTp with scrutineeTp [#20032](https://github.com/lampepfl/dotty/pull/20032) +- Use `MirrorSource.reduce` result for `companionPath` [#20207](https://github.com/lampepfl/dotty/pull/20207) +- Regression: Fix match type extraction of a MatchAlias [#20111](https://github.com/lampepfl/dotty/pull/20111) + +## Polyfunctions + +- Discard poly-functions when trying to resolve overloading [#20181](https://github.com/lampepfl/dotty/pull/20181) + +## Presentation Compiler + +- Stabilise returned completions by improving deduplication + extra completions for constructors [#19976](https://github.com/lampepfl/dotty/pull/19976) +- Fix active param index for empty param lists [#20142](https://github.com/lampepfl/dotty/pull/20142) +- Delias type members in hover [#20173](https://github.com/lampepfl/dotty/pull/20173) +- Interactive: handle context bounds in extension construct workaround [#20201](https://github.com/lampepfl/dotty/pull/20201) +- Fix: prefer non-export definition locations [#20252](https://github.com/lampepfl/dotty/pull/20252) +- Don't show 
enum completions in new keyword context [#20304](https://github.com/lampepfl/dotty/pull/20304) +- Chore: Backport changes for presentation compiler [#20345](https://github.com/lampepfl/dotty/pull/20345) +- Add custom matchers for completions (fuzzy search for presentation compiler) [#19850](https://github.com/lampepfl/dotty/pull/19850) + +## Quotes + +- Fix TermRef prefixes not having their type healed [#20102](https://github.com/lampepfl/dotty/pull/20102) +- Improve reporting in staging about the possible use of an incorrect class loader [#20137](https://github.com/lampepfl/dotty/pull/20137) +- Introduce MethodTypeKind to quotes reflection API [#20249](https://github.com/lampepfl/dotty/pull/20249) +- Add quote ASTs to TASTy [#20165](https://github.com/lampepfl/dotty/pull/20165) + +## Reflection + +- Allow to beta reduce curried function applications in quotes reflect [#18121](https://github.com/lampepfl/dotty/pull/18121) +- Set the inlining phase in the Context used for checking macro trees [#20087](https://github.com/lampepfl/dotty/pull/20087) +- Add Symbol.isSuperAccessor to reflection API [#13388](https://github.com/lampepfl/dotty/pull/13388) +- Stabilize reflect `SymbolMethods.isSuperAccessor` [#20198](https://github.com/lampepfl/dotty/pull/20198) + +## Repl + +- Fix validity period of derived SingleDenotations [#19983](https://github.com/lampepfl/dotty/pull/19983) +- Fix #18383: Never consider top-level `import`s as unused in the repl. 
[#20310](https://github.com/lampepfl/dotty/pull/20310) + +## Reporting + +- Warn if extension receiver already has member [#17543](https://github.com/lampepfl/dotty/pull/17543) +- Deprecation of case class elements [#17911](https://github.com/lampepfl/dotty/pull/17911) +- Support src filter in -WConf (Closes #17635) [#18783](https://github.com/lampepfl/dotty/pull/18783) +- Add note about type mismatch in automatically inserted apply argument [#20023](https://github.com/lampepfl/dotty/pull/20023) +- Make error reporting resilient to exception thrown while reporting [#20158](https://github.com/lampepfl/dotty/pull/20158) +- Remove duplicate comma from Matchable selector warning [#20159](https://github.com/lampepfl/dotty/pull/20159) +- Generalize warnings for top-level calls to Any or AnyRef methods [#20312](https://github.com/lampepfl/dotty/pull/20312) +- Make CheckUnused not slow. [#20321](https://github.com/lampepfl/dotty/pull/20321) + +## Rewrites + +- Patch indentation when removing braces (and other bug fixes in `-indent -rewrite`) [#17522](https://github.com/lampepfl/dotty/pull/17522) +- Extra check to avoid converting block expressions on the rhs of an in… [#20043](https://github.com/lampepfl/dotty/pull/20043) + +## Scaladoc + +- Fix scaladoc crash on Windows - illegal path character [#20311](https://github.com/lampepfl/dotty/pull/20311) +- Scaladoc: improve refined function types rendering [#20333](https://github.com/lampepfl/dotty/pull/20333) +- Relax font-weight reset [#20348](https://github.com/lampepfl/dotty/pull/20348) + +## Scala JS + +- Optimize main.js [#20093](https://github.com/lampepfl/dotty/pull/20093) + +## Settings + +- Lift Scala Settings from experimental to stabilized [#20199](https://github.com/lampepfl/dotty/pull/20199) + +## Tooling + +- Detect macro dependencies that are missing from the classloader [#20139](https://github.com/lampepfl/dotty/pull/20139) +- Write pipelined tasty in parallel. 
[#20153](https://github.com/lampepfl/dotty/pull/20153) +- ConsoleReporter sends INFO to stdout [#20328](https://github.com/lampepfl/dotty/pull/20328) + +## Transform + +- Fix overloaded default methods test in RefChecks [#20218](https://github.com/lampepfl/dotty/pull/20218) +- Fix handling of AppliedType aliases in outerPrefix [#20190](https://github.com/lampepfl/dotty/pull/20190) +- Elide unit binding when beta-reducing [#20085](https://github.com/lampepfl/dotty/pull/20085) + +## Typer + +- Reduce projections of type aliases with class type prefixes [#19931](https://github.com/lampepfl/dotty/pull/19931) +- Re-lub also hard union types in simplify [#20027](https://github.com/lampepfl/dotty/pull/20027) +- Fix #19789: Merge same TypeParamRef in orDominator [#20090](https://github.com/lampepfl/dotty/pull/20090) +- Allow SAM types to contain match alias refinements [#20092](https://github.com/lampepfl/dotty/pull/20092) +- Don't dealias when deciding which arguments to defer [#20116](https://github.com/lampepfl/dotty/pull/20116) +- Avoid the TypeVar.inst trap [#20160](https://github.com/lampepfl/dotty/pull/20160) +- Avoid crash when superType does not exist after erasure [#20188](https://github.com/lampepfl/dotty/pull/20188) +- Refine overloading and implicit disambiguation [#20084](https://github.com/lampepfl/dotty/pull/20084) +- Refactor constant folding of applications [#20099](https://github.com/lampepfl/dotty/pull/20099) +- Rollback constraints if `isSameType` failed second direction [#20109](https://github.com/lampepfl/dotty/pull/20109) +- Suppress "extension method will never be selected" for overrides [#20164](https://github.com/lampepfl/dotty/pull/20164) +- Allow SAM types to contain multiple refinements [#20172](https://github.com/lampepfl/dotty/pull/20172) +- Normalize when verifying if TypeTestCasts are unchecked [#20258](https://github.com/lampepfl/dotty/pull/20258) + +# Experimental Changes + +- Named tuples second implementation 
[#19174](https://github.com/lampepfl/dotty/pull/19174) +- Change rules for given prioritization [#19300](https://github.com/lampepfl/dotty/pull/19300) +- Enable experimental mode when experimental feature is imported [#19807](https://github.com/lampepfl/dotty/pull/19807) +- Add message parameter to `@experimental` annotation [#19935](https://github.com/lampepfl/dotty/pull/19935) +- Implement match type amendment: extractors follow aliases and singletons [#20161](https://github.com/lampepfl/dotty/pull/20161) + +## Capture Checking + +- Carry and check universal capability from parents correctly [#20004](https://github.com/lampepfl/dotty/pull/20004) +- Make parameter types of context functions inferred type trees [#20155](https://github.com/lampepfl/dotty/pull/20155) +- Handle reach capabilities correctly in depedent functions [#20203](https://github.com/lampepfl/dotty/pull/20203) +- Fix the visibility check in `markFree` [#20221](https://github.com/lampepfl/dotty/pull/20221) +- Make inline proxy vals have inferred types [#20241](https://github.com/lampepfl/dotty/pull/20241) +- CC: Give more info when context function parameters leak [#20244](https://github.com/lampepfl/dotty/pull/20244) +- Plug soundness hole for reach capabilities [#20051](https://github.com/lampepfl/dotty/pull/20051) +- Tighten the screws a bit more to seal the soundness hole for reach capabilities [#20056](https://github.com/lampepfl/dotty/pull/20056) +- Drop retains annotations in inferred type trees [#20057](https://github.com/lampepfl/dotty/pull/20057) +- Allow @retains arguments to be context functions [#20232](https://github.com/lampepfl/dotty/pull/20232) +- Fix conversion of this.fld capture refs under separate compilation [#20238](https://github.com/lampepfl/dotty/pull/20238) + +## Erased definitions + +- Fix "Compiler crash when using CanThrow" [#20210](https://github.com/lampepfl/dotty/pull/20210) +- Only allow erased parameters in erased definitions 
[#19686](https://github.com/lampepfl/dotty/pull/19686) + +## Initialization + +- Deprecate `StandardPlugin.init` in favor of `initialize` method taking implicit Context [#20330](https://github.com/lampepfl/dotty/pull/20330) +- Fix missing changesParents in PostTyper [#20062](https://github.com/lampepfl/dotty/pull/20062) +- Special case for next field of colon colon in global init checker [#20281](https://github.com/lampepfl/dotty/pull/20281) +- Extend whitelist in global initialization checker [#20290](https://github.com/lampepfl/dotty/pull/20290) + +## Macro Annotations + +- Allow macro annotation to transform companion [#19677](https://github.com/lampepfl/dotty/pull/19677) +- Remove experimental `MainAnnotation`/`newMain` (replaced with `MacroAnnotation`) [#19937](https://github.com/lampepfl/dotty/pull/19937) + +## Nullability + +- Add flexible types to deal with Java-defined signatures under -Yexplicit-nulls [#18112](https://github.com/lampepfl/dotty/pull/18112) +- Fix #20287: Add flexible types to Quotes library [#20293](https://github.com/lampepfl/dotty/pull/20293) +- Add fromNullable to Predef for explicit nulls [#20222](https://github.com/lampepfl/dotty/pull/20222) + + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.4.2..3.5.0-RC1` these are: + +``` + 137 Martin Odersky + 51 Eugene Flesselle + 32 Jamie Thompson + 25 Nicolas Stucki + 22 Sébastien Doeraene + 18 noti0na1 + 16 Matt Bovel + 12 Guillaume Martres + 9 Paweł Marks + 9 Yichen Xu + 8 Jan Chyb + 7 Hamza REMMAL + 6 Jędrzej Rochala + 6 Som Snytt + 5 Fengyun Liu + 5 dependabot[bot] + 3 Mikołaj Fornal + 2 Aviv Keller + 2 EnzeXing + 2 Wojciech Mazur + 1 Chris Pado + 1 Filip Zybała + 1 Georgi Krastev + 1 Hamza Remmal + 1 Jisoo Park + 1 Katarzyna Marek + 1 Lucas Nouguier + 1 Lucy Martin + 1 Ola Flisbäck + 1 Pascal Weisenburger + 1 Quentin Bernet + 1 Raphael Jolly + 1 Stephane Bersier + 1 Tomasz Godzik + 1 Yoonjae Jeon + 1 
aherlihy + 1 rochala + 1 willerf +``` diff --git a/changelogs/3.5.0-RC2.md b/changelogs/3.5.0-RC2.md new file mode 100644 index 000000000000..f3bb8b52c73c --- /dev/null +++ b/changelogs/3.5.0-RC2.md @@ -0,0 +1,25 @@ +# Backported fixes + +- Bundle scala-cli in scala command [#20351](https://github.com/scala/scala3/pull/20351) +- Avoid stacked thisCall contexts [#20488](https://github.com/scala/scala3/pull/20488) +- Adapt the workflow to release on SDKMAN! [#20535](https://github.com/scala/scala3/pull/20535) +- Adapt the release workflow to SIP-46 [#20565](https://github.com/scala/scala3/pull/20565) +- Disable ClasspathTests.unglobClasspathVerifyTest [#20551](https://github.com/scala/scala3/pull/20551) +- Set default source version to 3.5 [#20441](https://github.com/scala/scala3/pull/20441) +- Bring back ambiguity filter when we report an implicit not found error [#20368](https://github.com/scala/scala3/pull/20368) +- Treat 3.5-migration the same as 3.5 for a warning about implicit priority change [#20436](https://github.com/scala/scala3/pull/20436) +- Avoid forcing whole package when using -experimental [#20409](https://github.com/scala/scala3/pull/20409) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.5.0-RC1..3.5.0-RC2` these are: + +``` + 4 Hamza Remmal + 4 Wojciech Mazur + 3 Martin Odersky + 1 Jamie Thompson + 1 Guillaume Martres +``` diff --git a/changelogs/3.5.0-RC3.md b/changelogs/3.5.0-RC3.md new file mode 100644 index 000000000000..a7a2d164d5a7 --- /dev/null +++ b/changelogs/3.5.0-RC3.md @@ -0,0 +1,26 @@ +# Backported fixes + +- Release .zip instead of .tar.gz for windows in sdkman [#20630](https://github.com/scala/scala3/pull/20630) +- SIP 46 - read classpath from file, remove lib directory in distribution [#20631](https://github.com/scala/scala3/pull/20631) +- Bump scala-cli to 1.4.0 [#20859](https://github.com/scala/scala3/pull/20859) +- Priority warning fix 
alternative [#20487](https://github.com/scala/scala3/pull/20487) +- Add --skip-cli-updates by default to the scala command [#20900](https://github.com/scala/scala3/pull/20900) +- Upgrade Scala 2 to 2.13.14 (was 2.13.12) [#20902](https://github.com/scala/scala3/pull/20902) +- fix issue 20901: etaCollapse context bound type [#20910](https://github.com/scala/scala3/pull/20910) +- Use final result type to check selector bound [#20989](https://github.com/scala/scala3/pull/20989) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.5.0-RC2..3.5.0-RC3` these are: + +``` + 6 Wojciech Mazur + 5 Jamie Thompson + 5 Martin Odersky + 4 Hamza Remmal + 1 Hamza REMMAL + 1 Seth Tisue + 1 Som Snytt +``` diff --git a/changelogs/3.5.0-RC4.md b/changelogs/3.5.0-RC4.md new file mode 100644 index 000000000000..75e72870d6f4 --- /dev/null +++ b/changelogs/3.5.0-RC4.md @@ -0,0 +1,19 @@ +# Backported fixes + +- Refine implicit priority change warnings [#21045](https://github.com/scala/scala3/pull/21045) +- Use pathing jars in cli commands [#21121](https://github.com/scala/scala3/pull/21121) +- expand classpath of pathing jars in scala_legacy command [#21160](https://github.com/scala/scala3/pull/21160) +- Fix symbol reference retrivial of `scala.caps.Caps` [#20493](https://github.com/scala/scala3/pull/20493) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.5.0-RC3..3.5.0-RC4` these are: + +``` + 5 Martin Odersky + 3 Wojciech Mazur + 2 Hamza REMMAL + 1 Jamie Thompson +``` diff --git a/changelogs/3.5.0-RC5.md b/changelogs/3.5.0-RC5.md new file mode 100644 index 000000000000..405396223eb7 --- /dev/null +++ b/changelogs/3.5.0-RC5.md @@ -0,0 +1,14 @@ +# Backported fixes + +- emit generatedNonLocalClass in backend when callback is not enabled [#21186](https://github.com/scala/scala3/pull/21186) + +# Contributors + +Thank you to all 
the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.5.0-RC4..3.5.0-RC5` these are: + +``` + 2 Jamie Thompson + 2 Wojciech Mazur +``` diff --git a/changelogs/3.5.0-RC6.md b/changelogs/3.5.0-RC6.md new file mode 100644 index 000000000000..77731f346750 --- /dev/null +++ b/changelogs/3.5.0-RC6.md @@ -0,0 +1,13 @@ +# Backported fixes + +- Revert "Approximate MatchTypes with lub of case bodies, if non-recursive" in 3.5.0 [#21266](https://github.com/scala/scala3/pull/21266) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.5.0-RC5..3.5.0-RC6` these are: + +``` + 4 Wojciech Mazur +``` diff --git a/changelogs/3.5.0-RC7.md b/changelogs/3.5.0-RC7.md new file mode 100644 index 000000000000..dab10f60b1ee --- /dev/null +++ b/changelogs/3.5.0-RC7.md @@ -0,0 +1,15 @@ +# Backported fixes + +- Backport "Fix healAmbiguous to compareAlternatives with disambiguate = true" to 3.5.0 [#21344](https://github.com/scala/scala3/pull/21344) + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.5.0-RC6..3.5.0-RC7` these are: + +``` + 5 Martin Odersky + 4 Wojciech Mazur + 2 Eugene Flesselle +``` diff --git a/changelogs/3.5.0.md b/changelogs/3.5.0.md new file mode 100644 index 000000000000..654a19b169a8 --- /dev/null +++ b/changelogs/3.5.0.md @@ -0,0 +1,278 @@ +# Highlights of the release + +- Bundle scala-cli in scala command (For RC1 requires JVM 17, further RCs will use native launchers) +- Introduce Best Effort compilation options [#17582](https://github.com/lampepfl/dotty/pull/17582) +- Add support for Pipelined builds [#18880](https://github.com/lampepfl/dotty/pull/18880) +- Add support for `var` in refinements [#19982](https://github.com/lampepfl/dotty/pull/19982) +- Implement SIP-42 - Support for binary integer literals [#19405](https://github.com/lampepfl/dotty/pull/19405) + +# 
Other changes and fixes + +## Backend + +- Fix Closure span assignment in makeClosure [#15841](https://github.com/lampepfl/dotty/pull/15841) + +## Default parameters + +- Fix default args lookup for given classes [#20256](https://github.com/lampepfl/dotty/pull/20256) +- Fix implicit search failure reporting [#20261](https://github.com/lampepfl/dotty/pull/20261) + +## Derivation + +- Fix infinite loop in Mirror synthesis of unreducible match type [#20133](https://github.com/lampepfl/dotty/pull/20133) + +## Desugaring + +- Add explanation to checkCaseClassInheritanceInvariant error msg [#20141](https://github.com/lampepfl/dotty/pull/20141) + +## Exports + +- Add annotations in parameters for exports [#20140](https://github.com/lampepfl/dotty/pull/20140) +- Fix isAliasType [#20195](https://github.com/lampepfl/dotty/pull/20195) + +## Implicits + +- Fix implicitNotFound message for type aliases [#19343](https://github.com/lampepfl/dotty/pull/19343) +- Normalize types before collecting parts determining implicit scope [#20077](https://github.com/lampepfl/dotty/pull/20077) +- Better error diagnostics under -explain-cyclic [#20251](https://github.com/lampepfl/dotty/pull/20251) +- Update unreducible match types error reporting [#19954](https://github.com/lampepfl/dotty/pull/19954) +- Improve ConstraintHandling of SkolemTypes [#20175](https://github.com/lampepfl/dotty/pull/20175) + +## Incremental Compilation + +- Retain default parameters with `export` [#20167](https://github.com/lampepfl/dotty/pull/20167) + +## Inline + +- Fix by-name parameter in beta-reduction [#20096](https://github.com/lampepfl/dotty/pull/20096) +- Add warning for anonymous inline classes (#16723) [#20291](https://github.com/lampepfl/dotty/pull/20291) +- Avoid conversion of `Unit` type into `()` term [#20295](https://github.com/lampepfl/dotty/pull/20295) +- Type desugared `transparent inline def unapply` call in the correct mode [#20108](https://github.com/lampepfl/dotty/pull/20108) +- Regression: fix 
compilation performance on Windows [#20193](https://github.com/lampepfl/dotty/pull/20193) +- Fix inline match on blocks with multiple statements [#20125](https://github.com/lampepfl/dotty/pull/20125) +- Inline `unapply`s in the inlining phase [#19382](https://github.com/lampepfl/dotty/pull/19382) +- Fix outerSelect in Inliner [#20313](https://github.com/lampepfl/dotty/pull/20313) + +## Linting + +- Fix #20146: attach the original name if there is an import selection for an indent [#20163](https://github.com/lampepfl/dotty/pull/20163) +- Add regression test for issue 18632 [#20308](https://github.com/lampepfl/dotty/pull/20308) + +## Match Types + +- Make aliases of `MatchAlias`es normal `TypeAlias`es [#19871](https://github.com/lampepfl/dotty/pull/19871) +- Fix #19746: Do not follow param term refs in `isConcrete`. [#20015](https://github.com/lampepfl/dotty/pull/20015) +- Do match type reduction atPhaseNoLater than ElimOpaque [#20017](https://github.com/lampepfl/dotty/pull/20017) +- Do not flag match types as `Deferred` and amend #20077 [#20147](https://github.com/lampepfl/dotty/pull/20147) +- Always use baseType when constraining patternTp with scrutineeTp [#20032](https://github.com/lampepfl/dotty/pull/20032) +- Use `MirrorSource.reduce` result for `companionPath` [#20207](https://github.com/lampepfl/dotty/pull/20207) +- Regression: Fix match type extraction of a MatchAlias [#20111](https://github.com/lampepfl/dotty/pull/20111) +- Revert "Approximate MatchTypes with lub of case bodies, if non-recursive" in 3.5.0 [#21266](https://github.com/scala/scala3/pull/21266) + +## Polyfunctions + +- Discard poly-functions when trying to resolve overloading [#20181](https://github.com/lampepfl/dotty/pull/20181) + +## Presentation Compiler + +- Stabilise returned completions by improving deduplication + extra completions for constructors [#19976](https://github.com/lampepfl/dotty/pull/19976) +- Fix active param index for empty param lists 
[#20142](https://github.com/lampepfl/dotty/pull/20142) +- Delias type members in hover [#20173](https://github.com/lampepfl/dotty/pull/20173) +- Interactive: handle context bounds in extension construct workaround [#20201](https://github.com/lampepfl/dotty/pull/20201) +- Fix: prefer non-export definition locations [#20252](https://github.com/lampepfl/dotty/pull/20252) +- Don't show enum completions in new keyword context [#20304](https://github.com/lampepfl/dotty/pull/20304) +- Chore: Backport changes for presentation compiler [#20345](https://github.com/lampepfl/dotty/pull/20345) +- Add custom matchers for completions (fuzzy search for presentation compiler) [#19850](https://github.com/lampepfl/dotty/pull/19850) + +## Quotes + +- Fix TermRef prefixes not having their type healed [#20102](https://github.com/lampepfl/dotty/pull/20102) +- Improve reporting in staging about the possible use of an incorrect class loader [#20137](https://github.com/lampepfl/dotty/pull/20137) +- Introduce MethodTypeKind to quotes reflection API [#20249](https://github.com/lampepfl/dotty/pull/20249) +- Add quote ASTs to TASTy [#20165](https://github.com/lampepfl/dotty/pull/20165) + +## Reflection + +- Allow to beta reduce curried function applications in quotes reflect [#18121](https://github.com/lampepfl/dotty/pull/18121) +- Set the inlining phase in the Context used for checking macro trees [#20087](https://github.com/lampepfl/dotty/pull/20087) +- Add Symbol.isSuperAccessor to reflection API [#13388](https://github.com/lampepfl/dotty/pull/13388) +- Stabilize reflect `SymbolMethods.isSuperAccessor` [#20198](https://github.com/lampepfl/dotty/pull/20198) + +## Repl + +- Fix validity period of derived SingleDenotations [#19983](https://github.com/lampepfl/dotty/pull/19983) +- Fix #18383: Never consider top-level `import`s as unused in the repl. 
[#20310](https://github.com/lampepfl/dotty/pull/20310) + +## Reporting + +- Warn if extension receiver already has member [#17543](https://github.com/lampepfl/dotty/pull/17543) +- Deprecation of case class elements [#17911](https://github.com/lampepfl/dotty/pull/17911) +- Support src filter in -WConf (Closes #17635) [#18783](https://github.com/lampepfl/dotty/pull/18783) +- Add note about type mismatch in automatically inserted apply argument [#20023](https://github.com/lampepfl/dotty/pull/20023) +- Make error reporting resilient to exception thrown while reporting [#20158](https://github.com/lampepfl/dotty/pull/20158) +- Remove duplicate comma from Matchable selector warning [#20159](https://github.com/lampepfl/dotty/pull/20159) +- Generalize warnings for top-level calls to Any or AnyRef methods [#20312](https://github.com/lampepfl/dotty/pull/20312) +- Make CheckUnused not slow. [#20321](https://github.com/lampepfl/dotty/pull/20321) +- Bring back ambiguity filter when we report an implicit not found error [#20368](https://github.com/scala/scala3/pull/20368) +- Treat 3.5-migration the same as 3.5 for a warning about implicit priority change [#20436](https://github.com/scala/scala3/pull/20436) +- Priority warning fix alternative [#20487](https://github.com/scala/scala3/pull/20487) +- Use final result type to check selector bound [#20989](https://github.com/scala/scala3/pull/20989) +- Refine implicit priority change warnings [#21045](https://github.com/scala/scala3/pull/21045) +- Backport "Fix healAmbiguous to compareAlternatives with disambiguate = true" to 3.5.0 [#21344](https://github.com/scala/scala3/pull/21344) + +## Rewrites + +- Patch indentation when removing braces (and other bug fixes in `-indent -rewrite`) [#17522](https://github.com/lampepfl/dotty/pull/17522) +- Extra check to avoid converting block expressions on the rhs of an in… [#20043](https://github.com/lampepfl/dotty/pull/20043) + +## Scaladoc + +- Fix scaladoc crash on Windows - illegal path 
character [#20311](https://github.com/lampepfl/dotty/pull/20311) +- Scaladoc: improve refined function types rendering [#20333](https://github.com/lampepfl/dotty/pull/20333) +- Relax font-weight reset [#20348](https://github.com/lampepfl/dotty/pull/20348) + +## Scala JS + +- Optimize main.js [#20093](https://github.com/lampepfl/dotty/pull/20093) + +## Settings + +- Lift Scala Settings from experimental to stabilized [#20199](https://github.com/lampepfl/dotty/pull/20199) + +## Tooling + +- Detect macro dependencies that are missing from the classloader [#20139](https://github.com/lampepfl/dotty/pull/20139) +- Write pipelined tasty in parallel. [#20153](https://github.com/lampepfl/dotty/pull/20153) +- ConsoleReporter sends INFO to stdout [#20328](https://github.com/lampepfl/dotty/pull/20328) +- Bundle scala-cli in scala command [#20351](https://github.com/scala/scala3/pull/20351) +- Adapt the workflow to release on SDKMAN! [#20535](https://github.com/scala/scala3/pull/20535) +- Adapt the release workflow to SIP-46 [#20565](https://github.com/scala/scala3/pull/20565) +- Release .zip instead of .tar.gz for windows in sdkman [#20630](https://github.com/scala/scala3/pull/20630) +- SIP 46 - read classpath from file, remove lib directory in distribution [#20631](https://github.com/scala/scala3/pull/20631) +- Bump scala-cli to 1.4.0 [#20859](https://github.com/scala/scala3/pull/20859) +- Add --skip-cli-updates by default to the scala command [#20900](https://github.com/scala/scala3/pull/20900) +- Use pathing jars in cli commands [#21121](https://github.com/scala/scala3/pull/21121) +- expand classpath of pathing jars in scala_legacy command [#21160](https://github.com/scala/scala3/pull/21160) +- emit generatedNonLocalClass in backend when callback is not enabled [#21186](https://github.com/scala/scala3/pull/21186) + +## Transform + +- Fix overloaded default methods test in RefChecks
[#20218](https://github.com/lampepfl/dotty/pull/20218) +- Fix handling of AppliedType aliases in outerPrefix [#20190](https://github.com/lampepfl/dotty/pull/20190) +- Elide unit binding when beta-reducing [#20085](https://github.com/lampepfl/dotty/pull/20085) + +## Typer + +- Reduce projections of type aliases with class type prefixes [#19931](https://github.com/lampepfl/dotty/pull/19931) +- Re-lub also hard union types in simplify [#20027](https://github.com/lampepfl/dotty/pull/20027) +- Fix #19789: Merge same TypeParamRef in orDominator [#20090](https://github.com/lampepfl/dotty/pull/20090) +- Allow SAM types to contain match alias refinements [#20092](https://github.com/lampepfl/dotty/pull/20092) +- Don't dealias when deciding which arguments to defer [#20116](https://github.com/lampepfl/dotty/pull/20116) +- Avoid the TypeVar.inst trap [#20160](https://github.com/lampepfl/dotty/pull/20160) +- Avoid crash when superType does not exist after erasure [#20188](https://github.com/lampepfl/dotty/pull/20188) +- Refine overloading and implicit disambiguation [#20084](https://github.com/lampepfl/dotty/pull/20084) +- Refactor constant folding of applications [#20099](https://github.com/lampepfl/dotty/pull/20099) +- Rollback constraints if `isSameType` failed second direction [#20109](https://github.com/lampepfl/dotty/pull/20109) +- Suppress "extension method will never be selected" for overrides [#20164](https://github.com/lampepfl/dotty/pull/20164) +- Allow SAM types to contain multiple refinements [#20172](https://github.com/lampepfl/dotty/pull/20172) +- Normalize when verifying if TypeTestCasts are unchecked [#20258](https://github.com/lampepfl/dotty/pull/20258) +- Avoid stacked thisCall contexts [#20488](https://github.com/scala/scala3/pull/20488) +- fix issue 20901: etaCollapse context bound type [#20910](https://github.com/scala/scala3/pull/20910) +- Fix symbol reference retrivial of `scala.caps.Caps` [#20493](https://github.com/scala/scala3/pull/20493) + +# 
Experimental Changes + +- Named tuples second implementation [#19174](https://github.com/lampepfl/dotty/pull/19174) +- Change rules for given prioritization [#19300](https://github.com/lampepfl/dotty/pull/19300) +- Enable experimental mode when experimental feature is imported [#19807](https://github.com/lampepfl/dotty/pull/19807) +- Add message parameter to `@experimental` annotation [#19935](https://github.com/lampepfl/dotty/pull/19935) +- Implement match type amendment: extractors follow aliases and singletons [#20161](https://github.com/lampepfl/dotty/pull/20161) +- Avoid forcing whole package when using -experimental [#20409](https://github.com/scala/scala3/pull/20409) + +## Capture Checking + +- Carry and check universal capability from parents correctly [#20004](https://github.com/lampepfl/dotty/pull/20004) +- Make parameter types of context functions inferred type trees [#20155](https://github.com/lampepfl/dotty/pull/20155) +- Handle reach capabilities correctly in depedent functions [#20203](https://github.com/lampepfl/dotty/pull/20203) +- Fix the visibility check in `markFree` [#20221](https://github.com/lampepfl/dotty/pull/20221) +- Make inline proxy vals have inferred types [#20241](https://github.com/lampepfl/dotty/pull/20241) +- CC: Give more info when context function parameters leak [#20244](https://github.com/lampepfl/dotty/pull/20244) +- Plug soundness hole for reach capabilities [#20051](https://github.com/lampepfl/dotty/pull/20051) +- Tighten the screws a bit more to seal the soundness hole for reach capabilities [#20056](https://github.com/lampepfl/dotty/pull/20056) +- Drop retains annotations in inferred type trees [#20057](https://github.com/lampepfl/dotty/pull/20057) +- Allow @retains arguments to be context functions [#20232](https://github.com/lampepfl/dotty/pull/20232) +- Fix conversion of this.fld capture refs under separate compilation [#20238](https://github.com/lampepfl/dotty/pull/20238) + +## Erased definitions + +- Fix "Compiler 
crash when using CanThrow" [#20210](https://github.com/lampepfl/dotty/pull/20210) +- Only allow erased parameters in erased definitions [#19686](https://github.com/lampepfl/dotty/pull/19686) + +## Initialization + +- Deprecate `StandardPlugin.init` in favor of `initialize` method taking implicit Context [#20330](https://github.com/lampepfl/dotty/pull/20330) +- Fix missing changesParents in PostTyper [#20062](https://github.com/lampepfl/dotty/pull/20062) +- Special case for next field of colon colon in global init checker [#20281](https://github.com/lampepfl/dotty/pull/20281) +- Extend whitelist in global initialization checker [#20290](https://github.com/lampepfl/dotty/pull/20290) + +## Macro Annotations + +- Allow macro annotation to transform companion [#19677](https://github.com/lampepfl/dotty/pull/19677) +- Remove experimental `MainAnnotation`/`newMain` (replaced with `MacroAnnotation`) [#19937](https://github.com/lampepfl/dotty/pull/19937) + +## Nullability + +- Add flexible types to deal with Java-defined signatures under -Yexplicit-nulls [#18112](https://github.com/lampepfl/dotty/pull/18112) +- Fix #20287: Add flexible types to Quotes library [#20293](https://github.com/lampepfl/dotty/pull/20293) +- Add fromNullable to Predef for explicit nulls [#20222](https://github.com/lampepfl/dotty/pull/20222) + + +# Contributors + +Thank you to all the contributors who made this release possible 🎉 + +According to `git shortlog -sn --no-merges 3.4.2..3.5.0` these are: + +``` + 153 Martin Odersky + 53 Eugene Flesselle + 41 Jamie Thompson + 29 Wojciech Mazur + 25 Nicolas Stucki + 22 Sébastien Doeraene + 18 noti0na1 + 16 Matt Bovel + 13 Guillaume Martres + 11 Paweł Marks + 10 Hamza REMMAL + 9 Yichen Xu + 8 Jan Chyb + 7 Hamza Remmal + 7 Som Snytt + 6 Jędrzej Rochala + 5 Fengyun Liu + 5 dependabot[bot] + 3 Mikołaj Fornal + 2 Aviv Keller + 2 EnzeXing + 1 Chris Pado + 1 Filip Zybała + 1 Georgi Krastev + 1 Jisoo Park + 1 Katarzyna Marek + 1 Lucas Nouguier + 1 Lucy Martin + 1 
Ola Flisbäck + 1 Pascal Weisenburger + 1 Quentin Bernet + 1 Raphael Jolly + 1 Seth Tisue + 1 Stephane Bersier + 1 Tomasz Godzik + 1 Yoonjae Jeon + 1 aherlihy + 1 rochala + 1 willerf + +``` diff --git a/community-build/community-projects/AsyncFile b/community-build/community-projects/AsyncFile index d72a5279e4b0..7bdd11657e3e 160000 --- a/community-build/community-projects/AsyncFile +++ b/community-build/community-projects/AsyncFile @@ -1 +1 @@ -Subproject commit d72a5279e4b055ad13d1c19d75939b9bd9d014a0 +Subproject commit 7bdd11657e3e840c31f27140942c8dc9e07ea7b8 diff --git a/community-build/community-projects/Equal b/community-build/community-projects/Equal index 4340d3a6b503..4f357131b4ea 160000 --- a/community-build/community-projects/Equal +++ b/community-build/community-projects/Equal @@ -1 +1 @@ -Subproject commit 4340d3a6b503aad4fbea89ee6026923127e8b1ab +Subproject commit 4f357131b4ea719ba5d09a62dd577a8aab204c4d diff --git a/community-build/community-projects/FingerTree b/community-build/community-projects/FingerTree index 0ed006549a78..eaadc6e0c660 160000 --- a/community-build/community-projects/FingerTree +++ b/community-build/community-projects/FingerTree @@ -1 +1 @@ -Subproject commit 0ed006549a78036aac0b09375e90237e72f599fe +Subproject commit eaadc6e0c660d27fc77a2cb4ab6a265a8e746f97 diff --git a/community-build/community-projects/Log b/community-build/community-projects/Log index 1839754549ed..c55ac4b7a822 160000 --- a/community-build/community-projects/Log +++ b/community-build/community-projects/Log @@ -1 +1 @@ -Subproject commit 1839754549ed690fbba2ea1b220e3f70f8d2ba91 +Subproject commit c55ac4b7a82278f896f25b291e129440d2436fb4 diff --git a/community-build/community-projects/Lucre b/community-build/community-projects/Lucre index 0def1dcb1aa6..21a27a294ac7 160000 --- a/community-build/community-projects/Lucre +++ b/community-build/community-projects/Lucre @@ -1 +1 @@ -Subproject commit 0def1dcb1aa63ba8f398428c000cb2a2f166cca4 +Subproject commit 
21a27a294ac7c413f80839d96a02942b2c6d021c diff --git a/community-build/community-projects/Model b/community-build/community-projects/Model index d797f70e9d17..fb73627f37d7 160000 --- a/community-build/community-projects/Model +++ b/community-build/community-projects/Model @@ -1 +1 @@ -Subproject commit d797f70e9d178fa6a70d6aa2d3c3324bc1c27b48 +Subproject commit fb73627f37d77d97892a4a0eebe5bd7406559366 diff --git a/community-build/community-projects/Monocle b/community-build/community-projects/Monocle index a0e70744e9b3..b303aa3b98d9 160000 --- a/community-build/community-projects/Monocle +++ b/community-build/community-projects/Monocle @@ -1 +1 @@ -Subproject commit a0e70744e9b3bfb0f12e4ea292151c49c3302cd1 +Subproject commit b303aa3b98d9a10c3f77a56765ca5be2f3cc51f7 diff --git a/community-build/community-projects/Numbers b/community-build/community-projects/Numbers index 656dfd3f7c0a..e19972adb794 160000 --- a/community-build/community-projects/Numbers +++ b/community-build/community-projects/Numbers @@ -1 +1 @@ -Subproject commit 656dfd3f7c0a541b243e2d0f5aabbd20fc8bcea6 +Subproject commit e19972adb7941871e8b1b4a76de0e0c2b4d9fbfc diff --git a/community-build/community-projects/Serial b/community-build/community-projects/Serial index c161cc36e68c..e69b44086955 160000 --- a/community-build/community-projects/Serial +++ b/community-build/community-projects/Serial @@ -1 +1 @@ -Subproject commit c161cc36e68c0d24a508fc9a52a44551c779c682 +Subproject commit e69b44086955023b8747ac10791ad10baad0c5cc diff --git a/community-build/community-projects/Span b/community-build/community-projects/Span index da4c4a9c335c..8d9b4575482e 160000 --- a/community-build/community-projects/Span +++ b/community-build/community-projects/Span @@ -1 +1 @@ -Subproject commit da4c4a9c335c114dbda829150d6476aec830cb84 +Subproject commit 8d9b4575482e103117b3fd2b016c7aaad5962789 diff --git a/community-build/community-projects/akka b/community-build/community-projects/akka index 7f5115ebc9cd..ee0ac854f36f 
160000 --- a/community-build/community-projects/akka +++ b/community-build/community-projects/akka @@ -1 +1 @@ -Subproject commit 7f5115ebc9cde408433040f11834f5218b4a3357 +Subproject commit ee0ac854f36f537bf3062fd4e9d9f2ff5c1de4c9 diff --git a/community-build/community-projects/cask b/community-build/community-projects/cask index d5fa6d47da5e..2db6020a2d11 160000 --- a/community-build/community-projects/cask +++ b/community-build/community-projects/cask @@ -1 +1 @@ -Subproject commit d5fa6d47da5ea99d94887fafd555696ba07aa205 +Subproject commit 2db6020a2d11566d504ae9af4de28c7a6e20b7ed diff --git a/community-build/community-projects/discipline b/community-build/community-projects/discipline index 09c975b18dc0..27016c356287 160000 --- a/community-build/community-projects/discipline +++ b/community-build/community-projects/discipline @@ -1 +1 @@ -Subproject commit 09c975b18dc0b4e10499fb2922abac82ea8b5252 +Subproject commit 27016c3562871c136e88cc13ffa64a02380265df diff --git a/community-build/community-projects/discipline-munit b/community-build/community-projects/discipline-munit index 4e61f1861956..975ae3efaddd 160000 --- a/community-build/community-projects/discipline-munit +++ b/community-build/community-projects/discipline-munit @@ -1 +1 @@ -Subproject commit 4e61f186195660529e7a6f7461b939477735e3f4 +Subproject commit 975ae3efadddaa558435c4c8326628618048fdad diff --git a/community-build/community-projects/discipline-specs2 b/community-build/community-projects/discipline-specs2 index e689c3e809a8..eb9427335a30 160000 --- a/community-build/community-projects/discipline-specs2 +++ b/community-build/community-projects/discipline-specs2 @@ -1 +1 @@ -Subproject commit e689c3e809a89a03cdbbb3a1771e33148715f6c7 +Subproject commit eb9427335a309d6dd1e82632298529ca6a0920fa diff --git a/community-build/community-projects/endpoints4s b/community-build/community-projects/endpoints4s index 3a667a3608ff..b004d1388872 160000 --- a/community-build/community-projects/endpoints4s +++ 
b/community-build/community-projects/endpoints4s @@ -1 +1 @@ -Subproject commit 3a667a3608ff9950c24e9b2b5038c71c1690a21d +Subproject commit b004d13888723de9f6a86f560137fc31e22edcb6 diff --git a/community-build/community-projects/fs2 b/community-build/community-projects/fs2 index 6d7c6d6924cb..e91c54621b76 160000 --- a/community-build/community-projects/fs2 +++ b/community-build/community-projects/fs2 @@ -1 +1 @@ -Subproject commit 6d7c6d6924cb055028458ac8236622190acf66d1 +Subproject commit e91c54621b762a58c942b6576c42dcd94ba0fc0a diff --git a/community-build/community-projects/izumi-reflect b/community-build/community-projects/izumi-reflect index c0756faa7311..2c7e4a69c386 160000 --- a/community-build/community-projects/izumi-reflect +++ b/community-build/community-projects/izumi-reflect @@ -1 +1 @@ -Subproject commit c0756faa7311f70c6da6af29b8cb25506634bf09 +Subproject commit 2c7e4a69c386201e479584333a84ce018fef1795 diff --git a/community-build/community-projects/os-lib b/community-build/community-projects/os-lib index a4400deb3bec..4c8c82b23d76 160000 --- a/community-build/community-projects/os-lib +++ b/community-build/community-projects/os-lib @@ -1 +1 @@ -Subproject commit a4400deb3bec415fd82d331fc1f8b749f3d64e60 +Subproject commit 4c8c82b23d767bc927290829514b8de7148052d9 diff --git a/community-build/community-projects/scala-stm b/community-build/community-projects/scala-stm index 3244edf13c41..cf204977752a 160000 --- a/community-build/community-projects/scala-stm +++ b/community-build/community-projects/scala-stm @@ -1 +1 @@ -Subproject commit 3244edf13c41f22ff8b45143186745e9eb469220 +Subproject commit cf204977752af7ec2ca3b50c43f27daa6a628f49 diff --git a/community-build/community-projects/scalatest b/community-build/community-projects/scalatest index d430625d9621..d6eeedbfc1e0 160000 --- a/community-build/community-projects/scalatest +++ b/community-build/community-projects/scalatest @@ -1 +1 @@ -Subproject commit d430625d96218c9031b1434cc0c2110f3740fa1c 
+Subproject commit d6eeedbfc1e04f2eff55506f07f93f448cc21407 diff --git a/community-build/community-projects/scalaz b/community-build/community-projects/scalaz index 97cccf3b3fcb..868749fdb951 160000 --- a/community-build/community-projects/scalaz +++ b/community-build/community-projects/scalaz @@ -1 +1 @@ -Subproject commit 97cccf3b3fcb71885a32b2e567171c0f70b06104 +Subproject commit 868749fdb951909bb04bd6dd7ad2cd89295fd439 diff --git a/community-build/community-projects/scas b/community-build/community-projects/scas index fbccb263207b..acaad1055738 160000 --- a/community-build/community-projects/scas +++ b/community-build/community-projects/scas @@ -1 +1 @@ -Subproject commit fbccb263207b3a7b735b8a9dc312acf7368a0816 +Subproject commit acaad1055738dbbcae7b18e6c6c2fc95f06eb7d6 diff --git a/community-build/community-projects/scodec b/community-build/community-projects/scodec index 96a77ecaaf91..9b0423b90de9 160000 --- a/community-build/community-projects/scodec +++ b/community-build/community-projects/scodec @@ -1 +1 @@ -Subproject commit 96a77ecaaf913f195bb4079966a2e9fb41ce214e +Subproject commit 9b0423b90de95fc968fafe4543e6b16ef9f81d08 diff --git a/community-build/community-projects/shapeless-3 b/community-build/community-projects/shapeless-3 index d27c5ba1ae51..24e86dd290eb 160000 --- a/community-build/community-projects/shapeless-3 +++ b/community-build/community-projects/shapeless-3 @@ -1 +1 @@ -Subproject commit d27c5ba1ae5111b85df2cfb65a26b9246c52570c +Subproject commit 24e86dd290eba9b27599936a7f338fac975f833d diff --git a/community-build/community-projects/spire b/community-build/community-projects/spire index bc524eeea735..d60fe2c38848 160000 --- a/community-build/community-projects/spire +++ b/community-build/community-projects/spire @@ -1 +1 @@ -Subproject commit bc524eeea735a3cf4d5108039f95950b024a14e4 +Subproject commit d60fe2c38848ef193031c18eab3a14d3306b3761 diff --git a/community-build/community-projects/stdLib213 
b/community-build/community-projects/stdLib213 index 6243e902928c..fcc67cd56c67 160000 --- a/community-build/community-projects/stdLib213 +++ b/community-build/community-projects/stdLib213 @@ -1 +1 @@ -Subproject commit 6243e902928c344fb0e82e21120bb257f08a2af2 +Subproject commit fcc67cd56c67851bf31019ec25ccb09d08b9561b diff --git a/community-build/community-projects/upickle b/community-build/community-projects/upickle index aa3bc0e43ec7..0c09bbcabc66 160000 --- a/community-build/community-projects/upickle +++ b/community-build/community-projects/upickle @@ -1 +1 @@ -Subproject commit aa3bc0e43ec7b618eb087753878f3d845e58277a +Subproject commit 0c09bbcabc664abf98462022fc9036a366135e70 diff --git a/community-build/community-projects/utest b/community-build/community-projects/utest index eae17c7a4d0d..f4a9789e2750 160000 --- a/community-build/community-projects/utest +++ b/community-build/community-projects/utest @@ -1 +1 @@ -Subproject commit eae17c7a4d0d63bab1406ca75791d3cb6394233d +Subproject commit f4a9789e2750523feee4a3477efb42eb15424fc7 diff --git a/community-build/community-projects/verify b/community-build/community-projects/verify index ae37d7e153fc..f82bb3f52623 160000 --- a/community-build/community-projects/verify +++ b/community-build/community-projects/verify @@ -1 +1 @@ -Subproject commit ae37d7e153fc62d64c40a72c45f810511aef2e01 +Subproject commit f82bb3f52623e44f02b4b43f8bdf27f4f0a7d3d4 diff --git a/community-build/src/scala/dotty/communitybuild/projects.scala b/community-build/src/scala/dotty/communitybuild/projects.scala index ae030dc66336..a0444505801a 100644 --- a/community-build/src/scala/dotty/communitybuild/projects.scala +++ b/community-build/src/scala/dotty/communitybuild/projects.scala @@ -149,7 +149,7 @@ final case class SbtCommunityProject( object SbtCommunityProject: def scalacOptions = List( "-Xcheck-macros", - "-Ysafe-init", + "-Wsafe-init", ) object projects: @@ -362,7 +362,7 @@ object projects: project = "shapeless-3", sbtTestCommand = 
"testJVM; testJS", sbtDocCommand = forceDoc("typeable", "deriving"), - scalacOptions = "-source" :: "3.3" :: SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init"), // due to -Xfatal-warnings + scalacOptions = "-source" :: "3.3" :: SbtCommunityProject.scalacOptions.filter(_ != "-Wsafe-init"), // due to -Xfatal-warnings ) lazy val xmlInterpolator = SbtCommunityProject( @@ -429,7 +429,7 @@ object projects: sbtTestCommand = "unitTests/test", // Adds package sbtDocCommand = "coreJVM/doc", - scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init"), + scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Wsafe-init"), dependencies = List(munit, scodecBits), ) @@ -510,7 +510,7 @@ object projects: project = "discipline", sbtTestCommand = "coreJVM/test;coreJS/test", sbtPublishCommand = "set every credentials := Nil;coreJVM/publishLocal;coreJS/publishLocal", - scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init"), + scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Wsafe-init"), dependencies = List(scalacheck) ) @@ -526,7 +526,7 @@ object projects: sbtTestCommand = "test", sbtPublishCommand = "coreJVM/publishLocal;coreJS/publishLocal", dependencies = List(discipline), - scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init") + scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Wsafe-init") ) lazy val simulacrumScalafixAnnotations = SbtCommunityProject( @@ -540,8 +540,7 @@ object projects: sbtTestCommand = "set Global/scalaJSStage := FastOptStage;rootJVM/test;rootJS/test", sbtPublishCommand = "rootJVM/publishLocal;rootJS/publishLocal", dependencies = List(discipline, disciplineMunit, scalacheck, simulacrumScalafixAnnotations), - scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init") // disable -Ysafe-init, due to -Xfatal-warning - + scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Wsafe-init") // disable -Ysafe-init or -Wsafe-init, 
due to -Xfatal-warning ) lazy val catsMtl = SbtCommunityProject( @@ -605,7 +604,7 @@ object projects: project = "AsyncFile", sbtTestCommand = "rootJVM/test", sbtPublishCommand = "rootJVM/publishLocal", - dependencies = List(scissLog, scalatest), + dependencies = List(scissLog, scissModel, scalatest), ) lazy val scissSpan = SbtCommunityProject( @@ -656,7 +655,7 @@ object projects: """set actorTests/Compile/scalacOptions -= "-Xfatal-warnings"""", "akka-actor-tests/Test/compile", ).mkString("; "), - scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init"), + scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Wsafe-init"), dependencies = List(scalatest, scalatestplusJunit, scalatestplusScalacheck) ) @@ -707,7 +706,7 @@ object projects: project = "fs2", sbtTestCommand = "coreJVM/test; coreJS/test", // io/test requires JDK9+ sbtPublishCommand = "coreJVM/publishLocal; coreJS/publishLocal", - scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init"), + scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Wsafe-init"), dependencies = List(cats, catsEffect3, munitCatsEffect, scalacheckEffect, scodecBits) ) @@ -744,7 +743,7 @@ object projects: project = "http4s", sbtTestCommand = """set ThisBuild / tlFatalWarnings := false; rootJVM/test""", sbtPublishCommand = "publishLocal", - scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Ysafe-init"), + scalacOptions = SbtCommunityProject.scalacOptions.filter(_ != "-Wsafe-init"), dependencies = List(cats, catsEffect3, fs2, disciplineMunit, scalacheckEffect) ) diff --git a/compiler/src/dotty/tools/MainGenericRunner.scala b/compiler/src/dotty/tools/MainGenericRunner.scala index 1540cc86d7a6..bf477f019cba 100644 --- a/compiler/src/dotty/tools/MainGenericRunner.scala +++ b/compiler/src/dotty/tools/MainGenericRunner.scala @@ -266,6 +266,22 @@ object MainGenericRunner { run(settings.withExecuteMode(ExecuteMode.Run)) else 
run(settings.withExecuteMode(ExecuteMode.Repl)) + end run + + val ranByCoursierBootstrap = + sys.props.isDefinedAt("coursier.mainJar") + || sys.props.get("bootstrap.mainClass").contains("dotty.tools.MainGenericRunner") + + val silenced = sys.props.get("scala.use_legacy_launcher") == Some("true") + + if !silenced then + Console.err.println(s"[warning] MainGenericRunner class is deprecated since Scala 3.5.0, and Scala CLI features will not work.") + Console.err.println(s"[warning] Please be sure to update to the Scala CLI launcher to use the new features.") + if ranByCoursierBootstrap then + Console.err.println(s"[warning] It appears that your Coursier-based Scala installation is misconfigured.") + Console.err.println(s"[warning] To update to the new Scala CLI runner, please update (coursier, cs) commands first before re-installing scala.") + Console.err.println(s"[warning] Check the Scala 3.5.0 release notes to troubleshoot your installation.") + run(settings) match case Some(ex: (StringDriverException | ScriptingException)) => errorFn(ex.getMessage) diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala index 385521e2785f..f8866f40d9d4 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala @@ -823,7 +823,7 @@ trait BCodeHelpers extends BCodeIdiomatic { // without it. This is particularly bad because the availability of // generic information could disappear as a consequence of a seemingly // unrelated change. 
- ctx.base.settings.YnoGenericSig.value + ctx.base.settings.XnoGenericSig.value || sym.is(Artifact) || sym.isAllOf(LiftedMethod) || sym.is(Bridge) diff --git a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala index ec251b4aa3f0..44498082c697 100644 --- a/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala +++ b/compiler/src/dotty/tools/backend/jvm/ClassfileWriters.scala @@ -20,7 +20,11 @@ import dotty.tools.io.JarArchive import scala.language.unsafeNulls - +/** !!! This file is now copied in `dotty.tools.io.FileWriters` in a more general way that does not rely upon + * `PostProcessorFrontendAccess`, this should probably be changed to wrap that class instead. + * + * Until then, any changes to this file should be copied to `dotty.tools.io.FileWriters` as well. + */ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) { type NullableFile = AbstractFile | Null import frontendAccess.{compilerSettings, backendReporting} diff --git a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala index b48df60d4c1a..c5b0ec0929b8 100644 --- a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala +++ b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala @@ -125,8 +125,8 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( // Creates a callback that will be evaluated in PostProcessor after creating a file private def onFileCreated(cls: ClassNode, claszSymbol: Symbol, sourceFile: util.SourceFile)(using Context): AbstractFile => Unit = { - val (fullClassName, isLocal) = atPhase(sbtExtractDependenciesPhase) { - (ExtractDependencies.classNameAsString(claszSymbol), claszSymbol.isLocal) + val isLocal = atPhase(sbtExtractDependenciesPhase) { + claszSymbol.isLocal } clsFile => { val className = cls.name.replace('/', '.') @@ -134,8 +134,14 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( 
ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) ctx.withIncCallback: cb => - if (isLocal) cb.generatedLocalClass(sourceFile, clsFile.jpath) - else cb.generatedNonLocalClass(sourceFile, clsFile.jpath, className, fullClassName) + if isLocal then + cb.generatedLocalClass(sourceFile, clsFile.jpath) + else if !cb.enabled() then + // callback is not enabled, so nonLocalClasses were not reported in ExtractAPI + val fullClassName = atPhase(sbtExtractDependenciesPhase) { + ExtractDependencies.classNameAsString(claszSymbol) + } + cb.generatedNonLocalClass(sourceFile, clsFile.jpath, className, fullClassName) } } diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index 8d467529d60e..a616241d9a3e 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -10,6 +10,10 @@ import Symbols.* import dotty.tools.io.* import scala.collection.mutable import scala.compiletime.uninitialized +import java.util.concurrent.TimeoutException + +import scala.concurrent.duration.Duration +import scala.concurrent.Await class GenBCode extends Phase { self => @@ -17,6 +21,8 @@ class GenBCode extends Phase { self => override def description: String = GenBCode.description + override def isRunnable(using Context) = super.isRunnable && !ctx.usedBestEffortTasty + private val superCallsMap = new MutableSymbolMap[Set[ClassSymbol]] def registerSuperCall(sym: Symbol, calls: ClassSymbol): Unit = { val old = superCallsMap.getOrElse(sym, Set.empty) @@ -90,6 +96,15 @@ class GenBCode extends Phase { self => try val result = super.runOn(units) generatedClassHandler.complete() + try + for + async <- ctx.run.nn.asyncTasty + bufferedReporter <- async.sync() + do + bufferedReporter.relayReports(frontendAccess.backendReporting) + catch + case ex: Exception => + report.error(s"exception from future: $ex, (${Option(ex.getCause())})") result 
finally // frontendAccess and postProcessor are created lazilly, clean them up only if they were initialized diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala index f7955cbb350c..e1b2120fa848 100644 --- a/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala +++ b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala @@ -121,12 +121,14 @@ object PostProcessorFrontendAccess { case (None, None) => "8" // least supported version by default override val debug: Boolean = ctx.debug - override val dumpClassesDirectory: Option[String] = s.Ydumpclasses.valueSetByUser + override val dumpClassesDirectory: Option[String] = s.Xdumpclasses.valueSetByUser override val outputDirectory: AbstractFile = s.outputDir.value override val mainClass: Option[String] = s.XmainClass.valueSetByUser - override val jarCompressionLevel: Int = s.YjarCompressionLevel.value + override val jarCompressionLevel: Int = s.XjarCompressionLevel.value override val backendParallelism: Int = s.YbackendParallelism.value override val backendMaxWorkerQueue: Option[Int] = s.YbackendWorkerQueue.valueSetByUser + + @annotation.nowarn("cat=deprecation") override val outputOnlyTasty: Boolean = s.YoutputOnlyTasty.value } diff --git a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala index 2c5a6639dc8b..fbb9042affe7 100644 --- a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala +++ b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala @@ -12,7 +12,7 @@ class GenSJSIR extends Phase { override def description: String = GenSJSIR.description override def isRunnable(using Context): Boolean = - super.isRunnable && ctx.settings.scalajs.value + super.isRunnable && ctx.settings.scalajs.value && !ctx.usedBestEffortTasty def run(using Context): Unit = new JSCodeGen().run() diff --git 
a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala index 5b0d37d28a9b..6e2449b5c299 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala @@ -31,6 +31,7 @@ import org.scalajs.ir.Names.{ClassName, MethodName, SimpleMethodName} import org.scalajs.ir.OriginalName import org.scalajs.ir.OriginalName.NoOriginalName import org.scalajs.ir.Trees.OptimizerHints +import org.scalajs.ir.Version.Unversioned import dotty.tools.dotc.transform.sjs.JSSymUtils.* @@ -354,7 +355,8 @@ class JSCodeGen()(using genCtx: Context) { // Generate members (constructor + methods) - val generatedNonFieldMembers = new mutable.ListBuffer[js.MemberDef] + val methodsBuilder = List.newBuilder[js.MethodDef] + val jsNativeMembersBuilder = List.newBuilder[js.JSNativeMemberDef] val tpl = td.rhs.asInstanceOf[Template] for (tree <- tpl.constr :: tpl.body) { @@ -365,23 +367,25 @@ class JSCodeGen()(using genCtx: Context) { // fields are added via genClassFields(), but we need to generate the JS native members val sym = vd.symbol if (!sym.is(Module) && sym.hasAnnotation(jsdefn.JSNativeAnnot)) - generatedNonFieldMembers += genJSNativeMemberDef(vd) + jsNativeMembersBuilder += genJSNativeMemberDef(vd) case dd: DefDef => val sym = dd.symbol if sym.hasAnnotation(jsdefn.JSNativeAnnot) then if !sym.is(Accessor) then - generatedNonFieldMembers += genJSNativeMemberDef(dd) + jsNativeMembersBuilder += genJSNativeMemberDef(dd) else - generatedNonFieldMembers ++= genMethod(dd) + methodsBuilder ++= genMethod(dd) case _ => throw new FatalError("Illegal tree in body of genScalaClass(): " + tree) } } - // Generate fields and add to methods + ctors - val generatedMembers = genClassFields(td) ++ generatedNonFieldMembers.toList + val (fields, staticGetterDefs) = if (!isHijacked) genClassFields(td) else (Nil, Nil) + + val jsNativeMembers = jsNativeMembersBuilder.result() + val generatedMethods = 
methodsBuilder.result() ::: staticGetterDefs // Generate member exports val memberExports = jsExportsGen.genMemberExports(sym) @@ -422,12 +426,12 @@ class JSCodeGen()(using genCtx: Context) { if (isDynamicImportThunk) List(genDynamicImportForwarder(sym)) else Nil - val allMemberDefsExceptStaticForwarders = - generatedMembers ::: memberExports ::: optStaticInitializer ::: optDynamicImportForwarder + val allMethodsExceptStaticForwarders: List[js.MethodDef] = + generatedMethods ::: optStaticInitializer ::: optDynamicImportForwarder // Add static forwarders - val allMemberDefs = if (!isCandidateForForwarders(sym)) { - allMemberDefsExceptStaticForwarders + val allMethods = if (!isCandidateForForwarders(sym)) { + allMethodsExceptStaticForwarders } else { if (isStaticModule(sym)) { /* If the module class has no linked class, we must create one to @@ -446,23 +450,24 @@ class JSCodeGen()(using genCtx: Context) { Nil, None, None, - forwarders, - Nil + fields = Nil, + methods = forwarders, + jsConstructor = None, + jsMethodProps = Nil, + jsNativeMembers = Nil, + topLevelExportDefs = Nil )(js.OptimizerHints.empty) generatedStaticForwarderClasses += sym -> forwardersClassDef } } - allMemberDefsExceptStaticForwarders + allMethodsExceptStaticForwarders } else { val forwarders = genStaticForwardersForClassOrInterface( - allMemberDefsExceptStaticForwarders, sym) - allMemberDefsExceptStaticForwarders ::: forwarders + allMethodsExceptStaticForwarders, sym) + allMethodsExceptStaticForwarders ::: forwarders } } - // Hashed definitions of the class - val hashedDefs = ir.Hashers.hashMemberDefs(allMemberDefs) - // The complete class definition val kind = if (isStaticModule(sym)) ClassKind.ModuleClass @@ -478,11 +483,15 @@ class JSCodeGen()(using genCtx: Context) { genClassInterfaces(sym, forJSClass = false), None, None, - hashedDefs, + fields, + allMethods, + jsConstructor = None, + memberExports, + jsNativeMembers, topLevelExportDefs)( optimizerHints) - classDefinition + 
ir.Hashers.hashClassDef(classDefinition) } /** Gen the IR ClassDef for a Scala.js-defined JS class. */ @@ -546,22 +555,22 @@ class JSCodeGen()(using genCtx: Context) { } // Static members (exported from the companion object) - val staticMembers = { + val (staticFields, staticExports) = { val module = sym.companionModule if (!module.exists) { - Nil + (Nil, Nil) } else { val companionModuleClass = module.moduleClass - val exports = withScopedVars(currentClassSym := companionModuleClass) { + val (staticFields, staticExports) = withScopedVars(currentClassSym := companionModuleClass) { jsExportsGen.genStaticExports(companionModuleClass) } - if (exports.exists(_.isInstanceOf[js.JSFieldDef])) { - val classInitializer = + + if (staticFields.nonEmpty) { + generatedMethods += genStaticConstructorWithStats(ir.Names.ClassInitializerName, genLoadModule(companionModuleClass)) - exports :+ classInitializer - } else { - exports } + + (staticFields, staticExports) } } @@ -587,17 +596,12 @@ class JSCodeGen()(using genCtx: Context) { (ctor, jsClassCaptures) } - // Generate fields (and add to methods + ctors) - val generatedMembers = { - genClassFields(td) ::: - generatedConstructor :: - jsExportsGen.genJSClassDispatchers(sym, dispatchMethodNames.result().distinct) ::: - generatedMethods.toList ::: - staticMembers - } + // Generate fields + val (fields, staticGetterDefs) = genClassFields(td) - // Hashed definitions of the class - val hashedMemberDefs = ir.Hashers.hashMemberDefs(generatedMembers) + val methods = generatedMethods.toList ::: staticGetterDefs + val jsMethodProps = + jsExportsGen.genJSClassDispatchers(sym, dispatchMethodNames.result().distinct) ::: staticExports // The complete class definition val kind = @@ -613,11 +617,15 @@ class JSCodeGen()(using genCtx: Context) { genClassInterfaces(sym, forJSClass = true), jsSuperClass = jsClassCaptures.map(_.head.ref), None, - hashedMemberDefs, + fields ::: staticFields, + methods, + Some(generatedConstructor), + jsMethodProps, + 
jsNativeMembers = Nil, topLevelExports)( OptimizerHints.empty) - classDefinition + ir.Hashers.hashClassDef(classDefinition) } /** Gen the IR ClassDef for a raw JS class or trait. @@ -647,6 +655,10 @@ class JSCodeGen()(using genCtx: Context) { None, jsNativeLoadSpec, Nil, + Nil, + None, + Nil, + Nil, Nil)( OptimizerHints.empty) } @@ -681,10 +693,7 @@ class JSCodeGen()(using genCtx: Context) { if (!isCandidateForForwarders(sym)) genMethodsList else genMethodsList ::: genStaticForwardersForClassOrInterface(genMethodsList, sym) - // Hashed definitions of the interface - val hashedDefs = ir.Hashers.hashMemberDefs(allMemberDefs) - - js.ClassDef( + val classDef = js.ClassDef( classIdent, originalNameOfClass(sym), ClassKind.Interface, @@ -693,9 +702,15 @@ class JSCodeGen()(using genCtx: Context) { superInterfaces, None, None, - hashedDefs, + Nil, + allMemberDefs, + None, + Nil, + Nil, Nil)( OptimizerHints.empty) + + ir.Hashers.hashClassDef(classDef) } private def genClassInterfaces(sym: ClassSymbol, forJSClass: Boolean)( @@ -763,15 +778,15 @@ class JSCodeGen()(using genCtx: Context) { * Precondition: `isCandidateForForwarders(sym)` is true */ def genStaticForwardersForClassOrInterface( - existingMembers: List[js.MemberDef], sym: Symbol)( - implicit pos: SourcePosition): List[js.MemberDef] = { + existingMethods: List[js.MethodDef], sym: Symbol)( + implicit pos: SourcePosition): List[js.MethodDef] = { val module = sym.companionModule if (!module.exists) { Nil } else { val moduleClass = module.moduleClass if (!moduleClass.isJSType) - genStaticForwardersFromModuleClass(existingMembers, moduleClass) + genStaticForwardersFromModuleClass(existingMethods, moduleClass) else Nil } @@ -781,13 +796,13 @@ class JSCodeGen()(using genCtx: Context) { * * Precondition: `isCandidateForForwarders(moduleClass)` is true */ - def genStaticForwardersFromModuleClass(existingMembers: List[js.MemberDef], + def genStaticForwardersFromModuleClass(existingMethods: List[js.MethodDef], moduleClass: 
Symbol)( - implicit pos: SourcePosition): List[js.MemberDef] = { + implicit pos: SourcePosition): List[js.MethodDef] = { assert(moduleClass.is(ModuleClass), moduleClass) - val existingPublicStaticMethodNames = existingMembers.collect { + val existingPublicStaticMethodNames = existingMethods.collect { case js.MethodDef(flags, name, _, _, _, _) if flags.namespace == js.MemberNamespace.PublicStatic => name.name @@ -849,7 +864,7 @@ class JSCodeGen()(using genCtx: Context) { js.MethodDef(flags, methodIdent, originalName, jsParams, resultType, Some { genApplyMethod(genLoadModule(moduleClass), m, jsParams.map(_.ref)) - })(OptimizerHints.empty, None) + })(OptimizerHints.empty, Unversioned) } } @@ -859,20 +874,23 @@ class JSCodeGen()(using genCtx: Context) { // Generate the fields of a class ------------------------------------------ /** Gen definitions for the fields of a class. */ - private def genClassFields(td: TypeDef): List[js.MemberDef] = { + private def genClassFields(td: TypeDef): (List[js.AnyFieldDef], List[js.MethodDef]) = { val classSym = td.symbol.asClass assert(currentClassSym.get == classSym, "genClassFields called with a ClassDef other than the current one") val isJSClass = classSym.isNonNativeJSClass + val fieldDefs = List.newBuilder[js.AnyFieldDef] + val staticGetterDefs = List.newBuilder[js.MethodDef] + // Term members that are neither methods nor modules are fields classSym.info.decls.filter { f => !f.isOneOf(MethodOrModule) && f.isTerm && !f.hasAnnotation(jsdefn.JSNativeAnnot) && !f.hasAnnotation(jsdefn.JSOptionalAnnot) && !f.hasAnnotation(jsdefn.JSExportStaticAnnot) - }.flatMap({ f => + }.foreach { f => implicit val pos = f.span val isTopLevelExport = f.hasAnnotation(jsdefn.JSExportTopLevelAnnot) @@ -897,28 +915,27 @@ class JSCodeGen()(using genCtx: Context) { else irTpe0 if (isJSClass && f.isJSExposed) - js.JSFieldDef(flags, genExpr(f.jsName)(f.sourcePos), irTpe) :: Nil + fieldDefs += js.JSFieldDef(flags, genExpr(f.jsName)(f.sourcePos), irTpe) else 
val fieldIdent = encodeFieldSym(f) val originalName = originalNameOfField(f) - val fieldDef = js.FieldDef(flags, fieldIdent, originalName, irTpe) - val optionalStaticFieldGetter = - if isJavaStatic then - // Here we are generating a public static getter for the static field, - // this is its API for other units. This is necessary for singleton - // enum values, which are backed by static fields. - val className = encodeClassName(classSym) - val body = js.Block( - js.LoadModule(className), - js.SelectStatic(className, fieldIdent)(irTpe)) - js.MethodDef(js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic), - encodeStaticMemberSym(f), originalName, Nil, irTpe, - Some(body))( - OptimizerHints.empty, None) :: Nil - else - Nil - fieldDef :: optionalStaticFieldGetter - }).toList + fieldDefs += js.FieldDef(flags, fieldIdent, originalName, irTpe) + if isJavaStatic then + // Here we are generating a public static getter for the static field, + // this is its API for other units. This is necessary for singleton + // enum values, which are backed by static fields. 
+ val className = encodeClassName(classSym) + val body = js.Block( + js.LoadModule(className), + js.SelectStatic(fieldIdent)(irTpe)) + staticGetterDefs += js.MethodDef( + js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic), + encodeStaticMemberSym(f), originalName, Nil, irTpe, + Some(body))( + OptimizerHints.empty, Unversioned) + } + + (fieldDefs.result(), staticGetterDefs.result()) } def genExposedFieldIRType(f: Symbol): jstpe.Type = { @@ -956,7 +973,7 @@ class JSCodeGen()(using genCtx: Context) { Nil, jstpe.NoType, Some(stats))( - OptimizerHints.empty, None) + OptimizerHints.empty, Unversioned) } private def genRegisterReflectiveInstantiation(sym: Symbol)( @@ -1122,49 +1139,79 @@ class JSCodeGen()(using genCtx: Context) { val constructorDef = js.JSConstructorDef( js.MemberFlags.empty.withNamespace(js.MemberNamespace.Constructor), - formalArgs, restParam, constructorBody)(OptimizerHints.empty, None) + formalArgs, restParam, constructorBody)(OptimizerHints.empty, Unversioned) (jsClassCaptures, constructorDef) } private def genPrimaryJSClassCtor(dd: DefDef): PrimaryJSCtor = { val sym = dd.symbol - val Block(stats, _) = dd.rhs: @unchecked assert(sym.isPrimaryConstructor, s"called with non-primary ctor: $sym") + var preSuperStats = List.newBuilder[js.Tree] var jsSuperCall: Option[js.JSSuperConstructorCall] = None - val jsStats = List.newBuilder[js.Tree] + val postSuperStats = List.newBuilder[js.Tree] - /* Move all statements after the super constructor call since JS - * cannot access `this` before the super constructor call. + /* Move param accessor initializers after the super constructor call since + * JS cannot access `this` before the super constructor call. * * dotc inserts statements before the super constructor call for param * accessor initializers (including val's and var's declared in the - * params). We move those after the super constructor call, and are - * therefore executed later than for a Scala class. + * params). 
Those statements are assignments whose rhs'es are always simple + * Idents (the constructor params). + * + * There can also be local `val`s before the super constructor call for + * default arguments to the super constructor. These must remain before. + * + * Our strategy is therefore to move only the field assignments after the + * super constructor call. They are therefore executed later than for a + * Scala class (as specified for non-native JS classes semantics). + * However, side effects and evaluation order of all the other + * computations remains unchanged. */ withPerMethodBodyState(sym) { - stats.foreach { - case tree @ Apply(fun @ Select(Super(This(_), _), _), args) - if fun.symbol.isClassConstructor => - assert(jsSuperCall.isEmpty, s"Found 2 JS Super calls at ${dd.sourcePos}") - implicit val pos: Position = tree.span - jsSuperCall = Some(js.JSSuperConstructorCall(genActualJSArgs(fun.symbol, args))) + def isThisField(tree: Tree): Boolean = tree match { + case Select(ths: This, _) => ths.symbol == currentClassSym.get + case tree: Ident => desugarIdent(tree).exists(isThisField(_)) + case _ => false + } - case stat => - val jsStat = genStat(stat) - assert(jsSuperCall.isDefined || !jsStat.isInstanceOf[js.VarDef], - "Trying to move a local VarDef after the super constructor call of a non-native JS class at " + - dd.sourcePos) - jsStats += jsStat + def rec(tree: Tree): Unit = { + tree match { + case Block(stats, expr) => + stats.foreach(rec(_)) + rec(expr) + + case tree @ Apply(fun @ Select(Super(This(_), _), _), args) + if fun.symbol.isClassConstructor => + assert(jsSuperCall.isEmpty, s"Found 2 JS Super calls at ${dd.sourcePos}") + implicit val pos: Position = tree.span + jsSuperCall = Some(js.JSSuperConstructorCall(genActualJSArgs(fun.symbol, args))) + + case tree if jsSuperCall.isDefined => + // Once we're past the super constructor call, everything goes after. 
+ postSuperStats += genStat(tree) + + case Assign(lhs, Ident(_)) if isThisField(lhs) => + /* If that shape appears before the jsSuperCall, it is a param + * accessor initializer. We move it. + */ + postSuperStats += genStat(tree) + + case stat => + // Other statements are left before. + preSuperStats += genStat(stat) + } } + + rec(dd.rhs) } assert(jsSuperCall.isDefined, s"Did not find Super call in primary JS construtor at ${dd.sourcePos}") new PrimaryJSCtor(sym, genParamsAndInfo(sym, dd.paramss), - js.JSConstructorBody(Nil, jsSuperCall.get, jsStats.result())(dd.span)) + js.JSConstructorBody(preSuperStats.result(), jsSuperCall.get, postSuperStats.result())(dd.span)) } private def genSecondaryJSClassCtor(dd: DefDef): SplitSecondaryJSCtor = { @@ -1504,7 +1551,7 @@ class JSCodeGen()(using genCtx: Context) { } else if (sym.is(Deferred)) { Some(js.MethodDef(js.MemberFlags.empty, methodName, originalName, jsParams, toIRType(patchedResultType(sym)), None)( - OptimizerHints.empty, None)) + OptimizerHints.empty, Unversioned)) } else if (isIgnorableDefaultParam) { // #11592 None @@ -1545,7 +1592,7 @@ class JSCodeGen()(using genCtx: Context) { val namespace = js.MemberNamespace.Constructor js.MethodDef(js.MemberFlags.empty.withNamespace(namespace), methodName, originalName, jsParams, jstpe.NoType, Some(genStat(rhs)))( - optimizerHints, None) + optimizerHints, Unversioned) } else { val namespace = if (isMethodStaticInIR(sym)) { if (sym.isPrivate) js.MemberNamespace.PrivateStatic @@ -1590,7 +1637,7 @@ class JSCodeGen()(using genCtx: Context) { if (namespace.isStatic || !currentClassSym.isNonNativeJSClass) { val flags = js.MemberFlags.empty.withNamespace(namespace) js.MethodDef(flags, methodName, originalName, jsParams, resultIRType, Some(genBody()))( - optimizerHints, None) + optimizerHints, Unversioned) } else { val thisLocalIdent = freshLocalIdent("this") withScopedVars( @@ -1606,7 +1653,7 @@ class JSCodeGen()(using genCtx: Context) { js.MethodDef(flags, methodName, 
originalName, thisParamDef :: jsParams, resultIRType, Some(genBody()))( - optimizerHints, None) + optimizerHints, Unversioned) } } } @@ -2196,10 +2243,7 @@ class JSCodeGen()(using genCtx: Context) { if (isStaticModule(currentClassSym) && !isModuleInitialized.get.value && currentMethodSym.get.isClassConstructor) { isModuleInitialized.get.value = true - val className = encodeClassName(currentClassSym) - val thisType = jstpe.ClassType(className) - val initModule = js.StoreModule(className, js.This()(thisType)) - js.Block(superCall, initModule) + js.Block(superCall, js.StoreModule()) } else { superCall } @@ -2323,37 +2367,19 @@ class JSCodeGen()(using genCtx: Context) { // Partition class members. val privateFieldDefs = mutable.ListBuffer.empty[js.FieldDef] - val classDefMembers = mutable.ListBuffer.empty[js.MemberDef] - val instanceMembers = mutable.ListBuffer.empty[js.MemberDef] - var constructor: Option[js.JSConstructorDef] = None + val jsFieldDefs = mutable.ListBuffer.empty[js.JSFieldDef] - originalClassDef.memberDefs.foreach { + originalClassDef.fields.foreach { case fdef: js.FieldDef => privateFieldDefs += fdef case fdef: js.JSFieldDef => - instanceMembers += fdef - - case mdef: js.MethodDef => - assert(mdef.flags.namespace.isStatic, - "Non-static, unexported method in non-native JS class") - classDefMembers += mdef - - case cdef: js.JSConstructorDef => - assert(constructor.isEmpty, "two ctors in class") - constructor = Some(cdef) - - case mdef: js.JSMethodDef => - assert(!mdef.flags.namespace.isStatic, "Exported static method") - instanceMembers += mdef - - case property: js.JSPropertyDef => - instanceMembers += property - - case nativeMemberDef: js.JSNativeMemberDef => - throw new FatalError("illegal native JS member in JS class at " + nativeMemberDef.pos) + jsFieldDefs += fdef } + assert(originalClassDef.jsNativeMembers.isEmpty, + "Found JS native members in anonymous JS class at " + pos) + assert(originalClassDef.topLevelExportDefs.isEmpty, "Found top-level 
exports in anonymous JS class at " + pos) @@ -2363,8 +2389,9 @@ class JSCodeGen()(using genCtx: Context) { val parent = js.ClassIdent(jsNames.ObjectClass) js.ClassDef(originalClassDef.name, originalClassDef.originalName, ClassKind.AbstractJSType, None, Some(parent), interfaces = Nil, - jsSuperClass = None, jsNativeLoadSpec = None, - classDefMembers.toList, Nil)( + jsSuperClass = None, jsNativeLoadSpec = None, fields = Nil, + methods = originalClassDef.methods, jsConstructor = None, + jsMethodProps = Nil, jsNativeMembers = Nil, topLevelExportDefs = Nil)( originalClassDef.optimizerHints) } @@ -2375,7 +2402,7 @@ class JSCodeGen()(using genCtx: Context) { val jsClassCaptures = originalClassDef.jsClassCaptures.getOrElse { throw new AssertionError(s"no class captures for anonymous JS class at $pos") } - val js.JSConstructorDef(_, ctorParams, ctorRestParam, ctorBody) = constructor.getOrElse { + val js.JSConstructorDef(_, ctorParams, ctorRestParam, ctorBody) = originalClassDef.jsConstructor.getOrElse { throw new AssertionError("No ctor found") } assert(ctorParams.isEmpty && ctorRestParam.isEmpty, @@ -2399,20 +2426,12 @@ class JSCodeGen()(using genCtx: Context) { def memberLambda(params: List[js.ParamDef], restParam: Option[js.ParamDef], body: js.Tree)(implicit pos: ir.Position): js.Closure = js.Closure(arrow = false, captureParams = Nil, params, restParam, body, captureValues = Nil) - val memberDefinitions0 = instanceMembers.toList.map { - case fdef: js.FieldDef => - throw new AssertionError("unexpected FieldDef") - - case fdef: js.JSFieldDef => - implicit val pos = fdef.pos - js.Assign(js.JSSelect(selfRef, fdef.name), jstpe.zeroOf(fdef.ftpe)) - - case mdef: js.MethodDef => - throw new AssertionError("unexpected MethodDef") - - case cdef: js.JSConstructorDef => - throw new AssertionError("unexpected JSConstructorDef") + val fieldDefinitions = jsFieldDefs.toList.map { fdef => + implicit val pos = fdef.pos + js.Assign(js.JSSelect(selfRef, fdef.name), jstpe.zeroOf(fdef.ftpe)) 
+ } + val memberDefinitions0 = originalClassDef.jsMethodProps.toList.map { case mdef: js.JSMethodDef => implicit val pos = mdef.pos val impl = memberLambda(mdef.args, mdef.restParam, mdef.body) @@ -2434,13 +2453,12 @@ class JSCodeGen()(using genCtx: Context) { js.JSMethodApply(js.JSGlobalRef("Object"), js.StringLiteral("defineProperty"), List(selfRef, pdef.name, descriptor)) - - case nativeMemberDef: js.JSNativeMemberDef => - throw new FatalError("illegal native JS member in JS class at " + nativeMemberDef.pos) } + val memberDefinitions1 = fieldDefinitions ::: memberDefinitions0 + val memberDefinitions = if (privateFieldDefs.isEmpty) { - memberDefinitions0 + memberDefinitions1 } else { /* Private fields, declared in FieldDefs, are stored in a separate * object, itself stored as a non-enumerable field of the `selfRef`. @@ -2481,7 +2499,7 @@ class JSCodeGen()(using genCtx: Context) { ) ) } - definePrivateFieldsObj :: memberDefinitions0 + definePrivateFieldsObj :: memberDefinitions1 } // Transform the constructor body. 
@@ -3581,7 +3599,7 @@ class JSCodeGen()(using genCtx: Context) { NoOriginalName, paramDefs, jstpe.AnyType, - Some(body))(OptimizerHints.empty, None) + Some(body))(OptimizerHints.empty, Unversioned) } } @@ -4442,13 +4460,12 @@ class JSCodeGen()(using genCtx: Context) { js.JSSelect(qual, genPrivateFieldsSymbol()), encodeFieldSymAsStringLiteral(sym)) } else { - js.JSPrivateSelect(qual, encodeClassName(sym.owner), - encodeFieldSym(sym)) + js.JSPrivateSelect(qual, encodeFieldSym(sym)) } (f, true) } else if (sym.hasAnnotation(jsdefn.JSExportTopLevelAnnot)) { - val f = js.SelectStatic(encodeClassName(sym.owner), encodeFieldSym(sym))(jstpe.AnyType) + val f = js.SelectStatic(encodeFieldSym(sym))(jstpe.AnyType) (f, true) } else if (sym.hasAnnotation(jsdefn.JSExportStaticAnnot)) { val jsName = sym.getAnnotation(jsdefn.JSExportStaticAnnot).get.argumentConstantString(0).getOrElse { @@ -4474,9 +4491,9 @@ class JSCodeGen()(using genCtx: Context) { val f = if sym.is(JavaStatic) then - js.SelectStatic(className, fieldIdent)(irType) + js.SelectStatic(fieldIdent)(irType) else - js.Select(qual, className, fieldIdent)(irType) + js.Select(qual, fieldIdent)(irType) (f, boxed) } diff --git a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala index f2b90d5b1161..098f592daa30 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala @@ -17,7 +17,7 @@ import dotty.tools.dotc.transform.sjs.JSSymUtils.* import org.scalajs.ir import org.scalajs.ir.{Trees => js, Types => jstpe} -import org.scalajs.ir.Names.{LocalName, LabelName, FieldName, SimpleMethodName, MethodName, ClassName} +import org.scalajs.ir.Names.{LocalName, LabelName, SimpleFieldName, FieldName, SimpleMethodName, MethodName, ClassName} import org.scalajs.ir.OriginalName import org.scalajs.ir.OriginalName.NoOriginalName import org.scalajs.ir.UTF8String @@ -173,7 +173,7 @@ object JSEncoding { } def 
encodeFieldSym(sym: Symbol)(implicit ctx: Context, pos: ir.Position): js.FieldIdent = - js.FieldIdent(FieldName(encodeFieldSymAsString(sym))) + js.FieldIdent(FieldName(encodeClassName(sym.owner), SimpleFieldName(encodeFieldSymAsString(sym)))) def encodeFieldSymAsStringLiteral(sym: Symbol)(implicit ctx: Context, pos: ir.Position): js.StringLiteral = js.StringLiteral(encodeFieldSymAsString(sym)) diff --git a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala index 8c72f03e7cc4..b5f9446758a9 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala @@ -27,6 +27,7 @@ import org.scalajs.ir.Names.DefaultModuleID import org.scalajs.ir.OriginalName.NoOriginalName import org.scalajs.ir.Position.NoPosition import org.scalajs.ir.Trees.OptimizerHints +import org.scalajs.ir.Version.Unversioned import dotty.tools.dotc.transform.sjs.JSExportUtils.* import dotty.tools.dotc.transform.sjs.JSSymUtils.* @@ -185,7 +186,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { }).toList } - def genStaticExports(classSym: Symbol): List[js.MemberDef] = { + def genStaticExports(classSym: Symbol): (List[js.JSFieldDef], List[js.JSMethodPropDef]) = { val exports = for { sym <- classSym.info.decls.toList info <- staticExportsOf(sym) @@ -193,10 +194,13 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { (info, sym) } - (for { + val fields = List.newBuilder[js.JSFieldDef] + val methodProps = List.newBuilder[js.JSMethodPropDef] + + for { (info, tups) <- exports.groupBy(_._1) kind <- checkSameKind(tups) - } yield { + } { def alts = tups.map(_._2) implicit val pos = info.pos @@ -205,10 +209,12 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { kind match { case Method => - genMemberExportOrDispatcher(JSName.Literal(info.jsName), isProp = false, alts, static = true) + methodProps += + 
genMemberExportOrDispatcher(JSName.Literal(info.jsName), isProp = false, alts, static = true) case Property => - genMemberExportOrDispatcher(JSName.Literal(info.jsName), isProp = true, alts, static = true) + methodProps += + genMemberExportOrDispatcher(JSName.Literal(info.jsName), isProp = true, alts, static = true) case Field => val sym = checkSingleField(tups) @@ -219,19 +225,21 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { .withMutable(true) val name = js.StringLiteral(info.jsName) val irTpe = genExposedFieldIRType(sym) - js.JSFieldDef(flags, name, irTpe) + fields += js.JSFieldDef(flags, name, irTpe) case kind => throw new AssertionError(s"unexpected static export kind: $kind") } - }).toList + } + + (fields.result(), methodProps.result()) } /** Generates exported methods and properties for a class. * * @param classSym symbol of the class we export for */ - def genMemberExports(classSym: ClassSymbol): List[js.MemberDef] = { + def genMemberExports(classSym: ClassSymbol): List[js.JSMethodPropDef] = { val classInfo = classSym.info val allExports = classInfo.memberDenots(takeAllFilter, { (name, buf) => if (isExportName(name)) @@ -251,7 +259,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { newlyDeclaredExportNames.map(genMemberExport(classSym, _)) } - private def genMemberExport(classSym: ClassSymbol, name: TermName): js.MemberDef = { + private def genMemberExport(classSym: ClassSymbol, name: TermName): js.JSMethodPropDef = { /* This used to be `.member(name)`, but it caused #3538, since we were * sometimes selecting mixin forwarders, whose type history does not go * far enough back in time to see varargs. 
We now explicitly exclude @@ -284,11 +292,11 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { genMemberExportOrDispatcher(JSName.Literal(jsName), isProp, alts.map(_.symbol), static = false) } - def genJSClassDispatchers(classSym: Symbol, dispatchMethodsNames: List[JSName]): List[js.MemberDef] = { + def genJSClassDispatchers(classSym: Symbol, dispatchMethodsNames: List[JSName]): List[js.JSMethodPropDef] = { dispatchMethodsNames.map(genJSClassDispatcher(classSym, _)) } - private def genJSClassDispatcher(classSym: Symbol, name: JSName): js.MemberDef = { + private def genJSClassDispatcher(classSym: Symbol, name: JSName): js.JSMethodPropDef = { val alts = classSym.info.membersBasedOnFlags(required = Method, excluded = Bridge) .map(_.symbol) .filter { sym => @@ -311,14 +319,14 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { em"Conflicting properties and methods for ${classSym.fullName}::$name.", firstAlt.srcPos) implicit val pos = firstAlt.span - js.JSPropertyDef(js.MemberFlags.empty, genExpr(name)(firstAlt.sourcePos), None, None) + js.JSPropertyDef(js.MemberFlags.empty, genExpr(name)(firstAlt.sourcePos), None, None)(Unversioned) } else { genMemberExportOrDispatcher(name, isProp, alts, static = false) } } private def genMemberExportOrDispatcher(jsName: JSName, isProp: Boolean, - alts: List[Symbol], static: Boolean): js.MemberDef = { + alts: List[Symbol], static: Boolean): js.JSMethodPropDef = { withNewLocalNameScope { if (isProp) genExportProperty(alts, jsName, static) @@ -362,7 +370,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { } } - js.JSPropertyDef(flags, genExpr(jsName)(alts.head.sourcePos), getterBody, setterArgAndBody) + js.JSPropertyDef(flags, genExpr(jsName)(alts.head.sourcePos), getterBody, setterArgAndBody)(Unversioned) } private def genExportMethod(alts0: List[Symbol], jsName: JSName, static: Boolean)(using Context): js.JSMethodDef = { @@ -389,7 +397,7 @@ final class JSExportsGen(jsCodeGen: 
JSCodeGen)(using Context) { genOverloadDispatch(jsName, overloads, jstpe.AnyType) js.JSMethodDef(flags, genExpr(jsName), formalArgs, restParam, body)( - OptimizerHints.empty, None) + OptimizerHints.empty, Unversioned) } def genOverloadDispatch(jsName: JSName, alts: List[Exported], tpe: jstpe.Type)( diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index 78773a518b67..a9e5dbacc938 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -17,7 +17,7 @@ import config.{SourceVersion, Feature} import StdNames.nme import scala.annotation.internal.sharable import scala.util.control.NoStackTrace -import transform.MacroAnnotations +import transform.MacroAnnotations.isMacroAnnotation class CompilationUnit protected (val source: SourceFile, val info: CompilationUnitInfo | Null) { @@ -28,13 +28,16 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn var tpdTree: tpd.Tree = tpd.EmptyTree /** Is this the compilation unit of a Java file */ - def isJava: Boolean = source.file.name.endsWith(".java") + def isJava: Boolean = source.file.ext.isJava /** Is this the compilation unit of a Java file, or TASTy derived from a Java file */ - def typedAsJava = isJava || { - val infoNN = info - infoNN != null && infoNN.tastyInfo.exists(_.attributes.isJava) - } + def typedAsJava = + val ext = source.file.ext + ext.isJavaOrTasty && (ext.isJava || tastyInfo.exists(_.attributes.isJava)) + + def tastyInfo: Option[TastyInfo] = + val local = info + if local == null then None else local.tastyInfo /** The source version for this unit, as determined by a language import */ @@ -87,19 +90,23 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn /** Suspends the compilation unit by thowing a SuspendException * and recording the suspended compilation unit */ - def suspend()(using Context): Nothing = + def 
suspend(hint: => String)(using Context): Nothing = assert(isSuspendable) // Clear references to symbols that may become stale. No need to call // `depRecorder.sendToZinc()` since all compilation phases will be rerun // when this unit is unsuspended. depRecorder.clear() if !suspended then - if (ctx.settings.XprintSuspension.value) - report.echo(i"suspended: $this") suspended = true - ctx.run.nn.suspendedUnits += this - if ctx.phase == Phases.inliningPhase then + val currRun = ctx.run.nn + currRun.suspendedUnits += this + val isInliningPhase = ctx.phase == Phases.inliningPhase + if ctx.settings.XprintSuspension.value then + currRun.suspendedHints += (this -> (hint, isInliningPhase)) + if isInliningPhase then suspendedAtInliningPhase = true + else + currRun.suspendedAtTyperPhase = true throw CompilationUnit.SuspendException() private var myAssignmentSpans: Map[Int, List[Span]] | Null = null @@ -117,7 +124,7 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn override def isJava: Boolean = false - override def suspend()(using Context): Nothing = + override def suspend(hint: => String)(using Context): Nothing = throw CompilationUnit.SuspendException() override def assignmentSpans(using Context): Map[Int, List[Span]] = Map.empty @@ -190,7 +197,7 @@ object CompilationUnit { case _ => case _ => for annot <- tree.symbol.annotations do - if MacroAnnotations.isMacroAnnotation(annot) then + if annot.isMacroAnnotation then ctx.compilationUnit.hasMacroAnnotations = true traverseChildren(tree) } diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index 3abd46ebe8eb..ffd3d27f7c99 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -41,13 +41,13 @@ class Compiler { List(new semanticdb.ExtractSemanticDB.ExtractSemanticInfo) :: // Extract info into .semanticdb files List(new PostTyper) :: // Additional checks and cleanups after type checking 
List(new sjs.PrepJSInterop) :: // Additional checks and transformations for Scala.js (Scala.js only) - List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks List(new SetRootTree) :: // Set the `rootTreeOrProvider` on class symbols Nil /** Phases dealing with TASTY tree pickling and unpickling */ protected def picklerPhases: List[List[Phase]] = List(new Pickler) :: // Generate TASTY info + List(new sbt.ExtractAPI) :: // Sends a representation of the API of classes to sbt via callbacks List(new Inlining) :: // Inline and execute macros List(new PostInlining) :: // Add mirror support for inlined code List(new CheckUnused.PostInlining) :: // Check for unused elements diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index 196752aceb29..98abe2ac6c38 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -6,7 +6,7 @@ import core.Comments.{ContextDoc, ContextDocstrings} import core.Contexts.* import core.{MacroClassLoader, TypeError} import dotty.tools.dotc.ast.Positioned -import dotty.tools.io.AbstractFile +import dotty.tools.io.{AbstractFile, FileExtension} import reporting.* import core.Decorators.* import config.Feature @@ -39,6 +39,9 @@ class Driver { catch case ex: FatalError => report.error(ex.getMessage.nn) // signals that we should fail compilation. 
+ case ex: Throwable if ctx.usedBestEffortTasty => + report.bestEffortError(ex, "Some best-effort tasty files were not able to be read.") + throw ex case ex: TypeError if !runOrNull.enrichedErrorMessage => println(runOrNull.enrichErrorMessage(s"${ex.toMessage} while compiling ${files.map(_.path).mkString(", ")}")) throw ex @@ -52,9 +55,12 @@ class Driver { if !ctx.reporter.errorsReported && run.suspendedUnits.nonEmpty then val suspendedUnits = run.suspendedUnits.toList if (ctx.settings.XprintSuspension.value) + val suspendedHints = run.suspendedHints.toList report.echo(i"compiling suspended $suspendedUnits%, %") + for (unit, (hint, atInlining)) <- suspendedHints do + report.echo(s" $unit at ${if atInlining then "inlining" else "typer"}: $hint") val run1 = compiler.newRun - run1.compileSuspendedUnits(suspendedUnits) + run1.compileSuspendedUnits(suspendedUnits, !run.suspendedAtTyperPhase) finish(compiler, run1)(using MacroClassLoader.init(ctx.fresh)) protected def initCtx: Context = (new ContextBase).initialCtx @@ -74,12 +80,11 @@ class Driver { val ictx = rootCtx.fresh val summary = command.distill(args, ictx.settings)(ictx.settingsState)(using ictx) ictx.setSettings(summary.sstate) - Feature.checkExperimentalSettings(using ictx) MacroClassLoader.init(ictx) Positioned.init(using ictx) inContext(ictx) { - if !ctx.settings.YdropComments.value || ctx.settings.YreadComments.value then + if !ctx.settings.XdropComments.value || ctx.settings.XreadComments.value then ictx.setProperty(ContextDoc, new ContextDocstrings) val fileNamesOrNone = command.checkUsage(summary, sourcesRequired)(using ctx.settings)(using ctx.settingsState) fileNamesOrNone.map { fileNames => @@ -97,10 +102,10 @@ class Driver { if !file.exists then report.error(em"File does not exist: ${file.path}") None - else file.extension match - case "jar" => Some(file.path) - case "tasty" => - TastyFileUtil.getClassPath(file) match + else file.ext match + case FileExtension.Jar => Some(file.path) + case 
FileExtension.Tasty | FileExtension.Betasty => + TastyFileUtil.getClassPath(file, ctx.withBestEffortTasty) match case Some(classpath) => Some(classpath) case _ => report.error(em"Could not load classname from: ${file.path}") diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index d18a2ddc7db0..11a0430480d9 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -37,6 +37,7 @@ import scala.io.Codec import Run.Progress import scala.compiletime.uninitialized import dotty.tools.dotc.transform.MegaPhase +import dotty.tools.dotc.transform.Pickler.AsyncTastyHolder /** A compiler run. Exports various methods to compile source files */ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with ConstraintRunInfo { @@ -130,6 +131,10 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint myUnits = us var suspendedUnits: mutable.ListBuffer[CompilationUnit] = mutable.ListBuffer() + var suspendedHints: mutable.Map[CompilationUnit, (String, Boolean)] = mutable.HashMap() + + /** Were any units suspended in the typer phase? if so then pipeline tasty can not complete. */ + var suspendedAtTyperPhase: Boolean = false def checkSuspendedUnits(newUnits: List[CompilationUnit])(using Context): Unit = if newUnits.isEmpty && suspendedUnits.nonEmpty && !ctx.reporter.errorsReported then @@ -230,6 +235,22 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint if !progress.isCancelled() then progress.tickSubphase() + /** if true, then we are done writing pipelined TASTy files (i.e. finished in a previous run.) */ + private var myAsyncTastyWritten = false + + private var _asyncTasty: Option[AsyncTastyHolder] = None + + /** populated when this run needs to write pipeline TASTy files. 
*/ + def asyncTasty: Option[AsyncTastyHolder] = _asyncTasty + + private def initializeAsyncTasty()(using Context): () => Unit = + // should we provide a custom ExecutionContext? + // currently it is just used to call the `apiPhaseCompleted` and `dependencyPhaseCompleted` callbacks in Zinc + import scala.concurrent.ExecutionContext.Implicits.global + val async = AsyncTastyHolder.init + _asyncTasty = Some(async) + () => async.cancel() + /** Will be set to true if any of the compiled compilation units contains * a pureFunctions language import. */ @@ -292,10 +313,13 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint if (ctx.settings.YtestPickler.value) List("pickler") else ctx.settings.YstopAfter.value + val runCtx = ctx.fresh + runCtx.setProfiler(Profiler()) + val pluginPlan = ctx.base.addPluginPhases(ctx.base.phasePlan) val phases = ctx.base.fusePhases(pluginPlan, ctx.settings.Yskip.value, ctx.settings.YstopBefore.value, stopAfter, ctx.settings.Ycheck.value) - ctx.base.usePhases(phases) + ctx.base.usePhases(phases, runCtx) if ctx.settings.YnoDoubleBindings.value then ctx.base.checkNoDoubleBindings = true @@ -305,9 +329,13 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint val profiler = ctx.profiler var phasesWereAdjusted = false + var forceReachPhaseMaybe = + if (ctx.isBestEffort && phases.exists(_.phaseName == "typer")) Some("typer") + else None + for phase <- allPhases do doEnterPhase(phase) - val phaseWillRun = phase.isRunnable + val phaseWillRun = phase.isRunnable || forceReachPhaseMaybe.nonEmpty if phaseWillRun then Stats.trackTime(s"phase time ms/$phase") { val start = System.currentTimeMillis @@ -320,6 +348,10 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint def printCtx(unit: CompilationUnit) = phase.printingContext( ctx.fresh.setPhase(phase.next).setCompilationUnit(unit)) lastPrintedTree = printTree(lastPrintedTree)(using printCtx(unit)) + + if 
forceReachPhaseMaybe.contains(phase.phaseName) then + forceReachPhaseMaybe = None + report.informTime(s"$phase ", start) Stats.record(s"total trees at end of $phase", ast.Trees.ntrees) for (unit <- units) @@ -339,15 +371,19 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint profiler.finished() } - val runCtx = ctx.fresh - runCtx.setProfiler(Profiler()) - unfusedPhases.foreach(_.initContext(runCtx)) val fusedPhases = runCtx.base.allPhases if ctx.settings.explainCyclic.value then runCtx.setProperty(CyclicReference.Trace, new CyclicReference.Trace()) runCtx.withProgressCallback: cb => _progress = Progress(cb, this, fusedPhases.map(_.traversals).sum) + val cancelAsyncTasty: () => Unit = + if !myAsyncTastyWritten && Phases.picklerPhase.exists && !ctx.settings.XearlyTastyOutput.isDefault then + initializeAsyncTasty() + else () => {} + runPhases(allPhases = fusedPhases)(using runCtx) + cancelAsyncTasty() + ctx.reporter.finalizeReporting() if (!ctx.reporter.hasErrors) Rewrites.writeBack() @@ -364,9 +400,12 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint /** Is this run started via a compilingSuspended? */ def isCompilingSuspended: Boolean = myCompilingSuspended - /** Compile units `us` which were suspended in a previous run */ - def compileSuspendedUnits(us: List[CompilationUnit]): Unit = + /** Compile units `us` which were suspended in a previous run, + * also signal if all necessary async tasty files were written in a previous run. 
+ */ + def compileSuspendedUnits(us: List[CompilationUnit], asyncTastyWritten: Boolean): Unit = myCompilingSuspended = true + myAsyncTastyWritten = asyncTastyWritten for unit <- us do unit.suspended = false compileUnits(us) @@ -622,4 +661,6 @@ object Run { report.enrichErrorMessage(errorMessage) else errorMessage + def doNotEnrichErrorMessage: Unit = + if run != null then run.myEnrichedErrorMessage = true } diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 2d99cf201375..b1b771bc7512 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -9,10 +9,10 @@ import Decorators.* import Annotations.Annotation import NameKinds.{UniqueName, ContextBoundParamName, ContextFunctionParamName, DefaultGetterName, WildcardParamName} import typer.{Namer, Checking} -import util.{Property, SourceFile, SourcePosition, Chars} -import config.Feature.{sourceVersion, migrateTo3, enabled} +import util.{Property, SourceFile, SourcePosition, SrcPos, Chars} +import config.{Feature, Config} import config.SourceVersion.* -import collection.mutable.ListBuffer +import collection.mutable import reporting.* import annotation.constructorOnly import printing.Formatting.hl @@ -46,6 +46,11 @@ object desugar { */ val UntupledParam: Property.Key[Unit] = Property.StickyKey() + /** An attachment key to indicate that a ValDef is an evidence parameter + * for a context bound. + */ + val ContextBoundParam: Property.Key[Unit] = Property.StickyKey() + /** What static check should be applied to a Match? 
*/ enum MatchCheck { case None, Exhaustive, IrrefutablePatDef, IrrefutableGenFrom @@ -195,17 +200,6 @@ object desugar { else vdef1 end valDef - def makeImplicitParameters( - tpts: List[Tree], implicitFlag: FlagSet, - mkParamName: Int => TermName, - forPrimaryConstructor: Boolean = false - )(using Context): List[ValDef] = - for (tpt, i) <- tpts.zipWithIndex yield { - val paramFlags: FlagSet = if (forPrimaryConstructor) LocalParamAccessor else Param - val epname = mkParamName(i) - ValDef(epname, tpt, EmptyTree).withFlags(paramFlags | implicitFlag) - } - def mapParamss(paramss: List[ParamClause]) (mapTypeParam: TypeDef => TypeDef) (mapTermParam: ValDef => ValDef)(using Context): List[ParamClause] = @@ -232,34 +226,84 @@ object desugar { private def defDef(meth: DefDef, isPrimaryConstructor: Boolean = false)(using Context): Tree = addDefaultGetters(elimContextBounds(meth, isPrimaryConstructor)) - private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = - val DefDef(_, paramss, tpt, rhs) = meth - val evidenceParamBuf = ListBuffer[ValDef]() - - var seenContextBounds: Int = 0 - def desugarContextBounds(rhs: Tree): Tree = rhs match - case ContextBounds(tbounds, cxbounds) => - val iflag = if sourceVersion.isAtLeast(`future`) then Given else Implicit - evidenceParamBuf ++= makeImplicitParameters( - cxbounds, iflag, - // Just like with `makeSyntheticParameter` on nameless parameters of - // using clauses, we only need names that are unique among the - // parameters of the method since shadowing does not affect - // implicit resolution in Scala 3. - mkParamName = i => - val index = seenContextBounds + 1 // Start at 1 like FreshNameCreator. - val ret = ContextBoundParamName(EmptyTermName, index) - seenContextBounds += 1 - ret, - forPrimaryConstructor = isPrimaryConstructor) + /** Drop context bounds in given TypeDef, replacing them with evidence ValDefs that + * get added to a buffer. 
+ * @param tdef The given TypeDef + * @param evidenceBuf The buffer to which evidence gets added. This buffer + * is shared between desugarings of different type parameters + * of the same method. + * @param evidenceFlags The flags to use for evidence definitions + * @param freshName A function to generate fresh names for evidence definitions + * @param allParamss If `tdef` is a type paramter, all parameters of the owning method, + * otherwise the empty list. + */ + private def desugarContextBounds( + tdef: TypeDef, + evidenceBuf: mutable.ListBuffer[ValDef], + evidenceFlags: FlagSet, + freshName: untpd.Tree => TermName, + allParamss: List[ParamClause])(using Context): TypeDef = + + val evidenceNames = mutable.ListBuffer[TermName]() + + def desugarRHS(rhs: Tree): Tree = rhs match + case ContextBounds(tbounds, ctxbounds) => + val isMember = evidenceFlags.isAllOf(DeferredGivenFlags) + for bound <- ctxbounds do + val evidenceName = bound match + case ContextBoundTypeTree(_, _, ownName) if !ownName.isEmpty => + ownName // if there is an explicitly given name, use it. + case _ => + if Config.nameSingleContextBounds + && !isMember + && ctxbounds.tail.isEmpty + && Feature.enabled(Feature.modularity) + then tdef.name.toTermName + else freshName(bound) + evidenceNames += evidenceName + val evidenceParam = ValDef(evidenceName, bound, EmptyTree).withFlags(evidenceFlags) + evidenceParam.pushAttachment(ContextBoundParam, ()) + evidenceBuf += evidenceParam tbounds case LambdaTypeTree(tparams, body) => - cpy.LambdaTypeTree(rhs)(tparams, desugarContextBounds(body)) + cpy.LambdaTypeTree(rhs)(tparams, desugarRHS(body)) case _ => rhs + + val tdef1 = cpy.TypeDef(tdef)(rhs = desugarRHS(tdef.rhs)) + // Under x.modularity, if there was a context bound, and `tdef`s name as a term name is + // neither a name of an existing parameter nor a name of generated evidence for + // the same method, add a WitnessAnnotation with all generated evidence names to `tdef`. 
+ // This means a context bound proxy will be created later. + if Feature.enabled(Feature.modularity) + && evidenceNames.nonEmpty + && !evidenceBuf.exists(_.name == tdef.name.toTermName) + && !allParamss.nestedExists(_.name == tdef.name.toTermName) + then + tdef1.withAddedAnnotation: + WitnessNamesAnnot(evidenceNames.toList).withSpan(tdef.span) + else + tdef1 + end desugarContextBounds + + private def elimContextBounds(meth: DefDef, isPrimaryConstructor: Boolean)(using Context): DefDef = + val DefDef(_, paramss, tpt, rhs) = meth + val evidenceParamBuf = mutable.ListBuffer[ValDef]() + + var seenContextBounds: Int = 0 + def freshName(unused: Tree) = + seenContextBounds += 1 // Start at 1 like FreshNameCreator. + ContextBoundParamName(EmptyTermName, seenContextBounds) + // Just like with `makeSyntheticParameter` on nameless parameters of + // using clauses, we only need names that are unique among the + // parameters of the method since shadowing does not affect + // implicit resolution in Scala 3. 
+ val paramssNoContextBounds = + val iflag = if Feature.sourceVersion.isAtLeast(`future`) then Given else Implicit + val flags = if isPrimaryConstructor then iflag | LocalParamAccessor else iflag | Param mapParamss(paramss) { - tparam => cpy.TypeDef(tparam)(rhs = desugarContextBounds(tparam.rhs)) + tparam => desugarContextBounds(tparam, evidenceParamBuf, flags, freshName, paramss) }(identity) rhs match @@ -305,9 +349,9 @@ object desugar { def getterParamss(n: Int): List[ParamClause] = mapParamss(takeUpTo(paramssNoRHS, n)) { - tparam => dropContextBounds(toDefParam(tparam, keepAnnotations = true)) + tparam => dropContextBounds(toMethParam(tparam, KeepAnnotations.All)) } { - vparam => toDefParam(vparam, keepAnnotations = true, keepDefault = false) + vparam => toMethParam(vparam, KeepAnnotations.All, keepDefault = false) } def defaultGetters(paramss: List[ParamClause], n: Int): List[DefDef] = paramss match @@ -399,54 +443,98 @@ object desugar { (Nil, tree) /** Add all evidence parameters in `params` as implicit parameters to `meth`. - * If the parameters of `meth` end in an implicit parameter list or using clause, - * evidence parameters are added in front of that list. Otherwise they are added - * as a separate parameter clause. + * The position of the added parameters is determined as follows: + * + * - If there is an existing parameter list that refers to one of the added + * parameters or their future context bound proxies in one of its parameter + * types, add the new parameters in front of the first such parameter list. + * - Otherwise, if the last parameter list consists of implicit or using parameters, + * join the new parameters in front of this parameter list, creating one + * parameter list (this is equivalent to Scala 2's scheme). + * - Otherwise, add the new parameter list at the end as a separate parameter clause. 
*/ private def addEvidenceParams(meth: DefDef, params: List[ValDef])(using Context): DefDef = - params match + if params.isEmpty then return meth + + var boundNames = params.map(_.name).toSet // all evidence parameter + context bound proxy names + for mparams <- meth.paramss; mparam <- mparams do + mparam match + case tparam: TypeDef if tparam.mods.annotations.exists(WitnessNamesAnnot.unapply(_).isDefined) => + boundNames += tparam.name.toTermName + case _ => + + def referencesBoundName(vdef: ValDef): Boolean = + vdef.tpt.existsSubTree: + case Ident(name: TermName) => boundNames.contains(name) + case _ => false + + def recur(mparamss: List[ParamClause]): List[ParamClause] = mparamss match + case ValDefs(mparams) :: _ if mparams.exists(referencesBoundName) => + params :: mparamss + case ValDefs(mparams @ (mparam :: _)) :: Nil if mparam.mods.isOneOf(GivenOrImplicit) => + (params ++ mparams) :: Nil + case mparams :: mparamss1 => + mparams :: recur(mparamss1) case Nil => - meth - case evidenceParams => - val paramss1 = meth.paramss.reverse match - case ValDefs(vparams @ (vparam :: _)) :: rparamss if vparam.mods.isOneOf(GivenOrImplicit) => - ((evidenceParams ++ vparams) :: rparamss).reverse - case _ => - meth.paramss :+ evidenceParams - cpy.DefDef(meth)(paramss = paramss1) + params :: Nil + + cpy.DefDef(meth)(paramss = recur(meth.paramss)) + end addEvidenceParams /** The parameters generated from the contextual bounds of `meth`, as generated by `desugar.defDef` */ private def evidenceParams(meth: DefDef)(using Context): List[ValDef] = - meth.paramss.reverse match { - case ValDefs(vparams @ (vparam :: _)) :: _ if vparam.mods.isOneOf(GivenOrImplicit) => - vparams.takeWhile(_.name.is(ContextBoundParamName)) - case _ => - Nil - } + for + case ValDefs(vparams @ (vparam :: _)) <- meth.paramss + if vparam.mods.isOneOf(GivenOrImplicit) + param <- vparams.takeWhile(_.hasAttachment(ContextBoundParam)) + yield + param @sharable private val synthetic = Modifiers(Synthetic) - 
private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean): TypeDef = { - var mods = tparam.rawMods - if (!keepAnnotations) mods = mods.withAnnotations(Nil) - tparam.withMods(mods & (EmptyFlags | Sealed) | Param) - } - private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean): ValDef = { - var mods = vparam.rawMods - if (!keepAnnotations) mods = mods.withAnnotations(Nil) + /** Which annotations to keep in derived parameters */ + private enum KeepAnnotations: + case None, All, WitnessOnly + + /** Filter annotations in `mods` according to `keep` */ + private def filterAnnots(mods: Modifiers, keep: KeepAnnotations)(using Context) = keep match + case KeepAnnotations.None => mods.withAnnotations(Nil) + case KeepAnnotations.All => mods + case KeepAnnotations.WitnessOnly => + mods.withAnnotations: + mods.annotations.filter: + case WitnessNamesAnnot(_) => true + case _ => false + + /** Map type parameter accessor to corresponding method (i.e. constructor) parameter */ + private def toMethParam(tparam: TypeDef, keep: KeepAnnotations)(using Context): TypeDef = + val mods = filterAnnots(tparam.rawMods, keep) + tparam.withMods(mods & EmptyFlags | Param) + + /** Map term parameter accessor to corresponding method (i.e. constructor) parameter */ + private def toMethParam(vparam: ValDef, keep: KeepAnnotations, keepDefault: Boolean)(using Context): ValDef = { + val mods = filterAnnots(vparam.rawMods, keep) val hasDefault = if keepDefault then HasDefault else EmptyFlags - vparam.withMods(mods & (GivenOrImplicit | Erased | hasDefault) | Param) + // Need to ensure that tree is duplicated since term parameters can be watched + // and cloning a term parameter will copy its watchers to the clone, which means + // we'd get cross-talk between the original parameter and the clone. 
+ ValDef(vparam.name, vparam.tpt, vparam.rhs) + .withSpan(vparam.span) + .withAttachmentsFrom(vparam) + .withMods(mods & (GivenOrImplicit | Erased | hasDefault | Tracked) | Param) } - def mkApply(fn: Tree, paramss: List[ParamClause])(using Context): Tree = - paramss.foldLeft(fn) { (fn, params) => params match - case TypeDefs(params) => - TypeApply(fn, params.map(refOfDef)) - case (vparam: ValDef) :: _ if vparam.mods.is(Given) => - Apply(fn, params.map(refOfDef)).setApplyKind(ApplyKind.Using) - case _ => - Apply(fn, params.map(refOfDef)) - } + /** Desugar type def (not param): Under x.moduliity this can expand + * context bounds, which are expanded to evidence ValDefs. These will + * ultimately map to deferred givens. + */ + def typeDef(tdef: TypeDef)(using Context): Tree = + val evidenceBuf = new mutable.ListBuffer[ValDef] + val result = desugarContextBounds( + tdef, evidenceBuf, + (tdef.mods.flags.toTermFlags & AccessFlags) | Lazy | DeferredGivenFlags, + inventGivenName, Nil) + if evidenceBuf.isEmpty then result else Thicket(result :: evidenceBuf.toList) /** The expansion of a class definition. See inline comments for what is involved */ def classDef(cdef: TypeDef)(using Context): Tree = { @@ -520,7 +608,7 @@ object desugar { // Annotations on class _type_ parameters are set on the derived parameters // but not on the constructor parameters. The reverse is true for // annotations on class _value_ parameters. 
- val constrTparams = impliedTparams.map(toDefParam(_, keepAnnotations = false)) + val constrTparams = impliedTparams.map(toMethParam(_, KeepAnnotations.WitnessOnly)) val constrVparamss = if (originalVparamss.isEmpty) { // ensure parameter list is non-empty if (isCaseClass) @@ -531,7 +619,7 @@ object desugar { report.error(CaseClassMissingNonImplicitParamList(cdef), namePos) ListOfNil } - else originalVparamss.nestedMap(toDefParam(_, keepAnnotations = true, keepDefault = true)) + else originalVparamss.nestedMap(toMethParam(_, KeepAnnotations.All, keepDefault = true)) val derivedTparams = constrTparams.zipWithConserve(impliedTparams)((tparam, impliedParam) => derivedTypeParam(tparam).withAnnotations(impliedParam.mods.annotations)) @@ -553,7 +641,7 @@ object desugar { defDef( addEvidenceParams( cpy.DefDef(ddef)(paramss = joinParams(constrTparams, ddef.paramss)), - evidenceParams(constr1).map(toDefParam(_, keepAnnotations = false, keepDefault = false))))) + evidenceParams(constr1).map(toMethParam(_, KeepAnnotations.None, keepDefault = false))))) case stat => stat } @@ -609,6 +697,11 @@ object desugar { case _ => false } + /** Is this a repeated argument x* (using a spread operator)? 
*/ + def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match + case PostfixOp(_, Ident(tpnme.raw.STAR)) => true + case _ => false + def appliedRef(tycon: Tree, tparams: List[TypeDef] = constrTparams, widenHK: Boolean = false) = { val targs = for (tparam <- tparams) yield { val targ = refOfDef(tparam) @@ -625,11 +718,6 @@ object desugar { appliedTypeTree(tycon, targs) } - def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match { - case PostfixOp(_, Ident(tpnme.raw.STAR)) => true - case _ => false - } - // a reference to the class type bound by `cdef`, with type parameters coming from the constructor val classTypeRef = appliedRef(classTycon) @@ -667,7 +755,7 @@ object desugar { } ensureApplied(nu) - val copiedAccessFlags = if migrateTo3 then EmptyFlags else AccessFlags + val copiedAccessFlags = if Feature.migrateTo3 then EmptyFlags else AccessFlags // Methods to add to a case class C[..](p1: T1, ..., pN: Tn)(moreParams) // def _1: T1 = this.p1 @@ -850,19 +938,17 @@ object desugar { Nil } else { - val defParamss = constrVparamss match { + val defParamss = constrVparamss match case Nil :: paramss => paramss // drop leading () that got inserted by class // TODO: drop this once we do not silently insert empty class parameters anymore case paramss => paramss - } val finalFlag = if ctx.settings.YcompileScala2Library.value then EmptyFlags else Final // implicit wrapper is typechecked in same scope as constructor, so // we can reuse the constructor parameters; no derived params are needed. 
DefDef( - className.toTermName, joinParams(constrTparams, defParamss), - classTypeRef, creatorExpr) - .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | finalFlag) + className.toTermName, joinParams(constrTparams, defParamss), classTypeRef, creatorExpr + ) .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | finalFlag) .withSpan(cdef.span) :: Nil } @@ -890,7 +976,9 @@ object desugar { } if mods.isAllOf(Given | Inline | Transparent) then report.error("inline given instances cannot be trasparent", cdef) - val classMods = if mods.is(Given) then mods &~ (Inline | Transparent) | Synthetic else mods + var classMods = if mods.is(Given) then mods &~ (Inline | Transparent) | Synthetic else mods + if vparamAccessors.exists(_.mods.is(Tracked)) then + classMods |= Dependent cpy.TypeDef(cdef: TypeDef)( name = className, rhs = cpy.Template(impl)(constr, parents1, clsDerived, self1, @@ -1071,7 +1159,7 @@ object desugar { */ def normalizeName(mdef: MemberDef, impl: Tree)(using Context): Name = { var name = mdef.name - if (name.isEmpty) name = name.likeSpaced(inventGivenOrExtensionName(impl)) + if (name.isEmpty) name = name.likeSpaced(inventGivenName(impl)) def errPos = mdef.source.atSpan(mdef.nameSpan) if (ctx.owner == defn.ScalaPackageClass && defn.reservedScalaClassNames.contains(name.toTypeName)) { val kind = if (name.isTypeName) "class" else "object" @@ -1118,7 +1206,7 @@ object desugar { end makePolyFunctionType /** Invent a name for an anonympus given of type or template `impl`. */ - def inventGivenOrExtensionName(impl: Tree)(using Context): SimpleName = + def inventGivenName(impl: Tree)(using Context): SimpleName = val str = impl match case impl: Template => if impl.parents.isEmpty then @@ -1130,6 +1218,10 @@ object desugar { "given_" ++ inventTypeName(impl) str.toTermName.asSimpleName + /** Extract a synthesized given name from a type tree. 
This is used for + * both anonymous givens and (under x.modularity) deferred givens. + * @param followArgs if true include argument types in the name + */ private class NameExtractor(followArgs: Boolean) extends UntypedTreeAccumulator[String] { private def extractArgs(args: List[Tree])(using Context): String = args.map(argNameExtractor.apply("", _)).mkString("_") @@ -1143,6 +1235,8 @@ object desugar { case tree: TypeDef => tree.name.toString case tree: AppliedTypeTree if followArgs && tree.args.nonEmpty => s"${apply(x, tree.tpt)}_${extractArgs(tree.args)}" + case ContextBoundTypeTree(tycon, paramName, _) => + s"${apply(x, tycon)}_$paramName" case InfixOp(left, op, right) => if followArgs then s"${op.name}_${extractArgs(List(left, right))}" else op.name.toString @@ -1254,8 +1348,9 @@ object desugar { pats.forall(isVarPattern) case _ => false } + val isMatchingTuple: Tree => Boolean = { - case Tuple(es) => isTuplePattern(es.length) + case Tuple(es) => isTuplePattern(es.length) && !hasNamedArg(es) case _ => false } @@ -1379,7 +1474,7 @@ object desugar { case tree: TypeDef => if (tree.isClassDef) classDef(tree) else if (ctx.mode.isQuotedPattern) quotedPatternTypeDef(tree) - else tree + else typeDef(tree) case tree: DefDef => if (tree.name.isConstructorName) tree // was already handled by enclosing classDef else defDef(tree) @@ -1441,22 +1536,99 @@ object desugar { AppliedTypeTree( TypeTree(defn.throwsAlias.typeRef).withSpan(op.span), tpt :: excepts :: Nil) - /** Translate tuple expressions of arity <= 22 + private def checkWellFormedTupleElems(elems: List[Tree])(using Context): List[Tree] = + val seen = mutable.Set[Name]() + for case arg @ NamedArg(name, _) <- elems do + if seen.contains(name) then + report.error(em"Duplicate tuple element name", arg.srcPos) + seen += name + if name.startsWith("_") && name.toString.tail.toIntOption.isDefined then + report.error( + em"$name cannot be used as the name of a tuple element because it is a regular tuple selector", + 
arg.srcPos) + + elems match + case elem :: elems1 => + val mismatchOpt = + if elem.isInstanceOf[NamedArg] + then elems1.find(!_.isInstanceOf[NamedArg]) + else elems1.find(_.isInstanceOf[NamedArg]) + mismatchOpt match + case Some(misMatch) => + report.error(em"Illegal combination of named and unnamed tuple elements", misMatch.srcPos) + elems.mapConserve(stripNamedArg) + case None => elems + case _ => elems + end checkWellFormedTupleElems + + /** Translate tuple expressions * * () ==> () * (t) ==> t * (t1, ..., tN) ==> TupleN(t1, ..., tN) */ - def smallTuple(tree: Tuple)(using Context): Tree = { - val ts = tree.trees - val arity = ts.length - assert(arity <= Definitions.MaxTupleArity) - def tupleTypeRef = defn.TupleType(arity).nn - if (arity == 0) - if (ctx.mode is Mode.Type) TypeTree(defn.UnitType) else unitLiteral - else if (ctx.mode is Mode.Type) AppliedTypeTree(ref(tupleTypeRef), ts) - else Apply(ref(tupleTypeRef.classSymbol.companionModule.termRef), ts) - } + def tuple(tree: Tuple, pt: Type)(using Context): Tree = + var elems = checkWellFormedTupleElems(tree.trees) + if ctx.mode.is(Mode.Pattern) then elems = adaptPatternArgs(elems, pt) + val elemValues = elems.mapConserve(stripNamedArg) + val tup = + val arity = elems.length + if arity <= Definitions.MaxTupleArity then + def tupleTypeRef = defn.TupleType(arity).nn + val tree1 = + if arity == 0 then + if ctx.mode is Mode.Type then TypeTree(defn.UnitType) else unitLiteral + else if ctx.mode is Mode.Type then AppliedTypeTree(ref(tupleTypeRef), elemValues) + else Apply(ref(tupleTypeRef.classSymbol.companionModule.termRef), elemValues) + tree1.withSpan(tree.span) + else + cpy.Tuple(tree)(elemValues) + val names = elems.collect: + case NamedArg(name, arg) => name + if names.isEmpty || ctx.mode.is(Mode.Pattern) then + tup + else + def namesTuple = withModeBits(ctx.mode &~ Mode.Pattern | Mode.Type): + tuple(Tuple( + names.map: name => + SingletonTypeTree(Literal(Constant(name.toString))).withSpan(tree.span)), + 
WildcardType) + if ctx.mode.is(Mode.Type) then + AppliedTypeTree(ref(defn.NamedTupleTypeRef), namesTuple :: tup :: Nil) + else + TypeApply( + Apply(Select(ref(defn.NamedTupleModule), nme.withNames), tup), + namesTuple :: Nil) + + /** When desugaring a list pattern arguments `elems` adapt them and the + * expected type `pt` to each other. This means: + * - If `elems` are named pattern elements, rearrange them to match `pt`. + * This requires all names in `elems` to be also present in `pt`. + */ + def adaptPatternArgs(elems: List[Tree], pt: Type)(using Context): List[Tree] = + + def reorderedNamedArgs(wildcardSpan: Span): List[untpd.Tree] = + var selNames = pt.namedTupleElementTypes.map(_(0)) + if selNames.isEmpty && pt.classSymbol.is(CaseClass) then + selNames = pt.classSymbol.caseAccessors.map(_.name.asTermName) + val nameToIdx = selNames.zipWithIndex.toMap + val reordered = Array.fill[untpd.Tree](selNames.length): + untpd.Ident(nme.WILDCARD).withSpan(wildcardSpan) + for case arg @ NamedArg(name: TermName, _) <- elems do + nameToIdx.get(name) match + case Some(idx) => + if reordered(idx).isInstanceOf[Ident] then + reordered(idx) = arg + else + report.error(em"Duplicate named pattern", arg.srcPos) + case _ => + report.error(em"No element named `$name` is defined in selector type $pt", arg.srcPos) + reordered.toList + + elems match + case (first @ NamedArg(_, _)) :: _ => reorderedNamedArgs(first.span.startPos) + case _ => elems + end adaptPatternArgs private def isTopLevelDef(stat: Tree)(using Context): Boolean = stat match case _: ValDef | _: PatDef | _: DefDef | _: Export | _: ExtMethods => true @@ -1523,7 +1695,7 @@ object desugar { DefDef(nme.ANON_FUN, paramss, if (tpt == null) TypeTree() else tpt, body) .withSpan(span) .withMods(synthetic | Artifact), - Closure(Nil, Ident(nme.ANON_FUN), EmptyTree)) + Closure(Nil, Ident(nme.ANON_FUN), EmptyTree).withSpan(span)) /** If `nparams` == 1, expand partial function * @@ -1600,14 +1772,13 @@ object desugar { .collect: 
case vd: ValDef => vd - def makeContextualFunction(formals: List[Tree], paramNamesOrNil: List[TermName], body: Tree, erasedParams: List[Boolean])(using Context): Function = { - val mods = Given - val params = makeImplicitParameters(formals, mods, - mkParamName = i => - if paramNamesOrNil.isEmpty then ContextFunctionParamName.fresh() - else paramNamesOrNil(i)) - FunctionWithMods(params, body, Modifiers(mods), erasedParams) - } + def makeContextualFunction(formals: List[Tree], paramNamesOrNil: List[TermName], body: Tree, erasedParams: List[Boolean])(using Context): Function = + val paramNames = + if paramNamesOrNil.nonEmpty then paramNamesOrNil + else formals.map(_ => ContextFunctionParamName.fresh()) + val params = for (tpt, pname) <- formals.zip(paramNames) yield + ValDef(pname, tpt, EmptyTree).withFlags(Given | Param) + FunctionWithMods(params, body, Modifiers(Given), erasedParams) private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(using Context) = { val vdef = ValDef(named.name.asTermName, tpt, rhs) @@ -1943,12 +2114,27 @@ object desugar { case AndType(tp1, tp2) => stripToCore(tp1) ::: stripToCore(tp2) case _ => defn.AnyType :: Nil } + + val refinements1 = Trees.flatten: + refinements.mapConserve { + case tree: ValDef if tree.mods.is(Mutable) => + val getter = + cpy.DefDef(tree)(name = tree.name, paramss = Nil, tpt = tree.tpt, rhs = tree.rhs) + .withFlags(tree.mods.flags & (AccessFlags | Synthetic)) + val setterParam = makeSyntheticParameter(tpt = tree.tpt) + val setter = + cpy.DefDef(tree)(name = tree.name.setterName, paramss = List(List(setterParam)), tpt = untpd.scalaUnit, rhs = EmptyTree) + .withFlags(tree.mods.flags & (AccessFlags | Synthetic)) + Thicket(getter, setter) + case tree => tree + } + val parentCores = stripToCore(parent.tpe) val untpdParent = TypedSplice(parent) val (classParents, self) = if (parentCores.length == 1 && (parent.tpe eq parentCores.head)) (untpdParent :: Nil, EmptyValDef) else 
(parentCores map TypeTree, ValDef(nme.WILDCARD, untpdParent, EmptyTree)) - val impl = Template(emptyConstructor, classParents, Nil, self, refinements) + val impl = Template(emptyConstructor, classParents, Nil, self, refinements1) TypeDef(tpnme.REFINE_CLASS, impl).withFlags(Trait) } @@ -1975,7 +2161,7 @@ object desugar { * without duplicates */ private def getVariables(tree: Tree, shouldAddGiven: Context ?=> Bind => Boolean)(using Context): List[VarInfo] = { - val buf = ListBuffer[VarInfo]() + val buf = mutable.ListBuffer[VarInfo]() def seenName(name: Name) = buf exists (_._1.name == name) def add(named: NameTree, t: Tree): Unit = if (!seenName(named.name) && named.name.isTermName) buf += ((named, t)) diff --git a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala index 8ee75cbf364b..7bf83d548c97 100644 --- a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala +++ b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala @@ -12,11 +12,6 @@ import Annotations.Annotation object MainProxies { - /** Generate proxy classes for @main functions and @myMain functions where myMain <:< MainAnnotation */ - def proxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { - mainAnnotationProxies(stats) ++ mainProxies(stats) - } - /** Generate proxy classes for @main functions. * A function like * @@ -35,7 +30,7 @@ object MainProxies { * catch case err: ParseError => showError(err) * } */ - private def mainProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { + def proxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { import tpd.* def mainMethods(stats: List[Tree]): List[Symbol] = stats.flatMap { case stat: DefDef if stat.symbol.hasAnnotation(defn.MainAnnot) => @@ -127,323 +122,4 @@ object MainProxies { result } - private type DefaultValueSymbols = Map[Int, Symbol] - private type ParameterAnnotationss = Seq[Seq[Annotation]] - - /** - * Generate proxy classes for main functions. 
- * A function like - * - * /** - * * Lorem ipsum dolor sit amet - * * consectetur adipiscing elit. - * * - * * @param x my param x - * * @param ys all my params y - * */ - * @myMain(80) def f( - * @myMain.Alias("myX") x: S, - * y: S, - * ys: T* - * ) = ... - * - * would be translated to something like - * - * final class f { - * static def main(args: Array[String]): Unit = { - * val annotation = new myMain(80) - * val info = new Info( - * name = "f", - * documentation = "Lorem ipsum dolor sit amet consectetur adipiscing elit.", - * parameters = Seq( - * new scala.annotation.MainAnnotation.Parameter("x", "S", false, false, "my param x", Seq(new scala.main.Alias("myX"))), - * new scala.annotation.MainAnnotation.Parameter("y", "S", true, false, "", Seq()), - * new scala.annotation.MainAnnotation.Parameter("ys", "T", false, true, "all my params y", Seq()) - * ) - * ), - * val command = annotation.command(info, args) - * if command.isDefined then - * val cmd = command.get - * val args0: () => S = annotation.argGetter[S](info.parameters(0), cmd(0), None) - * val args1: () => S = annotation.argGetter[S](info.parameters(1), mainArgs(1), Some(() => sum$default$1())) - * val args2: () => Seq[T] = annotation.varargGetter[T](info.parameters(2), cmd.drop(2)) - * annotation.run(() => f(args0(), args1(), args2()*)) - * } - * } - */ - private def mainAnnotationProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { - import tpd.* - - /** - * Computes the symbols of the default values of the function. Since they cannot be inferred anymore at this - * point of the compilation, they must be explicitly passed by [[mainProxy]]. 
- */ - def defaultValueSymbols(scope: Tree, funSymbol: Symbol): DefaultValueSymbols = - scope match { - case TypeDef(_, template: Template) => - template.body.flatMap((_: Tree) match { - case dd: DefDef if dd.name.is(DefaultGetterName) && dd.name.firstPart == funSymbol.name => - val DefaultGetterName.NumberedInfo(index) = dd.name.info: @unchecked - List(index -> dd.symbol) - case _ => Nil - }).toMap - case _ => Map.empty - } - - /** Computes the list of main methods present in the code. */ - def mainMethods(scope: Tree, stats: List[Tree]): List[(Symbol, ParameterAnnotationss, DefaultValueSymbols, Option[Comment])] = stats.flatMap { - case stat: DefDef => - val sym = stat.symbol - sym.annotations.filter(_.matches(defn.MainAnnotationClass)) match { - case Nil => - Nil - case _ :: Nil => - val paramAnnotations = stat.paramss.flatMap(_.map( - valdef => valdef.symbol.annotations.filter(_.matches(defn.MainAnnotationParameterAnnotation)) - )) - (sym, paramAnnotations.toVector, defaultValueSymbols(scope, sym), stat.rawComment) :: Nil - case mainAnnot :: others => - report.error(em"method cannot have multiple main annotations", mainAnnot.tree) - Nil - } - case stat @ TypeDef(_, impl: Template) if stat.symbol.is(Module) => - mainMethods(stat, impl.body) - case _ => - Nil - } - - // Assuming that the top-level object was already generated, all main methods will have a scope - mainMethods(EmptyTree, stats).flatMap(mainAnnotationProxy) - } - - private def mainAnnotationProxy(mainFun: Symbol, paramAnnotations: ParameterAnnotationss, defaultValueSymbols: DefaultValueSymbols, docComment: Option[Comment])(using Context): Option[TypeDef] = { - val mainAnnot = mainFun.getAnnotation(defn.MainAnnotationClass).get - def pos = mainFun.sourcePos - - val documentation = new Documentation(docComment) - - /** () => value */ - def unitToValue(value: Tree): Tree = - val defDef = DefDef(nme.ANON_FUN, List(Nil), TypeTree(), value) - Block(defDef, Closure(Nil, Ident(nme.ANON_FUN), EmptyTree)) - - 
/** Generate a list of trees containing the ParamInfo instantiations. - * - * A ParamInfo has the following shape - * ``` - * new scala.annotation.MainAnnotation.Parameter("x", "S", false, false, "my param x", Seq(new scala.main.Alias("myX"))) - * ``` - */ - def parameterInfos(mt: MethodType): List[Tree] = - extension (tree: Tree) def withProperty(sym: Symbol, args: List[Tree]) = - Apply(Select(tree, sym.name), args) - - for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield - val param = paramName.toString - val paramType0 = if formal.isRepeatedParam then formal.argTypes.head.dealias else formal.dealias - val paramType = paramType0.dealias - val paramTypeOwner = paramType.typeSymbol.owner - val paramTypeStr = - if paramTypeOwner == defn.EmptyPackageClass then paramType.show - else paramTypeOwner.showFullName + "." + paramType.show - val hasDefault = defaultValueSymbols.contains(idx) - val isRepeated = formal.isRepeatedParam - val paramDoc = documentation.argDocs.getOrElse(param, "") - val paramAnnots = - val annotationTrees = paramAnnotations(idx).map(instantiateAnnotation).toList - Apply(ref(defn.SeqModule.termRef), annotationTrees) - - val constructorArgs = List(param, paramTypeStr, hasDefault, isRepeated, paramDoc) - .map(value => Literal(Constant(value))) - - New(TypeTree(defn.MainAnnotationParameter.typeRef), List(constructorArgs :+ paramAnnots)) - - end parameterInfos - - /** - * Creates a list of references and definitions of arguments. - * The goal is to create the - * `val args0: () => S = annotation.argGetter[S](0, cmd(0), None)` - * part of the code. 
- */ - def argValDefs(mt: MethodType): List[ValDef] = - for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield - val argName = nme.args ++ idx.toString - val isRepeated = formal.isRepeatedParam - val formalType = if isRepeated then formal.argTypes.head else formal - val getterName = if isRepeated then nme.varargGetter else nme.argGetter - val defaultValueGetterOpt = defaultValueSymbols.get(idx) match - case None => ref(defn.NoneModule.termRef) - case Some(dvSym) => - val value = unitToValue(ref(dvSym.termRef)) - Apply(ref(defn.SomeClass.companionModule.termRef), value) - val argGetter0 = TypeApply(Select(Ident(nme.annotation), getterName), TypeTree(formalType) :: Nil) - val index = Literal(Constant(idx)) - val paramInfo = Apply(Select(Ident(nme.info), nme.parameters), index) - val argGetter = - if isRepeated then Apply(argGetter0, List(paramInfo, Apply(Select(Ident(nme.cmd), nme.drop), List(index)))) - else Apply(argGetter0, List(paramInfo, Apply(Ident(nme.cmd), List(index)), defaultValueGetterOpt)) - ValDef(argName, TypeTree(), argGetter) - end argValDefs - - - /** Create a list of argument references that will be passed as argument to the main method. - * `args0`, ...`argn*` - */ - def argRefs(mt: MethodType): List[Tree] = - for ((formal, paramName), idx) <- mt.paramInfos.zip(mt.paramNames).zipWithIndex yield - val argRef = Apply(Ident(nme.args ++ idx.toString), Nil) - if formal.isRepeatedParam then repeated(argRef) else argRef - end argRefs - - - /** Turns an annotation (e.g. `@main(40)`) into an instance of the class (e.g. `new scala.main(40)`). 
*/ - def instantiateAnnotation(annot: Annotation): Tree = - val argss = { - def recurse(t: tpd.Tree, acc: List[List[Tree]]): List[List[Tree]] = t match { - case Apply(t, args: List[tpd.Tree]) => recurse(t, extractArgs(args) :: acc) - case _ => acc - } - - def extractArgs(args: List[tpd.Tree]): List[Tree] = - args.flatMap { - case Typed(SeqLiteral(varargs, _), _) => varargs.map(arg => TypedSplice(arg)) - case arg: Select if arg.name.is(DefaultGetterName) => Nil // Ignore default values, they will be added later by the compiler - case arg => List(TypedSplice(arg)) - } - - recurse(annot.tree, Nil) - } - - New(TypeTree(annot.symbol.typeRef), argss) - end instantiateAnnotation - - def generateMainClass(mainCall: Tree, args: List[Tree], parameterInfos: List[Tree]): TypeDef = - val cmdInfo = - val nameTree = Literal(Constant(mainFun.showName)) - val docTree = Literal(Constant(documentation.mainDoc)) - val paramInfos = Apply(ref(defn.SeqModule.termRef), parameterInfos) - New(TypeTree(defn.MainAnnotationInfo.typeRef), List(List(nameTree, docTree, paramInfos))) - - val annotVal = ValDef( - nme.annotation, - TypeTree(), - instantiateAnnotation(mainAnnot) - ) - val infoVal = ValDef( - nme.info, - TypeTree(), - cmdInfo - ) - val command = ValDef( - nme.command, - TypeTree(), - Apply( - Select(Ident(nme.annotation), nme.command), - List(Ident(nme.info), Ident(nme.args)) - ) - ) - val argsVal = ValDef( - nme.cmd, - TypeTree(), - Select(Ident(nme.command), nme.get) - ) - val run = Apply(Select(Ident(nme.annotation), nme.run), mainCall) - val body0 = If( - Select(Ident(nme.command), nme.isDefined), - Block(argsVal :: args, run), - EmptyTree - ) - val body = Block(List(annotVal, infoVal, command), body0) // TODO add `if (cmd.nonEmpty)` - - val mainArg = ValDef(nme.args, TypeTree(defn.ArrayType.appliedTo(defn.StringType)), EmptyTree) - .withFlags(Param) - /** Replace typed `Ident`s that have been typed with a TypeSplice with the reference to the symbol. 
- * The annotations will be retype-checked in another scope that may not have the same imports. - */ - def insertTypeSplices = new TreeMap { - override def transform(tree: Tree)(using Context): Tree = tree match - case tree: tpd.Ident @unchecked => TypedSplice(tree) - case tree => super.transform(tree) - } - val annots = mainFun.annotations - .filterNot(_.matches(defn.MainAnnotationClass)) - .map(annot => insertTypeSplices.transform(annot.tree)) - val mainMeth = DefDef(nme.main, (mainArg :: Nil) :: Nil, TypeTree(defn.UnitType), body) - .withFlags(JavaStatic) - .withAnnotations(annots) - val mainTempl = Template(emptyConstructor, Nil, Nil, EmptyValDef, mainMeth :: Nil) - val mainCls = TypeDef(mainFun.name.toTypeName, mainTempl) - .withFlags(Final | Invisible) - mainCls.withSpan(mainAnnot.tree.span.toSynthetic) - end generateMainClass - - if (!mainFun.owner.isStaticOwner) - report.error(em"main method is not statically accessible", pos) - None - else mainFun.info match { - case _: ExprType => - Some(generateMainClass(unitToValue(ref(mainFun.termRef)), Nil, Nil)) - case mt: MethodType => - if (mt.isImplicitMethod) - report.error(em"main method cannot have implicit parameters", pos) - None - else mt.resType match - case restpe: MethodType => - report.error(em"main method cannot be curried", pos) - None - case _ => - Some(generateMainClass(unitToValue(Apply(ref(mainFun.termRef), argRefs(mt))), argValDefs(mt), parameterInfos(mt))) - case _: PolyType => - report.error(em"main method cannot have type parameters", pos) - None - case _ => - report.error(em"main can only annotate a method", pos) - None - } - } - - /** A class responsible for extracting the docstrings of a method. */ - private class Documentation(docComment: Option[Comment]): - import util.CommentParsing.* - - /** The main part of the documentation. */ - lazy val mainDoc: String = _mainDoc - /** The parameters identified by @param. Maps from parameter name to its documentation. 
*/ - lazy val argDocs: Map[String, String] = _argDocs - - private var _mainDoc: String = "" - private var _argDocs: Map[String, String] = Map() - - docComment match { - case Some(comment) => if comment.isDocComment then parseDocComment(comment.raw) else _mainDoc = comment.raw - case None => - } - - private def cleanComment(raw: String): String = - var lines: Seq[String] = raw.trim.nn.split('\n').nn.toSeq - lines = lines.map(l => l.substring(skipLineLead(l, -1), l.length).nn.trim.nn) - var s = lines.foldLeft("") { - case ("", s2) => s2 - case (s1, "") if s1.last == '\n' => s1 // Multiple newlines are kept as single newlines - case (s1, "") => s1 + '\n' - case (s1, s2) if s1.last == '\n' => s1 + s2 - case (s1, s2) => s1 + ' ' + s2 - } - s.replaceAll(raw"\[\[", "").nn.replaceAll(raw"\]\]", "").nn.trim.nn - - private def parseDocComment(raw: String): Unit = - // Positions of the sections (@) in the docstring - val tidx: List[(Int, Int)] = tagIndex(raw) - - // Parse main comment - var mainComment: String = raw.substring(skipLineLead(raw, 0), startTag(raw, tidx)).nn - _mainDoc = cleanComment(mainComment) - - // Parse arguments comments - val argsCommentsSpans: Map[String, (Int, Int)] = paramDocs(raw, "@param", tidx) - val argsCommentsTextSpans = argsCommentsSpans.view.mapValues(extractSectionText(raw, _)) - val argsCommentsTexts = argsCommentsTextSpans.mapValues({ case (beg, end) => raw.substring(beg, end).nn }) - _argDocs = argsCommentsTexts.mapValues(cleanComment(_)).toMap - end Documentation } diff --git a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala index 2960af8fcdec..f83f12e1c027 100644 --- a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala +++ b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala @@ -4,7 +4,7 @@ package ast import core.Contexts.* import core.Decorators.* import util.Spans.* -import Trees.{MemberDef, DefTree, WithLazyFields} +import Trees.{Closure, MemberDef, DefTree, WithLazyFields} 
import dotty.tools.dotc.core.Types.AnnotatedType import dotty.tools.dotc.core.Types.ImportType import dotty.tools.dotc.core.Types.Type @@ -76,7 +76,7 @@ object NavigateAST { var bestFit: List[Positioned] = path while (it.hasNext) { val path1 = it.next() match { - case p: Positioned => singlePath(p, path) + case p: Positioned if !p.isInstanceOf[Closure[?]] => singlePath(p, path) case m: untpd.Modifiers => childPath(m.productIterator, path) case xs: List[?] => childPath(xs.iterator, path) case _ => path diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 28d3ef6daaef..97de434ba9d5 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -5,6 +5,8 @@ package ast import core.* import Flags.*, Trees.*, Types.*, Contexts.* import Names.*, StdNames.*, NameOps.*, Symbols.* +import Annotations.Annotation +import NameKinds.ContextBoundParamName import typer.ConstFold import reporting.trace @@ -108,6 +110,10 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => case _ => tree + def stripNamedArg(tree: Tree) = tree match + case NamedArg(_, arg) => arg + case _ => tree + /** The number of arguments in an application */ def numArgs(tree: Tree): Int = unsplice(tree) match { case Apply(fn, args) => numArgs(fn) + args.length @@ -376,6 +382,29 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => case _ => tree.tpe.isInstanceOf[ThisType] } + + /** Under x.modularity: Extractor for `annotation.internal.WitnessNames(name_1, ..., name_n)` + * represented as an untyped or typed tree. 
+ */ + object WitnessNamesAnnot: + def apply(names: List[TermName])(using Context): untpd.Tree = + untpd.TypedSplice(tpd.New( + defn.WitnessNamesAnnot.typeRef, + tpd.SeqLiteral(names.map(n => tpd.Literal(Constant(n.toString))), tpd.TypeTree(defn.StringType)) :: Nil + )) + + def unapply(tree: Tree)(using Context): Option[List[TermName]] = + unsplice(tree) match + case Apply(Select(New(tpt: tpd.TypeTree), nme.CONSTRUCTOR), SeqLiteral(elems, _) :: Nil) => + tpt.tpe match + case tp: TypeRef if tp.name == tpnme.WitnessNames && tp.symbol == defn.WitnessNamesAnnot => + Some: + elems.map: + case Literal(Constant(str: String)) => + ContextBoundParamName.unmangle(str.toTermName.asSimpleName) + case _ => None + case _ => None + end WitnessNamesAnnot } trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] => @@ -919,12 +948,12 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => else cpy.PackageDef(tree)(pid, slicedStats) :: Nil case tdef: TypeDef => val sym = tdef.symbol - assert(sym.isClass) + assert(sym.isClass || ctx.tolerateErrorsForBestEffort) if (cls == sym || cls == sym.linkedClass) tdef :: Nil else Nil case vdef: ValDef => val sym = vdef.symbol - assert(sym.is(Module)) + assert(sym.is(Module) || ctx.tolerateErrorsForBestEffort) if (cls == sym.companionClass || cls == sym.moduleClass) vdef :: Nil else Nil case tree => @@ -1105,7 +1134,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => case AndType(ref, nn1) if qual.tpe eq ref => qual.tpe.widen match case OrNull(nn2) if nn1 eq nn2 => - Some(qual) + Some(qual) case _ => None case _ => None case _ => None diff --git a/compiler/src/dotty/tools/dotc/ast/TreeMapWithTrackedStats.scala b/compiler/src/dotty/tools/dotc/ast/TreeMapWithTrackedStats.scala new file mode 100644 index 000000000000..b302a2463a4e --- /dev/null +++ b/compiler/src/dotty/tools/dotc/ast/TreeMapWithTrackedStats.scala @@ -0,0 +1,77 @@ +package dotty.tools.dotc +package ast + 
+import tpd.* +import core.Contexts.* +import core.Symbols.* +import util.Property + +import scala.collection.mutable + +/** + * It is safe to assume that the companion of a tree is in the same scope. + * Therefore, when expanding MacroAnnotations, we will only keep track of + * the trees in the same scope as the current transformed tree + */ +abstract class TreeMapWithTrackedStats extends TreeMapWithImplicits: + + import TreeMapWithTrackedStats.* + + /** Fetch the corresponding tracked tree for a given symbol */ + protected final def getTracked(sym: Symbol)(using Context): Option[MemberDef] = + for trees <- ctx.property(TrackedTrees) + tree <- trees.get(sym) + yield tree + + /** Update the tracked trees */ + protected final def updateTracked(tree: Tree)(using Context): Tree = + tree match + case tree: MemberDef => + trackedTrees.update(tree.symbol, tree) + tree + case _ => tree + end updateTracked + + /** Process a list of trees and give the priority to trakced trees */ + private final def withUpdatedTrackedTrees(stats: List[Tree])(using Context) = + val trackedTrees = TreeMapWithTrackedStats.trackedTrees + stats.mapConserve: + case tree: MemberDef if trackedTrees.contains(tree.symbol) => + trackedTrees(tree.symbol) + case stat => stat + + override def transform(tree: Tree)(using Context): Tree = + tree match + case PackageDef(_, stats) => + inContext(trackedDefinitionsCtx(stats)): // Step I: Collect and memoize all the definition trees + // Step II: Transform the tree + val pkg@PackageDef(pid, stats) = super.transform(tree): @unchecked + // Step III: Reconcile between the symbols in syms and the tree + cpy.PackageDef(pkg)(pid = pid, stats = withUpdatedTrackedTrees(stats)) + case block: Block => + inContext(trackedDefinitionsCtx(block.stats)): // Step I: Collect all the member definitions in the block + // Step II: Transform the tree + val b@Block(stats, expr) = super.transform(tree): @unchecked + // Step III: Reconcile between the symbols in syms and the tree + 
cpy.Block(b)(expr = expr, stats = withUpdatedTrackedTrees(stats)) + case TypeDef(_, impl: Template) => + inContext(trackedDefinitionsCtx(impl.body)): // Step I: Collect and memoize all the stats + // Step II: Transform the tree + val newTree@TypeDef(name, impl: Template) = super.transform(tree): @unchecked + // Step III: Reconcile between the symbols in syms and the tree + cpy.TypeDef(newTree)(rhs = cpy.Template(impl)(body = withUpdatedTrackedTrees(impl.body))) + case _ => super.transform(tree) + +end TreeMapWithTrackedStats + +object TreeMapWithTrackedStats: + private val TrackedTrees = new Property.Key[mutable.Map[Symbol, tpd.MemberDef]] + + /** Fetch the tracked trees in the cuurent context */ + private def trackedTrees(using Context): mutable.Map[Symbol, MemberDef] = + ctx.property(TrackedTrees).get + + /** Build a context and track the provided MemberDef trees */ + private def trackedDefinitionsCtx(stats: List[Tree])(using Context): Context = + val treesToTrack = stats.collect { case m: MemberDef => (m.symbol, m) } + ctx.fresh.setProperty(TrackedTrees, mutable.Map(treesToTrack*)) diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index 71b85d97a187..faace26de84d 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -47,7 +47,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case _: RefTree | _: GenericApply | _: Inlined | _: Hole => ta.assignType(untpd.Apply(fn, args), fn, args) case _ => - assert(ctx.reporter.errorsReported) + assert(ctx.reporter.errorsReported || ctx.tolerateErrorsForBestEffort) ta.assignType(untpd.Apply(fn, args), fn, args) def TypeApply(fn: Tree, args: List[Tree])(using Context): TypeApply = fn match @@ -56,7 +56,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case _: RefTree | _: GenericApply => ta.assignType(untpd.TypeApply(fn, args), fn, args) case _ => - assert(ctx.reporter.errorsReported, 
s"unexpected tree for type application: $fn") + assert(ctx.reporter.errorsReported || ctx.tolerateErrorsForBestEffort, s"unexpected tree for type application: $fn") ta.assignType(untpd.TypeApply(fn, args), fn, args) def Literal(const: Constant)(using Context): Literal = @@ -178,6 +178,12 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def Splice(expr: Tree, tpe: Type)(using Context): Splice = untpd.Splice(expr).withType(tpe) + def Splice(expr: Tree)(using Context): Splice = + ta.assignType(untpd.Splice(expr), expr) + + def SplicePattern(pat: Tree, args: List[Tree], tpe: Type)(using Context): SplicePattern = + untpd.SplicePattern(pat, args).withType(tpe) + def Hole(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree, tpe: Type)(using Context): Hole = untpd.Hole(isTerm, idx, args, content).withType(tpe) @@ -471,26 +477,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case ConstantType(value) => Literal(value) } - /** A path that corresponds to the given type `tp`. Error if `tp` is not a refinement - * of an addressable singleton type. - */ - def pathFor(tp: Type)(using Context): Tree = { - def recur(tp: Type): Tree = tp match { - case tp: NamedType => - tp.info match { - case TypeAlias(alias) => recur(alias) - case _: TypeBounds => EmptyTree - case _ => singleton(tp) - } - case tp: TypeProxy => recur(tp.superType) - case _ => EmptyTree - } - recur(tp).orElse { - report.error(em"$tp is not an addressable singleton type") - TypeTree(tp) - } - } - /** A tree representing a `newXYZArray` operation of the right * kind for the given element type in `elemTpe`. No type arguments or * `length` arguments are given. 
diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index 08f3db4981ff..64f9fb4df95e 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -107,7 +107,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def forwardTo: Tree = t } case class Tuple(trees: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree { - override def isTerm: Boolean = trees.isEmpty || trees.head.isTerm + override def isTerm: Boolean = trees.isEmpty || stripNamedArg(trees.head).isTerm override def isType: Boolean = !isTerm } case class Throw(expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree @@ -118,6 +118,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class ContextBounds(bounds: TypeBoundsTree, cxBounds: List[Tree])(implicit @constructorOnly src: SourceFile) extends TypTree case class PatDef(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(implicit @constructorOnly src: SourceFile) extends DefTree case class ExtMethods(paramss: List[ParamClause], methods: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree + case class ContextBoundTypeTree(tycon: Tree, paramName: TypeName, ownName: TermName)(implicit @constructorOnly src: SourceFile) extends Tree + // `paramName: tycon as ownName`, ownName != EmptyTermName only under x.modularity case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { @@ -230,6 +232,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class Infix()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Infix) + case class Tracked()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Tracked) + /** Used under pureFunctions to mark 
impure function types `A => B` in `FunctionWithMods` */ case class Impure()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Impure) } @@ -528,15 +532,15 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def makeSelfDef(name: TermName, tpt: Tree)(using Context): ValDef = ValDef(name, tpt, EmptyTree).withFlags(PrivateLocal) - def makeTupleOrParens(ts: List[Tree])(using Context): Tree = ts match { + def makeTupleOrParens(ts: List[Tree])(using Context): Tree = ts match + case (t: NamedArg) :: Nil => Tuple(t :: Nil) case t :: Nil => Parens(t) case _ => Tuple(ts) - } - def makeTuple(ts: List[Tree])(using Context): Tree = ts match { + def makeTuple(ts: List[Tree])(using Context): Tree = ts match + case (t: NamedArg) :: Nil => Tuple(t :: Nil) case t :: Nil => t case _ => Tuple(ts) - } def makeAndType(left: Tree, right: Tree)(using Context): AppliedTypeTree = AppliedTypeTree(ref(defn.andType.typeRef), left :: right :: Nil) @@ -675,6 +679,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def ExtMethods(tree: Tree)(paramss: List[ParamClause], methods: List[Tree])(using Context): Tree = tree match case tree: ExtMethods if (paramss eq tree.paramss) && (methods == tree.methods) => tree case _ => finalize(tree, untpd.ExtMethods(paramss, methods)(tree.source)) + def ContextBoundTypeTree(tree: Tree)(tycon: Tree, paramName: TypeName, ownName: TermName)(using Context): Tree = tree match + case tree: ContextBoundTypeTree if (tycon eq tree.tycon) && paramName == tree.paramName && ownName == tree.ownName => tree + case _ => finalize(tree, untpd.ContextBoundTypeTree(tycon, paramName, ownName)(tree.source)) def ImportSelector(tree: Tree)(imported: Ident, renamed: Tree, bound: Tree)(using Context): Tree = tree match { case tree: ImportSelector if (imported eq tree.imported) && (renamed eq tree.renamed) && (bound eq tree.bound) => tree case _ => finalize(tree, untpd.ImportSelector(imported, renamed, bound)(tree.source)) @@ -740,6 
+747,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { cpy.PatDef(tree)(mods, transform(pats), transform(tpt), transform(rhs)) case ExtMethods(paramss, methods) => cpy.ExtMethods(tree)(transformParamss(paramss), transformSub(methods)) + case ContextBoundTypeTree(tycon, paramName, ownName) => + cpy.ContextBoundTypeTree(tree)(transform(tycon), paramName, ownName) case ImportSelector(imported, renamed, bound) => cpy.ImportSelector(tree)(transformSub(imported), transform(renamed), transform(bound)) case Number(_, _) | TypedSplice(_) => @@ -795,6 +804,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { this(this(this(x, pats), tpt), rhs) case ExtMethods(paramss, methods) => this(paramss.foldLeft(x)(apply), methods) + case ContextBoundTypeTree(tycon, paramName, ownName) => + this(x, tycon) case ImportSelector(imported, renamed, bound) => this(this(this(x, imported), renamed), bound) case Number(_, _) => diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala index 7c75ed833945..5c9946f6134a 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureOps.scala @@ -150,7 +150,7 @@ extension (tp: Type) case tp @ CapturingType(parent, refs) => val pcs = getBoxed(parent) if tp.isBoxed then refs ++ pcs else pcs - case tp: TypeRef if tp.symbol.isAbstractType => CaptureSet.empty + case tp: TypeRef if tp.symbol.isAbstractOrParamType => CaptureSet.empty case tp: TypeProxy => getBoxed(tp.superType) case tp: AndType => getBoxed(tp.tp1) ** getBoxed(tp.tp2) case tp: OrType => getBoxed(tp.tp1) ++ getBoxed(tp.tp2) @@ -203,10 +203,6 @@ extension (tp: Type) case _ => false - def isCapabilityClassRef(using Context) = tp.dealiasKeepAnnots match - case _: TypeRef | _: AppliedType => tp.typeSymbol.hasAnnotation(defn.CapabilityAnnot) - case _ => false - /** Drop @retains annotations everywhere */ def dropAllRetains(using Context): Type = // 
TODO we should drop retains from inferred types before unpickling val tm = new TypeMap: @@ -289,7 +285,7 @@ extension (tp: Type) var ok = true def traverse(t: Type): Unit = if ok then - t match + t.dealias match case CapturingType(_, cs) if cs.isUniversal && variance <= 0 => ok = false case _ => @@ -445,6 +441,14 @@ extension (tp: AnnotatedType) case ann: CaptureAnnotation => ann.boxed case _ => false +/** Drop retains annotations in the type. */ +class CleanupRetains(using Context) extends TypeMap: + def apply(tp: Type): Type = + tp match + case AnnotatedType(tp, annot) if annot.symbol == defn.RetainsAnnot || annot.symbol == defn.RetainsByNameAnnot => + RetainingType(tp, Nil, byName = annot.symbol == defn.RetainsByNameAnnot) + case _ => mapOver(tp) + /** An extractor for `caps.reachCapability(ref)`, which is used to express a reach * capability as a tree in a @retains annotation. */ diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index de584797f154..a5bb8792af2c 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -20,7 +20,7 @@ import Recheck.* import scala.collection.mutable import CaptureSet.{withCaptureSetsExplained, IdempotentCaptRefMap, CompareResult} import StdNames.nme -import NameKinds.{DefaultGetterName, WildcardParamName} +import NameKinds.{DefaultGetterName, WildcardParamName, UniqueNameKind} import reporting.trace /** The capture checker */ @@ -249,6 +249,44 @@ class CheckCaptures extends Recheck, SymTransformer: else i"references $cs1$cs1description are not all", pos, provenance) + def showRef(ref: CaptureRef)(using Context): String = + ctx.printer.toTextCaptureRef(ref).show + + // Uses 4-space indent as a trial + def checkReachCapsIsolated(tpe: Type, pos: SrcPos)(using Context): Unit = + + object checker extends TypeTraverser: + var refVariances: Map[Boolean, Int] = Map.empty + var seenReach: CaptureRef 
| Null = null + def traverse(tp: Type) = + tp.dealias match + case CapturingType(parent, refs) => + traverse(parent) + for ref <- refs.elems do + if ref.isReach && !ref.stripReach.isInstanceOf[TermParamRef] + || ref.isRootCapability + then + val isReach = ref.isReach + def register() = + refVariances = refVariances.updated(isReach, variance) + seenReach = ref + refVariances.get(isReach) match + case None => register() + case Some(v) => if v != 0 && variance == 0 then register() + case _ => + traverseChildren(tp) + + checker.traverse(tpe) + if checker.refVariances.size == 2 + && checker.refVariances(true) >= 0 + && checker.refVariances(false) <= 0 + then + report.error( + em"""Reach capability ${showRef(checker.seenReach.nn)} and universal capability cap cannot both + |appear in the type $tpe of this expression""", + pos) + end checkReachCapsIsolated + /** The current environment */ private val rootEnv: Env = inContext(ictx): Env(defn.RootClass, EnvKind.Regular, CaptureSet.empty, null) @@ -320,9 +358,17 @@ class CheckCaptures extends Recheck, SymTransformer: def markFree(cs: CaptureSet, pos: SrcPos)(using Context): Unit = if !cs.isAlwaysEmpty then forallOuterEnvsUpTo(ctx.owner.topLevelClass): env => - def isVisibleFromEnv(sym: Symbol) = - (env.kind == EnvKind.NestedInOwner || env.owner != sym) - && env.owner.isContainedIn(sym) + // Whether a symbol is defined inside the owner of the environment? + inline def isContainedInEnv(sym: Symbol) = + if env.kind == EnvKind.NestedInOwner then + sym.isProperlyContainedIn(env.owner) + else + sym.isContainedIn(env.owner) + // A captured reference with the symbol `sym` is visible from the environment + // if `sym` is not defined inside the owner of the environment + inline def isVisibleFromEnv(sym: Symbol) = !isContainedInEnv(sym) + // Only captured references that are visible from the environment + // should be included. 
val included = cs.filter: case ref: TermRef => isVisibleFromEnv(ref.symbol.owner) case ref: ThisType => isVisibleFromEnv(ref.cls) @@ -340,6 +386,7 @@ class CheckCaptures extends Recheck, SymTransformer: // there won't be an apply; need to include call captures now includeCallCaptures(tree.symbol, tree.srcPos) else + //debugShowEnvs() markFree(tree.symbol, tree.srcPos) super.recheckIdent(tree, pt) @@ -490,7 +537,8 @@ class CheckCaptures extends Recheck, SymTransformer: */ def addParamArgRefinements(core: Type, initCs: CaptureSet): (Type, CaptureSet) = var refined: Type = core - var allCaptures: CaptureSet = initCs + var allCaptures: CaptureSet = if setup.isCapabilityClassRef(core) + then CaptureSet.universal else initCs for (getterName, argType) <- mt.paramNames.lazyZip(argTypes) do val getter = cls.info.member(getterName).suchThat(_.is(ParamAccessor)).symbol if getter.termRef.isTracked && !getter.is(Private) then @@ -779,8 +827,10 @@ class CheckCaptures extends Recheck, SymTransformer: report.error(ex.getMessage.nn) tree.tpe finally curEnv = saved - if tree.isTerm && !pt.isBoxedCapturing then - markFree(res.boxedCaptureSet, tree.srcPos) + if tree.isTerm then + checkReachCapsIsolated(res.widen, tree.srcPos) + if !pt.isBoxedCapturing then + markFree(res.boxedCaptureSet, tree.srcPos) res override def recheckFinish(tpe: Type, tree: Tree, pt: Type)(using Context): Type = @@ -906,6 +956,19 @@ class CheckCaptures extends Recheck, SymTransformer: expected end addOuterRefs + /** A debugging method for showing the envrionments during capture checking. 
*/ + private def debugShowEnvs()(using Context): Unit = + def showEnv(env: Env): String = i"Env(${env.owner}, ${env.kind}, ${env.captured})" + val sb = StringBuilder() + @annotation.tailrec def walk(env: Env | Null): Unit = + if env != null then + sb ++= showEnv(env) + sb ++= "\n" + walk(env.outer0) + sb ++= "===== Current Envs ======\n" + walk(curEnv) + sb ++= "===== End ======\n" + println(sb.result()) /** Adapt `actual` type to `expected` type by inserting boxing and unboxing conversions * @@ -1045,6 +1108,7 @@ class CheckCaptures extends Recheck, SymTransformer: pos) } if !insertBox then // unboxing + //debugShowEnvs() markFree(criticalSet, pos) adaptedType(!boxed) else @@ -1248,10 +1312,14 @@ class CheckCaptures extends Recheck, SymTransformer: val added = widened.filter(isAllowed(_)) capt.println(i"heal $ref in $cs by widening to $added") if !added.subCaptures(cs, frozen = false).isOK then - val location = if meth.exists then i" of $meth" else "" + val location = if meth.exists then i" of ${meth.showLocated}" else "" + val paramInfo = + if ref.paramName.info.kind.isInstanceOf[UniqueNameKind] + then i"${ref.paramName} from ${ref.binder}" + else i"${ref.paramName}" val debugSetInfo = if ctx.settings.YccDebug.value then i" $cs" else "" report.error( - i"local reference ${ref.paramName} leaks into outer capture set$debugSetInfo of type parameter $paramName$location", + i"local reference $paramInfo leaks into outer capture set$debugSetInfo of type parameter $paramName$location", tree.srcPos) else widened.elems.foreach(recur) diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index 9ab41859f170..e6953dbf67b7 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -23,6 +23,7 @@ trait SetupAPI: def setupUnit(tree: Tree, recheckDef: DefRecheck)(using Context): Unit def isPreCC(sym: Symbol)(using Context): Boolean def postCheck()(using Context): Unit + def 
isCapabilityClassRef(tp: Type)(using Context): Boolean object Setup: @@ -67,6 +68,31 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: && !sym.owner.is(CaptureChecked) && !defn.isFunctionSymbol(sym.owner) + private val capabilityClassMap = new util.HashMap[Symbol, Boolean] + + /** Check if the class is capability, which means: + * 1. the class has a capability annotation, + * 2. or at least one of its parent type has universal capability. + */ + def isCapabilityClassRef(tp: Type)(using Context): Boolean = tp.dealiasKeepAnnots match + case _: TypeRef | _: AppliedType => + val sym = tp.classSymbol + def checkSym: Boolean = + sym.hasAnnotation(defn.CapabilityAnnot) + || sym.info.parents.exists(hasUniversalCapability) + sym.isClass && capabilityClassMap.getOrElseUpdate(sym, checkSym) + case _ => false + + private def hasUniversalCapability(tp: Type)(using Context): Boolean = tp.dealiasKeepAnnots match + case CapturingType(parent, refs) => + refs.isUniversal || hasUniversalCapability(parent) + case AnnotatedType(parent, ann) => + if ann.symbol.isRetains then + try ann.tree.toCaptureSet.isUniversal || hasUniversalCapability(parent) + catch case ex: IllegalCaptureRef => false + else hasUniversalCapability(parent) + case tp => isCapabilityClassRef(tp) + private def fluidify(using Context) = new TypeMap with IdempotentCaptRefMap: def apply(t: Type): Type = t match case t: MethodType => @@ -269,12 +295,6 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: CapturingType(fntpe, cs, boxed = false) else fntpe - /** Map references to capability classes C to C^ */ - private def expandCapabilityClass(tp: Type): Type = - if tp.isCapabilityClassRef - then CapturingType(tp, defn.expandedUniversalSet, boxed = false) - else tp - private def recur(t: Type): Type = normalizeCaptures(mapOver(t)) def apply(t: Type) = @@ -297,7 +317,8 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case t: TypeVar => this(t.underlying) case t => - if t.isCapabilityClassRef + 
// Map references to capability classes C to C^ + if isCapabilityClassRef(t) then CapturingType(t, defn.expandedUniversalSet, boxed = false) else recur(t) end expandAliases @@ -369,12 +390,16 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: def setupTraverser(recheckDef: DefRecheck) = new TreeTraverserWithPreciseImportContexts: def transformResultType(tpt: TypeTree, sym: Symbol)(using Context): Unit = - transformTT(tpt, - boxed = !ccConfig.allowUniversalInBoxed && sym.is(Mutable, butNot = Method), - // types of mutable variables are boxed in pre 3.3 codee - exact = sym.allOverriddenSymbols.hasNext, - // types of symbols that override a parent don't get a capture set TODO drop - ) + try + transformTT(tpt, + boxed = !ccConfig.allowUniversalInBoxed && sym.is(Mutable, butNot = Method), + // types of mutable variables are boxed in pre 3.3 codee + exact = sym.allOverriddenSymbols.hasNext, + // types of symbols that override a parent don't get a capture set TODO drop + ) + catch case ex: IllegalCaptureRef => + capt.println(i"fail while transforming result type $tpt of $sym") + throw ex val addDescription = new TypeTraverser: def traverse(tp: Type) = tp match case tp @ CapturingType(parent, refs) => @@ -407,8 +432,12 @@ class Setup extends PreRecheck, SymTransformer, SetupAPI: case tree @ TypeApply(fn, args) => traverse(fn) - for case arg: TypeTree <- args do - transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed + fn match + case Select(qual, nme.asInstanceOf_) => + // No need to box type arguments of an asInstanceOf call. See #20224. 
+ case _ => + for case arg: TypeTree <- args do + transformTT(arg, boxed = true, exact = false) // type arguments in type applications are boxed case tree: TypeDef if tree.symbol.isClass => inContext(ctx.withOwner(tree.symbol)): diff --git a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala index 4c5b632bf6ab..cd44ba27df96 100644 --- a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala @@ -33,25 +33,6 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { packageIndex.getOrElseUpdate(pkg.dottedString, aggregates.filter(_.hasPackage(pkg))) } - override def findClass(className: String): Option[ClassRepresentation] = { - val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) - - def findEntry(isSource: Boolean): Option[ClassRepresentation] = - aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClass(className)).collectFirst { - case Some(s: SourceFileEntry) if isSource => s - case Some(s: BinaryFileEntry) if !isSource => s - } - - val classEntry = findEntry(isSource = false) - val sourceEntry = findEntry(isSource = true) - - (classEntry, sourceEntry) match { - case (Some(c: BinaryFileEntry), Some(s: SourceFileEntry)) => Some(BinaryAndSourceFilesEntry(c, s)) - case (c @ Some(_), _) => c - case (_, s) => s - } - } - override def asURLs: Seq[URL] = aggregates.flatMap(_.asURLs) override def asClassPathStrings: Seq[String] = aggregates.map(_.asClassPathString).distinct diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala index 3210c6221a78..5f545e1b93a5 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala @@ -6,6 +6,7 @@ package dotty.tools.dotc.classpath import dotty.tools.dotc.classpath.FileUtils.isTasty import 
dotty.tools.io.AbstractFile import dotty.tools.io.ClassRepresentation +import dotty.tools.io.FileExtension case class ClassPathEntries(packages: scala.collection.Seq[PackageEntry], classesAndSources: scala.collection.Seq[ClassRepresentation]) { def toTuple: (scala.collection.Seq[PackageEntry], scala.collection.Seq[ClassRepresentation]) = (packages, classesAndSources) @@ -52,7 +53,7 @@ sealed trait BinaryFileEntry extends ClassRepresentation { object BinaryFileEntry { def apply(file: AbstractFile): BinaryFileEntry = if file.isTasty then - if file.resolveSiblingWithExtension("class") != null then TastyWithClassFileEntry(file) + if file.resolveSiblingWithExtension(FileExtension.Class) != null then TastyWithClassFileEntry(file) else StandaloneTastyFileEntry(file) else ClassFileEntry(file) diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala index 0b66f339bf53..080f8d4e63d2 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala @@ -7,6 +7,7 @@ import dotty.tools.io.{AbstractFile, VirtualDirectory} import FileUtils.* import dotty.tools.io.ClassPath import dotty.tools.dotc.core.Contexts.* +import java.nio.file.Files /** * Provides factory methods for classpath. 
When creating classpath instances for a given path, @@ -52,14 +53,30 @@ class ClassPathFactory { // Internal protected def classesInPathImpl(path: String, expand: Boolean)(using Context): List[ClassPath] = - for { + val files = for { file <- expandPath(path, expand) dir <- { def asImage = if (file.endsWith(".jimage")) Some(AbstractFile.getFile(file)) else None Option(AbstractFile.getDirectory(file)).orElse(asImage) } } - yield newClassPath(dir) + yield dir + + val expanded = + if scala.util.Properties.propOrFalse("scala.expandjavacp") then + for + file <- files + a <- ClassPath.expandManifestPath(file.absolutePath) + path = java.nio.file.Paths.get(a.toURI()).nn + if Files.exists(path) + yield + newClassPath(AbstractFile.getFile(path)) + else + Seq.empty + + files.map(newClassPath) ++ expanded + + end classesInPathImpl private def createSourcePath(file: AbstractFile)(using Context): ClassPath = if (file.isJarOrZip) diff --git a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala index c5b267bc774d..aed5be45cb0d 100644 --- a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala @@ -274,23 +274,18 @@ final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends Clas } case class DirectoryClassPath(dir: JFile) extends JFileDirectoryLookup[BinaryFileEntry] with NoSourcePaths { - override def findClass(className: String): Option[ClassRepresentation] = - findClassFile(className).map(BinaryFileEntry(_)) def findClassFile(className: String): Option[AbstractFile] = { val relativePath = FileUtils.dirPath(className) - val tastyFile = new JFile(dir, relativePath + ".tasty") - if tastyFile.exists then Some(tastyFile.toPath.toPlainFile) - else - val classFile = new JFile(dir, relativePath + ".class") - if classFile.exists then Some(classFile.toPath.toPlainFile) - else None + val classFile = new JFile(dir, 
relativePath + ".class") + if classFile.exists then Some(classFile.toPath.toPlainFile) + else None } protected def createFileEntry(file: AbstractFile): BinaryFileEntry = BinaryFileEntry(file) protected def isMatchingFile(f: JFile): Boolean = - f.isTasty || (f.isClass && f.classToTasty.isEmpty) + f.isTasty || f.isBestEffortTasty || (f.isClass && !f.hasSiblingTasty) private[dotty] def classes(inPackage: PackageName): Seq[BinaryFileEntry] = files(inPackage) } @@ -301,16 +296,5 @@ case class DirectorySourcePath(dir: JFile) extends JFileDirectoryLookup[SourceFi protected def createFileEntry(file: AbstractFile): SourceFileEntry = SourceFileEntry(file) protected def isMatchingFile(f: JFile): Boolean = endsScalaOrJava(f.getName) - override def findClass(className: String): Option[ClassRepresentation] = findSourceFile(className).map(SourceFileEntry(_)) - - private def findSourceFile(className: String): Option[AbstractFile] = { - val relativePath = FileUtils.dirPath(className) - val sourceFile = LazyList("scala", "java") - .map(ext => new JFile(dir, relativePath + "." 
+ ext)) - .collectFirst { case file if file.exists() => file } - - sourceFile.map(_.toPath.toPlainFile) - } - private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = files(inPackage) } diff --git a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala index b8cb9a2155dc..4fe57a722780 100644 --- a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala +++ b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala @@ -17,21 +17,24 @@ object FileUtils { extension (file: AbstractFile) { def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name) - def isClass: Boolean = !file.isDirectory && hasClassExtension && !file.name.endsWith("$class.class") - // FIXME: drop last condition when we stop being compatible with Scala 2.11 + def isClass: Boolean = !file.isDirectory && hasClassExtension - def hasClassExtension: Boolean = file.hasExtension("class") + def hasClassExtension: Boolean = file.ext.isClass - def hasTastyExtension: Boolean = file.hasExtension("tasty") + def hasTastyExtension: Boolean = file.ext.isTasty + + def hasBetastyExtension: Boolean = file.ext.isBetasty def isTasty: Boolean = !file.isDirectory && hasTastyExtension + def isBestEffortTasty: Boolean = !file.isDirectory && hasBetastyExtension + def isScalaBinary: Boolean = file.isClass || file.isTasty - def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java")) + def isScalaOrJavaSource: Boolean = !file.isDirectory && file.ext.isScalaOrJava // TODO do we need to check also other files using ZipMagicNumber like in scala.tools.nsc.io.Jar.isJarOrZip? 
- def isJarOrZip: Boolean = file.hasExtension("jar") || file.hasExtension("zip") + def isJarOrZip: Boolean = file.ext.isJarOrZip /** * Safe method returning a sequence containing one URL representing this file, when underlying file exists, @@ -39,33 +42,41 @@ object FileUtils { */ def toURLs(default: => Seq[URL] = Seq.empty): Seq[URL] = if (file.file == null) default else Seq(file.toURL) - /** Returns the tasty file associated with this class file */ - def classToTasty: Option[AbstractFile] = - assert(file.isClass, s"non-class: $file") - val tastyName = classNameToTasty(file.name) - Option(file.resolveSibling(tastyName)) + /** + * Returns if there is an existing sibling `.tasty` file. + */ + def hasSiblingTasty: Boolean = + assert(file.hasClassExtension, s"non-class: $file") + file.resolveSibling(classNameToTasty(file.name)) != null } extension (file: JFile) { def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.getName) - def isClass: Boolean = file.isFile && file.getName.endsWith(SUFFIX_CLASS) && !file.getName.endsWith("$class.class") - // FIXME: drop last condition when we stop being compatible with Scala 2.11 + def isClass: Boolean = file.isFile && hasClassExtension + + def hasClassExtension: Boolean = file.getName.endsWith(SUFFIX_CLASS) def isTasty: Boolean = file.isFile && file.getName.endsWith(SUFFIX_TASTY) - /** Returns the tasty file associated with this class file */ - def classToTasty: Option[JFile] = - assert(file.isClass, s"non-class: $file") - val tastyName = classNameToTasty(file.getName.stripSuffix(".class")) - val tastyPath = file.toPath.resolveSibling(tastyName) - if java.nio.file.Files.exists(tastyPath) then Some(tastyPath.toFile) else None + def isBestEffortTasty: Boolean = file.isFile && file.getName.endsWith(SUFFIX_BETASTY) + + + /** + * Returns if there is an existing sibling `.tasty` file. 
+ */ + def hasSiblingTasty: Boolean = + assert(file.hasClassExtension, s"non-class: $file") + val path = file.toPath + val tastyPath = path.resolveSibling(classNameToTasty(file.getName)) + java.nio.file.Files.exists(tastyPath) } private val SUFFIX_CLASS = ".class" private val SUFFIX_SCALA = ".scala" private val SUFFIX_TASTY = ".tasty" + private val SUFFIX_BETASTY = ".betasty" private val SUFFIX_JAVA = ".java" private val SUFFIX_SIG = ".sig" diff --git a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala index 93583c85fff7..0616d6c14ba6 100644 --- a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala @@ -38,16 +38,12 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi def asURLs: Seq[URL] = Seq(new URI(dir.name).toURL) def asClassPathStrings: Seq[String] = Seq(dir.path) - override def findClass(className: String): Option[ClassRepresentation] = - findClassFile(className).map(BinaryFileEntry(_)) - def findClassFile(className: String): Option[AbstractFile] = { val pathSeq = FileUtils.dirPath(className).split(java.io.File.separator) val parentDir = lookupPath(dir)(pathSeq.init.toSeq, directory = true) - if parentDir == null then return None + if parentDir == null then None else - Option(lookupPath(parentDir)(pathSeq.last + ".tasty" :: Nil, directory = false)) - .orElse(Option(lookupPath(parentDir)(pathSeq.last + ".class" :: Nil, directory = false))) + Option(lookupPath(parentDir)(pathSeq.last + ".class" :: Nil, directory = false)) } private[dotty] def classes(inPackage: PackageName): Seq[BinaryFileEntry] = files(inPackage) @@ -55,5 +51,5 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi protected def createFileEntry(file: AbstractFile): BinaryFileEntry = BinaryFileEntry(file) protected def isMatchingFile(f: 
AbstractFile): Boolean = - f.isTasty || (f.isClass && f.classToTasty.isEmpty) + f.isTasty || (f.isClass && !f.hasSiblingTasty) } diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala index dac156c5f647..d5473e6b26c3 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala @@ -45,21 +45,15 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { with NoSourcePaths { override def findClassFile(className: String): Option[AbstractFile] = - findClass(className).map(_.file) - - // This method is performance sensitive as it is used by SBT's ExtractDependencies phase. - override def findClass(className: String): Option[BinaryFileEntry] = { val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) - val binaries = files(PackageName(pkg), simpleClassName + ".tasty", simpleClassName + ".class") - binaries.find(_.file.isTasty).orElse(binaries.find(_.file.isClass)) - } + file(PackageName(pkg), simpleClassName + ".class").map(_.file) override private[dotty] def classes(inPackage: PackageName): Seq[BinaryFileEntry] = files(inPackage) override protected def createFileEntry(file: FileZipArchive#Entry): BinaryFileEntry = BinaryFileEntry(file) override protected def isRequiredFileType(file: AbstractFile): Boolean = - file.isTasty || (file.isClass && file.classToTasty.isEmpty) + file.isTasty || (file.isClass && !file.hasSiblingTasty) } /** diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala index ca8636e3884f..4595f7978999 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala @@ -43,15 +43,6 @@ trait ZipArchiveFileLookup[FileEntryType <: 
ClassRepresentation] extends Efficie } yield createFileEntry(entry) - protected def files(inPackage: PackageName, names: String*): Seq[FileEntryType] = - for { - dirEntry <- findDirEntry(inPackage).toSeq - name <- names - entry <- Option(dirEntry.lookupName(name, directory = false)) - if isRequiredFileType(entry) - } - yield createFileEntry(entry) - protected def file(inPackage: PackageName, name: String): Option[FileEntryType] = for { dirEntry <- findDirEntry(inPackage) diff --git a/compiler/src/dotty/tools/dotc/config/CliCommand.scala b/compiler/src/dotty/tools/dotc/config/CliCommand.scala index be97297218fa..5ac6b772df95 100644 --- a/compiler/src/dotty/tools/dotc/config/CliCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CliCommand.scala @@ -60,7 +60,8 @@ trait CliCommand: def defaultValue = s.default match case _: Int | _: String => s.default.toString case _ => "" - val info = List(shortHelp(s), if defaultValue.nonEmpty then s"Default $defaultValue" else "", if s.legalChoices.nonEmpty then s"Choices : ${s.legalChoices}" else "") + val deprecationMessage = s.deprecation.map(d => s"Option deprecated.\n${d.msg}").getOrElse("") + val info = List(deprecationMessage, shortHelp(s), if defaultValue.nonEmpty then s"Default $defaultValue" else "", if s.legalChoices.nonEmpty then s"Choices : ${s.legalChoices}" else "") (s.name, info.filter(_.nonEmpty).mkString("\n")) end help diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index 2746476261e5..ee8ed4b215d7 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -235,4 +235,12 @@ object Config { */ inline val checkLevelsOnConstraints = false inline val checkLevelsOnInstantiation = true + + /** Under x.modularity: + * If a type parameter `X` has a single context bound `X: C`, should the + * witness parameter be named `X`? This would prevent the creation of a + * context bound companion. 
+ */ + inline val nameSingleContextBounds = false } + diff --git a/compiler/src/dotty/tools/dotc/config/Feature.scala b/compiler/src/dotty/tools/dotc/config/Feature.scala index 7eb95badd4d0..0d551094da4d 100644 --- a/compiler/src/dotty/tools/dotc/config/Feature.scala +++ b/compiler/src/dotty/tools/dotc/config/Feature.scala @@ -10,6 +10,7 @@ import util.{SrcPos, NoSourcePosition} import SourceVersion.* import reporting.Message import NameKinds.QualifiedName +import Annotations.ExperimentalAnnotation object Feature: @@ -32,6 +33,14 @@ object Feature: val pureFunctions = experimental("pureFunctions") val captureChecking = experimental("captureChecking") val into = experimental("into") + val namedTuples = experimental("namedTuples") + val modularity = experimental("modularity") + val betterMatchTypeExtractors = experimental("betterMatchTypeExtractors") + + def experimentalAutoEnableFeatures(using Context): List[TermName] = + defn.languageExperimentalFeatures + .map(sym => experimental(sym.name)) + .filterNot(_ == captureChecking) // TODO is this correct? /** Is `feature` enabled by by a command-line setting? The enabling setting is * @@ -81,6 +90,8 @@ object Feature: def scala2ExperimentalMacroEnabled(using Context) = enabled(scala2macros) + def betterMatchTypeExtractorsEnabled(using Context) = enabled(betterMatchTypeExtractors) + /** Is pureFunctions enabled for this compilation unit? */ def pureFunsEnabled(using Context) = enabledBySetting(pureFunctions) @@ -131,12 +142,7 @@ object Feature: def checkExperimentalFeature(which: String, srcPos: SrcPos, note: => String = "")(using Context) = if !isExperimentalEnabled then - report.error( - em"""Experimental $which may only be used under experimental mode: - | 1. in a definition marked as @experimental, or - | 2. compiling with the -experimental compiler flag, or - | 3. 
with a nightly or snapshot version of the compiler.$note - """, srcPos) + report.error(experimentalUseSite(which) + note, srcPos) private def ccException(sym: Symbol)(using Context): Boolean = ccEnabled && defn.ccExperimental.contains(sym) @@ -146,21 +152,34 @@ object Feature: if sym.hasAnnotation(defn.ExperimentalAnnot) then sym else if sym.owner.hasAnnotation(defn.ExperimentalAnnot) then sym.owner else NoSymbol - if !ccException(experimentalSym) then - val note = + if !isExperimentalEnabled && !ccException(experimentalSym) then + val msg = + experimentalSym.getAnnotation(defn.ExperimentalAnnot).map { + case ExperimentalAnnotation(msg) if msg.nonEmpty => s": $msg" + case _ => "" + }.getOrElse("") + val markedExperimental = if experimentalSym.exists - then i"$experimentalSym is marked @experimental" - else i"$sym inherits @experimental" - checkExperimentalFeature("definition", srcPos, s"\n\n$note") + then i"$experimentalSym is marked @experimental$msg" + else i"$sym inherits @experimental$msg" + report.error(markedExperimental + "\n\n" + experimentalUseSite("definition"), srcPos) - /** Check that experimental compiler options are only set for snapshot or nightly compiler versions. */ - def checkExperimentalSettings(using Context): Unit = - for setting <- ctx.settings.language.value - if setting.startsWith("experimental.") && setting != "experimental.macros" - do checkExperimentalFeature(s"feature $setting", NoSourcePosition) + private def experimentalUseSite(which: String): String = + s"""Experimental $which may only be used under experimental mode: + | 1. in a definition marked as @experimental, or + | 2. an experimental feature is imported at the package level, or + | 3. compiling with the -experimental compiler flag. 
+ |""".stripMargin def isExperimentalEnabled(using Context): Boolean = - (Properties.unstableExperimentalEnabled && !ctx.settings.YnoExperimental.value) || ctx.settings.experimental.value + ctx.settings.experimental.value || + experimentalAutoEnableFeatures.exists(enabled) + + def experimentalEnabledByLanguageSetting(using Context): Option[TermName] = + experimentalAutoEnableFeatures.find(enabledBySetting) + + def isExperimentalEnabledByImport(using Context): Boolean = + experimentalAutoEnableFeatures.exists(enabledByImport) /** Handle language import `import language..` if it is one * of the global imports `pureFunctions` or `captureChecking`. In this case diff --git a/compiler/src/dotty/tools/dotc/config/Properties.scala b/compiler/src/dotty/tools/dotc/config/Properties.scala index 3392882057e7..2a362a707ade 100644 --- a/compiler/src/dotty/tools/dotc/config/Properties.scala +++ b/compiler/src/dotty/tools/dotc/config/Properties.scala @@ -84,12 +84,6 @@ trait PropertiesTrait { */ val versionString: String = "version " + simpleVersionString - /** Whether the current version of compiler is experimental - * - * Snapshot, nightly releases and non-bootstrapped compiler are experimental. - */ - val unstableExperimentalEnabled: Boolean = versionString.contains("SNAPSHOT") || versionString.contains("NIGHTLY") || versionString.contains("nonbootstrapped") - /** Whether the current version of compiler supports research plugins. 
*/ val researchPluginEnabled: Boolean = versionString.contains("SNAPSHOT") || versionString.contains("NIGHTLY") || versionString.contains("nonbootstrapped") diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 687adfe05ca7..86b657ddf00d 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -3,7 +3,7 @@ package config import scala.language.unsafeNulls import dotty.tools.dotc.config.PathResolver.Defaults -import dotty.tools.dotc.config.Settings.{Setting, SettingGroup, SettingCategory} +import dotty.tools.dotc.config.Settings.{Setting, SettingGroup, SettingCategory, Deprecation} import dotty.tools.dotc.config.SourceVersion import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.rewrites.Rewrites @@ -31,7 +31,7 @@ object ScalaSettings extends ScalaSettings // Kept as seperate type to avoid breaking backward compatibility abstract class ScalaSettings extends SettingGroup, AllScalaSettings: - val settingsByCategory: Map[SettingCategory, List[Setting[_]]] = + val settingsByCategory: Map[SettingCategory, List[Setting[_]]] = allSettings.groupBy(_.category) .view.mapValues(_.toList).toMap .withDefaultValue(Nil) @@ -43,7 +43,7 @@ abstract class ScalaSettings extends SettingGroup, AllScalaSettings: val verboseSettings: List[Setting[_]] = settingsByCategory(VerboseSetting).sortBy(_.name) val settingsByAliases: Map[String, Setting[_]] = allSettings.flatMap(s => s.aliases.map(_ -> s)).toMap - + trait AllScalaSettings extends CommonScalaSettings, PluginSettings, VerboseSettings, WarningSettings, XSettings, YSettings: self: SettingGroup => @@ -247,6 +247,9 @@ private sealed trait WarningSettings: | - Message name: name=PureExpressionInStatementPosition | The message name is printed with the warning in verbose warning mode. | + | - Source location: src=regex + | The regex is evaluated against the full source path. 
+ | |In verbose warning mode the compiler prints matching filters for warnings. |Verbose mode can be enabled globally using `-Wconf:any:verbose`, or locally |using the @nowarn annotation (example: `@nowarn("v") def test = try 1`). @@ -266,6 +269,7 @@ private sealed trait WarningSettings: |Examples: | - change every warning into an error: -Wconf:any:error | - silence deprecations: -Wconf:cat=deprecation:s + | - silence warnings in src_managed directory: -Wconf:src=src_managed/.*:s | |Note: on the command-line you might need to quote configurations containing `*` or `&` |to prevent the shell from expanding patterns.""".stripMargin, @@ -292,6 +296,7 @@ private sealed trait WarningSettings: def typeParameterShadow(using Context) = allOr("type-parameter-shadow") + val WcheckInit: Setting[Boolean] = BooleanSetting(WarningSetting, "Wsafe-init", "Ensure safe initialization of objects.") /** -X "Extended" or "Advanced" settings */ private sealed trait XSettings: @@ -318,6 +323,27 @@ private sealed trait XSettings: val XmainClass: Setting[String] = StringSetting(AdvancedSetting, "Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d )", "") val XimplicitSearchLimit: Setting[Int] = IntSetting(AdvancedSetting, "Ximplicit-search-limit", "Maximal number of expressions to be generated in an implicit search", 50000) + val XtermConflict: Setting[String] = ChoiceSetting(AdvancedSetting, "Xresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error") + val XnoGenericSig: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xno-generic-signatures", "Suppress generation of generic signatures for Java.") + val Xdumpclasses: Setting[String] = StringSetting(AdvancedSetting, "Xdump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "") + val XjarCompressionLevel: Setting[Int] = IntChoiceSetting(AdvancedSetting, 
"Xjar-compression-level", "compression level to use when writing jar files", Deflater.DEFAULT_COMPRESSION to Deflater.BEST_COMPRESSION, Deflater.DEFAULT_COMPRESSION) + val XkindProjector: Setting[String] = ChoiceSetting(AdvancedSetting, "Xkind-projector", "[underscores, enable, disable]", "Allow `*` as type lambda placeholder to be compatible with kind projector. When invoked as -Xkind-projector:underscores will repurpose `_` to be a type parameter placeholder, this will disable usage of underscore as a wildcard.", List("disable", "", "underscores"), "disable", legacyArgs = true) + + /** Documentation related settings */ + val XdropComments: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xdrop-docs", "Drop documentation when scanning source files.", aliases = List("-Xdrop-comments")) + val XcookComments: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xcook-docs", "Cook the documentation (type check `@usecase`, etc.)", aliases = List("-Xcook-comments")) + val XreadComments: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xread-docs", "Read documentation from tasty.") + + /** Area-specific debug output */ + val XnoDecodeStacktraces: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xno-decode-stacktraces", "Show raw StackOverflow stacktraces, instead of decoding them into triggering operations.") + val XnoEnrichErrorMessages: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xno-enrich-error-messages", "Show raw error messages, instead of enriching them with contextual information.") + val XdebugMacros: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xdebug-macros", "Show debug info when quote pattern match fails") + + /** Pipeline compilation options */ + val XjavaTasty: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xjava-tasty", "Pickler phase should compute TASTy for .java defined symbols for use by build tools", aliases = List("-Xpickle-java", "-Yjava-tasty", "-Ypickle-java"), preferPrevious = true) + val XearlyTastyOutput: 
Setting[AbstractFile] = OutputSetting(AdvancedSetting, "Xearly-tasty-output", "directory|jar", "Destination to write generated .tasty files to for use in pipelined compilation.", NoAbstractFile, aliases = List("-Xpickle-write", "-Yearly-tasty-output", "-Ypickle-write"), preferPrevious = true) + val XallowOutlineFromTasty: Setting[Boolean] = BooleanSetting(AdvancedSetting, "Xallow-outline-from-tasty", "Allow outline TASTy to be loaded with the -from-tasty option.", aliases = List("-Yallow-outline-from-tasty")) + val XmixinForceForwarders = ChoiceSetting( AdvancedSetting, name = "Xmixin-force-forwarders", @@ -333,8 +359,8 @@ private sealed trait XSettings: val XmacroSettings: Setting[List[String]] = MultiStringSetting(AdvancedSetting, "Xmacro-settings", "setting1,setting2,..settingN", "List of settings which exposed to the macros") - // Deprecated - val Xlint: Setting[_] = DeprecatedSetting(AdvancedSetting, "Xlint", "Enable or disable specific warnings", "Use -Wshadow to enable shadowing lints.") + @deprecated(message = "Superseded by -Wshadow, Scheduled for removal", since = "3.5.0") + val Xlint: Setting[_] = BooleanSetting(AdvancedSetting, "Xlint", "Enable or disable specific warnings", deprecation = Some(Deprecation("Use -Wshadow to enable shadowing lints. 
Scheduled for removal.")), ignoreInvalidArgs = true) end XSettings @@ -356,7 +382,6 @@ private sealed trait YSettings: val YdebugError: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-error", "Print the stack trace when any error is caught.", false) val YdebugUnpickling: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-unpickling", "Print the stack trace when an error occurs when reading Tasty.", false) val YdebugCyclic: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-cyclic", "Print the stack trace when a cyclic reference error occurs.", false) - val YtermConflict: Setting[String] = ChoiceSetting(ForkSetting, "Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error") val Ylog: Setting[List[String]] = PhasesSetting(ForkSetting, "Ylog", "Log operations during") val YlogClasspath: Setting[Boolean] = BooleanSetting(ForkSetting, "Ylog-classpath", "Output information about what classpath is being applied.") val YdisableFlatCpCaching: Setting[Boolean] = BooleanSetting(ForkSetting, "YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.") @@ -365,21 +390,18 @@ private sealed trait YSettings: val YnoImports: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.") val Yimports: Setting[List[String]] = MultiStringSetting(ForkSetting, "Yimports", helpArg="", "Custom root imports. 
If set, none of scala.*, java.lang.*, or Predef.* will be imported unless explicitly included.") - val YnoGenericSig: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-generic-signatures", "Suppress generation of generic signatures for Java.") val YnoPredef: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-predef", "Compile without importing Predef.") val Yskip: Setting[List[String]] = PhasesSetting(ForkSetting, "Yskip", "Skip") - val Ydumpclasses: Setting[String] = StringSetting(ForkSetting, "Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "") - val YjarCompressionLevel: Setting[Int] = IntChoiceSetting(ForkSetting, "Yjar-compression-level", "compression level to use when writing jar files", Deflater.DEFAULT_COMPRESSION to Deflater.BEST_COMPRESSION, Deflater.DEFAULT_COMPRESSION) val YbackendParallelism: Setting[Int] = IntChoiceSetting(ForkSetting, "Ybackend-parallelism", "maximum worker threads for backend", 1 to 16, 1) val YbackendWorkerQueue: Setting[Int] = IntChoiceSetting(ForkSetting, "Ybackend-worker-queue", "backend threads worker queue size", 0 to 1000, 0) val YstopAfter: Setting[List[String]] = PhasesSetting(ForkSetting, "Ystop-after", "Stop after", aliases = List("-stop")) // backward compat val YstopBefore: Setting[List[String]] = PhasesSetting(ForkSetting, "Ystop-before", "Stop before") // stop before erasure as long as we have not debugged it fully val YshowSuppressedErrors: Setting[Boolean] = BooleanSetting(ForkSetting, "Yshow-suppressed-errors", "Also show follow-on errors and warnings that are normally suppressed.") val YdetailedStats: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydetailed-stats", "Show detailed internal compiler stats (needs Stats.enabled to be set to true).") - val YkindProjector: Setting[String] = ChoiceSetting(ForkSetting, "Ykind-projector", "[underscores, enable, disable]", "Allow `*` as type lambda placeholder to be 
compatible with kind projector. When invoked as -Ykind-projector:underscores will repurpose `_` to be a type parameter placeholder, this will disable usage of underscore as a wildcard.", List("disable", "", "underscores"), "disable", legacyArgs = true) val YprintPos: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-pos", "Show tree positions.") val YprintPosSyms: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-pos-syms", "Show symbol definitions positions.") val YnoDeepSubtypes: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-deep-subtypes", "Throw an exception on deep subtyping call stacks.") + val YnoSuspendedUnits: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-suspended-units", "Do not suspend units, e.g. when calling a macro defined in the same run. This will error instead of suspending.") val YnoPatmatOpt: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-patmat-opt", "Disable all pattern matching optimizations.") val YplainPrinter: Setting[Boolean] = BooleanSetting(ForkSetting, "Yplain-printer", "Pretty-print using a plain printer.") val YprintSyms: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprint-syms", "When printing trees print info in symbols instead of corresponding info in trees.") @@ -391,9 +413,6 @@ private sealed trait YSettings: val YtestPickler: Setting[Boolean] = BooleanSetting(ForkSetting, "Ytest-pickler", "Self-test for pickling functionality; should be used with -Ystop-after:pickler.") val YtestPicklerCheck: Setting[Boolean] = BooleanSetting(ForkSetting, "Ytest-pickler-check", "Self-test for pickling -print-tasty output; should be used with -Ytest-pickler.") val YcheckReentrant: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycheck-reentrant", "Check that compiled program does not contain vars that can be accessed from a global root.") - val YdropComments: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydrop-docs", "Drop documentation when scanning source files.", aliases = List("-Ydrop-comments")) - val 
YcookComments: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycook-docs", "Cook the documentation (type check `@usecase`, etc.)", aliases = List("-Ycook-comments")) - val YreadComments: Setting[Boolean] = BooleanSetting(ForkSetting, "Yread-docs", "Read documentation from tasty.") val YforceSbtPhases: Setting[Boolean] = BooleanSetting(ForkSetting, "Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.") val YdumpSbtInc: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.") val YcheckAllPatmat: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm).") @@ -401,10 +420,8 @@ private sealed trait YSettings: val YretainTrees: Setting[Boolean] = BooleanSetting(ForkSetting, "Yretain-trees", "Retain trees for top-level classes, accessible from ClassSymbol#tree") val YshowTreeIds: Setting[Boolean] = BooleanSetting(ForkSetting, "Yshow-tree-ids", "Uniquely tag all tree nodes in debugging output.") val YfromTastyIgnoreList: Setting[List[String]] = MultiStringSetting(ForkSetting, "Yfrom-tasty-ignore-list", "file", "List of `tasty` files in jar files that will not be loaded when using -from-tasty.") - val YnoExperimental: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-experimental", "Disable experimental language features by default in NIGHTLY/SNAPSHOT versions of the compiler.") val YlegacyLazyVals: Setting[Boolean] = BooleanSetting(ForkSetting, "Ylegacy-lazy-vals", "Use legacy (pre 3.3.0) implementation of lazy vals.") val YcompileScala2Library: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycompile-scala2-library", "Used when compiling the Scala 2 standard library.") - val 
YoutputOnlyTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Youtput-only-tasty", "Used to only generate the TASTy file without the classfiles") val YprofileEnabled: Setting[Boolean] = BooleanSetting(ForkSetting, "Yprofile-enabled", "Enable profiling.") val YprofileDestination: Setting[String] = StringSetting(ForkSetting, "Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "") //.withPostSetHook( _ => YprofileEnabled.value = true ) @@ -413,10 +430,13 @@ private sealed trait YSettings: val YprofileRunGcBetweenPhases: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_") //.withPostSetHook( _ => YprofileEnabled.value = true ) + val YbestEffort: Setting[Boolean] = BooleanSetting(ForkSetting, "Ybest-effort", "Enable best-effort compilation attempting to produce betasty to the META-INF/best-effort directory, regardless of errors, as part of the pickler phase.") + val YwithBestEffortTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Ywith-best-effort-tasty", "Allow to compile using best-effort tasty files. If such file is used, the compiler will stop after the pickler phase.") + // Experimental language features val YnoKindPolymorphism: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-kind-polymorphism", "Disable kind polymorphism.") val YexplicitNulls: Setting[Boolean] = BooleanSetting(ForkSetting, "Yexplicit-nulls", "Make reference types non-nullable. Nullable types can be expressed with unions: e.g. 
String|Null.") - val YcheckInit: Setting[Boolean] = BooleanSetting(ForkSetting, "Ysafe-init", "Ensure safe initialization of objects.") + val YnoFlexibleTypes: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-flexible-types", "Disable turning nullable Java return types and parameter types into flexible types, which behave like abstract types with a nullable lower bound and non-nullable upper bound.") val YcheckInitGlobal: Setting[Boolean] = BooleanSetting(ForkSetting, "Ysafe-init-global", "Check safe initialization of global objects.") val YrequireTargetName: Setting[Boolean] = BooleanSetting(ForkSetting, "Yrequire-targetName", "Warn if an operator is defined without a @targetName annotation.") val YrecheckTest: Setting[Boolean] = BooleanSetting(ForkSetting, "Yrecheck-test", "Run basic rechecking (internal test only).") @@ -430,16 +450,45 @@ private sealed trait YSettings: val YnoDoubleBindings: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-double-bindings", "Assert no namedtype is bound twice (should be enabled only if program is error-free).") val YshowVarBounds: Setting[Boolean] = BooleanSetting(ForkSetting, "Yshow-var-bounds", "Print type variables with their bounds.") - val YnoDecodeStacktraces: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-decode-stacktraces", "Show raw StackOverflow stacktraces, instead of decoding them into triggering operations.") - val YnoEnrichErrorMessages: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-enrich-error-messages", "Show raw error messages, instead of enriching them with contextual information.") - val Yinstrument: Setting[Boolean] = BooleanSetting(ForkSetting, "Yinstrument", "Add instrumentation code that counts allocations and closure creations.") val YinstrumentDefs: Setting[Boolean] = BooleanSetting(ForkSetting, "Yinstrument-defs", "Add instrumentation code that counts method calls; needs -Yinstrument to be set, too.") - val YdebugMacros: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-macros", 
"Show debug info when quote pattern match fails") - - // Pipeline compilation options - val YjavaTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yjava-tasty", "Pickler phase should compute pickles for .java defined symbols for use by build tools") - val YjavaTastyOutput: Setting[AbstractFile] = OutputSetting(ForkSetting, "Yjava-tasty-output", "directory|jar", "(Internal use only!) destination for generated .tasty files containing Java type signatures.", NoAbstractFile) - val YallowOutlineFromTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yallow-outline-from-tasty", "Allow outline TASTy to be loaded with the -from-tasty option.") + // Deprecated: lifted from -Y to -X + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YtermConflict: Setting[String] = ChoiceSetting(ForkSetting, "Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error", deprecation = Deprecation.renamed("-Xresolve-term-conflict")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YnoGenericSig: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-generic-signatures", "Suppress generation of generic signatures for Java.", deprecation = Deprecation.renamed("-Xno-generic-signatures")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val Ydumpclasses: Setting[String] = StringSetting(ForkSetting, "Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", default = "", deprecation = Deprecation.renamed("-Xdump-classes")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YjarCompressionLevel: Setting[Int] = IntChoiceSetting(ForkSetting, "Yjar-compression-level", "compression level to use when writing jar files", Deflater.DEFAULT_COMPRESSION to Deflater.BEST_COMPRESSION, Deflater.DEFAULT_COMPRESSION, deprecation = 
Deprecation.renamed("-Xjar-compression-level")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YkindProjector: Setting[String] = ChoiceSetting(ForkSetting, "Ykind-projector", "[underscores, enable, disable]", "Allow `*` as type lambda placeholder to be compatible with kind projector. When invoked as -Ykind-projector:underscores will repurpose `_` to be a type parameter placeholder, this will disable usage of underscore as a wildcard.", List("disable", "", "underscores"), "disable", legacyArgs = true, deprecation = Deprecation.renamed("-Xkind-projector")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YdropComments: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydrop-docs", "Drop documentation when scanning source files.", aliases = List("-Ydrop-comments"), deprecation = Deprecation.renamed("-Xdrop-docs")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YcookComments: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycook-docs", "Cook the documentation (type check `@usecase`, etc.)", aliases = List("-Ycook-comments"), deprecation = Deprecation.renamed("-Xcook-docs")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YreadComments: Setting[Boolean] = BooleanSetting(ForkSetting, "Yread-docs", "Read documentation from tasty.", deprecation = Deprecation.renamed("-Xread-docs")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YnoDecodeStacktraces: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-decode-stacktraces", "Show raw StackOverflow stacktraces, instead of decoding them into triggering operations.", deprecation = Deprecation.renamed("-Xno-decode-stacktraces")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YnoEnrichErrorMessages: Setting[Boolean] = BooleanSetting(ForkSetting, "Yno-enrich-error-messages", "Show raw error messages, 
instead of enriching them with contextual information.", deprecation = Deprecation.renamed("-Xno-enrich-error-messages")) + @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.5.0") + val YdebugMacros: Setting[Boolean] = BooleanSetting(ForkSetting, "Ydebug-macros", "Show debug info when quote pattern match fails", deprecation = Deprecation.renamed("-Xdebug-macros")) + + // @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.7.0") + // val YjavaTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yjava-tasty", "Pickler phase should compute TASTy for .java defined symbols for use by build tools", aliases = List("-Ypickle-java"), preferPrevious = true, deprecation = Deprecation.lifted("-Xjava-tasty")) + // @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.7.0") + // val YearlyTastyOutput: Setting[AbstractFile] = OutputSetting(ForkSetting, "Yearly-tasty-output", "directory|jar", "Destination to write generated .tasty files to for use in pipelined compilation.", NoAbstractFile, aliases = List("-Ypickle-write"), preferPrevious = true, deprecation = Deprecation.lifted("-Xearly-tasty-output")) + // @deprecated(message = "Lifted to -X, Scheduled for removal.", since = "3.7.0") + // val YallowOutlineFromTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Yallow-outline-from-tasty", "Allow outline TASTy to be loaded with the -from-tasty option.", deprecation = Deprecation.lifted("-Xallow-outline-from-tasty")) + + // Deprecated: lifted from -Y to -W + @deprecated(message = "Lifted to -W, Scheduled for removal.", since = "3.5.0") + val YcheckInit: Setting[Boolean] = BooleanSetting(ForkSetting, "Ysafe-init", "Ensure safe initialization of objects.", deprecation = Deprecation.renamed("-Wsafe-init")) + + // Deprecated: Scheduled for removal + @deprecated(message = "Scheduled for removal.", since = "3.5.0") + val YoutputOnlyTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Youtput-only-tasty", "Used to only 
generate the TASTy file without the classfiles", deprecation = Deprecation.removed()) end YSettings diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index a65072427ba7..1e2ced4d65a7 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -53,18 +53,20 @@ object Settings: sstate: SettingsState, arguments: List[String], errors: List[String], - warnings: List[String]) { + warnings: List[String]): def fail(msg: String): Settings.ArgsSummary = ArgsSummary(sstate, arguments.tail, errors :+ msg, warnings) def warn(msg: String): Settings.ArgsSummary = ArgsSummary(sstate, arguments.tail, errors, warnings :+ msg) - } + + def deprecated(msg: String, extraArgs: List[String] = Nil): Settings.ArgsSummary = + ArgsSummary(sstate, extraArgs ++ arguments.tail, errors, warnings :+ msg) @unshared val settingCharacters = "[a-zA-Z0-9_\\-]*".r - def validateSettingString(name: String): Unit = + def validateSettingString(name: String): Unit = assert(settingCharacters.matches(name), s"Setting string $name contains invalid characters") @@ -79,11 +81,12 @@ object Settings: aliases: List[String] = Nil, depends: List[(Setting[?], Any)] = Nil, ignoreInvalidArgs: Boolean = false, + preferPrevious: Boolean = false, propertyClass: Option[Class[?]] = None, - deprecationMsg: Option[String] = None, - // kept only for -Ykind-projector option compatibility - legacyArgs: Boolean = false)(private[Settings] val idx: Int) { - + deprecation: Option[Deprecation] = None, + // kept only for -Xkind-projector option compatibility + legacyArgs: Boolean = false)(private[Settings] val idx: Int): + validateSettingString(prefix.getOrElse(name)) aliases.foreach(validateSettingString) assert(name.startsWith(s"-${category.prefixLetter}"), s"Setting $name does not start with category -$category") @@ -92,7 +95,7 @@ object Settings: // Example: -opt Main.scala would be interpreted as 
-opt:Main.scala, and the source file would be ignored. assert(!(summon[ClassTag[T]] == ListTag && ignoreInvalidArgs), s"Ignoring invalid args is not supported for multivalue settings: $name") - val allFullNames: List[String] = s"$name" :: s"-$name" :: aliases + val allFullNames: List[String] = s"$name" :: s"-$name" :: aliases def valueIn(state: SettingsState): T = state.value(idx).asInstanceOf[T] @@ -105,100 +108,116 @@ object Settings: def isMultivalue: Boolean = summon[ClassTag[T]] == ListTag def acceptsNoArg: Boolean = summon[ClassTag[T]] == BooleanTag || summon[ClassTag[T]] == OptionTag || choices.exists(_.contains("")) - + def legalChoices: String = - choices match { + choices match case Some(xs) if xs.isEmpty => "" case Some(r: Range) => s"${r.head}..${r.last}" case Some(xs) => xs.mkString(", ") case None => "" - } - def tryToSet(state: ArgsSummary): ArgsSummary = { + def tryToSet(state: ArgsSummary): ArgsSummary = val ArgsSummary(sstate, arg :: args, errors, warnings) = state: @unchecked - def update(value: Any, args: List[String]): ArgsSummary = - var dangers = warnings - val valueNew = - if sstate.wasChanged(idx) && isMultivalue then - val valueList = value.asInstanceOf[List[String]] - val current = valueIn(sstate).asInstanceOf[List[String]] - valueList.filter(current.contains).foreach(s => dangers :+= s"Setting $name set to $s redundantly") - current ++ valueList - else - if sstate.wasChanged(idx) then dangers :+= s"Flag $name set repeatedly" - value - ArgsSummary(updateIn(sstate, valueNew), args, errors, dangers) - end update - def fail(msg: String, args: List[String]) = - ArgsSummary(sstate, args, errors :+ msg, warnings) + /** + * Updates the value in state + * + * @param getValue it is crucial that this argument is passed by name, as [setOutput] have side effects. 
+ * @param argStringValue string value of currently proccessed argument that will be used to set deprecation replacement + * @param args remaining arguments to process + * @return new argumment state + */ + def update(getValue: => Any, argStringValue: String, args: List[String]): ArgsSummary = + deprecation match + case Some(Deprecation(msg, Some(replacedBy))) => + val deprecatedMsg = s"Option $name is deprecated: $msg" + if argStringValue.isEmpty then state.deprecated(deprecatedMsg, List(replacedBy)) + else state.deprecated(deprecatedMsg, List(s"$replacedBy:$argStringValue")) + + case Some(Deprecation(msg, _)) => + state.deprecated(s"Option $name is deprecated: $msg") + + case None => + val value = getValue + var dangers = warnings + val valueNew = + if sstate.wasChanged(idx) && isMultivalue then + val valueList = value.asInstanceOf[List[String]] + val current = valueIn(sstate).asInstanceOf[List[String]] + valueList.filter(current.contains).foreach(s => dangers :+= s"Setting $name set to $s redundantly") + current ++ valueList + else + if sstate.wasChanged(idx) then + assert(!preferPrevious, "should have shortcutted with ignoreValue, side-effect may be present!") + dangers :+= s"Flag $name set repeatedly" + value + ArgsSummary(updateIn(sstate, valueNew), args, errors, dangers) + end update - def warn(msg: String, args: List[String]) = - ArgsSummary(sstate, args, errors, warnings :+ msg) + def ignoreValue(args: List[String]): ArgsSummary = + ArgsSummary(sstate, args, errors, warnings) def missingArg = val msg = s"missing argument for option $name" - if ignoreInvalidArgs then warn(msg + ", the tag was ignored", args) else fail(msg, args) + if ignoreInvalidArgs then state.warn(msg + ", the tag was ignored") else state.fail(msg) def invalidChoices(invalid: List[String]) = val msg = s"invalid choice(s) for $name: ${invalid.mkString(",")}" - if ignoreInvalidArgs then warn(msg + ", the tag was ignored", args) else fail(msg, args) + if ignoreInvalidArgs then 
state.warn(msg + ", the tag was ignored") else state.fail(msg) def setBoolean(argValue: String, args: List[String]) = - if argValue.equalsIgnoreCase("true") || argValue.isEmpty then update(true, args) - else if argValue.equalsIgnoreCase("false") then update(false, args) - else fail(s"$argValue is not a valid choice for boolean setting $name", args) + if argValue.equalsIgnoreCase("true") || argValue.isEmpty then update(true, argValue, args) + else if argValue.equalsIgnoreCase("false") then update(false, argValue, args) + else state.fail(s"$argValue is not a valid choice for boolean setting $name") def setString(argValue: String, args: List[String]) = choices match case Some(xs) if !xs.contains(argValue) => - fail(s"$argValue is not a valid choice for $name", args) + state.fail(s"$argValue is not a valid choice for $name") case _ => - update(argValue, args) + update(argValue, argValue, args) def setInt(argValue: String, args: List[String]) = - try - val x = argValue.toInt + argValue.toIntOption.map: intValue => choices match - case Some(r: Range) if x < r.head || r.last < x => - fail(s"$argValue is out of legal range ${r.head}..${r.last} for $name", args) - case Some(xs) if !xs.contains(x) => - fail(s"$argValue is not a valid choice for $name", args) + case Some(r: Range) if intValue < r.head || r.last < intValue => + state.fail(s"$argValue is out of legal range ${r.head}..${r.last} for $name") + case Some(xs) if !xs.contains(intValue) => + state.fail(s"$argValue is not a valid choice for $name") case _ => - update(x, args) - catch case _: NumberFormatException => - fail(s"$argValue is not an integer argument for $name", args) - - def setOutput(argValue: String, args: List[String]) = + update(intValue, argValue, args) + .getOrElse: + state.fail(s"$argValue is not an integer argument for $name") + + def setOutput(argValue: String, args: List[String]) = val path = Directory(argValue) - val isJar = path.extension == "jar" - if (!isJar && !path.isDirectory) - 
fail(s"'$argValue' does not exist or is not a directory or .jar file", args) - else { - val output = if (isJar) JarArchive.create(path) else new PlainDirectory(path) - update(output, args) - } - + val isJar = path.ext.isJar + if (!isJar && !path.isDirectory) then + state.fail(s"'$argValue' does not exist or is not a directory or .jar file") + else + /* Side effect, do not change this method to evaluate eagerly */ + def output = if (isJar) JarArchive.create(path) else new PlainDirectory(path) + update(output, argValue, args) + def setVersion(argValue: String, args: List[String]) = - ScalaVersion.parse(argValue) match { - case Success(v) => update(v, args) - case Failure(ex) => fail(ex.getMessage, args) - } + ScalaVersion.parse(argValue) match + case Success(v) => update(v, argValue, args) + case Failure(ex) => state.fail(ex.getMessage) - def appendList(strings: List[String], args: List[String]) = + def appendList(strings: List[String], argValue: String, args: List[String]) = choices match case Some(valid) => strings.filterNot(valid.contains) match - case Nil => update(strings, args) + case Nil => update(strings, argValue, args) case invalid => invalidChoices(invalid) - case _ => update(strings, args) + case _ => update(strings, argValue, args) - - def doSet(argRest: String) = - ((summon[ClassTag[T]], args): @unchecked) match { + def doSet(argRest: String) = + ((summon[ClassTag[T]], args): @unchecked) match case (BooleanTag, _) => - setBoolean(argRest, args) + if sstate.wasChanged(idx) && preferPrevious then ignoreValue(args) + else setBoolean(argRest, args) case (OptionTag, _) => - update(Some(propertyClass.get.getConstructor().newInstance()), args) + update(Some(propertyClass.get.getConstructor().newInstance()), "", args) case (ct, args) => val argInArgRest = !argRest.isEmpty || legacyArgs val argAfterParam = !argInArgRest && args.nonEmpty && (ct == IntTag || !args.head.startsWith("-")) @@ -207,16 +226,18 @@ object Settings: else if argAfterParam then 
doSetArg(args.head, args.tail) else missingArg - } def doSetArg(arg: String, argsLeft: List[String]) = summon[ClassTag[T]] match case ListTag => val strings = arg.split(",").toList - appendList(strings, argsLeft) + appendList(strings, arg, argsLeft) case StringTag => setString(arg, argsLeft) case OutputTag => - setOutput(arg, argsLeft) + if sstate.wasChanged(idx) && preferPrevious then + ignoreValue(argsLeft) // do not risk side effects e.g. overwriting a jar + else + setOutput(arg, argsLeft) case IntTag => setInt(arg, argsLeft) case VersionTag => @@ -224,21 +245,40 @@ object Settings: case _ => missingArg - def matches(argName: String): Boolean = + def matches(argName: String): Boolean = (allFullNames).exists(_ == argName.takeWhile(_ != ':')) || prefix.exists(arg.startsWith) - def argValRest: String = + def argValRest: String = if(prefix.isEmpty) arg.dropWhile(_ != ':').drop(1) else arg.drop(prefix.get.length) - - if matches(arg) then - if deprecationMsg.isDefined then - warn(s"Option $name is deprecated: ${deprecationMsg.get}", args) - else - doSet(argValRest) - else - state - } - } + + if matches(arg) then + deprecation match + case Some(Deprecation(msg, _)) if ignoreInvalidArgs => // a special case for Xlint + state.deprecated(s"Option $name is deprecated: $msg") + case _ => doSet(argValRest) + else state + + end tryToSet + end Setting + + /** + * Class used for deprecating purposes. + * It contains all necessary information to deprecate given option. + * Scala Settings are considered deprecated when this object is present at their creation site. 
+ * + * @param msg deprecation message that will be displayed in following format: s"Option $name is deprecated: $msg" + * @param replacedBy option that is substituting current option + */ + case class Deprecation( + msg: String, + replacedBy: Option[String] = None, + ) + + object Deprecation: + def renamed(replacement: String) = Some(Deprecation(s"Use $replacement instead.", Some(replacement))) + def removed(removedVersion: Option[String] = None) = + val msg = removedVersion.map(" in " + _).getOrElse(".") + Some(Deprecation(s"Scheduled for removal$msg", None)) object Setting: extension [T](setting: Setting[T]) @@ -259,7 +299,7 @@ object Settings: s"\n- $name${if description.isEmpty() then "" else s" :\n\t${description.replace("\n","\n\t")}"}" end Setting - class SettingGroup { + class SettingGroup: @unshared private val _allSettings = new ArrayBuffer[Setting[?]] @@ -277,11 +317,10 @@ object Settings: userSetSettings(state.sstate).foldLeft(state)(checkDependenciesOfSetting) private def checkDependenciesOfSetting(state: ArgsSummary, setting: Setting[?]) = - setting.depends.foldLeft(state) { (s, dep) => + setting.depends.foldLeft(state): (s, dep) => val (depSetting, reqValue) = dep if (depSetting.valueIn(state.sstate) == reqValue) s else s.fail(s"incomplete option ${setting.name} (requires ${depSetting.name})") - } /** Iterates over the arguments applying them to settings where applicable. * Then verifies setting dependencies are met. 
@@ -323,60 +362,57 @@ object Settings: def processArguments(arguments: List[String], processAll: Boolean, settingsState: SettingsState = defaultState): ArgsSummary = processArguments(ArgsSummary(settingsState, arguments, Nil, Nil), processAll, Nil) - def publish[T](settingf: Int => Setting[T]): Setting[T] = { + def publish[T](settingf: Int => Setting[T]): Setting[T] = val setting = settingf(_allSettings.length) _allSettings += setting setting - } def prependName(name: String): String = assert(!name.startsWith("-"), s"Setting $name cannot start with -") "-" + name - def BooleanSetting(category: SettingCategory, name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil): Setting[Boolean] = - publish(Setting(category, prependName(name), descr, initialValue, aliases = aliases)) + def BooleanSetting(category: SettingCategory, name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil, preferPrevious: Boolean = false, deprecation: Option[Deprecation] = None, ignoreInvalidArgs: Boolean = false): Setting[Boolean] = + publish(Setting(category, prependName(name), descr, initialValue, aliases = aliases, preferPrevious = preferPrevious, deprecation = deprecation, ignoreInvalidArgs = ignoreInvalidArgs)) - def StringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = - publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases)) + def StringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[String] = + publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases, deprecation = deprecation)) - def ChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: String, aliases: List[String] = Nil, 
legacyArgs: Boolean = false): Setting[String] = - publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, legacyArgs = legacyArgs)) + def ChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: String, aliases: List[String] = Nil, legacyArgs: Boolean = false, deprecation: Option[Deprecation] = None): Setting[String] = + publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, legacyArgs = legacyArgs, deprecation = deprecation)) - def MultiChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: List[String], aliases: List[String] = Nil): Setting[List[String]] = - publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases)) + def MultiChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: List[String], aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] = + publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, deprecation = deprecation)) - def MultiChoiceHelpSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], aliases: List[String] = Nil): Setting[List[ChoiceWithHelp[String]]] = - publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases)) + def MultiChoiceHelpSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[ChoiceWithHelp[String]], default: List[ChoiceWithHelp[String]], aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[ChoiceWithHelp[String]]] = + publish(Setting(category, prependName(name), descr, default, helpArg, 
Some(choices), aliases = aliases, deprecation = deprecation)) - def IntSetting(category: SettingCategory, name: String, descr: String, default: Int, aliases: List[String] = Nil): Setting[Int] = - publish(Setting(category, prependName(name), descr, default, aliases = aliases)) + def IntSetting(category: SettingCategory, name: String, descr: String, default: Int, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[Int] = + publish(Setting(category, prependName(name), descr, default, aliases = aliases, deprecation = deprecation)) - def IntChoiceSetting(category: SettingCategory, name: String, descr: String, choices: Seq[Int], default: Int): Setting[Int] = - publish(Setting(category, prependName(name), descr, default, choices = Some(choices))) + def IntChoiceSetting(category: SettingCategory, name: String, descr: String, choices: Seq[Int], default: Int, deprecation: Option[Deprecation] = None): Setting[Int] = + publish(Setting(category, prependName(name), descr, default, choices = Some(choices), deprecation = deprecation)) - def MultiStringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: List[String] = Nil, aliases: List[String] = Nil): Setting[List[String]] = - publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases)) + def MultiStringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: List[String] = Nil, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] = + publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases, deprecation = deprecation)) - def OutputSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: AbstractFile): Setting[AbstractFile] = - publish(Setting(category, prependName(name), descr, default, helpArg)) + def OutputSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: 
AbstractFile, aliases: List[String] = Nil, preferPrevious: Boolean = false, deprecation: Option[Deprecation] = None): Setting[AbstractFile] = + publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases, preferPrevious = preferPrevious, deprecation = deprecation)) - def PathSetting(category: SettingCategory, name: String, descr: String, default: String, aliases: List[String] = Nil): Setting[String] = - publish(Setting(category, prependName(name), descr, default, aliases = aliases)) + def PathSetting(category: SettingCategory, name: String, descr: String, default: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[String] = + publish(Setting(category, prependName(name), descr, default, aliases = aliases, deprecation = deprecation)) - def PhasesSetting(category: SettingCategory, name: String, descr: String, default: String = "", aliases: List[String] = Nil): Setting[List[String]] = - publish(Setting(category, prependName(name), descr, if (default.isEmpty) Nil else List(default), aliases = aliases)) + def PhasesSetting(category: SettingCategory, name: String, descr: String, default: String = "", aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] = + publish(Setting(category, prependName(name), descr, if (default.isEmpty) Nil else List(default), aliases = aliases, deprecation = deprecation)) - def PrefixSetting(category: SettingCategory, name: String, descr: String): Setting[List[String]] = + def PrefixSetting(category: SettingCategory, name: String, descr: String, deprecation: Option[Deprecation] = None): Setting[List[String]] = val prefix = name.takeWhile(_ != '<') - publish(Setting(category, "-" + name, descr, Nil, prefix = Some(prefix))) + publish(Setting(category, "-" + name, descr, Nil, prefix = Some(prefix), deprecation = deprecation)) + + def VersionSetting(category: SettingCategory, name: String, descr: String, default: ScalaVersion = NoScalaVersion, 
legacyArgs: Boolean = false, deprecation: Option[Deprecation] = None): Setting[ScalaVersion] = + publish(Setting(category, prependName(name), descr, default, legacyArgs = legacyArgs, deprecation = deprecation)) - def VersionSetting(category: SettingCategory, name: String, descr: String, default: ScalaVersion = NoScalaVersion, legacyArgs: Boolean = false): Setting[ScalaVersion] = - publish(Setting(category, prependName(name), descr, default, legacyArgs = legacyArgs)) + def OptionSetting[T: ClassTag](category: SettingCategory, name: String, descr: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[Option[T]] = + publish(Setting(category, prependName(name), descr, None, propertyClass = Some(summon[ClassTag[T]].runtimeClass), aliases = aliases, deprecation = deprecation)) - def OptionSetting[T: ClassTag](category: SettingCategory, name: String, descr: String, aliases: List[String] = Nil): Setting[Option[T]] = - publish(Setting(category, prependName(name), descr, None, propertyClass = Some(summon[ClassTag[T]].runtimeClass), aliases = aliases)) - - def DeprecatedSetting(category: SettingCategory, name: String, descr: String, deprecationMsg: String): Setting[Boolean] = - publish(Setting(category, prependName(name), descr, false, deprecationMsg = Some(deprecationMsg))) - } + end SettingGroup end Settings diff --git a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala index 7a464d331930..02140c3f4e3b 100644 --- a/compiler/src/dotty/tools/dotc/config/SourceVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/SourceVersion.scala @@ -11,6 +11,8 @@ enum SourceVersion: case `3.3-migration`, `3.3` case `3.4-migration`, `3.4` case `3.5-migration`, `3.5` + case `3.6-migration`, `3.6` + case `3.7-migration`, `3.7` // !!! Keep in sync with scala.runtime.stdlibPatches.language !!! 
case `future-migration`, `future` @@ -27,7 +29,7 @@ enum SourceVersion: def isAtMost(v: SourceVersion) = stable.ordinal <= v.ordinal object SourceVersion extends Property.Key[SourceVersion]: - def defaultSourceVersion = `3.4` + def defaultSourceVersion = `3.5` /** language versions that may appear in a language import, are deprecated, but not removed from the standard library. */ val illegalSourceVersionNames = List("3.1-migration").map(_.toTermName) diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala b/compiler/src/dotty/tools/dotc/core/Annotations.scala index 45dba97a79f7..a5ef4c26eed1 100644 --- a/compiler/src/dotty/tools/dotc/core/Annotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala @@ -275,4 +275,30 @@ object Annotations { } } } + + object ExperimentalAnnotation { + + /** Create an instance of `@experimental()` */ + def apply(msg: String, span: Span)(using Context): Annotation = + Annotation(defn.ExperimentalAnnot, Literal(Constant(msg)), span) + + /** Matches and extracts the message from an instance of `@experimental(msg)` + * Returns `Some("")` for `@experimental` with no message. + */ + def unapply(a: Annotation)(using Context): Option[String] = + if a.symbol ne defn.ExperimentalAnnot then + None + else a.argumentConstant(0) match + case Some(Constant(msg: String)) => Some(msg) + case _ => Some("") + + /** Makes a copy of the `@experimental(msg)` annotation on `sym` + * None is returned if the symbol does not have an `@experimental` annotation. 
+ */ + def copy(sym: Symbol)(using Context): Option[Annotation] = + sym.getAnnotation(defn.ExperimentalAnnot).map { + case annot @ ExperimentalAnnotation(msg) => ExperimentalAnnotation(msg, annot.tree.span) + } + } + } diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 1870956357d6..06711ec97abf 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -647,9 +647,9 @@ trait ConstraintHandling { * At this point we also drop the @Repeated annotation to avoid inferring type arguments with it, * as those could leak the annotation to users (see run/inferred-repeated-result). */ - def widenInferred(inst: Type, bound: Type, widenUnions: Boolean)(using Context): Type = + def widenInferred(inst: Type, bound: Type, widen: Widen)(using Context): Type = def widenOr(tp: Type) = - if widenUnions then + if widen == Widen.Unions then val tpw = tp.widenUnion if tpw ne tp then if tpw.isTransparent() then @@ -667,14 +667,10 @@ trait ConstraintHandling { val tpw = tp.widenSingletons(skipSoftUnions) if (tpw ne tp) && (tpw <:< bound) then tpw else tp - def isSingleton(tp: Type): Boolean = tp match - case WildcardType(optBounds) => optBounds.exists && isSingleton(optBounds.bounds.hi) - case _ => isSubTypeWhenFrozen(tp, defn.SingletonType) - val wideInst = - if isSingleton(bound) then inst + if widen == Widen.None || bound.isSingletonBounded(frozen = true) then inst else - val widenedFromSingle = widenSingle(inst, skipSoftUnions = widenUnions) + val widenedFromSingle = widenSingle(inst, skipSoftUnions = widen == Widen.Unions) val widenedFromUnion = widenOr(widenedFromSingle) val widened = dropTransparentTraits(widenedFromUnion, bound) widenIrreducible(widened) @@ -696,9 +692,11 @@ trait ConstraintHandling { tp.rebind(tp.parent.hardenUnions) case tp: HKTypeLambda => tp.derivedLambdaType(resType = 
tp.resType.hardenUnions) + case tp: FlexibleType => + tp.derivedFlexibleType(tp.hi.hardenUnions) case tp: OrType => - val tp1 = tp.stripNull - if tp1 ne tp then tp.derivedOrType(tp1.hardenUnions, defn.NullType) + val tp1 = tp.stripNull(stripFlexibleTypes = false) + if tp1 ne tp then tp.derivedOrType(tp1.hardenUnions, defn.NullType, soft = false) else tp.derivedOrType(tp.tp1.hardenUnions, tp.tp2.hardenUnions, soft = false) case _ => tp @@ -711,10 +709,10 @@ trait ConstraintHandling { * The instance type is not allowed to contain references to types nested deeper * than `maxLevel`. */ - def instanceType(param: TypeParamRef, fromBelow: Boolean, widenUnions: Boolean, maxLevel: Int)(using Context): Type = { + def instanceType(param: TypeParamRef, fromBelow: Boolean, widen: Widen, maxLevel: Int)(using Context): Type = { val approx = approximation(param, fromBelow, maxLevel).simplified if fromBelow then - val widened = widenInferred(approx, param, widenUnions) + val widened = widenInferred(approx, param, widen) // Widening can add extra constraints, in particular the widened type might // be a type variable which is now instantiated to `param`, and therefore // cannot be used as an instantiation of `param` without creating a loop. @@ -722,7 +720,7 @@ trait ConstraintHandling { // (we do not check for non-toplevel occurrences: those should never occur // since `addOneBound` disallows recursive lower bounds). 
if constraint.occursAtToplevel(param, widened) then - instanceType(param, fromBelow, widenUnions, maxLevel) + instanceType(param, fromBelow, widen, maxLevel) else widened else diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index ae21c6fb8763..79a0b279aefe 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -12,6 +12,7 @@ import Symbols.* import Scopes.* import Uniques.* import ast.Trees.* +import Flags.ParamAccessor import ast.untpd import util.{NoSource, SimpleIdentityMap, SourceFile, HashSet, ReusableInstance} import typer.{Implicits, ImportInfo, SearchHistory, SearchRoot, TypeAssigner, Typer, Nullables} @@ -95,14 +96,14 @@ object Contexts { inline def atPhaseNoEarlier[T](limit: Phase)(inline op: Context ?=> T)(using Context): T = op(using if !limit.exists || limit <= ctx.phase then ctx else ctx.withPhase(limit)) - inline private def inMode[T](mode: Mode)(inline op: Context ?=> T)(using ctx: Context): T = + inline def withModeBits[T](mode: Mode)(inline op: Context ?=> T)(using ctx: Context): T = op(using if mode != ctx.mode then ctx.fresh.setMode(mode) else ctx) inline def withMode[T](mode: Mode)(inline op: Context ?=> T)(using ctx: Context): T = - inMode(ctx.mode | mode)(op) + withModeBits(ctx.mode | mode)(op) inline def withoutMode[T](mode: Mode)(inline op: Context ?=> T)(using ctx: Context): T = - inMode(ctx.mode &~ mode)(op) + withModeBits(ctx.mode &~ mode)(op) /** A context is passed basically everywhere in dotc. 
* This is convenient but carries the risk of captured contexts in @@ -399,7 +400,8 @@ object Contexts { * * - as owner: The primary constructor of the class * - as outer context: The context enclosing the class context - * - as scope: The parameter accessors in the class context + * - as scope: type parameters, the parameter accessors, and + * the context bound companions in the class context, * * The reasons for this peculiar choice of attributes are as follows: * @@ -413,10 +415,11 @@ object Contexts { * context see the constructor parameters instead, but then we'd need a final substitution step * from constructor parameters to class parameter accessors. */ - def superCallContext: Context = { - val locals = newScopeWith(owner.typeParams ++ owner.asClass.paramAccessors*) - superOrThisCallContext(owner.primaryConstructor, locals) - } + def superCallContext: Context = + val locals = owner.typeParams + ++ owner.asClass.unforcedDecls.filter: sym => + sym.is(ParamAccessor) || sym.isContextBoundCompanion + superOrThisCallContext(owner.primaryConstructor, newScopeWith(locals*)) /** The context for the arguments of a this(...) constructor call. * The context is computed from the local auxiliary constructor context. @@ -437,7 +440,7 @@ object Contexts { /** The super- or this-call context with given owner and locals. */ private def superOrThisCallContext(owner: Symbol, locals: Scope): FreshContext = { - var classCtx = outersIterator.dropWhile(!_.isClassDefContext).next() + val classCtx = outersIterator.dropWhile(!_.isClassDefContext).next() classCtx.outer.fresh.setOwner(owner) .setScope(locals) .setMode(classCtx.mode) @@ -472,6 +475,24 @@ object Contexts { /** Is the explicit nulls option set? */ def explicitNulls: Boolean = base.settings.YexplicitNulls.value + /** Is the flexible types option set? */ + def flexibleTypes: Boolean = base.settings.YexplicitNulls.value && !base.settings.YnoFlexibleTypes.value + + /** Is the best-effort option set? 
*/ + def isBestEffort: Boolean = base.settings.YbestEffort.value + + /** Is the with-best-effort-tasty option set? */ + def withBestEffortTasty: Boolean = base.settings.YwithBestEffortTasty.value + + /** Were any best effort tasty dependencies used during compilation? */ + def usedBestEffortTasty: Boolean = base.usedBestEffortTasty + + /** Confirm that a best effort tasty dependency was used during compilation. */ + def setUsedBestEffortTasty(): Unit = base.usedBestEffortTasty = true + + /** Is either the best-effort option set or .betasty files were used during compilation? */ + def tolerateErrorsForBestEffort = isBestEffort || usedBestEffortTasty + /** A fresh clone of this context embedded in this context. */ def fresh: FreshContext = freshOver(this) @@ -682,6 +703,7 @@ object Contexts { updateStore(compilationUnitLoc, compilationUnit) } + def setCompilerCallback(callback: CompilerCallback): this.type = updateStore(compilerCallbackLoc, callback) def setIncCallback(callback: IncrementalCallback): this.type = updateStore(incCallbackLoc, callback) def setProgressCallback(callback: ProgressCallback): this.type = updateStore(progressCallbackLoc, callback) @@ -889,7 +911,7 @@ object Contexts { val definitions: Definitions = new Definitions // Set up some phases to get started */ - usePhases(List(SomePhase)) + usePhases(List(SomePhase), FreshContext(this)) /** Initializes the `ContextBase` with a starting context. * This initializes the `platform` and the `definitions`. @@ -956,6 +978,9 @@ object Contexts { val sources: util.HashMap[AbstractFile, SourceFile] = util.HashMap[AbstractFile, SourceFile]() val files: util.HashMap[TermName, AbstractFile] = util.HashMap() + /** Was best effort file used during compilation? 
*/ + private[core] var usedBestEffortTasty = false + // Types state /** A table for hash consing unique types */ private[core] val uniques: Uniques = Uniques() diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 789e744fbfc9..6a1332e91afb 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -59,10 +59,10 @@ class Definitions { private def enterCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef], decls: Scope) = newCompleteClassSymbol(owner, name, flags | Permanent | NoInits | Open, parents, decls).entered - private def enterTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = + private def enterTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope): TypeSymbol = scope.enter(newPermanentSymbol(cls, name, flags, TypeBounds.empty)) - private def enterTypeParam(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) = + private def enterTypeParam(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope): TypeSymbol = enterTypeField(cls, name, flags | ClassTypeParamCreationFlags, scope) private def enterSyntheticTypeParam(cls: ClassSymbol, paramFlags: FlagSet, scope: MutableScope, suffix: String = "T0") = @@ -240,6 +240,7 @@ class Definitions { @tu lazy val Compiletime_codeOf: Symbol = CompiletimePackageClass.requiredMethod("codeOf") @tu lazy val Compiletime_erasedValue : Symbol = CompiletimePackageClass.requiredMethod("erasedValue") @tu lazy val Compiletime_uninitialized: Symbol = CompiletimePackageClass.requiredMethod("uninitialized") + @tu lazy val Compiletime_deferred : Symbol = CompiletimePackageClass.requiredMethod("deferred") @tu lazy val Compiletime_error : Symbol = CompiletimePackageClass.requiredMethod(nme.error) @tu lazy val Compiletime_requireConst : Symbol = 
CompiletimePackageClass.requiredMethod("requireConst") @tu lazy val Compiletime_constValue : Symbol = CompiletimePackageClass.requiredMethod("constValue") @@ -458,6 +459,13 @@ class Definitions { @tu lazy val andType: TypeSymbol = enterBinaryAlias(tpnme.AND, AndType(_, _)) @tu lazy val orType: TypeSymbol = enterBinaryAlias(tpnme.OR, OrType(_, _, soft = false)) + @tu lazy val CBCompanion: TypeSymbol = // type ``[-Refs] + enterPermanentSymbol(tpnme.CBCompanion, + TypeBounds(NothingType, + HKTypeLambda(tpnme.syntheticTypeParamName(0) :: Nil, Contravariant :: Nil)( + tl => TypeBounds.empty :: Nil, + tl => AnyType))).asType + /** Method representing a throw */ @tu lazy val throwMethod: TermSymbol = enterMethod(OpsPackageClass, nme.THROWkw, MethodType(List(ThrowableType), NothingType)) @@ -527,12 +535,16 @@ class Definitions { def ConsType: TypeRef = ConsClass.typeRef @tu lazy val SeqFactoryClass: Symbol = requiredClass("scala.collection.SeqFactory") + @tu lazy val PreciseClass: ClassSymbol = requiredClass("scala.Precise") + @tu lazy val SingletonClass: ClassSymbol = // needed as a synthetic class because Scala 2.x refers to it in classfiles // but does not define it as an explicit class. 
- enterCompleteClassSymbol( - ScalaPackageClass, tpnme.Singleton, PureInterfaceCreationFlags | Final, - List(AnyType), EmptyScope) + val cls = enterCompleteClassSymbol( + ScalaPackageClass, tpnme.Singleton, PureInterfaceCreationFlags | Final | Erased, + List(AnyType)) + enterTypeField(cls, tpnme.Self, Deferred, cls.info.decls.openForMutations) + cls @tu lazy val SingletonType: TypeRef = SingletonClass.typeRef @tu lazy val MaybeCapabilityAnnot: ClassSymbol = @@ -648,7 +660,7 @@ class Definitions { @tu lazy val StringModule: Symbol = StringClass.linkedClass @tu lazy val String_+ : TermSymbol = enterMethod(StringClass, nme.raw.PLUS, methOfAny(StringType), Final) @tu lazy val String_valueOf_Object: Symbol = StringModule.info.member(nme.valueOf).suchThat(_.info.firstParamTypes match { - case List(pt) => pt.isAny || pt.stripNull.isAnyRef + case List(pt) => pt.isAny || pt.stripNull().isAnyRef case _ => false }).symbol @@ -660,13 +672,13 @@ class Definitions { @tu lazy val ClassCastExceptionClass: ClassSymbol = requiredClass("java.lang.ClassCastException") @tu lazy val ClassCastExceptionClass_stringConstructor: TermSymbol = ClassCastExceptionClass.info.member(nme.CONSTRUCTOR).suchThat(_.info.firstParamTypes match { case List(pt) => - pt.stripNull.isRef(StringClass) + pt.stripNull().isRef(StringClass) case _ => false }).symbol.asTerm @tu lazy val ArithmeticExceptionClass: ClassSymbol = requiredClass("java.lang.ArithmeticException") @tu lazy val ArithmeticExceptionClass_stringConstructor: TermSymbol = ArithmeticExceptionClass.info.member(nme.CONSTRUCTOR).suchThat(_.info.firstParamTypes match { case List(pt) => - pt.stripNull.isRef(StringClass) + pt.stripNull().isRef(StringClass) case _ => false }).symbol.asTerm @@ -926,12 +938,6 @@ class Definitions { @tu lazy val XMLTopScopeModule: Symbol = requiredModule("scala.xml.TopScope") - @tu lazy val MainAnnotationClass: ClassSymbol = requiredClass("scala.annotation.MainAnnotation") - @tu lazy val MainAnnotationInfo: ClassSymbol = 
requiredClass("scala.annotation.MainAnnotation.Info") - @tu lazy val MainAnnotationParameter: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Parameter") - @tu lazy val MainAnnotationParameterAnnotation: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.ParameterAnnotation") - @tu lazy val MainAnnotationCommand: ClassSymbol = requiredClass("scala.annotation.MainAnnotation.Command") - @tu lazy val CommandLineParserModule: Symbol = requiredModule("scala.util.CommandLineParser") @tu lazy val CLP_ParseError: ClassSymbol = CommandLineParserModule.requiredClass("ParseError").typeRef.symbol.asClass @tu lazy val CLP_parseArgument: Symbol = CommandLineParserModule.requiredMethod("parseArgument") @@ -955,6 +961,9 @@ class Definitions { def TupleXXL_fromIterator(using Context): Symbol = TupleXXLModule.requiredMethod("fromIterator") def TupleXXL_unapplySeq(using Context): Symbol = TupleXXLModule.requiredMethod(nme.unapplySeq) + @tu lazy val NamedTupleModule = requiredModule("scala.NamedTuple") + @tu lazy val NamedTupleTypeRef: TypeRef = NamedTupleModule.termRef.select(tpnme.NamedTuple).asInstanceOf + @tu lazy val RuntimeTupleMirrorTypeRef: TypeRef = requiredClassRef("scala.runtime.TupleMirror") @tu lazy val RuntimeTuplesModule: Symbol = requiredModule("scala.runtime.Tuples") @@ -982,7 +991,7 @@ class Definitions { @tu lazy val CapsModule: Symbol = requiredModule("scala.caps") @tu lazy val captureRoot: TermSymbol = CapsModule.requiredValue("cap") - @tu lazy val Caps_Cap: TypeSymbol = CapsModule.requiredType("Cap") + @tu lazy val Caps_Cap: TypeSymbol = requiredClass("scala.caps.Cap") @tu lazy val Caps_reachCapability: TermSymbol = CapsModule.requiredMethod("reachCapability") @tu lazy val CapsUnsafeModule: Symbol = requiredModule("scala.caps.unsafe") @tu lazy val Caps_unsafeAssumePure: Symbol = CapsUnsafeModule.requiredMethod("unsafeAssumePure") @@ -1062,7 +1071,9 @@ class Definitions { @tu lazy val RetainsAnnot: ClassSymbol = 
requiredClass("scala.annotation.retains") @tu lazy val RetainsCapAnnot: ClassSymbol = requiredClass("scala.annotation.retainsCap") @tu lazy val RetainsByNameAnnot: ClassSymbol = requiredClass("scala.annotation.retainsByName") + @tu lazy val RetainsArgAnnot: ClassSymbol = requiredClass("scala.annotation.retainsArg") @tu lazy val PublicInBinaryAnnot: ClassSymbol = requiredClass("scala.annotation.publicInBinary") + @tu lazy val WitnessNamesAnnot: ClassSymbol = requiredClass("scala.annotation.internal.WitnessNames") @tu lazy val JavaRepeatableAnnot: ClassSymbol = requiredClass("java.lang.annotation.Repeatable") @@ -1309,9 +1320,20 @@ class Definitions { case ByNameFunction(_) => true case _ => false + object NamedTuple: + def apply(nmes: Type, vals: Type)(using Context): Type = + AppliedType(NamedTupleTypeRef, nmes :: vals :: Nil) + def unapply(t: Type)(using Context): Option[(Type, Type)] = t match + case AppliedType(tycon, nmes :: vals :: Nil) if tycon.typeSymbol == NamedTupleTypeRef.symbol => + Some((nmes, vals)) + case _ => None + final def isCompiletime_S(sym: Symbol)(using Context): Boolean = sym.name == tpnme.S && sym.owner == CompiletimeOpsIntModuleClass + final def isNamedTuple_From(sym: Symbol)(using Context): Boolean = + sym.name == tpnme.From && sym.owner == NamedTupleModule.moduleClass + private val compiletimePackageAnyTypes: Set[Name] = Set( tpnme.Equals, tpnme.NotEquals, tpnme.IsConst, tpnme.ToString ) @@ -1340,7 +1362,7 @@ class Definitions { tpnme.Plus, tpnme.Length, tpnme.Substring, tpnme.Matches, tpnme.CharAt ) private val compiletimePackageOpTypes: Set[Name] = - Set(tpnme.S) + Set(tpnme.S, tpnme.From) ++ compiletimePackageAnyTypes ++ compiletimePackageIntTypes ++ compiletimePackageLongTypes @@ -1353,6 +1375,7 @@ class Definitions { compiletimePackageOpTypes.contains(sym.name) && ( isCompiletime_S(sym) + || isNamedTuple_From(sym) || sym.owner == CompiletimeOpsAnyModuleClass && compiletimePackageAnyTypes.contains(sym.name) || sym.owner == 
CompiletimeOpsIntModuleClass && compiletimePackageIntTypes.contains(sym.name) || sym.owner == CompiletimeOpsLongModuleClass && compiletimePackageLongTypes.contains(sym.name) @@ -2013,6 +2036,13 @@ class Definitions { CapabilityAnnot, RequiresCapabilityAnnot, RetainsAnnot, RetainsCapAnnot, RetainsByNameAnnot) + /** Experimental language features defined in `scala.runtime.stdLibPatches.language.experimental`. + * + * This list does not include `scala.language.experimental.macros`. + */ + @tu lazy val languageExperimentalFeatures: List[TermSymbol] = + LanguageExperimentalModule.moduleClass.info.decls.toList.filter(_.isAllOf(Lazy | Module)).map(_.asTerm) + // ----- primitive value class machinery ------------------------------------------ class PerRun[T](generate: Context ?=> T) { @@ -2140,6 +2170,7 @@ class Definitions { NullClass, NothingClass, SingletonClass, + CBCompanion, MaybeCapabilityAnnot) @tu lazy val syntheticCoreClasses: List[Symbol] = syntheticScalaClasses ++ List( diff --git a/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala b/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala index 59982fb99b5f..451561c1b84d 100644 --- a/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala +++ b/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala @@ -28,6 +28,8 @@ object DenotTransformers { /** The transformation method */ def transform(ref: SingleDenotation)(using Context): SingleDenotation + + override def isRunnable(using Context) = super.isRunnable && !ctx.usedBestEffortTasty } /** A transformer that only transforms the info field of denotations */ diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index bd92fa814a6e..2418aba1978b 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -580,7 +580,7 @@ object Denotations { /** A non-overloaded denotation */ abstract class SingleDenotation(symbol: 
Symbol, initInfo: Type, isType: Boolean) extends Denotation(symbol, initInfo, isType) { - protected def newLikeThis(symbol: Symbol, info: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation + protected def newLikeThis(symbol: Symbol, info: Type, pre: Type, isRefinedMethod: Boolean)(using Context): SingleDenotation final def name(using Context): Name = symbol.name @@ -719,7 +719,8 @@ object Denotations { ctx.runId >= validFor.runId || ctx.settings.YtestPickler.value // mixing test pickler with debug printing can travel back in time || ctx.mode.is(Mode.Printing) // no use to be picky when printing error messages - || symbol.isOneOf(ValidForeverFlags), + || symbol.isOneOf(ValidForeverFlags) + || ctx.tolerateErrorsForBestEffort, s"denotation $this invalid in run ${ctx.runId}. ValidFor: $validFor") var d: SingleDenotation = this while ({ @@ -1162,11 +1163,11 @@ object Denotations { prefix: Type) extends NonSymSingleDenotation(symbol, initInfo, prefix) { validFor = initValidFor override def hasUniqueSym: Boolean = true - protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = + protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean)(using Context): SingleDenotation = if isRefinedMethod then - new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) + new JointRefDenotation(s, i, currentStablePeriod, pre, isRefinedMethod) else - new UniqueRefDenotation(s, i, validFor, pre) + new UniqueRefDenotation(s, i, currentStablePeriod, pre) } class JointRefDenotation( @@ -1177,15 +1178,15 @@ object Denotations { override val isRefinedMethod: Boolean) extends NonSymSingleDenotation(symbol, initInfo, prefix) { validFor = initValidFor override def hasUniqueSym: Boolean = false - protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = - new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) + protected def newLikeThis(s: Symbol, i: Type, pre: Type, 
isRefinedMethod: Boolean)(using Context): SingleDenotation = + new JointRefDenotation(s, i, currentStablePeriod, pre, isRefinedMethod) } class ErrorDenotation(using Context) extends NonSymSingleDenotation(NoSymbol, NoType, NoType) { override def exists: Boolean = false override def hasUniqueSym: Boolean = false validFor = Period.allInRun(ctx.runId) - protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = + protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean)(using Context): SingleDenotation = this } diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 249940d8ff99..b1bf7a266c91 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -137,7 +137,7 @@ object Flags { def flagStrings(privateWithin: String = ""): Seq[String] = { var rawStrings = (2 to MaxFlag).flatMap(x.flagString(_)) // DOTTY problem: cannot drop with (_) if (!privateWithin.isEmpty && !x.is(Protected)) - rawStrings = rawStrings :+ "private" + rawStrings :+= "private" val scopeStr = if (x.is(Local)) "this" else privateWithin if (scopeStr != "") rawStrings.filter(_ != "").map { @@ -377,6 +377,9 @@ object Flags { /** Symbol cannot be found as a member during typer */ val (Invisible @ _, _, _) = newFlags(45, "") + /** Tracked modifier for class parameter / a class with some tracked parameters */ + val (Tracked @ _, _, Dependent @ _) = newFlags(46, "tracked") + // ------------ Flags following this one are not pickled ---------------------------------- /** Symbol is not a member of its owner */ @@ -452,7 +455,7 @@ object Flags { CommonSourceModifierFlags.toTypeFlags | Abstract | Sealed | Opaque | Open val TermSourceModifierFlags: FlagSet = - CommonSourceModifierFlags.toTermFlags | Inline | AbsOverride | Lazy + CommonSourceModifierFlags.toTermFlags | Inline | AbsOverride | Lazy | Tracked /** Flags representing 
modifiers that can appear in trees */ val ModifierFlags: FlagSet = @@ -466,7 +469,7 @@ object Flags { val FromStartFlags: FlagSet = commonFlags( Module, Package, Deferred, Method, Case, Enum, Param, ParamAccessor, Scala2SpecialFlags, MutableOrOpen, Opaque, Touched, JavaStatic, - OuterOrCovariant, LabelOrContravariant, CaseAccessor, + OuterOrCovariant, LabelOrContravariant, CaseAccessor, Tracked, Extension, NonMember, Implicit, Given, Permanent, Synthetic, Exported, SuperParamAliasOrScala2x, Inline, Macro, ConstructorProxy, Invisible) @@ -477,7 +480,7 @@ object Flags { */ val AfterLoadFlags: FlagSet = commonFlags( FromStartFlags, AccessFlags, Final, AccessorOrSealed, - Abstract, LazyOrTrait, SelfName, JavaDefined, JavaAnnotation, Transparent) + Abstract, LazyOrTrait, SelfName, JavaDefined, JavaAnnotation, Transparent, Tracked) /** A value that's unstable unless complemented with a Stable flag */ val UnstableValueFlags: FlagSet = Mutable | Method @@ -535,13 +538,14 @@ object Flags { /** Flags retained in term export forwarders */ val RetainedExportTermFlags = Infix | Given | Implicit | Inline | Transparent | Erased | HasDefaultParams | NoDefaultParams | ExtensionMethod + /** Flags retained in parameters of term export forwarders */ + val RetainedExportTermParamFlags = Given | Implicit | Erased | HasDefault | Inline + val MandatoryExportTermFlags = Exported | Method | Final /** Flags retained in type export forwarders */ val RetainedExportTypeFlags = Infix - val MandatoryExportTypeFlags = Exported | Final - /** Flags that apply only to classes */ val ClassOnlyFlags = Sealed | Open | Abstract.toTypeFlags @@ -569,6 +573,7 @@ object Flags { val DeferredOrLazyOrMethod: FlagSet = Deferred | Lazy | Method val DeferredOrTermParamOrAccessor: FlagSet = Deferred | ParamAccessor | TermParam // term symbols without right-hand sides val DeferredOrTypeParam: FlagSet = Deferred | TypeParam // type symbols without right-hand sides + val DeferredGivenFlags: FlagSet = Deferred | Given 
| HasDefault val EnumValue: FlagSet = Enum | StableRealizable // A Scala enum value val FinalOrInline: FlagSet = Final | Inline val FinalOrModuleClass: FlagSet = Final | ModuleClass // A module class or a final class diff --git a/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala b/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala index 6244923cfb52..46ce0d2d7852 100644 --- a/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala +++ b/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala @@ -78,11 +78,11 @@ object JavaNullInterop { * but the result type is not nullable. */ private def nullifyExceptReturnType(tp: Type)(using Context): Type = - new JavaNullMap(true)(tp) + new JavaNullMap(outermostLevelAlreadyNullable = true)(tp) /** Nullifies a Java type by adding `| Null` in the relevant places. */ private def nullifyType(tp: Type)(using Context): Type = - new JavaNullMap(false)(tp) + new JavaNullMap(outermostLevelAlreadyNullable = false)(tp) /** A type map that implements the nullification function on types. Given a Java-sourced type, this adds `| Null` * in the right places to make the nulls explicit in Scala. @@ -96,25 +96,29 @@ object JavaNullInterop { * to `(A & B) | Null`, instead of `(A | Null & B | Null) | Null`. */ private class JavaNullMap(var outermostLevelAlreadyNullable: Boolean)(using Context) extends TypeMap { + def nullify(tp: Type): Type = if ctx.flexibleTypes then FlexibleType(tp) else OrNull(tp) + /** Should we nullify `tp` at the outermost level? */ def needsNull(tp: Type): Boolean = - !outermostLevelAlreadyNullable && (tp match { - case tp: TypeRef => + if outermostLevelAlreadyNullable then false + else tp match + case tp: TypeRef if // We don't modify value types because they're non-nullable even in Java. - !tp.symbol.isValueClass && + tp.symbol.isValueClass + // We don't modify unit types. + || tp.isRef(defn.UnitClass) // We don't modify `Any` because it's already nullable. 
- !tp.isRef(defn.AnyClass) && + || tp.isRef(defn.AnyClass) // We don't nullify Java varargs at the top level. // Example: if `setNames` is a Java method with signature `void setNames(String... names)`, // then its Scala signature will be `def setNames(names: (String|Null)*): Unit`. // This is because `setNames(null)` passes as argument a single-element array containing the value `null`, // and not a `null` array. - !tp.isRef(defn.RepeatedParamClass) + || !ctx.flexibleTypes && tp.isRef(defn.RepeatedParamClass) => false case _ => true - }) override def apply(tp: Type): Type = tp match { - case tp: TypeRef if needsNull(tp) => OrNull(tp) + case tp: TypeRef if needsNull(tp) => nullify(tp) case appTp @ AppliedType(tycon, targs) => val oldOutermostNullable = outermostLevelAlreadyNullable // We don't make the outmost levels of type arguments nullable if tycon is Java-defined. @@ -124,7 +128,7 @@ object JavaNullInterop { val targs2 = targs map this outermostLevelAlreadyNullable = oldOutermostNullable val appTp2 = derivedAppliedType(appTp, tycon, targs2) - if needsNull(tycon) then OrNull(appTp2) else appTp2 + if needsNull(tycon) then nullify(appTp2) else appTp2 case ptp: PolyType => derivedLambdaType(ptp)(ptp.paramInfos, this(ptp.resType)) case mtp: MethodType => @@ -138,12 +142,12 @@ object JavaNullInterop { // nullify(A & B) = (nullify(A) & nullify(B)) | Null, but take care not to add // duplicate `Null`s at the outermost level inside `A` and `B`. outermostLevelAlreadyNullable = true - OrNull(derivedAndType(tp, this(tp.tp1), this(tp.tp2))) - case tp: TypeParamRef if needsNull(tp) => OrNull(tp) + nullify(derivedAndType(tp, this(tp.tp1), this(tp.tp2))) + case tp: TypeParamRef if needsNull(tp) => nullify(tp) // In all other cases, return the type unchanged. // In particular, if the type is a ConstantType, then we don't nullify it because it is the // type of a final non-nullable field. 
case _ => tp } } -} +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala index fb278ab92dc9..e16a950aa32a 100644 --- a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala +++ b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala @@ -12,6 +12,7 @@ object MatchTypeTrace: private enum TraceEntry: case TryReduce(scrut: Type) + case NoMatches(scrut: Type, cases: List[MatchTypeCaseSpec]) case Stuck(scrut: Type, stuckCase: MatchTypeCaseSpec, otherCases: List[MatchTypeCaseSpec]) case NoInstance(scrut: Type, stuckCase: MatchTypeCaseSpec, fails: List[(Name, TypeBounds)]) case EmptyScrutinee(scrut: Type) @@ -50,6 +51,12 @@ object MatchTypeTrace: case _ => case _ => + /** Record a failure that scrutinee `scrut` does not match any case in `cases`. + * Only the first failure is recorded. + */ + def noMatches(scrut: Type, cases: List[MatchTypeCaseSpec])(using Context) = + matchTypeFail(NoMatches(scrut, cases)) + /** Record a failure that scrutinee `scrut` does not match `stuckCase` but is * not disjoint from it either, which means that the remaining cases `otherCases` * cannot be visited. Only the first failure is recorded. 
@@ -71,7 +78,7 @@ object MatchTypeTrace: */ def recurseWith(scrut: Type)(op: => Type)(using Context): Type = ctx.property(MatchTrace) match - case Some(trace) => + case Some(trace) if !trace.entries.contains(TryReduce(scrut)) => val prev = trace.entries trace.entries = TryReduce(scrut) :: prev val res = op @@ -95,6 +102,11 @@ object MatchTypeTrace: private def explainEntry(entry: TraceEntry)(using Context): String = entry match case TryReduce(scrut: Type) => i" trying to reduce $scrut" + case NoMatches(scrut, cases) => + i""" failed since selector $scrut + | matches none of the cases + | + | ${casesText(cases)}""" case EmptyScrutinee(scrut) => i""" failed since selector $scrut | is uninhabited (there are no values of that type).""" diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index 71b49394ae14..14d7827974c0 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -41,6 +41,8 @@ object Mode { val Pattern: Mode = newMode(0, "Pattern") val Type: Mode = newMode(1, "Type") + val PatternOrTypeBits: Mode = Pattern | Type + val ImplicitsEnabled: Mode = newMode(2, "ImplicitsEnabled") val InferringReturnType: Mode = newMode(3, "InferringReturnType") @@ -101,16 +103,19 @@ object Mode { */ val CheckBoundsOrSelfType: Mode = newMode(14, "CheckBoundsOrSelfType") - /** Use Scala2 scheme for overloading and implicit resolution */ - val OldOverloadingResolution: Mode = newMode(15, "OldOverloadingResolution") + /** Use previous Scheme for implicit resolution. Currently significant + * in 3.0-migration where we use Scala-2's scheme instead and in 3.5 and 3.6-migration + * where we use the previous scheme up to 3.4 for comparison with the new scheme. + */ + val OldImplicitResolution: Mode = newMode(15, "OldImplicitResolution") /** Treat CapturingTypes as plain AnnotatedTypes even in phase CheckCaptures. 
- * Reuses the value of OldOverloadingResolution to save Mode bits. - * This is OK since OldOverloadingResolution only affects implicit search, which + * Reuses the value of OldImplicitResolution to save Mode bits. + * This is OK since OldImplicitResolution only affects implicit search, which * is done during phases Typer and Inlinig, and IgnoreCaptures only has an * effect during phase CheckCaptures. */ - val IgnoreCaptures = OldOverloadingResolution + val IgnoreCaptures = OldImplicitResolution /** Allow hk applications of type lambdas to wildcard arguments; * used for checking that such applications do not normally arise @@ -120,8 +125,6 @@ object Mode { /** Read original positions when unpickling from TASTY */ val ReadPositions: Mode = newMode(17, "ReadPositions") - val PatternOrTypeBits: Mode = Pattern | Type - /** We are elaborating the fully qualified name of a package clause. * In this case, identifiers should never be imported. */ @@ -133,6 +136,8 @@ object Mode { /** We are typing the body of an inline method */ val InlineableBody: Mode = newMode(21, "InlineableBody") + val NewGivenRules: Mode = newMode(22, "NewGivenRules") + /** We are synthesizing the receiver of an extension method */ val SynthesizeExtMethodReceiver: Mode = newMode(23, "SynthesizeExtMethodReceiver") diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index d4f009cbbbd5..74d440562824 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -182,13 +182,13 @@ object NameKinds { case DerivedName(underlying, info: this.NumberedInfo) => Some((underlying, info.num)) case _ => None } - protected def skipSeparatorAndNum(name: SimpleName, separator: String): Int = { + protected def skipSeparatorAndNum(name: SimpleName, separator: String): Int = var i = name.length - while (i > 0 && name(i - 1).isDigit) i -= 1 - if (i > separator.length && i < name.length && - 
name.slice(i - separator.length, i).toString == separator) i + while i > 0 && name(i - 1).isDigit do i -= 1 + if i >= separator.length && i < name.length + && name.slice(i - separator.length, i).toString == separator + then i else -1 - } numberedNameKinds(tag) = this: @unchecked } @@ -240,6 +240,16 @@ object NameKinds { } } + /** Unique names that can be unmangled */ + class UniqueNameKindWithUnmangle(separator: String) extends UniqueNameKind(separator): + override def unmangle(name: SimpleName): TermName = + val i = skipSeparatorAndNum(name, separator) + if i > 0 then + val index = name.drop(i).toString.toInt + val original = name.take(i - separator.length).asTermName + apply(original, index) + else name + /** Names of the form `prefix . name` */ val QualifiedName: QualifiedNameKind = new QualifiedNameKind(QUALIFIED, ".") @@ -288,7 +298,7 @@ object NameKinds { * * The "evidence$" prefix is a convention copied from Scala 2. */ - val ContextBoundParamName: UniqueNameKind = new UniqueNameKind("evidence$") + val ContextBoundParamName: UniqueNameKind = new UniqueNameKindWithUnmangle("evidence$") /** The name of an inferred contextual function parameter: * @@ -323,20 +333,7 @@ object NameKinds { val InlineBinderName: UniqueNameKind = new UniqueNameKind("$proxy") val MacroNames: UniqueNameKind = new UniqueNameKind("$macro$") - /** A kind of unique extension methods; Unlike other unique names, these can be - * unmangled. 
- */ - val UniqueExtMethName: UniqueNameKind = new UniqueNameKind("$extension") { - override def unmangle(name: SimpleName): TermName = { - val i = skipSeparatorAndNum(name, separator) - if (i > 0) { - val index = name.drop(i).toString.toInt - val original = name.take(i - separator.length).asTermName - apply(original, index) - } - else name - } - } + val UniqueExtMethName: UniqueNameKind = new UniqueNameKindWithUnmangle("$extension") /** Kinds of unique names generated by the pattern matcher */ val PatMatStdBinderName: UniqueNameKind = new UniqueNameKind("x") diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index 75a135826785..07cb9292baa4 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -4,7 +4,10 @@ package core import Contexts.*, Symbols.*, Types.*, Flags.*, Scopes.*, Decorators.*, Names.*, NameOps.* import SymDenotations.{LazyType, SymDenotation}, StdNames.nme +import ContextOps.enter import TypeApplications.EtaExpansion +import collection.mutable +import config.Printers.typr /** Operations that are shared between Namer and TreeUnpickler */ object NamerOps: @@ -15,8 +18,41 @@ object NamerOps: */ def effectiveResultType(ctor: Symbol, paramss: List[List[Symbol]])(using Context): Type = paramss match - case TypeSymbols(tparams) :: _ => ctor.owner.typeRef.appliedTo(tparams.map(_.typeRef)) - case _ => ctor.owner.typeRef + case TypeSymbols(tparams) :: rest => + addParamRefinements(ctor.owner.typeRef.appliedTo(tparams.map(_.typeRef)), rest) + case _ => + addParamRefinements(ctor.owner.typeRef, paramss) + + /** Given a method with tracked term-parameters `p1, ..., pn`, and result type `R`, add the + * refinements R { p1 = p1' } ... { pn = pn' }, where pi' is the TermParamRef + * of the parameter and pi is its name. This matters only under experimental.modularity, + * since without it there are no tracked parameters. 
Parameter refinements are added for + * constructors and given companion methods. + */ + def addParamRefinements(resType: Type, paramss: List[List[Symbol]])(using Context): Type = + paramss.flatten.foldLeft(resType): (rt, param) => + if param.is(Tracked) then RefinedType(rt, param.name, param.termRef) + else rt + + /** Split dependent class refinements off parent type. Add them to `refinements`, + * unless it is null. + */ + extension (tp: Type) + def separateRefinements(cls: ClassSymbol, refinements: mutable.LinkedHashMap[Name, Type] | Null)(using Context): Type = + tp match + case RefinedType(tp1, rname, rinfo) => + try tp1.separateRefinements(cls, refinements) + finally + if refinements != null then + refinements(rname) = refinements.get(rname) match + case Some(tp) => tp & rinfo + case None => rinfo + case tp @ AnnotatedType(tp1, ann) => + tp.derivedAnnotatedType(tp1.separateRefinements(cls, refinements), ann) + case tp: RecType => + tp.parent.substRecThis(tp, cls.thisType).separateRefinements(cls, refinements) + case tp => + tp /** If isConstructor, make sure it has at least one non-implicit parameter list * This is done by adding a () in front of a leading old style implicit parameter, @@ -222,4 +258,55 @@ object NamerOps: rhsCtx.gadtState.addBound(psym, tr, isUpper = true) } + /** Create a context-bound companion for type symbol `tsym`, which has a context + * bound that defines a set of witnesses with names `witnessNames`. + * + * @param params If `tsym` is a type parameter, a list of parameter symbols + * that includes all witnesses, otherwise the empty list. + * + * The context-bound companion has as name the name of `tsym` translated to + * a term name. We create a synthetic val of the form + * + * val A: ``[witnessRef1 | ... | witnessRefN] + * + * where + * + * is the CBCompanion type created in Definitions + * withnessRefK is a refence to the K'th witness. + * + * The companion has the same access flags as the original type. 
+ */ + def addContextBoundCompanionFor(tsym: Symbol, witnessNames: List[TermName], params: List[Symbol])(using Context): Unit = + val prefix = ctx.owner.thisType + val companionName = tsym.name.toTermName + val witnessRefs = + if params.nonEmpty then + witnessNames.map: witnessName => + prefix.select(params.find(_.name == witnessName).get) + else + witnessNames.map(TermRef(prefix, _)) + val cbtype = defn.CBCompanion.typeRef.appliedTo: + witnessRefs.reduce[Type](OrType(_, _, soft = false)) + val cbc = newSymbol( + ctx.owner, companionName, + (tsym.flagsUNSAFE & (AccessFlags)).toTermFlags | Synthetic, + cbtype) + typr.println(s"context bound companion created $cbc for $witnessNames in ${ctx.owner}") + ctx.enter(cbc) + end addContextBoundCompanionFor + + /** Add context bound companions to all context-bound types declared in + * this class. This assumes that these types already have their + * WitnessNames annotation set even before they are completed. This is + * the case for unpickling but currently not for Namer. So the method + * is only called during unpickling. + */ + def addContextBoundCompanions(cls: ClassSymbol)(using Context): Unit = + for sym <- cls.info.decls do + if sym.isType && !sym.isClass then + for ann <- sym.annotationsUNSAFE do + if ann.symbol == defn.WitnessNamesAnnot then + ann.tree match + case ast.tpd.WitnessNamesAnnot(witnessNames) => + addContextBoundCompanionFor(sym, witnessNames, Nil) end NamerOps diff --git a/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala b/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala index 4f22f9d31e36..291498dbc558 100644 --- a/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala +++ b/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala @@ -14,7 +14,7 @@ object NullOpsDecorator: * If this type isn't (syntactically) nullable, then returns the type unchanged. * The type will not be changed if explicit-nulls is not enabled. 
*/ - def stripNull(using Context): Type = { + def stripNull(stripFlexibleTypes: Boolean = true)(using Context): Type = { def strip(tp: Type): Type = val tpWiden = tp.widenDealias val tpStripped = tpWiden match { @@ -33,6 +33,9 @@ object NullOpsDecorator: if (tp1s ne tp1) && (tp2s ne tp2) then tp.derivedAndType(tp1s, tp2s) else tp + case tp: FlexibleType => + val hi1 = strip(tp.hi) + if stripFlexibleTypes then hi1 else tp.derivedFlexibleType(hi1) case tp @ TypeBounds(lo, hi) => tp.derivedTypeBounds(strip(lo), strip(hi)) case tp => tp @@ -44,7 +47,7 @@ object NullOpsDecorator: /** Is self (after widening and dealiasing) a type of the form `T | Null`? */ def isNullableUnion(using Context): Boolean = { - val stripped = self.stripNull + val stripped = self.stripNull() stripped ne self } end extension diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index e11ac26ef93c..8256a3cdbab1 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -315,7 +315,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, override def tyconTypeParams(tp: AppliedType)(using Context): List[ParamInfo] = def tparams(tycon: Type): List[ParamInfo] = tycon match - case tycon: TypeVar if !tycon.inst.exists => tparams(tycon.origin) + case tycon: TypeVar if !tycon.isPermanentlyInstantiated => tparams(tycon.origin) case tycon: TypeParamRef if !hasBounds(tycon) => val entryParams = entry(tycon).typeParams if entryParams.nonEmpty then entryParams @@ -562,11 +562,11 @@ class OrderingConstraint(private val boundsMap: ParamBounds, val underlying1 = recur(tp.underlying) if underlying1 ne tp.underlying then underlying1 else tp case CapturingType(parent, refs) => - val parent1 = recur(parent) - if parent1 ne parent then tp.derivedCapturingType(parent1, refs) else tp + tp.derivedCapturingType(recur(parent), refs) + case tp: 
FlexibleType => + tp.derivedFlexibleType(recur(tp.hi)) case tp: AnnotatedType => - val parent1 = recur(tp.parent) - if parent1 ne tp.parent then tp.derivedAnnotatedType(parent1, tp.annot) else tp + tp.derivedAnnotatedType(recur(tp.parent), tp.annot) case _ => val tp1 = tp.dealiasKeepAnnots if tp1 ne tp then @@ -715,7 +715,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, var newDepEntry = newEntry replacedTypeVar match case tvar: TypeVar => - if tvar.inst.exists // `isInstantiated` would use ctx.typerState.constraint rather than the current constraint + if tvar.isPermanentlyInstantiated // `isInstantiated` would use ctx.typerState.constraint rather than the current constraint then // If the type variable has been instantiated, we need to forget about // the instantiation for old dependencies. @@ -781,7 +781,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, @tailrec def allRemovable(last: Int): Boolean = if (last < 0) true else typeVar(entries, last) match { - case tv: TypeVar => tv.inst.exists && allRemovable(last - 1) + case tv: TypeVar => tv.isPermanentlyInstantiated && allRemovable(last - 1) case _ => false } allRemovable(paramCount(entries) - 1) @@ -887,7 +887,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, val limit = paramCount(entries) while i < limit do typeVar(entries, i) match - case tv: TypeVar if !tv.inst.exists => op(tv) + case tv: TypeVar if !tv.isPermanentlyInstantiated => op(tv) case _ => i += 1 } @@ -896,12 +896,12 @@ class OrderingConstraint(private val boundsMap: ParamBounds, /** The uninstantiated typevars of this constraint */ def uninstVars: collection.Seq[TypeVar] = { - if (myUninstVars == null || myUninstVars.uncheckedNN.exists(_.inst.exists)) { + if (myUninstVars == null || myUninstVars.uncheckedNN.exists(_.isPermanentlyInstantiated)) { myUninstVars = new mutable.ArrayBuffer[TypeVar] boundsMap.foreachBinding { (poly, entries) => for (i <- 0 until paramCount(entries)) typeVar(entries, i) 
match { - case tv: TypeVar if !tv.inst.exists && isBounds(entries(i)) => myUninstVars.uncheckedNN += tv + case tv: TypeVar if !tv.isPermanentlyInstantiated && isBounds(entries(i)) => myUninstVars.uncheckedNN += tv case _ => } } diff --git a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala index 38f8e19e2737..9baf0c40a80b 100644 --- a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala +++ b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala @@ -88,11 +88,6 @@ trait PatternTypeConstrainer { self: TypeComparer => } } - def stripRefinement(tp: Type): Type = tp match { - case tp: RefinedOrRecType => stripRefinement(tp.parent) - case tp => tp - } - def tryConstrainSimplePatternType(pat: Type, scrut: Type) = { val patCls = pat.classSymbol val scrCls = scrut.classSymbol @@ -163,7 +158,7 @@ trait PatternTypeConstrainer { self: TypeComparer => } } - def dealiasDropNonmoduleRefs(tp: Type) = tp.dealias match { + def dealiasDropNonmoduleRefs(tp: Type): Type = tp.dealias match { case tp: TermRef => // we drop TermRefs that don't have a class symbol, as they can't // meaningfully participate in GADT reasoning and just get in the way. @@ -172,6 +167,7 @@ trait PatternTypeConstrainer { self: TypeComparer => // additional trait - argument-less enum cases desugar to vals. // See run/enum-Tree.scala. 
if tp.classSymbol.exists then tp else tp.info + case tp: FlexibleType => dealiasDropNonmoduleRefs(tp.underlying) case tp => tp } @@ -181,14 +177,14 @@ trait PatternTypeConstrainer { self: TypeComparer => case AndType(scrut1, scrut2) => constrainPatternType(pat, scrut1) && constrainPatternType(pat, scrut2) case scrut: RefinedOrRecType => - constrainPatternType(pat, stripRefinement(scrut)) + constrainPatternType(pat, scrut.stripRefinement) case scrut => dealiasDropNonmoduleRefs(pat) match { case OrType(pat1, pat2) => either(constrainPatternType(pat1, scrut), constrainPatternType(pat2, scrut)) case AndType(pat1, pat2) => constrainPatternType(pat1, scrut) && constrainPatternType(pat2, scrut) case pat: RefinedOrRecType => - constrainPatternType(stripRefinement(pat), scrut) + constrainPatternType(pat.stripRefinement, scrut) case pat => tryConstrainSimplePatternType(pat, scrut) || classesMayBeCompatible && constrainUpcasted(scrut) @@ -200,8 +196,8 @@ trait PatternTypeConstrainer { self: TypeComparer => * * This function expects to receive two types (scrutinee and pattern), both * of which have class symbols, one of which is derived from another. If the - * type "being derived from" is an applied type, it will 1) "upcast" the - * deriving type to an applied type with the same constructor and 2) infer + * type "being derived from" is an applied type, it will 1) "upcast" both + * types to an applied type with the same constructor and 2) infer * constraints for the applied types' arguments that follow from both * types being inhabited by one value (the scrutinee). 
* @@ -252,11 +248,9 @@ trait PatternTypeConstrainer { self: TypeComparer => val scrutineeCls = scrutineeTp.classSymbol // NOTE: we already know that there is a derives-from relationship in either direction - val upcastPattern = - patternCls.derivesFrom(scrutineeCls) - - val pt = if upcastPattern then patternTp.baseType(scrutineeCls) else patternTp - val tp = if !upcastPattern then scrutineeTp.baseType(patternCls) else scrutineeTp + val base = if patternCls.derivesFrom(scrutineeCls) then scrutineeCls else patternCls + val pt = patternTp.baseType(base) + val tp = scrutineeTp.baseType(base) val assumeInvariantRefinement = migrateTo3 || forceInvariantRefinement || refinementIsInvariant(patternTp) diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index c704846a82da..7f925b0fc322 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -126,7 +126,7 @@ object Phases { * The list should never contain NoPhase. * if fusion is enabled, phases in same subgroup will be fused to single phase. */ - final def usePhases(phasess: List[Phase], fuse: Boolean = true): Unit = { + final def usePhases(phasess: List[Phase], runCtx: FreshContext, fuse: Boolean = true): Unit = { val flatPhases = collection.mutable.ListBuffer[Phase]() @@ -161,11 +161,21 @@ object Phases { phase match { case p: MegaPhase => val miniPhases = p.miniPhases - miniPhases.foreach{ phase => + for phase <- miniPhases do checkRequirements(phase) - phase.init(this, nextPhaseId)} + // Given phases a chance to initialize state based on the run context. + // + // `phase.initContext` should be called before `phase.init` as the later calls abstract methods + // `changesMembers` and `changeParents` which may depend on the run context. 
+ // + // See `PostTyper.changeParents` + phase.initContext(runCtx) + phase.init(this, nextPhaseId) + end for p.init(this, miniPhases.head.id, miniPhases.last.id) case _ => + // See comment above about the ordering of the two calls. + phase.initContext(runCtx) phase.init(this, nextPhaseId) checkRequirements(phase) } @@ -210,6 +220,7 @@ object Phases { private var myTyperPhase: Phase = uninitialized private var myPostTyperPhase: Phase = uninitialized private var mySbtExtractDependenciesPhase: Phase = uninitialized + private var mySbtExtractAPIPhase: Phase = uninitialized private var myPicklerPhase: Phase = uninitialized private var myInliningPhase: Phase = uninitialized private var myStagingPhase: Phase = uninitialized @@ -220,6 +231,7 @@ object Phases { private var myPatmatPhase: Phase = uninitialized private var myElimRepeatedPhase: Phase = uninitialized private var myElimByNamePhase: Phase = uninitialized + private var myElimOpaquePhase: Phase = uninitialized private var myExtensionMethodsPhase: Phase = uninitialized private var myExplicitOuterPhase: Phase = uninitialized private var myGettersPhase: Phase = uninitialized @@ -235,6 +247,7 @@ object Phases { final def typerPhase: Phase = myTyperPhase final def postTyperPhase: Phase = myPostTyperPhase final def sbtExtractDependenciesPhase: Phase = mySbtExtractDependenciesPhase + final def sbtExtractAPIPhase: Phase = mySbtExtractAPIPhase final def picklerPhase: Phase = myPicklerPhase final def inliningPhase: Phase = myInliningPhase final def stagingPhase: Phase = myStagingPhase @@ -245,6 +258,7 @@ object Phases { final def patmatPhase: Phase = myPatmatPhase final def elimRepeatedPhase: Phase = myElimRepeatedPhase final def elimByNamePhase: Phase = myElimByNamePhase + final def elimOpaquePhase: Phase = myElimOpaquePhase final def extensionMethodsPhase: Phase = myExtensionMethodsPhase final def explicitOuterPhase: Phase = myExplicitOuterPhase final def gettersPhase: Phase = myGettersPhase @@ -263,6 +277,7 @@ object 
Phases { myTyperPhase = phaseOfClass(classOf[TyperPhase]) myPostTyperPhase = phaseOfClass(classOf[PostTyper]) mySbtExtractDependenciesPhase = phaseOfClass(classOf[sbt.ExtractDependencies]) + mySbtExtractAPIPhase = phaseOfClass(classOf[sbt.ExtractAPI]) myPicklerPhase = phaseOfClass(classOf[Pickler]) myInliningPhase = phaseOfClass(classOf[Inlining]) myStagingPhase = phaseOfClass(classOf[Staging]) @@ -272,6 +287,7 @@ object Phases { myRefChecksPhase = phaseOfClass(classOf[RefChecks]) myElimRepeatedPhase = phaseOfClass(classOf[ElimRepeated]) myElimByNamePhase = phaseOfClass(classOf[ElimByName]) + myElimOpaquePhase = phaseOfClass(classOf[ElimOpaque]) myExtensionMethodsPhase = phaseOfClass(classOf[ExtensionMethods]) myErasurePhase = phaseOfClass(classOf[Erasure]) myElimErasedValueTypePhase = phaseOfClass(classOf[ElimErasedValueType]) @@ -333,22 +349,32 @@ object Phases { def subPhases: List[Run.SubPhase] = Nil final def traversals: Int = if subPhases.isEmpty then 1 else subPhases.length - /** skip the phase for a Java compilation unit, may depend on -Yjava-tasty */ + /** skip the phase for a Java compilation unit, may depend on -Xjava-tasty */ def skipIfJava(using Context): Boolean = true + final def isAfterLastJavaPhase(using Context): Boolean = + // With `-Xjava-tasty` nominally the final phase is expected be ExtractAPI, + // otherwise drop Java sources at the end of TyperPhase. + // Checks if the last Java phase is before this phase, + // which always fails if the terminal phase is before lastJavaPhase. 
+ val lastJavaPhase = if ctx.settings.XjavaTasty.value then sbtExtractAPIPhase else typerPhase + lastJavaPhase <= this + /** @pre `isRunnable` returns true */ def run(using Context): Unit /** @pre `isRunnable` returns true */ def runOn(units: List[CompilationUnit])(using runCtx: Context): List[CompilationUnit] = val buf = List.newBuilder[CompilationUnit] - // factor out typedAsJava check when not needed - val doSkipJava = ctx.settings.YjavaTasty.value && this <= picklerPhase && skipIfJava + + // Test that we are in a state where we need to check if the phase should be skipped for a java file, + // this prevents checking the expensive `unit.typedAsJava` unnecessarily. + val doCheckJava = skipIfJava && !isAfterLastJavaPhase for unit <- units do given unitCtx: Context = runCtx.fresh.setPhase(this.start).setCompilationUnit(unit).withRootImports if ctx.run.enterUnit(unit) then try - if doSkipJava && unit.typedAsJava then + if doCheckJava && unit.typedAsJava then () else run @@ -503,6 +529,7 @@ object Phases { def typerPhase(using Context): Phase = ctx.base.typerPhase def postTyperPhase(using Context): Phase = ctx.base.postTyperPhase def sbtExtractDependenciesPhase(using Context): Phase = ctx.base.sbtExtractDependenciesPhase + def sbtExtractAPIPhase(using Context): Phase = ctx.base.sbtExtractAPIPhase def picklerPhase(using Context): Phase = ctx.base.picklerPhase def inliningPhase(using Context): Phase = ctx.base.inliningPhase def stagingPhase(using Context): Phase = ctx.base.stagingPhase @@ -511,6 +538,7 @@ object Phases { def refchecksPhase(using Context): Phase = ctx.base.refchecksPhase def elimRepeatedPhase(using Context): Phase = ctx.base.elimRepeatedPhase def elimByNamePhase(using Context): Phase = ctx.base.elimByNamePhase + def elimOpaquePhase(using Context): Phase = ctx.base.elimOpaquePhase def extensionMethodsPhase(using Context): Phase = ctx.base.extensionMethodsPhase def explicitOuterPhase(using Context): Phase = ctx.base.explicitOuterPhase def 
gettersPhase(using Context): Phase = ctx.base.gettersPhase diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index 9772199678d7..b935488695e0 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -288,6 +288,7 @@ object StdNames { // Compiler-internal val CAPTURE_ROOT: N = "cap" + val CBCompanion: N = "" val CONSTRUCTOR: N = "" val STATIC_CONSTRUCTOR: N = "" val EVT2U: N = "evt2u$" @@ -362,6 +363,8 @@ object StdNames { val EnumValue: N = "EnumValue" val ExistentialTypeTree: N = "ExistentialTypeTree" val Flag : N = "Flag" + val Fields: N = "Fields" + val From: N = "From" val Ident: N = "Ident" val Import: N = "Import" val Literal: N = "Literal" @@ -374,6 +377,7 @@ object StdNames { val MirroredMonoType: N = "MirroredMonoType" val MirroredType: N = "MirroredType" val Modifiers: N = "Modifiers" + val NamedTuple: N = "NamedTuple" val NestedAnnotArg: N = "NestedAnnotArg" val NoFlags: N = "NoFlags" val NoPrefix: N = "NoPrefix" @@ -384,6 +388,7 @@ object StdNames { val RootPackage: N = "RootPackage" val RootClass: N = "RootClass" val Select: N = "Select" + val Self: N = "Self" val Shape: N = "Shape" val StringContext: N = "StringContext" val This: N = "This" @@ -393,6 +398,7 @@ object StdNames { val TypeApply: N = "TypeApply" val TypeRef: N = "TypeRef" val UNIT : N = "UNIT" + val WitnessNames: N = "WitnessNames" val acc: N = "acc" val adhocExtensions: N = "adhocExtensions" val andThen: N = "andThen" @@ -452,6 +458,7 @@ object StdNames { val create: N = "create" val currentMirror: N = "currentMirror" val curried: N = "curried" + val deferred: N = "deferred" val definitions: N = "definitions" val delayedInit: N = "delayedInit" val delayedInitArg: N = "delayedInit$body" @@ -620,11 +627,13 @@ object StdNames { val throws: N = "throws" val toArray: N = "toArray" val toList: N = "toList" + val toTuple: N = "toTuple" val toObjectArray : N = 
"toObjectArray" val toSeq: N = "toSeq" val toString_ : N = "toString" val toTypeConstructor: N = "toTypeConstructor" val tpe : N = "tpe" + val tracked: N = "tracked" val transparent : N = "transparent" val tree : N = "tree" val true_ : N = "true" @@ -649,6 +658,7 @@ object StdNames { val wildcardType: N = "wildcardType" val withFilter: N = "withFilter" val withFilterIfRefutable: N = "withFilterIfRefutable$" + val withNames: N = "withNames" val WorksheetWrapper: N = "WorksheetWrapper" val wrap: N = "wrap" val writeReplace: N = "writeReplace" diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 14ba05568735..3904228756a0 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -23,7 +23,7 @@ import scala.util.control.NonFatal import config.Config import reporting.* import collection.mutable -import cc.{CapturingType, derivedCapturingType} +import cc.{CapturingType, derivedCapturingType, stripCapturing} import scala.annotation.internal.sharable import scala.compiletime.uninitialized @@ -168,16 +168,11 @@ object SymDenotations { } } else - val traceCycles = CyclicReference.isTraced - try - if traceCycles then - CyclicReference.pushTrace("compute the signature of ", symbol, "") + CyclicReference.trace("compute the signature of ", symbol): if myFlags.is(Touched) then throw CyclicReference(this)(using ctx.withOwner(symbol)) myFlags |= Touched atPhase(validFor.firstPhaseId)(completer.complete(this)) - finally - if traceCycles then CyclicReference.popTrace() protected[dotc] def info_=(tp: Type): Unit = { /* // DEBUG @@ -685,11 +680,9 @@ object SymDenotations { def isWrappedToplevelDef(using Context): Boolean = !isConstructor && owner.isPackageObject - /** Is this symbol an abstract type? */ - final def isAbstractType(using Context): Boolean = this.is(DeferredType) - /** Is this symbol an alias type? 
*/ - final def isAliasType(using Context): Boolean = isAbstractOrAliasType && !this.is(Deferred) + final def isAliasType(using Context): Boolean = + isAbstractOrAliasType && !isAbstractOrParamType /** Is this symbol an abstract or alias type? */ final def isAbstractOrAliasType: Boolean = isType & !isClass @@ -720,12 +713,16 @@ object SymDenotations { * TODO: Find a more robust way to characterize self symbols, maybe by * spending a Flag on them? */ - final def isSelfSym(using Context): Boolean = owner.infoOrCompleter match { - case ClassInfo(_, _, _, _, selfInfo) => - selfInfo == symbol || - selfInfo.isInstanceOf[Type] && name == nme.WILDCARD - case _ => false - } + final def isSelfSym(using Context): Boolean = + if !ctx.isBestEffort || exists then + owner.infoOrCompleter match { + case ClassInfo(_, _, _, _, selfInfo) => + selfInfo == symbol || + selfInfo.isInstanceOf[Type] && name == nme.WILDCARD + case _ => false + } + else false + /** Is this definition contained in `boundary`? * Same as `ownersIterator contains boundary` but more efficient. @@ -772,7 +769,7 @@ object SymDenotations { * This can mean one of two things: * - the method and class are defined in a structural given instance, or * - the class is an implicit class and the method is its implicit conversion. 
- */ + */ final def isCoDefinedGiven(cls: Symbol)(using Context): Boolean = is(Method) && isOneOf(GivenOrImplicit) && ( is(Synthetic) // previous scheme used in 3.0 @@ -1071,8 +1068,8 @@ object SymDenotations { */ final def moduleClass(using Context): Symbol = { def notFound = { - if (Config.showCompletions) println(s"missing module class for $name: $myInfo") - NoSymbol + if (Config.showCompletions) println(s"missing module class for $name: $myInfo") + NoSymbol } if (this.is(ModuleVal)) myInfo match { @@ -1190,21 +1187,25 @@ object SymDenotations { final def isExtensibleClass(using Context): Boolean = isClass && !isOneOf(FinalOrModuleClass) && !isAnonymousClass - /** A symbol is effectively final if it cannot be overridden in a subclass */ + /** A symbol is effectively final if it cannot be overridden */ final def isEffectivelyFinal(using Context): Boolean = isOneOf(EffectivelyFinalFlags) || is(Inline, butNot = Deferred) || is(JavaDefinedVal, butNot = Method) || isConstructor - || !owner.isExtensibleClass + || !owner.isExtensibleClass && !is(Deferred) + // Deferred symbols can arise through parent refinements under x.modularity. + // For them, the overriding relationship reverses anyway, so + // being in a final class does not mean the symbol cannot be + // implemented concretely in a superclass. /** A class is effectively sealed if has the `final` or `sealed` modifier, or it * is defined in Scala 3 and is neither abstract nor open. 
*/ final def isEffectivelySealed(using Context): Boolean = isOneOf(FinalOrSealed) - || isClass && (!isOneOf(EffectivelyOpenFlags) - || isLocalToCompilationUnit) + || isClass + && (!isOneOf(EffectivelyOpenFlags) || isLocalToCompilationUnit) final def isLocalToCompilationUnit(using Context): Boolean = is(Private) @@ -1355,7 +1356,7 @@ object SymDenotations { * @param inClass The class containing the result symbol's definition * @param site The base type from which member types are computed * - * inClass <-- find denot.symbol class C { <-- symbol is here + * inClass <-- find denot.symbol class C { <-- symbol is here } * * site: Subtype of both inClass and C */ @@ -1609,7 +1610,7 @@ object SymDenotations { case tp: RefinedType => hasSkolems(tp.parent) || hasSkolems(tp.refinedInfo) case tp: RecType => hasSkolems(tp.parent) case tp: TypeBounds => hasSkolems(tp.lo) || hasSkolems(tp.hi) - case tp: TypeVar => hasSkolems(tp.inst) + case tp: TypeVar => hasSkolems(tp.permanentInst) case tp: ExprType => hasSkolems(tp.resType) case tp: AppliedType => hasSkolems(tp.tycon) || tp.args.exists(hasSkolems) case tp: LambdaType => tp.paramInfos.exists(hasSkolems) || hasSkolems(tp.resType) @@ -1624,11 +1625,11 @@ object SymDenotations { // ----- copies and transforms ---------------------------------------- - protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = + protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean)(using Context): SingleDenotation = if isRefinedMethod then - new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) + new JointRefDenotation(s, i, currentStablePeriod, pre, isRefinedMethod) else - new UniqueRefDenotation(s, i, validFor, pre) + new UniqueRefDenotation(s, i, currentStablePeriod, pre) /** Copy this denotation, overriding selective fields */ final def copySymDenotation( @@ -2003,7 +2004,7 @@ object SymDenotations { case p :: parents1 => p.classSymbol match { case pcls: 
ClassSymbol => builder.addAll(pcls.baseClasses) - case _ => assert(isRefinementClass || p.isError || ctx.mode.is(Mode.Interactive), s"$this has non-class parent: $p") + case _ => assert(isRefinementClass || p.isError || ctx.mode.is(Mode.Interactive) || ctx.tolerateErrorsForBestEffort, s"$this has non-class parent: $p") } traverse(parents1) case nil => @@ -2228,7 +2229,7 @@ object SymDenotations { tp match { case tp @ TypeRef(prefix, _) => def foldGlb(bt: Type, ps: List[Type]): Type = ps match { - case p :: ps1 => foldGlb(bt & recur(p), ps1) + case p :: ps1 => foldGlb(bt & recur(p.stripCapturing), ps1) case _ => bt } @@ -2990,12 +2991,9 @@ object SymDenotations { def apply(clsd: ClassDenotation)(implicit onBehalf: BaseData, ctx: Context) : (List[ClassSymbol], BaseClassSet) = { assert(isValid) - val traceCycles = CyclicReference.isTraced - try - if traceCycles then - CyclicReference.pushTrace("compute the base classes of ", clsd.symbol, "") - if (cache != null) cache.uncheckedNN - else { + CyclicReference.trace("compute the base classes of ", clsd.symbol): + if cache != null then cache.uncheckedNN + else if (locked) throw CyclicReference(clsd) locked = true provisional = false @@ -3005,10 +3003,6 @@ object SymDenotations { if (!provisional) cache = computed else onBehalf.signalProvisional() computed - } - finally - if traceCycles then CyclicReference.popTrace() - addDependent(onBehalf) } def sameGroup(p1: Phase, p2: Phase) = p1.sameParentsStartId == p2.sameParentsStartId diff --git a/compiler/src/dotty/tools/dotc/core/SymUtils.scala b/compiler/src/dotty/tools/dotc/core/SymUtils.scala index 65634241b790..3a97a0053dbd 100644 --- a/compiler/src/dotty/tools/dotc/core/SymUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/SymUtils.scala @@ -87,6 +87,9 @@ class SymUtils: !d.isPrimitiveValueClass } + def isContextBoundCompanion(using Context): Boolean = + self.is(Synthetic) && self.infoOrCompleter.typeSymbol == defn.CBCompanion + /** Is this a case class for which a 
product mirror is generated? * Excluded are value classes, abstract classes and case classes with more than one * parameter section. diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 75c610b29140..51e6a5e6138a 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -7,7 +7,7 @@ import java.nio.channels.ClosedByInterruptException import scala.util.control.NonFatal -import dotty.tools.dotc.classpath.FileUtils.isTasty +import dotty.tools.dotc.classpath.FileUtils.{hasTastyExtension, hasBetastyExtension} import dotty.tools.io.{ ClassPath, ClassRepresentation, AbstractFile } import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions @@ -26,6 +26,7 @@ import parsing.JavaParsers.OutlineJavaParser import parsing.Parsers.OutlineParser import dotty.tools.tasty.{TastyHeaderUnpickler, UnpickleException, UnpicklerConfig, TastyVersion} import dotty.tools.dotc.core.tasty.TastyUnpickler +import dotty.tools.tasty.besteffort.BestEffortTastyHeaderUnpickler object SymbolLoaders { import ast.untpd.* @@ -79,12 +80,12 @@ object SymbolLoaders { // offer a setting to resolve the conflict one way or the other. // This was motivated by the desire to use YourKit probes, which // require yjp.jar at runtime. See SI-2089. - if (ctx.settings.YtermConflict.value == "package" || ctx.mode.is(Mode.Interactive)) { + if (ctx.settings.XtermConflict.value == "package" || ctx.mode.is(Mode.Interactive)) { report.warning( s"Resolving package/object name conflict in favor of package ${preExisting.fullName}. The object will be inaccessible.") owner.asClass.delete(preExisting) } - else if (ctx.settings.YtermConflict.value == "object") { + else if (ctx.settings.XtermConflict.value == "object") { report.warning( s"Resolving package/object name conflict in favor of object ${preExisting.fullName}. 
The package will be inaccessible.") return NoSymbol @@ -198,7 +199,7 @@ object SymbolLoaders { enterToplevelsFromSource(owner, nameOf(classRep), src) case (Some(bin), _) => val completer = - if bin.isTasty then ctx.platform.newTastyLoader(bin) + if bin.hasTastyExtension || bin.hasBetastyExtension then ctx.platform.newTastyLoader(bin) else ctx.platform.newClassLoader(bin) enterClassAndModule(owner, nameOf(classRep), completer) } @@ -221,8 +222,8 @@ object SymbolLoaders { Stats.record("package scopes") /** The scope of a package. This is different from a normal scope - * in that names of scope entries are kept in mangled form. - */ + * in that names of scope entries are kept in mangled form. + */ final class PackageScope extends MutableScope(0) { override def newScopeEntry(name: Name, sym: Symbol)(using Context): ScopeEntry = super.newScopeEntry(name.mangled, sym) @@ -261,7 +262,8 @@ object SymbolLoaders { (idx + str.TOPLEVEL_SUFFIX.length + 1 != name.length || !name.endsWith(str.TOPLEVEL_SUFFIX)) } - def maybeModuleClass(classRep: ClassRepresentation): Boolean = classRep.name.last == '$' + def maybeModuleClass(classRep: ClassRepresentation): Boolean = + classRep.name.nonEmpty && classRep.name.last == '$' private def enterClasses(root: SymDenotation, packageName: String, flat: Boolean)(using Context) = { def isAbsent(classRep: ClassRepresentation) = @@ -416,34 +418,45 @@ class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader { } class TastyLoader(val tastyFile: AbstractFile) extends SymbolLoader { - + val isBestEffortTasty = tastyFile.hasBetastyExtension private val unpickler: tasty.DottyUnpickler = handleUnpicklingExceptions: val tastyBytes = tastyFile.toByteArray - new tasty.DottyUnpickler(tastyFile, tastyBytes) // reads header and name table + new tasty.DottyUnpickler(tastyFile, tastyBytes, isBestEffortTasty) // reads header and name table val compilationUnitInfo: CompilationUnitInfo | Null = unpickler.compilationUnitInfo - def description(using 
Context): String = "TASTy file " + tastyFile.toString + def description(using Context): String = + if isBestEffortTasty then "Best Effort TASTy file " + tastyFile.toString + else "TASTy file " + tastyFile.toString override def doComplete(root: SymDenotation)(using Context): Unit = handleUnpicklingExceptions: - checkTastyUUID() val (classRoot, moduleRoot) = rootDenots(root.asClass) - unpickler.enter(roots = Set(classRoot, moduleRoot, moduleRoot.sourceModule))(using ctx.withSource(util.NoSource)) - if mayLoadTreesFromTasty then - classRoot.classSymbol.rootTreeOrProvider = unpickler - moduleRoot.classSymbol.rootTreeOrProvider = unpickler + if (!isBestEffortTasty || ctx.withBestEffortTasty) then + val tastyBytes = tastyFile.toByteArray + unpickler.enter(roots = Set(classRoot, moduleRoot, moduleRoot.sourceModule))(using ctx.withSource(util.NoSource)) + if mayLoadTreesFromTasty || isBestEffortTasty then + classRoot.classSymbol.rootTreeOrProvider = unpickler + moduleRoot.classSymbol.rootTreeOrProvider = unpickler + if isBestEffortTasty then + checkBeTastyUUID(tastyFile, tastyBytes) + ctx.setUsedBestEffortTasty() + else + checkTastyUUID() + else + report.error(em"Cannot read Best Effort TASTy $tastyFile without the ${ctx.settings.YwithBestEffortTasty.name} option") private def handleUnpicklingExceptions[T](thunk: =>T): T = try thunk catch case e: RuntimeException => + val tastyType = if (isBestEffortTasty) "Best Effort TASTy" else "TASTy" val message = e match case e: UnpickleException => - s"""TASTy file ${tastyFile.canonicalPath} could not be read, failing with: + s"""$tastyType file ${tastyFile.canonicalPath} could not be read, failing with: | ${Option(e.getMessage).getOrElse("")}""".stripMargin case _ => - s"""TASTy file ${tastyFile.canonicalPath} is broken, reading aborted with ${e.getClass} + s"""$tastyFile file ${tastyFile.canonicalPath} is broken, reading aborted with ${e.getClass} | ${Option(e.getMessage).getOrElse("")}""".stripMargin throw IOException(message, e) 
@@ -456,9 +469,13 @@ class TastyLoader(val tastyFile: AbstractFile) extends SymbolLoader { val tastyUUID = unpickler.unpickler.header.uuid new ClassfileTastyUUIDParser(classfile)(ctx).checkTastyUUID(tastyUUID) else - // This will be the case in any of our tests that compile with `-Youtput-only-tasty` + // This will be the case in any of our tests that compile with `-Youtput-only-tasty`, or when + // tasty file compiled by `-Xearly-tasty-output-write` comes from an early output jar. report.inform(s"No classfiles found for $tastyFile when checking TASTy UUID") + private def checkBeTastyUUID(tastyFile: AbstractFile, tastyBytes: Array[Byte])(using Context): Unit = + new BestEffortTastyHeaderUnpickler(tastyBytes).readHeader() + private def mayLoadTreesFromTasty(using Context): Boolean = ctx.settings.YretainTrees.value || ctx.settings.fromTasty.value } diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 78c736649605..da0ecac47b7d 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -84,7 +84,7 @@ object Symbols extends SymUtils { ctx.settings.YretainTrees.value || denot.owner.isTerm || // no risk of leaking memory after a run for these denot.isOneOf(InlineOrProxy) || // need to keep inline info - ctx.settings.YcheckInit.value || // initialization check + ctx.settings.WcheckInit.value || // initialization check ctx.settings.YcheckInitGlobal.value /** The last denotation of this symbol */ @@ -165,6 +165,10 @@ object Symbols extends SymUtils { final def isDefinedInSource(using Context): Boolean = span.exists && isValidInCurrentRun && associatedFileMatches(!_.isScalaBinary) + /** Is this symbol valid in the current run, but comes from the classpath? */ + final def isDefinedInBinary(using Context): Boolean = + isValidInCurrentRun && associatedFileMatches(_.isScalaBinary) + /** Is symbol valid in current run? 
*/ final def isValidInCurrentRun(using Context): Boolean = (lastDenot.validFor.runId == ctx.runId || stillValid(lastDenot)) && @@ -308,7 +312,6 @@ object Symbols extends SymUtils { * With the given setup, all such calls will give implicit-not found errors */ final def symbol(implicit ev: DontUseSymbolOnSymbol): Nothing = unsupported("symbol") - type DontUseSymbolOnSymbol final def source(using Context): SourceFile = { def valid(src: SourceFile): SourceFile = @@ -398,13 +401,12 @@ object Symbols extends SymUtils { flags: FlagSet = this.flags, info: Type = this.info, privateWithin: Symbol = this.privateWithin, - coord: Coord = NoCoord, // Can be `= owner.coord` once we bootstrap - compUnitInfo: CompilationUnitInfo | Null = null // Can be `= owner.associatedFile` once we bootstrap + coord: Coord = NoCoord, // Can be `= owner.coord` once we have new default args + compUnitInfo: CompilationUnitInfo | Null = null // Can be `= owner.compilationUnitInfo` once we have new default args ): Symbol = { val coord1 = if (coord == NoCoord) owner.coord else coord val compilationUnitInfo1 = if (compilationUnitInfo == null) owner.compilationUnitInfo else compilationUnitInfo - if isClass then newClassSymbol(owner, name.asTypeName, flags, _ => info, privateWithin, coord1, compilationUnitInfo1) else @@ -932,6 +934,8 @@ object Symbols extends SymUtils { case (x: Symbol) :: _ if x.isType => Some(xs.asInstanceOf[List[TypeSymbol]]) case _ => None + type DontUseSymbolOnSymbol + // ----- Locating predefined symbols ---------------------------------------- def requiredPackage(path: PreName)(using Context): TermSymbol = { diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index eeb18eaa9cc7..54636ff4ad58 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -461,7 +461,7 @@ class TypeApplications(val self: Type) extends AnyVal { */ 
final def toBounds(using Context): TypeBounds = self match { case self: TypeBounds => self // this can happen for wildcard args - case _ => if (self.isMatch) MatchAlias(self) else TypeAlias(self) + case _ => AliasingBounds(self) } /** Translate a type of the form From[T] to either To[T] or To[? <: T] (if `wildcardArg` is set). Keep other types as they are. @@ -541,6 +541,7 @@ class TypeApplications(val self: Type) extends AnyVal { */ final def argInfos(using Context): List[Type] = self.stripped match case AppliedType(tycon, args) => args + case tp: FlexibleType => tp.underlying.argInfos case _ => Nil /** If this is an encoding of a function type, return its arguments, otherwise return Nil. diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index b677dae3a38b..93ed6e7d03a5 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -10,7 +10,7 @@ import TypeOps.refineUsingParent import collection.mutable import util.{Stats, NoSourcePosition, EqHashMap} import config.Config -import config.Feature.{migrateTo3, sourceVersion} +import config.Feature.{betterMatchTypeExtractorsEnabled, migrateTo3, sourceVersion} import config.Printers.{subtyping, gadts, matchTypes, noPrinter} import config.SourceVersion import TypeErasure.{erasedLub, erasedGlb} @@ -864,6 +864,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling false } compareClassInfo + case tp2: FlexibleType => + recur(tp1, tp2.lo) case _ => fourthTry } @@ -1059,6 +1061,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp1: ExprType if ctx.phaseId > gettersPhase.id => // getters might have converted T to => T, need to compensate. 
recur(tp1.widenExpr, tp2) + case tp1: FlexibleType => + recur(tp1.hi, tp2) case _ => false } @@ -1375,7 +1379,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * tp1 <:< app2 using isSubType (this might instantiate params in tp2) */ def compareLower(tycon2bounds: TypeBounds, tyconIsTypeRef: Boolean): Boolean = - if ((tycon2bounds.lo `eq` tycon2bounds.hi) && !tycon2bounds.isInstanceOf[MatchAlias]) + if ((tycon2bounds.lo `eq` tycon2bounds.hi) && !tycon2bounds.isMatchAlias) if (tyconIsTypeRef) recur(tp1, tp2.superTypeNormalized) && recordGadtUsageIf(MatchType.thatReducesUsingGadt(tp2)) else isSubApproxHi(tp1, tycon2bounds.lo.applyIfParameterized(args2)) else @@ -1596,7 +1600,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val tycon1 = liftToThis(tp.tycon) if (tycon1 ne tp.tycon) tp.derivedAppliedType(tycon1, tp.args) else tp case tp: TypeVar if tp.isInstantiated => - liftToThis(tp.inst) + liftToThis(tp.instanceOpt) case tp: AnnotatedType => val parent1 = liftToThis(tp.parent) if (parent1 ne tp.parent) tp.derivedAnnotatedType(parent1, tp.annot) else tp @@ -1714,8 +1718,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * comparison will instantiate or constrain type variables first. 
*/ def isIncomplete(arg1: Type, arg2: Type): Boolean = - val arg1d = arg1.strippedDealias - val arg2d = arg2.strippedDealias + val arg1d = arg1.stripped + val arg2d = arg2.stripped (v >= 0) && (arg1d.isInstanceOf[AndType] || arg2d.isInstanceOf[OrType]) || (v <= 0) && (arg1d.isInstanceOf[OrType] || arg2d.isInstanceOf[AndType]) @@ -2304,7 +2308,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling Stats.record("cache same type") sames = new util.EqHashMap() val res = - try isSubType(tp1, tp2) && isSubType(tp2, tp1) + try rollbackConstraintsUnless(isSubType(tp1, tp2) && isSubType(tp2, tp1)) finally sameLevel -= 1 sames = savedSames @@ -2352,8 +2356,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling } /** The greatest lower bound of two types */ - def glb(tp1: Type, tp2: Type): Type = /*>|>*/ trace(s"glb(${tp1.show}, ${tp2.show})", subtyping, show = true) /*<|<*/ { - if (tp1 eq tp2) tp1 + def glb(tp1: Type, tp2: Type): Type = // trace(s"glb(${tp1.show}, ${tp2.show})", subtyping, show = true): + if tp1 eq tp2 then tp1 else if !tp1.exists || (tp1 eq WildcardType) then tp2 else if !tp2.exists || (tp2 eq WildcardType) then tp1 else if tp1.isAny && !tp2.isLambdaSub || tp1.isAnyKind || isBottom(tp2) then tp2 @@ -2366,12 +2370,12 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val tp2a = dropIfSuper(tp2, tp1) if tp2a ne tp2 then glb(tp1, tp2a) else tp2 match // normalize to disjunctive normal form if possible. 
- case tp2 @ OrType(tp21, tp22) => - lub(tp1 & tp21, tp1 & tp22, isSoft = tp2.isSoft) + case tp2 @ OrType(tp2L, tp2R) => + lub(tp1 & tp2L, tp1 & tp2R, isSoft = tp2.isSoft) case _ => tp1 match - case tp1 @ OrType(tp11, tp12) => - lub(tp11 & tp2, tp12 & tp2, isSoft = tp1.isSoft) + case tp1 @ OrType(tp1L, tp1R) => + lub(tp1L & tp2, tp1R & tp2, isSoft = tp1.isSoft) case tp1: ConstantType => tp2 match case tp2: ConstantType => @@ -2386,8 +2390,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling NothingType case _ => andType(tp1, tp2) case _ => andType(tp1, tp2) + end mergedGlb + mergedGlb(dropExpr(tp1.stripLazyRef), dropExpr(tp2.stripLazyRef)) - } + end glb def widenInUnions(using Context): Boolean = migrateTo3 || ctx.erasedTypes @@ -2396,14 +2402,23 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * @param canConstrain If true, new constraints might be added to simplify the lub. * @param isSoft If the lub is a union, this determines whether it's a soft union. */ - def lub(tp1: Type, tp2: Type, canConstrain: Boolean = false, isSoft: Boolean = true): Type = /*>|>*/ trace(s"lub(${tp1.show}, ${tp2.show}, canConstrain=$canConstrain, isSoft=$isSoft)", subtyping, show = true) /*<|<*/ { - if (tp1 eq tp2) tp1 + def lub(tp1: Type, tp2: Type, canConstrain: Boolean = false, isSoft: Boolean = true): Type = // trace(s"lub(${tp1.show}, ${tp2.show}, canConstrain=$canConstrain, isSoft=$isSoft)", subtyping, show = true): + if tp1 eq tp2 then tp1 else if !tp1.exists || (tp2 eq WildcardType) then tp1 else if !tp2.exists || (tp1 eq WildcardType) then tp2 else if tp1.isAny && !tp2.isLambdaSub || tp1.isAnyKind || isBottom(tp2) then tp1 else if tp2.isAny && !tp1.isLambdaSub || tp2.isAnyKind || isBottom(tp1) then tp2 else - def mergedLub(tp1: Type, tp2: Type): Type = { + def mergedLub(tp1: Type, tp2: Type): Type = + // First, if tp1 and tp2 are the same singleton type, return one of them. 
+ if tp1.isSingleton && isSubType(tp1, tp2, whenFrozen = !canConstrain) then + return tp2 + if tp2.isSingleton && isSubType(tp2, tp1, whenFrozen = !canConstrain) then + return tp1 + + // Second, handle special cases when tp1 and tp2 are disjunctions of + // singleton types. This saves time otherwise spent in + // costly subtype comparisons performed in dropIfSub below. tp1.atoms match case Atoms.Range(lo1, hi1) if !widenInUnions => tp2.atoms match @@ -2413,20 +2428,24 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if (hi1 & hi2).isEmpty then return orType(tp1, tp2, isSoft = isSoft) case none => case none => - val t1 = mergeIfSuper(tp1, tp2, canConstrain) - if (t1.exists) return t1 - - val t2 = mergeIfSuper(tp2, tp1, canConstrain) - if (t2.exists) return t2 - def widen(tp: Type) = if (widenInUnions) tp.widen else tp.widenIfUnstable + // Third, try to simplify after widening as follows: + // 1. Drop all or-factors in tp2 that are subtypes of an or-factor + // in tp1, yielding tp2Final. + // 2. Drop all or-factors in tp1 that are subtypes of an or-factor + // in tp2Final, yielding tp1Final. + // 3. 
Combine the two final types in an OrType + def widen(tp: Type) = + if widenInUnions then tp.widen else tp.widenIfUnstable val tp1w = widen(tp1) val tp2w = widen(tp2) - if ((tp1 ne tp1w) || (tp2 ne tp2w)) lub(tp1w, tp2w, canConstrain = canConstrain, isSoft = isSoft) - else orType(tp1w, tp2w, isSoft = isSoft) // no need to check subtypes again - } + val tp2Final = dropIfSub(tp2w, tp1w, canConstrain) + val tp1Final = dropIfSub(tp1w, tp2Final, canConstrain) + recombine(tp1Final, tp2Final, orType(_, _, isSoft = isSoft)) + end mergedLub + mergedLub(dropExpr(tp1.stripLazyRef), dropExpr(tp2.stripLazyRef)) - } + end lub /** Try to produce joint arguments for a lub `A[T_1, ..., T_n] | A[T_1', ..., T_n']` using * the following strategies: @@ -2488,60 +2507,48 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling Nil } - private def recombineAnd(tp: AndType, tp1: Type, tp2: Type) = - if (!tp1.exists) tp2 - else if (!tp2.exists) tp1 - else tp.derivedAndType(tp1, tp2) + private def recombine(tp1: Type, tp2: Type, rebuild: (Type, Type) => Type): Type = + if !tp1.exists then tp2 + else if !tp2.exists then tp1 + else rebuild(tp1, tp2) + + private def recombine(tp1: Type, tp2: Type, tp: AndOrType): Type = + recombine(tp1, tp2, tp.derivedAndOrType) /** If some (&-operand of) `tp` is a supertype of `sub` replace it with `NoType`. 
*/ private def dropIfSuper(tp: Type, sub: Type): Type = - if (isSubTypeWhenFrozen(sub, tp)) NoType - else tp match { + + def isSuperOf(sub: Type): Boolean = sub match + case AndType(sub1, sub2) => isSuperOf(sub1) || isSuperOf(sub2) + case sub: TypeVar if sub.isInstantiated => isSuperOf(sub.instanceOpt) + case _ => isSubTypeWhenFrozen(sub, tp) + + tp match case tp @ AndType(tp1, tp2) => - recombineAnd(tp, dropIfSuper(tp1, sub), dropIfSuper(tp2, sub)) + recombine(dropIfSuper(tp1, sub), dropIfSuper(tp2, sub), tp) + case tp: TypeVar if tp.isInstantiated => + dropIfSuper(tp.instanceOpt, sub) case _ => - tp - } + if isSuperOf(sub) then NoType else tp + end dropIfSuper - /** Merge `t1` into `tp2` if t1 is a subtype of some &-summand of tp2. - */ - private def mergeIfSub(tp1: Type, tp2: Type): Type = - if (isSubTypeWhenFrozen(tp1, tp2)) tp1 - else tp2 match { - case tp2 @ AndType(tp21, tp22) => - val lower1 = mergeIfSub(tp1, tp21) - if (lower1 eq tp21) tp2 - else if (lower1.exists) lower1 & tp22 - else { - val lower2 = mergeIfSub(tp1, tp22) - if (lower2 eq tp22) tp2 - else if (lower2.exists) tp21 & lower2 - else NoType - } - case _ => - NoType - } + /** If some (|-operand of) `tp` is a subtype of `sup` replace it with `NoType`. */ + private def dropIfSub(tp: Type, sup: Type, canConstrain: Boolean): Type = - /** Merge `tp1` into `tp2` if tp1 is a supertype of some |-summand of tp2. - * @param canConstrain If true, new constraints might be added to make the merge possible. 
- */ - private def mergeIfSuper(tp1: Type, tp2: Type, canConstrain: Boolean): Type = - if (isSubType(tp2, tp1, whenFrozen = !canConstrain)) tp1 - else tp2 match { - case tp2 @ OrType(tp21, tp22) => - val higher1 = mergeIfSuper(tp1, tp21, canConstrain) - if (higher1 eq tp21) tp2 - else if (higher1.exists) lub(higher1, tp22, isSoft = tp2.isSoft) - else { - val higher2 = mergeIfSuper(tp1, tp22, canConstrain) - if (higher2 eq tp22) tp2 - else if (higher2.exists) lub(tp21, higher2, isSoft = tp2.isSoft) - else NoType - } + def isSubOf(sup: Type): Boolean = sup match + case OrType(sup1, sup2) => isSubOf(sup1) || isSubOf(sup2) + case sup: TypeVar if sup.isInstantiated => isSubOf(sup.instanceOpt) + case _ => isSubType(tp, sup, whenFrozen = !canConstrain) + + tp match + case tp @ OrType(tp1, tp2) => + recombine(dropIfSub(tp1, sup, canConstrain), dropIfSub(tp2, sup, canConstrain), tp) + case tp: TypeVar if tp.isInstantiated => + dropIfSub(tp.instanceOpt, sup, canConstrain) case _ => - NoType - } + if isSubOf(sup) then NoType else tp + end dropIfSub /** There's a window of vulnerability between ElimByName and Erasure where some * ExprTypes `=> T` that appear as parameters of function types are not yet converted @@ -2897,13 +2904,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling tp case tp: HKTypeLambda => tp - case tp: ParamRef => - val st = tp.superTypeNormalized - if st.exists then - disjointnessBoundary(st) - else - // workaround for when ParamRef#underlying returns NoType - defn.AnyType case tp: TypeProxy => disjointnessBoundary(tp.superTypeNormalized) case tp: WildcardType => @@ -3250,8 +3250,8 @@ object TypeComparer { def subtypeCheckInProgress(using Context): Boolean = comparing(_.subtypeCheckInProgress) - def instanceType(param: TypeParamRef, fromBelow: Boolean, widenUnions: Boolean, maxLevel: Int = Int.MaxValue)(using Context): Type = - comparing(_.instanceType(param, fromBelow, widenUnions, maxLevel)) + def instanceType(param: 
TypeParamRef, fromBelow: Boolean, widen: Widen, maxLevel: Int = Int.MaxValue)(using Context): Type = + comparing(_.instanceType(param, fromBelow, widen: Widen, maxLevel)) def approximation(param: TypeParamRef, fromBelow: Boolean, maxLevel: Int = Int.MaxValue)(using Context): Type = comparing(_.approximation(param, fromBelow, maxLevel)) @@ -3271,8 +3271,8 @@ object TypeComparer { def addToConstraint(tl: TypeLambda, tvars: List[TypeVar])(using Context): Boolean = comparing(_.addToConstraint(tl, tvars)) - def widenInferred(inst: Type, bound: Type, widenUnions: Boolean)(using Context): Type = - comparing(_.widenInferred(inst, bound, widenUnions)) + def widenInferred(inst: Type, bound: Type, widen: Widen)(using Context): Type = + comparing(_.widenInferred(inst, bound, widen: Widen)) def dropTransparentTraits(tp: Type, bound: Type)(using Context): Type = comparing(_.dropTransparentTraits(tp, bound)) @@ -3400,37 +3400,46 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { * * See notably neg/wildcard-match.scala for examples of this. * - * See neg/i13780.scala and neg/i13780-1.scala for ClassCastException - * reproducers if we disable this check. + * See neg/i13780.scala, neg/i13780-1.scala and neg/i19746.scala for + * ClassCastException reproducers if we disable this check. 
*/ - def followEverythingConcrete(tp: Type): Type = - val widenedTp = tp.widenDealias - val tp1 = widenedTp.normalized - - def followTp1: Type = - // If both widenDealias and normalized did something, start again - if (tp1 ne widenedTp) && (widenedTp ne tp) then followEverythingConcrete(tp1) - else tp1 + def isConcrete(tp: Type): Boolean = + val tp1 = tp.normalized tp1 match case tp1: TypeRef => - tp1.info match - case TypeAlias(tl: HKTypeLambda) => tl - case MatchAlias(tl: HKTypeLambda) => tl - case _ => followTp1 - case tp1 @ AppliedType(tycon, args) => - val concreteTycon = followEverythingConcrete(tycon) - if concreteTycon eq tycon then followTp1 - else followEverythingConcrete(concreteTycon.applyIfParameterized(args)) + if tp1.symbol.isClass then true + else + tp1.info match + case info: AliasingBounds => isConcrete(info.alias) + case _ => false + case tp1: AppliedType => + isConcrete(tp1.tycon) && isConcrete(tp1.superType) + case tp1: HKTypeLambda => + true + case tp1: TermRef => + !tp1.symbol.is(Param) && isConcrete(tp1.underlying) + case tp1: TermParamRef => + false + case tp1: SingletonType => + isConcrete(tp1.underlying) + case tp1: ExprType => + isConcrete(tp1.underlying) + case tp1: AnnotatedType => + isConcrete(tp1.parent) + case tp1: RefinedType => + isConcrete(tp1.underlying) + case tp1: RecType => + isConcrete(tp1.underlying) + case tp1: AndOrType => + isConcrete(tp1.tp1) && isConcrete(tp1.tp2) + case tp1: FlexibleType => + isConcrete(tp1.hi) case _ => - followTp1 - end followEverythingConcrete - - def isConcrete(tp: Type): Boolean = - followEverythingConcrete(tp) match - case tp1: AndOrType => isConcrete(tp1.tp1) && isConcrete(tp1.tp2) - case tp1 => tp1.underlyingClassRef(refinementOK = true).exists + val tp2 = tp1.stripped.stripLazyRef + (tp2 ne tp) && isConcrete(tp2) + end isConcrete // Actual matching logic @@ -3502,20 +3511,77 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { false case 
+ * to `skolem` are avoided by following aliases and
+ * singletons; otherwise no attempt is made to avoid references to
+ * `skolem`.
+ */ + def dropSkolem(u: Type, skolem: SkolemType): Type = + val dmap = DropSkolemMap(skolem) + val res = dmap(u) + if dmap.refersToSkolem then NoType else res + val stableScrut: SingletonType = scrut match case scrut: SingletonType => scrut case _ => SkolemType(scrut) + stableScrut.member(typeMemberName) match case denot: SingleDenotation if denot.exists => val info = denot.info match - case TypeAlias(alias) => alias // Extract the alias + case alias: AliasingBounds => alias.alias // Extract the alias case ClassInfo(prefix, cls, _, _, _) => prefix.select(cls) // Re-select the class from the prefix case info => info // Notably, RealTypeBounds, which will eventually give a MatchResult.NoInstances - val infoRefersToSkolem = stableScrut.isInstanceOf[SkolemType] && stableScrut.occursIn(info) - val info1 = info match - case info: TypeBounds => info // Will already trigger a MatchResult.NoInstances - case _ if infoRefersToSkolem => RealTypeBounds(info, info) // Explicitly trigger a MatchResult.NoInstances - case _ => info // We have a match + val info1 = stableScrut match + case skolem: SkolemType => + dropSkolem(info, skolem).orElse: + info match + case info: TypeBounds => info // Will already trigger a MatchResult.NoInstances + case _ => RealTypeBounds(info, info) // Explicitly trigger a MatchResult.NoInstances + case _ => info rec(capture, info1, variance = 0, scrutIsWidenedAbstract) case _ => false @@ -3634,23 +3700,14 @@ class MatchReducer(initctx: Context) extends TypeComparer(initctx) { MatchTypeTrace.emptyScrutinee(scrut) NoType case Nil => - val casesText = MatchTypeTrace.noMatchesText(scrut, cases) - ErrorType(reporting.MatchTypeNoCases(casesText)) - - inFrozenConstraint { - if scrut.isError then - // if the scrutinee is an error type - // then just return that as the result - // not doing so will result in the first type case matching - // because ErrorType (as a FlexType) is <:< any type case - // this situation can arise from any kind of nesting of match 
types, - // e.g. neg/i12049 `Tuple.Concat[Reverse[ts], (t2, t1)]` - // if Reverse[ts] fails with no matches, - // the error type should be the reduction of the Concat too - scrut - else - recur(cases) - } + /* TODO warn ? then re-enable warn/12974.scala:26 + val noCasesText = MatchTypeTrace.noMatchesText(scrut, cases) + report.warning(reporting.MatchTypeNoCases(noCasesText), pos = ???) + */ + MatchTypeTrace.noMatches(scrut, cases) + NoType + + inFrozenConstraint(recur(cases)) } } diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index 0474aff4087a..ce4956e6e847 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -266,7 +266,7 @@ object TypeErasure { tp.paramNames, tp.paramNames map (Function.const(TypeBounds.upper(defn.ObjectType))), tp.resultType) if (defn.isPolymorphicAfterErasure(sym)) eraseParamBounds(sym.info.asInstanceOf[PolyType]) - else if (sym.isAbstractType) TypeAlias(WildcardType) + else if (sym.isAbstractOrParamType) TypeAlias(WildcardType) else if sym.is(ConstructorProxy) then NoType else if (sym.isConstructor) outer.addParam(sym.owner.asClass, erase(tp)(using preErasureCtx)) else if (sym.is(Label)) erase.eraseResult(sym.info)(using preErasureCtx) @@ -747,16 +747,19 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst assert(!etp.isInstanceOf[WildcardType] || inSigName, i"Unexpected WildcardType erasure for $tp") etp - /** Like translucentSuperType, but issue a fatal error if it does not exist. */ + /** Like translucentSuperType, but issue a fatal error if it does not exist. + * If using the best-effort option, the fatal error will not be issued. 
+ */ private def checkedSuperType(tp: TypeProxy)(using Context): Type = val tp1 = tp.translucentSuperType if !tp1.exists then - val msg = tp.typeConstructor match + val typeErr = tp.typeConstructor match case tycon: TypeRef => - MissingType(tycon.prefix, tycon.name).toMessage.message + MissingType(tycon.prefix, tycon.name) case _ => - i"Cannot resolve reference to $tp" - throw FatalError(msg) + TypeError(em"Cannot resolve reference to $tp") + if ctx.isBestEffort then report.error(typeErr.toMessage) + else throw typeErr tp1 /** Widen term ref, skipping any `()` parameter of an eventual getter. Used to erase a TermRef. diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index 240bc4eebd84..5b19fe0e7bdd 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -46,7 +46,8 @@ abstract class TypeError(using creationContext: Context) extends Exception(""): def toMessage(using Context): Message /** Uses creationContext to produce the message */ - override def getMessage: String = toMessage.message + override def getMessage: String = + try toMessage.message catch case ex: Throwable => "TypeError" object TypeError: def apply(msg: Message)(using Context) = new TypeError: @@ -101,7 +102,7 @@ extends TypeError: em"""Recursion limit exceeded. |Maybe there is an illegal cyclic reference? |If that's not the case, you could also try to increase the stacksize using the -Xss JVM option. - |For the unprocessed stack trace, compile with -Yno-decode-stacktraces. + |For the unprocessed stack trace, compile with -Xno-decode-stacktraces. 
|A recurring operation is (inner to outer): |${opsString(mostCommon).stripMargin}""" @@ -121,7 +122,7 @@ object handleRecursive: e def apply(op: String, details: => String, exc: Throwable, weight: Int = 1)(using Context): Nothing = - if ctx.settings.YnoDecodeStacktraces.value then + if ctx.settings.XnoDecodeStacktraces.value then throw exc else exc match case _: RecursionOverflow => @@ -197,20 +198,31 @@ object CyclicReference: cyclicErrors.println(elem.toString) ex - type TraceElement = (/*prefix:*/ String, Symbol, /*suffix:*/ String) + type TraceElement = Context ?=> String type Trace = mutable.ArrayBuffer[TraceElement] val Trace = Property.Key[Trace] - def isTraced(using Context) = + private def isTraced(using Context) = ctx.property(CyclicReference.Trace).isDefined - def pushTrace(info: TraceElement)(using Context): Unit = + private def pushTrace(info: TraceElement)(using Context): Unit = for buf <- ctx.property(CyclicReference.Trace) do buf += info - def popTrace()(using Context): Unit = + private def popTrace()(using Context): Unit = for buf <- ctx.property(CyclicReference.Trace) do buf.dropRightInPlace(1) + + inline def trace[T](info: TraceElement)(inline op: => T)(using Context): T = + val traceCycles = isTraced + try + if traceCycles then pushTrace(info) + op + finally + if traceCycles then popTrace() + + inline def trace[T](prefix: String, sym: Symbol)(inline op: => T)(using Context): T = + trace((ctx: Context) ?=> i"$prefix$sym")(op) end CyclicReference class UnpicklingError(denot: Denotation, where: String, cause: Throwable)(using Context) extends TypeError: diff --git a/compiler/src/dotty/tools/dotc/core/TypeEval.scala b/compiler/src/dotty/tools/dotc/core/TypeEval.scala index b5684b07f181..af4f1e0153dd 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeEval.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeEval.scala @@ -6,11 +6,14 @@ import Types.*, Contexts.*, Symbols.*, Constants.*, Decorators.* import config.Printers.typr import reporting.trace 
import StdNames.tpnme +import Flags.CaseClass +import TypeOps.nestedPairs object TypeEval: def tryCompiletimeConstantFold(tp: AppliedType)(using Context): Type = tp.tycon match case tycon: TypeRef if defn.isCompiletimeAppliedType(tycon.symbol) => + extension (tp: Type) def fixForEvaluation: Type = tp.normalized.dealias match // enable operations for constant singleton terms. E.g.: @@ -94,6 +97,22 @@ object TypeEval: throw TypeError(em"${e.getMessage.nn}") ConstantType(Constant(result)) + def fieldsOf: Option[Type] = + expectArgsNum(1) + val arg = tp.args.head + val cls = arg.classSymbol + if cls.is(CaseClass) then + val fields = cls.caseAccessors + val fieldLabels = fields.map: field => + ConstantType(Constant(field.name.toString)) + val fieldTypes = fields.map(arg.memberInfo) + Some: + defn.NamedTupleTypeRef.appliedTo: + nestedPairs(fieldLabels) :: nestedPairs(fieldTypes) :: Nil + else arg.widenDealias match + case arg @ defn.NamedTuple(_, _) => Some(arg) + case _ => None + def constantFold1[T](extractor: Type => Option[T], op: T => Any): Option[Type] = expectArgsNum(1) extractor(tp.args.head).map(a => runConstantOp(op(a))) @@ -122,11 +141,14 @@ object TypeEval: yield runConstantOp(op(a, b, c)) trace(i"compiletime constant fold $tp", typr, show = true) { - val name = tycon.symbol.name - val owner = tycon.symbol.owner + val sym = tycon.symbol + val name = sym.name + val owner = sym.owner val constantType = - if defn.isCompiletime_S(tycon.symbol) then + if defn.isCompiletime_S(sym) then constantFold1(natValue, _ + 1) + else if defn.isNamedTuple_From(sym) then + fieldsOf else if owner == defn.CompiletimeOpsAnyModuleClass then name match case tpnme.Equals => constantFold2(constValue, _ == _) case tpnme.NotEquals => constantFold2(constValue, _ != _) diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 587c52688456..1282b77f013e 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ 
b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -143,7 +143,7 @@ object TypeOps: defn.MatchCase(simplify(pat, theMap), body) case tp: AppliedType => tp.tycon match - case tycon: TypeRef if tycon.info.isInstanceOf[MatchAlias] => + case tycon: TypeRef if tp.isMatchAlias => isFullyDefined(tp, ForceDegree.all) case _ => val normed = tp.tryNormalize @@ -157,15 +157,8 @@ object TypeOps: tp.derivedAlias(simplify(tp.alias, theMap)) case AndType(l, r) if !ctx.mode.is(Mode.Type) => simplify(l, theMap) & simplify(r, theMap) - case tp @ OrType(l, r) - if !ctx.mode.is(Mode.Type) - && (tp.isSoft || l.isBottomType || r.isBottomType) => - // Normalize A | Null and Null | A to A even if the union is hard (i.e. - // explicitly declared), but not if -Yexplicit-nulls is set. The reason is - // that in this case the normal asSeenFrom machinery is not prepared to deal - // with Nulls (which have no base classes). Under -Yexplicit-nulls, we take - // corrective steps, so no widening is wanted. - simplify(l, theMap) | simplify(r, theMap) + case tp @ OrType(l, r) if !ctx.mode.is(Mode.Type) => + TypeComparer.lub(simplify(l, theMap), simplify(r, theMap), isSoft = tp.isSoft) case tp @ CapturingType(parent, refs) => if !ctx.mode.is(Mode.Type) && refs.subCaptures(parent.captureSet, frozen = true).isOK @@ -256,7 +249,8 @@ object TypeOps: mergeRefinedOrApplied(tp1, tp21) & mergeRefinedOrApplied(tp1, tp22) case _ => fail - tp1 match { + if tp1 eq tp2 then tp1 + else tp1 match { case tp1 @ RefinedType(parent1, name1, rinfo1) => tp2 match { case RefinedType(parent2, `name1`, rinfo2) => @@ -280,6 +274,7 @@ object TypeOps: } case AndType(tp11, tp12) => mergeRefinedOrApplied(tp11, tp2) & mergeRefinedOrApplied(tp12, tp2) + case tp1: TypeParamRef if tp1 == tp2 => tp1 case _ => fail } } @@ -390,7 +385,12 @@ object TypeOps: (tp.tp1.dealias, tp.tp2.dealias) match case (tp1 @ AppliedType(tycon1, args1), tp2 @ AppliedType(tycon2, args2)) if tycon1.typeSymbol == tycon2.typeSymbol && (tycon1 =:= tycon2) 
=> - mergeRefinedOrApplied(tp1, tp2) + mergeRefinedOrApplied(tp1, tp2) match + case tp: AppliedType if tp.isUnreducibleWild => + // fall back to or-dominators rather than inferring a type that would + // cause an unreducible type error later. + approximateOr(tp1, tp2) + case tp => tp case (tp1, tp2) => approximateOr(tp1, tp2) case _ => @@ -545,7 +545,7 @@ object TypeOps: val lo = TypeComparer.instanceType( tp.origin, fromBelow = variance > 0 || variance == 0 && tp.hasLowerBound, - widenUnions = tp.widenUnions)(using mapCtx) + tp.widenPolicy)(using mapCtx) val lo1 = apply(lo) if (lo1 ne lo) lo1 else tp case _ => diff --git a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala index c76b5117dc89..afc2cc39f9cf 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeUtils.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeUtils.scala @@ -4,13 +4,17 @@ package core import TypeErasure.ErasedValueType import Types.*, Contexts.*, Symbols.*, Flags.*, Decorators.* +import Names.{Name, TermName} +import Constants.Constant + import Names.Name +import config.Feature -class TypeUtils { +class TypeUtils: /** A decorator that provides methods on types * that are needed in the transformer pipeline. */ - extension (self: Type) { + extension (self: Type) def isErasedValueType(using Context): Boolean = self.isInstanceOf[ErasedValueType] @@ -19,7 +23,11 @@ class TypeUtils { self.classSymbol.isPrimitiveValueClass def isErasedClass(using Context): Boolean = - self.underlyingClassRef(refinementOK = true).typeSymbol.is(Flags.Erased) + val cls = self.underlyingClassRef(refinementOK = true).typeSymbol + cls.is(Flags.Erased) + && (cls != defn.SingletonClass || Feature.enabled(Feature.modularity)) + // Singleton counts as an erased class only under x.modularity + /** Is this type a checked exception? This is the case if the type * derives from Exception but not from RuntimeException. 
According to @@ -65,8 +73,12 @@ class TypeUtils { case tp: AppliedType if defn.isTupleNType(tp) && normalize => Some(tp.args) // if normalize is set, use the dealiased tuple // otherwise rely on the default case below to print unaliased tuples. + case tp: SkolemType => + recur(tp.underlying, bound) case tp: SingletonType => - if tp.termSymbol == defn.EmptyTupleModule then Some(Nil) else None + if tp.termSymbol == defn.EmptyTupleModule then Some(Nil) + else if normalize then recur(tp.widen, bound) + else None case _ => if defn.isTupleClass(tp.typeSymbol) && !normalize then Some(tp.dealias.argInfos) else None @@ -114,22 +126,35 @@ class TypeUtils { case Some(types) => TypeOps.nestedPairs(types) case None => throw new AssertionError("not a tuple") - def refinedWith(name: Name, info: Type)(using Context) = RefinedType(self, name, info) + def namedTupleElementTypesUpTo(bound: Int, normalize: Boolean = true)(using Context): List[(TermName, Type)] = + (if normalize then self.normalized else self).dealias match + case defn.NamedTuple(nmes, vals) => + val names = nmes.tupleElementTypesUpTo(bound, normalize).getOrElse(Nil).map: + case ConstantType(Constant(str: String)) => str.toTermName + case t => throw TypeError(em"Malformed NamedTuple: names must be string types, but $t was found.") + val values = vals.tupleElementTypesUpTo(bound, normalize).getOrElse(Nil) + names.zip(values) + case t => + Nil + + def namedTupleElementTypes(using Context): List[(TermName, Type)] = + namedTupleElementTypesUpTo(Int.MaxValue) + + def isNamedTupleType(using Context): Boolean = self match + case defn.NamedTuple(_, _) => true + case _ => false - /** The TermRef referring to the companion of the underlying class reference - * of this type, while keeping the same prefix. 
- */ - def mirrorCompanionRef(using Context): TermRef = self match { - case AndType(tp1, tp2) => - val c1 = tp1.classSymbol - val c2 = tp2.classSymbol - if c1.isSubClass(c2) then tp1.mirrorCompanionRef - else tp2.mirrorCompanionRef // precondition: the parts of the AndType have already been checked to be non-overlapping - case self @ TypeRef(prefix, _) if self.symbol.isClass => - prefix.select(self.symbol.companionModule).asInstanceOf[TermRef] - case self: TypeProxy => - self.superType.mirrorCompanionRef - } + /** Drop all named elements in tuple type */ + def stripNamedTuple(using Context): Type = self.normalized.dealias match + case defn.NamedTuple(_, vals) => + vals + case self @ AnnotatedType(tp, annot) => + val tp1 = tp.stripNamedTuple + if tp1 ne tp then AnnotatedType(tp1, annot) else self + case _ => + self + + def refinedWith(name: Name, info: Type)(using Context) = RefinedType(self, name, info) /** Is this type a methodic type that takes at least one parameter? */ def takesParams(using Context): Boolean = self.stripPoly match @@ -150,5 +175,20 @@ class TypeUtils { case _ => val cls = self.underlyingClassRef(refinementOK = false).typeSymbol cls.isTransparentClass && (!traitOnly || cls.is(Trait)) - } -} + + /** Is this type the ThisType of class `cls?`. 
  /** Is this type the ThisType of class `cls`? Note we can't use `self eq cls.thisType` for this,
toCollect += tl for tl <- toCollect do diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index 701a6360fd3d..eeffc41d4159 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -44,8 +44,6 @@ import CaptureSet.{CompareResult, IdempotentCaptRefMap, IdentityCaptRefMap} import scala.annotation.internal.sharable import scala.annotation.threadUnsafe - - object Types extends TypeUtils { @sharable private var nextId = 0 @@ -77,6 +75,7 @@ object Types extends TypeUtils { * | +- TypeVar * | +- HKTypeLambda * | +- MatchType + * | +- FlexibleType * | * +- GroundType -+- AndType * +- OrType @@ -139,7 +138,7 @@ object Types extends TypeUtils { case t: AppliedType => t.fold(false, (x, tp) => x || test(tp, theAcc)) case t: TypeVar => - !t.inst.exists || test(t.inst, theAcc) + !t.isPermanentlyInstantiated || test(t.permanentInst, theAcc) case t: LazyRef => !t.completed || test(t.ref, theAcc) case _ => @@ -307,6 +306,7 @@ object Types extends TypeUtils { isRef(defn.ObjectClass) && (typeSymbol eq defn.FromJavaObjectSymbol) def containsFromJavaObject(using Context): Boolean = this match + case tp: FlexibleType => tp.underlying.containsFromJavaObject case tp: OrType => tp.tp1.containsFromJavaObject || tp.tp2.containsFromJavaObject case tp: AndType => tp.tp1.containsFromJavaObject && tp.tp2.containsFromJavaObject case _ => isFromJavaObject @@ -328,6 +328,21 @@ object Types extends TypeUtils { /** Is this type a (possibly aliased) singleton type? */ def isSingleton(using Context): Boolean = dealias.isInstanceOf[SingletonType] + /** Is this upper-bounded by a (possibly aliased) singleton type? 
+ * Overridden in TypeVar + */ + def isSingletonBounded(frozen: Boolean)(using Context): Boolean = this.dealias.normalized match + case tp: SingletonType => tp.isStable + case tp: TypeRef => + tp.name == tpnme.Singleton && tp.symbol == defn.SingletonClass + || tp.superType.isSingletonBounded(frozen) + case tp: TypeVar if !tp.isInstantiated => + if frozen then tp frozen_<:< defn.SingletonType else tp <:< defn.SingletonType + case tp: HKTypeLambda => false + case tp: TypeProxy => tp.superType.isSingletonBounded(frozen) + case AndType(tpL, tpR) => tpL.isSingletonBounded(frozen) || tpR.isSingletonBounded(frozen) + case _ => false + /** Is this type of kind `AnyKind`? */ def hasAnyKind(using Context): Boolean = { @tailrec def loop(tp: Type): Boolean = tp match { @@ -345,6 +360,7 @@ object Types extends TypeUtils { /** Is this type guaranteed not to have `null` as a value? */ final def isNotNull(using Context): Boolean = this match { case tp: ConstantType => tp.value.value != null + case tp: FlexibleType => false case tp: ClassInfo => !tp.cls.isNullableClass && tp.cls != defn.NothingClass case tp: AppliedType => tp.superType.isNotNull case tp: TypeBounds => tp.lo.isNotNull @@ -374,6 +390,7 @@ object Types extends TypeUtils { case AppliedType(tycon, args) => tycon.unusableForInference || args.exists(_.unusableForInference) case RefinedType(parent, _, rinfo) => parent.unusableForInference || rinfo.unusableForInference case TypeBounds(lo, hi) => lo.unusableForInference || hi.unusableForInference + case tp: FlexibleType => tp.underlying.unusableForInference case tp: AndOrType => tp.tp1.unusableForInference || tp.tp2.unusableForInference case tp: LambdaType => tp.resultType.unusableForInference || tp.paramInfos.exists(_.unusableForInference) case WildcardType(optBounds) => optBounds.unusableForInference @@ -456,14 +473,19 @@ object Types extends TypeUtils { /** Is this a MethodType for which the parameters will not be used? 
*/ def hasErasedParams(using Context): Boolean = false - /** Is this a match type or a higher-kinded abstraction of one? - */ - def isMatch(using Context): Boolean = underlyingMatchType.exists + /** Is this a match type or a higher-kinded abstraction of one? */ + def isMatch(using Context): Boolean = stripped match + case tp: MatchType => true + case tp: HKTypeLambda => tp.resType.isMatch + case _ => false + + /** Does this application expand to a match type? */ + def isMatchAlias(using Context): Boolean = underlyingMatchType.exists def underlyingMatchType(using Context): Type = stripped match { case tp: MatchType => tp case tp: HKTypeLambda => tp.resType.underlyingMatchType - case tp: AppliedType if tp.isMatchAlias => tp.superType.underlyingMatchType + case tp: AppliedType => tp.underlyingMatchType case _ => NoType } @@ -905,7 +927,7 @@ object Types extends TypeUtils { def goSuper(tp: SuperType) = go(tp.underlying) match { case d: JointRefDenotation => typr.println(i"redirecting super.$name from $tp to ${d.symbol.showLocated}") - new UniqueRefDenotation(d.symbol, tp.memberInfo(d.symbol), d.validFor, pre) + new UniqueRefDenotation(d.symbol, tp.memberInfo(d.symbol), currentStablePeriod, pre) case d => d } @@ -925,7 +947,7 @@ object Types extends TypeUtils { // Selecting `name` from a type `T | Null` is like selecting `name` from `T`, if // unsafeNulls is enabled and T is a subtype of AnyRef. // This can throw at runtime, but we trade soundness for usability. - tp1.findMember(name, pre.stripNull, required, excluded) + tp1.findMember(name, pre.stripNull(), required, excluded) case _ => searchAfterJoin else searchAfterJoin @@ -1340,7 +1362,7 @@ object Types extends TypeUtils { * * For instance, if `A` is an unconstrained type variable, then * - * ArrayBuffer[Int] | ArrayBuffer[A] + * ArrayBuffer[Int] | ArrayBuffer[A] * * is approximated by constraining `A` to be =:= to `Int` and returning `ArrayBuffer[Int]` * instead of `ArrayBuffer[? 
>: Int | A <: Int & A]` @@ -1349,13 +1371,13 @@ object Types extends TypeUtils { * then the top-level union isn't widened. This is needed so that type inference can infer nullable types. */ def widenUnion(using Context): Type = widen match - case tp: OrType => tp match - case OrNull(tp1) => - // Don't widen `T|Null`, since otherwise we wouldn't be able to infer nullable unions. + case tp: OrType => + val tp1 = tp.stripNull(stripFlexibleTypes = false) + if tp1 ne tp then val tp1Widen = tp1.widenUnionWithoutNull - if (tp1Widen.isRef(defn.AnyClass)) tp1Widen + if tp1Widen.isRef(defn.AnyClass) then tp1Widen else tp.derivedOrType(tp1Widen, defn.NullType) - case _ => + else tp.widenUnionWithoutNull case tp => tp.widenUnionWithoutNull @@ -1370,6 +1392,8 @@ object Types extends TypeUtils { tp.rebind(tp.parent.widenUnion) case tp: HKTypeLambda => tp.derivedLambdaType(resType = tp.resType.widenUnion) + case tp: FlexibleType => + tp.derivedFlexibleType(tp.hi.widenUnionWithoutNull) case tp => tp @@ -1631,17 +1655,19 @@ object Types extends TypeUtils { * * P { ... type T = / += / -= U ... } # T * - * to just U. Does not perform the reduction if the resulting type would contain - * a reference to the "this" of the current refined type, except in the following situation + * to just U. Analogously, `P { val x: S} # x` is reduced to `S` if `S` + * is a singleton type. * - * (1) The "this" reference can be avoided by following an alias. Example: + * Does not perform the reduction if the resulting type would contain + * a reference to the "this" of the current refined type, except if the "this" + * reference can be avoided by following an alias. Example: * * P { type T = String, type R = P{...}.T } # R --> String * * (*) normalizes means: follow instantiated typevars and aliases. 
*/ - def lookupRefined(name: Name)(using Context): Type = { - @tailrec def loop(pre: Type): Type = pre.stripTypeVar match { + def lookupRefined(name: Name)(using Context): Type = + @tailrec def loop(pre: Type): Type = pre match case pre: RefinedType => pre.refinedInfo match { case tp: AliasingBounds => @@ -1664,12 +1690,13 @@ object Types extends TypeUtils { case TypeAlias(alias) => loop(alias) case _ => NoType } + case pre: (TypeVar | AnnotatedType) => + loop(pre.underlying) case _ => NoType - } loop(this) - } + end lookupRefined /** The type , reduced if possible */ def select(name: Name)(using Context): Type = @@ -2690,7 +2717,7 @@ object Types extends TypeUtils { symd.maybeOwner.membersNeedAsSeenFrom(prefix) && !symd.is(NonMember) || prefix.match case prefix: Types.ThisType => - (symd.isAbstractType + (symd.isAbstractOrParamType || symd.isTerm && !symd.flagsUNSAFE.isOneOf(Module | Final | Param) && !symd.isConstructor @@ -2708,13 +2735,21 @@ object Types extends TypeUtils { case _ => true } - /** Reduce a type-ref `T { X = U; ... } # X` to `U` - * provided `U` does not refer with a RecThis to the - * refinement type `T { X = U; ... }` + /** Reduce a type ref P # X, where X is a type alias and P is a refined type or + * a class type. If P is a refined type `T { X = U; ... }`, reduce P to U, + * provided U does not refer with a RecThis to the same refined type. If P is a + * class type, reduce it to the dealiasd version of P # X. This means that at typer + * we create projections only for inner classes with class prefixes, since projections + * of P # X where X is an abstract type are handled by skolemization. At later phases + * these projections might arise, though. 
*/ def reduceProjection(using Context): Type = val reduced = prefix.lookupRefined(name) - if reduced.exists then reduced else this + if reduced.exists then reduced + else prefix.stripTypeVar match + case pre: (AppliedType | TypeRef) + if prefix.dealias.typeSymbol.isClass && this.symbol.isAliasType => dealias + case _ => this /** Guard against cycles that can arise if given `op` * follows info. The problematic cases are a type alias to itself or @@ -2801,35 +2836,30 @@ object Types extends TypeUtils { def derivedSelect(prefix: Type)(using Context): Type = if prefix eq this.prefix then this else if prefix.isExactlyNothing then prefix - else { - val res = - if (isType && currentValidSymbol.isAllOf(ClassTypeParam)) argForParam(prefix) + else + val reduced = + if isType && currentValidSymbol.isAllOf(ClassTypeParam) then argForParam(prefix) else prefix.lookupRefined(name) - if (res.exists) return res - if (isType) { - if (Config.splitProjections) - prefix match { - case prefix: AndType => - def isMissing(tp: Type) = tp match { - case tp: TypeRef => !tp.info.exists - case _ => false - } - val derived1 = derivedSelect(prefix.tp1) - val derived2 = derivedSelect(prefix.tp2) - return ( - if (isMissing(derived1)) derived2 - else if (isMissing(derived2)) derived1 - else prefix.derivedAndType(derived1, derived2)) - case prefix: OrType => - val derived1 = derivedSelect(prefix.tp1) - val derived2 = derivedSelect(prefix.tp2) - return prefix.derivedOrType(derived1, derived2) - case _ => - } - } - if (prefix.isInstanceOf[WildcardType]) WildcardType.sameKindAs(this) + if reduced.exists then return reduced + if Config.splitProjections && isType then + prefix match + case prefix: AndType => + def isMissing(tp: Type) = tp match + case tp: TypeRef => !tp.info.exists + case _ => false + val derived1 = derivedSelect(prefix.tp1) + val derived2 = derivedSelect(prefix.tp2) + return + if isMissing(derived1) then derived2 + else if isMissing(derived2) then derived1 + else 
prefix.derivedAndType(derived1, derived2) + case prefix: OrType => + val derived1 = derivedSelect(prefix.tp1) + val derived2 = derivedSelect(prefix.tp2) + return prefix.derivedOrType(derived1, derived2) + case _ => + if prefix.isInstanceOf[WildcardType] then WildcardType.sameKindAs(this) else withPrefix(prefix) - } /** A reference like this one, but with the given symbol, if it exists */ private def withSym(sym: Symbol)(using Context): ThisType = @@ -2973,7 +3003,7 @@ object Types extends TypeUtils { */ override def isTrackableRef(using Context) = ((prefix eq NoPrefix) - || symbol.is(ParamAccessor) && (prefix eq symbol.owner.thisType) + || symbol.is(ParamAccessor) && prefix.isThisTypeOf(symbol.owner) || isRootCapability ) && !symbol.isOneOf(UnstableValueFlags) @@ -3131,7 +3161,8 @@ object Types extends TypeUtils { if (ctx.erasedTypes) tref else cls.info match { case cinfo: ClassInfo => cinfo.selfType - case _: ErrorType | NoType if ctx.mode.is(Mode.Interactive) => cls.info + case _: ErrorType | NoType + if ctx.mode.is(Mode.Interactive) || ctx.tolerateErrorsForBestEffort => cls.info // can happen in IDE if `cls` is stale } @@ -3440,6 +3471,50 @@ object Types extends TypeUtils { } } + // --- FlexibleType ----------------------------------------------------------------- + + /* A flexible type is a type with a custom subtyping relationship. + * It is used by explicit nulls to represent a type coming from Java which can be + * considered as nullable or non-nullable depending on the context, in a similar way to Platform + * Types in Kotlin. A `FlexibleType(T)` generally behaves like a type variable with special bounds + * `T | Null .. T`, so that `T | Null <: FlexibleType(T) <: T`. + * A flexible type will be erased to its original type `T`. 
+ */ + case class FlexibleType protected(lo: Type, hi: Type) extends CachedProxyType with ValueType { + + override def underlying(using Context): Type = hi + + def derivedFlexibleType(hi: Type)(using Context): Type = + if hi eq this.hi then this else FlexibleType(hi) + + override def computeHash(bs: Binders): Int = doHash(bs, hi) + + override final def baseClasses(using Context): List[ClassSymbol] = hi.baseClasses + } + + object FlexibleType { + def apply(tp: Type)(using Context): FlexibleType = tp match { + case ft: FlexibleType => ft + case _ => + // val tp1 = tp.stripNull() + // if tp1.isNullType then + // // (Null)? =:= ? >: Null <: (Object & Null) + // FlexibleType(tp, AndType(defn.ObjectType, defn.NullType)) + // else + // // (T | Null)? =:= ? >: T | Null <: T + // // (T)? =:= ? >: T | Null <: T + // val hi = tp1 + // val lo = if hi eq tp then OrNull(hi) else tp + // FlexibleType(lo, hi) + // + // The commented out code does more work to analyze the original type to ensure the + // flexible type is always a subtype of the original type and the Object type. + // It is not necessary according to the use cases, so we choose to use a simpler + // rule. 
+ FlexibleType(OrNull(tp), tp) + } + } + // --- AndType/OrType --------------------------------------------------------------- abstract class AndOrType extends CachedGroundType with ValueType { @@ -3480,7 +3555,7 @@ object Types extends TypeUtils { private var myFactorCount = 0 override def andFactorCount = if myFactorCount == 0 then - myFactorCount = tp1.andFactorCount + tp2.andFactorCount + myFactorCount = tp1.andFactorCount + tp2.andFactorCount myFactorCount def derivedAndType(tp1: Type, tp2: Type)(using Context): Type = @@ -3598,12 +3673,11 @@ object Types extends TypeUtils { override def widenUnionWithoutNull(using Context): Type = if myUnionPeriod != ctx.period then - myUnion = - if isSoft then - TypeComparer.lub(tp1.widenUnionWithoutNull, tp2.widenUnionWithoutNull, canConstrain = true, isSoft = isSoft) match - case union: OrType => union.join - case res => res - else derivedOrType(tp1.widenUnionWithoutNull, tp2.widenUnionWithoutNull, soft = isSoft) + val union = TypeComparer.lub( + tp1.widenUnionWithoutNull, tp2.widenUnionWithoutNull, canConstrain = isSoft, isSoft = isSoft) + myUnion = union match + case union: OrType if isSoft => union.join + case _ => union if !isProvisional then myUnionPeriod = ctx.period myUnion @@ -3658,8 +3732,9 @@ object Types extends TypeUtils { def apply(tp1: Type, tp2: Type, soft: Boolean)(using Context): OrType = { def where = i"in union $tp1 | $tp2" - expectValueTypeOrWildcard(tp1, where) - expectValueTypeOrWildcard(tp2, where) + if !ctx.usedBestEffortTasty then + expectValueTypeOrWildcard(tp1, where) + expectValueTypeOrWildcard(tp2, where) assertUnerased() unique(new CachedOrType(tp1, tp2, soft)) } @@ -3695,7 +3770,8 @@ object Types extends TypeUtils { assert(!ctx.isAfterTyper, s"$tp in $where") // we check correct kinds at PostTyper throw TypeError(em"$tp is not a value type, cannot be used $where") - /** An extractor object to pattern match against a nullable union. 
+ /** An extractor object to pattern match against a nullable union + * (including flexible types). * e.g. * * (tp: Type) match @@ -3706,7 +3782,7 @@ object Types extends TypeUtils { def apply(tp: Type)(using Context) = if tp.isNullType then tp else OrType(tp, defn.NullType, soft = false) def unapply(tp: Type)(using Context): Option[Type] = - val tp1 = tp.stripNull + val tp1 = tp.stripNull() if tp1 ne tp then Some(tp1) else None } @@ -3759,7 +3835,7 @@ object Types extends TypeUtils { * LambdaType | TermLambda | TypeLambda * -------------+-------------------+------------------ * HKLambda | HKTermLambda | HKTypeLambda - * MethodOrPoly | MethodType | PolyType + * MethodOrPoly | MethodType | PolyType */ trait LambdaType extends BindingType with TermType { self => type ThisName <: Name @@ -3992,7 +4068,7 @@ object Types extends TypeUtils { tp match case CapturingType(parent, refs) => (compute(status, parent, theAcc) /: refs.elems) { - (s, ref) => ref match + (s, ref) => ref.stripReach match case tp: TermParamRef if tp.binder eq thisLambdaType => combine(s, CaptureDeps) case _ => s } @@ -4522,6 +4598,9 @@ object Types extends TypeUtils { private var myEvalRunId: RunId = NoRunId private var myEvalued: Type = uninitialized + private var validUnderlyingMatch: Period = Nowhere + private var cachedUnderlyingMatch: Type = uninitialized + def isGround(acc: TypeAccumulator[Boolean])(using Context): Boolean = if myGround == 0 then myGround = if acc.foldOver(true, this) then 1 else -1 myGround > 0 @@ -4578,31 +4657,38 @@ object Types extends TypeUtils { case nil => x foldArgs(op(x, tycon), args) + /** Exists if the tycon is a TypeRef of an alias with an underlying match type. + * Anything else should have already been reduced in `appliedTo` by the TypeAssigner. 
+ */ + override def underlyingMatchType(using Context): Type = + if ctx.period != validUnderlyingMatch then + validUnderlyingMatch = if tycon.isProvisional then Nowhere else ctx.period + cachedUnderlyingMatch = superType.underlyingMatchType + cachedUnderlyingMatch + override def tryNormalize(using Context): Type = tycon.stripTypeVar match { case tycon: TypeRef => - def tryMatchAlias = tycon.info match { - case MatchAlias(alias) => + def tryMatchAlias = tycon.info match + case AliasingBounds(alias) if isMatchAlias => trace(i"normalize $this", typr, show = true) { MatchTypeTrace.recurseWith(this) { alias.applyIfParameterized(args.map(_.normalized)).tryNormalize + /* `applyIfParameterized` may reduce several HKTypeLambda applications + * before the underlying MatchType is reached. + * Even if they do not involve any match type normalizations yet, + * we still want to record these reductions in the MatchTypeTrace. + * They should however only be attempted if they eventually expand + * to a match type, which is ensured by the `isMatchAlias` guard. + */ } } case _ => NoType - } tryCompiletimeConstantFold.orElse(tryMatchAlias) case _ => NoType } - /** Does this application expand to a match type? */ - def isMatchAlias(using Context): Boolean = tycon.stripTypeVar match - case tycon: TypeRef => - tycon.info match - case _: MatchAlias => true - case _ => false - case _ => false - /** Is this an unreducible application to wildcard arguments? * This is the case if tycon is higher-kinded. This means * it is a subtype of a hk-lambda, but not a match alias. @@ -4850,8 +4936,13 @@ object Types extends TypeUtils { * @param origin the parameter that's tracked by the type variable. * @param creatorState the typer state in which the variable was created. * @param initNestingLevel the initial nesting level of the type variable. (c.f. nestingLevel) + * @param precise whether we should use instantiation without widening for this TypeVar. 
*/ - final class TypeVar private(initOrigin: TypeParamRef, creatorState: TyperState | Null, val initNestingLevel: Int) extends CachedProxyType with ValueType { + final class TypeVar private( + initOrigin: TypeParamRef, + creatorState: TyperState | Null, + val initNestingLevel: Int, + val precise: Boolean) extends CachedProxyType with ValueType { private var currentOrigin = initOrigin def origin: TypeParamRef = currentOrigin @@ -4862,11 +4953,15 @@ object Types extends TypeUtils { def setOrigin(p: TypeParamRef) = currentOrigin = p /** The permanent instance type of the variable, or NoType is none is given yet */ - private var myInst: Type = NoType + private var inst: Type = NoType - private[core] def inst: Type = myInst - private[core] def setInst(tp: Type): Unit = - myInst = tp + /** The permanent instance type that's stored in the type variable, so it cannot be retracted + * anymore, or NoType if the variable can still be further constrained or a provisional + * instance type in the constraint can be retracted. + */ + private[core] def permanentInst = inst + private[core] def setPermanentInst(tp: Type): Unit = + inst = tp if tp.exists && owningState != null then val owningState1 = owningState.uncheckedNN.get if owningState1 != null then @@ -4874,8 +4969,8 @@ object Types extends TypeUtils { owningState = null // no longer needed; null out to avoid a memory leak private[core] def resetInst(ts: TyperState): Unit = - assert(myInst.exists) - myInst = NoType + assert(inst.exists) + inst = NoType owningState = new WeakReference(ts) /** The state owning the variable. This is at first `creatorState`, but it can @@ -4913,10 +5008,15 @@ object Types extends TypeUtils { /** Is the variable already instantiated? */ def isInstantiated(using Context): Boolean = instanceOpt.exists + /** Is the variable already instantiated so that the instance cannot be + * retracted anymore? 
+ */ + def isPermanentlyInstantiated: Boolean = inst.exists + /** Instantiate variable with given type */ def instantiateWith(tp: Type)(using Context): Type = { assert(tp ne this, i"self instantiation of $origin, constraint = ${ctx.typerState.constraint}") - assert(!myInst.exists, i"$origin is already instantiated to $myInst but we attempted to instantiate it to $tp") + assert(!inst.exists, i"$origin is already instantiated to $inst but we attempted to instantiate it to $tp") typr.println(i"instantiating $this with $tp") if Config.checkConstraintsSatisfiable then @@ -4924,13 +5024,13 @@ object Types extends TypeUtils { i"$origin is constrained to be $currentEntry but attempted to instantiate it to $tp") if ((ctx.typerState eq owningState.nn.get.uncheckedNN) && !TypeComparer.subtypeCheckInProgress) - setInst(tp) + setPermanentInst(tp) ctx.typerState.constraint = ctx.typerState.constraint.replace(origin, tp) tp } def typeToInstantiateWith(fromBelow: Boolean)(using Context): Type = - TypeComparer.instanceType(origin, fromBelow, widenUnions, nestingLevel) + TypeComparer.instanceType(origin, fromBelow, widenPolicy, nestingLevel) /** Instantiate variable from the constraints over its `origin`. * If `fromBelow` is true, the variable is instantiated to the lub @@ -4941,13 +5041,31 @@ object Types extends TypeUtils { */ def instantiate(fromBelow: Boolean)(using Context): Type = val tp = typeToInstantiateWith(fromBelow) - if myInst.exists then // The line above might have triggered instantiation of the current type variable - myInst + if inst.exists then // The line above might have triggered instantiation of the current type variable + inst else instantiateWith(tp) - /** Widen unions when instantiating this variable in the current context? */ - def widenUnions(using Context): Boolean = !ctx.typerState.constraint.isHard(this) + /** Should we suppress widening? True if this TypeVar is precise + * or if it has as an upper bound a precise TypeVar. 
+ */ + def isPrecise(using Context) = + precise || hasPreciseUpperBound + + private def hasPreciseUpperBound(using Context) = + val constr = ctx.typerState.constraint + constr.upper(origin).exists: tparam => + constr.typeVarOfParam(tparam) match + case tvar: TypeVar => tvar.precise + case _ => false + + /** The policy used for widening singletons or unions when instantiating + * this variable in the current context. + */ + def widenPolicy(using Context): Widen = + if isPrecise then Widen.None + else if ctx.typerState.constraint.isHard(this) then Widen.Singletons + else Widen.Unions /** For uninstantiated type variables: the entry in the constraint (either bounds or * provisional instance value) @@ -4988,8 +5106,18 @@ object Types extends TypeUtils { } } object TypeVar: - def apply(using Context)(initOrigin: TypeParamRef, creatorState: TyperState | Null, nestingLevel: Int = ctx.nestingLevel) = - new TypeVar(initOrigin, creatorState, nestingLevel) + def apply(using Context)( + initOrigin: TypeParamRef, + creatorState: TyperState | Null, + nestingLevel: Int = ctx.nestingLevel, + precise: Boolean = false) = + new TypeVar(initOrigin, creatorState, nestingLevel, precise) + + /** The three possible widening policies */ + enum Widen: + case None // no widening + case Singletons // widen singletons but not unions + case Unions // widen singletons and unions type TypeVars = SimpleIdentitySet[TypeVar] @@ -5017,7 +5145,7 @@ object Types extends TypeUtils { def underlying(using Context): Type = bound private var myReduced: Type | Null = null - private var reductionContext: util.MutableMap[Type, Type] = uninitialized + private var reductionContext: util.MutableMap[Type, Type] | Null = null override def tryNormalize(using Context): Type = try @@ -5028,7 +5156,7 @@ object Types extends TypeUtils { private def thisMatchType = this - def reduced(using Context): Type = { + def reduced(using Context): Type = atPhaseNoLater(elimOpaquePhase) { def contextInfo(tp: Type): Type = tp match 
{ case tp: TypeParamRef => @@ -5042,10 +5170,6 @@ object Types extends TypeUtils { tp.underlying } - def isUpToDate: Boolean = - reductionContext.keysIterator.forall: tp => - reductionContext(tp) `eq` contextInfo(tp) - def setReductionContext(): Unit = new TypeTraverser: var footprint: Set[Type] = Set() @@ -5076,33 +5200,36 @@ object Types extends TypeUtils { cases.foreach(traverse) reductionContext = util.HashMap() for tp <- footprint do - reductionContext(tp) = contextInfo(tp) + reductionContext.nn(tp) = contextInfo(tp) matchTypes.println(i"footprint for $thisMatchType $hashCode: ${footprint.toList.map(x => (x, contextInfo(x)))}%, %") end setReductionContext + def changedReductionContext(): Boolean = + val isUpToDate = reductionContext != null && reductionContext.nn.iterator.forall(contextInfo(_) `eq` _) + if !isUpToDate then setReductionContext() + !isUpToDate + record("MatchType.reduce called") if !Config.cacheMatchReduced || myReduced == null - || !isUpToDate + || changedReductionContext() || MatchTypeTrace.isRecording then record("MatchType.reduce computed") if (myReduced != null) record("MatchType.reduce cache miss") - myReduced = - trace(i"reduce match type $this $hashCode", matchTypes, show = true): + val saved = ctx.typerState.snapshot() + try + myReduced = trace(i"reduce match type $this $hashCode", matchTypes, show = true): withMode(Mode.Type): - setReductionContext() - def matchCases(cmp: MatchReducer): Type = - val saved = ctx.typerState.snapshot() - try - cmp.matchCases(scrutinee.normalized, cases.map(MatchTypeCaseSpec.analyze(_))) - catch case ex: Throwable => - handleRecursive("reduce type ", i"$scrutinee match ...", ex) - finally - ctx.typerState.resetTo(saved) - // this drops caseLambdas in constraint and undoes any typevar - // instantiations during matchtype reduction - TypeComparer.reduceMatchWith(matchCases) + TypeComparer.reduceMatchWith: cmp => + cmp.matchCases(scrutinee.normalized, cases.map(MatchTypeCaseSpec.analyze)) + catch case ex: 
Throwable => + myReduced = NoType + handleRecursive("reduce type ", i"$scrutinee match ...", ex) + finally + ctx.typerState.resetTo(saved) + // this drops caseLambdas in constraint and undoes any typevar + // instantiations during matchtype reduction //else println(i"no change for $this $hashCode / $myReduced") myReduced.nn @@ -5110,10 +5237,9 @@ object Types extends TypeUtils { /** True if the reduction uses GADT constraints. */ def reducesUsingGadt(using Context): Boolean = - (reductionContext ne null) && reductionContext.keysIterator.exists { - case tp: TypeRef => reductionContext(tp).exists - case _ => false - } + reductionContext != null && reductionContext.nn.iterator.exists: + case (tp: TypeRef, tpCtx) => tpCtx.exists + case _ => false override def computeHash(bs: Binders): Int = doHash(bs, scrutinee, bound :: cases) @@ -5130,20 +5256,13 @@ object Types extends TypeUtils { def apply(bound: Type, scrutinee: Type, cases: List[Type])(using Context): MatchType = unique(new CachedMatchType(bound, scrutinee, cases)) - def thatReducesUsingGadt(tp: Type)(using Context): Boolean = tp match - case MatchType.InDisguise(mt) => mt.reducesUsingGadt - case mt: MatchType => mt.reducesUsingGadt - case _ => false - - /** Extractor for match types hidden behind an AppliedType/MatchAlias. 
*/ - object InDisguise: - def unapply(tp: AppliedType)(using Context): Option[MatchType] = tp match - case AppliedType(tycon: TypeRef, args) => tycon.info match - case MatchAlias(alias) => alias.applyIfParameterized(args) match - case mt: MatchType => Some(mt) - case _ => None - case _ => None - case _ => None + def thatReducesUsingGadt(tp: Type)(using Context): Boolean = tp.underlyingMatchType match + case mt: MatchType => mt.reducesUsingGadt + case _ => false + + object Normalizing: + def unapply(tp: Type)(using Context): Option[Type] = + Some(tp.tryNormalize).filter(_.exists) } enum MatchTypeCasePattern: @@ -5629,6 +5748,14 @@ object Types extends TypeUtils { def lower(lo: Type)(using Context): TypeBounds = apply(lo, defn.AnyType) } + object AliasingBounds: + /** A MatchAlias if alias is a match type and a TypeAlias o.w. + * Note that aliasing a MatchAlias returns a normal TypeAlias. + */ + def apply(alias: Type)(using Context): AliasingBounds = + if alias.isMatch then MatchAlias(alias) else TypeAlias(alias) + def unapply(tp: AliasingBounds): Option[Type] = Some(tp.alias) + object TypeAlias { def apply(alias: Type)(using Context): TypeAlias = unique(new TypeAlias(alias)) def unapply(tp: TypeAlias): Option[Type] = Some(tp.alias) @@ -5885,9 +6012,9 @@ object Types extends TypeUtils { /** Copy type aliases refinements to `toTp` from `fromTp` */ def withRefinements(toType: Type, fromTp: Type): Type = fromTp.dealias match - case RefinedType(fromParent, name, info: TypeAlias) if tp0.member(name).exists => + case RefinedType(fromParent, name, info: AliasingBounds) if tp0.member(name).exists => val parent1 = withRefinements(toType, fromParent) - RefinedType(toType, name, info) + RefinedType(parent1, name, info) case _ => toType val tp = withRefinements(tp0, origTp) @@ -5943,6 +6070,8 @@ object Types extends TypeUtils { samClass(tp.underlying) case tp: AnnotatedType => samClass(tp.underlying) + case tp: FlexibleType => + samClass(tp.underlying) case _ => NoSymbol @@ 
-6073,6 +6202,8 @@ object Types extends TypeUtils { tp.derivedJavaArrayType(elemtp) protected def derivedExprType(tp: ExprType, restpe: Type): Type = tp.derivedExprType(restpe) + protected def derivedFlexibleType(tp: FlexibleType, hi: Type): Type = + tp.derivedFlexibleType(hi) // note: currying needed because Scala2 does not support param-dependencies protected def derivedLambdaType(tp: LambdaType)(formals: List[tp.PInfo], restpe: Type): Type = tp.derivedLambdaType(tp.paramNames, formals, restpe) @@ -6098,6 +6229,10 @@ object Types extends TypeUtils { variance = saved derivedLambdaType(tp)(ptypes1, this(restpe)) + protected def mapOverTypeVar(tp: TypeVar) = + val inst = tp.instanceOpt + if (inst.exists) apply(inst) else tp + def isRange(tp: Type): Boolean = tp.isInstanceOf[Range] protected def mapCapturingType(tp: Type, parent: Type, refs: CaptureSet, v: Int): Type = @@ -6135,8 +6270,7 @@ object Types extends TypeUtils { derivedTypeBounds(tp, lo1, this(tp.hi)) case tp: TypeVar => - val inst = tp.instanceOpt - if (inst.exists) apply(inst) else tp + mapOverTypeVar(tp) case tp: ExprType => derivedExprType(tp, this(tp.resultType)) @@ -6196,6 +6330,9 @@ object Types extends TypeUtils { case tp: OrType => derivedOrType(tp, this(tp.tp1), this(tp.tp2)) + case tp: FlexibleType => + derivedFlexibleType(tp, this(tp.hi)) + case tp: MatchType => val bound1 = this(tp.bound) val scrut1 = atVariance(0)(this(tp.scrutinee)) @@ -6483,6 +6620,17 @@ object Types extends TypeUtils { if (underlying.isExactlyNothing) underlying else tp.derivedAnnotatedType(underlying, annot) } + + override protected def derivedFlexibleType(tp: FlexibleType, hi: Type): Type = + hi match { + case Range(lo, hi) => + // We know FlexibleType(t).hi = t and FlexibleType(t).lo = OrNull(t) + range(OrNull(lo), hi) + case _ => + if (hi.isExactlyNothing) hi + else tp.derivedFlexibleType(hi) + } + override protected def derivedCapturingType(tp: Type, parent: Type, refs: CaptureSet): Type = parent match // TODO ^^^ 
handle ranges in capture sets as well case Range(lo, hi) => @@ -6536,6 +6684,16 @@ object Types extends TypeUtils { tp.derivedLambdaType(tp.paramNames, formals, restpe) } + override protected def mapOverTypeVar(tp: TypeVar) = + val inst = tp.instanceOpt + if !inst.exists then tp + else + // We can keep the original type var if its instance is not transformed + // by the ApproximatingTypeMap. This allows for simpler bounds and for + // derivedSkolemType to retain more skolems, by keeping the info unchanged. + val res = apply(inst) + if res eq inst then tp else res + protected def reapply(tp: Type): Type = apply(tp) } @@ -6612,6 +6770,9 @@ object Types extends TypeUtils { case tp: TypeVar => this(x, tp.underlying) + case tp: FlexibleType => + this(x, tp.underlying) + case ExprType(restpe) => this(x, restpe) diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 894d430fe54b..22a43dd524e1 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -23,7 +23,7 @@ import scala.annotation.switch import typer.Checking.checkNonCyclic import io.{AbstractFile, ZipArchive} import scala.util.control.NonFatal -import dotty.tools.dotc.classpath.FileUtils.classToTasty +import dotty.tools.dotc.classpath.FileUtils.hasSiblingTasty import scala.compiletime.uninitialized @@ -1143,7 +1143,7 @@ class ClassfileParser( if (scan(tpnme.TASTYATTR)) { val hint = - if classfile.classToTasty.isDefined then "This is likely a bug in the compiler. Please report." + if classfile.hasSiblingTasty then "This is likely a bug in the compiler. Please report." else "This `.tasty` file is missing. Try cleaning the project to fix this issue." report.error(s"Loading Scala 3 binary from $classfile. It should have been loaded from `.tasty` file. 
$hint", NoSourcePosition) return None diff --git a/compiler/src/dotty/tools/dotc/core/tasty/BestEffortTastyWriter.scala b/compiler/src/dotty/tools/dotc/core/tasty/BestEffortTastyWriter.scala new file mode 100644 index 000000000000..9cdfb042b8fb --- /dev/null +++ b/compiler/src/dotty/tools/dotc/core/tasty/BestEffortTastyWriter.scala @@ -0,0 +1,43 @@ +package dotty.tools.dotc +package core +package tasty + +import scala.language.unsafeNulls +import java.nio.file.{Path as JPath, Files as JFiles} +import java.nio.channels.ClosedByInterruptException +import java.io.DataOutputStream +import dotty.tools.io.{File, PlainFile} +import dotty.tools.dotc.core.Contexts.Context + +object BestEffortTastyWriter: + + def write(dir: JPath, units: List[CompilationUnit])(using Context): Unit = + if JFiles.exists(dir) then JFiles.createDirectories(dir) + + units.foreach { unit => + unit.pickled.foreach { (clz, binary) => + val parts = clz.fullName.mangledString.split('.') + val outPath = outputPath(parts.toList, dir) + val outTastyFile = new PlainFile(new File(outPath)) + val outstream = new DataOutputStream(outTastyFile.bufferedOutput) + try outstream.write(binary()) + catch case ex: ClosedByInterruptException => + try + outTastyFile.delete() // don't leave an empty or half-written tastyfile around after an interrupt + catch + case _: Throwable => + throw ex + finally outstream.close() + } + } + + def outputPath(parts: List[String], acc: JPath): JPath = + parts match + case Nil => throw new Exception("Invalid class name") + case last :: Nil => + val name = last.stripSuffix("$") + acc.resolve(s"$name.betasty") + case pkg :: tail => + val next = acc.resolve(pkg) + if !JFiles.exists(next) then JFiles.createDirectory(next) + outputPath(tail, next) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala index 4f083b09b015..3605a6cc9515 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala 
+++ b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala @@ -23,10 +23,14 @@ object DottyUnpickler { /** Exception thrown if classfile is corrupted */ class BadSignature(msg: String) extends RuntimeException(msg) - class TreeSectionUnpickler(compilationUnitInfo: CompilationUnitInfo, posUnpickler: Option[PositionUnpickler], commentUnpickler: Option[CommentUnpickler]) - extends SectionUnpickler[TreeUnpickler](ASTsSection) { + class TreeSectionUnpickler( + compilationUnitInfo: CompilationUnitInfo, + posUnpickler: Option[PositionUnpickler], + commentUnpickler: Option[CommentUnpickler], + isBestEffortTasty: Boolean + ) extends SectionUnpickler[TreeUnpickler](ASTsSection) { def unpickle(reader: TastyReader, nameAtRef: NameTable): TreeUnpickler = - new TreeUnpickler(reader, nameAtRef, compilationUnitInfo, posUnpickler, commentUnpickler) + new TreeUnpickler(reader, nameAtRef, compilationUnitInfo, posUnpickler, commentUnpickler, isBestEffortTasty) } class PositionsSectionUnpickler extends SectionUnpickler[PositionUnpickler](PositionsSection) { @@ -46,15 +50,21 @@ object DottyUnpickler { } /** A class for unpickling Tasty trees and symbols. 
- * @param tastyFile tasty file from which we unpickle (used for CompilationUnitInfo) - * @param bytes the bytearray containing the Tasty file from which we unpickle - * @param mode the tasty file contains package (TopLevel), an expression (Term) or a type (TypeTree) + * @param tastyFile tasty file from which we unpickle (used for CompilationUnitInfo) + * @param bytes the bytearray containing the Tasty file from which we unpickle + * @param isBestEffortTasty specifies whether file should be unpickled as a Best Effort TASTy + * @param mode the tasty file contains package (TopLevel), an expression (Term) or a type (TypeTree) */ -class DottyUnpickler(tastyFile: AbstractFile, bytes: Array[Byte], mode: UnpickleMode = UnpickleMode.TopLevel) extends ClassfileParser.Embedded with tpd.TreeProvider { +class DottyUnpickler( + tastyFile: AbstractFile, + bytes: Array[Byte], + isBestEffortTasty: Boolean, + mode: UnpickleMode = UnpickleMode.TopLevel +) extends ClassfileParser.Embedded with tpd.TreeProvider { import tpd.* import DottyUnpickler.* - val unpickler: TastyUnpickler = new TastyUnpickler(bytes) + val unpickler: TastyUnpickler = new TastyUnpickler(bytes, isBestEffortTasty) val tastyAttributes: Attributes = unpickler.unpickle(new AttributesSectionUnpickler) @@ -67,7 +77,7 @@ class DottyUnpickler(tastyFile: AbstractFile, bytes: Array[Byte], mode: Unpickle private val posUnpicklerOpt = unpickler.unpickle(new PositionsSectionUnpickler) private val commentUnpicklerOpt = unpickler.unpickle(new CommentsSectionUnpickler) - private val treeUnpickler = unpickler.unpickle(treeSectionUnpickler(posUnpicklerOpt, commentUnpicklerOpt)).get + private val treeUnpickler = unpickler.unpickle(treeSectionUnpickler(posUnpicklerOpt, commentUnpicklerOpt, isBestEffortTasty)).get /** Enter all toplevel classes and objects into their scopes * @param roots a set of SymDenotations that should be overwritten by unpickling @@ -78,8 +88,9 @@ class DottyUnpickler(tastyFile: AbstractFile, bytes: 
Array[Byte], mode: Unpickle protected def treeSectionUnpickler( posUnpicklerOpt: Option[PositionUnpickler], commentUnpicklerOpt: Option[CommentUnpickler], + withBestEffortTasty: Boolean ): TreeSectionUnpickler = - new TreeSectionUnpickler(compilationUnitInfo, posUnpicklerOpt, commentUnpicklerOpt) + new TreeSectionUnpickler(compilationUnitInfo, posUnpicklerOpt, commentUnpicklerOpt, withBestEffortTasty) protected def computeRootTrees(using Context): List[Tree] = treeUnpickler.unpickle(mode) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyAnsiiPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyAnsiiPrinter.scala index a3d8cedacb4a..3755b6e8b4b6 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyAnsiiPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyAnsiiPrinter.scala @@ -2,9 +2,9 @@ package dotty.tools.dotc package core package tasty -class TastyAnsiiPrinter(bytes: Array[Byte], testPickler: Boolean) extends TastyPrinter(bytes, testPickler) { +class TastyAnsiiPrinter(bytes: Array[Byte], isBestEffortTasty: Boolean, testPickler: Boolean) extends TastyPrinter(bytes, isBestEffortTasty, testPickler) { - def this(bytes: Array[Byte]) = this(bytes, testPickler = false) + def this(bytes: Array[Byte]) = this(bytes, isBestEffortTasty = false, testPickler = false) override protected def nameStr(str: String): String = Console.MAGENTA + str + Console.RESET override protected def treeStr(str: String): String = Console.YELLOW + str + Console.RESET diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala index 0a7068b65445..f9d8e10cf16a 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala @@ -12,9 +12,9 @@ import TastyUnpickler.* import dotty.tools.tasty.TastyFormat.ASTsSection /** Reads the package and class name of the class contained in this TASTy */ -class 
TastyClassName(bytes: Array[Byte]) { +class TastyClassName(bytes: Array[Byte], isBestEffortTasty: Boolean = false) { - val unpickler: TastyUnpickler = new TastyUnpickler(bytes) + val unpickler: TastyUnpickler = new TastyUnpickler(bytes, isBestEffortTasty) import unpickler.{nameAtRef, unpickle} /** Returns a tuple with the package and class names */ diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyHTMLPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyHTMLPrinter.scala index b234705413ae..b9cba2e09937 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyHTMLPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyHTMLPrinter.scala @@ -2,7 +2,7 @@ package dotty.tools.dotc package core package tasty -class TastyHTMLPrinter(bytes: Array[Byte]) extends TastyPrinter(bytes) { +class TastyHTMLPrinter(bytes: Array[Byte]) extends TastyPrinter(bytes, isBestEffortTasty = false, testPickler = false) { override protected def nameStr(str: String): String = s"$str" override protected def treeStr(str: String): String = s"$str" override protected def lengthStr(str: String): String = s"$str" diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala index 214f7a5f6702..e35ed5bb2466 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala @@ -6,6 +6,7 @@ package tasty import scala.language.unsafeNulls import dotty.tools.tasty.{TastyBuffer, TastyFormat, TastyHash} +import dotty.tools.tasty.besteffort.BestEffortTastyFormat import TastyFormat.* import TastyBuffer.* @@ -16,7 +17,7 @@ import Decorators.* object TastyPickler: private val versionString = s"Scala ${config.Properties.simpleVersionString}" -class TastyPickler(val rootCls: ClassSymbol) { +class TastyPickler(val rootCls: ClassSymbol, isBestEffortTasty: Boolean) { private val sections = new mutable.ArrayBuffer[(NameRef, TastyBuffer)] @@ 
-42,10 +43,12 @@ class TastyPickler(val rootCls: ClassSymbol) { val uuidHi: Long = otherSectionHashes.fold(0L)(_ ^ _) val headerBuffer = { - val buf = new TastyBuffer(header.length + TastyPickler.versionString.length + 32) - for (ch <- header) buf.writeByte(ch.toByte) + val fileHeader = if isBestEffortTasty then BestEffortTastyFormat.bestEffortHeader else header + val buf = new TastyBuffer(fileHeader.length + TastyPickler.versionString.length + 32) + for (ch <- fileHeader) buf.writeByte(ch.toByte) buf.writeNat(MajorVersion) buf.writeNat(MinorVersion) + if isBestEffortTasty then buf.writeNat(BestEffortTastyFormat.PatchVersion) buf.writeNat(ExperimentalVersion) buf.writeUtf8(TastyPickler.versionString) buf.writeUncompressedLong(uuidLow) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala index af2097f347ba..72f6895f122c 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala @@ -18,46 +18,50 @@ import scala.collection.immutable.BitSet import scala.compiletime.uninitialized import dotty.tools.tasty.TastyBuffer.Addr import dotty.tools.dotc.core.Names.TermName +import dotty.tools.dotc.classpath.FileUtils.hasTastyExtension object TastyPrinter: def showContents(bytes: Array[Byte], noColor: Boolean): String = - showContents(bytes, noColor, testPickler = false) + showContents(bytes, noColor, isBestEffortTasty = false, testPickler = false) - def showContents(bytes: Array[Byte], noColor: Boolean, testPickler: Boolean = false): String = + def showContents(bytes: Array[Byte], noColor: Boolean, isBestEffortTasty: Boolean, testPickler: Boolean = false): String = val printer = - if noColor then new TastyPrinter(bytes, testPickler) - else new TastyAnsiiPrinter(bytes, testPickler) + if noColor then new TastyPrinter(bytes, isBestEffortTasty, testPickler) + else new TastyAnsiiPrinter(bytes, isBestEffortTasty, testPickler) 
printer.showContents() def main(args: Array[String]): Unit = { // TODO: Decouple CliCommand from Context and use CliCommand.distill? + val betastyOpt = "-Ywith-best-effort-tasty" val lineWidth = 80 val line = "-" * lineWidth val noColor = args.contains("-color:never") + val allowBetasty = args.contains(betastyOpt) var printLastLine = false - def printTasty(fileName: String, bytes: Array[Byte]): Unit = + def printTasty(fileName: String, bytes: Array[Byte], isBestEffortTasty: Boolean): Unit = println(line) println(fileName) println(line) - println(showContents(bytes, noColor)) + println(showContents(bytes, noColor, isBestEffortTasty, testPickler = false)) println() printLastLine = true for arg <- args do if arg == "-color:never" then () // skip + else if arg == betastyOpt then () // skip else if arg.startsWith("-") then println(s"bad option '$arg' was ignored") - else if arg.endsWith(".tasty") then + else if arg.endsWith(".tasty") || (allowBetasty && arg.endsWith(".betasty")) then val path = Paths.get(arg) if Files.exists(path) then - printTasty(arg, Files.readAllBytes(path).nn) + printTasty(arg, Files.readAllBytes(path).nn, arg.endsWith(".betasty")) else println("File not found: " + arg) System.exit(1) else if arg.endsWith(".jar") then val jar = JarArchive.open(Path(arg), create = false) try - for file <- jar.iterator() if file.name.endsWith(".tasty") do - printTasty(s"$arg ${file.path}", file.toByteArray) + for file <- jar.iterator() if file.hasTastyExtension do + printTasty(s"$arg ${file.path}", file.toByteArray, isBestEffortTasty = false) finally jar.close() else println(s"Not a '.tasty' or '.jar' file: $arg") @@ -67,11 +71,11 @@ object TastyPrinter: println(line) } -class TastyPrinter(bytes: Array[Byte], val testPickler: Boolean) { +class TastyPrinter(bytes: Array[Byte], isBestEffortTasty: Boolean, val testPickler: Boolean) { - def this(bytes: Array[Byte]) = this(bytes, testPickler = false) + def this(bytes: Array[Byte]) = this(bytes, isBestEffortTasty = false, 
testPickler = false) - class TastyPrinterUnpickler extends TastyUnpickler(bytes) { + class TastyPrinterUnpickler extends TastyUnpickler(bytes, isBestEffortTasty) { var namesStart: Addr = uninitialized var namesEnd: Addr = uninitialized override def readNames() = { @@ -129,7 +133,7 @@ class TastyPrinter(bytes: Array[Byte], val testPickler: Boolean) { }) class TreeSectionUnpickler(sb: StringBuilder) extends PrinterSectionUnpickler[Unit](ASTsSection) { - import dotty.tools.tasty.TastyFormat.* + import dotty.tools.tasty.besteffort.BestEffortTastyFormat.* // superset on TastyFormat def unpickle0(reader: TastyReader)(using refs: NameRefs): Unit = { import reader.* var indent = 0 diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala index 6fe648ee98d3..f034f03298b1 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala @@ -2,9 +2,12 @@ package dotty.tools.dotc package core package tasty +import java.util.UUID import scala.language.unsafeNulls import dotty.tools.tasty.{TastyFormat, TastyVersion, TastyBuffer, TastyReader, TastyHeaderUnpickler, UnpicklerConfig} +import dotty.tools.tasty.besteffort.{BestEffortTastyHeader, BestEffortTastyHeaderUnpickler} + import TastyFormat.NameTags.*, TastyFormat.nameTagToString import TastyBuffer.NameRef @@ -14,6 +17,18 @@ import NameKinds.* import dotty.tools.tasty.TastyHeader import dotty.tools.tasty.TastyBuffer.Addr +case class CommonTastyHeader( + uuid: UUID, + majorVersion: Int, + minorVersion: Int, + experimentalVersion: Int, + toolingVersion: String +): + def this(h: TastyHeader) = + this(h.uuid, h.majorVersion, h.minorVersion, h.experimentalVersion, h.toolingVersion) + def this(h: BestEffortTastyHeader) = + this(h.uuid, h.majorVersion, h.minorVersion, h.experimentalVersion, h.toolingVersion) + object TastyUnpickler { abstract class SectionUnpickler[R](val name: 
String) { @@ -63,10 +78,11 @@ object TastyUnpickler { import TastyUnpickler.* -class TastyUnpickler(protected val reader: TastyReader) { +class TastyUnpickler(protected val reader: TastyReader, isBestEffortTasty: Boolean) { import reader.* - def this(bytes: Array[Byte]) = this(new TastyReader(bytes)) + def this(bytes: Array[Byte]) = this(new TastyReader(bytes), false) + def this(bytes: Array[Byte], isBestEffortTasty: Boolean) = this(new TastyReader(bytes), isBestEffortTasty) private val sectionReader = new mutable.HashMap[String, TastyReader] val nameAtRef: NameTable = new NameTable @@ -123,8 +139,11 @@ class TastyUnpickler(protected val reader: TastyReader) { result } - val header: TastyHeader = - new TastyHeaderUnpickler(scala3CompilerConfig, reader).readFullHeader() + val header: CommonTastyHeader = + if isBestEffortTasty then + new CommonTastyHeader(new BestEffortTastyHeaderUnpickler(scala3CompilerConfig, reader).readFullHeader()) + else + new CommonTastyHeader(new TastyHeaderUnpickler(reader).readFullHeader()) def readNames(): Unit = until(readEnd()) { nameAtRef.add(readNameContents()) } diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 7d2d95aa9601..8d1eca8fb5f0 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -6,6 +6,7 @@ package tasty import scala.language.unsafeNulls import dotty.tools.tasty.TastyFormat.* +import dotty.tools.tasty.besteffort.BestEffortTastyFormat.ERRORtype import dotty.tools.tasty.TastyBuffer.* import ast.Trees.* @@ -65,6 +66,17 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { fillRef(lengthAddr, currentAddr, relative = true) } + /** There are certain expectations with code which is naturally able to reach + * pickling phase as opposed to one that uses best-effort compilation features. 
+ * When pickling betasty files, we do some custom checks, in case those + * expectations cannot be fulfilled, and if so, then we can try to do something + * else (usually pickle an ERRORtype). + * For regular non best-effort compilation (without -Ybest-effort with thrown errors + * and without using .betasty on classpath), this will always return true. + */ + private inline def passesConditionForErroringBestEffortCode(condition: => Boolean)(using Context): Boolean = + !((ctx.isBestEffort && ctx.reporter.errorsReported) || ctx.usedBestEffortTasty) || condition + def addrOfSym(sym: Symbol): Option[Addr] = symRefs.get(sym) @@ -272,6 +284,9 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { case tpe: OrType => writeByte(ORtype) withLength { pickleType(tpe.tp1, richTypes); pickleType(tpe.tp2, richTypes) } + case tpe: FlexibleType => + writeByte(FLEXIBLEtype) + withLength { pickleType(tpe.underlying, richTypes) } case tpe: ExprType => writeByte(BYNAMEtype) pickleType(tpe.underlying) @@ -292,9 +307,13 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { else if tpe.isImplicitMethod then mods |= Implicit pickleMethodic(METHODtype, tpe, mods) case tpe: ParamRef => - assert(pickleParamRef(tpe), s"orphan parameter reference: $tpe") + val pickled = pickleParamRef(tpe) + if !ctx.isBestEffort then assert(pickled, s"orphan parameter reference: $tpe") + else if !pickled then pickleErrorType() case tpe: LazyRef => pickleType(tpe.ref) + case _ if ctx.isBestEffort => + pickleErrorType() } def pickleMethodic(tag: Int, tpe: LambdaType, mods: FlagSet)(using Context): Unit = { @@ -318,8 +337,13 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { pickled } + def pickleErrorType(): Unit = { + writeByte(ERRORtype) + } + def pickleTpt(tpt: Tree)(using Context): Unit = - pickleTree(tpt) + if passesConditionForErroringBestEffortCode(tpt.isType) then pickleTree(tpt) + else pickleErrorType() def pickleTreeUnlessEmpty(tree: Tree)(using 
Context): Unit = { if (!tree.isEmpty) pickleTree(tree) @@ -333,39 +357,45 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { def pickleDef(tag: Int, mdef: MemberDef, tpt: Tree, rhs: Tree = EmptyTree, pickleParams: => Unit = ())(using Context): Unit = { val sym = mdef.symbol - assert(symRefs(sym) == NoAddr, sym) - registerDef(sym) - writeByte(tag) - val addr = currentAddr - try - withLength { - pickleName(sym.name) - pickleParams - tpt match { - case _: Template | _: Hole => pickleTree(tpt) - case _ if tpt.isType => pickleTpt(tpt) + def isDefSymPreRegisteredAndTreeHasCorrectStructure() = + symRefs.get(sym) == Some(NoAddr) && // check if symbol id preregistered (with the preRegister method) + !(tag == TYPEDEF && tpt.isInstanceOf[Template] && !tpt.symbol.exists) // in case this is a TEMPLATE, check if we are able to pickle it + + if passesConditionForErroringBestEffortCode(isDefSymPreRegisteredAndTreeHasCorrectStructure()) then + assert(symRefs(sym) == NoAddr, sym) + registerDef(sym) + writeByte(tag) + val addr = currentAddr + try + withLength { + pickleName(sym.name) + pickleParams + tpt match { + case _: Template | _: Hole => pickleTree(tpt) + case _ if tpt.isType => pickleTpt(tpt) + case _ if ctx.isBestEffort => pickleErrorType() + } + if isOutlinePickle && sym.isTerm && isJavaPickle then + // TODO: if we introduce outline typing for Scala definitions + // then we will need to update the check here + pickleElidedUnlessEmpty(rhs, tpt.tpe) + else + pickleTreeUnlessEmpty(rhs) + pickleModifiers(sym, mdef) } - if isOutlinePickle && sym.isTerm && isJavaPickle then - // TODO: if we introduce outline typing for Scala definitions - // then we will need to update the check here - pickleElidedUnlessEmpty(rhs, tpt.tpe) - else - pickleTreeUnlessEmpty(rhs) - pickleModifiers(sym, mdef) - } - catch - case ex: Throwable => - if !ctx.settings.YnoDecodeStacktraces.value - && handleRecursive.underlyingStackOverflowOrNull(ex) != null then - throw 
StackSizeExceeded(mdef) - else - throw ex - if sym.is(Method) && sym.owner.isClass then - profile.recordMethodSize(sym, (currentAddr.index - addr.index) max 1, mdef.span) - for docCtx <- ctx.docCtx do - val comment = docCtx.docstrings.lookup(sym) - if comment != null then - docStrings(mdef) = comment + catch + case ex: Throwable => + if !ctx.settings.XnoDecodeStacktraces.value + && handleRecursive.underlyingStackOverflowOrNull(ex) != null then + throw StackSizeExceeded(mdef) + else + throw ex + if sym.is(Method) && sym.owner.isClass then + profile.recordMethodSize(sym, (currentAddr.index - addr.index) max 1, mdef.span) + for docCtx <- ctx.docCtx do + val comment = docCtx.docstrings.lookup(sym) + if comment != null then + docStrings(mdef) = comment } def pickleParam(tree: Tree)(using Context): Unit = { @@ -395,15 +425,17 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { else try tree match { case Ident(name) => - tree.tpe match { - case tp: TermRef if name != nme.WILDCARD => - // wildcards are pattern bound, need to be preserved as ids. - pickleType(tp) - case tp => - writeByte(if (tree.isType) IDENTtpt else IDENT) - pickleName(name) - pickleType(tp) - } + if passesConditionForErroringBestEffortCode(tree.hasType) then + tree.tpe match { + case tp: TermRef if name != nme.WILDCARD => + // wildcards are pattern bound, need to be preserved as ids. + pickleType(tp) + case tp => + writeByte(if (tree.isType) IDENTtpt else IDENT) + pickleName(name) + pickleType(tp) + } + else pickleErrorType() case This(qual) => // This may be needed when pickling a `This` inside a capture set. See #19662 and #19859. // In this case, we pickle the tree as null.asInstanceOf[tree.tpe]. 
@@ -419,6 +451,8 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { case ThisType(tref) => writeByte(QUALTHIS) pickleTree(qual.withType(tref)) + case _: ErrorType if ctx.isBestEffort => + pickleTree(qual) case _ => pickleCapturedThis case Select(qual, name) => name match { @@ -431,25 +465,31 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { pickleType(tp) } case _ => - val sig = tree.tpe.signature - var ename = tree.symbol.targetName - val selectFromQualifier = - name.isTypeName - || qual.isInstanceOf[Hole] // holes have no symbol - || sig == Signature.NotAMethod // no overload resolution necessary - || !tree.denot.symbol.exists // polymorphic function type - || tree.denot.asSingleDenotation.isRefinedMethod // refined methods have no defining class symbol - if selectFromQualifier then + if passesConditionForErroringBestEffortCode(tree.hasType) then + val sig = tree.tpe.signature + var ename = tree.symbol.targetName + val selectFromQualifier = + name.isTypeName + || qual.isInstanceOf[Hole] // holes have no symbol + || sig == Signature.NotAMethod // no overload resolution necessary + || !tree.denot.symbol.exists // polymorphic function type + || tree.denot.asSingleDenotation.isRefinedMethod // refined methods have no defining class symbol + if selectFromQualifier then + writeByte(if name.isTypeName then SELECTtpt else SELECT) + pickleNameAndSig(name, sig, ename) + pickleTree(qual) + else // select from owner + writeByte(SELECTin) + withLength { + pickleNameAndSig(name, tree.symbol.signature, ename) + pickleTree(qual) + pickleType(tree.symbol.owner.typeRef) + } + else writeByte(if name.isTypeName then SELECTtpt else SELECT) - pickleNameAndSig(name, sig, ename) + val ename = tree.symbol.targetName + pickleNameAndSig(name, Signature.NotAMethod, ename) pickleTree(qual) - else // select from owner - writeByte(SELECTin) - withLength { - pickleNameAndSig(name, tree.symbol.signature, ename) - pickleTree(qual) - 
pickleType(tree.symbol.owner.typeRef) - } } case Apply(fun, args) => if (fun.symbol eq defn.throwMethod) { @@ -477,12 +517,14 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { args.foreach(pickleTpt) } case Literal(const1) => - pickleConstant { - tree.tpe match { - case ConstantType(const2) => const2 - case _ => const1 + if passesConditionForErroringBestEffortCode(tree.hasType) then + pickleConstant { + tree.tpe match { + case ConstantType(const2) => const2 + case _ => const1 + } } - } + else pickleConstant(const1) case Super(qual, mix) => writeByte(SUPER) withLength { @@ -654,19 +696,22 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { writeByte(PACKAGE) withLength { pickleType(pid.tpe); pickleStats(stats) } case tree: TypeTree => - pickleType(tree.tpe) + if passesConditionForErroringBestEffortCode(tree.hasType) then pickleType(tree.tpe) + else pickleErrorType() case SingletonTypeTree(ref) => writeByte(SINGLETONtpt) pickleTree(ref) case RefinedTypeTree(parent, refinements) => if (refinements.isEmpty) pickleTree(parent) else { - val refineCls = refinements.head.symbol.owner.asClass - registerDef(refineCls) - pickledTypes(refineCls.typeRef) = currentAddr - writeByte(REFINEDtpt) - refinements.foreach(preRegister) - withLength { pickleTree(parent); refinements.foreach(pickleTree) } + if passesConditionForErroringBestEffortCode(refinements.head.symbol.exists) then + val refineCls = refinements.head.symbol.owner.asClass + registerDef(refineCls) + pickledTypes(refineCls.typeRef) = currentAddr + writeByte(REFINEDtpt) + refinements.foreach(preRegister) + withLength { pickleTree(parent); refinements.foreach(pickleTree) } + else pickleErrorType() } case AppliedTypeTree(tycon, args) => writeByte(APPLIEDtpt) @@ -698,28 +743,40 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { pickleTree(alias) } case tree @ Quote(body, Nil) => - // TODO: Add QUOTE tag to TASTy assert(body.isTerm, """Quote with type should not be 
pickled. |Quote with type should only exists after staging phase at staging level 0.""".stripMargin) - pickleTree( - // scala.quoted.runtime.Expr.quoted[]() - ref(defn.QuotedRuntime_exprQuote) - .appliedToType(tree.bodyType) - .appliedTo(body) - .withSpan(tree.span) - ) + writeByte(QUOTE) + withLength { + pickleTree(body) + pickleType(tree.bodyType) + } case Splice(expr) => - pickleTree( // TODO: Add SPLICE tag to TASTy - // scala.quoted.runtime.Expr.splice[]() - ref(defn.QuotedRuntime_exprSplice) - .appliedToType(tree.tpe) - .appliedTo(expr) - .withSpan(tree.span) - ) - case tree: QuotePattern => - // TODO: Add QUOTEPATTERN tag to TASTy - pickleTree(QuotePatterns.encode(tree)) + writeByte(SPLICE) + withLength { + pickleTree(expr) + pickleType(tree.tpe) + } + case QuotePattern(bindings, body, quotes) => + writeByte(QUOTEPATTERN) + withLength { + if body.isType then writeByte(EXPLICITtpt) + pickleTree(body) + pickleTree(quotes) + pickleType(tree.tpe) + bindings.foreach(pickleTree) + } + case SplicePattern(pat, args) => + val targs = Nil // SplicePattern `targs` will be added with #18271 + writeByte(SPLICEPATTERN) + withLength { + pickleTree(pat) + pickleType(tree.tpe) + for targ <- targs do + writeByte(EXPLICITtpt) + pickleTree(targ) + args.foreach(pickleTree) + } case Hole(_, idx, args, _) => writeByte(HOLE) withLength { @@ -732,10 +789,13 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { pickleTree(arg) } } + case other if ctx.isBestEffort => + pickleErrorType() } catch { case ex: TypeError => report.error(ex.toMessage, tree.srcPos.focus) + pickleErrorType() case ex: AssertionError => println(i"error when pickling tree $tree") throw ex @@ -807,6 +867,7 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { if (flags.is(Exported)) writeModTag(EXPORTED) if (flags.is(Given)) writeModTag(GIVEN) if (flags.is(Implicit)) writeModTag(IMPLICIT) + if (flags.is(Tracked)) writeModTag(TRACKED) if (isTerm) { if (flags.is(Lazy, butNot = 
Module)) writeModTag(LAZY) if (flags.is(AbsOverride)) { writeModTag(ABSTRACT); writeModTag(OVERRIDE) } @@ -837,6 +898,7 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { // a different toplevel class, it is impossible to pickle a reference to it. // Such annotations will be reconstituted when unpickling the child class. // See tests/pickling/i3149.scala + case _ if ctx.isBestEffort && !ann.symbol.denot.isError => true case _ => ann.symbol == defn.BodyAnnot // inline bodies are reconstituted automatically when unpickling } @@ -863,7 +925,7 @@ class TreePickler(pickler: TastyPickler, attributes: Attributes) { em"""Recursion limit exceeded while pickling ${ex.mdef} |in ${ex.mdef.symbol.showLocated}. |You could try to increase the stacksize using the -Xss JVM option. - |For the unprocessed stack trace, compile with -Yno-decode-stacktraces.""", + |For the unprocessed stack trace, compile with -Xno-decode-stacktraces.""", ex.mdef.srcPos) def missing = forwardSymRefs.keysIterator diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 57c0b2217e9d..91a5899146cc 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -31,7 +31,8 @@ import util.{SourceFile, Property} import ast.{Trees, tpd, untpd} import Trees.* import Decorators.* -import dotty.tools.dotc.quoted.QuotePatterns +import config.Feature +import quoted.QuotePatterns import dotty.tools.tasty.{TastyBuffer, TastyReader} import TastyBuffer.* @@ -42,6 +43,7 @@ import scala.collection.mutable import config.Printers.pickling import dotty.tools.tasty.TastyFormat.* +import dotty.tools.tasty.besteffort.BestEffortTastyFormat.ERRORtype import scala.annotation.constructorOnly import scala.annotation.internal.sharable @@ -53,12 +55,14 @@ import scala.compiletime.uninitialized * @param posUnpicklerOpt the unpickler for positions, if it 
exists * @param commentUnpicklerOpt the unpickler for comments, if it exists * @param attributeUnpicklerOpt the unpickler for attributes, if it exists + * @param isBestEffortTasty decides whether to unpickle as a Best Effort TASTy */ class TreeUnpickler(reader: TastyReader, nameAtRef: NameTable, compilationUnitInfo: CompilationUnitInfo, posUnpicklerOpt: Option[PositionUnpickler], - commentUnpicklerOpt: Option[CommentUnpickler]) { + commentUnpicklerOpt: Option[CommentUnpickler], + isBestEffortTasty: Boolean = false) { import TreeUnpickler.* import tpd.* @@ -155,20 +159,15 @@ class TreeUnpickler(reader: TastyReader, if f == null then "" else s" in $f" def fail(ex: Throwable) = throw UnpicklingError(denot, where, ex) treeAtAddr(currentAddr) = - val traceCycles = CyclicReference.isTraced - try - if traceCycles then - CyclicReference.pushTrace("read the definition of ", denot.symbol, where) - atPhaseBeforeTransforms { - new TreeReader(reader).readIndexedDef()( - using ctx.withOwner(owner).withModeBits(mode).withSource(source)) - } - catch - case ex: CyclicReference => throw ex - case ex: AssertionError => fail(ex) - case ex: Exception => fail(ex) - finally - if traceCycles then CyclicReference.popTrace() + CyclicReference.trace(i"read the definition of ${denot.symbol}$where"): + try + atPhaseBeforeTransforms: + new TreeReader(reader).readIndexedDef()( + using ctx.withOwner(owner).withModeBits(mode).withSource(source)) + catch + case ex: CyclicReference => throw ex + case ex: AssertionError => fail(ex) + case ex: Exception => fail(ex) } class TreeReader(val reader: TastyReader) { @@ -412,9 +411,7 @@ class TreeUnpickler(reader: TastyReader, readType().appliedTo(until(end)(readType())) case TYPEBOUNDS => val lo = readType() - if nothingButMods(end) then - if lo.isMatch then MatchAlias(readVariances(lo)) - else TypeAlias(readVariances(lo)) + if nothingButMods(end) then AliasingBounds(readVariances(lo)) else val hi = readVariances(readType()) createNullableTypeBounds(lo, hi) 
@@ -444,6 +441,11 @@ class TreeUnpickler(reader: TastyReader, readTypeRef() match { case binder: LambdaType => binder.paramRefs(readNat()) } + case FLEXIBLEtype => + FlexibleType(readType()) + case _ if isBestEffortTasty => + goto(end) + new PreviousErrorType } assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}") result @@ -491,6 +493,9 @@ class TreeUnpickler(reader: TastyReader, typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType()) case BYNAMEtype => ExprType(readType()) + case ERRORtype => + if isBestEffortTasty then new PreviousErrorType + else throw new Error(s"Illegal ERRORtype in non Best Effort TASTy file") case _ => ConstantType(readConstant(tag)) } @@ -751,6 +756,7 @@ class TreeUnpickler(reader: TastyReader, case INVISIBLE => addFlag(Invisible) case TRANSPARENT => addFlag(Transparent) case INFIX => addFlag(Infix) + case TRACKED => addFlag(Tracked) case PRIVATEqualified => readByte() privateWithin = readWithin @@ -918,6 +924,8 @@ class TreeUnpickler(reader: TastyReader, val resType = if name == nme.CONSTRUCTOR then effectiveResultType(sym, paramss) + else if sym.isAllOf(Given | Method) && Feature.enabled(Feature.modularity) then + addParamRefinements(tpt.tpe, paramss) else tpt.tpe sym.info = methodType(paramss, resType) @@ -946,6 +954,7 @@ class TreeUnpickler(reader: TastyReader, val rhs = readTpt()(using localCtx) sym.info = new NoCompleter: + override def complete(denot: SymDenotation)(using Context): Unit = if !isBestEffortTasty then unsupported("complete") override def completerTypeParams(sym: Symbol)(using Context) = rhs.tpe.typeParams @@ -981,8 +990,8 @@ class TreeUnpickler(reader: TastyReader, if !sym.isType && !sym.is(ParamAccessor) then sym.info = ta.avoidPrivateLeaks(sym) - if (ctx.settings.YreadComments.value) { - assert(ctx.docCtx.isDefined, "`-Yread-docs` enabled, but no `docCtx` is set.") + if (ctx.settings.XreadComments.value) { + assert(ctx.docCtx.isDefined, "`-Xread-docs` enabled, but no `docCtx` is set.") 
commentUnpicklerOpt.foreach { commentUnpickler => val comment = commentUnpickler.commentAt(start) ctx.docCtx.get.addDocstring(tree.symbol, comment) @@ -1021,8 +1030,14 @@ class TreeUnpickler(reader: TastyReader, case nu: New => try nu.tpe finally goto(end) + case other if isBestEffortTasty => + try other.tpe + finally goto(end) case SHAREDterm => forkAt(readAddr()).readParentType() + case SELECT if isBestEffortTasty => + goto(readEnd()) + new PreviousErrorType /** Read template parents * @param withArgs if false, only read enough of parent trees to determine their type @@ -1063,7 +1078,7 @@ class TreeUnpickler(reader: TastyReader, } val parentReader = fork val parents = readParents(withArgs = false)(using parentCtx) - val parentTypes = parents.map(_.tpe.dealias) + val parentTypes = parents.map(_.tpe.dealiasKeepAnnots.separateRefinements(cls, null)) if cls.is(JavaDefined) && parentTypes.exists(_.derivesFrom(defn.JavaAnnotationClass)) then cls.setFlag(JavaAnnotation) val self = @@ -1123,6 +1138,7 @@ class TreeUnpickler(reader: TastyReader, }) defn.patchStdLibClass(cls) NamerOps.addConstructorProxies(cls) + NamerOps.addContextBoundCompanions(cls) setSpan(start, untpd.Template(constr, mappedParents, self, lazyStats) .withType(localDummy.termRef)) @@ -1246,6 +1262,7 @@ class TreeUnpickler(reader: TastyReader, case path: TermRef => ref(path) case path: ThisType => untpd.This(untpd.EmptyTypeIdent).withType(path) case path: ConstantType => Literal(path.value) + case path: ErrorType if isBestEffortTasty => TypeTree(path) } } @@ -1379,9 +1396,9 @@ class TreeUnpickler(reader: TastyReader, val fn = readTree() val args = until(end)(readTree()) if fn.symbol.isConstructor then constructorApply(fn, args) - else if fn.symbol == defn.QuotedRuntime_exprQuote then quotedExpr(fn, args) - else if fn.symbol == defn.QuotedRuntime_exprSplice then splicedExpr(fn, args) - else if fn.symbol == defn.QuotedRuntime_exprNestedSplice then nestedSpliceExpr(fn, args) + else if fn.symbol == 
defn.QuotedRuntime_exprQuote then quotedExpr(fn, args) // decode pre 3.5.0 encoding + else if fn.symbol == defn.QuotedRuntime_exprSplice then splicedExpr(fn, args) // decode pre 3.5.0 encoding + else if fn.symbol == defn.QuotedRuntime_exprNestedSplice then nestedSpliceExpr(fn, args) // decode pre 3.5.0 encoding else tpd.Apply(fn, args) case TYPEAPPLY => tpd.TypeApply(readTree(), until(end)(readTpt())) @@ -1503,7 +1520,7 @@ class TreeUnpickler(reader: TastyReader, val unapply = UnApply(fn, implicitArgs, argPats, patType) if fn.symbol == defn.QuoteMatching_ExprMatch_unapply || fn.symbol == defn.QuoteMatching_TypeMatch_unapply - then QuotePatterns.decode(unapply) + then QuotePatterns.decode(unapply) // decode pre 3.5.0 encoding else unapply case REFINEDtpt => val refineCls = symAtAddr.getOrElse(start, @@ -1544,8 +1561,6 @@ class TreeUnpickler(reader: TastyReader, // as the reduction of the match type definition! // // We also override the type, as that's what Typer does. - // The difference here is that a match type that reduces to a non-match type - // makes the TypeRef for that definition will have a TypeAlias info instead of a MatchAlias. 
tpt.overwriteType(tpt.tpe.normalized) tpt case TYPEBOUNDStpt => val lo = readTpt() val hi = if currentAddr == end then lo else readTpt() val alias = if currentAddr == end then EmptyTree else readTpt() createNullableTypeBoundsTree(lo, hi, alias) + case QUOTE => + Quote(readTree(), Nil).withBodyType(readType()) + case SPLICE => + Splice(readTree()).withType(readType()) + case QUOTEPATTERN => + val bodyReader = fork + skipTree() + val quotes = readTree() + val patType = readType() + val bindings = readStats(ctx.owner, end) + val body = bodyReader.readTree() // need bindings in scope, so needs to be read before + QuotePattern(bindings, body, quotes, patType) + case SPLICEPATTERN => + val pat = readTree() + val patType = readType() + val (targs, args) = until(end)(readTree()).span(_.isType) + assert(targs.isEmpty, "unexpected type arguments in SPLICEPATTERN") // `targs` will be needed for #18271. Until this feature is added they should be empty. + SplicePattern(pat, args, patType) case HOLE => readHole(end, isTerm = true) case _ => diff --git a/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala b/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala index c1bd6b6778fd..18c5ceb5f346 100644 --- a/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala +++ b/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala @@ -19,7 +19,7 @@ class IDEDecompilerDriver(val settings: List[String]) extends dotc.Driver { private val myInitCtx: Context = { val rootCtx = initCtx.fresh.addMode(Mode.Interactive | Mode.ReadPositions) - rootCtx.setSetting(rootCtx.settings.YreadComments, true) + rootCtx.setSetting(rootCtx.settings.XreadComments, true) rootCtx.setSetting(rootCtx.settings.YretainTrees, true) rootCtx.setSetting(rootCtx.settings.fromTasty, true) val ctx = setup(settings.toArray :+ "dummy.scala", rootCtx).get._2 diff --git a/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala 
b/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala index 979fae239e59..2e6b699b4e36 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala @@ -40,7 +40,7 @@ object Debug { val tastyFiles = Directory(fromSourcesOut).walk - .filter(x => x.isFile && "tasty".equalsIgnoreCase(x.extension)) + .filter(x => x.isFile && x.ext.isTasty) .map(_.toString) .toList diff --git a/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala b/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala index 74010b3f64d1..f13bcdf00b34 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala @@ -48,11 +48,11 @@ class ReadTasty extends Phase { if (cls.rootTree.isEmpty) None else { val attributes = unpickler.tastyAttributes - if attributes.isJava && !ctx.settings.YjavaTasty.value then - // filter out Java compilation units if -Yjava-tasty is not set + if attributes.isJava && !ctx.settings.XjavaTasty.value then + // filter out Java compilation units if -Xjava-tasty is not set None - else if attributes.isOutline && !ctx.settings.YallowOutlineFromTasty.value then - cannotUnpickle("it contains outline signatures and -Yallow-outline-from-tasty is not set.") + else if attributes.isOutline && !ctx.settings.XallowOutlineFromTasty.value then + cannotUnpickle("it contains outline signatures and -Xallow-outline-from-tasty is not set.") else val unit = CompilationUnit(cls, cls.rootTree, forceTrees = true) unit.pickled += (cls -> (() => unpickler.unpickler.bytes)) diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala index 98ab8e2b6226..d01f60571601 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala @@ -4,7 +4,7 @@ package fromtasty import scala.language.unsafeNulls -import io.{JarArchive, AbstractFile, Path} +import 
io.{JarArchive, AbstractFile, Path, FileExtension} import core.Contexts.* import core.Decorators.em import java.io.File @@ -19,14 +19,16 @@ class TASTYRun(comp: Compiler, ictx: Context) extends Run(comp, ictx) { val fromTastyIgnoreList = ctx.settings.YfromTastyIgnoreList.value.toSet // Resolve class names of tasty and jar files val classNames = files.flatMap { file => - file.extension match - case "jar" => + file.ext match + case FileExtension.Jar => JarArchive.open(Path(file.path), create = false).allFileNames() .map(_.stripPrefix("/")) // change paths from absolute to relative - .filter(e => Path.extension(e) == "tasty" && !fromTastyIgnoreList(e.replace("/", File.separator))) + .filter(e => Path.fileExtension(e).isTasty && !fromTastyIgnoreList(e.replace("/", File.separator))) .map(e => e.stripSuffix(".tasty").replace("/", ".")) .toList - case "tasty" => TastyFileUtil.getClassName(file) + case FileExtension.Tasty => TastyFileUtil.getClassName(file) + case FileExtension.Betasty if ctx.withBestEffortTasty => + TastyFileUtil.getClassName(file, withBestEffortTasty = true) case _ => report.error(em"File extension is not `tasty` or `jar`: ${file.path}") Nil diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala b/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala index bc04cc648a65..b1277accc621 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala @@ -6,6 +6,8 @@ import scala.language.unsafeNulls import dotty.tools.dotc.core.tasty.TastyClassName import dotty.tools.dotc.core.StdNames.nme.EMPTY_PACKAGE import dotty.tools.io.AbstractFile +import dotty.tools.dotc.classpath.FileUtils.hasTastyExtension +import dotty.tools.dotc.classpath.FileUtils.hasBetastyExtension object TastyFileUtil { /** Get the class path of a tasty file @@ -17,9 +19,10 @@ object TastyFileUtil { * ``` * then `getClassName("./out/foo/Foo.tasty") returns `Some("./out")` */ - def 
getClassPath(file: AbstractFile): Option[String] = - getClassName(file).map { className => - val classInPath = className.replace(".", java.io.File.separator) + ".tasty" + def getClassPath(file: AbstractFile, fromBestEffortTasty: Boolean = false): Option[String] = + getClassName(file, fromBestEffortTasty).map { className => + val extension = if (fromBestEffortTasty) then ".betasty" else ".tasty" + val classInPath = className.replace(".", java.io.File.separator) + extension file.path.replace(classInPath, "") } @@ -32,11 +35,11 @@ object TastyFileUtil { * ``` * then `getClassName("./out/foo/Foo.tasty") returns `Some("foo.Foo")` */ - def getClassName(file: AbstractFile): Option[String] = { + def getClassName(file: AbstractFile, withBestEffortTasty: Boolean = false): Option[String] = { assert(file.exists) - assert(file.extension == "tasty") + assert(file.hasTastyExtension || (withBestEffortTasty && file.hasBetastyExtension)) val bytes = file.toByteArray - val names = new TastyClassName(bytes).readName() + val names = new TastyClassName(bytes, file.hasBetastyExtension).readName() names.map { case (packageName, className) => val fullName = packageName match { case EMPTY_PACKAGE => s"${className.lastPart}" diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 8bd89a71fa50..7c79e972c126 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -237,7 +237,7 @@ class Inliner(val call: tpd.Tree)(using Context): if bindingFlags.is(Inline) && argIsBottom then newArg = Typed(newArg, TypeTree(formal.widenExpr)) // type ascribe RHS to avoid type errors in expansion. 
See i8612.scala if isByName then DefDef(boundSym, newArg) - else ValDef(boundSym, newArg) + else ValDef(boundSym, newArg, inferred = true) }.withSpan(boundSym.span) inlining.println(i"parameter binding: $binding, $argIsBottom") buf += binding @@ -315,11 +315,11 @@ class Inliner(val call: tpd.Tree)(using Context): case Super(qual, _) => qual case pre => pre val preLevel = classNestingLevel(inlinedMethod.owner) - if preLevel > level then outerSelect(pre, inlinedMethod.owner, preLevel - level, selfSym.info) + if preLevel > level then outerSelect(pre, inlinedMethod.owner.enclosingClass, preLevel - level, selfSym.info) else pre val binding = accountForOpaques( - ValDef(selfSym.asTerm, QuoteUtils.changeOwnerOfTree(rhs, selfSym)).withSpan(selfSym.span)) + ValDef(selfSym.asTerm, QuoteUtils.changeOwnerOfTree(rhs, selfSym), inferred = true).withSpan(selfSym.span)) bindingsBuf += binding inlining.println(i"proxy at $level: $selfSym = ${bindingsBuf.last}") lastSelf = selfSym @@ -368,7 +368,7 @@ class Inliner(val call: tpd.Tree)(using Context): RefinedType(parent, refinement._1, TypeAlias(refinement._2)) ) val refiningSym = newSym(InlineBinderName.fresh(), Synthetic, refinedType).asTerm - val refiningDef = ValDef(refiningSym, tpd.ref(ref).cast(refinedType)).withSpan(span) + val refiningDef = ValDef(refiningSym, tpd.ref(ref).cast(refinedType), inferred = true).withSpan(span) inlining.println(i"add opaque alias proxy $refiningDef for $ref in $tp") bindingsBuf += refiningDef opaqueProxies += ((ref, refiningSym.termRef)) @@ -834,7 +834,7 @@ class Inliner(val call: tpd.Tree)(using Context): override def typedSplice(tree: untpd.Splice, pt: Type)(using Context): Tree = super.typedSplice(tree, pt) match - case tree1 @ Splice(expr) if level == 0 && !hasInliningErrors => + case tree1 @ Splice(expr) if level == 0 && !hasInliningErrors && !ctx.usedBestEffortTasty => val expanded = expandMacro(expr, tree1.srcPos) transform.TreeChecker.checkMacroGeneratedTree(tree1, expanded) 
typedExpr(expanded) // Inline calls and constant fold code generated by the macro @@ -860,46 +860,71 @@ class Inliner(val call: tpd.Tree)(using Context): case _ => sel.tpe } val selType = if (sel.isEmpty) wideSelType else selTyped(sel) - reduceInlineMatch(sel, selType, cases.asInstanceOf[List[CaseDef]], this) match { - case Some((caseBindings, rhs0)) => - // drop type ascriptions/casts hiding pattern-bound types (which are now aliases after reducing the match) - // note that any actually necessary casts will be reinserted by the typing pass below - val rhs1 = rhs0 match { - case Block(stats, t) if t.span.isSynthetic => - t match { - case Typed(expr, _) => - Block(stats, expr) - case TypeApply(sel@Select(expr, _), _) if sel.symbol.isTypeCast => - Block(stats, expr) - case _ => - rhs0 + + /** Make an Inlined that has no bindings. */ + def flattenInlineBlock(tree: Tree): Tree = { + def inlineBlock(call: Tree, stats: List[Tree], expr: Tree): Block = + def inlinedTree(tree: Tree) = Inlined(call, Nil, tree).withSpan(tree.span) + val stats1 = stats.map: + case stat: ValDef => cpy.ValDef(stat)(rhs = inlinedTree(stat.rhs)) + case stat: DefDef => cpy.DefDef(stat)(rhs = inlinedTree(stat.rhs)) + case stat => inlinedTree(stat) + cpy.Block(tree)(stats1, flattenInlineBlock(inlinedTree(expr))) + + tree match + case tree @ Inlined(call, bindings, expr) if !bindings.isEmpty => + inlineBlock(call, bindings, expr) + case tree @ Inlined(call, Nil, Block(stats, expr)) => + inlineBlock(call, stats, expr) + case _ => + tree + } + + def reduceInlineMatchExpr(sel: Tree): Tree = flattenInlineBlock(sel) match + case Block(stats, expr) => + cpy.Block(sel)(stats, reduceInlineMatchExpr(expr)) + case _ => + reduceInlineMatch(sel, selType, cases.asInstanceOf[List[CaseDef]], this) match { + case Some((caseBindings, rhs0)) => + // drop type ascriptions/casts hiding pattern-bound types (which are now aliases after reducing the match) + // note that any actually necessary casts will be reinserted by 
the typing pass below + val rhs1 = rhs0 match { + case Block(stats, t) if t.span.isSynthetic => + t match { + case Typed(expr, _) => + Block(stats, expr) + case TypeApply(sel@Select(expr, _), _) if sel.symbol.isTypeCast => + Block(stats, expr) + case _ => + rhs0 + } + case _ => rhs0 } - case _ => rhs0 - } - val rhs2 = rhs1 match { - case Typed(expr, tpt) if rhs1.span.isSynthetic => constToLiteral(expr) - case _ => constToLiteral(rhs1) + val rhs2 = rhs1 match { + case Typed(expr, tpt) if rhs1.span.isSynthetic => constToLiteral(expr) + case _ => constToLiteral(rhs1) + } + val (usedBindings, rhs3) = dropUnusedDefs(caseBindings, rhs2) + val rhs = seq(usedBindings, rhs3) + inlining.println(i"""--- reduce: + |$tree + |--- to: + |$rhs""") + typedExpr(rhs, pt) + case None => + def guardStr(guard: untpd.Tree) = if (guard.isEmpty) "" else i" if $guard" + def patStr(cdef: untpd.CaseDef) = i"case ${cdef.pat}${guardStr(cdef.guard)}" + val msg = + if (tree.selector.isEmpty) + em"""cannot reduce summonFrom with + | patterns : ${tree.cases.map(patStr).mkString("\n ")}""" + else + em"""cannot reduce inline match with + | scrutinee: $sel : ${selType} + | patterns : ${tree.cases.map(patStr).mkString("\n ")}""" + errorTree(tree, msg) } - val (usedBindings, rhs3) = dropUnusedDefs(caseBindings, rhs2) - val rhs = seq(usedBindings, rhs3) - inlining.println(i"""--- reduce: - |$tree - |--- to: - |$rhs""") - typedExpr(rhs, pt) - case None => - def guardStr(guard: untpd.Tree) = if (guard.isEmpty) "" else i" if $guard" - def patStr(cdef: untpd.CaseDef) = i"case ${cdef.pat}${guardStr(cdef.guard)}" - val msg = - if (tree.selector.isEmpty) - em"""cannot reduce summonFrom with - | patterns : ${tree.cases.map(patStr).mkString("\n ")}""" - else - em"""cannot reduce inline match with - | scrutinee: $sel : ${selType} - | patterns : ${tree.cases.map(patStr).mkString("\n ")}""" - errorTree(tree, msg) - } + reduceInlineMatchExpr(sel) } override def newLikeThis(nestingLevel: Int): Typer = new 
InlineTyper(initialErrorCount, nestingLevel) @@ -1033,19 +1058,30 @@ class Inliner(val call: tpd.Tree)(using Context): } } - private def expandMacro(body: Tree, splicePos: SrcPos)(using Context) = { + private def expandMacro(body: Tree, splicePos: SrcPos)(using Context): Tree = { assert(level == 0) val inlinedFrom = enclosingInlineds.last val dependencies = macroDependencies(body)(using spliceContext) val suspendable = ctx.compilationUnit.isSuspendable + val printSuspensions = ctx.settings.XprintSuspension.value if dependencies.nonEmpty && !ctx.reporter.errorsReported then + val hints: mutable.ListBuffer[String] | Null = + if printSuspensions then mutable.ListBuffer.empty[String] else null for sym <- dependencies do if ctx.compilationUnit.source.file == sym.associatedFile then report.error(em"Cannot call macro $sym defined in the same source file", call.srcPos) - if (suspendable && ctx.settings.XprintSuspension.value) - report.echo(i"suspension triggered by macro call to ${sym.showLocated} in ${sym.associatedFile}", call.srcPos) + else if ctx.settings.YnoSuspendedUnits.value then + val addendum = ", suspension prevented by -Yno-suspended-units" + report.error(em"Cannot call macro $sym defined in the same compilation run$addendum", call.srcPos) + if suspendable && printSuspensions then + hints.nn += i"suspension triggered by macro call to ${sym.showLocated} in ${sym.associatedFile}" if suspendable then - ctx.compilationUnit.suspend() // this throws a SuspendException + if ctx.settings.YnoSuspendedUnits.value then + return ref(defn.Predef_undefined) + .withType(ErrorType(em"could not expand macro, suspended units are disabled by -Yno-suspended-units")) + .withSpan(splicePos.span) + else + ctx.compilationUnit.suspend(hints.nn.toList.mkString(", ")) // this throws a SuspendException val evaluatedSplice = inContext(quoted.MacroExpansion.context(inlinedFrom)) { Splicer.splice(body, splicePos, inlinedFrom.srcPos, MacroClassLoader.fromContext) diff --git 
a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala index 65792d09f88c..fffe87c3f57a 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala @@ -452,9 +452,8 @@ object Inlines: unrollTupleTypes(tail).map(head :: _) case tpe: TermRef if tpe.symbol == defn.EmptyTupleModule => Some(Nil) - case tpRef: TypeRef => tpRef.info match - case MatchAlias(alias) => unrollTupleTypes(alias.tryNormalize) - case _ => None + case tpe: AppliedType if tpe.isMatchAlias => + unrollTupleTypes(tpe.tryNormalize) case _ => None diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index 025a2022500d..7882d635f84a 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -71,10 +71,11 @@ object Completion: mode: Mode, rawPrefix: String, tpdPath: List[tpd.Tree], - untpdPath: List[untpd.Tree] + untpdPath: List[untpd.Tree], + customMatcher: Option[Name => Boolean] = None )(using Context): CompletionMap = val adjustedPath = typeCheckExtensionConstructPath(untpdPath, tpdPath, pos) - computeCompletions(pos, mode, rawPrefix, adjustedPath) + computeCompletions(pos, mode, rawPrefix, adjustedPath, untpdPath, customMatcher) /** * Inspect `path` to determine what kinds of symbols should be considered. @@ -86,26 +87,21 @@ object Completion: * * Otherwise, provide no completion suggestion. 
*/ - def completionMode(path: List[untpd.Tree], pos: SourcePosition): Mode = - - val completionSymbolKind: Mode = - path match - case GenericImportSelector(sel) => - if sel.imported.span.contains(pos.span) then Mode.ImportOrExport // import scala.@@ - else if sel.isGiven && sel.bound.span.contains(pos.span) then Mode.ImportOrExport - else Mode.None // import scala.{util => u@@} - case GenericImportOrExport(_) => Mode.ImportOrExport | Mode.Scope // import TrieMa@@ - case untpd.Literal(Constants.Constant(_: String)) :: _ => Mode.Term | Mode.Scope // literal completions - case (ref: untpd.RefTree) :: _ => - val maybeSelectMembers = if ref.isInstanceOf[untpd.Select] then Mode.Member else Mode.Scope - - if (ref.name.isTermName) Mode.Term | maybeSelectMembers - else if (ref.name.isTypeName) Mode.Type | maybeSelectMembers - else Mode.None - - case _ => Mode.None - - completionSymbolKind + def completionMode(path: List[untpd.Tree], pos: SourcePosition): Mode = path match + case GenericImportSelector(sel) => + if sel.imported.span.contains(pos.span) then Mode.ImportOrExport // import scala.@@ + else if sel.isGiven && sel.bound.span.contains(pos.span) then Mode.ImportOrExport + else Mode.None // import scala.{util => u@@} + case GenericImportOrExport(_) => Mode.ImportOrExport | Mode.Scope // import TrieMa@@ + case untpd.Literal(Constants.Constant(_: String)) :: _ => Mode.Term | Mode.Scope // literal completions + case (ref: untpd.RefTree) :: _ => + val maybeSelectMembers = if ref.isInstanceOf[untpd.Select] then Mode.Member else Mode.Scope + + if (ref.name.isTermName) Mode.Term | maybeSelectMembers + else if (ref.name.isTypeName) Mode.Type | maybeSelectMembers + else Mode.None + + case _ => Mode.None /** When dealing with in varios palces we check to see if they are * due to incomplete backticks. 
If so, we ensure we get the full prefix @@ -130,7 +126,7 @@ object Completion: def completionPrefix(path: List[untpd.Tree], pos: SourcePosition)(using Context): String = def fallback: Int = var i = pos.point - 1 - while i >= 0 && Chars.isIdentifierPart(pos.source.content()(i)) do i -= 1 + while i >= 0 && Character.isUnicodeIdentifierPart(pos.source.content()(i)) do i -= 1 i + 1 path match @@ -190,7 +186,12 @@ object Completion: )(using Context): List[tpd.Tree] = untpdPath.collectFirst: case untpd.ExtMethods(paramss, _) => - val enclosingParam = paramss.flatten.find(_.span.contains(pos.span)) + val enclosingParam = paramss.flatten + .find(_.span.contains(pos.span)) + .flatMap: + case untpd.TypeDef(_, bounds: untpd.ContextBounds) => bounds.cxBounds.find(_.span.contains(pos.span)) + case other => Some(other) + enclosingParam.map: param => ctx.typer.index(paramss.flatten) val typedEnclosingParam = ctx.typer.typed(param) @@ -198,11 +199,16 @@ object Completion: .flatten.getOrElse(tpdPath) private def computeCompletions( - pos: SourcePosition, mode: Mode, rawPrefix: String, adjustedPath: List[tpd.Tree] + pos: SourcePosition, + mode: Mode, rawPrefix: String, + adjustedPath: List[tpd.Tree], + untpdPath: List[untpd.Tree], + matches: Option[Name => Boolean] )(using Context): CompletionMap = val hasBackTick = rawPrefix.headOption.contains('`') val prefix = if hasBackTick then rawPrefix.drop(1) else rawPrefix - val completer = new Completer(mode, prefix, pos) + val matches0 = matches.getOrElse(_.startsWith(prefix)) + val completer = new Completer(mode, pos, untpdPath, matches0) val result = adjustedPath match // Ignore synthetic select from `This` because in code it was `Ident` @@ -214,7 +220,6 @@ object Completion: case _ => completer.scopeCompletions interactiv.println(i"""completion info with pos = $pos, - | prefix = ${completer.prefix}, | term = ${completer.mode.is(Mode.Term)}, | type = ${completer.mode.is(Mode.Type)}, | scope = ${completer.mode.is(Mode.Scope)}, @@ -278,6 
+283,43 @@ object Completion: if denot.isType then denot.symbol.showFullName else denot.info.widenTermRefExpr.show + + def isInNewContext(untpdPath: List[untpd.Tree]): Boolean = + untpdPath match + case _ :: untpd.New(selectOrIdent: (untpd.Select | untpd.Ident)) :: _ => true + case _ => false + + /** Include in completion sets only symbols that + * 1. is not absent (info is not NoType) + * 2. are not a primary constructor, + * 3. have an existing source symbol, + * 4. are the module class in case of packages, + * 5. are mutable accessors, to exclude setters for `var`, + * 6. symbol is not a package object + * 7. symbol is not an artifact of the compiler + * 8. symbol is not a constructor proxy module when in type completion mode + * 9. have same term/type kind as name prefix given so far + */ + def isValidCompletionSymbol(sym: Symbol, completionMode: Mode, isNew: Boolean)(using Context): Boolean = + + lazy val isEnum = sym.is(Enum) || + (sym.companionClass.exists && sym.companionClass.is(Enum)) + + sym.exists && + !sym.isAbsent() && + !sym.isPrimaryConstructor && + sym.sourceSymbol.exists && + (!sym.is(Package) || sym.is(ModuleClass)) && + !sym.isAllOf(Mutable | Accessor) && + !sym.isPackageObject && + !sym.is(Artifact) && + !(completionMode.is(Mode.Type) && sym.isAllOf(ConstructorProxyModule)) && + !(isNew && isEnum) && + ( + (completionMode.is(Mode.Term) && (sym.isTerm || sym.is(ModuleClass)) + || (completionMode.is(Mode.Type) && (sym.isType || sym.isStableMember))) + ) + given ScopeOrdering(using Context): Ordering[Seq[SingleDenotation]] with val order = List(defn.ScalaPredefModuleClass, defn.ScalaPackageClass, defn.JavaLangPackageClass) @@ -290,13 +332,13 @@ object Completion: /** Computes code completions depending on the context in which completion is requested * @param mode Should complete names of terms, types or both - * @param prefix The prefix that all suggested completions should start with * @param pos Cursor position where completion was requested + * 
@param matches Function taking name used to filter completions * * For the results of all `xyzCompletions` methods term names and type names are always treated as different keys in the same map * and they never conflict with each other. */ - class Completer(val mode: Mode, val prefix: String, pos: SourcePosition): + class Completer(val mode: Mode, pos: SourcePosition, untpdPath: List[untpd.Tree], matches: Name => Boolean): /** Completions for terms and types that are currently in scope: * the members of the current class, local definitions and the symbols that have been imported, * recursively adding completions from outer scopes. @@ -503,7 +545,7 @@ object Completion: // There are four possible ways for an extension method to be applicable // 1. The extension method is visible under a simple name, by being defined or inherited or imported in a scope enclosing the reference. - val termCompleter = new Completer(Mode.Term, prefix, pos) + val termCompleter = new Completer(Mode.Term, pos, untpdPath, matches) val extMethodsInScope = termCompleter.scopeCompletions.toList.flatMap: case (name, denots) => denots.collect: case d: SymDenotation if d.isTerm && d.termRef.symbol.is(Extension) => (d.termRef, name.asTermName) @@ -530,35 +572,16 @@ object Completion: } extMethodsWithAppliedReceiver.groupByName + lazy val isNew: Boolean = isInNewContext(untpdPath) + /** Include in completion sets only symbols that - * 1. start with given name prefix, and - * 2. is not absent (info is not NoType) - * 3. are not a primary constructor, - * 4. have an existing source symbol, - * 5. are the module class in case of packages, - * 6. are mutable accessors, to exclude setters for `var`, - * 7. symbol is not a package object - * 8. symbol is not an artifact of the compiler - * 9. have same term/type kind as name prefix given so far + * 1. match the filter method, + * 2. 
satisfy [[Completion.isValidCompletionSymbol]] */ private def include(denot: SingleDenotation, nameInScope: Name)(using Context): Boolean = - val sym = denot.symbol - - - nameInScope.startsWith(prefix) && - sym.exists && + matches(nameInScope) && completionsFilter(NoType, nameInScope) && - !sym.isAbsent() && - !sym.isPrimaryConstructor && - sym.sourceSymbol.exists && - (!sym.is(Package) || sym.is(ModuleClass)) && - !sym.isAllOf(Mutable | Accessor) && - !sym.isPackageObject && - !sym.is(Artifact) && - ( - (mode.is(Mode.Term) && (sym.isTerm || sym.is(ModuleClass)) - || (mode.is(Mode.Type) && (sym.isType || sym.isStableMember))) - ) + isValidCompletionSymbol(denot.symbol, mode, isNew) private def extractRefinements(site: Type)(using Context): Seq[SingleDenotation] = site match @@ -605,7 +628,6 @@ object Completion: private def implicitConversionTargets(qual: tpd.Tree)(using Context): Set[SearchSuccess] = { val typer = ctx.typer val conversions = new typer.ImplicitSearch(defn.AnyType, qual, pos.span).allImplicits - conversions.map(_.tree.typeOpt) interactiv.println(i"implicit conversion targets considered: ${conversions.toList}%, %") conversions diff --git a/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala b/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala index 8f42c62cb3b0..673874ae2769 100644 --- a/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala +++ b/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala @@ -33,8 +33,8 @@ class InteractiveDriver(val settings: List[String]) extends Driver { private val myInitCtx: Context = { val rootCtx = initCtx.fresh.addMode(Mode.ReadPositions).addMode(Mode.Interactive) rootCtx.setSetting(rootCtx.settings.YretainTrees, true) - rootCtx.setSetting(rootCtx.settings.YcookComments, true) - rootCtx.setSetting(rootCtx.settings.YreadComments, true) + rootCtx.setSetting(rootCtx.settings.XcookComments, true) + rootCtx.setSetting(rootCtx.settings.XreadComments, true) val ctx = 
setup(settings.toArray, rootCtx) match case Some((_, ctx)) => ctx case None => rootCtx diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index addd54df9d69..e28ba5fd669e 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -62,7 +62,7 @@ object Parsers { case ExtensionFollow // extension clause, following extension parameter def isClass = // owner is a class - this == Class || this == CaseClass + this == Class || this == CaseClass || this == Given def takesOnlyUsingClauses = // only using clauses allowed for this owner this == Given || this == ExtensionFollow def acceptsVariance = @@ -646,6 +646,14 @@ object Parsers { ts.toList else leading :: Nil + def maybeNamed(op: () => Tree): () => Tree = () => + if isIdent && in.lookahead.token == EQUALS && in.featureEnabled(Feature.namedTuples) then + atSpan(in.offset): + val name = ident() + in.nextToken() + NamedArg(name, op()) + else op() + def inSepRegion[T](f: Region => Region)(op: => T): T = val cur = in.currentRegion in.currentRegion = f(cur) @@ -814,6 +822,8 @@ object Parsers { * 6. the opening brace does not follow a `=>`. The reason for this condition is that * rewriting back to braces does not work after `=>` (since in most cases braces are omitted * after a `=>` it would be annoying if braces were inserted). + * 7. 
not a code block being the input to a direct symbolic function call `inst method {\n expr \n}` cannot + * become `inst method :\n expr` for a fully symbolic method */ def bracesToIndented[T](body: => T, rewriteWithColon: Boolean): T = { val underColonSyntax = possibleColonOffset == in.lastOffset @@ -827,10 +837,28 @@ object Parsers { } var canRewrite = allBraces(in.currentRegion) && // test (1) !testChars(in.lastOffset - 3, " =>") // test(6) + + def isStartOfSymbolicFunction: Boolean = + opStack.headOption.exists { x => + val bq = x.operator.isBackquoted + val op = x.operator.name.toSimpleName.decode.forall { + Chars.isOperatorPart + } + val loc = startOpening < x.offset && x.offset < endOpening + val res = !bq && op && loc + res + } val t = enclosed(LBRACE, { canRewrite &= in.isAfterLineEnd // test (2) val curOffset = in.offset - try body + try { + val bodyResolved = body + bodyResolved match + case x:(Match | Block) => + canRewrite &= !isStartOfSymbolicFunction // test (7) + case _ => + bodyResolved + } finally { canRewrite &= in.isAfterLineEnd && in.offset != curOffset // test (3)(4) } @@ -948,12 +976,14 @@ object Parsers { * i.e. an identifier followed by type and value parameters, followed by `:`? * @pre The current token is an identifier */ - def followingIsGivenSig() = + def followingIsOldStyleGivenSig() = val lookahead = in.LookaheadScanner() if lookahead.isIdent then lookahead.nextToken() + var paramsSeen = false def skipParams(): Unit = if lookahead.token == LPAREN || lookahead.token == LBRACKET then + paramsSeen = true lookahead.skipParens() skipParams() else if lookahead.isNewLine then @@ -961,6 +991,16 @@ object Parsers { skipParams() skipParams() lookahead.isColon + && { + !in.featureEnabled(Feature.modularity) + || { // with modularity language import, a `:` at EOL after an identifier represents a single identifier given + // Example: + // given C: + // def f = ... 
+ lookahead.nextToken() + !lookahead.isAfterLineEnd + } + } def followingIsExtension() = val next = in.lookahead.token @@ -1594,7 +1634,7 @@ object Parsers { imods &~= Given syntaxError(em"context function types require at least one parameter", paramSpan) FunctionWithMods(params, resultType, imods, erasedArgs.toList) - else if !ctx.settings.YkindProjector.isDefault then + else if !ctx.settings.XkindProjector.isDefault then val (newParams :+ newResultType, tparams) = replaceKindProjectorPlaceholders(params :+ resultType): @unchecked lambdaAbstract(tparams, Function(newParams, newResultType)) else @@ -1624,7 +1664,14 @@ object Parsers { && in.featureEnabled(Feature.into) && canStartTypeTokens.contains(in.lookahead.token) - var isValParamList = false + def convertToElem(t: Tree): Tree = t match + case ByNameTypeTree(t1) => + syntaxError(ByNameParameterNotSupported(t), t.span) + t1 + case ValDef(name, tpt, _) => + NamedArg(name, convertToElem(tpt)).withSpan(t.span) + case _ => t + if in.token == LPAREN then in.nextToken() if in.token == RPAREN then @@ -1640,7 +1687,6 @@ object Parsers { in.currentRegion.withCommasExpected: funArgType() match case Ident(name) if name != tpnme.WILDCARD && in.isColon => - isValParamList = true def funParam(start: Offset, mods: Modifiers) = atSpan(start): addErased() @@ -1678,12 +1724,13 @@ object Parsers { cpy.Function(arg)(args, sanitize(res)) case arg => arg - val args1 = args.mapConserve(sanitize) - if isValParamList || in.isArrow || isPureArrow then + + if in.isArrow || isPureArrow || erasedArgs.contains(true) then functionRest(args) else - val tuple = atSpan(start)(makeTupleOrParens(args1)) + val tuple = atSpan(start): + makeTupleOrParens(args.mapConserve(convertToElem)) typeRest: infixTypeRest: refinedTypeRest: @@ -1721,7 +1768,7 @@ object Parsers { val isVarianceAnnotated = name.startsWith("+") || name.startsWith("-") // We remove the variance marker from the name without passing along the specified variance at all // The real 
variance will be inferred at a later stage but may contradict the variance specified, - // This is ok, because `-Ykind-projector` is for cross-compiling existing Scala 2 code, not for writing new code, + // This is ok, because `-Xkind-projector` is for cross-compiling existing Scala 2 code, not for writing new code, // we may assume that variance annotations have already been checked by the Scala 2 compiler. val unannotatedName = if (isVarianceAnnotated) name.mapLast(_.drop(1)) else name TypeDef(unannotatedName, WildcardTypeBoundsTree()).withFlags(Param) @@ -1738,7 +1785,7 @@ object Parsers { Ident(name) } - val uscores = ctx.settings.YkindProjector.value == "underscores" + val uscores = ctx.settings.XkindProjector.value == "underscores" val newParams = params.mapConserve { case param @ Ident(tpnme.raw.STAR | tpnme.raw.MINUS_STAR | tpnme.raw.PLUS_STAR) => addParam() case param @ Ident(tpnme.USCOREkw | tpnme.raw.MINUS_USCORE | tpnme.raw.PLUS_USCORE) if uscores => addParam() @@ -1771,9 +1818,11 @@ object Parsers { */ def infixType(): Tree = infixTypeRest(refinedType()) - def infixTypeRest(t: Tree): Tree = - infixOps(t, canStartInfixTypeTokens, refinedTypeFn, Location.ElseWhere, ParseKind.Type, - isOperator = !followingIsVararg() && !isPureArrow + def infixTypeRest(t: Tree, operand: Location => Tree = refinedTypeFn): Tree = + infixOps(t, canStartInfixTypeTokens, operand, Location.ElseWhere, ParseKind.Type, + isOperator = !followingIsVararg() + && !isPureArrow + && !(isIdent(nme.as) && in.featureEnabled(Feature.modularity)) && nextCanFollowOperator(canStartInfixTypeTokens)) /** RefinedType ::= WithType {[nl] Refinement} [`^` CaptureSet] @@ -1837,6 +1886,10 @@ object Parsers { */ def annotType(): Tree = annotTypeRest(simpleType()) + /** AnnotType1 ::= SimpleType1 {Annotation} + */ + def annotType1(): Tree = annotTypeRest(simpleType1()) + def annotTypeRest(t: Tree): Tree = if (in.token == AT) annotTypeRest(atSpan(startOffset(t)) { @@ -1924,7 +1977,7 @@ object Parsers { 
if isSimpleLiteral then SingletonTypeTree(simpleLiteral()) else if in.token == USCORE then - if ctx.settings.YkindProjector.value == "underscores" then + if ctx.settings.XkindProjector.value == "underscores" then val start = in.skipToken() Ident(tpnme.USCOREkw).withSpan(Span(start, in.lastOffset, start)) else @@ -1940,7 +1993,7 @@ object Parsers { typeBounds().withSpan(Span(start, in.lastOffset, start)) // Allow symbols -_ and +_ through for compatibility with code written using kind-projector in Scala 3 underscore mode. // While these signify variant type parameters in Scala 2 + kind-projector, we ignore their variance markers since variance is inferred. - else if (isIdent(nme.MINUS) || isIdent(nme.PLUS)) && in.lookahead.token == USCORE && ctx.settings.YkindProjector.value == "underscores" then + else if (isIdent(nme.MINUS) || isIdent(nme.PLUS)) && in.lookahead.token == USCORE && ctx.settings.XkindProjector.value == "underscores" then val identName = in.name.toTypeName ++ nme.USCOREkw val start = in.skipToken() in.nextToken() @@ -1959,6 +2012,7 @@ object Parsers { * | Singleton `.' id * | Singleton `.' type * | ‘(’ ArgTypes ‘)’ + * | ‘(’ NamesAndTypes ‘)’ * | Refinement * | TypeSplice -- deprecated syntax (since 3.0.0) * | SimpleType1 TypeArgs @@ -1967,7 +2021,7 @@ object Parsers { def simpleType1() = simpleTypeRest { if in.token == LPAREN then atSpan(in.offset) { - makeTupleOrParens(inParensWithCommas(argTypes(namedOK = false, wildOK = true))) + makeTupleOrParens(inParensWithCommas(argTypes(namedOK = false, wildOK = true, tupleOK = true))) } else if in.token == LBRACE then atSpan(in.offset) { RefinedTypeTree(EmptyTree, refinement(indentOK = false)) } @@ -1992,7 +2046,7 @@ object Parsers { val applied = rejectWildcardType(t) val args = typeArgs(namedOK = false, wildOK = true) - if (!ctx.settings.YkindProjector.isDefault) { + if (!ctx.settings.XkindProjector.isDefault) { def fail(): Tree = { syntaxError( em"λ requires a single argument of the form X => ... 
or (X, Y) => ...", @@ -2024,7 +2078,7 @@ object Parsers { } }) case _ => - if (!ctx.settings.YkindProjector.isDefault) { + if (!ctx.settings.XkindProjector.isDefault) { t match { case Tuple(params) => val (newParams, tparams) = replaceKindProjectorPlaceholders(params) @@ -2050,32 +2104,33 @@ object Parsers { /** ArgTypes ::= Type {`,' Type} * | NamedTypeArg {`,' NamedTypeArg} * NamedTypeArg ::= id `=' Type + * NamesAndTypes ::= NameAndType {‘,’ NameAndType} + * NameAndType ::= id ':' Type */ - def argTypes(namedOK: Boolean, wildOK: Boolean): List[Tree] = { - - def argType() = { + def argTypes(namedOK: Boolean, wildOK: Boolean, tupleOK: Boolean): List[Tree] = + def argType() = val t = typ() - if (wildOK) t else rejectWildcardType(t) - } + if wildOK then t else rejectWildcardType(t) - def namedTypeArg() = { - val name = ident() - accept(EQUALS) - NamedArg(name.toTypeName, argType()) - } + def namedArgType() = + atSpan(in.offset): + val name = ident() + accept(EQUALS) + NamedArg(name.toTypeName, argType()) - if (namedOK && in.token == IDENTIFIER) - in.currentRegion.withCommasExpected { - argType() match { - case Ident(name) if in.token == EQUALS => - in.nextToken() - commaSeparatedRest(NamedArg(name, argType()), () => namedTypeArg()) - case firstArg => - commaSeparatedRest(firstArg, () => argType()) - } - } - else commaSeparated(() => argType()) - } + def namedElem() = + atSpan(in.offset): + val name = ident() + acceptColon() + NamedArg(name, argType()) + + if namedOK && isIdent && in.lookahead.token == EQUALS then + commaSeparated(() => namedArgType()) + else if tupleOK && isIdent && in.lookahead.isColon && in.featureEnabled(Feature.namedTuples) then + commaSeparated(() => namedElem()) + else + commaSeparated(() => argType()) + end argTypes def paramTypeOf(core: () => Tree): Tree = if in.token == ARROW || isPureArrow(nme.PUREARROW) then @@ -2122,7 +2177,7 @@ object Parsers { * NamedTypeArgs ::= `[' NamedTypeArg {`,' NamedTypeArg} `]' */ def typeArgs(namedOK: Boolean, 
wildOK: Boolean): List[Tree] = - inBracketsWithCommas(argTypes(namedOK, wildOK)) + inBracketsWithCommas(argTypes(namedOK, wildOK, tupleOK = false)) /** Refinement ::= `{' RefineStatSeq `}' */ @@ -2141,20 +2196,33 @@ object Parsers { if (in.token == tok) { in.nextToken(); toplevelTyp() } else EmptyTree - /** TypeParamBounds ::= TypeBounds {`<%' Type} {`:' Type} + /** TypeAndCtxBounds ::= TypeBounds [`:` ContextBounds] */ - def typeParamBounds(pname: TypeName): Tree = { + def typeAndCtxBounds(pname: TypeName): Tree = { val t = typeBounds() val cbs = contextBounds(pname) if (cbs.isEmpty) t else atSpan((t.span union cbs.head.span).start) { ContextBounds(t, cbs) } } + /** ContextBound ::= Type [`as` id] */ + def contextBound(pname: TypeName): Tree = + val t = toplevelTyp() + val ownName = + if isIdent(nme.as) && in.featureEnabled(Feature.modularity) then + in.nextToken() + ident() + else EmptyTermName + ContextBoundTypeTree(t, pname, ownName) + + /** ContextBounds ::= ContextBound | `{` ContextBound {`,` ContextBound} `}` + */ def contextBounds(pname: TypeName): List[Tree] = if in.isColon then - atSpan(in.skipToken()) { - AppliedTypeTree(toplevelTyp(), Ident(pname)) - } :: contextBounds(pname) + in.nextToken() + if in.token == LBRACE && in.featureEnabled(Feature.modularity) + then inBraces(commaSeparated(() => contextBound(pname))) + else contextBound(pname) :: contextBounds(pname) else if in.token == VIEWBOUND then report.errorOrMigrationWarning( em"view bounds `<%' are no longer supported, use a context bound `:' instead", @@ -2699,7 +2767,9 @@ object Parsers { } /** ExprsInParens ::= ExprInParens {`,' ExprInParens} + * | NamedExprInParens {‘,’ NamedExprInParens} * Bindings ::= Binding {`,' Binding} + * NamedExprInParens ::= id '=' ExprInParens */ def exprsInParensOrBindings(): List[Tree] = if in.token == RPAREN then Nil @@ -2709,7 +2779,7 @@ object Parsers { if isErasedKw then isFormalParams = true if isFormalParams then binding(Modifiers()) else - val t = 
exprInParens() + val t = maybeNamed(exprInParens)() if t.isInstanceOf[ValDef] then isFormalParams = true t commaSeparatedRest(exprOrBinding(), exprOrBinding) @@ -3063,7 +3133,7 @@ object Parsers { * | Literal * | Quoted * | XmlPattern - * | `(' [Patterns] `)' + * | `(' [Patterns | NamedPatterns] `)' * | SimplePattern1 [TypeArgs] [ArgumentPatterns] * | ‘given’ RefinedType * SimplePattern1 ::= SimpleRef @@ -3114,9 +3184,12 @@ object Parsers { p /** Patterns ::= Pattern [`,' Pattern] + * | NamedPattern {‘,’ NamedPattern} + * NamedPattern ::= id '=' Pattern */ def patterns(location: Location = Location.InPattern): List[Tree] = - commaSeparated(() => pattern(location)) + commaSeparated(maybeNamed(() => pattern(location))) + // check that patterns are all named or all unnamed is done at desugaring def patternsOpt(location: Location = Location.InPattern): List[Tree] = if (in.token == RPAREN) Nil else patterns(location) @@ -3147,6 +3220,7 @@ object Parsers { case nme.open => Mod.Open() case nme.transparent => Mod.Transparent() case nme.infix => Mod.Infix() + case nme.tracked => Mod.Tracked() } } @@ -3213,7 +3287,8 @@ object Parsers { * | AccessModifier * | override * | opaque - * LocalModifier ::= abstract | final | sealed | open | implicit | lazy | inline | transparent | infix | erased + * LocalModifier ::= abstract | final | sealed | open | implicit | lazy | erased | + * inline | transparent | infix */ def modifiers(allowed: BitSet = modifierTokens, start: Modifiers = Modifiers()): Modifiers = { @tailrec @@ -3330,7 +3405,7 @@ object Parsers { val isAbstractOwner = paramOwner == ParamOwner.Type || paramOwner == ParamOwner.TypeParam val start = in.offset var mods = annotsAsMods() | Param - if paramOwner == ParamOwner.Class || paramOwner == ParamOwner.CaseClass then + if paramOwner.isClass then mods |= PrivateLocal if isIdent(nme.raw.PLUS) && checkVarianceOK() then mods |= Covariant @@ -3344,7 +3419,7 @@ object Parsers { } else ident().toTypeName val hkparams = 
typeParamClauseOpt(ParamOwner.Type) - val bounds = if (isAbstractOwner) typeBounds() else typeParamBounds(name) + val bounds = if (isAbstractOwner) typeBounds() else typeAndCtxBounds(name) TypeDef(name, lambdaAbstract(hkparams, bounds)).withMods(mods) } } @@ -3366,8 +3441,8 @@ object Parsers { /** ClsTermParamClause ::= ‘(’ ClsParams ‘)’ | UsingClsTermParamClause * UsingClsTermParamClause::= ‘(’ ‘using’ [‘erased’] (ClsParams | ContextTypes) ‘)’ * ClsParams ::= ClsParam {‘,’ ClsParam} - * ClsParam ::= {Annotation} [{Modifier} (‘val’ | ‘var’)] Param - * + * ClsParam ::= {Annotation} + * [{Modifier | ‘tracked’} (‘val’ | ‘var’)] Param * TypelessClause ::= DefTermParamClause * | UsingParamClause * @@ -3403,6 +3478,8 @@ object Parsers { if isErasedKw then mods = addModifier(mods) if paramOwner.isClass then + if isIdent(nme.tracked) && in.featureEnabled(Feature.modularity) && !in.lookahead.isColon then + mods = addModifier(mods) mods = addFlag(modifiers(start = mods), ParamAccessor) mods = if in.token == VAL then @@ -3465,22 +3542,27 @@ object Parsers { paramMods() if paramOwner.takesOnlyUsingClauses && !impliedMods.is(Given) then syntaxError(em"`using` expected") - val (firstParamMod, isParams) = + val (firstParamMod, paramsAreNamed) = var mods = EmptyModifiers if in.lookahead.isColon then (mods, true) else if isErased then mods = addModifier(mods) - val isParams = + val paramsAreNamed = !impliedMods.is(Given) || startParamTokens.contains(in.token) - || isIdent && (in.name == nme.inline || in.lookahead.isColon) - (mods, isParams) - (if isParams then commaSeparated(() => param()) - else contextTypes(paramOwner, numLeadParams, impliedMods)) match { + || isIdent + && (in.name == nme.inline // inline starts a name binding + || in.name == nme.tracked // tracked starts a name binding under x.modularity + && in.featureEnabled(Feature.modularity) + || in.lookahead.isColon) // a following `:` starts a name binding + (mods, paramsAreNamed) + val params = + if paramsAreNamed then 
commaSeparated(() => param()) + else contextTypes(paramOwner, numLeadParams, impliedMods) + params match case Nil => Nil case (h :: t) => h.withAddedFlags(firstParamMod.flags) :: t - } checkVarArgsRules(clause) clause } @@ -3852,14 +3934,16 @@ object Parsers { argumentExprss(mkApply(Ident(nme.CONSTRUCTOR), argumentExprs())) } - /** TypeDef ::= id [TypeParamClause] {FunParamClause} TypeBounds [‘=’ Type] + /** TypeDef ::= id [TypeParamClause] {FunParamClause} TypeAndCtxBounds [‘=’ Type] */ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = { newLinesOpt() atSpan(start, nameStart) { val nameIdent = typeIdent() + val tname = nameIdent.name.asTypeName val tparams = typeParamClauseOpt(ParamOwner.Type) val vparamss = funParamClauses() + def makeTypeDef(rhs: Tree): Tree = { val rhs1 = lambdaAbstractAll(tparams :: vparamss, rhs) val tdef = TypeDef(nameIdent.name.toTypeName, rhs1) @@ -3867,36 +3951,37 @@ object Parsers { tdef.pushAttachment(Backquoted, ()) finalizeDef(tdef, mods, start) } + in.token match { case EQUALS => in.nextToken() makeTypeDef(toplevelTyp()) case SUBTYPE | SUPERTYPE => - val bounds = typeBounds() - if (in.token == EQUALS) { - val eqOffset = in.skipToken() - var rhs = toplevelTyp() - rhs match { - case mtt: MatchTypeTree => - bounds match { - case TypeBoundsTree(EmptyTree, upper, _) => - rhs = MatchTypeTree(upper, mtt.selector, mtt.cases) - case _ => - syntaxError(em"cannot combine lower bound and match type alias", eqOffset) - } - case _ => - if mods.is(Opaque) then - rhs = TypeBoundsTree(bounds.lo, bounds.hi, rhs) - else - syntaxError(em"cannot combine bound and alias", eqOffset) - } - makeTypeDef(rhs) - } - else makeTypeDef(bounds) + typeAndCtxBounds(tname) match + case bounds: TypeBoundsTree if in.token == EQUALS => + val eqOffset = in.skipToken() + var rhs = toplevelTyp() + rhs match { + case mtt: MatchTypeTree => + bounds match { + case TypeBoundsTree(EmptyTree, upper, _) => + rhs = MatchTypeTree(upper, mtt.selector, mtt.cases) + case _ => + 
syntaxError(em"cannot combine lower bound and match type alias", eqOffset) + } + case _ => + if mods.is(Opaque) then + rhs = TypeBoundsTree(bounds.lo, bounds.hi, rhs) + else + syntaxError(em"cannot combine bound and alias", eqOffset) + } + makeTypeDef(rhs) + case bounds => makeTypeDef(bounds) case SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | OUTDENT | EOF => - makeTypeDef(typeBounds()) - case _ if (staged & StageKind.QuotedPattern) != 0 => - makeTypeDef(typeBounds()) + makeTypeDef(typeAndCtxBounds(tname)) + case _ if (staged & StageKind.QuotedPattern) != 0 + || in.featureEnabled(Feature.modularity) && in.isColon => + makeTypeDef(typeAndCtxBounds(tname)) case _ => syntaxErrorOrIncomplete(ExpectedTypeBoundOrEquals(in.token)) return EmptyTree // return to avoid setting the span to EmptyTree @@ -4050,13 +4135,41 @@ object Parsers { syntaxError(em"extension clause can only define methods", stat.span) } - /** GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) - * GivenSig ::= [id] [DefTypeParamClause] {UsingParamClauses} ‘:’ + /** GivenDef ::= OldGivenDef | NewGivenDef + * OldGivenDef ::= [OldGivenSig] (GivenType [‘=’ Expr] | StructuralInstance) + * OldGivenSig ::= [id] [DefTypeParamClause] {UsingParamClauses} ‘:’ + * StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] + * + * NewGivenDef ::= [GivenConditional '=>'] NewGivenSig + * GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} + * NewGivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) + * | ConstrApps ['as' id] TemplateBody + * + * GivenType ::= AnnotType1 {id [nl] AnnotType1} */ def givenDef(start: Offset, mods: Modifiers, givenMod: Mod) = atSpan(start, nameStart) { var mods1 = addMod(mods, givenMod) val nameStart = in.offset - val name = if isIdent && followingIsGivenSig() then ident() else EmptyTermName + var name = if isIdent && followingIsOldStyleGivenSig() then ident() else EmptyTermName + var newSyntaxAllowed = 
in.featureEnabled(Feature.modularity) + + def moreConstrApps() = + if newSyntaxAllowed && in.token == COMMA then + in.nextToken() + constrApps() + else // need to be careful with last `with` + withConstrApps() + + // Adjust parameter modifiers so that they are now parameters of a method + // (originally, we created class parameters) + // TODO: syntax.md should be adjusted to reflect the difference that + // parameters of an alias given cannot be vals. + def adjustDefParams(paramss: List[ParamClause]): List[ParamClause] = + paramss.nestedMap: param => + if !param.mods.isAllOf(PrivateLocal) then + syntaxError(em"method parameter ${param.name} may not be a `val`", param.span) + param.withMods(param.mods &~ (AccessFlags | ParamAccessor | Mutable) | Param) + .asInstanceOf[List[ParamClause]] val gdef = val tparams = typeParamClauseOpt(ParamOwner.Given) @@ -4067,31 +4180,55 @@ object Parsers { else Nil newLinesOpt() val noParams = tparams.isEmpty && vparamss.isEmpty - if !(name.isEmpty && noParams) then acceptColon() + val hasParamsOrId = !name.isEmpty || !noParams + if hasParamsOrId then + if in.isColon then + newSyntaxAllowed = false + in.nextToken() + else if newSyntaxAllowed then accept(ARROW) + else acceptColon() val parents = - if isSimpleLiteral then rejectWildcardType(annotType()) :: Nil - else refinedTypeRest(constrApp()) :: withConstrApps() + if isSimpleLiteral then + rejectWildcardType(annotType()) :: Nil + else constrApp() match + case parent: Apply => parent :: moreConstrApps() + case parent if in.isIdent && newSyntaxAllowed => + infixTypeRest(parent, _ => annotType1()) :: Nil + case parent => parent :: moreConstrApps() + if newSyntaxAllowed && in.isIdent(nme.as) then + in.nextToken() + name = ident() + val parentsIsType = parents.length == 1 && parents.head.isType if in.token == EQUALS && parentsIsType then + // given alias accept(EQUALS) mods1 |= Final if noParams && !mods.is(Inline) then mods1 |= Lazy ValDef(name, parents.head, subExpr()) else - 
DefDef(name, joinParams(tparams, vparamss), parents.head, subExpr()) - else if (isStatSep || isStatSeqEnd) && parentsIsType then + DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, subExpr()) + else if (isStatSep || isStatSeqEnd) && parentsIsType && !newSyntaxAllowed then + // old-style abstract given if name.isEmpty then syntaxError(em"anonymous given cannot be abstract") - DefDef(name, joinParams(tparams, vparamss), parents.head, EmptyTree) + DefDef(name, adjustDefParams(joinParams(tparams, vparamss)), parents.head, EmptyTree) else - val tparams1 = tparams.map(tparam => tparam.withMods(tparam.mods | PrivateLocal)) - val vparamss1 = vparamss.map(_.map(vparam => - vparam.withMods(vparam.mods &~ Param | ParamAccessor | Protected))) - val constr = makeConstructor(tparams1, vparamss1) + // structural instance + val vparamss1 = vparamss.nestedMap: vparam => + if vparam.mods.is(Private) + then vparam.withMods(vparam.mods &~ PrivateLocal | Protected) + else vparam + val constr = makeConstructor(tparams, vparamss1) val templ = - if isStatSep || isStatSeqEnd then Template(constr, parents, Nil, EmptyValDef, Nil) - else withTemplate(constr, parents) + if isStatSep || isStatSeqEnd then + Template(constr, parents, Nil, EmptyValDef, Nil) + else if !newSyntaxAllowed || in.token == WITH then + withTemplate(constr, parents) + else + possibleTemplateStart() + templateBodyOpt(constr, parents, Nil) if noParams && !mods.is(Inline) then ModuleDef(name, templ) else TypeDef(name.toTypeName, templ) end gdef @@ -4163,10 +4300,10 @@ object Parsers { /* -------- TEMPLATES ------------------------------------------- */ - /** ConstrApp ::= SimpleType1 {Annotation} {ParArgumentExprs} + /** ConstrApp ::= AnnotType1 {ParArgumentExprs} */ val constrApp: () => Tree = () => - val t = rejectWildcardType(annotTypeRest(simpleType1()), + val t = rejectWildcardType(annotType1(), fallbackTree = Ident(tpnme.ERROR)) // Using Ident(tpnme.ERROR) to avoid causing cascade errors on 
non-user-written code if in.token == LPAREN then parArgumentExprss(wrapNew(t)) else t @@ -4381,6 +4518,7 @@ object Parsers { /** RefineStatSeq ::= RefineStat {semi RefineStat} * RefineStat ::= ‘val’ VarDef + * | ‘var’ VarDef * | ‘def’ DefDef * | ‘type’ {nl} TypeDef * (in reality we admit class defs and vars and filter them out afterwards in `checkLegal`) @@ -4393,10 +4531,7 @@ object Parsers { syntaxError(msg, tree.span) Nil tree match - case tree: ValDef if tree.mods.is(Mutable) => - fail(em"""refinement cannot be a mutable var. - |You can use an explicit getter ${tree.name} and setter ${tree.name}_= instead""") - case tree: MemberDef if !(tree.mods.flags & ModifierFlags).isEmpty => + case tree: MemberDef if !(tree.mods.flags & (ModifierFlags &~ Mutable)).isEmpty => fail(em"refinement cannot be ${(tree.mods.flags & ModifierFlags).flagStrings().mkString("`", "`, `", "`")}") case tree: DefDef if tree.termParamss.nestedExists(!_.rhs.isEmpty) => fail(em"refinement cannot have default arguments") diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index ea43706e9fdb..831d31d6fa6e 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -174,7 +174,7 @@ object Scanners { } class Scanner(source: SourceFile, override val startFrom: Offset = 0, profile: Profile = NoProfile, allowIndent: Boolean = true)(using Context) extends ScannerCommon(source) { - val keepComments = !ctx.settings.YdropComments.value + val keepComments = !ctx.settings.XdropComments.value /** A switch whether operators at the start of lines can be infix operators */ private[Scanners] var allowLeadingInfixOperators = true @@ -884,7 +884,7 @@ object Scanners { nextChar() ch match { case 'x' | 'X' => base = 16 ; nextChar() - //case 'b' | 'B' => base = 2 ; nextChar() + case 'b' | 'B' => base = 2 ; nextChar() case _ => base = 10 ; putChar('0') } if (base != 10 && 
!isNumberSeparator(ch) && digit2int(ch, base) < 0) diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala index ce77a5b9d97a..fdb41fc56689 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala @@ -13,6 +13,7 @@ import java.io.InputStream import java.util.Properties import scala.util.{ Try, Success, Failure } +import scala.annotation.nowarn trait PluginPhase extends MiniPhase { def runsBefore: Set[String] = Set.empty @@ -50,7 +51,20 @@ trait StandardPlugin extends Plugin { * @param options commandline options to the plugin. * @return a list of phases to be added to the phase plan */ - def init(options: List[String]): List[PluginPhase] + @deprecatedOverriding("Method 'init' does not allow to access 'Context', use 'initialize' instead.", since = "Scala 3.5.0") + @deprecated("Use 'initialize' instead.", since = "Scala 3.5.0") + def init(options: List[String]): List[PluginPhase] = Nil + + /** Non-research plugins should override this method to return the phases + * + * The phases returned must be freshly constructed (not reused + * and returned again on subsequent calls). + * + * @param options commandline options to the plugin. 
+ * @return a list of phases to be added to the phase plan + */ + @nowarn("cat=deprecation") + def initialize(options: List[String])(using Context): List[PluginPhase] = init(options) } /** A research plugin may customize the compilation pipeline freely diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala index 31176bb2fb2c..a6672d475129 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala @@ -125,7 +125,7 @@ trait Plugins { } // schedule plugins according to ordering constraints - val pluginPhases = plugins.collect { case p: StandardPlugin => p }.flatMap { plug => plug.init(options(plug)) } + val pluginPhases = plugins.collect { case p: StandardPlugin => p }.flatMap { plug => plug.initialize(options(plug)) } val updatedPlan = Plugins.schedule(plan, pluginPhases) // add research plugins diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index ac13f0161c70..43cac17e6318 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -2,8 +2,6 @@ package dotty.tools package dotc package printing -import scala.language.unsafeNulls - import scala.collection.mutable import core.* @@ -42,7 +40,9 @@ object Formatting { trait CtxShow: def run(using Context): Shown - private inline def CtxShow(inline x: Context ?=> Shown) = new CtxShow { def run(using Context) = x(using ctx) } + private inline def CtxShow(inline x: Context ?=> Shown) = + class InlinedCtxShow extends CtxShow { def run(using Context) = x(using ctx) } + new InlinedCtxShow private def toStr[A: Show](x: A)(using Context): String = Shown.toStr(toShown(x)) private def toShown[A: Show](x: A)(using Context): Shown = Show[A].show(x).runCtxShow @@ -50,7 +50,11 @@ object Formatting { object ShowAny extends Show[Any]: def show(x: Any): Shown = 
x - class ShowImplicits3: + class ShowImplicits4: + given [X: Show]: Show[X | Null] with + def show(x: X | Null) = if x == null then "null" else CtxShow(toStr(x.nn)) + + class ShowImplicits3 extends ShowImplicits4: given Show[Product] = ShowAny class ShowImplicits2 extends ShowImplicits3: @@ -75,15 +79,10 @@ object Formatting { given [K: Show, V: Show]: Show[Map[K, V]] with def show(x: Map[K, V]) = CtxShow(x.map((k, v) => s"${toStr(k)} => ${toStr(v)}")) - end given given [H: Show, T <: Tuple: Show]: Show[H *: T] with def show(x: H *: T) = CtxShow(toStr(x.head) *: toShown(x.tail).asInstanceOf[Tuple]) - end given - - given [X: Show]: Show[X | Null] with - def show(x: X | Null) = if x == null then "null" else CtxShow(toStr(x.nn)) given Show[FlagSet] with def show(x: FlagSet) = x.flagsString @@ -146,8 +145,8 @@ object Formatting { private def treatArg(arg: Shown, suffix: String)(using Context): (String, String) = arg.runCtxShow match { case arg: Seq[?] if suffix.indexOf('%') == 0 && suffix.indexOf('%', 1) != -1 => val end = suffix.indexOf('%', 1) - val sep = StringContext.processEscapes(suffix.substring(1, end)) - (arg.mkString(sep), suffix.substring(end + 1)) + val sep = StringContext.processEscapes(suffix.substring(1, end).nn) + (arg.mkString(sep), suffix.substring(end + 1).nn) case arg: Seq[?] 
=> (arg.map(showArg).mkString("[", ", ", "]"), suffix) case arg => diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index ac7b4ef39604..c06b43cafe17 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -69,7 +69,8 @@ class PlainPrinter(_ctx: Context) extends Printer { homogenize(tp.ref) case tp @ AppliedType(tycon, args) => if (defn.isCompiletimeAppliedType(tycon.typeSymbol)) tp.tryCompiletimeConstantFold - else tycon.dealias.appliedTo(args) + else if !tycon.typeSymbol.isOpaqueAlias then tycon.dealias.appliedTo(args) + else tp case tp: NamedType => tp.reduceProjection case _ => @@ -112,7 +113,7 @@ class PlainPrinter(_ctx: Context) extends Printer { protected def refinementNameString(tp: RefinedType): String = nameString(tp.refinedName) /** String representation of a refinement */ - protected def toTextRefinement(rt: RefinedType): Text = + def toTextRefinement(rt: RefinedType): Text = val keyword = rt.refinedInfo match { case _: ExprType | _: MethodOrPoly => "def " case _: TypeBounds => "type " @@ -121,16 +122,17 @@ class PlainPrinter(_ctx: Context) extends Printer { } (keyword ~ refinementNameString(rt) ~ toTextRHS(rt.refinedInfo)).close - protected def argText(arg: Type, isErased: Boolean = false): Text = keywordText("erased ").provided(isErased) ~ (homogenizeArg(arg) match { - case arg: TypeBounds => "?" ~ toText(arg) - case arg => toText(arg) - }) + protected def argText(arg: Type, isErased: Boolean = false): Text = + keywordText("erased ").provided(isErased) + ~ homogenizeArg(arg).match + case arg: TypeBounds => "?" ~ toText(arg) + case arg => toText(arg) /** Pretty-print comma-separated type arguments for a constructor to be inserted among parentheses or brackets * (hence with `GlobalPrec` precedence). 
*/ protected def argsText(args: List[Type]): Text = - atPrec(GlobalPrec) { Text(args.map(arg => argText(arg) ), ", ") } + atPrec(GlobalPrec) { Text(args.map(argText(_)), ", ") } /** The longest sequence of refinement types, starting at given type * and following parents. @@ -294,6 +296,8 @@ class PlainPrinter(_ctx: Context) extends Printer { && !printDebug then atPrec(GlobalPrec)( Str("into ") ~ toText(tpe) ) else toTextLocal(tpe) ~ " " ~ toText(annot) + case FlexibleType(_, tpe) => + "(" ~ toText(tpe) ~ ")?" case tp: TypeVar => def toTextCaret(tp: Type) = if printDebug then toTextLocal(tp) ~ Str("^") else toText(tp) if (tp.isInstantiated) @@ -430,11 +434,11 @@ class PlainPrinter(_ctx: Context) extends Printer { sym.isEffectiveRoot || sym.isAnonymousClass || sym.name.isReplWrapperName /** String representation of a definition's type following its name, - * if symbol is completed, "?" otherwise. + * if symbol is completed, ": ?" otherwise. */ protected def toTextRHS(optType: Option[Type]): Text = optType match { case Some(tp) => toTextRHS(tp) - case None => "?" + case None => ": ?" 
} protected def decomposeLambdas(bounds: TypeBounds): (Text, TypeBounds) = diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala index 8687925ed5fb..297dc31ea94a 100644 --- a/compiler/src/dotty/tools/dotc/printing/Printer.scala +++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala @@ -4,7 +4,7 @@ package printing import core.* import Texts.*, ast.Trees.* -import Types.{Type, SingletonType, LambdaParam, NamedType}, +import Types.{Type, SingletonType, LambdaParam, NamedType, RefinedType}, Symbols.Symbol, Scopes.Scope, Constants.Constant, Names.Name, Denotations._, Annotations.Annotation, Contexts.Context import typer.Implicits.* @@ -104,6 +104,9 @@ abstract class Printer { /** Textual representation of a prefix of some reference, ending in `.` or `#` */ def toTextPrefixOf(tp: NamedType): Text + /** textual representation of a refinement, with no enclosing {...} */ + def toTextRefinement(rt: RefinedType): Text + /** Textual representation of a reference in a capture set */ def toTextCaptureRef(tp: Type): Text diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 93e280f8a13c..1ff4c8cae339 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -205,6 +205,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def toTextTuple(args: List[Type]): Text = "(" ~ argsText(args) ~ ")" + def toTextNamedTuple(elems: List[(TermName, Type)]): Text = + val elemsText = atPrec(GlobalPrec): + Text(elems.map((name, tp) => toText(name) ~ " : " ~ argText(tp)), ", ") + "(" ~ elemsText ~ ")" + def isInfixType(tp: Type): Boolean = tp match case AppliedType(tycon, args) => args.length == 2 @@ -239,8 +244,16 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def appliedText(tp: Type): Text = tp match case tp @ 
AppliedType(tycon, args) => - tp.tupleElementTypesUpTo(200, normalize = false) match - case Some(types) if types.size >= 2 && !printDebug => toTextTuple(types) + val namedElems = + try tp.namedTupleElementTypesUpTo(200, normalize = false) + catch case ex: TypeError => Nil + if namedElems.nonEmpty then + toTextNamedTuple(namedElems) + else tp.tupleElementTypesUpTo(200, normalize = false) match + //case Some(types @ (defn.NamedTupleElem(_, _) :: _)) if !printDebug => + // toTextTuple(types) + case Some(types) if types.size >= 2 && !printDebug => + toTextTuple(types) case _ => val tsym = tycon.typeSymbol if tycon.isRepeatedParam then toTextLocal(args.head) ~ "*" @@ -373,7 +386,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { changePrec(GlobalPrec) { keywordStr("for ") ~ Text(enums map enumText, "; ") ~ sep ~ toText(expr) } def cxBoundToText(bound: untpd.Tree): Text = bound match { // DD - case AppliedTypeTree(tpt, _) => " : " ~ toText(tpt) + case ContextBoundTypeTree(tpt, _, _) => " : " ~ toText(tpt) case untpd.Function(_, tpt) => " <% " ~ toText(tpt) } @@ -478,9 +491,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if isWildcardStarArg(tree) then expr match case Ident(nme.WILDCARD_STAR) => - // `_*` is used as a wildcard name to indicate a vararg splice pattern; - // avoid the double `*` in this case. - toText(expr) + // `_*` is used as a wildcard name to indicate a vararg splice pattern; + // avoid the double `*` in this case. 
+ toText(expr) case _ => toText(expr) ~ "*" else @@ -490,7 +503,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { exprText ~ colon ~ toText(tpt) } case NamedArg(name, arg) => - toText(name) ~ " = " ~ toText(arg) + toText(name) ~ (if name.isTermName && arg.isType then " : " else " = ") ~ toText(arg) case Assign(lhs, rhs) => changePrec(GlobalPrec) { toTextLocal(lhs) ~ " = " ~ toText(rhs) } case block: Block => @@ -559,7 +572,12 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { changePrec(AndTypePrec) { toText(args(0)) ~ " & " ~ atPrec(AndTypePrec + 1) { toText(args(1)) } } else if defn.isFunctionSymbol(tpt.symbol) && tpt.isInstanceOf[TypeTree] && tree.hasType && !printDebug - then changePrec(GlobalPrec) { toText(tree.typeOpt) } + then + changePrec(GlobalPrec) { toText(tree.typeOpt) } + else if tpt.symbol == defn.NamedTupleTypeRef.symbol + && !printDebug && tree.typeOpt.exists + then + toText(tree.typeOpt) else args match case arg :: _ if arg.isTerm => toTextLocal(tpt) ~ "(" ~ Text(args.map(argText), ", ") ~ ")" @@ -640,7 +658,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def toTextAnnot = toTextLocal(arg) ~~ annotText(annot.symbol.enclosingClass, annot) def toTextRetainsAnnot = - try changePrec(GlobalPrec)(toText(arg) ~ "^" ~ toTextCaptureSet(captureSet)) + try changePrec(GlobalPrec)(toTextLocal(arg) ~ "^" ~ toTextCaptureSet(captureSet)) catch case ex: IllegalCaptureRef => toTextAnnot if annot.symbol.maybeOwner.isRetains && Feature.ccEnabled && !printDebug @@ -729,9 +747,18 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case GenAlias(pat, expr) => toText(pat) ~ " = " ~ toText(expr) case ContextBounds(bounds, cxBounds) => - cxBounds.foldLeft(toText(bounds)) {(t, cxb) => - t ~ cxBoundToText(cxb) - } + if Feature.enabled(Feature.modularity) then + def boundsText(bounds: Tree) = bounds match + case ContextBoundTypeTree(tpt, _, ownName) => + toText(tpt) ~ (" as " ~ toText(ownName) 
`provided` !ownName.isEmpty) + case bounds => toText(bounds) + cxBounds match + case bound :: Nil => ": " ~ boundsText(bound) + case _ => ": {" ~ Text(cxBounds.map(boundsText), ", ") ~ "}" + else + cxBounds.foldLeft(toText(bounds)) {(t, cxb) => + t ~ cxBoundToText(cxb) + } case PatDef(mods, pats, tpt, rhs) => modText(mods, NoSymbol, keywordStr("val"), isType = false) ~~ toText(pats, ", ") ~ optAscription(tpt) ~ optText(rhs)(" = " ~ _) @@ -776,6 +803,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { prefix ~~ idx.toString ~~ "|" ~~ tpeText ~~ "|" ~~ argsText ~~ "|" ~~ contentText ~~ postfix case CapturesAndResult(refs, parent) => changePrec(GlobalPrec)("^{" ~ Text(refs.map(toText), ", ") ~ "}" ~ toText(parent)) + case ContextBoundTypeTree(tycon, pname, ownName) => + toText(pname) ~ " : " ~ toText(tycon) ~ (" as " ~ toText(ownName) `provided` !ownName.isEmpty) case _ => tree.fallbackToText(this) } diff --git a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala index 17e23ebcf014..e34d35065476 100644 --- a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala +++ b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala @@ -166,8 +166,8 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): val inst = try loadModule(moduleClass) catch - case MissingClassDefinedInCurrentRun(sym) => - suspendOnMissing(sym, pos) + case MissingClassValidInCurrentRun(sym, origin) => + suspendOnMissing(sym, origin, pos) val clazz = inst.getClass val name = fn.name.asTermName val method = getMethod(clazz, name, paramsSig(fn)) @@ -213,8 +213,8 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): private def loadClass(name: String): Class[?] 
= try classLoader.loadClass(name) catch - case MissingClassDefinedInCurrentRun(sym) => - suspendOnMissing(sym, pos) + case MissingClassValidInCurrentRun(sym, origin) => + suspendOnMissing(sym, origin, pos) private def getMethod(clazz: Class[?], name: Name, paramClasses: List[Class[?]]): JLRMethod = @@ -223,8 +223,8 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): case _: NoSuchMethodException => val msg = em"Could not find method ${clazz.getCanonicalName}.$name with parameters ($paramClasses%, %)" throw new StopInterpretation(msg, pos) - case MissingClassDefinedInCurrentRun(sym) => - suspendOnMissing(sym, pos) + case MissingClassValidInCurrentRun(sym, origin) => + suspendOnMissing(sym, origin, pos) } private def stopIfRuntimeException[T](thunk: => T, method: JLRMethod): T = @@ -242,8 +242,8 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): ex.getTargetException match { case ex: scala.quoted.runtime.StopMacroExpansion => throw ex - case MissingClassDefinedInCurrentRun(sym) => - suspendOnMissing(sym, pos) + case MissingClassValidInCurrentRun(sym, origin) => + suspendOnMissing(sym, origin, pos) case targetException => val sw = new StringWriter() sw.write("Exception occurred while executing macro expansion.\n") @@ -348,8 +348,11 @@ object Interpreter: } end Call - object MissingClassDefinedInCurrentRun { - def unapply(targetException: Throwable)(using Context): Option[Symbol] = { + enum ClassOrigin: + case Classpath, Source + + object MissingClassValidInCurrentRun { + def unapply(targetException: Throwable)(using Context): Option[(Symbol, ClassOrigin)] = { if !ctx.compilationUnit.isSuspendable then None else targetException match case _: NoClassDefFoundError | _: ClassNotFoundException => @@ -358,16 +361,34 @@ object Interpreter: else val className = message.replace('/', '.') val sym = - if className.endsWith(str.MODULE_SUFFIX) then staticRef(className.toTermName).symbol.moduleClass - else 
staticRef(className.toTypeName).symbol - // If the symbol does not a a position we assume that it came from the current run and it has an error - if sym.isDefinedInCurrentRun || (sym.exists && !sym.srcPos.span.exists) then Some(sym) - else None + if className.endsWith(str.MODULE_SUFFIX) then + staticRef(className.stripSuffix(str.MODULE_SUFFIX).toTermName).symbol.moduleClass + else + staticRef(className.toTypeName).symbol + if sym.isDefinedInBinary then + // i.e. the associated file is `.tasty`, if the macro classloader is not able to find the class, + // possibly it indicates that it comes from a pipeline-compiled dependency. + Some((sym, ClassOrigin.Classpath)) + else if sym.isDefinedInCurrentRun || (sym.exists && !sym.srcPos.span.exists) then + // If the symbol does not a a position we assume that it came from the current run and it has an error + Some((sym, ClassOrigin.Source)) + else + None case _ => None } } - def suspendOnMissing(sym: Symbol, pos: SrcPos)(using Context): Nothing = - if ctx.settings.XprintSuspension.value then - report.echo(i"suspension triggered by a dependency on $sym", pos) - ctx.compilationUnit.suspend() // this throws a SuspendException + def suspendOnMissing(sym: Symbol, origin: ClassOrigin, pos: SrcPos)(using Context): Nothing = + if origin == ClassOrigin.Classpath then + throw StopInterpretation( + em"""Macro code depends on ${sym.showLocated} found on the classpath, but could not be loaded while evaluating the macro. + | This is likely because class files could not be found in the classpath entry for the symbol. + | + | A possible cause is if the origin of this symbol was built with pipelined compilation; + | in which case, this problem may go away by disabling pipelining for that origin. 
+ | + | $sym is defined in file ${sym.associatedFile}""", pos) + else if ctx.settings.YnoSuspendedUnits.value then + throw StopInterpretation(em"suspension triggered by a dependency on missing ${sym.showLocated} not allowed with -Yno-suspended-units", pos) + else + ctx.compilationUnit.suspend(i"suspension triggered by a dependency on missing ${sym.showLocated}") // this throws a SuspendException diff --git a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala index 8ebd1f6973f2..6d6e2ff01ad4 100644 --- a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala +++ b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala @@ -217,7 +217,7 @@ object PickledQuotes { /** Pickle tree into it's TASTY bytes s*/ private def pickle(tree: Tree)(using Context): Array[Byte] = { quotePickling.println(i"**** pickling quote of\n$tree") - val pickler = new TastyPickler(defn.RootClass) + val pickler = new TastyPickler(defn.RootClass, isBestEffortTasty = false) val treePkl = new TreePickler(pickler, Attributes.empty) treePkl.pickle(tree :: Nil) treePkl.compactify() @@ -229,7 +229,7 @@ object PickledQuotes { positionWarnings.foreach(report.warning(_)) val pickled = pickler.assembleParts() - quotePickling.println(s"**** pickled quote\n${TastyPrinter.showContents(pickled, ctx.settings.color.value == "never")}") + quotePickling.println(s"**** pickled quote\n${TastyPrinter.showContents(pickled, ctx.settings.color.value == "never", isBestEffortTasty = false)}") pickled } @@ -266,10 +266,10 @@ object PickledQuotes { inContext(unpicklingContext) { - quotePickling.println(s"**** unpickling quote from TASTY\n${TastyPrinter.showContents(bytes, ctx.settings.color.value == "never")}") + quotePickling.println(s"**** unpickling quote from TASTY\n${TastyPrinter.showContents(bytes, ctx.settings.color.value == "never", isBestEffortTasty = false)}") val mode = if (isType) UnpickleMode.TypeTree else UnpickleMode.Term - val unpickler = new 
DottyUnpickler(NoAbstractFile, bytes, mode) + val unpickler = new DottyUnpickler(NoAbstractFile, bytes, isBestEffortTasty = false, mode) unpickler.enter(Set.empty) val tree = unpickler.tree diff --git a/compiler/src/dotty/tools/dotc/report.scala b/compiler/src/dotty/tools/dotc/report.scala index a63b6569fefe..1d8ca5f208fa 100644 --- a/compiler/src/dotty/tools/dotc/report.scala +++ b/compiler/src/dotty/tools/dotc/report.scala @@ -81,6 +81,22 @@ object report: if ctx.settings.YdebugError.value then Thread.dumpStack() if ctx.settings.YdebugTypeError.value then ex.printStackTrace() + def bestEffortError(ex: Throwable, msg: String)(using Context): Unit = + val stackTrace = + Option(ex.getStackTrace()).map { st => + if st.nn.isEmpty then "" + else s"Stack trace: \n ${st.nn.mkString("\n ")}".stripMargin + }.getOrElse("") + // Build tools and dotty's test framework may check precisely for + // "Unsuccessful best-effort compilation." error text. + val fullMsg = + em"""Unsuccessful best-effort compilation. + |${msg} + |Cause: + | ${ex.toString.replace("\n", "\n ")} + |${stackTrace}""" + ctx.reporter.report(new Error(fullMsg, NoSourcePosition)) + def errorOrMigrationWarning(msg: Message, pos: SrcPos, migrationVersion: MigrationVersion)(using Context): Unit = if sourceVersion.isAtLeast(migrationVersion.errorFrom) then if !sourceVersion.isMigrating then error(msg, pos) @@ -130,7 +146,7 @@ object report: // Should only be called from Run#enrichErrorMessage. def enrichErrorMessage(errorMessage: String)(using Context): String = - if ctx.settings.YnoEnrichErrorMessages.value then errorMessage + if ctx.settings.XnoEnrichErrorMessages.value then errorMessage else try enrichErrorMessage1(errorMessage) catch case _: Throwable => errorMessage // don't introduce new errors trying to report errors, so swallow exceptions @@ -155,7 +171,7 @@ object report: | An unhandled exception was thrown in the compiler. 
| Please file a crash report here: | https://github.com/scala/scala3/issues/new/choose - | For non-enriched exceptions, compile with -Yno-enrich-error-messages. + | For non-enriched exceptions, compile with -Xno-enrich-error-messages. | |$info1 |""".stripMargin diff --git a/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala index 270c35d0add7..3dc73983056a 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala @@ -5,15 +5,18 @@ package reporting import core.Contexts.* import java.io.{ BufferedReader, PrintWriter } import Diagnostic.* +import dotty.tools.dotc.interfaces.Diagnostic.INFO /** * This class implements a Reporter that displays messages on a text console */ class ConsoleReporter( reader: BufferedReader = Console.in, - writer: PrintWriter = new PrintWriter(Console.err, true) + writer: PrintWriter = new PrintWriter(Console.err, true), + echoer: PrintWriter = new PrintWriter(Console.out, true) ) extends ConsoleReporter.AbstractConsoleReporter { - override def printMessage(msg: String): Unit = { writer.print(msg + "\n"); writer.flush() } + override def printMessage(msg: String): Unit = { writer.println(msg); writer.flush() } + override def echoMessage(msg: String): Unit = { echoer.println(msg); echoer.flush() } override def flush()(using Context): Unit = writer.flush() override def doReport(dia: Diagnostic)(using Context): Unit = { @@ -22,18 +25,21 @@ class ConsoleReporter( dia match case _: Error => Reporter.displayPrompt(reader, writer) case _: Warning if ctx.settings.XfatalWarnings.value => Reporter.displayPrompt(reader, writer) - case _ => + case _ => } } object ConsoleReporter { abstract class AbstractConsoleReporter extends AbstractReporter { - /** Prints the message. */ + /** Print the diagnostic message. 
*/ def printMessage(msg: String): Unit - /** Prints the message with the given position indication. */ - def doReport(dia: Diagnostic)(using Context): Unit = { - printMessage(messageAndPos(dia)) - } + /** Print the informative message. */ + def echoMessage(msg: String): Unit + + /** Print the message with the given position indication. */ + def doReport(dia: Diagnostic)(using Context): Unit = + if dia.level == INFO then echoMessage(messageAndPos(dia)) + else printMessage(messageAndPos(dia)) } } diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index 6011587a7100..04380a7b8e4a 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -207,6 +207,10 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case MatchTypeLegacyPatternID // errorNumber: 191 case UnstableInlineAccessorID // errorNumber: 192 case VolatileOnValID // errorNumber: 193 + case ExtensionNullifiedByMemberID // errorNumber: 194 + case ConstructorProxyNotValueID // errorNumber: 195 + case ContextBoundCompanionNotValueID // errorNumber: 196 + case InlinedAnonClassWarningID // errorNumber: 197 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala index f469c03764c0..99720b8e4d29 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ExploringReporter.scala @@ -18,6 +18,9 @@ class ExploringReporter extends StoreReporter(null, fromTyperState = false): override def removeBufferedMessages(using Context): List[Diagnostic] = try infos.toList finally reset() + override def mapBufferedMessages(f: Diagnostic => Diagnostic)(using Context): Unit = + infos.mapInPlace(f) + def reset(): Unit = infos.clear() -end ExploringReporter \ 
No newline at end of file +end ExploringReporter diff --git a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala index 22500cbbaa48..61f842800b78 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala @@ -14,7 +14,8 @@ import dotty.tools.dotc.util.NoSourcePosition import java.io.{BufferedReader, PrintWriter} import scala.annotation.internal.sharable import scala.collection.mutable -import core.Decorators.em +import core.Decorators.{em, toMessage} +import core.handleRecursive object Reporter { /** Convert a SimpleReporter into a real Reporter */ @@ -155,6 +156,12 @@ abstract class Reporter extends interfaces.ReporterResult { addUnreported(key, 1) case _ => if !isHidden(dia) then // avoid isHidden test for summarized warnings so that message is not forced + try + withMode(Mode.Printing)(doReport(dia)) + catch case ex: Throwable => + // #20158: Don't increment the error count, otherwise we might suppress + // the RecursiveOverflow error and not print any error at all. + handleRecursive("error reporting", dia.message, ex) dia match { case w: Warning => warnings = w :: warnings @@ -168,7 +175,6 @@ abstract class Reporter extends interfaces.ReporterResult { // match error if d is something else } markReported(dia) - withMode(Mode.Printing)(doReport(dia)) end issueUnconfigured def issueIfNotSuppressed(dia: Diagnostic)(using Context): Unit = @@ -230,10 +236,9 @@ abstract class Reporter extends interfaces.ReporterResult { report(Warning(msg, NoSourcePosition)) /** Print the summary of warnings and errors */ - def printSummary()(using Context): Unit = { + def printSummary()(using Context): Unit = val s = summary - if (s != "") report(new Info(s, NoSourcePosition)) - } + if (s != "") doReport(Warning(s.toMessage, NoSourcePosition)) /** Returns a string meaning "n elements". 
*/ protected def countString(n: Int, elements: String): String = n match { @@ -263,6 +268,9 @@ abstract class Reporter extends interfaces.ReporterResult { /** If this reporter buffers messages, remove and return all buffered messages. */ def removeBufferedMessages(using Context): List[Diagnostic] = Nil + /** If this reporter buffers messages, apply `f` to all buffered messages. */ + def mapBufferedMessages(f: Diagnostic => Diagnostic)(using Context): Unit = () + /** Issue all messages in this reporter to next outer one, or make sure they are written. */ def flush()(using Context): Unit = val msgs = removeBufferedMessages diff --git a/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala index aef5f2c5863b..9395788d4cc7 100644 --- a/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala @@ -21,7 +21,7 @@ class StoreReporter(outer: Reporter | Null = Reporter.NoReporter, fromTyperState protected var infos: mutable.ListBuffer[Diagnostic] | Null = null - def doReport(dia: Diagnostic)(using Context): Unit = { + override def doReport(dia: Diagnostic)(using Context): Unit = { typr.println(s">>>> StoredError: ${dia.message}") // !!! 
DEBUG if (infos == null) infos = new mutable.ListBuffer infos.uncheckedNN += dia @@ -37,6 +37,9 @@ class StoreReporter(outer: Reporter | Null = Reporter.NoReporter, fromTyperState if (infos != null) try infos.uncheckedNN.toList finally infos = null else Nil + override def mapBufferedMessages(f: Diagnostic => Diagnostic)(using Context): Unit = + if infos != null then infos.uncheckedNN.mapInPlace(f) + override def pendingMessages(using Context): List[Diagnostic] = if (infos != null) infos.uncheckedNN.toList else Nil diff --git a/compiler/src/dotty/tools/dotc/reporting/WConf.scala b/compiler/src/dotty/tools/dotc/reporting/WConf.scala index cc0a63cb1532..54a6fc14e054 100644 --- a/compiler/src/dotty/tools/dotc/reporting/WConf.scala +++ b/compiler/src/dotty/tools/dotc/reporting/WConf.scala @@ -5,7 +5,9 @@ package reporting import scala.language.unsafeNulls import dotty.tools.dotc.core.Contexts.* -import dotty.tools.dotc.util.SourcePosition +import dotty.tools.dotc.util.{NoSourcePosition, SourcePosition} +import dotty.tools.dotc.interfaces.SourceFile +import dotty.tools.dotc.reporting.MessageFilter.SourcePattern import java.util.regex.PatternSyntaxException import scala.annotation.internal.sharable @@ -21,11 +23,19 @@ enum MessageFilter: val noHighlight = message.msg.message.replaceAll("\\e\\[[\\d;]*[^\\d;]","") pattern.findFirstIn(noHighlight).nonEmpty case MessageID(errorId) => message.msg.errorId == errorId + case SourcePattern(pattern) => + val source = message.position.orElse(NoSourcePosition).source() + val path = source.jfile() + .map(_.toPath.toAbsolutePath.toUri.normalize().getRawPath) + .orElse(source.path()) + pattern.findFirstIn(path).nonEmpty + case None => false case Any, Deprecated, Feature, Unchecked, None case MessagePattern(pattern: Regex) case MessageID(errorId: ErrorMessageID) + case SourcePattern(pattern: Regex) enum Action: case Error, Warning, Verbose, Info, Silent @@ -84,6 +94,9 @@ object WConf: case "feature" => Right(Feature) case "unchecked" => 
Right(Unchecked) case _ => Left(s"unknown category: $conf") + + case "src" => regex(conf).map(SourcePattern.apply) + case _ => Left(s"unknown filter: $filter") case _ => Left(s"unknown filter: $s") diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 484bc88c0983..9a20f149a6d1 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -95,7 +95,8 @@ abstract class CyclicMsg(errorId: ErrorMessageID)(using Context) extends Message protected def context: String = ex.optTrace match case Some(trace) => s"\n\nThe error occurred while trying to ${ - trace.map((prefix, sym, suffix) => i"$prefix$sym$suffix").mkString("\n which required to ") + trace.map(identity) // map with identity will turn Context ?=> String elements to String elements + .mkString("\n which required to ") }$debugInfo" case None => "\n\n Run with -explain-cyclic for more details." 
@@ -289,7 +290,7 @@ extends NotFoundMsg(MissingIdentID) { } } -class TypeMismatch(val found: Type, expected: Type, inTree: Option[untpd.Tree], addenda: => String*)(using Context) +class TypeMismatch(val found: Type, expected: Type, val inTree: Option[untpd.Tree], addenda: => String*)(using Context) extends TypeMismatchMsg(found, expected)(TypeMismatchID): def msg(using Context) = @@ -924,7 +925,7 @@ class MatchableWarning(tp: Type, pattern: Boolean)(using Context) extends TypeMsg(MatchableWarningID) { def msg(using Context) = val kind = if pattern then "pattern selector" else "value" - i"""${kind} should be an instance of Matchable,, + i"""${kind} should be an instance of Matchable, |but it has unmatchable type $tp instead""" def explain(using Context) = @@ -2451,6 +2452,17 @@ class SynchronizedCallOnBoxedClass(stat: tpd.Tree)(using Context) |you intended.""" } +class ExtensionNullifiedByMember(method: Symbol, target: Symbol)(using Context) + extends Message(ExtensionNullifiedByMemberID): + def kind = MessageKind.PotentialIssue + def msg(using Context) = + i"""Extension method ${hl(method.name.toString)} will never be selected + |because ${hl(target.name.toString)} already has a member with the same name and compatible parameter types.""" + def explain(using Context) = + i"""An extension method can be invoked as a regular method, but if that is intended, + |it should not be defined as an extension. 
+ |Although extensions can be overloaded, they do not overload existing member methods.""" + class TraitCompanionWithMutableStatic()(using Context) extends SyntaxMsg(TraitCompanionWithMutableStaticID) { def msg(using Context) = i"Companion of traits cannot define mutable @static fields" @@ -2810,19 +2822,27 @@ class MissingImplicitArgument( val idx = paramNames.indexOf(name) if (idx >= 0) Some(i"${args(idx)}") else None """\$\{\s*([^}\s]+)\s*\}""".r.replaceAllIn(raw, (_: Regex.Match) match - case Regex.Groups(v) => quoteReplacement(translate(v).getOrElse("")).nn + case Regex.Groups(v) => quoteReplacement(translate(v).getOrElse("?" + v)).nn ) /** @param rawMsg Message template with variables, e.g. "Variable A is ${A}" * @param sym Symbol of the annotated type or of the method whose parameter was annotated + * @param paramNames Names of type parameters to substitute with `args` in the message template + * @param args Resolved type arguments to substitute for `paramNames` in the message template * @param substituteType Function substituting specific types for abstract types associated with variables, e.g A -> Int */ - def formatAnnotationMessage(rawMsg: String, sym: Symbol, substituteType: Type => Type)(using Context): String = + def formatAnnotationMessage( + rawMsg: String, + sym: Symbol, + paramNames: List[Name], + args: List[Type], + substituteType: Type => Type, + )(using Context): String = val substitutableTypesSymbols = substitutableTypeSymbolsInScope(sym) userDefinedErrorString( rawMsg, - paramNames = substitutableTypesSymbols.map(_.name.unexpandedName.toString), - args = substitutableTypesSymbols.map(_.typeRef).map(substituteType) + paramNames = (paramNames ::: substitutableTypesSymbols.map(_.name)).map(_.unexpandedName.toString), + args = args ::: substitutableTypesSymbols.map(_.typeRef).map(substituteType) ) /** Extract a user defined error message from a symbol `sym` @@ -2834,14 +2854,17 @@ class MissingImplicitArgument( msg <- 
ann.argumentConstantString(0) yield msg - def userDefinedImplicitNotFoundTypeMessageFor(sym: Symbol)(using Context): Option[String] = - for - rawMsg <- userDefinedMsg(sym, defn.ImplicitNotFoundAnnot) - if Feature.migrateTo3 || sym != defn.Function1 - // Don't inherit "No implicit view available..." message if subtypes of Function1 are not treated as implicit conversions anymore - yield - val substituteType = (_: Type).asSeenFrom(pt, sym) - formatAnnotationMessage(rawMsg, sym, substituteType) + def userDefinedImplicitNotFoundTypeMessageFor( + sym: Symbol, + params: List[ParamInfo] = Nil, + args: List[Type] = Nil + )(using Context): Option[String] = for + rawMsg <- userDefinedMsg(sym, defn.ImplicitNotFoundAnnot) + if Feature.migrateTo3 || sym != defn.Function1 + // Don't inherit "No implicit view available..." message if subtypes of Function1 are not treated as implicit conversions anymore + yield + val paramNames = params.map(_.paramName) + formatAnnotationMessage(rawMsg, sym, paramNames, args, _.asSeenFrom(pt, sym)) /** Extracting the message from a method parameter, e.g. 
in * @@ -2856,19 +2879,22 @@ class MissingImplicitArgument( val targs = tpd.typeArgss(applTree).flatten val methodOwner = fn.symbol.owner val methodOwnerType = tpd.qualifier(fn).tpe - val methodTypeParams = fn.symbol.paramSymss.flatten.filter(_.isType) + val methodTypeParams = fn.symbol.paramSymss.flatten.withFilter(_.isType).map(_.name) val methodTypeArgs = targs.map(_.tpe) - val substituteType = (_: Type).asSeenFrom(methodOwnerType, methodOwner).subst(methodTypeParams, methodTypeArgs) - formatAnnotationMessage(rawMsg, sym.owner, substituteType) + formatAnnotationMessage(rawMsg, sym.owner, methodTypeParams, methodTypeArgs, _.asSeenFrom(methodOwnerType, methodOwner)) def userDefinedImplicitNotFoundTypeMessage(using Context): Option[String] = - def recur(tp: Type): Option[String] = tp match + def recur(tp: Type, params: List[ParamInfo] = Nil, args: List[Type] = Nil): Option[String] = tp match + case tp: AppliedType => + val tycon = tp.typeConstructor + val typeParams = if tycon.isLambdaSub then tycon.hkTypeParams else tycon.typeParams + recur(tycon, typeParams ::: params, tp.args ::: args) case tp: TypeRef => - val sym = tp.symbol - userDefinedImplicitNotFoundTypeMessageFor(sym).orElse(recur(tp.info)) + userDefinedImplicitNotFoundTypeMessageFor(tp.symbol, params, args) + .orElse(recur(tp.info)) case tp: ClassInfo => tp.baseClasses.iterator - .map(userDefinedImplicitNotFoundTypeMessageFor) + .map(userDefinedImplicitNotFoundTypeMessageFor(_)) .find(_.isDefined).flatten case tp: TypeProxy => recur(tp.superType) @@ -2927,11 +2953,20 @@ class MissingImplicitArgument( def location(preposition: String) = if (where.isEmpty) "" else s" $preposition $where" + /** Default error message for non-nested ambiguous implicits. 
*/ def defaultAmbiguousImplicitMsg(ambi: AmbiguousImplicits) = - s"Ambiguous given instances: ${ambi.explanation}${location("of")}" + s"Ambiguous given instances: ${ambi.explanation}${location("of")}${ambi.priorityChangeWarningNote}" + /** Default error messages for non-ambiguous implicits, or nested ambiguous + * implicits. + * + * The default message is shown for ambiguous implicits only if they have + * the `nested` flag set. In this case, we output "no best given instance" + * instead of "no given instance". + */ def defaultImplicitNotFoundMessage = - i"No given instance of type $pt was found${location("for")}" + val bestStr = if arg.tpe.isInstanceOf[AmbiguousImplicits] then " best" else "" + i"No$bestStr given instance of type $pt was found${location("for")}" /** Construct a custom error message given an ambiguous implicit * candidate `alt` and a user defined message `raw`. @@ -2969,7 +3004,7 @@ class MissingImplicitArgument( * def foo(implicit foo: Foo): Any = ??? */ arg.tpe match - case ambi: AmbiguousImplicits => + case ambi: AmbiguousImplicits if !ambi.nested => (ambi.alt1, ambi.alt2) match case (alt @ AmbiguousImplicitMsg(msg), _) => userDefinedAmbiguousImplicitMsg(alt, msg) @@ -3078,6 +3113,15 @@ extends SyntaxMsg(InlineGivenShouldNotBeFunctionID): | inline def apply(x: A) = x.toB """ +class InlinedAnonClassWarning()(using Context) + extends Message(InlinedAnonClassWarningID): + def kind = MessageKind.PotentialIssue + def msg(using Context) = "New anonymous class definition will be duplicated at each inline site" + def explain(using Context) = + i"""Anonymous class will be defined at each use site, which may lead to a larger number of classfiles. 
+ | + |To inline class definitions, you may provide an explicit class name to avoid this warning.""" + class ValueDiscarding(tp: Type)(using Context) extends Message(ValueDiscardingID): def kind = MessageKind.PotentialIssue @@ -3159,3 +3203,39 @@ class VolatileOnVal()(using Context) extends SyntaxMsg(VolatileOnValID): protected def msg(using Context): String = "values cannot be volatile" protected def explain(using Context): String = "" + +class ConstructorProxyNotValue(sym: Symbol)(using Context) +extends TypeMsg(ConstructorProxyNotValueID): + protected def msg(using Context): String = + i"constructor proxy $sym cannot be used as a value" + protected def explain(using Context): String = + i"""A constructor proxy is a symbol made up by the compiler to represent a non-existent + |factory method of a class. For instance, in + | + | class C(x: Int) + | + |C does not have an apply method since it is not a case class. Yet one can + |still create instances with applications like `C(3)` which expand to `new C(3)`. + |The `C` in this call is a constructor proxy. It can only be used as applications + |but not as a stand-alone value.""" + +class ContextBoundCompanionNotValue(sym: Symbol)(using Context) +extends TypeMsg(ConstructorProxyNotValueID): + protected def msg(using Context): String = + i"context bound companion $sym cannot be used as a value" + protected def explain(using Context): String = + i"""A context bound companion is a symbol made up by the compiler to represent the + |witness or witnesses generated for the context bound(s) of a type parameter or type. + |For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + |there is just a type `A` declared but not a value `A`. Nevertheless, one can write + |the selection `A.unit`, which works because the compiler created a context bound + |companion value with the (term-)name `A`. 
However, these context bound companions + |are not values themselves, they can only be referred to in selections.""" + diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index dafb44d525e4..75f04908ac55 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -15,12 +15,16 @@ import Trees.* import Types.* import Symbols.* import Names.* +import StdNames.str import NameOps.* import inlines.Inlines import transform.ValueClasses -import dotty.tools.io.File +import transform.Pickler +import dotty.tools.io.{File, FileExtension, JarArchive} +import util.{Property, SourceFile} import java.io.PrintWriter +import ExtractAPI.NonLocalClassSymbolsInCurrentUnits import scala.collection.mutable import scala.util.hashing.MurmurHash3 @@ -48,13 +52,13 @@ class ExtractAPI extends Phase { override def description: String = ExtractAPI.description override def isRunnable(using Context): Boolean = { - super.isRunnable && ctx.runZincPhases + super.isRunnable && (ctx.runZincPhases || ctx.settings.XjavaTasty.value) } // Check no needed. Does not transform trees override def isCheckable: Boolean = false - // when `-Yjava-tasty` is set we actually want to run this phase on Java sources + // when `-Xjava-tasty` is set we actually want to run this phase on Java sources override def skipIfJava(using Context): Boolean = false // SuperAccessors need to be part of the API (see the scripted test @@ -62,7 +66,63 @@ class ExtractAPI extends Phase { // after `PostTyper` (unlike `ExtractDependencies`, the simplication to trees // done by `PostTyper` do not affect this phase because it only cares about // definitions, and `PostTyper` does not change definitions). 
- override def runsAfter: Set[String] = Set(transform.PostTyper.name) + override def runsAfter: Set[String] = Set(transform.Pickler.name) + + override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = + val doZincCallback = ctx.runZincPhases + val nonLocalClassSymbols = new mutable.HashSet[Symbol] + val units0 = + if doZincCallback then + val ctx0 = ctx.withProperty(NonLocalClassSymbolsInCurrentUnits, Some(nonLocalClassSymbols)) + super.runOn(units)(using ctx0) + else + units // still run the phase for the side effects (writing TASTy files to -Yearly-tasty-output) + if doZincCallback then + ctx.withIncCallback(recordNonLocalClasses(nonLocalClassSymbols, _)) + if ctx.settings.XjavaTasty.value then + units0.filterNot(_.typedAsJava) // remove java sources, this is the terminal phase when `-Xjava-tasty` is set + else + units0 + end runOn + + private def recordNonLocalClasses(nonLocalClassSymbols: mutable.HashSet[Symbol], cb: interfaces.IncrementalCallback)(using Context): Unit = + for cls <- nonLocalClassSymbols do + val sourceFile = cls.source + if sourceFile.exists && cls.isDefinedInCurrentRun then + recordNonLocalClass(cls, sourceFile, cb) + ctx.run.nn.asyncTasty.foreach(_.signalAPIComplete()) + + private def recordNonLocalClass(cls: Symbol, sourceFile: SourceFile, cb: interfaces.IncrementalCallback)(using Context): Unit = + def registerProductNames(fullClassName: String, binaryClassName: String) = + val pathToClassFile = s"${binaryClassName.replace('.', java.io.File.separatorChar)}.class" + + val classFile = { + ctx.settings.outputDir.value match { + case jar: JarArchive => + // important detail here, even on Windows, Zinc expects the separator within the jar + // to be the system default, (even if in the actual jar file the entry always uses '/'). 
+ // see https://github.com/sbt/zinc/blob/dcddc1f9cfe542d738582c43f4840e17c053ce81/internal/compiler-bridge/src/main/scala/xsbt/JarUtils.scala#L47 + new java.io.File(s"$jar!$pathToClassFile") + case outputDir => + new java.io.File(outputDir.file, pathToClassFile) + } + } + + cb.generatedNonLocalClass(sourceFile, classFile.toPath(), binaryClassName, fullClassName) + end registerProductNames + + val fullClassName = atPhase(sbtExtractDependenciesPhase) { + ExtractDependencies.classNameAsString(cls) + } + val binaryClassName = cls.binaryClassName + registerProductNames(fullClassName, binaryClassName) + + // Register the names of top-level module symbols that emit two class files + val isTopLevelUniqueModule = + cls.owner.is(PackageClass) && cls.is(ModuleClass) && cls.companionClass == NoSymbol + if isTopLevelUniqueModule then + registerProductNames(fullClassName, binaryClassName.stripSuffix(str.MODULE_SUFFIX)) + end recordNonLocalClass override def run(using Context): Unit = { val unit = ctx.compilationUnit @@ -70,13 +130,14 @@ class ExtractAPI extends Phase { ctx.withIncCallback: cb => cb.startSource(sourceFile) - val apiTraverser = new ExtractAPICollector + val nonLocalClassSymbols = ctx.property(NonLocalClassSymbolsInCurrentUnits).get + val apiTraverser = ExtractAPICollector(nonLocalClassSymbols) val classes = apiTraverser.apiSource(unit.tpdTree) val mainClasses = apiTraverser.mainClasses if (ctx.settings.YdumpSbtInc.value) { // Append to existing file that should have been created by ExtractDependencies - val pw = new PrintWriter(File(sourceFile.file.jpath).changeExtension("inc").toFile + val pw = new PrintWriter(File(sourceFile.file.jpath).changeExtension(FileExtension.Inc).toFile .bufferedWriter(append = true), true) try { classes.foreach(source => pw.println(DefaultShowAPI(source))) @@ -94,6 +155,8 @@ object ExtractAPI: val name: String = "sbt-api" val description: String = "sends a representation of the API of classes to sbt" + private val 
NonLocalClassSymbolsInCurrentUnits: Property.Key[mutable.HashSet[Symbol]] = Property.Key() + /** Extracts full (including private members) API representation out of Symbols and Types. * * The exact representation used for each type is not important: the only thing @@ -136,7 +199,7 @@ object ExtractAPI: * without going through an intermediate representation, see * http://www.scala-sbt.org/0.13/docs/Understanding-Recompilation.html#Hashing+an+API+representation */ -private class ExtractAPICollector(using Context) extends ThunkHolder { +private class ExtractAPICollector(nonLocalClassSymbols: mutable.HashSet[Symbol])(using Context) extends ThunkHolder { import tpd.* import xsbti.api @@ -254,6 +317,8 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { childrenOfSealedClass, topLevel, tparams) allNonLocalClassesInSrc += cl + if !sym.isLocal then + nonLocalClassSymbols += sym if (sym.isStatic && !sym.is(Trait) && ctx.platform.hasMainMethod(sym)) { // If sym is an object, all main methods count, otherwise only @static ones count. 
@@ -448,7 +513,7 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { if (sym.isAliasType) api.TypeAlias.of(name, access, modifiers, as.toArray, typeParams, apiType(tpe.bounds.hi)) else { - assert(sym.isAbstractType) + assert(sym.isAbstractOrParamType) api.TypeDeclaration.of(name, access, modifiers, as.toArray, typeParams, apiType(tpe.bounds.lo), apiType(tpe.bounds.hi)) } } @@ -565,6 +630,8 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { case tp: OrType => val s = combineApiTypes(apiType(tp.tp1), apiType(tp.tp2)) withMarker(s, orMarker) + case tp: FlexibleType => + apiType(tp.underlying) case ExprType(resultType) => withMarker(apiType(resultType), byNameMarker) case MatchType(bound, scrut, cases) => diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index a35628dc52e4..dfff5971889e 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -8,7 +8,7 @@ import java.nio.file.Path import java.util.{Arrays, EnumSet} import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.classpath.FileUtils.{isTasty, hasClassExtension, hasTastyExtension} +import dotty.tools.dotc.classpath.FileUtils.{hasClassExtension, hasTastyExtension} import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Decorators.* import dotty.tools.dotc.core.Flags.* @@ -21,7 +21,7 @@ import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.util.{SrcPos, NoSourcePosition} import dotty.tools.io -import dotty.tools.io.{AbstractFile, PlainFile, ZipArchive, NoAbstractFile} +import dotty.tools.io.{AbstractFile, PlainFile, ZipArchive, NoAbstractFile, FileExtension} import xsbti.UseScope import xsbti.api.DependencyContext import xsbti.api.DependencyContext.* @@ -64,7 +64,7 @@ class ExtractDependencies extends Phase { // Check no needed. 
Does not transform trees override def isCheckable: Boolean = false - // when `-Yjava-tasty` is set we actually want to run this phase on Java sources + // when `-Xjava-tasty` is set we actually want to run this phase on Java sources override def skipIfJava(using Context): Boolean = false // This phase should be run directly after `Frontend`, if it is run after @@ -84,7 +84,7 @@ class ExtractDependencies extends Phase { Arrays.sort(deps) Arrays.sort(names) - val pw = io.File(unit.source.file.jpath).changeExtension("inc").toFile.printWriter() + val pw = io.File(unit.source.file.jpath).changeExtension(FileExtension.Inc).toFile.printWriter() // val pw = Console.out try { pw.println("Used Names:") @@ -495,7 +495,7 @@ class DependencyRecorder { if depFile != null then { // Cannot ignore inheritance relationship coming from the same source (see sbt/zinc#417) def allowLocal = depCtx == DependencyByInheritance || depCtx == LocalDependencyByInheritance - val isTasty = depFile.hasTastyExtension + val isTastyOrSig = depFile.hasTastyExtension def processExternalDependency() = { val binaryClassName = depClass.binaryClassName @@ -506,13 +506,13 @@ class DependencyRecorder { binaryDependency(zip.jpath, binaryClassName) case _ => case pf: PlainFile => // The dependency comes from a class file, Zinc handles JRT filesystem - binaryDependency(if isTasty then cachedSiblingClass(pf) else pf.jpath, binaryClassName) + binaryDependency(if isTastyOrSig then cachedSiblingClass(pf) else pf.jpath, binaryClassName) case _ => internalError(s"Ignoring dependency $depFile of unknown class ${depFile.getClass}}", fromClass.srcPos) } } - if isTasty || depFile.hasClassExtension then + if isTastyOrSig || depFile.hasClassExtension then processExternalDependency() else if allowLocal || depFile != sourceFile.file then // We cannot ignore dependencies coming from the same source file because diff --git a/compiler/src/dotty/tools/dotc/sbt/interfaces/IncrementalCallback.java 
b/compiler/src/dotty/tools/dotc/sbt/interfaces/IncrementalCallback.java index 4c6afa113f4f..ebdb1b7b24d4 100644 --- a/compiler/src/dotty/tools/dotc/sbt/interfaces/IncrementalCallback.java +++ b/compiler/src/dotty/tools/dotc/sbt/interfaces/IncrementalCallback.java @@ -7,6 +7,7 @@ /* User code should not implement this interface, it is intended to be a wrapper around xsbti.AnalysisCallback. */ public interface IncrementalCallback { + default void api(SourceFile sourceFile, xsbti.api.ClassLike classApi) { } @@ -36,4 +37,10 @@ default void generatedLocalClass(SourceFile source, Path classFile) { default void generatedNonLocalClass(SourceFile source, Path classFile, String binaryClassName, String srcClassName) { } + + default void apiPhaseCompleted() { + } + + default void dependencyPhaseCompleted() { + } } diff --git a/compiler/src/dotty/tools/dotc/sbt/package.scala b/compiler/src/dotty/tools/dotc/sbt/package.scala index dc0df381f08f..1c6b38b07a84 100644 --- a/compiler/src/dotty/tools/dotc/sbt/package.scala +++ b/compiler/src/dotty/tools/dotc/sbt/package.scala @@ -6,10 +6,29 @@ import dotty.tools.dotc.core.NameOps.stripModuleClassSuffix import dotty.tools.dotc.core.Names.Name import dotty.tools.dotc.core.Names.termName +import interfaces.IncrementalCallback +import dotty.tools.io.FileWriters.BufferingReporter +import dotty.tools.dotc.core.Decorators.em + +import scala.util.chaining.given +import scala.util.control.NonFatal + inline val TermNameHash = 1987 // 300th prime inline val TypeNameHash = 1993 // 301st prime inline val InlineParamHash = 1997 // 302nd prime +def asyncZincPhasesCompleted(cb: IncrementalCallback, pending: Option[BufferingReporter]): BufferingReporter = + val zincReporter = pending match + case Some(buffered) => buffered + case None => BufferingReporter() + try + cb.apiPhaseCompleted() + cb.dependencyPhaseCompleted() + catch + case NonFatal(t) => + zincReporter.exception(em"signaling API and Dependencies phases completion", t) + zincReporter + 
extension (sym: Symbol) /** Mangle a JVM symbol name in a format better suited for internal uses by sbt. diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala index 77eef4564bbf..357202229e50 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala @@ -56,7 +56,7 @@ class ExtractSemanticDB private (phaseMode: ExtractSemanticDB.PhaseMode) extends override def isRunnable(using Context) = import ExtractSemanticDB.{semanticdbTarget, outputDirectory} def writesToOutputJar = semanticdbTarget.isEmpty && outputDirectory.isInstanceOf[JarArchive] - super.isRunnable && ctx.settings.Xsemanticdb.value && !writesToOutputJar + (super.isRunnable || ctx.isBestEffort) && ctx.settings.Xsemanticdb.value && !writesToOutputJar // Check not needed since it does not transform trees override def isCheckable: Boolean = false diff --git a/compiler/src/dotty/tools/dotc/staging/HealType.scala b/compiler/src/dotty/tools/dotc/staging/HealType.scala index 08e1e91b0bad..5a26803c8137 100644 --- a/compiler/src/dotty/tools/dotc/staging/HealType.scala +++ b/compiler/src/dotty/tools/dotc/staging/HealType.scala @@ -35,7 +35,7 @@ class HealType(pos: SrcPos)(using Context) extends TypeMap { case tp: TermRef => val inconsistentRoot = levelInconsistentRootOfPath(tp) if inconsistentRoot.exists then levelError(inconsistentRoot, tp, pos) - else tp + else mapOver(tp) case tp: AnnotatedType => derivedAnnotatedType(tp, apply(tp.parent), tp.annot) case _ => diff --git a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala index 6d445887e1d9..1f9334164496 100644 --- a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala +++ b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala @@ -12,6 +12,7 @@ import Decorators.* import Types.* import util.Spans.Span 
import config.Printers.transforms +import Annotations.ExperimentalAnnotation /** A utility class for generating access proxies. Currently used for * inline accessors and protected accessors. @@ -84,8 +85,7 @@ abstract class AccessProxies { val sym = newSymbol(owner, name, Synthetic | Method, info, coord = accessed.span).entered if accessed.is(Private) then sym.setFlag(Final) else if sym.allOverriddenSymbols.exists(!_.is(Deferred)) then sym.setFlag(Override) - if accessed.hasAnnotation(defn.ExperimentalAnnot) then - sym.addAnnotation(defn.ExperimentalAnnot) + ExperimentalAnnotation.copy(accessed).foreach(sym.addAnnotation) sym } diff --git a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala index 653a5e17990f..60c1bc7c61bb 100644 --- a/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala +++ b/compiler/src/dotty/tools/dotc/transform/BetaReduce.scala @@ -8,6 +8,7 @@ import MegaPhase.* import Symbols.*, Contexts.*, Types.*, Decorators.* import StdNames.nme import ast.TreeTypeMap +import Constants.Constant import scala.collection.mutable.ListBuffer @@ -127,15 +128,20 @@ object BetaReduce: case ref @ TermRef(NoPrefix, _) if isPurePath(arg) => ref.symbol case _ => - val flags = Synthetic | (param.symbol.flags & Erased) - val tpe = + val isByNameArg = param.tpt.tpe.isInstanceOf[ExprType] + val flags = + if isByNameArg then Synthetic | Method | (param.symbol.flags & Erased) + else Synthetic | (param.symbol.flags & Erased) + val tpe0 = if arg.tpe.isBottomType then param.tpe.widenTermRefExpr else if arg.tpe.dealias.isInstanceOf[ConstantType] then arg.tpe.dealias else arg.tpe.widen - val binding = ValDef(newSymbol(ctx.owner, param.name, flags, tpe, coord = arg.span), arg).withSpan(arg.span) - if !(tpe.isInstanceOf[ConstantType] && isPureExpr(arg)) then - bindings += binding - binding.symbol + val tpe = if isByNameArg then ExprType(tpe0) else tpe0 + val bindingSymbol = newSymbol(ctx.owner, param.name, flags, 
tpe, coord = arg.span) + val binding = if isByNameArg then DefDef(bindingSymbol, arg) else ValDef(bindingSymbol, arg) + if isByNameArg || !((tpe.isInstanceOf[ConstantType] || tpe.derivesFrom(defn.UnitClass)) && isPureExpr(arg)) then + bindings += binding.withSpan(arg.span) + bindingSymbol val expansion = TreeTypeMap( oldOwners = ddef.symbol :: Nil, @@ -147,6 +153,8 @@ object BetaReduce: val expansion1 = new TreeMap { override def transform(tree: Tree)(using Context) = tree.tpe.widenTermRefExpr match case ConstantType(const) if isPureExpr(tree) => cpy.Literal(tree)(const) + case tpe: TypeRef if tree.isTerm && tpe.derivesFrom(defn.UnitClass) && isPureExpr(tree) => + cpy.Literal(tree)(Constant(())) case _ => super.transform(tree) }.transform(expansion) diff --git a/compiler/src/dotty/tools/dotc/transform/Bridges.scala b/compiler/src/dotty/tools/dotc/transform/Bridges.scala index 2110ac1464c2..482e5056fad0 100644 --- a/compiler/src/dotty/tools/dotc/transform/Bridges.scala +++ b/compiler/src/dotty/tools/dotc/transform/Bridges.scala @@ -26,8 +26,8 @@ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) { override def isSubParent(parent: Symbol, bc: Symbol)(using Context) = true - // Never consider a bridge if there is a superclass that would contain it - // See run/t2857.scala for a test that would break with a VerifyError otherwise. + // Never consider a bridge if there is a superclass that would contain it + // See run/t2857.scala for a test that would break with a VerifyError otherwise. /** Only use the superclass of `root` as a parent class. 
This means * overriding pairs that have a common implementation in a trait parent diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index 7cff6fa5f1f0..ba77167de736 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -1,5 +1,8 @@ package dotty.tools.dotc.transform +import scala.annotation.tailrec + +import dotty.tools.uncheckedNN import dotty.tools.dotc.ast.tpd import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.ast.tpd.{Inlined, TreeTraverser} @@ -8,6 +11,7 @@ import dotty.tools.dotc.ast.untpd.ImportSelector import dotty.tools.dotc.config.ScalaSettings import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.core.Decorators.{em, i} +import dotty.tools.dotc.core.Denotations.SingleDenotation import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.core.StdNames @@ -20,12 +24,14 @@ import dotty.tools.dotc.core.Types.{AnnotatedType, ConstantType, NoType, TermRef import dotty.tools.dotc.core.Flags.flagsString import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Names.Name +import dotty.tools.dotc.core.NameOps.isReplWrapperName import dotty.tools.dotc.transform.MegaPhase.MiniPhase import dotty.tools.dotc.core.Annotations import dotty.tools.dotc.core.Definitions import dotty.tools.dotc.core.NameKinds.WildcardParamName import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.core.StdNames.nme +import dotty.tools.dotc.util.Spans.Span import scala.math.Ordering @@ -39,8 +45,10 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke import CheckUnused.* import UnusedData.* - private def unusedDataApply[U](f: UnusedData => U)(using Context): Context = - ctx.property(_key).foreach(f) + private inline def unusedDataApply[U](inline f: UnusedData => U)(using Context): Context = + ctx.property(_key) match 
+ case Some(ud) => f(ud) + case None => () ctx override def phaseName: String = CheckUnused.phaseNamePrefix + suffix @@ -85,18 +93,25 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke override def prepareForIdent(tree: tpd.Ident)(using Context): Context = if tree.symbol.exists then - val prefixes = LazyList.iterate(tree.typeOpt.normalizedPrefix)(_.normalizedPrefix).takeWhile(_ != NoType) - .take(10) // Failsafe for the odd case if there was an infinite cycle - for prefix <- prefixes do - unusedDataApply(_.registerUsed(prefix.classSymbol, None)) - unusedDataApply(_.registerUsed(tree.symbol, Some(tree.name))) + unusedDataApply { ud => + @tailrec + def loopOnNormalizedPrefixes(prefix: Type, depth: Int): Unit = + // limit to 10 as failsafe for the odd case where there is an infinite cycle + if depth < 10 && prefix.exists then + ud.registerUsed(prefix.classSymbol, None) + loopOnNormalizedPrefixes(prefix.normalizedPrefix, depth + 1) + + loopOnNormalizedPrefixes(tree.typeOpt.normalizedPrefix, depth = 0) + ud.registerUsed(tree.symbol, Some(tree.name)) + } else if tree.hasType then unusedDataApply(_.registerUsed(tree.tpe.classSymbol, Some(tree.name))) else ctx override def prepareForSelect(tree: tpd.Select)(using Context): Context = - unusedDataApply(_.registerUsed(tree.symbol, Some(tree.name))) + val name = tree.removeAttachment(OriginalName) + unusedDataApply(_.registerUsed(tree.symbol, name, includeForImport = tree.qualifier.span.isSynthetic)) override def prepareForBlock(tree: tpd.Block)(using Context): Context = pushInBlockTemplatePackageDef(tree) @@ -113,9 +128,8 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke traverseAnnotations(tree.symbol) if !tree.symbol.is(Module) then ud.registerDef(tree) - if tree.name.mangledString.startsWith(nme.derived.mangledString + "$") - && tree.typeOpt != NoType then - ud.registerUsed(tree.typeOpt.typeSymbol, None, true) + if tree.name.startsWith("derived$") && 
tree.typeOpt != NoType then + ud.registerUsed(tree.typeOpt.typeSymbol, None, isDerived = true) ud.addIgnoredUsage(tree.symbol) } @@ -196,9 +210,6 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke } ctx - private def newCtx(tree: tpd.Tree)(using Context) = - if tree.symbol.exists then ctx.withOwner(tree.symbol) else ctx - /** * This traverse is the **main** component of this phase * @@ -281,7 +292,7 @@ class CheckUnused private (phaseMode: CheckUnused.PhaseMode, suffix: String, _ke /** Do the actual reporting given the result of the anaylsis */ private def reportUnused(res: UnusedData.UnusedResult)(using Context): Unit = - res.warnings.toList.sortBy(_.pos.line)(using Ordering[Int]).foreach { s => + res.warnings.toList.sortBy(_.pos.span.point)(using Ordering[Int]).foreach { s => s match case UnusedSymbol(t, _, WarnTypes.Imports) => report.warning(s"unused import", t) @@ -327,6 +338,8 @@ object CheckUnused: */ private val _key = Property.StickyKey[UnusedData] + val OriginalName = Property.StickyKey[Name] + class PostTyper extends CheckUnused(PhaseMode.Aggregate, "PostTyper", _key) class PostInlining extends CheckUnused(PhaseMode.Report, "PostInlining", _key) @@ -347,24 +360,24 @@ object CheckUnused: var unusedAggregate: Option[UnusedResult] = None /* IMPORTS */ - private val impInScope = MutStack(MutList[tpd.Import]()) + private val impInScope = MutStack(MutList[ImportSelectorData]()) /** * We store the symbol along with their accessibility without import. 
* Accessibility to their definition in outer context/scope * * See the `isAccessibleAsIdent` extension method below in the file */ - private val usedInScope = MutStack(MutSet[(Symbol,Boolean, Option[Name], Boolean)]()) - private val usedInPosition = MutSet[(SrcPos, Name)]() + private val usedInScope = MutStack(MutSet[(Symbol, Option[Name], Boolean)]()) + private val usedInPosition = MutMap.empty[Name, MutSet[Symbol]] /* unused import collected during traversal */ - private val unusedImport = MutSet[ImportSelector]() + private val unusedImport = MutList.empty[ImportSelectorData] /* LOCAL DEF OR VAL / Private Def or Val / Pattern variables */ - private val localDefInScope = MutSet[tpd.MemberDef]() - private val privateDefInScope = MutSet[tpd.MemberDef]() - private val explicitParamInScope = MutSet[tpd.MemberDef]() - private val implicitParamInScope = MutSet[tpd.MemberDef]() - private val patVarsInScope = MutSet[tpd.Bind]() + private val localDefInScope = MutList.empty[tpd.MemberDef] + private val privateDefInScope = MutList.empty[tpd.MemberDef] + private val explicitParamInScope = MutList.empty[tpd.MemberDef] + private val implicitParamInScope = MutList.empty[tpd.MemberDef] + private val patVarsInScope = MutList.empty[tpd.Bind] /** All variables sets*/ private val setVars = MutSet[Symbol]() @@ -397,16 +410,27 @@ object CheckUnused: * The optional name will be used to target the right import * as the same element can be imported with different renaming */ - def registerUsed(sym: Symbol, name: Option[Name], isDerived: Boolean = false)(using Context): Unit = - if !isConstructorOfSynth(sym) && !doNotRegister(sym) then - if sym.isConstructor && sym.exists then - registerUsed(sym.owner, None) // constructor are "implicitly" imported with the class + def registerUsed(sym: Symbol, name: Option[Name], includeForImport: Boolean = true, isDerived: Boolean = false)(using Context): Unit = + if sym.exists && !isConstructorOfSynth(sym) && !doNotRegister(sym) then + if 
sym.isConstructor then + registerUsed(sym.owner, None, includeForImport) // constructor are "implicitly" imported with the class else - usedInScope.top += ((sym, sym.isAccessibleAsIdent, name, isDerived)) - usedInScope.top += ((sym.companionModule, sym.isAccessibleAsIdent, name, isDerived)) - usedInScope.top += ((sym.companionClass, sym.isAccessibleAsIdent, name, isDerived)) + // If the symbol is accessible in this scope without an import, do not register it for unused import analysis + val includeForImport1 = + includeForImport + && (name.exists(_.toTermName != sym.name.toTermName) || !sym.isAccessibleAsIdent) + + def addIfExists(sym: Symbol): Unit = + if sym.exists then + usedDef += sym + if includeForImport1 then + usedInScope.top += ((sym, name, isDerived)) + addIfExists(sym) + addIfExists(sym.companionModule) + addIfExists(sym.companionClass) if sym.sourcePos.exists then - name.map(n => usedInPosition += ((sym.sourcePos, n))) + for n <- name do + usedInPosition.getOrElseUpdate(n, MutSet.empty) += sym /** Register a symbol that should be ignored */ def addIgnoredUsage(sym: Symbol)(using Context): Unit = @@ -421,11 +445,28 @@ object CheckUnused: /** Register an import */ def registerImport(imp: tpd.Import)(using Context): Unit = - if !tpd.languageImport(imp.expr).nonEmpty && !imp.isGeneratedByEnum && !isTransparentAndInline(imp) then - impInScope.top += imp - unusedImport ++= imp.selectors.filter { s => - !shouldSelectorBeReported(imp, s) && !isImportExclusion(s) && !isImportIgnored(imp, s) - } + if + !tpd.languageImport(imp.expr).nonEmpty + && !imp.isGeneratedByEnum + && !isTransparentAndInline(imp) + && currScopeType.top != ScopeType.ReplWrapper // #18383 Do not report top-level import's in the repl as unused + then + val qualTpe = imp.expr.tpe + + // Put wildcard imports at the end, because they have lower priority within one Import + val reorderdSelectors = + val (wildcardSels, nonWildcardSels) = imp.selectors.partition(_.isWildcard) + nonWildcardSels ::: 
wildcardSels + + val newDataInScope = + for sel <- reorderdSelectors yield + val data = new ImportSelectorData(qualTpe, sel) + if shouldSelectorBeReported(imp, sel) || isImportExclusion(sel) || isImportIgnored(imp, sel) then + // Immediately mark the selector as used + data.markUsed() + data + impInScope.top.prependAll(newDataInScope) + end registerImport /** Register (or not) some `val` or `def` according to the context, scope and flags */ def registerDef(memDef: tpd.MemberDef)(using Context): Unit = @@ -462,42 +503,27 @@ object CheckUnused: * - If there are imports in this scope check for unused ones */ def popScope()(using Context): Unit = - // used symbol in this scope - val used = usedInScope.pop().toSet - // used imports in this scope - val imports = impInScope.pop() - val kept = used.filterNot { (sym, isAccessible, optName, isDerived) => - // keep the symbol for outer scope, if it matches **no** import - // This is the first matching wildcard selector - var selWildCard: Option[ImportSelector] = None - - val matchedExplicitImport = imports.exists { imp => - sym.isInImport(imp, isAccessible, optName, isDerived) match - case None => false - case optSel@Some(sel) if sel.isWildcard => - if selWildCard.isEmpty then selWildCard = optSel - // We keep wildcard symbol for the end as they have the least precedence - false - case Some(sel) => - unusedImport -= sel - true + currScopeType.pop() + val usedInfos = usedInScope.pop() + val selDatas = impInScope.pop() + + for usedInfo <- usedInfos do + val (sym, optName, isDerived) = usedInfo + val usedData = selDatas.find { selData => + sym.isInImport(selData, optName, isDerived) } - if !matchedExplicitImport && selWildCard.isDefined then - unusedImport -= selWildCard.get - true // a matching import exists so the symbol won't be kept for outer scope - else - matchedExplicitImport - } - - // if there's an outer scope - if usedInScope.nonEmpty then - // we keep the symbols not referencing an import in this scope - // as it can 
be the only reference to an outer import - usedInScope.top ++= kept - // register usage in this scope for other warnings at the end of the phase - usedDef ++= used.map(_._1) - // retrieve previous scope type - currScopeType.pop + usedData match + case Some(data) => + data.markUsed() + case None => + // Propagate the symbol one level up + if usedInScope.nonEmpty then + usedInScope.top += usedInfo + end for // each in `used` + + for selData <- selDatas do + if !selData.isUsed then + unusedImport += selData end popScope /** @@ -508,72 +534,74 @@ object CheckUnused: def getUnused(using Context): UnusedResult = popScope() + + def isUsedInPosition(name: Name, span: Span): Boolean = + usedInPosition.get(name) match + case Some(syms) => syms.exists(sym => span.contains(sym.span)) + case None => false + val sortedImp = if ctx.settings.WunusedHas.imports || ctx.settings.WunusedHas.strictNoImplicitWarn then - unusedImport.map(d => UnusedSymbol(d.srcPos, d.name, WarnTypes.Imports)).toList + unusedImport.toList + .map(d => UnusedSymbol(d.selector.srcPos, d.selector.name, WarnTypes.Imports)) else Nil // Partition to extract unset local variables from usedLocalDefs val (usedLocalDefs, unusedLocalDefs) = if ctx.settings.WunusedHas.locals then - localDefInScope.partition(d => d.symbol.usedDefContains) + localDefInScope.toList.partition(d => d.symbol.usedDefContains) else (Nil, Nil) val sortedLocalDefs = unusedLocalDefs - .filterNot(d => usedInPosition.exists { case (pos, name) => d.span.contains(pos.span) && name == d.symbol.name}) + .filterNot(d => isUsedInPosition(d.symbol.name, d.span)) .filterNot(d => containsSyntheticSuffix(d.symbol)) - .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.LocalDefs)).toList + .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.LocalDefs)) val unsetLocalDefs = usedLocalDefs.filter(isUnsetVarDef).map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.UnsetLocals)).toList val sortedExplicitParams = if ctx.settings.WunusedHas.explicits then - 
explicitParamInScope + explicitParamInScope.toList .filterNot(d => d.symbol.usedDefContains) - .filterNot(d => usedInPosition.exists { case (pos, name) => d.span.contains(pos.span) && name == d.symbol.name}) + .filterNot(d => isUsedInPosition(d.symbol.name, d.span)) .filterNot(d => containsSyntheticSuffix(d.symbol)) - .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.ExplicitParams)).toList + .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.ExplicitParams)) else Nil val sortedImplicitParams = if ctx.settings.WunusedHas.implicits then - implicitParamInScope + implicitParamInScope.toList .filterNot(d => d.symbol.usedDefContains) .filterNot(d => containsSyntheticSuffix(d.symbol)) - .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.ImplicitParams)).toList + .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.ImplicitParams)) else Nil // Partition to extract unset private variables from usedPrivates val (usedPrivates, unusedPrivates) = if ctx.settings.WunusedHas.privates then - privateDefInScope.partition(d => d.symbol.usedDefContains) + privateDefInScope.toList.partition(d => d.symbol.usedDefContains) else (Nil, Nil) - val sortedPrivateDefs = unusedPrivates.filterNot(d => containsSyntheticSuffix(d.symbol)).map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.PrivateMembers)).toList - val unsetPrivateDefs = usedPrivates.filter(isUnsetVarDef).map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.UnsetPrivates)).toList + val sortedPrivateDefs = unusedPrivates.filterNot(d => containsSyntheticSuffix(d.symbol)).map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.PrivateMembers)) + val unsetPrivateDefs = usedPrivates.filter(isUnsetVarDef).map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.UnsetPrivates)) val sortedPatVars = if ctx.settings.WunusedHas.patvars then - patVarsInScope + patVarsInScope.toList .filterNot(d => d.symbol.usedDefContains) .filterNot(d => containsSyntheticSuffix(d.symbol)) - .filterNot(d => usedInPosition.exists { case (pos, name) => 
d.span.contains(pos.span) && name == d.symbol.name}) - .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.PatVars)).toList + .filterNot(d => isUsedInPosition(d.symbol.name, d.span)) + .map(d => UnusedSymbol(d.namePos, d.name, WarnTypes.PatVars)) else Nil val warnings = - val unsorted = - sortedImp ::: - sortedLocalDefs ::: - sortedExplicitParams ::: - sortedImplicitParams ::: - sortedPrivateDefs ::: - sortedPatVars ::: - unsetLocalDefs ::: - unsetPrivateDefs - unsorted.sortBy { s => - val pos = s.pos.sourcePos - (pos.line, pos.column) - } + sortedImp ::: + sortedLocalDefs ::: + sortedExplicitParams ::: + sortedImplicitParams ::: + sortedPrivateDefs ::: + sortedPatVars ::: + unsetLocalDefs ::: + unsetPrivateDefs UnusedResult(warnings.toSet) end getUnused //============================ HELPERS ==================================== @@ -668,45 +696,45 @@ object CheckUnused: extension (sym: Symbol) /** is accessible without import in current context */ private def isAccessibleAsIdent(using Context): Boolean = - sym.exists && - ctx.outersIterator.exists{ c => - c.owner == sym.owner - || sym.owner.isClass && c.owner.isClass - && c.owner.thisType.baseClasses.contains(sym.owner) - && c.owner.thisType.member(sym.name).alternatives.contains(sym) - } + ctx.outersIterator.exists{ c => + c.owner == sym.owner + || sym.owner.isClass && c.owner.isClass + && c.owner.thisType.baseClasses.contains(sym.owner) + && c.owner.thisType.member(sym.name).alternatives.contains(sym) + } /** Given an import and accessibility, return selector that matches import<->symbol */ - private def isInImport(imp: tpd.Import, isAccessible: Boolean, symName: Option[Name], isDerived: Boolean)(using Context): Option[ImportSelector] = - val tpd.Import(qual, sels) = imp - val dealiasedSym = dealias(sym) - val simpleSelections = qual.tpe.member(sym.name).alternatives - val typeSelections = sels.flatMap(n => qual.tpe.member(n.name.toTypeName).alternatives) - val termSelections = sels.flatMap(n => 
qual.tpe.member(n.name.toTermName).alternatives) - val sameTermPath = qual.isTerm && sym.exists && sym.owner.isType && qual.tpe.typeSymbol == sym.owner.asType - val selectionsToDealias = typeSelections ::: termSelections - val renamedSelection = if sameTermPath then sels.find(sel => sel.imported.name == sym.name) else None - val qualHasSymbol = simpleSelections.map(_.symbol).contains(sym) || (simpleSelections ::: selectionsToDealias).map(_.symbol).map(dealias).contains(dealiasedSym) || renamedSelection.isDefined - def selector = sels.find(sel => (sel.name.toTermName == sym.name || sel.name.toTypeName == sym.name) && symName.map(n => n.toTermName == sel.rename).getOrElse(true)) - def dealiasedSelector = if(isDerived) sels.flatMap(sel => selectionsToDealias.map(m => (sel, m.symbol))).collect { - case (sel, sym) if dealias(sym) == dealiasedSym => sel - }.headOption else None - def givenSelector = if sym.is(Given) || sym.is(Implicit) - then sels.filter(sel => sel.isGiven && !sel.bound.isEmpty).find(sel => sel.boundTpe =:= sym.info) - else None - def wildcard = sels.find(sel => sel.isWildcard && ((sym.is(Given) == sel.isGiven && sel.bound.isEmpty) || sym.is(Implicit))) - if qualHasSymbol && (!isAccessible || sym.isRenamedSymbol(symName)) && sym.exists then - selector.orElse(dealiasedSelector).orElse(givenSelector).orElse(wildcard).orElse(renamedSelection) // selector with name or wildcard (or given) + private def isInImport(selData: ImportSelectorData, altName: Option[Name], isDerived: Boolean)(using Context): Boolean = + assert(sym.exists) + + val selector = selData.selector + + if !selector.isWildcard then + if altName.exists(explicitName => selector.rename != explicitName.toTermName) then + // if there is an explicit name, it must match + false + else + if isDerived then + // See i15503i.scala, grep for "package foo.test.i17156" + selData.allSymbolsDealiasedForNamed.contains(dealias(sym)) + else + selData.allSymbolsForNamed.contains(sym) else - None - - private def 
isRenamedSymbol(symNameInScope: Option[Name])(using Context) = - sym.name != nme.NO_NAME && symNameInScope.exists(_.toSimpleName != sym.name.toSimpleName) + // Wildcard + if !selData.qualTpe.member(sym.name).hasAltWith(_.symbol == sym) then + // The qualifier does not have the target symbol as a member + false + else + if selector.isGiven then + // Further check that the symbol is a given or implicit and conforms to the bound + sym.isOneOf(Given | Implicit) + && (selector.bound.isEmpty || sym.info.finalResultType <:< selector.boundTpe) + else + // Normal wildcard, check that the symbol is not a given (but can be implicit) + !sym.is(Given) + end if + end isInImport - private def dealias(symbol: Symbol)(using Context): Symbol = - if(symbol.isType && symbol.asType.denot.isAliasType) then - symbol.asType.typeRef.dealias.typeSymbol - else symbol /** Annotated with @unused */ private def isUnusedAnnot(using Context): Boolean = sym.annotations.exists(a => a.symbol == ctx.definitions.UnusedAnnot) @@ -791,23 +819,53 @@ object CheckUnused: end UnusedData private object UnusedData: - enum ScopeType: - case Local - case Template - case Other - - object ScopeType: - /** return the scope corresponding to the enclosing scope of the given tree */ - def fromTree(tree: tpd.Tree): ScopeType = tree match - case _:tpd.Template => Template - case _:tpd.Block => Local - case _ => Other - - case class UnusedSymbol(pos: SrcPos, name: Name, warnType: WarnTypes) - /** A container for the results of the used elements analysis */ - case class UnusedResult(warnings: Set[UnusedSymbol]) - object UnusedResult: - val Empty = UnusedResult(Set.empty) + enum ScopeType: + case Local + case Template + case ReplWrapper + case Other + + object ScopeType: + /** return the scope corresponding to the enclosing scope of the given tree */ + def fromTree(tree: tpd.Tree)(using Context): ScopeType = tree match + case tree: tpd.Template => if tree.symbol.name.isReplWrapperName then ReplWrapper else Template + case 
_:tpd.Block => Local + case _ => Other + + final class ImportSelectorData(val qualTpe: Type, val selector: ImportSelector): + private var myUsed: Boolean = false + + def markUsed(): Unit = myUsed = true + + def isUsed: Boolean = myUsed + + private var myAllSymbols: Set[Symbol] | Null = null + + def allSymbolsForNamed(using Context): Set[Symbol] = + if myAllSymbols == null then + val allDenots = qualTpe.member(selector.name).alternatives ::: qualTpe.member(selector.name.toTypeName).alternatives + myAllSymbols = allDenots.map(_.symbol).toSet + myAllSymbols.uncheckedNN + + private var myAllSymbolsDealiased: Set[Symbol] | Null = null + + def allSymbolsDealiasedForNamed(using Context): Set[Symbol] = + if myAllSymbolsDealiased == null then + myAllSymbolsDealiased = allSymbolsForNamed.map(sym => dealias(sym)) + myAllSymbolsDealiased.uncheckedNN + end ImportSelectorData + + case class UnusedSymbol(pos: SrcPos, name: Name, warnType: WarnTypes) + /** A container for the results of the used elements analysis */ + case class UnusedResult(warnings: Set[UnusedSymbol]) + object UnusedResult: + val Empty = UnusedResult(Set.empty) + end UnusedData -end CheckUnused + private def dealias(symbol: Symbol)(using Context): Symbol = + if symbol.isType && symbol.asType.denot.isAliasType then + symbol.asType.typeRef.dealias.typeSymbol + else + symbol +end CheckUnused diff --git a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala index 80115ca651bb..c31b2673e04a 100644 --- a/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala +++ b/compiler/src/dotty/tools/dotc/transform/ContextFunctionResults.scala @@ -4,6 +4,7 @@ package transform import core.* import Contexts.*, Symbols.*, Types.*, Annotations.*, Constants.*, Phases.* +import Decorators.* import StdNames.nme import ast.untpd import ast.tpd.* @@ -115,11 +116,9 @@ object ContextFunctionResults: else tree match case Select(qual, name) 
=> if name == nme.apply then - qual.tpe.nn.dealias match + qual.tpe.nn.widenDealias match case defn.FunctionTypeOfMethod(mt) if mt.isContextualMethod => integrateSelect(qual, n + 1) - case _ if defn.isContextFunctionClass(tree.symbol.maybeOwner) => // for TermRefs - integrateSelect(qual, n + 1) case _ => n > 0 && contextResultCount(tree.symbol) >= n else diff --git a/compiler/src/dotty/tools/dotc/transform/CookComments.scala b/compiler/src/dotty/tools/dotc/transform/CookComments.scala index d443e31fdc39..5dec0fff1e39 100644 --- a/compiler/src/dotty/tools/dotc/transform/CookComments.scala +++ b/compiler/src/dotty/tools/dotc/transform/CookComments.scala @@ -12,7 +12,7 @@ class CookComments extends MegaPhase.MiniPhase { override def description: String = CookComments.description override def transformTypeDef(tree: tpd.TypeDef)(using Context): tpd.Tree = { - if (ctx.settings.YcookComments.value && tree.isClassDef) { + if (ctx.settings.XcookComments.value && tree.isClassDef) { val cls = tree.symbol val cookingCtx = ctx.localContext(tree, cls).setNewScope val template = tree.rhs.asInstanceOf[tpd.Template] diff --git a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala index b98d7d525089..ae2fc578728f 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala @@ -293,7 +293,7 @@ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase => val element = array.elemType.hiBound // T if element <:< defn.AnyRefType - || ctx.mode.is(Mode.SafeNulls) && element.stripNull <:< defn.AnyRefType + || ctx.mode.is(Mode.SafeNulls) && element.stripNull() <:< defn.AnyRefType || element.typeSymbol.isPrimitiveValueClass then array else defn.ArrayOf(TypeBounds.upper(AndType(element, defn.AnyRefType))) // Array[? 
<: T & AnyRef] diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index 9fdffb0ed537..a25a2fcb5c6d 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -13,6 +13,7 @@ import core.Types.* import core.Names.* import core.StdNames.* import core.NameOps.* +import core.Periods.currentStablePeriod import core.NameKinds.{AdaptedClosureName, BodyRetainerName, DirectMethName} import core.Scopes.newScopeWith import core.Decorators.* @@ -132,7 +133,7 @@ class Erasure extends Phase with DenotTransformer { } case ref: JointRefDenotation => new UniqueRefDenotation( - ref.symbol, transformInfo(ref.symbol, ref.symbol.info), ref.validFor, ref.prefix) + ref.symbol, transformInfo(ref.symbol, ref.symbol.info), currentStablePeriod, ref.prefix) case _ => ref.derivedSingleDenotation(ref.symbol, transformInfo(ref.symbol, ref.symbol.info)) } @@ -566,7 +567,13 @@ object Erasure { case Some(annot) => val message = annot.argumentConstant(0) match case Some(c) => - c.stringValue.toMessage + val addendum = tree match + case tree: RefTree + if tree.symbol == defn.Compiletime_deferred && tree.name != nme.deferred => + i".\nNote that `deferred` can only be used under its own name when implementing a given in a trait; `${tree.name}` is not accepted." + case _ => + "" + (c.stringValue ++ addendum).toMessage case _ => em"""Reference to ${tree.symbol.showLocated} should not have survived, |it should have been processed and eliminated during expansion of an enclosing macro or term erasure.""" @@ -593,9 +600,9 @@ object Erasure { def erasedDef(sym: Symbol)(using Context): Tree = if sym.isClass then - // We cannot simply drop erased classes, since then they would not generate classfiles - // and would not be visible under separate compilation. So we transform them to - // empty interfaces instead. 
+ // We cannot simply drop erased classes, since then they would not generate classfiles + // and would not be visible under separate compilation. So we transform them to + // empty interfaces instead. tpd.ClassDef(sym.asClass, DefDef(sym.primaryConstructor.asTerm), Nil) else if sym.owner.isClass then sym.dropAfter(erasurePhase) @@ -666,7 +673,7 @@ object Erasure { */ override def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { if tree.name == nme.apply && integrateSelect(tree) then - return typed(tree.qualifier, pt) + return typed(tree.qualifier, pt) val qual1 = typed(tree.qualifier, AnySelectionProto) diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala index b976dfaa2f9f..15dfda845389 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala @@ -343,25 +343,12 @@ object ExplicitOuter { private final val HoistableFlags = Method | Lazy | Module /** The outer prefix implied by type `tpe` */ - private def outerPrefix(tpe: Type)(using Context): Type = tpe match { - case tpe: TypeRef => - tpe.symbol match { - case cls: ClassSymbol => - if (tpe.prefix eq NoPrefix) cls.owner.enclosingClass.thisType - else tpe.prefix - case _ => - // Need to be careful to dealias before erasure, otherwise we lose prefixes. - atPhaseNoLater(erasurePhase)(outerPrefix(tpe.underlying)) - // underlying is fine here and below since we are calling this after erasure. - // However, there is some weird stuff going on with parboiled2 where an - // AppliedType with a type alias as constructor is fed to outerPrefix. - // For some other unknown reason this works with underlying but not with superType. - // I was not able to minimize the problem and parboiled2 spits out way too much - // macro generated code to be able to pinpoint the root problem. 
- } + private def outerPrefix(tpe: Type)(using Context): Type = tpe match + case tpe: TypeRef if tpe.symbol.isClass => + if tpe.prefix eq NoPrefix then tpe.symbol.owner.enclosingClass.thisType + else tpe.prefix case tpe: TypeProxy => - outerPrefix(tpe.underlying) - } + atPhaseNoLater(erasurePhase)(outerPrefix(tpe.superType)) /** It's possible (i1755.scala gives an example) that the type * given by outerPrefix contains a This-reference to a module outside diff --git a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala index c75ac9982317..217c843c4e50 100644 --- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala +++ b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala @@ -445,7 +445,7 @@ object GenericSignatures { } } - private def needsJavaSig(tp: Type, throwsArgs: List[Type])(using Context): Boolean = !ctx.settings.YnoGenericSig.value && { + private def needsJavaSig(tp: Type, throwsArgs: List[Type])(using Context): Boolean = !ctx.settings.XnoGenericSig.value && { def needs(tp: Type) = (new NeedsSigCollector).apply(false, tp) needs(tp) || throwsArgs.exists(needs) } diff --git a/compiler/src/dotty/tools/dotc/transform/Inlining.scala b/compiler/src/dotty/tools/dotc/transform/Inlining.scala index 907fe948ac30..335d5a38931a 100644 --- a/compiler/src/dotty/tools/dotc/transform/Inlining.scala +++ b/compiler/src/dotty/tools/dotc/transform/Inlining.scala @@ -1,20 +1,24 @@ package dotty.tools.dotc package transform +import ast.tpd +import ast.Trees.* +import ast.TreeMapWithTrackedStats import core.* import Flags.* +import Decorators.* import Contexts.* import Symbols.* +import Decorators.* +import config.Printers.inlining +import DenotTransformers.IdentityDenotTransformer +import MacroAnnotations.hasMacroAnnotation +import inlines.Inlines +import quoted.* +import staging.StagingLevel +import util.Property -import dotty.tools.dotc.ast.tpd -import 
dotty.tools.dotc.ast.Trees.* -import dotty.tools.dotc.quoted.* -import dotty.tools.dotc.inlines.Inlines -import dotty.tools.dotc.ast.TreeMapWithImplicits -import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer -import dotty.tools.dotc.staging.StagingLevel - -import scala.collection.mutable.ListBuffer +import scala.collection.mutable /** Inlines all calls to inline methods that are not in an inline method or a quote */ class Inlining extends MacroTransform, IdentityDenotTransformer { @@ -56,44 +60,23 @@ class Inlining extends MacroTransform, IdentityDenotTransformer { def newTransformer(using Context): Transformer = new Transformer { override def transform(tree: tpd.Tree)(using Context): tpd.Tree = - new InliningTreeMap().transform(tree) + InliningTreeMap().transform(tree) } - private class InliningTreeMap extends TreeMapWithImplicits { + private class InliningTreeMap extends TreeMapWithTrackedStats { /** List of top level classes added by macro annotation in a package object. * These are added to the PackageDef that owns this particular package object. 
*/ - private val newTopClasses = MutableSymbolMap[ListBuffer[Tree]]() + private val newTopClasses = MutableSymbolMap[mutable.ListBuffer[Tree]]() override def transform(tree: Tree)(using Context): Tree = { tree match case tree: MemberDef => - if tree.symbol.is(Inline) then tree - else if tree.symbol.is(Param) then super.transform(tree) - else if - !tree.symbol.isPrimaryConstructor - && StagingLevel.level == 0 - && MacroAnnotations.hasMacroAnnotation(tree.symbol) - then - val trees = (new MacroAnnotations(self)).expandAnnotations(tree) - val trees1 = trees.map(super.transform) - - // Find classes added to the top level from a package object - val (topClasses, trees2) = - if ctx.owner.isPackageObject then trees1.partition(_.symbol.owner == ctx.owner.owner) - else (Nil, trees1) - if topClasses.nonEmpty then - newTopClasses.getOrElseUpdate(ctx.owner.owner, new ListBuffer) ++= topClasses - - flatTree(trees2) - else super.transform(tree) + // Fetch the latest tracked tree (It might have already been transformed by its companion) + transformMemberDef(getTracked(tree.symbol).getOrElse(tree)) case _: Typed | _: Block => super.transform(tree) - case _ if Inlines.needsInlining(tree) => - val tree1 = super.transform(tree) - if tree1.tpe.isError then tree1 - else Inlines.inlineCall(tree1) case _: PackageDef => super.transform(tree) match case tree1: PackageDef => @@ -106,9 +89,60 @@ class Inlining extends MacroTransform, IdentityDenotTransformer { case tree1 => tree1 case _ => if tree.isType then tree + else if Inlines.needsInlining(tree) then + tree match + case tree: UnApply => + val fun1 = Inlines.inlinedUnapplyFun(tree.fun) + super.transform(cpy.UnApply(tree)(fun = fun1)) + case _ => + val tree1 = super.transform(tree) + if tree1.tpe.isError then tree1 + else Inlines.inlineCall(tree1) else super.transform(tree) } + + private def transformMemberDef(tree: MemberDef)(using Context) : Tree = + if tree.symbol.is(Inline) then tree + else if tree.symbol.is(Param) then + 
super.transform(tree) + else if + !tree.symbol.isPrimaryConstructor + && StagingLevel.level == 0 + && tree.symbol.hasMacroAnnotation + then + // Fetch the companion's tree + val companionSym = + if tree.symbol.is(ModuleClass) then tree.symbol.companionClass + else if tree.symbol.is(ModuleVal) then NoSymbol + else tree.symbol.companionModule.moduleClass + + // Expand and process MacroAnnotations + val companion = getTracked(companionSym) + val (trees, newCompanion) = MacroAnnotations.expandAnnotations(tree, companion) + + // Enter the new symbols & Update the tracked trees + (newCompanion.toList ::: trees).foreach: tree => + MacroAnnotations.enterMissingSymbols(tree, self) + + // Perform inlining on the expansion of the annotations + val trees1 = trees.map(super.transform) + trees1.foreach(updateTracked) + if newCompanion ne companion then + newCompanion.map(super.transform).foreach(updateTracked) + + // Find classes added to the top level from a package object + val (topClasses, trees2) = + if ctx.owner.isPackageObject then trees1.partition(_.symbol.owner == ctx.owner.owner) + else (Nil, trees1) + if topClasses.nonEmpty then + newTopClasses.getOrElseUpdate(ctx.owner.owner, new mutable.ListBuffer) ++= topClasses + flatTree(trees2) + else + updateTracked(super.transform(tree)) + end transformMemberDef + } + } object Inlining: diff --git a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala index 939497caf31c..6625190661e3 100644 --- a/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala +++ b/compiler/src/dotty/tools/dotc/transform/MacroAnnotations.scala @@ -3,19 +3,19 @@ package transform import scala.language.unsafeNulls -import dotty.tools.dotc.ast.tpd -import dotty.tools.dotc.ast.Trees.* -import dotty.tools.dotc.config.Printers.{macroAnnot => debug} -import dotty.tools.dotc.core.Annotations.* -import dotty.tools.dotc.core.Contexts.* -import dotty.tools.dotc.core.Decorators.* 
-import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer -import dotty.tools.dotc.core.Flags.* -import dotty.tools.dotc.core.MacroClassLoader -import dotty.tools.dotc.core.Symbols.* -import dotty.tools.dotc.core.Types.* -import dotty.tools.dotc.quoted.* -import dotty.tools.dotc.util.SrcPos +import ast.tpd +import ast.Trees.* +import config.Printers.macroAnnot as debug +import core.Annotations.* +import core.Contexts.* +import core.Decorators.* +import core.DenotTransformers.DenotTransformer +import core.Flags.* +import core.MacroClassLoader +import core.Symbols.* +import core.Types.* +import quoted.* +import util.SrcPos import scala.quoted.runtime.impl.{QuotesImpl, SpliceScope} import scala.quoted.Quotes @@ -23,94 +23,118 @@ import scala.util.control.NonFatal import java.lang.reflect.InvocationTargetException -class MacroAnnotations(phase: IdentityDenotTransformer): +object MacroAnnotations: import tpd.* - import MacroAnnotations.* + + extension (annot: Annotation) + /** Is this an annotation that implements `scala.annation.MacroAnnotation` */ + def isMacroAnnotation(using Context): Boolean = + annot.tree.symbol.maybeOwner.derivesFrom(defn.MacroAnnotationClass) + end extension + + extension (sym: Symbol) + /** Is this symbol annotated with an annotation that implements `scala.annation.MacroAnnotation` */ + def hasMacroAnnotation(using Context): Boolean = + sym.getAnnotation(defn.MacroAnnotationClass).isDefined + end extension /** Expands every macro annotation that is on this tree. * Returns a list with transformed definition and any added definitions. 
*/ - def expandAnnotations(tree: MemberDef)(using Context): List[DefTree] = - if !hasMacroAnnotation(tree.symbol) then - List(tree) - else if tree.symbol.is(Module) && !tree.symbol.isClass then - // only class is transformed - List(tree) + def expandAnnotations(tree: MemberDef, companion: Option[MemberDef])(using Context): (List[MemberDef], Option[MemberDef]) = + if !tree.symbol.hasMacroAnnotation then + (List(tree), companion) + else if tree.symbol.is(ModuleVal) then + // only module classes are transformed + (List(tree), companion) else if tree.symbol.isType && !tree.symbol.isClass then report.error("macro annotations are not supported on type", tree) - List(tree) + (List(tree), companion) else debug.println(i"Expanding macro annotations of:\n$tree") - val macroInterpreter = new Interpreter(tree.srcPos, MacroClassLoader.fromContext) - val allTrees = List.newBuilder[DefTree] - var insertedAfter: List[List[DefTree]] = Nil + val prefixedTrees = List.newBuilder[MemberDef] // Apply all macro annotation to `tree` and collect new definitions in order - val transformedTree: DefTree = tree.symbol.annotations.foldLeft(tree) { (tree, annot) => - if isMacroAnnotation(annot) then - debug.println(i"Expanding macro annotation: ${annot}") - - // Interpret call to `new myAnnot(..).transform(using )()` - val transformedTrees = callMacro(macroInterpreter, tree, annot) - transformedTrees.span(_.symbol != tree.symbol) match - case (prefixed, newTree :: suffixed) => - allTrees ++= prefixed - insertedAfter = suffixed :: insertedAfter - for prefixedTree <- prefixed do - checkMacroDef(prefixedTree, tree, annot) - for suffixedTree <- suffixed do - checkMacroDef(suffixedTree, tree, annot) - TreeChecker.checkMacroGeneratedTree(tree, newTree) - newTree - case (Nil, Nil) => - report.error(i"Unexpected `Nil` returned by `(${annot.tree}).transform(..)` during macro expansion", annot.tree.srcPos) - tree - case (_, Nil) => - report.error(i"Transformed tree for ${tree} was not return by 
`(${annot.tree}).transform(..)` during macro expansion", annot.tree.srcPos) - tree - else - tree - } - - allTrees += transformedTree - insertedAfter.foreach(allTrees.++=) - - val result = allTrees.result() - for tree <- result do enterMissingSymbols(tree) + val unprocessed = (tree, companion, List.empty[MemberDef]) + val (transformedTree, transformedCompanion, suffixed) = + tree.symbol.annotations.foldLeft(unprocessed): (lastResult, annot) => + if annot.isMacroAnnotation then + val (tree, companion, suffixed) = lastResult + debug.println(i"Expanding macro annotation: ${annot}") + // Interpret call to `new myAnnot(..).transform(using )(, )` + val (transformedTrees, transformedCompanion) = callMacro(macroInterpreter, tree, companion, annot) + // Establish the trees order and check the integrity of the trees + transformedTrees.span(_.symbol != tree.symbol) match + case (newPrefixed, newTree :: newSuffixed) => + // Check the integrity of the generated trees + for prefixedTree <- newPrefixed do checkMacroDef(prefixedTree, tree, annot) + for suffixedTree <- newSuffixed do checkMacroDef(suffixedTree, tree, annot) + for tcompanion <- transformedCompanion do TreeChecker.checkMacroGeneratedTree(companion.get, tcompanion) + TreeChecker.checkMacroGeneratedTree(tree, newTree) + prefixedTrees ++= newPrefixed + (newTree, transformedCompanion, newSuffixed ::: suffixed) + case (_, Nil) => + report.error(i"Transformed tree for ${tree.symbol} was not return by `(${annot.tree}).transform(..)` during macro expansion", annot.tree.srcPos) + lastResult + else + lastResult + end val + + // Complete the list of transformed/generated definitions + val result = prefixedTrees.result() ::: transformedTree :: suffixed debug.println(result.map(_.show).mkString("expanded to:\n", "\n", "")) - result + (result, transformedCompanion) + end expandAnnotations - /** Interpret the code `new annot(..).transform(using )()` */ - private def callMacro(interpreter: Interpreter, tree: MemberDef, annot: 
Annotation)(using Context): List[MemberDef] = - // TODO: Remove when scala.annaotaion.MacroAnnotation is no longer experimental + /** Interpret the code `new annot(..).transform(using )(, )` */ + private def callMacro(interpreter: Interpreter, tree: MemberDef, companion: Option[MemberDef], annot: Annotation) + (using Context): (List[MemberDef], Option[MemberDef]) = + // TODO: Remove when scala.annotation.MacroAnnotation is no longer experimental import scala.reflect.Selectable.reflectiveSelectable type MacroAnnotation = { - def transform(using Quotes)(tree: Object/*Erased type of quotes.refelct.Definition*/): List[MemberDef /*quotes.refelct.Definition known to be MemberDef in QuotesImpl*/] + def transform(using Quotes)( + tree: Object/*Erased type of quotes.reflect.Definition*/, + companion: Option[Object/*Erased type of quotes.reflect.Definition*/] + ): List[MemberDef /*quotes.refelct.Definition known to be MemberDef in QuotesImpl*/] } // Interpret macro annotation instantiation `new myAnnot(..)` + // TODO: Make this error handling stronger (no error handling at the moment) val annotInstance = interpreter.interpret[MacroAnnotation](annot.tree).get + // TODO: Remove when scala.annaotaion.MacroAnnotation is no longer experimental assert(annotInstance.getClass.getClassLoader.loadClass("scala.annotation.MacroAnnotation").isInstance(annotInstance)) val quotes = QuotesImpl()(using SpliceScope.contextWithNewSpliceScope(tree.symbol.sourcePos)(using MacroExpansion.context(tree)).withOwner(tree.symbol.owner)) - try annotInstance.transform(using quotes)(tree.asInstanceOf[quotes.reflect.Definition]) + try + val result = annotInstance.transform(using quotes)(tree, companion) + // Process the result based on if the companion was present or not + // The idea is that we try to find a transformation of the companion if we do provide one + companion.map(_.symbol) match + case None => (result, companion) + case Some(companionSym) => result.partition(_.symbol == companionSym) match + 
case (Nil, result) => (result, companion) // companion didn't change + case (newCompanion :: Nil, result) => (result, Some(newCompanion)) + case (_, result) => + report.error(i"Transformed companion for ${tree.symbol} was returned more than once by `(${annot.tree}).transform(..)` during macro expansion", annot.tree) + (result, companion) + catch - // TODO: Replace this case when scala.annaotaion.MacroAnnotation is no longer experimental and reflectiveSelectable is not used + // TODO: Replace this case when scala.annotation.MacroAnnotation is no longer experimental and reflectiveSelectable is not used // Replace this case with the nested cases. case ex0: InvocationTargetException => ex0.getCause match case ex: scala.quoted.runtime.StopMacroExpansion => if !ctx.reporter.hasErrors then report.error("Macro expansion was aborted by the macro without any errors reported. Macros should issue errors to end-users when aborting a macro expansion with StopMacroExpansion.", annot.tree) - List(tree) - case Interpreter.MissingClassDefinedInCurrentRun(sym) => - Interpreter.suspendOnMissing(sym, annot.tree) + (List(tree), companion) + case Interpreter.MissingClassValidInCurrentRun(sym, origin) => + Interpreter.suspendOnMissing(sym, origin, annot.tree) case NonFatal(ex) => - val stack0 = ex.getStackTrace.takeWhile(_.getClassName != "dotty.tools.dotc.transform.MacroAnnotations") + val stack0 = ex.getStackTrace.takeWhile(_.getClassName != this.getClass().getName()) val stack = stack0.take(1 + stack0.lastIndexWhere(_.getMethodName == "transform")) val msg = em"""Failed to evaluate macro. 
@@ -118,9 +142,10 @@ class MacroAnnotations(phase: IdentityDenotTransformer): | ${stack.mkString("\n ")} |""" report.error(msg, annot.tree) - List(tree) + (List(tree), companion) case _ => throw ex0 + end callMacro /** Check that this tree can be added by the macro annotation */ private def checkMacroDef(newTree: DefTree, annotatedTree: Tree, annot: Annotation)(using Context) = @@ -133,14 +158,13 @@ class MacroAnnotations(phase: IdentityDenotTransformer): report.error(i"macro annotation $annot added $sym with an inconsistent owner. Expected it to be owned by ${annotated.owner} but was owned by ${sym.owner}.", annot.tree) else if annotated.isClass && annotated.owner.is(Package) /*&& !sym.isClass*/ then report.error(i"macro annotation can not add top-level ${sym.showKind}. $annot tried to add $sym.", annot.tree) + end checkMacroDef - /** - * Enter the symbols generated by MacroAnnotations - */ - private def enterMissingSymbols(tree: DefTree)(using Context) = new TreeTraverser { + /** Enter the symbols generated by MacroAnnotations */ + def enterMissingSymbols(tree: MemberDef, phase: DenotTransformer)(using Context) = new TreeTraverser { def traverse(tree: tpd.Tree)(using Context): Unit = tree match case tdef @ TypeDef(_, template: Template) => - val isSymbolInDecls = tdef.symbol.asClass.info.decls.toList.toSet + val isSymbolInDecls = atNextPhase(tdef.symbol.asClass.info.decls.toList.toSet) for tree <- template.body if tree.isDef do if tree.symbol.owner != tdef.symbol then report.error(em"Macro added a definition with the wrong owner - ${tree.symbol.owner} - ${tdef.symbol} in ${tree.source}", tree.srcPos) @@ -150,12 +174,4 @@ class MacroAnnotations(phase: IdentityDenotTransformer): case _ => traverseChildren(tree) }.traverse(tree) -object MacroAnnotations: - - /** Is this an annotation that implements `scala.annation.MacroAnnotation` */ - def isMacroAnnotation(annot: Annotation)(using Context): Boolean = - 
annot.tree.symbol.maybeOwner.derivesFrom(defn.MacroAnnotationClass) - - /** Is this symbol annotated with an annotation that implements `scala.annation.MacroAnnotation` */ - def hasMacroAnnotation(sym: Symbol)(using Context): Boolean = - sym.getAnnotation(defn.MacroAnnotationClass).isDefined +end MacroAnnotations diff --git a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala index 887a962f7a65..137fbf4f837c 100644 --- a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala @@ -13,6 +13,8 @@ abstract class MacroTransform extends Phase { import ast.tpd.* + override def isRunnable(using Context) = super.isRunnable && !ctx.usedBestEffortTasty + override def run(using Context): Unit = { val unit = ctx.compilationUnit unit.tpdTree = atPhase(transformPhase)(newTransformer.transform(unit.tpdTree)) diff --git a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala index 252babe7058f..86acd009fd09 100644 --- a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala +++ b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala @@ -136,6 +136,8 @@ object MegaPhase { override def run(using Context): Unit = singletonGroup.run + + override def isRunnable(using Context): Boolean = super.isRunnable && !ctx.usedBestEffortTasty } } import MegaPhase.* @@ -164,6 +166,8 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase { relaxedTypingCache } + override def isRunnable(using Context): Boolean = super.isRunnable && !ctx.usedBestEffortTasty + private val cpy: TypedTreeCopier = cpyBetweenPhases /** Transform node using all phases in this group that have idxInGroup >= start */ diff --git a/compiler/src/dotty/tools/dotc/transform/Mixin.scala b/compiler/src/dotty/tools/dotc/transform/Mixin.scala index 6df4bebde132..9a19c0dc414f 100644 --- 
a/compiler/src/dotty/tools/dotc/transform/Mixin.scala +++ b/compiler/src/dotty/tools/dotc/transform/Mixin.scala @@ -18,6 +18,8 @@ import NameKinds.* import NameOps.* import ast.Trees.* +import dotty.tools.dotc.transform.sjs.JSSymUtils.isJSType + object Mixin { val name: String = "mixin" val description: String = "expand trait fields and trait initializers" @@ -273,7 +275,15 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => else if (getter.is(Lazy, butNot = Module)) transformFollowing(superRef(getter).appliedToNone) else if (getter.is(Module)) - New(getter.info.resultType, List(This(cls))) + if ctx.settings.scalajs.value && getter.moduleClass.isJSType then + if getter.is(Scala2x) then + report.error( + em"""Implementation restriction: cannot extend the Scala 2 trait $mixin + |containing the object $getter that extends js.Any""", + cls.srcPos) + transformFollowing(superRef(getter).appliedToNone) + else + New(getter.info.resultType, List(This(cls))) else Underscore(getter.info.resultType) // transformFollowing call is needed to make memoize & lazy vals run diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index bed29a122399..0b8507f3b6c7 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -112,8 +112,13 @@ object PatternMatcher { sanitize(tpe), coord = rhs.span) // TODO: Drop Case once we use everywhere else `isPatmatGenerated`. 
+ private def dropNamedTuple(tree: Tree): Tree = + val tpe = tree.tpe.widen + if tpe.isNamedTupleType then tree.cast(tpe.stripNamedTuple) else tree + /** The plan `let x = rhs in body(x)` where `x` is a fresh variable */ - private def letAbstract(rhs: Tree, tpe: Type = NoType)(body: Symbol => Plan): Plan = { + private def letAbstract(rhs0: Tree, tpe: Type = NoType)(body: Symbol => Plan): Plan = { + val rhs = dropNamedTuple(rhs0) val declTpe = if tpe.exists then tpe else rhs.tpe val vble = newVar(rhs, EmptyFlags, declTpe) initializer(vble) = rhs @@ -334,6 +339,7 @@ object PatternMatcher { def unapplyPlan(unapp: Tree, args: List[Tree]): Plan = { def caseClass = unapp.symbol.owner.linkedClass lazy val caseAccessors = caseClass.caseAccessors + val unappType = unapp.tpe.widen.stripNamedTuple def isSyntheticScala2Unapply(sym: Symbol) = sym.is(Synthetic) && sym.owner.is(Scala2x) @@ -341,39 +347,45 @@ object PatternMatcher { def tupleApp(i: Int, receiver: Tree) = // manually inlining the call to NonEmptyTuple#apply, because it's an inline method ref(defn.RuntimeTuplesModule) .select(defn.RuntimeTuples_apply) - .appliedTo(receiver, Literal(Constant(i))) + .appliedTo( + receiver.ensureConforms(defn.NonEmptyTupleTypeRef), // If scrutinee is a named tuple, cast to underlying tuple + Literal(Constant(i))) if (isSyntheticScala2Unapply(unapp.symbol) && caseAccessors.length == args.length) - def tupleSel(sym: Symbol) = ref(scrutinee).select(sym) + def tupleSel(sym: Symbol) = + // If scrutinee is a named tuple, cast to underlying tuple, so that we can + // continue to select with _1, _2, ... 
+ ref(scrutinee).ensureConforms(scrutinee.info.stripNamedTuple).select(sym) val isGenericTuple = defn.isTupleClass(caseClass) && !defn.isTupleNType(tree.tpe match { case tp: OrType => tp.join case tp => tp }) // widen even hard unions, to see if it's a union of tuples - val components = if isGenericTuple then caseAccessors.indices.toList.map(tupleApp(_, ref(scrutinee))) else caseAccessors.map(tupleSel) + val components = + if isGenericTuple then caseAccessors.indices.toList.map(tupleApp(_, ref(scrutinee))) + else caseAccessors.map(tupleSel) matchArgsPlan(components, args, onSuccess) - else if (unapp.tpe <:< (defn.BooleanType)) + else if unappType.isRef(defn.BooleanClass) then TestPlan(GuardTest, unapp, unapp.span, onSuccess) else letAbstract(unapp) { unappResult => val isUnapplySeq = unapp.symbol.name == nme.unapplySeq - if (isProductMatch(unapp.tpe.widen, args.length) && !isUnapplySeq) { - val selectors = productSelectors(unapp.tpe).take(args.length) + if isProductMatch(unappType, args.length) && !isUnapplySeq then + val selectors = productSelectors(unappType).take(args.length) .map(ref(unappResult).select(_)) matchArgsPlan(selectors, args, onSuccess) - } - else if (isUnapplySeq && unapplySeqTypeElemTp(unapp.tpe.widen.finalResultType).exists) { + else if isUnapplySeq && unapplySeqTypeElemTp(unappType.finalResultType).exists then unapplySeqPlan(unappResult, args) - } - else if (isUnapplySeq && isProductSeqMatch(unapp.tpe.widen, args.length, unapp.srcPos)) { - val arity = productArity(unapp.tpe.widen, unapp.srcPos) + else if isUnapplySeq && isProductSeqMatch(unappType, args.length, unapp.srcPos) then + val arity = productArity(unappType, unapp.srcPos) unapplyProductSeqPlan(unappResult, args, arity) - } else if unappResult.info <:< defn.NonEmptyTupleTypeRef then - val components = (0 until foldApplyTupleType(unappResult.denot.info).length).toList.map(tupleApp(_, ref(unappResult))) + val components = + (0 until 
unappResult.denot.info.tupleElementTypes.getOrElse(Nil).length) + .toList.map(tupleApp(_, ref(unappResult))) matchArgsPlan(components, args, onSuccess) else { - assert(isGetMatch(unapp.tpe)) + assert(isGetMatch(unappType)) val argsPlan = { val get = ref(unappResult).select(nme.get, _.info.isParameterless) - val arity = productArity(get.tpe, unapp.srcPos) + val arity = productArity(get.tpe.stripNamedTuple, unapp.srcPos) if (isUnapplySeq) letAbstract(get) { getResult => if unapplySeqTypeElemTp(get.tpe).exists @@ -384,7 +396,7 @@ object PatternMatcher { letAbstract(get) { getResult => val selectors = if (args.tail.isEmpty) ref(getResult) :: Nil - else productSelectors(get.tpe).map(ref(getResult).select(_)) + else productSelectors(getResult.info).map(ref(getResult).select(_)) matchArgsPlan(selectors, args, onSuccess) } } diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index b0aed580e824..6c3dcc669877 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -9,7 +9,7 @@ import tasty.* import config.Printers.{noPrinter, pickling} import config.Feature import java.io.PrintStream -import io.FileWriters.TastyWriter +import io.FileWriters.{TastyWriter, ReadOnlyContext} import StdNames.{str, nme} import Periods.* import Phases.* @@ -22,6 +22,17 @@ import compiletime.uninitialized import dotty.tools.io.{JarArchive, AbstractFile} import dotty.tools.dotc.printing.OutlinePrinter import scala.annotation.constructorOnly +import scala.concurrent.Promise +import dotty.tools.dotc.transform.Pickler.writeSigFilesAsync + +import scala.util.chaining.given +import dotty.tools.io.FileWriters.{EagerReporter, BufferingReporter} +import dotty.tools.dotc.sbt.interfaces.IncrementalCallback +import dotty.tools.dotc.sbt.asyncZincPhasesCompleted +import scala.concurrent.ExecutionContext +import scala.util.control.NonFatal +import 
java.util.concurrent.atomic.AtomicBoolean +import java.nio.file.Files object Pickler { val name: String = "pickler" @@ -33,32 +44,161 @@ object Pickler { */ inline val ParallelPickling = true - class EarlyFileWriter private (writer: TastyWriter, origin: AbstractFile): - def this(dest: AbstractFile)(using @constructorOnly ctx: Context) = this(TastyWriter(dest), dest) + /**A holder for syncronization points and reports when writing TASTy asynchronously. + * The callbacks should only be called once. + */ + class AsyncTastyHolder private ( + val earlyOut: AbstractFile, incCallback: IncrementalCallback | Null)(using @constructorOnly ex: ExecutionContext): + import scala.concurrent.Future as StdFuture + import scala.concurrent.Await + import scala.concurrent.duration.Duration + import AsyncTastyHolder.Signal + + private val _cancelled = AtomicBoolean(false) + + /**Cancel any outstanding work. + * This should be done at the end of a run, e.g. background work may be running even though + * errors in main thread will prevent reaching the backend. */ + def cancel(): Unit = + if _cancelled.compareAndSet(false, true) then + asyncTastyWritten.trySuccess(None) // cancel the wait for TASTy writing + if incCallback != null then + asyncAPIComplete.trySuccess(Signal.Cancelled) // cancel the wait for API completion + else + () // nothing else to do + + /** check if the work has been cancelled. 
*/ + def cancelled: Boolean = _cancelled.get() + + private val asyncTastyWritten = Promise[Option[AsyncTastyHolder.State]]() + private val asyncAPIComplete = + if incCallback == null then Promise.successful(Signal.Done) // no need to wait for API completion + else Promise[Signal]() + + private val backendFuture: StdFuture[Option[BufferingReporter]] = + val asyncState = asyncTastyWritten.future + .zipWith(asyncAPIComplete.future)((state, api) => state.filterNot(_ => api == Signal.Cancelled)) + asyncState.map: optState => + optState.flatMap: state => + if incCallback != null && state.done && !state.hasErrors then + asyncZincPhasesCompleted(incCallback, state.pending).toBuffered + else state.pending + + /** awaits the state of async TASTy operations indefinitely, returns optionally any buffered reports. */ + def sync(): Option[BufferingReporter] = + Await.result(backendFuture, Duration.Inf) + + def signalAPIComplete(): Unit = + if incCallback != null then + asyncAPIComplete.trySuccess(Signal.Done) + + /** should only be called once */ + def signalAsyncTastyWritten()(using ctx: ReadOnlyContext): Unit = + val done = !ctx.run.suspendedAtTyperPhase + if done then + try + // when we are done, i.e. no suspended units, + // we should close the file system so it can be read in the same JVM process. + // Note: we close even if we have been cancelled. + earlyOut match + case jar: JarArchive => jar.close() + case _ => + catch + case NonFatal(t) => + ctx.reporter.error(em"Error closing early output: ${t}") + + asyncTastyWritten.trySuccess: + Some( + AsyncTastyHolder.State( + hasErrors = ctx.reporter.hasErrors, + done = done, + pending = ctx.reporter.toBuffered + ) + ) + end signalAsyncTastyWritten + end AsyncTastyHolder + + object AsyncTastyHolder: + /** The state after writing async tasty. Any errors should have been reported, or pending. + * if suspendedUnits is true, then we can't signal Zinc yet. 
+ */ + private class State(val hasErrors: Boolean, val done: Boolean, val pending: Option[BufferingReporter]) + private enum Signal: + case Done, Cancelled + + /**Create a holder for Asynchronous state of early-TASTy operations. + * the `ExecutionContext` parameter is used to call into Zinc to signal + * that API and Dependency phases are complete. + */ + def init(using Context, ExecutionContext): AsyncTastyHolder = + AsyncTastyHolder(ctx.settings.XearlyTastyOutput.value, ctx.incCallback) + + + /** Asynchronously writes TASTy files to the destination -Yearly-tasty-output. + * If no units have been suspended, then we are "done", which enables Zinc to be signalled. + * + * If there are suspended units, (due to calling a macro defined in the same run), then the API is incomplete, + * so it would be a mistake to signal Zinc. This is a sensible default, because Zinc by default will ignore the + * signal if there are macros in the API. + * - See `sbt-test/pipelining/pipelining-scala-macro` for an example. + * + * TODO: The user can override this default behaviour in Zinc to always listen to the signal, + * (e.g. if they define the macro implementation in an upstream, non-pipelined project). + * - See `sbt-test/pipelining/pipelining-scala-macro-force` where we force Zinc to listen to the signal. + * If the user wants force early output to be written, then they probably also want to benefit from pipelining, + * which then makes suspension problematic as it increases compilation times. + * Proposal: perhaps we should provide a flag `-Ystrict-pipelining` (as an alternative to `-Yno-suspended-units`), + * which fails in the condition of definition of a macro where its implementation is in the same project. + * (regardless of if it is used); this is also more strict than preventing suspension at typer. + * The user is then certain that they are always benefitting as much as possible from pipelining. 
+ */ + def writeSigFilesAsync( + tasks: List[(String, Array[Byte])], + writer: EarlyFileWriter, + async: AsyncTastyHolder)(using ctx: ReadOnlyContext): Unit = { + try + try + for (internalName, pickled) <- tasks do + if !async.cancelled then + val _ = writer.writeTasty(internalName, pickled) + catch + case NonFatal(t) => ctx.reporter.exception(em"writing TASTy to early output", t) + finally + writer.close() + catch + case NonFatal(t) => ctx.reporter.exception(em"closing early output writer", t) + finally + async.signalAsyncTastyWritten() + } - export writer.writeTasty + class EarlyFileWriter private (writer: TastyWriter): + def this(dest: AbstractFile)(using @constructorOnly ctx: ReadOnlyContext) = this(TastyWriter(dest)) - def close(): Unit = - writer.close() - origin match { - case jar: JarArchive => jar.close() // also close the file system - case _ => - } + export writer.{writeTasty, close} } /** This phase pickles trees */ class Pickler extends Phase { import ast.tpd.* + private def doAsyncTasty(using Context): Boolean = ctx.run.nn.asyncTasty.isDefined + + private var fastDoAsyncTasty: Boolean = false + override def phaseName: String = Pickler.name override def description: String = Pickler.description - // No need to repickle trees coming from TASTY + // No need to repickle trees coming from TASTY, however in the case that we need to write TASTy to early-output, + // then we need to run this phase to send the tasty from compilation units to the early-output. 
override def isRunnable(using Context): Boolean = - super.isRunnable && (!ctx.settings.fromTasty.value || ctx.settings.YjavaTasty.value) + (super.isRunnable || ctx.isBestEffort) + && (!ctx.settings.fromTasty.value || doAsyncTasty) + && (!ctx.usedBestEffortTasty || ctx.isBestEffort) + // we do not want to pickle `.betasty` if do not plan to actually create the + // betasty file (as signified by the -Ybest-effort option) - // when `-Yjava-tasty` is set we actually want to run this phase on Java sources + // when `-Xjava-tasty` is set we actually want to run this phase on Java sources override def skipIfJava(using Context): Boolean = false private def output(name: String, msg: String) = { @@ -86,26 +226,54 @@ class Pickler extends Phase { */ object serialized: val scratch = new ScratchData + private val buf = mutable.ListBuffer.empty[(String, Array[Byte])] def run(body: ScratchData => Array[Byte]): Array[Byte] = synchronized { scratch.reset() body(scratch) } + def commit(internalName: String, tasty: Array[Byte]): Unit = synchronized { + buf += ((internalName, tasty)) + } + def result(): List[(String, Array[Byte])] = synchronized { + val res = buf.toList + buf.clear() + res + } private val executor = Executor[Array[Byte]]() private def useExecutor(using Context) = - Pickler.ParallelPickling && !ctx.settings.YtestPickler.value && - !ctx.settings.YjavaTasty.value // disable parallel pickling when `-Yjava-tasty` is set (internal testing only) + Pickler.ParallelPickling && !ctx.isBestEffort && !ctx.settings.YtestPickler.value private def printerContext(isOutline: Boolean)(using Context): Context = if isOutline then ctx.fresh.setPrinterFn(OutlinePrinter(_)) else ctx + /** only ran under -Ypickle-write and -from-tasty */ + private def runFromTasty(unit: CompilationUnit)(using Context): Unit = { + val pickled = unit.pickled + for (cls, bytes) <- pickled do + serialized.commit(computeInternalName(cls), bytes()) + } + + private def computeInternalName(cls: ClassSymbol)(using 
Context): String = + if cls.is(Module) then cls.binaryClassName.stripSuffix(str.MODULE_SUFFIX).nn + else cls.binaryClassName + override def run(using Context): Unit = { val unit = ctx.compilationUnit + val isBestEffort = ctx.reporter.errorsReported || ctx.usedBestEffortTasty pickling.println(i"unpickling in run ${ctx.runId}") + if ctx.settings.fromTasty.value then + // skip the rest of the phase, as tasty is already "pickled", + // however we still need to set up tasks to write TASTy to + // early output when pipelining is enabled. + if fastDoAsyncTasty then + runFromTasty(unit) + return () + for cls <- dropCompanionModuleClasses(topLevelClasses(unit.tpdTree)) tree <- sliceTopLevel(unit.tpdTree, cls) @@ -118,8 +286,8 @@ class Pickler extends Phase { util.SourceFile.relativePath(unit.source, reference) val isJavaAttr = unit.isJava // we must always set JAVAattr when pickling Java sources if isJavaAttr then - // assert that Java sources didn't reach Pickler without `-Yjava-tasty`. - assert(ctx.settings.YjavaTasty.value, "unexpected Java source file without -Yjava-tasty") + // assert that Java sources didn't reach Pickler without `-Xjava-tasty`. 
+ assert(ctx.settings.XjavaTasty.value, "unexpected Java source file without -Xjava-tasty") val isOutline = isJavaAttr // TODO: later we may want outline for Scala sources too val attributes = Attributes( sourceFile = sourceRelativePath, @@ -131,14 +299,23 @@ class Pickler extends Phase { isOutline = isOutline ) - val pickler = new TastyPickler(cls) + val pickler = new TastyPickler(cls, isBestEffortTasty = isBestEffort) val treePkl = new TreePickler(pickler, attributes) - treePkl.pickle(tree :: Nil) + val successful = + try + treePkl.pickle(tree :: Nil) + true + catch + case NonFatal(ex) if ctx.isBestEffort => + report.bestEffortError(ex, "Some best-effort tasty files will not be generated.") + false Profile.current.recordTasty(treePkl.buf.length) val positionWarnings = new mutable.ListBuffer[Message]() def reportPositionWarnings() = positionWarnings.foreach(report.warning(_)) + val internalName = if fastDoAsyncTasty then computeInternalName(cls) else "" + def computePickled(): Array[Byte] = inContext(ctx.fresh) { serialized.run { scratch => treePkl.compactify(scratch) @@ -149,7 +326,7 @@ class Pickler extends Phase { unit.source, tree :: Nil, positionWarnings, scratch.positionBuffer, scratch.pickledIndices) - if !ctx.settings.YdropComments.value then + if !ctx.settings.XdropComments.value then CommentPickler.pickleComments( pickler, treePkl.buf.addrOfTree, treePkl.docString, tree, scratch.commentBuffer) @@ -166,55 +343,67 @@ class Pickler extends Phase { // println(i"rawBytes = \n$rawBytes%\n%") // DEBUG if ctx.settings.YprintTasty.value || pickling != noPrinter then println(i"**** pickled info of $cls") - println(TastyPrinter.showContents(pickled, ctx.settings.color.value == "never")) + println(TastyPrinter.showContents(pickled, ctx.settings.color.value == "never", isBestEffortTasty = false)) println(i"**** end of pickled info of $cls") + + if fastDoAsyncTasty then + serialized.commit(internalName, pickled) + pickled } } - /** A function that returns the pickled 
bytes. Depending on `Pickler.ParallelPickling` - * either computes the pickled data in a future or eagerly before constructing the - * function value. - */ - val demandPickled: () => Array[Byte] = - if useExecutor then - val futurePickled = executor.schedule(computePickled) - () => - try futurePickled.force.get - finally reportPositionWarnings() - else - val pickled = computePickled() - reportPositionWarnings() - if ctx.settings.YtestPickler.value then - pickledBytes(cls) = (unit, pickled) - if ctx.settings.YtestPicklerCheck.value then - printedTasty(cls) = TastyPrinter.showContents(pickled, noColor = true, testPickler = true) - () => pickled - - unit.pickled += (cls -> demandPickled) + if successful then + /** A function that returns the pickled bytes. Depending on `Pickler.ParallelPickling` + * either computes the pickled data in a future or eagerly before constructing the + * function value. + */ + val demandPickled: () => Array[Byte] = + if useExecutor then + val futurePickled = executor.schedule(computePickled) + () => + try futurePickled.force.get + finally reportPositionWarnings() + else + val pickled = computePickled() + reportPositionWarnings() + if ctx.settings.YtestPickler.value then + pickledBytes(cls) = (unit, pickled) + if ctx.settings.YtestPicklerCheck.value then + printedTasty(cls) = TastyPrinter.showContents(pickled, noColor = true, isBestEffortTasty = false, testPickler = true) + () => pickled + + unit.pickled += (cls -> demandPickled) end for } override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { - val sigWriter: Option[Pickler.EarlyFileWriter] = ctx.settings.YjavaTastyOutput.value match - case jar: JarArchive if jar.exists => - Some(Pickler.EarlyFileWriter(jar)) - case _ => - None - val units0 = - if ctx.settings.fromTasty.value then - // we still run the phase for the side effect of writing the pipeline tasty files - units + val useExecutor = this.useExecutor + + val writeTask: Option[() => Unit] = + 
ctx.run.nn.asyncTasty.map: async => + fastDoAsyncTasty = true + () => + given ReadOnlyContext = if useExecutor then ReadOnlyContext.buffered else ReadOnlyContext.eager + val writer = Pickler.EarlyFileWriter(async.earlyOut) + writeSigFilesAsync(serialized.result(), writer, async) + + def runPhase(writeCB: (doWrite: () => Unit) => Unit) = + super.runOn(units).tap(_ => writeTask.foreach(writeCB)) + + val result = + if useExecutor then + executor.start() + try + runPhase: doWrite => + // unless we redesign executor to have "Unit" schedule overload, we need some sentinel value. + executor.schedule(() => { doWrite(); Array.emptyByteArray }) + finally executor.close() else - if useExecutor then - executor.start() - try super.runOn(units) - finally executor.close() - else - super.runOn(units) + runPhase(_()) if ctx.settings.YtestPickler.value then val ctx2 = ctx.fresh - .setSetting(ctx.settings.YreadComments, true) + .setSetting(ctx.settings.XreadComments, true) .setSetting(ctx.settings.YshowPrintErrors, true) testUnpickler( using ctx2 @@ -222,43 +411,23 @@ class Pickler extends Phase { .setReporter(new ThrowingReporter(ctx.reporter)) .addMode(Mode.ReadPositions) ) - val result = - if ctx.settings.YjavaTasty.value then - sigWriter.foreach(writeJavaSigFiles(units0, _)) - units0.filterNot(_.typedAsJava) // remove java sources, this is the terminal phase when `-Yjava-tasty` is set - else - units0 + if ctx.isBestEffort then + val outpath = + ctx.settings.outputDir.value.jpath.toAbsolutePath.nn.normalize.nn + .resolve("META-INF").nn + .resolve("best-effort") + Files.createDirectories(outpath) + BestEffortTastyWriter.write(outpath.nn, result) result } - private def writeJavaSigFiles(units: List[CompilationUnit], writer: Pickler.EarlyFileWriter)(using Context): Unit = { - var count = 0 - try - for - unit <- units if unit.typedAsJava - (cls, pickled) <- unit.pickled - if cls.isDefinedInCurrentRun - do - val binaryClassName = cls.binaryClassName - val internalName = - if 
(cls.is(Module)) binaryClassName.stripSuffix(str.MODULE_SUFFIX).nn - else binaryClassName - val _ = writer.writeTasty(internalName, pickled()) - count += 1 - finally - writer.close() - if ctx.settings.verbose.value then - report.echo(s"[$count java sig files written]") - end try - } - private def testUnpickler(using Context): Unit = pickling.println(i"testing unpickler at run ${ctx.runId}") ctx.initialize() val resolveCheck = ctx.settings.YtestPicklerCheck.value val unpicklers = for ((cls, (unit, bytes)) <- pickledBytes) yield { - val unpickler = new DottyUnpickler(unit.source.file, bytes) + val unpickler = new DottyUnpickler(unit.source.file, bytes, isBestEffortTasty = false) unpickler.enter(roots = Set.empty) val optCheck = if resolveCheck then diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 3bcec80b5b10..c6ad1bb860e8 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -19,6 +19,8 @@ import config.Feature import util.SrcPos import reporting.* import NameKinds.WildcardParamName +import cc.* +import dotty.tools.dotc.transform.MacroAnnotations.hasMacroAnnotation object PostTyper { val name: String = "posttyper" @@ -75,13 +77,29 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => override def changesMembers: Boolean = true // the phase adds super accessors and synthetic members + /** + * Serializable and AbstractFunction1 are added for companion objects of case classes in scala2-library + */ + override def changesParents: Boolean = + if !initContextCalled then + throw new Exception("Calling changesParents before initContext, should call initContext first") + compilingScala2StdLib + override def transformPhase(using Context): Phase = thisPhase.next def newTransformer(using Context): Transformer = new PostTyperTransformer + /** + * Used to check that `changesParents` is called 
after `initContext`. + * + * This contract is easy to break and results in subtle bugs. + */ + private var initContextCalled = false + private var compilingScala2StdLib = false override def initContext(ctx: FreshContext): Unit = + initContextCalled = true compilingScala2StdLib = ctx.settings.YcompileScala2Library.value(using ctx) val superAcc: SuperAccessors = new SuperAccessors(thisPhase) @@ -261,9 +279,15 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => } } - def checkNoConstructorProxy(tree: Tree)(using Context): Unit = + def checkUsableAsValue(tree: Tree)(using Context): Tree = + def unusable(msg: Symbol => Message) = + errorTree(tree, msg(tree.symbol)) if tree.symbol.is(ConstructorProxy) then - report.error(em"constructor proxy ${tree.symbol} cannot be used as a value", tree.srcPos) + unusable(ConstructorProxyNotValue(_)) + else if tree.symbol.isContextBoundCompanion then + unusable(ContextBoundCompanionNotValue(_)) + else + tree def checkStableSelection(tree: Tree)(using Context): Unit = def check(qual: Tree) = @@ -279,6 +303,21 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => if !tree.symbol.is(Package) then tree else errorTree(tree, em"${tree.symbol} cannot be used as a type") + // Cleans up retains annotations in inferred type trees. This is needed because + // during the typer, it is infeasible to correctly infer the capture sets in most + // cases, resulting ill-formed capture sets that could crash the pickler later on. + // See #20035. + private def cleanupRetainsAnnot(symbol: Symbol, tpt: Tree)(using Context): Tree = + tpt match + case tpt: InferredTypeTree + if !symbol.allOverriddenSymbols.hasNext => + // if there are overridden symbols, the annotation comes from an explicit type of the overridden symbol + // and should be retained. 
+ val tm = new CleanupRetains + val tpe1 = tm(tpt.tpe) + tpt.withType(tpe1) + case _ => tpt + override def transform(tree: Tree)(using Context): Tree = try tree match { // TODO move CaseDef case lower: keep most probable trees first for performance @@ -293,11 +332,11 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => if tree.isType then checkNotPackage(tree) else - checkNoConstructorProxy(tree) registerNeedsInlining(tree) - tree.tpe match { + val tree1 = checkUsableAsValue(tree) + tree1.tpe match { case tpe: ThisType => This(tpe.cls).withSpan(tree.span) - case _ => tree + case _ => tree1 } case tree @ Select(qual, name) => registerNeedsInlining(tree) @@ -305,8 +344,9 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => Checking.checkRealizable(qual.tpe, qual.srcPos) withMode(Mode.Type)(super.transform(checkNotPackage(tree))) else - checkNoConstructorProxy(tree) - transformSelect(tree, Nil) + checkUsableAsValue(tree) match + case tree1: Select => transformSelect(tree1, Nil) + case tree1 => tree1 case tree: Apply => val methType = tree.fun.tpe.widen.asInstanceOf[MethodType] val app = @@ -328,19 +368,23 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => else tree def app1 = - // reverse order of transforming args and fun. This way, we get a chance to see other - // well-formedness errors before reporting errors in possible inferred type args of fun. + // reverse order of transforming args and fun. This way, we get a chance to see other + // well-formedness errors before reporting errors in possible inferred type args of fun. val args1 = transform(app.args) cpy.Apply(app)(transform(app.fun), args1) methPart(app) match case Select(nu: New, nme.CONSTRUCTOR) if isCheckable(nu) => // need to check instantiability here, because the type of the New itself // might be a type constructor. 
- ctx.typer.checkClassType(tree.tpe, tree.srcPos, traitReq = false, stablePrefixReq = true) + def checkClassType(tpe: Type, stablePrefixReq: Boolean) = + ctx.typer.checkClassType(tpe, tree.srcPos, + traitReq = false, stablePrefixReq = stablePrefixReq, + refinementOK = Feature.enabled(Feature.modularity)) + checkClassType(tree.tpe, true) if !nu.tpe.isLambdaSub then // Check the constructor type as well; it could be an illegal singleton type // which would not be reflected as `tree.tpe` - ctx.typer.checkClassType(nu.tpe, tree.srcPos, traitReq = false, stablePrefixReq = false) + checkClassType(nu.tpe, false) Checking.checkInstantiable(tree.tpe, nu.tpe, nu.srcPos) withNoCheckNews(nu :: Nil)(app1) case _ => @@ -384,28 +428,27 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => ) } case tree: ValDef => - annotateExperimental(tree.symbol) + annotateExperimentalCompanion(tree.symbol) registerIfHasMacroAnnotations(tree) checkErasedDef(tree) Checking.checkPolyFunctionType(tree.tpt) - val tree1 = cpy.ValDef(tree)(rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) + val tree1 = cpy.ValDef(tree)(tpt = cleanupRetainsAnnot(tree.symbol, tree.tpt), rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) if tree1.removeAttachment(desugar.UntupledParam).isDefined then checkStableSelection(tree.rhs) processValOrDefDef(super.transform(tree1)) case tree: DefDef => - annotateExperimental(tree.symbol) registerIfHasMacroAnnotations(tree) checkErasedDef(tree) Checking.checkPolyFunctionType(tree.tpt) annotateContextResults(tree) - val tree1 = cpy.DefDef(tree)(rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) + val tree1 = cpy.DefDef(tree)(tpt = cleanupRetainsAnnot(tree.symbol, tree.tpt), rhs = normalizeErasedRhs(tree.rhs, tree.symbol)) processValOrDefDef(superAcc.wrapDefDef(tree1)(super.transform(tree1).asInstanceOf[DefDef])) case tree: TypeDef => registerIfHasMacroAnnotations(tree) val sym = tree.symbol if (sym.isClass) VarianceChecker.check(tree) - annotateExperimental(sym) 
+ annotateExperimentalCompanion(sym) checkMacroAnnotation(sym) if sym.isOneOf(GivenOrImplicit) then sym.keepAnnotationsCarrying(thisPhase, Set(defn.CompanionClassMetaAnnot), orNoneOf = defn.MetaAnnots) @@ -416,8 +459,12 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => // Constructor parameters are in scope when typing a parent. // While they can safely appear in a parent tree, to preserve // soundness we need to ensure they don't appear in a parent - // type (#16270). - val illegalRefs = parent.tpe.namedPartsWith(p => p.symbol.is(ParamAccessor) && (p.symbol.owner eq sym)) + // type (#16270). We can strip any refinement of a parent type since + // these refinements are split off from the parent type constructor + // application `parent` in Namer and don't show up as parent types + // of the class. + val illegalRefs = parent.tpe.dealias.stripRefinement.namedPartsWith: + p => p.symbol.is(ParamAccessor) && (p.symbol.owner eq sym) if illegalRefs.nonEmpty then report.error( em"The type of a class parent cannot refer to constructor parameters, but ${parent.tpe} refers to ${illegalRefs.map(_.name.show).mkString(",")}", parent.srcPos) @@ -431,6 +478,12 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => else if !sym.is(Param) && !sym.owner.isOneOf(AbstractOrTrait) then Checking.checkGoodBounds(tree.symbol) + // Delete all context bound companions of this TypeDef + if sym.owner.isClass && sym.hasAnnotation(defn.WitnessNamesAnnot) then + val decls = sym.owner.info.decls + for cbCompanion <- decls.lookupAll(sym.name.toTermName) do + if cbCompanion.isContextBoundCompanion then + decls.openForMutations.unlink(cbCompanion) (tree.rhs, sym.info) match case (rhs: LambdaTypeTree, bounds: TypeBounds) => VarianceChecker.checkLambda(rhs, bounds) @@ -513,8 +566,8 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => } override def transformStats[T](trees: List[Tree], exprOwner: Symbol, wrapResult: List[Tree] 
=> Context ?=> T)(using Context): T = - try super.transformStats(trees, exprOwner, wrapResult) - finally Checking.checkExperimentalImports(trees) + Checking.checkAndAdaptExperimentalImports(trees) + super.transformStats(trees, exprOwner, wrapResult) /** Transforms the rhs tree into a its default tree if it is in an `erased` val/def. * Performed to shrink the tree that is known to be erased later. @@ -528,7 +581,7 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => /** Check if the definition has macro annotation and sets `compilationUnit.hasMacroAnnotations` if needed. */ private def registerIfHasMacroAnnotations(tree: DefTree)(using Context) = - if !Inlines.inInlineMethod && MacroAnnotations.hasMacroAnnotation(tree.symbol) then + if !Inlines.inInlineMethod && tree.symbol.hasMacroAnnotation then ctx.compilationUnit.hasMacroAnnotations = true /** Check macro annotations implementations */ @@ -537,24 +590,27 @@ class PostTyper extends MacroTransform with InfoTransformer { thisPhase => report.error("classes that extend MacroAnnotation must not be inner/local classes", sym.srcPos) private def checkErasedDef(tree: ValOrDefDef)(using Context): Unit = + def checkOnlyErasedParams(): Unit = tree match + case tree: DefDef => + for params <- tree.paramss; param <- params if !param.symbol.isType && !param.symbol.is(Erased) do + report.error("erased definition can only have erased parameters", param.srcPos) + case _ => + if tree.symbol.is(Erased, butNot = Macro) then + checkOnlyErasedParams() val tpe = tree.rhs.tpe if tpe.derivesFrom(defn.NothingClass) then report.error("`erased` definition cannot be implemented with en expression of type Nothing", tree.srcPos) else if tpe.derivesFrom(defn.NullClass) then report.error("`erased` definition cannot be implemented with en expression of type Null", tree.srcPos) - private def annotateExperimental(sym: Symbol)(using Context): Unit = - def isTopLevelDefinitionInSource(sym: Symbol) = - !sym.is(Package) && 
!sym.name.isPackageObjectName && - (sym.owner.is(Package) || (sym.owner.isPackageObject && !sym.isConstructor)) - if !sym.hasAnnotation(defn.ExperimentalAnnot) - && (ctx.settings.experimental.value && isTopLevelDefinitionInSource(sym)) - || (sym.is(Module) && sym.companionClass.hasAnnotation(defn.ExperimentalAnnot)) - then - sym.addAnnotation(Annotation(defn.ExperimentalAnnot, sym.span)) - - private def scala2LibPatch(tree: TypeDef)(using Context) = + private def annotateExperimentalCompanion(sym: Symbol)(using Context): Unit = + if sym.is(Module) then + ExperimentalAnnotation.copy(sym.companionClass).foreach(sym.addAnnotation) + + // It needs to run at the phase of the postTyper --- otherwise, the test of the symbols will use + // the transformed denotation with added `Serializable` and `AbstractFunction1`. + private def scala2LibPatch(tree: TypeDef)(using Context) = atPhase(thisPhase): val sym = tree.symbol if compilingScala2StdLib && sym.is(ModuleClass) then // Add Serializable to companion objects of serializable classes, diff --git a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala index 9bb30926d45a..47eb70cb46d4 100644 --- a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala +++ b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala @@ -10,6 +10,7 @@ import Symbols.* import typer.RefChecks import MegaPhase.MiniPhase import ast.tpd +import reporting.InlinedAnonClassWarning import config.Feature import Decorators.* @@ -51,6 +52,17 @@ class PruneErasedDefs extends MiniPhase with SymTransformer { thisTransform => else cpy.ValDef(tree)(rhs = trivialErasedTree(tree.rhs)) override def transformDefDef(tree: DefDef)(using Context): Tree = + def checkNoInlineAnnoClasses(tree: DefDef)(using Context): Unit = + if tree.symbol.is(Inline) then + new TreeTraverser { + def traverse(tree: Tree)(using Context): Unit = + tree match + case tree: TypeDef if 
tree.symbol.isAnonymousClass => + report.warning(new InlinedAnonClassWarning(), tree.symbol.sourcePos) + case _ => traverseChildren(tree) + }.traverse(tree) + + checkNoInlineAnnoClasses(tree) checkErasedInExperimental(tree.symbol) if !tree.symbol.isEffectivelyErased || tree.rhs.isEmpty then tree else cpy.DefDef(tree)(rhs = trivialErasedTree(tree.rhs)) diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala index 44d5caba631a..f809fbd176ce 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -427,7 +427,7 @@ abstract class Recheck extends Phase, SymTransformer: TypeComparer.lub(bodyType :: casesTypes) def recheckSeqLiteral(tree: SeqLiteral, pt: Type)(using Context): Type = - val elemProto = pt.stripNull.elemType match + val elemProto = pt.stripNull().elemType match case NoType => WildcardType case bounds: TypeBounds => WildcardType(bounds) case elemtp => elemtp diff --git a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala index 6d2aedb9b47b..45606b0dbef5 100644 --- a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala +++ b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala @@ -252,7 +252,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { // Second constructor of ioob that takes a String argument def filterStringConstructor(s: Symbol): Boolean = s.info match { case m: MethodType if s.isConstructor && m.paramInfos.size == 1 => - m.paramInfos.head.stripNull == defn.StringType + m.paramInfos.head.stripNull() == defn.StringType case _ => false } val constructor = ioob.typeSymbol.info.decls.find(filterStringConstructor _).asTerm diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 4a7548f40f43..c4e1c7892e8d 100644 --- 
a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -2,6 +2,7 @@ package dotty.tools package dotc package transform +import config.Printers.checks as printer import core.Names.Name import core.DenotTransformers.* import core.SymDenotations.* @@ -310,9 +311,11 @@ object TreeChecker { def assertDefined(tree: untpd.Tree)(using Context): Unit = if (tree.symbol.maybeOwner.isTerm) { val sym = tree.symbol + def isAllowed = // constructor proxies and context bound companions are flagged at PostTyper + isSymWithoutDef(sym) && ctx.phase.id < postTyperPhase.id assert( - nowDefinedSyms.contains(sym) || patBoundSyms.contains(sym), - i"undefined symbol ${sym} at line " + tree.srcPos.line + nowDefinedSyms.contains(sym) || patBoundSyms.contains(sym) || isAllowed, + i"undefined symbol ${sym} in ${sym.owner} at line " + tree.srcPos.line ) if (!ctx.phase.patternTranslated) @@ -383,6 +386,9 @@ object TreeChecker { case _ => } + def isSymWithoutDef(sym: Symbol)(using Context): Boolean = + sym.is(ConstructorProxy) || sym.isContextBoundCompanion + /** Exclude from double definition checks any erased symbols that were * made `private` in phase `UnlinkErasedDecls`. These symbols will be removed * completely in phase `Erasure` if they are defined in a currently compiled unit. @@ -418,31 +424,35 @@ object TreeChecker { } override def typedUnadapted(tree: untpd.Tree, pt: Type, locked: TypeVars)(using Context): Tree = { - val res = tree match { - case _: untpd.TypedSplice | _: untpd.Thicket | _: EmptyValDef[?] 
=> - super.typedUnadapted(tree, pt, locked) - case _ if tree.isType => - promote(tree) - case _ => - val tree1 = super.typedUnadapted(tree, pt, locked) - def isSubType(tp1: Type, tp2: Type) = - (tp1 eq tp2) || // accept NoType / NoType - (tp1 <:< tp2) - def divergenceMsg(tp1: Type, tp2: Type) = - s"""Types differ - |Original type : ${tree.typeOpt.show} - |After checking: ${tree1.tpe.show} - |Original tree : ${tree.show} - |After checking: ${tree1.show} - |Why different : - """.stripMargin + core.TypeComparer.explained(_.isSubType(tp1, tp2)) - if (tree.hasType) // it might not be typed because Typer sometimes constructs new untyped trees and resubmits them to typedUnadapted - assert(isSubType(tree1.tpe, tree.typeOpt), divergenceMsg(tree1.tpe, tree.typeOpt)) - tree1 - } - checkNoOrphans(res.tpe) - phasesToCheck.foreach(_.checkPostCondition(res)) - res + try + val res = tree match + case _: untpd.TypedSplice | _: untpd.Thicket | _: EmptyValDef[?] => + super.typedUnadapted(tree, pt, locked) + case _ if tree.isType => + promote(tree) + case _ => + val tree1 = super.typedUnadapted(tree, pt, locked) + def isSubType(tp1: Type, tp2: Type) = + (tp1 eq tp2) || // accept NoType / NoType + (tp1 <:< tp2) + def divergenceMsg(tp1: Type, tp2: Type) = + s"""Types differ + |Original type : ${tree.typeOpt.show} + |After checking: ${tree1.tpe.show} + |Original tree : ${tree.show} + |After checking: ${tree1.show} + |Why different : + """.stripMargin + core.TypeComparer.explained(_.isSubType(tp1, tp2)) + if (tree.hasType) // it might not be typed because Typer sometimes constructs new untyped trees and resubmits them to typedUnadapted + assert(isSubType(tree1.tpe, tree.typeOpt), divergenceMsg(tree1.tpe, tree.typeOpt)) + tree1 + checkNoOrphans(res.tpe) + phasesToCheck.foreach(_.checkPostCondition(res)) + res + catch case NonFatal(ex) if !ctx.run.enrichedErrorMessage => + val treeStr = tree.show(using ctx.withPhase(ctx.phase.prev.megaPhase)) + 
printer.println(ctx.run.enrichErrorMessage(s"exception while retyping $treeStr of class ${tree.className} # ${tree.uniqueId}")) + throw ex } def checkNotRepeated(tree: Tree)(using Context): tree.type = { @@ -609,14 +619,12 @@ object TreeChecker { val decls = cls.classInfo.decls.toList.toSet.filter(isNonMagicalMember) val defined = impl.body.map(_.symbol) - def isAllowed(sym: Symbol): Boolean = sym.is(ConstructorProxy) + val symbolsMissingDefs = (decls -- defined - constr.symbol).filterNot(isSymWithoutDef) - val symbolsNotDefined = (decls -- defined - constr.symbol).filterNot(isAllowed) - - assert(symbolsNotDefined.isEmpty, - i" $cls tree does not define members: ${symbolsNotDefined.toList}%, %\n" + - i"expected: ${decls.toList}%, %\n" + - i"defined: ${defined}%, %") + assert(symbolsMissingDefs.isEmpty, + i"""$cls tree does not define members: ${symbolsMissingDefs.toList}%, % + |expected: ${decls.toList}%, % + |defined: ${defined}%, %""") super.typedClassDef(cdef, cls) } @@ -837,9 +845,19 @@ object TreeChecker { def checkMacroGeneratedTree(original: tpd.Tree, expansion: tpd.Tree)(using Context): Unit = if ctx.settings.XcheckMacros.value then + // We want make sure that transparent inline macros are checked in the same way that + // non transparent macros are, so we try to prepare a context which would make + // the checks behave the same way for both types of macros. + // + // E.g. Different instances of skolem types are by definition not able to be a subtype of + // one another, however in practice this is only upheld during typer phase, and we do not want + // it to be upheld during this check. 
+ // See issue: #17009 val checkingCtx = ctx .fresh .setReporter(new ThrowingReporter(ctx.reporter)) + .setPhase(ctx.base.inliningPhase) + val phases = ctx.base.allPhases.toList val treeChecker = new LocalChecker(previousPhases(phases)) diff --git a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala index 74a4845424ea..082c239c6443 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala @@ -135,6 +135,7 @@ object TypeTestsCasts { def recur(X: Type, P: Type): String = trace(s"recur(${X.show}, ${P.show})") { (X <:< P) ||| P.dealias.match case _: SingletonType => "" + case MatchType.Normalizing(tp) => recur(X, tp) case _: TypeProxy if isAbstract(P) => i"it refers to an abstract type member or type parameter" case defn.ArrayOf(tpT) => @@ -151,7 +152,8 @@ object TypeTestsCasts { // - T1 & T2 <:< T3 // See TypeComparer#either recur(tp1, P) && recur(tp2, P) - + case tpX: FlexibleType => + recur(tpX.underlying, P) case x => // always false test warnings are emitted elsewhere // provablyDisjoint wants fully applied types as input; because we're in the middle of erasure, we sometimes get raw types here diff --git a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala index 692b3177786d..9e78bd5474a3 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala @@ -29,24 +29,28 @@ class Checker extends Phase: override val runsAfter = Set(Pickler.name) override def isEnabled(using Context): Boolean = - super.isEnabled && (ctx.settings.YcheckInit.value || ctx.settings.YcheckInitGlobal.value) + super.isEnabled && (ctx.settings.WcheckInit.value || ctx.settings.YcheckInitGlobal.value) def traverse(traverser: InitTreeTraverser)(using Context): Boolean = monitor(phaseName): val unit = 
ctx.compilationUnit traverser.traverse(unit.tpdTree) override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = - val checkCtx = ctx.fresh.setPhase(this.start) + val checkCtx = ctx.fresh.setPhase(this) val traverser = new InitTreeTraverser() - val unitContexts = units.map(unit => checkCtx.fresh.setCompilationUnit(unit)) val units0 = - for unitContext <- unitContexts if traverse(traverser)(using unitContext) yield unitContext.compilationUnit + for + unit <- units + unitContext = checkCtx.fresh.setCompilationUnit(unit) + if traverse(traverser)(using unitContext) + yield + unitContext.compilationUnit cancellable { val classes = traverser.getClasses() - if ctx.settings.YcheckInit.value then + if ctx.settings.WcheckInit.value then Semantic.checkClasses(classes)(using checkCtx) if ctx.settings.YcheckInitGlobal.value then diff --git a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala index cbbd97e3810e..52e90c0857ed 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Objects.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Objects.scala @@ -11,6 +11,7 @@ import StdNames.* import Names.TermName import NameKinds.OuterSelectName import NameKinds.SuperAccessorName +import Decorators.* import ast.tpd.* import util.{ SourcePosition, NoSourcePosition } @@ -28,6 +29,7 @@ import scala.collection.mutable import scala.annotation.tailrec import scala.annotation.constructorOnly import dotty.tools.dotc.core.Flags.AbstractOrTrait +import Decorators.* /** Check initialization safety of static objects * @@ -66,13 +68,22 @@ import dotty.tools.dotc.core.Flags.AbstractOrTrait * whole-program analysis. However, the check is not modular in terms of project boundaries. 
* */ -import Decorators.* class Objects(using Context @constructorOnly): - val immutableHashSetBuider: Symbol = requiredClass("scala.collection.immutable.HashSetBuilder") + val immutableHashSetNode: Symbol = requiredClass("scala.collection.immutable.SetNode") // TODO: this should really be an annotation on the rhs of the field initializer rather than the field itself. - val HashSetBuilder_rootNode: Symbol = immutableHashSetBuider.requiredValue("rootNode") - - val whiteList = Set(HashSetBuilder_rootNode) + val SetNode_EmptySetNode: Symbol = Denotations.staticRef("scala.collection.immutable.SetNode.EmptySetNode".toTermName).symbol + val immutableHashSet: Symbol = requiredModule("scala.collection.immutable.HashSet") + val HashSet_EmptySet: Symbol = Denotations.staticRef("scala.collection.immutable.HashSet.EmptySet".toTermName).symbol + val immutableVector: Symbol = requiredModule("scala.collection.immutable.Vector") + val Vector_EmptyIterator: Symbol = immutableVector.requiredValue("emptyIterator") + val immutableMapNode: Symbol = requiredModule("scala.collection.immutable.MapNode") + val MapNode_EmptyMapNode: Symbol = immutableMapNode.requiredValue("EmptyMapNode") + val immutableHashMap: Symbol = requiredModule("scala.collection.immutable.HashMap") + val HashMap_EmptyMap: Symbol = immutableHashMap.requiredValue("EmptyMap") + val immutableLazyList: Symbol = requiredModule("scala.collection.immutable.LazyList") + val LazyList_empty: Symbol = immutableLazyList.requiredValue("_empty") + + val whiteList: Set[Symbol] = Set(SetNode_EmptySetNode, HashSet_EmptySet, Vector_EmptyIterator, MapNode_EmptyMapNode, HashMap_EmptyMap, LazyList_empty) // ----------------------------- abstract domain ----------------------------- @@ -812,7 +823,8 @@ class Objects(using Context @constructorOnly): else Bottom else if target.exists then - if target.isOneOf(Flags.Mutable) then + def isNextFieldOfColonColon: Boolean = ref.klass == defn.ConsClass && target.name.toString == "next" + if 
target.isOneOf(Flags.Mutable) && !isNextFieldOfColonColon then if ref.hasVar(target) then val addr = ref.varAddr(target) if addr.owner == State.currentObject then diff --git a/compiler/src/dotty/tools/dotc/transform/init/Util.scala b/compiler/src/dotty/tools/dotc/transform/init/Util.scala index 756fd1a0a8e7..e11d0e1e21a5 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Util.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Util.scala @@ -20,6 +20,7 @@ object Util: def typeRefOf(tp: Type)(using Context): TypeRef = tp.dealias.typeConstructor match case tref: TypeRef => tref + case RefinedType(parent, _, _) => typeRefOf(parent) case hklambda: HKTypeLambda => typeRefOf(hklambda.resType) diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala index 853fead6f799..5c7119860ae4 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala @@ -637,7 +637,11 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => private def maybeWrapSuperCallWithContextualJSClassValue(tree: Tree)(using Context): Tree = { methPart(tree) match { case Select(sup: Super, _) if isInnerOrLocalJSClass(sup.symbol.asClass.superClass) => - wrapWithContextualJSClassValue(sup.symbol.asClass.superClass.typeRef)(tree) + val superClass = sup.symbol.asClass.superClass + val jsClassTypeInSuperClass = superClass.typeRef + // scala-js#4801 Rebase the super class type on the current class' this type + val jsClassTypeAsSeenFromThis = jsClassTypeInSuperClass.asSeenFrom(currentClass.thisType, superClass) + wrapWithContextualJSClassValue(jsClassTypeAsSeenFromThis)(tree) case _ => tree } diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala index dbd6e1a8f412..f66141bff8ad 100644 --- 
a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala @@ -45,101 +45,28 @@ object PrepJSExports { private final case class ExportInfo(jsName: String, destination: ExportDestination)(val pos: SrcPos) - /** Checks a class or module class for export. + /** Generate exports for the given Symbol. * - * Note that non-module Scala classes are never actually exported; their constructors are. - * However, the checks are performed on the class when the class is annotated. + * - Registers top-level and static exports. + * - Returns (non-static) exporters for this symbol. */ - def checkClassOrModuleExports(sym: Symbol)(using Context): Unit = { - val exports = exportsOf(sym) - if (exports.nonEmpty) - checkClassOrModuleExports(sym, exports.head.pos) - } + def genExport(sym: Symbol)(using Context): List[Tree] = { + // Scala classes are never exported: Their constructors are. + val isScalaClass = sym.isClass && !sym.isOneOf(Trait | Module) && !isJSAny(sym) - /** Generate the exporter for the given DefDef or ValDef. - * - * If this DefDef is a constructor, it is registered to be exported by - * GenJSCode instead and no trees are returned. - */ - def genExportMember(baseSym: Symbol)(using Context): List[Tree] = { - val clsSym = baseSym.owner + // Filter constructors of module classes: The module classes themselves will be exported. + val isModuleClassCtor = sym.isConstructor && sym.owner.is(ModuleClass) - val exports = exportsOf(baseSym) + val exports = + if (isScalaClass || isModuleClassCtor) Nil + else exportsOf(sym) - // Helper function for errors - def err(msg: String): List[Tree] = { - report.error(msg, exports.head.pos) - Nil - } + assert(exports.isEmpty || !sym.is(Bridge), + s"found exports for bridge symbol $sym. 
exports: $exports") - def memType = if (baseSym.isConstructor) "constructor" else "method" - - if (exports.isEmpty) { - Nil - } else if (!hasLegalExportVisibility(baseSym)) { - err(s"You may only export public and protected ${memType}s") - } else if (baseSym.is(Inline)) { - err("You may not export an inline method") - } else if (isJSAny(clsSym)) { - err(s"You may not export a $memType of a subclass of js.Any") - } else if (baseSym.isLocalToBlock) { - err("You may not export a local definition") - } else if (hasIllegalRepeatedParam(baseSym)) { - err(s"In an exported $memType, a *-parameter must come last (through all parameter lists)") - } else if (hasIllegalDefaultParam(baseSym)) { - err(s"In an exported $memType, all parameters with defaults must be at the end") - } else if (baseSym.isConstructor) { - // Constructors do not need an exporter method. We only perform the checks at this phase. - checkClassOrModuleExports(clsSym, exports.head.pos) - Nil - } else { - assert(!baseSym.is(Bridge), s"genExportMember called for bridge symbol $baseSym") - val normalExports = exports.filter(_.destination == ExportDestination.Normal) - normalExports.flatMap(exp => genExportDefs(baseSym, exp.jsName, exp.pos.span)) - } - } - - /** Check a class or module for export. 
- * - * There are 2 ways that this method can be reached: - * - via `registerClassExports` - * - via `genExportMember` (constructor of Scala class) - */ - private def checkClassOrModuleExports(sym: Symbol, errPos: SrcPos)(using Context): Unit = { - val isMod = sym.is(ModuleClass) - - def err(msg: String): Unit = - report.error(msg, errPos) - - def hasAnyNonPrivateCtor: Boolean = - sym.info.decl(nme.CONSTRUCTOR).hasAltWith(denot => !isPrivateMaybeWithin(denot.symbol)) - - if (sym.is(Trait)) { - err("You may not export a trait") - } else if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) { - err("You may not export a native JS " + (if (isMod) "object" else "class")) - } else if (!hasLegalExportVisibility(sym)) { - err("You may only export public and protected " + (if (isMod) "objects" else "classes")) - } else if (isJSAny(sym.owner)) { - err("You may not export a " + (if (isMod) "object" else "class") + " in a subclass of js.Any") - } else if (sym.isLocalToBlock) { - err("You may not export a local " + (if (isMod) "object" else "class")) - } else if (!sym.isStatic) { - if (isMod) - err("You may not export a nested object") - else - err("You may not export a nested class. Create an exported factory method in the outer class to work around this limitation.") - } else if (sym.is(Abstract, butNot = Trait) && !isJSAny(sym)) { - err("You may not export an abstract class") - } else if (!isMod && !hasAnyNonPrivateCtor) { - /* This test is only relevant for JS classes but doesn't hurt for Scala - * classes as we could not reach it if there were only private - * constructors. - */ - err("You may not export a class that has only private constructors") - } else { - // OK - } + // For normal exports, generate exporter methods. + val normalExports = exports.filter(_.destination == ExportDestination.Normal) + normalExports.flatMap(exp => genExportDefs(sym, exp.jsName, exp.pos.span)) } /** Computes the ExportInfos for sym from its annotations. 
*/ @@ -152,6 +79,10 @@ object PrepJSExports { else sym } + val symOwner = + if (sym.isConstructor) sym.owner.owner + else sym.owner + val JSExportAnnot = jsdefn.JSExportAnnot val JSExportTopLevelAnnot = jsdefn.JSExportTopLevelAnnot val JSExportStaticAnnot = jsdefn.JSExportStaticAnnot @@ -161,19 +92,42 @@ object PrepJSExports { val directMemberAnnots = Set[Symbol](JSExportAnnot, JSExportTopLevelAnnot, JSExportStaticAnnot) val directAnnots = trgSym.annotations.filter(annot => directMemberAnnots.contains(annot.symbol)) - // Is this a member export (i.e. not a class or module export)? - val isMember = !sym.isClass && !sym.isConstructor - - // Annotations for this member on the whole unit + /* Annotations for this member on the whole unit + * + * Note that for top-level classes / modules this is always empty, because + * packages cannot have annotations. + */ val unitAnnots = { - if (isMember && sym.isPublic && !sym.is(Synthetic)) - sym.owner.annotations.filter(_.symbol == JSExportAllAnnot) + val useExportAll = { + sym.isPublic && + !sym.is(Synthetic) && + !sym.isConstructor && + !sym.is(Trait) && + (!sym.isClass || sym.is(ModuleClass)) + } + + if (useExportAll) + symOwner.annotations.filter(_.symbol == JSExportAllAnnot) else Nil } + val allAnnots = { + val allAnnots0 = directAnnots ++ unitAnnots + + if (allAnnots0.nonEmpty) { + val errorPos: SrcPos = + if (allAnnots0.head.symbol == JSExportAllAnnot) sym + else allAnnots0.head.tree + if (checkExportTarget(sym, errorPos)) allAnnots0 + else Nil // prevent code generation from running to avoid crashes. 
+ } else { + Nil + } + } + val allExportInfos = for { - annot <- directAnnots ++ unitAnnots + annot <- allAnnots } yield { val isExportAll = annot.symbol == JSExportAllAnnot val isTopLevelExport = annot.symbol == JSExportTopLevelAnnot @@ -194,7 +148,13 @@ object PrepJSExports { "dummy" } } else { - sym.defaultJSName + val name = (if (sym.isConstructor) sym.owner else sym).defaultJSName + if (name.endsWith(str.SETTER_SUFFIX) && !sym.isJSSetter) { + report.error( + "You must set an explicit name when exporting a non-setter with a name ending in _=", + exportPos) + } + name } } @@ -217,20 +177,33 @@ object PrepJSExports { } } - // Enforce proper setter signature - if (sym.isJSSetter) - checkSetterSignature(sym, exportPos, exported = true) - // Enforce no __ in name if (!isTopLevelExport && name.contains("__")) report.error("An exported name may not contain a double underscore (`__`)", exportPos) - /* Illegal function application exports, i.e., method named 'apply' - * without an explicit export name. - */ - if (isMember && !hasExplicitName && sym.name == nme.apply) { - destination match { - case ExportDestination.Normal => + // Destination-specific restrictions + destination match { + case ExportDestination.Normal => + // Disallow @JSExport on top-level definitions. + if (symOwner.is(Package) || symOwner.isPackageObject) { + report.error("@JSExport is forbidden on top-level definitions. Use @JSExportTopLevel instead.", exportPos) + } + + // Make sure we do not override the default export of toString + def isIllegalToString = { + name == "toString" && sym.name != nme.toString_ && + sym.info.paramInfoss.forall(_.isEmpty) && !sym.isJSGetter + } + if (isIllegalToString) { + report.error( + "You may not export a zero-argument method named other than 'toString' under the name 'toString'", + exportPos) + } + + /* Illegal function application exports, i.e., method named 'apply' + * without an explicit export name. 
+ */ + if (!hasExplicitName && sym.name == nme.apply) { def shouldBeTolerated = { isExportAll && directAnnots.exists { annot => annot.symbol == JSExportAnnot && @@ -246,44 +219,6 @@ object PrepJSExports { "Add @JSExport(\"apply\") to export under the name apply.", exportPos) } - - case _: ExportDestination.TopLevel => - throw new AssertionError( - em"Found a top-level export without an explicit name at ${exportPos.sourcePos}") - - case ExportDestination.Static => - report.error( - "A member cannot be exported to function application as static. " + - "Use @JSExportStatic(\"apply\") to export it under the name 'apply'.", - exportPos) - } - } - - val symOwner = - if (sym.isConstructor) sym.owner.owner - else sym.owner - - // Destination-specific restrictions - destination match { - case ExportDestination.Normal => - // Make sure we do not override the default export of toString - def isIllegalToString = { - isMember && name == "toString" && sym.name != nme.toString_ && - sym.info.paramInfoss.forall(_.isEmpty) && !sym.isJSGetter - } - if (isIllegalToString) { - report.error( - "You may not export a zero-argument method named other than 'toString' under the name 'toString'", - exportPos) - } - - // Disallow @JSExport at the top-level, as well as on objects and classes - if (symOwner.is(Package) || symOwner.isPackageObject) { - report.error("@JSExport is forbidden on top-level definitions. Use @JSExportTopLevel instead.", exportPos) - } else if (!isMember && !sym.is(Trait)) { - report.error( - "@JSExport is forbidden on objects and classes. Use @JSExport'ed factory methods instead.", - exportPos) } case _: ExportDestination.TopLevel => @@ -292,10 +227,8 @@ object PrepJSExports { else if (sym.is(Method, butNot = Accessor) && sym.isJSProperty) report.error("You may not export a getter or a setter to the top level", exportPos) - /* Disallow non-static methods. - * Note: Non-static classes have more specific error messages in checkClassOrModuleExports. 
- */ - if (sym.isTerm && (!symOwner.isStatic || !symOwner.is(ModuleClass))) + // Disallow non-static definitions. + if (!symOwner.isStatic || !symOwner.is(ModuleClass)) report.error("Only static objects may export their members to the top level", exportPos) // The top-level name must be a valid JS identifier @@ -317,14 +250,20 @@ object PrepJSExports { exportPos) } - if (isMember) { - if (sym.is(Lazy)) - report.error("You may not export a lazy val as static", exportPos) - } else { - if (sym.is(Trait)) - report.error("You may not export a trait as static.", exportPos) - else - report.error("Implementation restriction: cannot export a class or object as static", exportPos) + if (sym.is(Lazy)) + report.error("You may not export a lazy val as static", exportPos) + + // Illegal function application export + if (!hasExplicitName && sym.name == nme.apply) { + report.error( + "A member cannot be exported to function application as " + + "static. Use @JSExportStatic(\"apply\") to export it under " + + "the name 'apply'.", + exportPos) + } + + if (sym.isClass || sym.isConstructor) { + report.error("Implementation restriction: cannot export a class or object as static", exportPos) } } @@ -342,9 +281,9 @@ object PrepJSExports { } .foreach(_ => report.warning("Found duplicate @JSExport", sym)) - /* Make sure that no field is exported *twice* as static, nor both as - * static and as top-level (it is possible to export a field several times - * as top-level, though). + /* Check that no field is exported *twice* as static, nor both as static + * and as top-level (it is possible to export a field several times as + * top-level, though). */ if (!sym.is(Method)) { for (firstStatic <- allExportInfos.find(_.destination == ExportDestination.Static)) { @@ -370,32 +309,114 @@ object PrepJSExports { allExportInfos.distinct } + /** Checks whether the given target is suitable for export and exporting + * should be performed. + * + * Reports any errors for unsuitable targets. 
+ * @returns a boolean indicating whether exporting should be performed. Note: + * a result of true is not a guarantee that no error was emitted. But it is + * a guarantee that the target is not "too broken" to run the rest of + * the generation. This approximation is done to avoid having to complicate + * shared code verifying conditions. + */ + private def checkExportTarget(sym: Symbol, errPos: SrcPos)(using Context): Boolean = { + def err(msg: String): Boolean = { + report.error(msg, errPos) + false + } + + def hasLegalExportVisibility(sym: Symbol): Boolean = + sym.isPublic || sym.is(Protected, butNot = Local) + + def isMemberOfJSAny: Boolean = + isJSAny(sym.owner) || (sym.isConstructor && isJSAny(sym.owner.owner)) + + def hasIllegalRepeatedParam: Boolean = { + val paramInfos = sym.info.paramInfoss.flatten + paramInfos.nonEmpty && paramInfos.init.exists(_.isRepeatedParam) + } + + def hasIllegalDefaultParam: Boolean = { + sym.hasDefaultParams + && sym.paramSymss.flatten.reverse.dropWhile(_.is(HasDefault)).exists(_.is(HasDefault)) + } + + def hasAnyNonPrivateCtor: Boolean = + sym.info.member(nme.CONSTRUCTOR).hasAltWith(d => !isPrivateMaybeWithin(d.symbol)) + + if (sym.is(Trait)) { + err("You may not export a trait") + } else if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) { + err("You may not export a native JS definition") + } else if (!hasLegalExportVisibility(sym)) { + err("You may only export public and protected definitions") + } else if (sym.isConstructor && !hasLegalExportVisibility(sym.owner)) { + err("You may only export constructors of public and protected classes") + } else if (sym.is(Macro)) { + err("You may not export a macro") + } else if (isMemberOfJSAny) { + err("You may not export a member of a subclass of js.Any") + } else if (sym.isLocalToBlock) { + err("You may not export a local definition") + } else if (sym.isConstructor && sym.owner.isLocalToBlock) { + err("You may not export constructors of local classes") + } else if (hasIllegalRepeatedParam) 
{ + err("In an exported method or constructor, a *-parameter must come last " + + "(through all parameter lists)") + } else if (hasIllegalDefaultParam) { + err("In an exported method or constructor, all parameters with " + + "defaults must be at the end") + } else if (sym.isConstructor && sym.owner.is(Abstract, butNot = Trait) && !isJSAny(sym)) { + err("You may not export an abstract class") + } else if (sym.isClass && !sym.is(ModuleClass) && isJSAny(sym) && !hasAnyNonPrivateCtor) { + /* This test is only relevant for JS classes: We'll complain on the + * individual exported constructors in case of a Scala class. + */ + err("You may not export a class that has only private constructors") + } else { + if (sym.isJSSetter) + checkSetterSignature(sym, errPos, exported = true) + + true // ok even if a setter has the wrong signature. + } + } + /** Generates an exporter for a DefDef including default parameter methods. */ - private def genExportDefs(defSym: Symbol, jsName: String, span: Span)(using Context): List[Tree] = { - val clsSym = defSym.owner.asClass + private def genExportDefs(sym: Symbol, jsName: String, span: Span)(using Context): List[Tree] = { + val siblingSym = + if (sym.isConstructor) sym.owner + else sym + + val clsSym = siblingSym.owner.asClass + + val isProperty = sym.is(ModuleClass) || isJSAny(sym) || sym.isJSProperty + + val copiedFlags0 = (siblingSym.flags & (Protected | Final)).toTermFlags + val copiedFlags = + if (siblingSym.is(HasDefaultParams)) copiedFlags0 | HasDefaultParams // term flag only + else copiedFlags0 // Create symbol for new method - val name = makeExportName(jsName, !defSym.is(Method) || defSym.isJSProperty) - val flags = (defSym.flags | Method | Synthetic) - &~ (Deferred | Accessor | ParamAccessor | CaseAccessor | Mutable | Lazy | Override) + val scalaName = makeExportName(jsName, !sym.is(Method) || sym.isJSProperty) + val flags = Method | Synthetic | copiedFlags val info = - if (defSym.isConstructor) defSym.info - else if 
(defSym.is(Method)) finalResultTypeToAny(defSym.info) + if (sym.isConstructor) sym.info + else if (sym.is(Method)) finalResultTypeToAny(sym.info) else ExprType(defn.AnyType) - val expSym = newSymbol(clsSym, name, flags, info, defSym.privateWithin, span).entered + val expSym = newSymbol(clsSym, scalaName, flags, info, sym.privateWithin, span).entered // Construct exporter DefDef tree - val exporter = genProxyDefDef(clsSym, defSym, expSym, span) + val exporter = genProxyDefDef(clsSym, sym, expSym, span) // Construct exporters for default getters - val defaultGetters = if (!defSym.hasDefaultParams) { + val defaultGetters = if (!sym.hasDefaultParams) { Nil } else { for { - (param, i) <- defSym.paramSymss.flatten.zipWithIndex + (param, i) <- sym.paramSymss.flatten.zipWithIndex if param.is(HasDefault) } yield { - genExportDefaultGetter(clsSym, defSym, expSym, i, span) + genExportDefaultGetter(clsSym, sym, expSym, i, span) } } @@ -431,7 +452,27 @@ object PrepJSExports { proxySym: TermSymbol, span: Span)(using Context): Tree = { DefDef(proxySym, { argss => - This(clsSym).select(trgSym).appliedToArgss(argss) + if (trgSym.isConstructor) { + val tycon = trgSym.owner.typeRef + New(tycon).select(TermRef(tycon, trgSym)).appliedToArgss(argss) + } else if (trgSym.is(ModuleClass)) { + assert(argss.isEmpty, + s"got a module export with non-empty paramss. target: $trgSym, proxy: $proxySym at $span") + ref(trgSym.sourceModule) + } else if (trgSym.isClass) { + assert(isJSAny(trgSym), s"got a class export for a non-JS class ($trgSym) at $span") + val tpe = argss match { + case Nil => + trgSym.typeRef + case (targs @ (first :: _)) :: Nil if first.isType => + trgSym.typeRef.appliedTo(targs.map(_.tpe)) + case _ => + throw AssertionError(s"got a class export with unexpected paramss. 
target: $trgSym, proxy: $proxySym at $span") + } + ref(jsdefn.JSPackage_constructorOf).appliedToType(tpe) + } else { + This(clsSym).select(trgSym).appliedToArgss(argss) + } }).withSpan(span) } @@ -448,20 +489,4 @@ object PrepJSExports { case _ => defn.AnyType } - - /** Whether the given symbol has a visibility that allows exporting */ - private def hasLegalExportVisibility(sym: Symbol)(using Context): Boolean = - sym.isPublic || sym.is(Protected, butNot = Local) - - /** Checks whether this type has a repeated parameter elsewhere than at the end of all the params. */ - private def hasIllegalRepeatedParam(sym: Symbol)(using Context): Boolean = { - val paramInfos = sym.info.paramInfoss.flatten - paramInfos.nonEmpty && paramInfos.init.exists(_.isRepeatedParam) - } - - /** Checks whether there are default parameters not at the end of the flattened parameter list. */ - private def hasIllegalDefaultParam(sym: Symbol)(using Context): Boolean = { - sym.hasDefaultParams - && sym.paramSymss.flatten.reverse.dropWhile(_.is(HasDefault)).exists(_.is(HasDefault)) - } } diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala index 610fca869ad2..1b8fdd268ece 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala @@ -160,7 +160,9 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP tree match { case tree: TypeDef if tree.isClassDef => - checkClassOrModuleExports(sym) + val exports = genExport(sym) + if (exports.nonEmpty) + exporters.getOrElseUpdate(sym.owner, mutable.ListBuffer.empty) ++= exports if (isJSAny(sym)) transformJSClassDef(tree) @@ -172,7 +174,11 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP case tree: ValOrDefDef => // Prepare exports - exporters.getOrElseUpdate(sym.owner, mutable.ListBuffer.empty) ++= genExportMember(sym) + val 
exports = genExport(sym) + if (exports.nonEmpty) { + val target = if (sym.isConstructor) sym.owner.owner else sym.owner + exporters.getOrElseUpdate(target, mutable.ListBuffer.empty) ++= exports + } if (sym.isLocalToBlock) super.transform(tree) @@ -247,6 +253,8 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP exporters.get(clsSym).fold { transformedTree } { exports => + assert(exports.nonEmpty, s"found empty exporters for $clsSym" ) + checkNoDoubleDeclaration(clsSym) cpy.Template(transformedTree)( diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index 82f4c89ae203..9a5db44b15ca 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -18,11 +18,12 @@ import Names.* import StdNames.* import ContextOps.* import NameKinds.DefaultGetterName +import Typer.tryEither import ProtoTypes.* import Inferencing.* import reporting.* import Nullables.*, NullOpsDecorator.* -import config.Feature +import config.{Feature, SourceVersion} import collection.mutable import config.Printers.{overload, typr, unapp} @@ -135,14 +136,6 @@ object Applications { sels.takeWhile(_.exists).toList } - def getUnapplySelectors(tp: Type, args: List[untpd.Tree], pos: SrcPos)(using Context): List[Type] = - if (args.length > 1 && !(tp.derivesFrom(defn.SeqClass))) { - val sels = productSelectorTypes(tp, pos) - if (sels.length == args.length) sels - else tp :: Nil - } - else tp :: Nil - def productSeqSelectors(tp: Type, argsNum: Int, pos: SrcPos)(using Context): List[Type] = { val selTps = productSelectorTypes(tp, pos) val arity = selTps.length @@ -150,61 +143,122 @@ object Applications { (0 until argsNum).map(i => if (i < arity - 1) selTps(i) else elemTp).toList } - def unapplyArgs(unapplyResult: Type, unapplyFn: Tree, args: List[untpd.Tree], pos: SrcPos)(using Context): List[Type] = { - def getName(fn: Tree): Name = + /** 
A utility class that matches results of unapplys with patterns. Two queriable members: + * val argTypes: List[Type] + * def typedPatterns(qual: untpd.Tree, typer: Typer): List[Tree] + * TODO: Move into Applications trait. No need to keep it outside. But it's a large + * refactor, so do this when the rest is merged. + */ + class UnapplyArgs(unapplyResult: Type, unapplyFn: Tree, unadaptedArgs: List[untpd.Tree], pos: SrcPos)(using Context): + private var args = unadaptedArgs + + private def getName(fn: Tree): Name = fn match case TypeApply(fn, _) => getName(fn) case Apply(fn, _) => getName(fn) case fn: RefTree => fn.name - val unapplyName = getName(unapplyFn) // tolerate structural `unapply`, which does not have a symbol + private val unapplyName = getName(unapplyFn) // tolerate structural `unapply`, which does not have a symbol - def getTp = extractorMemberType(unapplyResult, nme.get, pos) + private def getTp = extractorMemberType(unapplyResult, nme.get, pos) - def fail = { + private def fail = { report.error(UnapplyInvalidReturnType(unapplyResult, unapplyName), pos) Nil } - def unapplySeq(tp: Type)(fallback: => List[Type]): List[Type] = { + private def unapplySeq(tp: Type)(fallback: => List[Type]): List[Type] = val elemTp = unapplySeqTypeElemTp(tp) - if (elemTp.exists) args.map(Function.const(elemTp)) - else if (isProductSeqMatch(tp, args.length, pos)) productSeqSelectors(tp, args.length, pos) - else if tp.derivesFrom(defn.NonEmptyTupleClass) then foldApplyTupleType(tp) + if elemTp.exists then + args.map(Function.const(elemTp)) + else if isProductSeqMatch(tp, args.length, pos) then + productSeqSelectors(tp, args.length, pos) + else if tp.derivesFrom(defn.NonEmptyTupleClass) then + tp.tupleElementTypes.getOrElse(Nil) else fallback - } - if (unapplyName == nme.unapplySeq) - unapplySeq(unapplyResult) { - if (isGetMatch(unapplyResult, pos)) unapplySeq(getTp)(fail) - else fail - } - else { - assert(unapplyName == nme.unapply) - if (isProductMatch(unapplyResult, 
args.length, pos)) - productSelectorTypes(unapplyResult, pos) - else if (isGetMatch(unapplyResult, pos)) - getUnapplySelectors(getTp, args, pos) - else if (unapplyResult.widenSingleton isRef defn.BooleanClass) - Nil - else if (defn.isProductSubType(unapplyResult) && productArity(unapplyResult, pos) != 0) - productSelectorTypes(unapplyResult, pos) - // this will cause a "wrong number of arguments in pattern" error later on, - // which is better than the message in `fail`. - else if unapplyResult.derivesFrom(defn.NonEmptyTupleClass) then - foldApplyTupleType(unapplyResult) - else fail - } - } + private def tryAdaptPatternArgs(elems: List[untpd.Tree], pt: Type)(using Context): Option[List[untpd.Tree]] = + tryEither[Option[List[untpd.Tree]]] + (Some(desugar.adaptPatternArgs(elems, pt))) + ((_, _) => None) + + private def getUnapplySelectors(tp: Type)(using Context): List[Type] = + // We treat patterns as product elements if + // they are named, or there is more than one pattern + val isProduct = args match + case x :: xs => x.isInstanceOf[untpd.NamedArg] || xs.nonEmpty + case _ => false + if isProduct && !tp.derivesFrom(defn.SeqClass) then + productUnapplySelectors(tp).getOrElse: + // There are unapplys with return types which have `get` and `_1, ..., _n` + // as members, but which are not subtypes of Product. So `productUnapplySelectors` + // would return None for these, but they are still valid types + // for a get match. A test case is pos/extractors.scala. 
+ val sels = productSelectorTypes(tp, pos) + if (sels.length == args.length) sels + else tp :: Nil + else tp :: Nil + + private def productUnapplySelectors(tp: Type)(using Context): Option[List[Type]] = + if defn.isProductSubType(tp) then + tryAdaptPatternArgs(args, tp) match + case Some(args1) if isProductMatch(tp, args1.length, pos) => + args = args1 + Some(productSelectorTypes(tp, pos)) + case _ => None + else tp.widen.normalized.dealias match + case tp @ defn.NamedTuple(_, tt) => + tryAdaptPatternArgs(args, tp) match + case Some(args1) => + args = args1 + tt.tupleElementTypes + case _ => None + case _ => None + + /** The computed argument types which will be the scutinees of the sub-patterns. */ + val argTypes: List[Type] = + if unapplyName == nme.unapplySeq then + unapplySeq(unapplyResult): + if (isGetMatch(unapplyResult, pos)) unapplySeq(getTp)(fail) + else fail + else + assert(unapplyName == nme.unapply) + productUnapplySelectors(unapplyResult).getOrElse: + if isGetMatch(unapplyResult, pos) then + getUnapplySelectors(getTp) + else if unapplyResult.derivesFrom(defn.BooleanClass) then + Nil + else if unapplyResult.derivesFrom(defn.NonEmptyTupleClass) then + unapplyResult.tupleElementTypes.getOrElse(Nil) + else if defn.isProductSubType(unapplyResult) && productArity(unapplyResult, pos) != 0 then + productSelectorTypes(unapplyResult, pos) + // this will cause a "wrong number of arguments in pattern" error later on, + // which is better than the message in `fail`. 
+ else fail + + /** The typed pattens of this unapply */ + def typedPatterns(qual: untpd.Tree, typer: Typer): List[Tree] = + unapp.println(i"unapplyQual = $qual, unapplyArgs = ${unapplyResult} with $argTypes / $args") + for argType <- argTypes do + assert(!isBounds(argType), unapplyResult.show) + val alignedArgs = argTypes match + case argType :: Nil + if args.lengthCompare(1) > 0 + && Feature.autoTuplingEnabled + && defn.isTupleNType(argType) => + untpd.Tuple(args) :: Nil + case _ => + args + val alignedArgTypes = + if argTypes.length == alignedArgs.length then + argTypes + else + report.error(UnapplyInvalidNumberOfArguments(qual, argTypes), pos) + argTypes.take(args.length) ++ + List.fill(argTypes.length - args.length)(WildcardType) + alignedArgs.lazyZip(alignedArgTypes).map(typer.typed(_, _)) + .showing(i"unapply patterns = $result", unapp) - def foldApplyTupleType(tp: Type)(using Context): List[Type] = - object tupleFold extends TypeAccumulator[List[Type]]: - override def apply(accum: List[Type], t: Type): List[Type] = - t match - case AppliedType(tycon, x :: x2 :: Nil) if tycon.typeSymbol == defn.PairClass => - apply(x :: accum, x2) - case x => foldOver(accum, x) - end tupleFold - tupleFold(Nil, tp).reverse + end UnapplyArgs def wrapDefs(defs: mutable.ListBuffer[Tree] | Null, tree: Tree)(using Context): Tree = if (defs != null && defs.nonEmpty) tpd.Block(defs.toList, tree) else tree @@ -275,7 +329,20 @@ object Applications { if (getterDenot.exists) qual.select(TermRef(qual.tpe, getterName, getterDenot)) else EmptyTree if !meth.isClassConstructor then - selectGetter(receiver) + val res = selectGetter(receiver) + if res.isEmpty && meth.is(Given) then + val classSym = meth.info.finalResultType.typeSymbol + if classSym.isClass && classSym.isAllOf(Given | Synthetic) then + // `meth` is an implicit wrapper: the `given def` desugared from a + // `given C(...)` or `given C with ...` by `desugar#classDef`. 
+ // Therefore, we can try to look for the default getters of the + // constructor of the `given class`. We find it via the `given + // def`'s result type. See #20088 and associated test cases. + val classRefTree = receiver.select(classSym) + val constructorSym = classSym.primaryConstructor.asTerm + findDefaultGetter(constructorSym, classRefTree, idx) + else res + else res else // default getters for class constructors are found in the companion object val cls = meth.owner @@ -346,6 +413,22 @@ object Applications { val flags2 = sym1.flags | NonMember // ensures Select typing doesn't let TermRef#withPrefix revert the type val sym2 = sym1.copy(info = methType, flags = flags2) // symbol not entered, to avoid overload resolution problems fun.withType(sym2.termRef) + + /** Drop any leading implicit parameter sections */ + def stripImplicit(tp: Type, wildcardOnly: Boolean = false)(using Context): Type = tp match { + case mt: MethodType if mt.isImplicitMethod => + stripImplicit(resultTypeApprox(mt, wildcardOnly)) + case pt: PolyType => + pt.derivedLambdaType(pt.paramNames, pt.paramInfos, + stripImplicit(pt.resultType, wildcardOnly = true)) + // can't use TypeParamRefs for parameter references in `resultTypeApprox` + // since their bounds can refer to type parameters in `pt` that are not + // bound by the constraint. This can lead to hygiene violations if subsequently + // `pt` itself is added to the constraint. Test case is run/enrich-gentraversable.scala. 
+ .asInstanceOf[PolyType].flatten + case _ => + tp + } } trait Applications extends Compatibility { @@ -971,7 +1054,7 @@ trait Applications extends Compatibility { // one can imagine the original signature-polymorphic method as // being infinitely overloaded, with each individual overload only // being brought into existence as needed - val originalResultType = funRef.symbol.info.resultType.stripNull + val originalResultType = funRef.symbol.info.resultType.stripNull() val resultType = if !originalResultType.isRef(defn.ObjectClass) then originalResultType else AvoidWildcardsMap()(proto.resultType.deepenProtoTrans) match @@ -1008,6 +1091,33 @@ trait Applications extends Compatibility { } } + /** If the applied function is an automatically inserted `apply` + * method and one of its arguments has a type mismatch , append + * a note to the error message that explains where the required + * type comes from. See #19680 and associated test case. + */ + def maybeAddInsertedApplyNote(failedState: TyperState, fun1: Tree)(using Context): Unit = + if fun1.symbol.name == nme.apply && fun1.span.isSynthetic then + fun1 match + case Select(qualifier, _) => + def mapMessage(dia: Diagnostic): Diagnostic = + dia match + case dia: Diagnostic.Error => + dia.msg match + case msg: TypeMismatch => + msg.inTree match + case Some(arg) if tree.args.exists(_.span == arg.span) => + val noteText = + i"""The required type comes from a parameter of the automatically + |inserted `apply` method of `${qualifier.tpe}`.""".stripMargin + Diagnostic.Error(msg.appendExplanation("\n\n" + noteText), dia.pos) + case _ => dia + case msg => dia + case dia => dia + failedState.reporter.mapBufferedMessages(mapMessage) + case _ => () + else () + fun1.tpe match { case err: ErrorType => cpy.Apply(tree)(fun1, proto.typedArgs()).withType(err) case TryDynamicCallType => @@ -1068,7 +1178,11 @@ trait Applications extends Compatibility { simpleApply(fun1, proto) } { (failedVal, failedState) => - def fail = { 
failedState.commit(); failedVal } + def fail = + maybeAddInsertedApplyNote(failedState, fun1) + failedState.commit() + failedVal + // Try once with original prototype and once (if different) with tupled one. // The reason we need to try both is that the decision whether to use tupled // or not was already taken but might have to be revised when an implicit @@ -1124,12 +1238,13 @@ trait Applications extends Compatibility { } app } - app1 match { + val app2 = app1 match { case Apply(Block(stats, fn), args) => tpd.cpy.Block(app1)(stats, tpd.cpy.Apply(app1)(fn, args)) case _ => app1 } + ConstFold(app2) } /** Typecheck an Apply node with a typed function and possibly-typed arguments coming from `proto` */ @@ -1282,9 +1397,10 @@ trait Applications extends Compatibility { case _ => false case _ => false - def typedUnApply(tree: untpd.Apply, selType: Type)(using Context): Tree = { + def typedUnApply(tree: untpd.Apply, selType0: Type)(using Context): Tree = { record("typedUnApply") - val Apply(qual, args) = tree + val Apply(qual, unadaptedArgs) = tree + val selType = selType0.stripNamedTuple def notAnExtractor(tree: Tree): Tree = // prefer inner errors @@ -1359,8 +1475,8 @@ trait Applications extends Compatibility { || ctx.reporter.hasErrors then result else notAnExtractor(result) - // It might be that the result of typedExpr is an `apply` selection or implicit conversion. - // Reject in this case. + // It might be that the result of typedExpr is an `apply` selection or implicit conversion. + // Reject in this case. 
def tryWithTypeArgs(qual: untpd.Tree, targs: List[Tree])(fallBack: (Tree, TyperState) => Tree): Tree = tryEither { @@ -1429,7 +1545,8 @@ trait Applications extends Compatibility { report.error(em"Structural unapply is not supported", unapplyFn.srcPos) (unapplyFn, unapplyAppCall) case Apply(fn, `dummyArg` :: Nil) => - val inlinedUnapplyFn = Inlines.inlinedUnapplyFun(fn) + val inlinedUnapplyFn = withoutMode(Mode.Pattern): + Inlines.inlinedUnapplyFun(fn) (inlinedUnapplyFn, inlinedUnapplyFn.appliedToArgs(`dummyArg` :: Nil)) case Apply(fn, args) => val (fn1, app) = rec(fn) @@ -1496,27 +1613,14 @@ trait Applications extends Compatibility { typedExpr(untpd.TypedSplice(Apply(unapplyFn, dummyArg :: Nil))) inlinedUnapplyFnAndApp(dummyArg, unapplyAppCall) - var argTypes = unapplyArgs(unapplyApp.tpe, unapplyFn, args, tree.srcPos) - for (argType <- argTypes) assert(!isBounds(argType), unapplyApp.tpe.show) - val bunchedArgs = argTypes match { - case argType :: Nil => - if (args.lengthCompare(1) > 0 && Feature.autoTuplingEnabled && defn.isTupleNType(argType)) untpd.Tuple(args) :: Nil - else args - case _ => args - } - if (argTypes.length != bunchedArgs.length) { - report.error(UnapplyInvalidNumberOfArguments(qual, argTypes), tree.srcPos) - argTypes = argTypes.take(args.length) ++ - List.fill(argTypes.length - args.length)(WildcardType) - } - val unapplyPatterns = bunchedArgs.lazyZip(argTypes) map (typed(_, _)) + val unapplyPatterns = UnapplyArgs(unapplyApp.tpe, unapplyFn, unadaptedArgs, tree.srcPos) + .typedPatterns(qual, this) val result = assignType(cpy.UnApply(tree)(newUnapplyFn, unapplyImplicits(dummyArg, unapplyApp), unapplyPatterns), ownType) - unapp.println(s"unapply patterns = $unapplyPatterns") if (ownType.stripped eq selType.stripped) || ownType.isError then result else tryWithTypeTest(Typed(result, TypeTree(ownType)), selType) case tp => val unapplyErr = if (tp.isError) unapplyFn else notAnExtractor(unapplyFn) - val typedArgsErr = args mapconserve (typed(_, 
defn.AnyType)) + val typedArgsErr = unadaptedArgs.mapconserve(typed(_, defn.AnyType)) cpy.UnApply(tree)(unapplyErr, Nil, typedArgsErr) withType unapplyErr.tpe } } @@ -1589,22 +1693,6 @@ trait Applications extends Compatibility { tp } - /** Drop any leading implicit parameter sections */ - def stripImplicit(tp: Type, wildcardOnly: Boolean = false)(using Context): Type = tp match { - case mt: MethodType if mt.isImplicitMethod => - stripImplicit(resultTypeApprox(mt, wildcardOnly)) - case pt: PolyType => - pt.derivedLambdaType(pt.paramNames, pt.paramInfos, - stripImplicit(pt.resultType, wildcardOnly = true)) - // can't use TypeParamRefs for parameter references in `resultTypeApprox` - // since their bounds can refer to type parameters in `pt` that are not - // bound by the constraint. This can lead to hygiene violations if subsequently - // `pt` itself is added to the constraint. Test case is run/enrich-gentraversable.scala. - .asInstanceOf[PolyType].flatten - case _ => - tp - } - /** Compare owner inheritance level. * @param sym1 The first owner * @param sym2 The second owner @@ -1614,11 +1702,12 @@ trait Applications extends Compatibility { * Module classes also inherit the relationship from their companions. This means, * if no direct derivation exists between `sym1` and `sym2` also perform the following * tests: - * - If both sym1 and sym1 are module classes that have companion classes, - * and sym2 does not inherit implicit members from a base class (#), - * compare the companion classes. - * - If sym1 is a module class with a companion, and sym2 is a normal class or trait, - * compare the companion with sym2. + * - If both sym1 and sym2 are module classes that have companion classes, + * compare the companion classes. 
Return the result of that comparison, + * provided the module class with the larger companion class does not itself + * inherit implicit members from a base class (#), + * - If one sym is a module class with a companion, and the other is a normal class or trait, + * compare the companion with the other class or trait. * * Condition (#) is necessary to make `compareOwner(_, _) > 0` a transitive relation. * For instance: @@ -1642,21 +1731,43 @@ trait Applications extends Compatibility { * This means we get an ambiguity between `a` and `b` in all cases. */ def compareOwner(sym1: Symbol, sym2: Symbol)(using Context): Int = + def cls1 = sym1.companionClass + def cls2 = sym2.companionClass if sym1 == sym2 then 0 else if sym1.isSubClass(sym2) then 1 else if sym2.isSubClass(sym1) then -1 - else if sym1.is(Module) then - val cls1 = sym1.companionClass - if sym2.is(Module) then - if sym2.thisType.implicitMembers.forall(_.symbol.owner == sym2) then // test for (#) - compareOwner(cls1, sym2.companionClass) - else 0 - else compareOwner(cls1, sym2) - else 0 + else + if sym1.is(Module) && sym2.is(Module) then + val r = compareOwner(cls1, cls2) + if r == 0 then 0 + else + val larger = if r < 0 then sym1 else sym2 + if larger.thisType.implicitMembers.forall(_.symbol.owner == larger) then r + else 0 + else if sym1.is(Module) then compareOwner(cls1, sym2) + else if sym2.is(Module) then compareOwner(sym1, cls2) + else 0 + + enum CompareScheme: + case Old // Normal specificity test for overloading resolution (where `preferGeneral` is false) + // and in mode Scala3-migration when we compare with the old Scala 2 rules. 
+ + case Intermediate // Intermediate rules: better means specialize, but map all type arguments downwards + // These are enabled for 3.0-3.5, or if OldImplicitResolution + // is specified, and also for all comparisons between old-style implicits, + + case New // New rules: better means generalize, givens (and extensions) always beat implicits + end CompareScheme /** Compare two alternatives of an overloaded call or an implicit search. * * @param alt1, alt2 Non-overloaded references indicating the two choices + * @param preferGeneral When comparing two value types, prefer the more general one + * over the more specific one iff `preferGeneral` is true. + * `preferGeneral` is set to `true` when we compare two given values, since + * then we want the most general evidence that matches the target + * type. It is set to `false` for overloading resolution, when we want the + * most specific type instead. * @return 1 if 1st alternative is preferred over 2nd * -1 if 2nd alternative is preferred over 1st * 0 if neither alternative is preferred over the other @@ -1672,27 +1783,35 @@ trait Applications extends Compatibility { * an alternative that takes more implicit parameters wins over one * that takes fewer. 
*/ - def compare(alt1: TermRef, alt2: TermRef)(using Context): Int = trace(i"compare($alt1, $alt2)", overload) { + def compare(alt1: TermRef, alt2: TermRef, preferGeneral: Boolean = false)(using Context): Int = trace(i"compare($alt1, $alt2)", overload) { record("resolveOverloaded.compare") - - /** Is alternative `alt1` with type `tp1` as specific as alternative + val scheme = + val oldResolution = ctx.mode.is(Mode.OldImplicitResolution) + if !preferGeneral || Feature.migrateTo3 && oldResolution then + CompareScheme.Old + else if Feature.sourceVersion.isAtMost(SourceVersion.`3.5`) + || oldResolution + || alt1.symbol.is(Implicit) && alt2.symbol.is(Implicit) + then CompareScheme.Intermediate + else CompareScheme.New + + /** Is alternative `alt1` with type `tp1` as good as alternative * `alt2` with type `tp2` ? * - * 1. A method `alt1` of type `(p1: T1, ..., pn: Tn)U` is as specific as `alt2` + * 1. A method `alt1` of type `(p1: T1, ..., pn: Tn)U` is as good as `alt2` * if `alt1` is nullary or `alt2` is applicable to arguments (p1, ..., pn) of * types T1,...,Tn. If the last parameter `pn` has a vararg type T*, then * `alt1` must be applicable to arbitrary numbers of `T` parameters (which * implies that it must be a varargs method as well). * 2. A polymorphic member of type [a1 >: L1 <: U1, ..., an >: Ln <: Un]T is as - * specific as `alt2` of type `tp2` if T is as specific as `tp2` under the + * good as `alt2` of type `tp2` if T is as good as `tp2` under the * assumption that for i = 1,...,n each ai is an abstract type name bounded * from below by Li and from above by Ui. * 3. A member of any other type `tp1` is: - * a. always as specific as a method or a polymorphic method. - * b. as specific as a member of any other type `tp2` if `tp1` is compatible - * with `tp2`. + * a. always as good as a method or a polymorphic method. + * b. 
as good as a member of any other type `tp2` if `asGoodValueType(tp1, tp2) = true` */ - def isAsSpecific(alt1: TermRef, tp1: Type, alt2: TermRef, tp2: Type): Boolean = trace(i"isAsSpecific $tp1 $tp2", overload) { + def isAsGood(alt1: TermRef, tp1: Type, alt2: TermRef, tp2: Type): Boolean = trace(i"isAsGood $tp1 $tp2", overload) { tp1 match case tp1: MethodType => // (1) tp1.paramInfos.isEmpty && tp2.isInstanceOf[LambdaType] @@ -1714,69 +1833,83 @@ trait Applications extends Compatibility { fullyDefinedType(tp1Params, "type parameters of alternative", alt1.symbol.srcPos) val tparams = newTypeParams(alt1.symbol, tp1.paramNames, EmptyFlags, tp1.instantiateParamInfos(_)) - isAsSpecific(alt1, tp1.instantiate(tparams.map(_.typeRef)), alt2, tp2) + isAsGood(alt1, tp1.instantiate(tparams.map(_.typeRef)), alt2, tp2) } case _ => // (3) + def compareValues(tp2: Type)(using Context) = + isAsGoodValueType(tp1, tp2, alt1.symbol.is(Implicit)) tp2 match case tp2: MethodType => true // (3a) case tp2: PolyType if tp2.resultType.isInstanceOf[MethodType] => true // (3a) case tp2: PolyType => // (3b) - explore(isAsSpecificValueType(tp1, instantiateWithTypeVars(tp2))) + explore(compareValues(instantiateWithTypeVars(tp2))) case _ => // 3b) - isAsSpecificValueType(tp1, tp2) + compareValues(tp2) } - /** Test whether value type `tp1` is as specific as value type `tp2`. - * Let's abbreviate this to `tp1 <:s tp2`. - * Previously, `<:s` was the same as `<:`. This behavior is still - * available under mode `Mode.OldOverloadingResolution`. The new behavior - * is different, however. Here, `T <:s U` iff + /** Test whether value type `tp1` is as good as value type `tp2`. + * Let's abbreviate this to `tp1 <:p tp2`. The behavior depends on the Scala version + * and mode. * - * flip(T) <: flip(U) + * - In Scala 2, `<:p` was the same as `<:`. This behavior is still + * available in 3.0-migration if mode `Mode.OldImplicitResolution` is turned on as well. 
+ * It is used to highlight differences between Scala 2 and 3 behavior. * - * where `flip` changes covariant occurrences of contravariant type parameters to - * covariant ones. Intuitively `<:s` means subtyping `<:`, except that all arguments - * to contravariant parameters are compared as if they were covariant. E.g. given class + * - In Scala 3.0-3.6, the behavior is as follows: `T <:p U` iff there is an implicit conversion + * from `T` to `U`, or * - * class Cmp[-X] + * flip(T) <: flip(U) * - * `Cmp[T] <:s Cmp[U]` if `T <: U`. On the other hand, non-variant occurrences - * of parameters are not affected. So `T <: U` would imply `Set[Cmp[U]] <:s Set[Cmp[T]]`, - * as usual, because `Set` is non-variant. + * where `flip` changes covariant occurrences of contravariant type parameters to + * covariant ones. Intuitively `<:p` means subtyping `<:`, except that all arguments + * to contravariant parameters are compared as if they were covariant. E.g. given class * - * This relation might seem strange, but it models closely what happens for methods. - * Indeed, if we integrate the existing rules for methods into `<:s` we have now that + * class Cmp[-X] * - * (T)R <:s (U)R + * `Cmp[T] <:p Cmp[U]` if `T <: U`. On the other hand, non-variant occurrences + * of parameters are not affected. So `T <: U` would imply `Set[Cmp[U]] <:p Set[Cmp[T]]`, + * as usual, because `Set` is non-variant. * - * iff + * - From Scala 3.7, `T <:p U` means `T <: U` or `T` convertible to `U` + * for overloading resolution (when `preferGeneral is false), and the opposite relation + * `U <: T` or `U convertible to `T` for implicit disambiguation between givens + * (when `preferGeneral` is true). For old-style implicit values, the 3.5 behavior is kept. + * If one of the alternatives is an implicit and the other is a given (or an extension), the implicit loses. 
* - * T => R <:s U => R + * - In Scala 3.6 and Scala 3.7-migration, we issue a warning if the result under + * Scala 3.7 differs wrt to the old behavior up to 3.6. * - * Also: If a compared type refers to a given or its module class, use + * Also and only for given resolution: If a compared type refers to a given or its module class, use * the intersection of its parent classes instead. */ - def isAsSpecificValueType(tp1: Type, tp2: Type)(using Context) = - if (ctx.mode.is(Mode.OldOverloadingResolution)) + def isAsGoodValueType(tp1: Type, tp2: Type, alt1IsImplicit: Boolean)(using Context): Boolean = + if scheme == CompareScheme.Old then + // Normal specificity test for overloading resolution (where `preferGeneral` is false) + // and in mode Scala3-migration when we compare with the old Scala 2 rules. isCompatible(tp1, tp2) - else { - val flip = new TypeMap { - def apply(t: Type) = t match { - case t @ AppliedType(tycon, args) => - def mapArg(arg: Type, tparam: TypeParamInfo) = - if (variance > 0 && tparam.paramVarianceSign < 0) defn.FunctionNOf(arg :: Nil, defn.UnitType) - else arg - mapOver(t.derivedAppliedType(tycon, args.zipWithConserve(tycon.typeParams)(mapArg))) - case _ => mapOver(t) - } - } - def prepare(tp: Type) = tp.stripTypeVar match { + else + def prepare(tp: Type) = tp.stripTypeVar match case tp: NamedType if tp.symbol.is(Module) && tp.symbol.sourceModule.is(Given) => - flip(tp.widen.widenToParents) - case _ => flip(tp) - } - (prepare(tp1) relaxed_<:< prepare(tp2)) || viewExists(tp1, tp2) - } + tp.widen.widenToParents + case _ => + tp + + val tp1p = prepare(tp1) + val tp2p = prepare(tp2) + + if scheme == CompareScheme.Intermediate || alt1IsImplicit then + val flip = new TypeMap: + def apply(t: Type) = t match + case t @ AppliedType(tycon, args) => + def mapArg(arg: Type, tparam: TypeParamInfo) = + if (variance > 0 && tparam.paramVarianceSign < 0) defn.FunctionNOf(arg :: Nil, defn.UnitType) + else arg + mapOver(t.derivedAppliedType(tycon, 
args.zipWithConserve(tycon.typeParams)(mapArg))) + case _ => mapOver(t) + (flip(tp1p) relaxed_<:< flip(tp2p)) || viewExists(tp1, tp2) + else + (tp2p relaxed_<:< tp1p) || viewExists(tp2, tp1) + end isAsGoodValueType /** Widen the result type of synthetic given methods from the implementation class to the * type that's implemented. Example @@ -1807,10 +1940,37 @@ trait Applications extends Compatibility { else tp } - def compareWithTypes(tp1: Type, tp2: Type) = { + def widenPrefix(alt: TermRef): Type = alt.prefix.widen match + case pre: (TypeRef | ThisType) if pre.typeSymbol.is(Module) => + pre.parents.reduceLeft(TypeComparer.andType(_, _)) + case wpre => wpre + + /** If two alternatives have the same symbol, we pick the one with the most + * specific prefix. To determine that, we widen the prefix types and also + * widen module classes to the intersection of their parent classes. Then + * if one of the resulting types is a more specific value type than the other, + * it wins. Example: + * + * trait A { given M = ... } + * trait B extends A + * object a extends A + * object b extends B + * + * In this case `b.M` would be regarded as more specific than `a.M`. + */ + def comparePrefixes = + val pre1 = widenPrefix(alt1) + val pre2 = widenPrefix(alt2) + val winsPrefix1 = isCompatible(pre1, pre2) + val winsPrefix2 = isCompatible(pre2, pre1) + if winsPrefix1 == winsPrefix2 then 0 + else if winsPrefix1 then 1 + else -1 + + def compareWithTypes(tp1: Type, tp2: Type) = val ownerScore = compareOwner(alt1.symbol.maybeOwner, alt2.symbol.maybeOwner) - def winsType1 = isAsSpecific(alt1, tp1, alt2, tp2) - def winsType2 = isAsSpecific(alt2, tp2, alt1, tp1) + val winsType1 = isAsGood(alt1, tp1, alt2, tp2) + val winsType2 = isAsGood(alt2, tp2, alt1, tp1) overload.println(i"compare($alt1, $alt2)? 
$tp1 $tp2 $ownerScore $winsType1 $winsType2") if winsType1 && winsType2 @@ -1819,15 +1979,20 @@ trait Applications extends Compatibility { // alternatives are the same after following ExprTypes, pick one of them // (prefer the one that is not a method, but that's arbitrary). if alt1.widenExpr =:= alt2 then -1 else 1 - else if ownerScore == 1 then - if winsType1 || !winsType2 then 1 else 0 - else if ownerScore == -1 then - if winsType2 || !winsType1 then -1 else 0 - else if winsType1 then - if winsType2 then 0 else 1 else - if winsType2 then -1 else 0 - } + // For new implicit resolution, take ownerscore as more significant than type resolution + // Reason: People use owner hierarchies to explicitly prioritize, we should not + // break that by changing implicit priority of types. + def drawOrOwner = + if scheme == CompareScheme.New then ownerScore else 0 + ownerScore match + case 1 => if winsType1 || !winsType2 then 1 else drawOrOwner + case -1 => if winsType2 || !winsType1 then -1 else drawOrOwner + case 0 => + if winsType1 != winsType2 then if winsType1 then 1 else -1 + else if alt1.symbol == alt2.symbol then comparePrefixes + else 0 + end compareWithTypes if alt1.symbol.is(ConstructorProxy) && !alt2.symbol.is(ConstructorProxy) then -1 else if alt2.symbol.is(ConstructorProxy) && !alt1.symbol.is(ConstructorProxy) then 1 @@ -2232,13 +2397,13 @@ trait Applications extends Compatibility { case _ => (Nil, 0) /** Resolve overloading by mapping to a different problem where each alternative's - * type is mapped with `f`, alternatives with non-existing types are dropped, and the + * type is mapped with `f`, alternatives with non-existing types or symbols are dropped, and the * expected type is `pt`. Map the results back to the original alternatives. 
*/ def resolveMapped(alts: List[TermRef], f: TermRef => Type, pt: Type)(using Context): List[TermRef] = val reverseMapping = alts.flatMap { alt => val t = f(alt) - if t.exists then + if t.exists && alt.symbol.exists then val (trimmed, skipped) = trimParamss(t.stripPoly, alt.symbol.rawParamss) val mappedSym = alt.symbol.asTerm.copy(info = t) mappedSym.rawParamss = trimmed diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 56f67574a72d..1f82b9ddc084 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -29,12 +29,11 @@ import config.Printers.{typr, patmatch} import NameKinds.DefaultGetterName import NameOps.* import SymDenotations.{NoCompleter, NoDenotation} -import Applications.unapplyArgs +import Applications.UnapplyArgs import Inferencing.isFullyDefined import transform.patmat.SpaceEngine.{isIrrefutable, isIrrefutableQuotePattern} import transform.ValueClasses.underlyingOfValueClass -import config.Feature -import config.Feature.sourceVersion +import config.Feature, Feature.{sourceVersion, modularity} import config.SourceVersion.* import config.MigrationVersion import printing.Formatting.hlAsKeyword @@ -42,6 +41,7 @@ import cc.{isCaptureChecking, isRetainsLike} import collection.mutable import reporting.* +import Annotations.ExperimentalAnnotation object Checking { import tpd.* @@ -197,7 +197,7 @@ object Checking { * and that the instance conforms to the self type of the created class. 
*/ def checkInstantiable(tp: Type, srcTp: Type, pos: SrcPos)(using Context): Unit = - tp.underlyingClassRef(refinementOK = false) match + tp.underlyingClassRef(refinementOK = Feature.enabled(modularity)) match case tref: TypeRef => val cls = tref.symbol if (cls.isOneOf(AbstractOrTrait)) { @@ -349,10 +349,7 @@ object Checking { } if isInteresting(pre) then - val traceCycles = CyclicReference.isTraced - try - if traceCycles then - CyclicReference.pushTrace("explore ", tp.symbol, " for cyclic references") + CyclicReference.trace(i"explore ${tp.symbol} for cyclic references"): val pre1 = this(pre, false, false) if locked.contains(tp) || tp.symbol.infoOrCompleter.isInstanceOf[NoCompleter] @@ -367,8 +364,6 @@ object Checking { finally locked -= tp tp.withPrefix(pre1) - finally - if traceCycles then CyclicReference.popTrace() else tp } catch { @@ -605,6 +600,7 @@ object Checking { // The issue with `erased inline` is that the erased semantics get lost // as the code is inlined and the reference is removed before the erased usage check. checkCombination(Erased, Inline) + checkNoConflict(Tracked, Mutable, em"mutable variables may not be `tracked`") checkNoConflict(Lazy, ParamAccessor, em"parameter may not be `lazy`") } @@ -802,50 +798,58 @@ object Checking { tree /** Check that experimental language imports in `trees` - * are done only in experimental scopes, or in a top-level - * scope with only @experimental definitions. + * are done only in experimental scopes. For top-level + * experimental imports, all top-level definitions are transformed + * to @experimental definitions. 
+ * */ - def checkExperimentalImports(trees: List[Tree])(using Context): Unit = - - def nonExperimentalStat(trees: List[Tree]): Tree = trees match - case (_: Import | EmptyTree) :: rest => - nonExperimentalStat(rest) - case (tree @ TypeDef(_, impl: Template)) :: rest if tree.symbol.isPackageObject => - nonExperimentalStat(impl.body).orElse(nonExperimentalStat(rest)) - case (tree: PackageDef) :: rest => - nonExperimentalStat(tree.stats).orElse(nonExperimentalStat(rest)) - case (tree: MemberDef) :: rest => - if tree.symbol.isExperimental || tree.symbol.is(Synthetic) then - nonExperimentalStat(rest) - else - tree - case tree :: rest => - tree - case Nil => - EmptyTree - - for case imp @ Import(qual, selectors) <- trees do + def checkAndAdaptExperimentalImports(trees: List[Tree])(using Context): Unit = + def nonExperimentalTopLevelDefs(pack: Symbol): Iterator[Symbol] = + def isNonExperimentalTopLevelDefinition(sym: Symbol) = + sym.isDefinedInCurrentRun + && sym.source == ctx.compilationUnit.source + && !sym.isConstructor // not constructor of package object + && !sym.is(Package) && !sym.name.isPackageObjectName + && !sym.isExperimental + + pack.info.decls.toList.iterator.flatMap: sym => + if sym.isClass && (sym.is(Package) || sym.isPackageObject) then + nonExperimentalTopLevelDefs(sym) + else if isNonExperimentalTopLevelDefinition(sym) then + sym :: Nil + else Nil + + def unitExperimentalLanguageImports = def isAllowedImport(sel: untpd.ImportSelector) = val name = Feature.experimental(sel.name) name == Feature.scala2macros - || name == Feature.erasedDefinitions || name == Feature.captureChecking + trees.filter { + case Import(qual, selectors) => + languageImport(qual) match + case Some(nme.experimental) => + !selectors.forall(isAllowedImport) && !ctx.owner.isInExperimentalScope + case _ => false + case _ => false + } + + if ctx.owner.is(Package) || ctx.owner.name.startsWith(str.REPL_SESSION_LINE) then + def markTopLevelDefsAsExperimental(why: String): Unit = + for sym 
<- nonExperimentalTopLevelDefs(ctx.owner) do + sym.addAnnotation(ExperimentalAnnotation(s"Added by $why", sym.span)) - languageImport(qual) match - case Some(nme.experimental) - if !ctx.owner.isInExperimentalScope && !selectors.forall(isAllowedImport) => - def check(stable: => String) = - Feature.checkExperimentalFeature("features", imp.srcPos, - s"\n\nNote: the scope enclosing the import is not considered experimental because it contains the\nnon-experimental $stable") - if ctx.owner.is(Package) then - // allow top-level experimental imports if all definitions are @experimental - nonExperimentalStat(trees) match - case EmptyTree => - case tree: MemberDef => check(i"${tree.symbol}") - case tree => check(i"expression ${tree}") - else Feature.checkExperimentalFeature("features", imp.srcPos) + unitExperimentalLanguageImports match + case imp :: _ => markTopLevelDefsAsExperimental(i"top level $imp") case _ => - end checkExperimentalImports + Feature.experimentalEnabledByLanguageSetting match + case Some(sel) => markTopLevelDefsAsExperimental(i"-language:experimental.$sel") + case _ if ctx.settings.experimental.value => markTopLevelDefsAsExperimental(i"-experimental") + case _ => + else + for imp <- unitExperimentalLanguageImports do + Feature.checkExperimentalFeature("feature local import", imp.srcPos) + + end checkAndAdaptExperimentalImports /** Checks that PolyFunction only have valid refinements. * @@ -963,10 +967,16 @@ trait Checking { false } - def check(pat: Tree, pt: Type): Boolean = + // Is scrutinee type `pt` a subtype of `pat.tpe`, after stripping named tuples + // and accounting for large generic tuples? 
+ // Named tuples need to be stripped off, since names are dropped in patterns + def conforms(pat: Tree, pt: Type): Boolean = pt.isTupleXXLExtract(pat.tpe) // See isTupleXXLExtract, fixes TupleXXL parameter type - || pt <:< pat.tpe - || fail(pat, pt, Reason.NonConforming) + || pt.stripNamedTuple <:< pat.tpe + || (pt.widen ne pt) && conforms(pat, pt.widen) + + def check(pat: Tree, pt: Type): Boolean = + conforms(pat, pt) || fail(pat, pt, Reason.NonConforming) def recur(pat: Tree, pt: Type): Boolean = !sourceVersion.isAtLeast(`3.2`) @@ -979,7 +989,7 @@ trait Checking { case UnApply(fn, implicits, pats) => check(pat, pt) && (isIrrefutable(fn, pats.length) || fail(pat, pt, Reason.RefutableExtractor)) && { - val argPts = unapplyArgs(fn.tpe.widen.finalResultType, fn, pats, pat.srcPos) + val argPts = UnapplyArgs(fn.tpe.widen.finalResultType, fn, pats, pat.srcPos).argTypes pats.corresponds(argPts)(recur) } case Alternative(pats) => @@ -1058,8 +1068,8 @@ trait Checking { * check that class prefix is stable. * @return `tp` itself if it is a class or trait ref, ObjectType if not. */ - def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean)(using Context): Type = - tp.underlyingClassRef(refinementOK = false) match { + def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean, refinementOK: Boolean = false)(using Context): Type = + tp.underlyingClassRef(refinementOK) match case tref: TypeRef => if (traitReq && !tref.symbol.is(Trait)) report.error(TraitIsExpected(tref.symbol), pos) if (stablePrefixReq && ctx.phase <= refchecksPhase) checkStable(tref.prefix, pos, "class prefix") @@ -1067,7 +1077,6 @@ trait Checking { case _ => report.error(NotClassType(tp), pos) defn.ObjectType - } /** If `sym` is an old-style implicit conversion, check that implicit conversions are enabled. 
* @pre sym.is(GivenOrImplicit) @@ -1323,20 +1332,20 @@ trait Checking { } /** Check that user-defined (result) type is fully applied */ - def checkFullyAppliedType(tree: Tree)(using Context): Unit = tree match + def checkFullyAppliedType(tree: Tree, prefix: String)(using Context): Unit = tree match case TypeBoundsTree(lo, hi, alias) => - checkFullyAppliedType(lo) - checkFullyAppliedType(hi) - checkFullyAppliedType(alias) + checkFullyAppliedType(lo, prefix) + checkFullyAppliedType(hi, prefix) + checkFullyAppliedType(alias, prefix) case Annotated(arg, annot) => - checkFullyAppliedType(arg) + checkFullyAppliedType(arg, prefix) case LambdaTypeTree(_, body) => - checkFullyAppliedType(body) + checkFullyAppliedType(body, prefix) case _: TypeTree => case _ => if tree.tpe.typeParams.nonEmpty then val what = if tree.symbol.exists then tree.symbol.show else i"type $tree" - report.error(em"$what takes type parameters", tree.srcPos) + report.error(em"$prefix$what takes type parameters", tree.srcPos) /** Check that we are in an inline context (inside an inline method or in inline code) */ def checkInInlineContext(what: String, pos: SrcPos)(using Context): Unit = @@ -1508,7 +1517,7 @@ trait Checking { val annotCls = Annotations.annotClass(annot) val concreteAnnot = Annotations.ConcreteAnnotation(annot) val pos = annot.srcPos - if (annotCls == defn.MainAnnot || concreteAnnot.matches(defn.MainAnnotationClass)) { + if (annotCls == defn.MainAnnot) { if (!sym.isRealMethod) report.error(em"main annotation cannot be applied to $sym", pos) if (!sym.owner.is(Module) || !sym.owner.isStatic) @@ -1601,7 +1610,7 @@ trait ReChecking extends Checking { override def checkEnumParent(cls: Symbol, firstParent: Symbol)(using Context): Unit = () override def checkEnum(cdef: untpd.TypeDef, cls: Symbol, firstParent: Symbol)(using Context): Unit = () override def checkRefsLegal(tree: tpd.Tree, badOwner: Symbol, allowed: (Name, Symbol) => Boolean, where: String)(using Context): Unit = () - override def 
checkFullyAppliedType(tree: Tree)(using Context): Unit = () + override def checkFullyAppliedType(tree: Tree, prefix: String)(using Context): Unit = () override def checkEnumCaseRefsLegal(cdef: TypeDef, enumCtx: Context)(using Context): Unit = () override def checkAnnotApplicable(annot: Tree, sym: Symbol)(using Context): Boolean = true override def checkMatchable(tp: Type, pos: SrcPos, pattern: Boolean)(using Context): Unit = () @@ -1617,7 +1626,7 @@ trait NoChecking extends ReChecking { override def checkNonCyclic(sym: Symbol, info: TypeBounds, reportErrors: Boolean)(using Context): Type = info override def checkNonCyclicInherited(joint: Type, parents: List[Type], decls: Scope, pos: SrcPos)(using Context): Unit = () override def checkStable(tp: Type, pos: SrcPos, kind: String)(using Context): Unit = () - override def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean)(using Context): Type = tp + override def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean, refinementOK: Boolean)(using Context): Type = tp override def checkImplicitConversionDefOK(sym: Symbol)(using Context): Unit = () override def checkImplicitConversionUseOK(tree: Tree, expected: Type)(using Context): Unit = () override def checkFeasibleParent(tp: Type, pos: SrcPos, where: => String = "")(using Context): Type = tp diff --git a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala index 1e0907ee74a6..5ce1b02733d0 100644 --- a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala @@ -3,6 +3,7 @@ package dotc package transform import core.* +import Annotations.Annotation import Symbols.*, Types.*, Contexts.*, Flags.*, Decorators.*, reporting.* import util.SrcPos import config.{ScalaVersion, NoScalaVersion, Feature, ScalaRelease} @@ -131,12 +132,14 @@ class CrossVersionChecks extends MiniPhase: } 
override def transformOther(tree: Tree)(using Context): Tree = - tree.foreachSubTree { // Find references in type trees and imports - case tree: Ident => transformIdent(tree) - case tree: Select => transformSelect(tree) - case tree: TypeTree => transformTypeTree(tree) - case _ => - } + val inPackage = ctx.owner.is(Package) || ctx.owner.isPackageObject + if !(inPackage && tree.isInstanceOf[ImportOrExport] && Feature.isExperimentalEnabledByImport) then + tree.foreachSubTree { // Find references in type trees and imports + case tree: Ident => transformIdent(tree) + case tree: Select => transformSelect(tree) + case tree: TypeTree => transformTypeTree(tree) + case _ => + } tree end CrossVersionChecks @@ -161,29 +164,42 @@ object CrossVersionChecks: /** If @deprecated is present, and the point of reference is not enclosed * in either a deprecated member or a scala bridge method, issue a warning. + * + * Also check for deprecation of the companion class for synthetic methods in the companion module. */ private[CrossVersionChecks] def checkDeprecatedRef(sym: Symbol, pos: SrcPos)(using Context): Unit = - - // Also check for deprecation of the companion class for synthetic methods - val toCheck = sym :: (if sym.isAllOf(SyntheticMethod) then sym.owner.companionClass :: Nil else Nil) - for sym <- toCheck; annot <- sym.getAnnotation(defn.DeprecatedAnnot) do - if !skipWarning(sym) then - val msg = annot.argumentConstant(0).map(": " + _.stringValue).getOrElse("") - val since = annot.argumentConstant(1).map(" since " + _.stringValue).getOrElse("") - report.deprecationWarning(em"${sym.showLocated} is deprecated${since}${msg}", pos) - - /** Skip warnings for synthetic members of case classes during declaration and - * scan the chain of outer declaring scopes from the current context - * a deprecation warning will be skipped if one the following holds - * for a given declaring scope: - * - the symbol associated with the scope is also deprecated. 
- * - if and only if `sym` is an enum case, the scope is either - * a module that declares `sym`, or the companion class of the - * module that declares `sym`. + def maybeWarn(annotee: Symbol, annot: Annotation) = if !skipWarning(sym) then + val message = annot.argumentConstantString(0).filter(!_.isEmpty).map(": " + _).getOrElse("") + val since = annot.argumentConstantString(1).filter(!_.isEmpty).map(" since " + _).getOrElse("") + report.deprecationWarning(em"${annotee.showLocated} is deprecated${since}${message}", pos) + sym.getAnnotation(defn.DeprecatedAnnot) match + case Some(annot) => maybeWarn(sym, annot) + case _ => + if sym.isAllOf(SyntheticMethod) then + val companion = sym.owner.companionClass + if companion.is(CaseClass) then companion.getAnnotation(defn.DeprecatedAnnot).foreach(maybeWarn(companion, _)) + + /** Decide whether the deprecation of `sym` should be ignored in this context. + * + * The warning is skipped if any symbol in the context owner chain is deprecated, + * that is, an enclosing scope is associated with a deprecated symbol. + * + * Further exclusions are needed for enums and case classes, + * since they typically need to refer to deprecated members + * even if the enclosing enum or case class is not deprecated. + * + * If and only if `sym` is an enum case, the warning is skipped + * if an enclosing scope is either a module that declares `sym`, + * or the companion class of the module that declares `sym`. + * + * For a deprecated case class or case class element, + * the warning is skipped for synthetic sites where the enclosing + * class (or its companion) is either the deprecated case class + * or the case class of the deprecated element. 
*/ private def skipWarning(sym: Symbol)(using Context): Boolean = - /** is the owner an enum or its companion and also the owner of sym */ + // is the owner an enum or its companion and also the owner of sym def isEnumOwner(owner: Symbol)(using Context) = // pre: sym is an enumcase if owner.isEnumClass then owner.companionClass eq sym.owner @@ -194,6 +210,19 @@ object CrossVersionChecks: // pre: sym is an enumcase owner.isDeprecated || isEnumOwner(owner) - (ctx.owner.is(Synthetic) && sym.is(CaseClass)) - || ctx.owner.ownersIterator.exists(if sym.isEnumCase then isDeprecatedOrEnum else _.isDeprecated) + def siteIsEnclosedByDeprecatedElement = + ctx.owner.ownersIterator.exists: + if sym.isEnumCase then isDeprecatedOrEnum else _.isDeprecated + + def siteIsSyntheticCaseClassMember = + val owner = ctx.owner + def symIsCaseOrMember = + val enclosing = owner.enclosingClass + val companion = enclosing.companionClass + // deprecated sym is either enclosing case class or a sibling member + def checkSym(k: Symbol) = sym == k || sym.owner == k + (enclosing.is(CaseClass) || companion.is(CaseClass)) && (checkSym(enclosing) || checkSym(companion)) + owner.is(Synthetic) && symIsCaseOrMember + + siteIsSyntheticCaseClassMember || siteIsEnclosedByDeprecatedElement end skipWarning diff --git a/compiler/src/dotty/tools/dotc/typer/Deriving.scala b/compiler/src/dotty/tools/dotc/typer/Deriving.scala index f3be1dcff766..619dfcf4d7cb 100644 --- a/compiler/src/dotty/tools/dotc/typer/Deriving.scala +++ b/compiler/src/dotty/tools/dotc/typer/Deriving.scala @@ -31,7 +31,7 @@ trait Deriving { /** A version of Type#underlyingClassRef that works also for higher-kinded types */ private def underlyingClassRef(tp: Type): Type = tp match { case tp: TypeRef if tp.symbol.isClass => tp - case tp: TypeRef if tp.symbol.isAbstractType => NoType + case tp: TypeRef if tp.symbol.isAbstractOrParamType => NoType case tp: TermRef => NoType case tp: TypeProxy => underlyingClassRef(tp.superType) case _ => NoType 
diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 5162b3fed1b9..5ca5ac5bb59d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -411,14 +411,20 @@ object Implicits: /** Search mode to use for possibly avoiding looping givens */ enum SearchMode: case Old, // up to 3.3, old mode w/o protection - CompareWarn, // from 3.4, old mode, warn if new mode would change result - CompareErr, // from 3.5, old mode, error if new mode would change result - New // from future, new mode where looping givens are avoided + CompareWarn, // from 3.4, old mode, warn if new mode would change result + CompareErr, // from 3.5, old mode, error if new mode would change result + New // from future, new mode where looping givens are avoided /** The result of an implicit search */ sealed abstract class SearchResult extends Showable { def tree: Tree def toText(printer: Printer): Text = printer.toText(this) + + /** The references that were found, there can be two of them in the case + * of an AmbiguousImplicits failure + */ + def found: List[TermRef] + def recoverWith(other: SearchFailure => SearchResult): SearchResult = this match { case _: SearchSuccess => this case fail: SearchFailure => other(fail) @@ -434,13 +440,17 @@ object Implicits: * @param tstate The typer state to be committed if this alternative is chosen */ case class SearchSuccess(tree: Tree, ref: TermRef, level: Int, isExtension: Boolean = false)(val tstate: TyperState, val gstate: GadtConstraint) - extends SearchResult with RefAndLevel with Showable + extends SearchResult with RefAndLevel with Showable: + final def found = ref :: Nil /** A failed search */ case class SearchFailure(tree: Tree) extends SearchResult { require(tree.tpe.isInstanceOf[SearchFailureType], s"unexpected type for ${tree}") final def isAmbiguous: Boolean = tree.tpe.isInstanceOf[AmbiguousImplicits | 
TooUnspecific] final def reason: SearchFailureType = tree.tpe.asInstanceOf[SearchFailureType] + final def found = tree.tpe match + case tpe: AmbiguousImplicits => tpe.alt1.ref :: tpe.alt2.ref :: Nil + case _ => Nil } object SearchFailure { @@ -531,13 +541,18 @@ object Implicits: |must be more specific than $target""" :: Nil override def msg(using Context) = - super.msg.append("\nThe expected type $target is not specific enough, so no search was attempted") + super.msg.append(i"\nThe expected type $target is not specific enough, so no search was attempted") override def toString = s"TooUnspecific" end TooUnspecific /** An ambiguous implicits failure */ - class AmbiguousImplicits(val alt1: SearchSuccess, val alt2: SearchSuccess, val expectedType: Type, val argument: Tree) extends SearchFailureType: + class AmbiguousImplicits(val alt1: SearchSuccess, val alt2: SearchSuccess, val expectedType: Type, val argument: Tree, val nested: Boolean = false) extends SearchFailureType: + + private[Implicits] var priorityChangeWarnings: List[Message] = Nil + + def priorityChangeWarningNote(using Context): String = + priorityChangeWarnings.map(msg => s"\n\nNote: $msg").mkString def msg(using Context): Message = var str1 = err.refStr(alt1.ref) @@ -621,8 +636,8 @@ trait ImplicitRunInfo: private def isAnchor(sym: Symbol) = sym.isClass && !isExcluded(sym) || sym.isOpaqueAlias - || sym.is(Deferred, butNot = Param) - || sym.info.isInstanceOf[MatchAlias] + || sym.is(Deferred) + || sym.info.isMatchAlias private def computeIScope(rootTp: Type): OfTypeImplicits = @@ -663,7 +678,6 @@ trait ImplicitRunInfo: traverseChildren(t) case t => traverseChildren(t) - traverse(t.normalized) catch case ex: Throwable => handleRecursive("collectParts of", t.show, ex) def apply(tp: Type): collection.Set[Type] = @@ -775,6 +789,7 @@ trait ImplicitRunInfo: * if `T` is of the form `(P#x).type`, the anchors of `P`. * - If `T` is the this-type of a static object, the anchors of a term reference to that object. 
 * - If `T` is some other this-type `P.this.type`, the anchors of `P`. + * - If `T` is a match type or an applied match alias, the anchors of the normalization of `T`. * - If `T` is some other type, the union of the anchors of each constituent type of `T`. * * The _implicit scope_ of a type `tp` is the smallest set S of term references (i.e. TermRefs) @@ -787,7 +802,7 @@ trait ImplicitRunInfo: * - If `T` is a reference to an opaque type alias named `A`, S includes * a reference to an object `A` defined in the same scope as the type, if it exists, * as well as the implicit scope of `T`'s underlying type or bounds. - * - If `T` is a reference to an an abstract type or match type alias named `A`, + * - If `T` is a reference to an abstract type or unreducible match type alias named `A`, * S includes a reference to an object `A` defined in the same scope as the type, * if it exists, as well as the implicit scopes of `T`'s lower and upper bound, * if present. @@ -817,7 +832,7 @@ trait ImplicitRunInfo: else AndType.make(apply(lo), apply(hi)) case u => apply(u) - def apply(t: Type) = t.dealias match + def apply(t: Type) = t.dealias.normalized match case t: TypeRef => if t.symbol.isClass || isAnchor(t.symbol) then t else applyToUnderlying(t) case t: TypeVar => apply(t.underlying) @@ -857,6 +872,8 @@ trait Implicits: || inferView(dummyTreeOfType(from), to) (using ctx.fresh.addMode(Mode.ImplicitExploration).setExploreTyperState()).isSuccess // TODO: investigate why we can't TyperState#test here + || from.widen.isNamedTupleType && to.derivesFrom(defn.TupleClass) + && from.widen.stripNamedTuple <:< to ) /** Find an implicit conversion to apply to given tree `from` so that the @@ -922,10 +939,10 @@ trait Implicits: /** Search an implicit argument and report error if not found */ - def implicitArgTree(formal: Type, span: Span)(using Context): Tree = { + def implicitArgTree(formal: Type, span: Span, where: => String = "")(using Context): Tree = { val arg = inferImplicitArg(formal, 
span) if (arg.tpe.isInstanceOf[SearchFailureType]) - report.error(missingArgMsg(arg, formal, ""), ctx.source.atSpan(span)) + report.error(missingArgMsg(arg, formal, where), ctx.source.atSpan(span)) arg } @@ -1065,7 +1082,7 @@ trait Implicits: trace(s"search implicit ${pt.show}, arg = ${argument.show}: ${argument.tpe.show}", implicits, show = true) { record("inferImplicit") assert(ctx.phase.allowsImplicitSearch, - if (argument.isEmpty) i"missing implicit parameter of type $pt after typer at phase ${ctx.phase.phaseName}" + if (argument.isEmpty) i"missing implicit parameter of type $pt after typer at phase ${ctx.phase}" else i"type error: ${argument.tpe} does not conform to $pt${err.whyNoMatchStr(argument.tpe, pt)}") val usableForInference = pt.exists && !pt.unusableForInference @@ -1084,10 +1101,15 @@ trait Implicits: (searchCtx.scope eq ctx.scope) && (searchCtx.owner eq ctx.owner.owner) do () - try ImplicitSearch(pt, argument, span)(using searchCtx).bestImplicit - catch case ce: CyclicReference => - ce.inImplicitSearch = true - throw ce + def searchStr = + if argument.isEmpty then i"argument of type $pt" + else i"conversion from ${argument.tpe} to $pt" + + CyclicReference.trace(i"searching for an implicit $searchStr"): + try ImplicitSearch(pt, argument, span)(using searchCtx).bestImplicit + catch case ce: CyclicReference => + ce.inImplicitSearch = true + throw ce else NoMatchingImplicitsFailure val result = @@ -1105,8 +1127,8 @@ trait Implicits: case result: SearchFailure if result.isAmbiguous => val deepPt = pt.deepenProto if (deepPt ne pt) inferImplicit(deepPt, argument, span) - else if (migrateTo3 && !ctx.mode.is(Mode.OldOverloadingResolution)) - withMode(Mode.OldOverloadingResolution)(inferImplicit(pt, argument, span)) match { + else if (migrateTo3 && !ctx.mode.is(Mode.OldImplicitResolution)) + withMode(Mode.OldImplicitResolution)(inferImplicit(pt, argument, span)) match { case altResult: SearchSuccess => report.migrationWarning( result.reason.msg @@ -1221,7 
+1243,7 @@ trait Implicits: assert(argument.isEmpty || argument.tpe.isValueType || argument.tpe.isInstanceOf[ExprType], em"found: $argument: ${argument.tpe}, expected: $pt") - private def nestedContext() = + private def searchContext() = ctx.fresh.setMode(ctx.mode &~ Mode.ImplicitsEnabled) private def isCoherent = pt.isRef(defn.CanEqualClass) @@ -1265,7 +1287,7 @@ trait Implicits: else val history = ctx.searchHistory.nest(cand, pt) val typingCtx = - nestedContext().setNewTyperState().setFreshGADTBounds.setSearchHistory(history) + searchContext().setNewTyperState().setFreshGADTBounds.setSearchHistory(history) val result = typedImplicit(cand, pt, argument, span)(using typingCtx) result match case res: SearchSuccess => @@ -1283,16 +1305,78 @@ trait Implicits: /** Search a list of eligible implicit references */ private def searchImplicit(eligible: List[Candidate], contextual: Boolean): SearchResult = + // A map that associates a priority change warning (between -source 3.6 and 3.7) + // with the candidate refs mentioned in the warning. We report the associated + // message if one of the critical candidates is part of the result of the implicit search. + val priorityChangeWarnings = mutable.ListBuffer[(/*critical:*/ List[TermRef], Message)]() + + val sv = Feature.sourceVersion + val isLastOldVersion = sv.stable == SourceVersion.`3.6` + val isWarnPriorityChangeVersion = isLastOldVersion || sv == SourceVersion.`3.7-migration` + /** Compare `alt1` with `alt2` to determine which one should be chosen. 
* * @return a number > 0 if `alt1` is preferred over `alt2` * a number < 0 if `alt2` is preferred over `alt1` * 0 if neither alternative is preferred over the other + * The behavior depends on the source version + * before 3.6: compare with preferGeneral = false + * 3.6: compare twice with preferGeneral = false and true, warning if result is different, + * return old result with preferGeneral = false + * 3.7-migration: compare twice with preferGeneral = false and true, warning if result is different, + * return new result with preferGeneral = true + * 3.7 and higher: compare with preferGeneral = true + * + * @param disambiguate The call is used to disambiguate two successes, not for ranking. + * When ranking, we are always filtering out either > 0 or <= 0 results. + * In each case a priority change from 0 to -1 or vice versa makes no difference. */ - def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel): Int = + def compareAlternatives(alt1: RefAndLevel, alt2: RefAndLevel, disambiguate: Boolean = false): Int = + def comp(using Context) = explore(compare(alt1.ref, alt2.ref, preferGeneral = true)) if alt1.ref eq alt2.ref then 0 else if alt1.level != alt2.level then alt1.level - alt2.level - else explore(compare(alt1.ref, alt2.ref))(using nestedContext()) + else + val cmp = comp(using searchContext()) + if isWarnPriorityChangeVersion then + val prev = comp(using searchContext().addMode(Mode.OldImplicitResolution)) + if disambiguate && cmp != prev then + implicits.println(i"PRIORITY CHANGE ${alt1.ref}, ${alt2.ref}") + val (loser, winner) = + prev match + case 1 => (alt1, alt2) + case -1 => (alt2, alt1) + case 0 => + cmp match + case 1 => (alt2, alt1) + case -1 => (alt1, alt2) + def choice(nth: String, c: Int) = + if c == 0 then "none - it's ambiguous" + else s"the $nth alternative" + val (change, whichChoice) = + if isLastOldVersion + then ("will change", "Current choice ") + else ("has changed", "Previous choice") + val msg = + em"""Given search preference for 
$pt between alternatives + | ${loser.ref} + |and + | ${winner.ref} + |$change. + |$whichChoice : ${choice("first", prev)} + |New choice from Scala 3.7: ${choice("second", cmp)}""" + val critical = alt1.ref :: alt2.ref :: Nil + priorityChangeWarnings += ((critical, msg)) + if isLastOldVersion then prev else cmp + else cmp max prev + // When ranking, alt1 is always the new candidate and alt2 is the + // solution found previously. We keep the candidate if the outcome is 0 + // (ambiguous) or 1 (first wins). Or, when ranking in healImplicit we keep the + // candidate only if the outcome is 1. In both cases, keeping the better + // of `cmp` and `prev` means we keep candidates that could match + // in either scheme. This means that subsequent disambiguation + // comparisons will record a warning if cmp != prev. + else cmp + end compareAlternatives /** If `alt1` is also a search success, try to disambiguate as follows: * - If alt2 is preferred over alt1, pick alt2, otherwise return an @@ -1300,9 +1384,11 @@ trait Implicits: */ def disambiguate(alt1: SearchResult, alt2: SearchSuccess) = alt1 match case alt1: SearchSuccess => - var diff = compareAlternatives(alt1, alt2) - assert(diff <= 0) // diff > 0 candidates should already have been eliminated in `rank` - if diff == 0 && alt2.isExtension then + var diff = compareAlternatives(alt1, alt2, disambiguate = true) + // diff > 0 candidates should already have been eliminated in `rank` + if diff == 0 && alt1.ref =:= alt2.ref then + diff = 1 // See i12951 for a test where this happens + else if diff == 0 && alt2.isExtension then if alt1.isExtension then // Fall back: if both results are extension method applications, // compare the extension methods instead of their wrappers. 
@@ -1328,8 +1414,8 @@ trait Implicits: else ctx.typerState - diff = inContext(ctx.withTyperState(comparisonState)): - compare(ref1, ref2) + diff = inContext(searchContext().withTyperState(comparisonState)): + compare(ref1, ref2, preferGeneral = true) else // alt1 is a conversion, prefer extension alt2 over it diff = -1 if diff < 0 then alt2 @@ -1359,12 +1445,27 @@ trait Implicits: pending match { case cand :: remaining => /** To recover from an ambiguous implicit failure, we need to find a pending - * candidate that is strictly better than the failed candidate(s). + * candidate that is strictly better than the failed `ambiguous` candidate(s). * If no such candidate is found, we propagate the ambiguity. */ - def healAmbiguous(fail: SearchFailure, betterThanFailed: Candidate => Boolean) = - val newPending = remaining.filter(betterThanFailed) - rank(newPending, fail, Nil).recoverWith(_ => fail) + def healAmbiguous(fail: SearchFailure, ambiguous: List[RefAndLevel]) = + def betterThanAmbiguous(newCand: RefAndLevel, disambiguate: Boolean): Boolean = + ambiguous.forall(compareAlternatives(newCand, _, disambiguate) > 0) + + inline def betterByCurrentScheme(newCand: RefAndLevel): Boolean = + if isWarnPriorityChangeVersion then + // newCand may have only been kept in pending because it was better in the other prioritization scheme. + // If that candidate produces a SearchSuccess, disambiguate will return it as the found SearchResult. + // We must now recheck it was really better than the ambiguous candidates we are recovering from, + // under the rules of the current scheme, which are applied when disambiguate = true. 
+ betterThanAmbiguous(newCand, disambiguate = true) + else true + + val newPending = remaining.filter(betterThanAmbiguous(_, disambiguate = false)) + rank(newPending, fail, Nil) match + case found: SearchSuccess if betterByCurrentScheme(found) => found + case _ => fail + end healAmbiguous negateIfNot(tryImplicit(cand, contextual)) match { case fail: SearchFailure => @@ -1379,9 +1480,12 @@ trait Implicits: else // The ambiguity happened in a nested search: to recover we // need a candidate better than `cand` - healAmbiguous(fail, newCand => - compareAlternatives(newCand, cand) > 0) - else rank(remaining, found, fail :: rfailures) + healAmbiguous(fail, cand :: Nil) + else + // keep only warnings that don't involve the failed candidate reference + priorityChangeWarnings.filterInPlace: (critical, _) => + !critical.contains(cand.ref) + rank(remaining, found, fail :: rfailures) case best: SearchSuccess => if (ctx.mode.is(Mode.ImplicitExploration) || isCoherent) best @@ -1389,16 +1493,13 @@ trait Implicits: case retained: SearchSuccess => val newPending = if (retained eq found) || remaining.isEmpty then remaining - else remaining.filterConserve(cand => - compareAlternatives(retained, cand) <= 0) + else remaining.filterConserve(newCand => compareAlternatives(newCand, retained) >= 0) rank(newPending, retained, rfailures) case fail: SearchFailure => // The ambiguity happened in the current search: to recover we // need a candidate better than the two ambiguous alternatives. 
val ambi = fail.reason.asInstanceOf[AmbiguousImplicits] - healAmbiguous(fail, newCand => - compareAlternatives(newCand, ambi.alt1) > 0 && - compareAlternatives(newCand, ambi.alt2) > 0) + healAmbiguous(fail, ambi.alt1 :: ambi.alt2 :: Nil) } } case nil => @@ -1536,7 +1637,25 @@ trait Implicits: validateOrdering(ord) throw ex - rank(sort(eligible), NoMatchingImplicitsFailure, Nil) + val res = rank(sort(eligible), NoMatchingImplicitsFailure, Nil) + + // Issue all priority change warnings that can affect the result + val shownWarnings = priorityChangeWarnings.toList.collect: + case (critical, msg) if res.found.exists(critical.contains(_)) => + msg + res match + case res: SearchFailure => + res.reason match + case ambi: AmbiguousImplicits => + // Make warnings part of error message because otherwise they are suppressed when + // the error is emitted. + ambi.priorityChangeWarnings = shownWarnings + case _ => + case _ => + for msg <- shownWarnings do + report.warning(msg, srcPos) + + res end searchImplicit def isUnderSpecifiedArgument(tp: Type): Boolean = diff --git a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala index 33643a0fae2f..5ab6a4a5fae6 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala @@ -296,7 +296,7 @@ trait ImportSuggestions: var i = 0 var diff = 0 while i < filled && diff == 0 do - diff = compare(ref, top(i))(using noImplicitsCtx) + diff = compare(ref, top(i), preferGeneral = true)(using noImplicitsCtx) if diff > 0 then rest += top(i) top(i) = ref diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 24721f1cd758..83964417a6f1 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -26,7 +26,7 @@ import Nullables.* import transform.ValueClasses.* import TypeErasure.erasure 
import reporting.* -import config.Feature.sourceVersion +import config.Feature.{sourceVersion, modularity} import config.SourceVersion.* import scala.compiletime.uninitialized @@ -55,11 +55,12 @@ class Namer { typer: Typer => import untpd.* - val TypedAhead : Property.Key[tpd.Tree] = new Property.Key - val ExpandedTree : Property.Key[untpd.Tree] = new Property.Key - val ExportForwarders: Property.Key[List[tpd.MemberDef]] = new Property.Key - val SymOfTree : Property.Key[Symbol] = new Property.Key - val AttachedDeriver : Property.Key[Deriver] = new Property.Key + val TypedAhead : Property.Key[tpd.Tree] = new Property.Key + val ExpandedTree : Property.Key[untpd.Tree] = new Property.Key + val ExportForwarders : Property.Key[List[tpd.MemberDef]] = new Property.Key + val ParentRefinements: Property.Key[List[Symbol]] = new Property.Key + val SymOfTree : Property.Key[Symbol] = new Property.Key + val AttachedDeriver : Property.Key[Deriver] = new Property.Key // was `val Deriver`, but that gave shadowing problems with constructor proxies /** A partial map from unexpanded member and pattern defs and to their expansions. 
@@ -121,7 +122,8 @@ class Namer { typer: Typer => /** Record `sym` as the symbol defined by `tree` */ def recordSym(sym: Symbol, tree: Tree)(using Context): Symbol = { - for (refs <- tree.removeAttachment(References); ref <- refs) ref.watching(sym) + for refs <- tree.removeAttachment(References); ref <- refs do + ref.watching(sym) tree.pushAttachment(SymOfTree, sym) sym } @@ -268,13 +270,13 @@ class Namer { typer: Typer => else if flags.isAllOf(EnumValue) && ctx.owner.isStaticOwner then flags |= JavaStatic case tree: TypeDef => def analyzeRHS(rhs: Tree): Unit = rhs match - case _: TypeBoundsTree | _: MatchTypeTree => - flags |= Deferred // Typedefs with Match rhs classify as abstract + case _: TypeBoundsTree => + flags |= Deferred case LambdaTypeTree(_, body) => analyzeRHS(body) case _ => if rhs.isEmpty || flags.is(Opaque) then flags |= Deferred - analyzeRHS(tree.rhs) + if flags.is(Param) then tree.rhs else analyzeRHS(tree.rhs) // to complete a constructor, move one context further out -- this // is the context enclosing the class. Note that the context in which a @@ -294,12 +296,15 @@ class Namer { typer: Typer => createOrRefine[Symbol](tree, name, flags, ctx.owner, _ => info, (fs, _, pwithin) => newSymbol(ctx.owner, name, fs, info, pwithin, tree.nameSpan)) case tree: Import => - recordSym(newImportSymbol(ctx.owner, Completer(tree)(ctx), tree.span), tree) + recordSym(importSymbol(tree), tree) case _ => NoSymbol } } + private def importSymbol(imp: Import)(using Context): Symbol = + newImportSymbol(ctx.owner, Completer(imp)(ctx), imp.span) + /** If `sym` exists, enter it in effective scope. Check that * package members are not entered twice in the same run. 
*/ @@ -401,6 +406,11 @@ class Namer { typer: Typer => enterSymbol(sym) setDocstring(sym, origStat) addEnumConstants(mdef, sym) + mdef match + case tdef: TypeDef if ctx.owner.isClass => + for case WitnessNamesAnnot(witnessNames) <- tdef.mods.annotations do + addContextBoundCompanionFor(symbolOfTree(tdef), witnessNames, Nil) + case _ => ctx case stats: Thicket => stats.toList.foreach(recur) @@ -524,11 +534,9 @@ class Namer { typer: Typer => } /** Transfer all references to `from` to `to` */ - def transferReferences(from: ValDef, to: ValDef): Unit = { - val fromRefs = from.removeAttachment(References).getOrElse(Nil) - val toRefs = to.removeAttachment(References).getOrElse(Nil) - to.putAttachment(References, fromRefs ++ toRefs) - } + def transferReferences(from: ValDef, to: ValDef): Unit = + for ref <- from.removeAttachment(References).getOrElse(Nil) do + ref.watching(to) /** Merge the module class `modCls` in the expanded tree of `mdef` with the * body and derived clause of the synthetic module class `fromCls`. @@ -706,7 +714,18 @@ class Namer { typer: Typer => enterSymbol(companion) end addAbsentCompanions - stats.foreach(expand) + /** Expand each statement, keeping track of language imports in the context. This is + * necessary since desugaring might depend on language imports. 
+ */ + def expandTopLevel(stats: List[Tree])(using Context): Unit = stats match + case (imp @ Import(qual, _)) :: stats1 if untpd.languageImport(qual).isDefined => + expandTopLevel(stats1)(using ctx.importContext(imp, importSymbol(imp))) + case stat :: stats1 => + expand(stat) + expandTopLevel(stats1) + case Nil => + + expandTopLevel(stats) mergeCompanionDefs() val ctxWithStats = stats.foldLeft(ctx)((ctx, stat) => indexExpanded(stat)(using ctx)) inContext(ctxWithStats) { @@ -1203,7 +1222,9 @@ class Namer { typer: Typer => target = target.etaExpand newSymbol( cls, forwarderName, - MandatoryExportTypeFlags | (sym.flags & RetainedExportTypeFlags), + Exported + | (sym.flags & RetainedExportTypeFlags) + | (if Feature.enabled(modularity) then EmptyFlags else Final), TypeAlias(target), coord = span) // Note: This will always create unparameterzied aliases. So even if the original type is @@ -1255,12 +1276,13 @@ class Namer { typer: Typer => newSymbol(cls, forwarderName, mbrFlags, mbrInfo, coord = span) forwarder.info = avoidPrivateLeaks(forwarder) + + // Add annotations at the member level forwarder.addAnnotations(sym.annotations.filterConserve { annot => annot.symbol != defn.BodyAnnot && annot.symbol != defn.TailrecAnnot && annot.symbol != defn.MainAnnot && !annot.symbol.derivesFrom(defn.MacroAnnotationClass) - && !annot.symbol.derivesFrom(defn.MainAnnotationClass) }) if forwarder.isType then @@ -1291,6 +1313,17 @@ class Namer { typer: Typer => foreachDefaultGetterOf(sym.asTerm, getter => addForwarder( getter.name.asTermName, getter.asSeenFrom(path.tpe), span)) + + // adding annotations and flags at the parameter level + // TODO: This probably needs to be filtered to avoid adding some annotation + // such as MacroAnnotations + if sym.is(Method) then + for (orig, forwarded) <- sym.paramSymss.lazyZip(forwarder.paramSymss) + (origParameter, exportedParameter) <- orig.lazyZip(forwarded) + do + exportedParameter.addAnnotations(origParameter.annotations) + if 
exportedParameter.isTerm then + exportedParameter.setFlag(origParameter.flags & RetainedExportTermParamFlags) end addForwarder def addForwardersNamed(name: TermName, alias: TermName, span: Span): Unit = @@ -1430,7 +1463,8 @@ class Namer { typer: Typer => def process(stats: List[Tree])(using Context): Unit = stats match case (stat: Export) :: stats1 => - processExport(stat, NoSymbol) + CyclicReference.trace(i"elaborate the export clause $stat"): + processExport(stat, NoSymbol) process(stats1) case (stat: Import) :: stats1 => process(stats1)(using ctx.importContext(stat, symbolOfTree(stat))) @@ -1500,6 +1534,7 @@ class Namer { typer: Typer => /** The type signature of a ClassDef with given symbol */ override def completeInCreationContext(denot: SymDenotation): Unit = { val parents = impl.parents + val parentRefinements = new mutable.LinkedHashMap[Name, Type] /* The type of a parent constructor. Types constructor arguments * only if parent type contains uninstantiated type parameters. @@ -1513,8 +1548,9 @@ class Namer { typer: Typer => core match case Select(New(tpt), nme.CONSTRUCTOR) => val targs1 = targs map (typedAheadType(_)) - val ptype = typedAheadType(tpt).tpe appliedTo targs1.tpes - if (ptype.typeParams.isEmpty) ptype + val ptype = typedAheadType(tpt).tpe.appliedTo(targs1.tpes) + if ptype.typeParams.isEmpty && !ptype.dealias.typeSymbol.is(Dependent) then + ptype else if (denot.is(ModuleClass) && denot.sourceModule.isOneOf(GivenOrImplicit)) missingType(denot.symbol, "parent ")(using creationContext) @@ -1554,8 +1590,13 @@ class Namer { typer: Typer => val ptype = parentType(parent)(using completerCtx.superCallContext).dealiasKeepAnnots if (cls.isRefinementClass) ptype else { - val pt = checkClassType(ptype, parent.srcPos, - traitReq = parent ne parents.head, stablePrefixReq = !isJava) + val pt = checkClassType( + if Feature.enabled(modularity) + then ptype.separateRefinements(cls, parentRefinements) + else ptype, + parent.srcPos, + traitReq = parent ne 
parents.head, + stablePrefixReq = !isJava) if (pt.derivesFrom(cls)) { val addendum = parent match { case Select(qual: Super, _) if Feature.migrateTo3 => @@ -1582,6 +1623,23 @@ class Namer { typer: Typer => } } + /** Enter all parent refinements as public class members, unless a definition + * with the same name already exists in the class. Remember the refining symbols + * as an attachment on the ClassDef tree. + */ + def enterParentRefinementSyms(refinements: List[(Name, Type)]) = + val refinedSyms = mutable.ListBuffer[Symbol]() + for (name, tp) <- refinements do + if decls.lookupEntry(name) == null then + val flags = tp match + case tp: MethodOrPoly => Method | Synthetic | Deferred | Tracked + case _ if name.isTermName => Synthetic | Deferred | Tracked + case _ => Synthetic | Deferred + refinedSyms += newSymbol(cls, name, flags, tp, coord = original.rhs.span.startPos).entered + if refinedSyms.nonEmpty then + typr.println(i"parent refinement symbols: ${refinedSyms.toList}") + original.pushAttachment(ParentRefinements, refinedSyms.toList) + /** If `parents` contains references to traits that have supertraits with implicit parameters * add those supertraits in linearization order unless they are already covered by other * parent types. For instance, in @@ -1623,11 +1681,9 @@ class Namer { typer: Typer => val parentTypes = defn.adjustForTuple(cls, cls.typeParams, defn.adjustForBoxedUnit(cls, - addUsingTraits( - locally: - val isJava = ctx.isJava - ensureFirstIsClass(cls, parents.map(checkedParentType(_, isJava))) - ) + addUsingTraits: + val isJava = ctx.isJava + ensureFirstIsClass(cls, parents.map(checkedParentType(_, isJava))) ) ) typr.println(i"completing $denot, parents = $parents%, %, parentTypes = $parentTypes%, %") @@ -1652,6 +1708,7 @@ class Namer { typer: Typer => cls.invalidateMemberCaches() // we might have checked for a member when parents were not known yet. 
cls.setNoInitsFlags(parentsKind(parents), untpd.bodyKind(rest)) cls.setStableConstructor() + enterParentRefinementSyms(parentRefinements.toList) processExports(using localCtx) defn.patchStdLibClass(cls) addConstructorProxies(cls) @@ -1668,7 +1725,7 @@ class Namer { typer: Typer => final override def complete(denot: SymDenotation)(using Context): Unit = denot.resetFlag(Touched) // allow one more completion - ctx.compilationUnit.suspend() + ctx.compilationUnit.suspend(i"reset $denot") } /** Typecheck `tree` during completion using `typed`, and remember result in TypedAhead map */ @@ -1698,12 +1755,6 @@ class Namer { typer: Typer => val sym = tree.symbol if sym.isConstructor then sym.owner else sym - /** Enter and typecheck parameter list */ - def completeParams(params: List[MemberDef])(using Context): Unit = { - index(params) - for (param <- params) typedAheadExpr(param) - } - /** The signature of a module valdef. * This will compute the corresponding module class TypeRef immediately * without going through the defined type of the ValDef. 
This is necessary @@ -1779,6 +1830,18 @@ class Namer { typer: Typer => case _ => WildcardType } + + // translate `given T = deferred` to an abstract given with HasDefault flag + if sym.is(Given) then + mdef.rhs match + case rhs: RefTree + if rhs.name == nme.deferred + && typedAheadExpr(rhs).symbol == defn.Compiletime_deferred + && sym.maybeOwner.is(Trait) => + sym.resetFlag(Final) + sym.setFlag(Deferred | HasDefault) + case _ => + val mbrTpe = paramFn(checkSimpleKinded(typedAheadType(mdef.tpt, tptProto)).tpe) if (ctx.explicitNulls && mdef.mods.is(JavaDefined)) JavaNullInterop.nullifyMember(sym, mbrTpe, mdef.mods.isAllOf(JavaEnumValue)) @@ -1786,10 +1849,35 @@ class Namer { typer: Typer => } /** The type signature of a DefDef with given symbol */ - def defDefSig(ddef: DefDef, sym: Symbol, completer: Namer#Completer)(using Context): Type = { + def defDefSig(ddef: DefDef, sym: Symbol, completer: Namer#Completer)(using Context): Type = // Beware: ddef.name need not match sym.name if sym was freshened! val isConstructor = sym.name == nme.CONSTRUCTOR + // A map from context-bounded type parameters to associated evidence parameter names + val witnessNamesOfParam = mutable.Map[TypeDef, List[TermName]]() + if !ddef.name.is(DefaultGetterName) && !sym.is(Synthetic) then + for params <- ddef.paramss; case tdef: TypeDef <- params do + for case WitnessNamesAnnot(ws) <- tdef.mods.annotations do + witnessNamesOfParam(tdef) = ws + + /** Is each name in `wnames` defined somewhere in the longest prefix of all `params` + * that have been typed ahead (i.e. that carry the TypedAhead attachment)? + */ + def allParamsSeen(wnames: List[TermName], params: List[MemberDef]) = + (wnames.toSet[Name] -- params.takeWhile(_.hasAttachment(TypedAhead)).map(_.name)).isEmpty + + /** Enter and typecheck parameter list. + * Once all witness parameters for a context bound are seen, create a + * context bound companion for it. 
+ */ + def completeParams(params: List[MemberDef])(using Context): Unit = + index(params) + for param <- params do + typedAheadExpr(param) + for (tdef, wnames) <- witnessNamesOfParam do + if wnames.contains(param.name) && allParamsSeen(wnames, params) then + addContextBoundCompanionFor(symbolOfTree(tdef), wnames, params.map(symbolOfTree)) + // The following 3 lines replace what was previously just completeParams(tparams). // But that can cause bad bounds being computed, as witnessed by // tests/pos/paramcycle.scala. The problematic sequence is this: @@ -1822,16 +1910,54 @@ class Namer { typer: Typer => ddef.trailingParamss.foreach(completeParams) val paramSymss = normalizeIfConstructor(ddef.paramss.nestedMap(symbolOfTree), isConstructor) sym.setParamss(paramSymss) + + /** Under x.modularity, we add `tracked` to context bound witnesses + * that have abstract type members + */ + def needsTracked(sym: Symbol, param: ValDef)(using Context) = + !sym.is(Tracked) + && param.hasAttachment(ContextBoundParam) + && sym.info.memberNames(abstractTypeNameFilter).nonEmpty + + /** Under x.modularity, set every context bound evidence parameter of a class to be tracked, + * provided it has a type that has an abstract type member. Reset private and local flags + * so that the parameter becomes a `val`. 
+ */ + def setTracked(param: ValDef): Unit = + val sym = symbolOfTree(param) + sym.maybeOwner.maybeOwner.infoOrCompleter match + case info: TempClassInfo if needsTracked(sym, param) => + typr.println(i"set tracked $param, $sym: ${sym.info} containing ${sym.info.memberNames(abstractTypeNameFilter).toList}") + for acc <- info.decls.lookupAll(sym.name) if acc.is(ParamAccessor) do + acc.resetFlag(PrivateLocal) + acc.setFlag(Tracked) + sym.setFlag(Tracked) + case _ => + def wrapMethType(restpe: Type): Type = instantiateDependent(restpe, paramSymss) methodType(paramSymss, restpe, ddef.mods.is(JavaDefined)) + + def wrapRefinedMethType(restpe: Type): Type = + wrapMethType(addParamRefinements(restpe, paramSymss)) + if isConstructor then + if sym.isPrimaryConstructor && Feature.enabled(modularity) then + ddef.termParamss.foreach(_.foreach(setTracked)) // set result type tree to unit, but take the current class as result type of the symbol typedAheadType(ddef.tpt, defn.UnitType) wrapMethType(effectiveResultType(sym, paramSymss)) + else if sym.isAllOf(Given | Method) && Feature.enabled(modularity) then + // set every context bound evidence parameter of a given companion method + // to be tracked, provided it has a type that has an abstract type member. + // Add refinements for all tracked parameters to the result type. 
+ for params <- ddef.termParamss; param <- params do + val psym = symbolOfTree(param) + if needsTracked(psym, param) then psym.setFlag(Tracked) + valOrDefDefSig(ddef, sym, paramSymss, wrapRefinedMethType) else valOrDefDefSig(ddef, sym, paramSymss, wrapMethType) - } + end defDefSig def inferredResultType( mdef: ValOrDefDef, @@ -1942,8 +2068,9 @@ class Namer { typer: Typer => rhsCtx = prepareRhsCtx(rhsCtx, paramss) def typedAheadRhs(pt: Type) = - PrepareInlineable.dropInlineIfError(sym, - typedAheadExpr(mdef.rhs, pt)(using rhsCtx)) + CyclicReference.trace(i"type the right hand side of $sym since no explicit type was given"): + PrepareInlineable.dropInlineIfError(sym, + typedAheadExpr(mdef.rhs, pt)(using rhsCtx)) def rhsType = // For default getters, we use the corresponding parameter type as an @@ -1961,9 +2088,10 @@ class Namer { typer: Typer => else // don't strip @uncheckedVariance annot for default getters TypeOps.simplify(tp.widenTermRefExpr, - if defaultTp.exists then TypeOps.SimplifyKeepUnchecked() else null) match + if defaultTp.exists then TypeOps.SimplifyKeepUnchecked() else null) + match case ctp: ConstantType if sym.isInlineVal => ctp - case tp => TypeComparer.widenInferred(tp, pt, widenUnions = true) + case tp => TypeComparer.widenInferred(tp, pt, Widen.Unions) // Replace aliases to Unit by Unit itself. If we leave the alias in // it would be erased to BoxedUnit. diff --git a/compiler/src/dotty/tools/dotc/typer/Nullables.scala b/compiler/src/dotty/tools/dotc/typer/Nullables.scala index 914fc0acb89d..3f071dad2d03 100644 --- a/compiler/src/dotty/tools/dotc/typer/Nullables.scala +++ b/compiler/src/dotty/tools/dotc/typer/Nullables.scala @@ -33,20 +33,24 @@ object Nullables: && hi.isValueType // We cannot check if hi is nullable, because it can cause cyclic reference. 
+ private def nullifiedHi(lo: Type, hi: Type)(using Context): Type = + if needNullifyHi(lo, hi) then + if ctx.flexibleTypes then FlexibleType(hi) else OrNull(hi) + else hi + /** Create a nullable type bound * If lo is `Null`, `| Null` is added to hi */ def createNullableTypeBounds(lo: Type, hi: Type)(using Context): TypeBounds = - val newHi = if needNullifyHi(lo, hi) then OrType(hi, defn.NullType, soft = false) else hi - TypeBounds(lo, newHi) + TypeBounds(lo, nullifiedHi(lo, hi)) /** Create a nullable type bound tree * If lo is `Null`, `| Null` is added to hi */ def createNullableTypeBoundsTree(lo: Tree, hi: Tree, alias: Tree = EmptyTree)(using Context): TypeBoundsTree = - val hiTpe = hi.typeOpt - val newHi = if needNullifyHi(lo.typeOpt, hiTpe) then TypeTree(OrType(hiTpe, defn.NullType, soft = false)) else hi - TypeBoundsTree(lo, newHi, alias) + val hiTpe = nullifiedHi(lo.typeOpt, hi.typeOpt) + val hiTree = if(hiTpe eq hi.typeOpt) hi else TypeTree(hiTpe) + TypeBoundsTree(lo, hiTree, alias) /** A set of val or var references that are known to be not null, plus a set of * variable references that are not known (anymore) to be not null diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index 46c12b244fbb..ecf1da30cac1 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -11,6 +11,7 @@ import Constants.* import util.{Stats, SimpleIdentityMap, SimpleIdentitySet} import Decorators.* import Uniques.* +import Flags.Method import inlines.Inlines import config.Printers.typr import Inferencing.* @@ -26,7 +27,7 @@ object ProtoTypes { import tpd.* /** A trait defining an `isCompatible` method. */ - trait Compatibility { + trait Compatibility: /** Is there an implicit conversion from `tp` to `pt`? 
*/ def viewExists(tp: Type, pt: Type)(using Context): Boolean @@ -106,19 +107,34 @@ object ProtoTypes { if !res then ctx.typerState.constraint = savedConstraint res - /** Constrain result with special case if `meth` is an inlineable method in an inlineable context. - * In that case, we should always succeed and not constrain type parameters in the expected type, - * because the actual return type can be a subtype of the currently known return type. - * However, we should constrain parameters of the declared return type. This distinction is - * achieved by replacing expected type parameters with wildcards. + /** Constrain result with two special cases: + * 1. If `meth` is an inlineable method in an inlineable context, + * we should always succeed and not constrain type parameters in the expected type, + * because the actual return type can be a subtype of the currently known return type. + * However, we should constrain parameters of the declared return type. This distinction is + * achieved by replacing expected type parameters with wildcards. + * 2. When constraining the result of a primitive value operation against + * a precise typevar, don't lower-bound the typevar with a non-singleton type. 
*/ def constrainResult(meth: Symbol, mt: Type, pt: Type)(using Context): Boolean = - if (Inlines.isInlineable(meth)) { + + def constFoldException(pt: Type): Boolean = pt.dealias match + case tvar: TypeVar => + tvar.isPrecise + && meth.is(Method) && meth.owner.isPrimitiveValueClass + && mt.resultType.isPrimitiveValueType && !mt.resultType.isSingleton + case tparam: TypeParamRef => + constFoldException(ctx.typerState.constraint.typeVarOfParam(tparam)) + case _ => + false + + if Inlines.isInlineable(meth) then constrainResult(mt, wildApprox(pt)) true - } - else constrainResult(mt, pt) - } + else + constFoldException(pt) || constrainResult(mt, pt) + end constrainResult + end Compatibility object NoViewsAllowed extends Compatibility { override def viewExists(tp: Type, pt: Type)(using Context): Boolean = false @@ -701,6 +717,20 @@ object ProtoTypes { case FunProto((arg: untpd.TypedSplice) :: Nil, _) => arg.isExtensionReceiver case _ => false + /** An extractor for Singleton and Precise witness types. + * + * Singleton { type Self = T } returns Some(T, true) + * Precise { type Self = T } returns Some(T, false) + */ + object PreciseConstrained: + def unapply(tp: Type)(using Context): Option[(Type, Boolean)] = tp.dealias match + case RefinedType(parent, tpnme.Self, TypeAlias(tp)) => + val tsym = parent.typeSymbol + if tsym == defn.SingletonClass then Some((tp, true)) + else if tsym == defn.PreciseClass then Some((tp, false)) + else None + case _ => None + /** Add all parameters of given type lambda `tl` to the constraint's domain. * If the constraint contains already some of these parameters in its domain, * make a copy of the type lambda and add the copy's type parameters instead. 
@@ -713,26 +743,43 @@ object ProtoTypes { tl: TypeLambda, owningTree: untpd.Tree, alwaysAddTypeVars: Boolean, nestingLevel: Int = ctx.nestingLevel - ): (TypeLambda, List[TypeVar]) = { + ): (TypeLambda, List[TypeVar]) = val state = ctx.typerState val addTypeVars = alwaysAddTypeVars || !owningTree.isEmpty if (tl.isInstanceOf[PolyType]) assert(!ctx.typerState.isCommittable || addTypeVars, s"inconsistent: no typevars were added to committable constraint ${state.constraint}") // hk type lambdas can be added to constraints without typevars during match reduction + val added = state.constraint.ensureFresh(tl) - def newTypeVars(tl: TypeLambda): List[TypeVar] = - for paramRef <- tl.paramRefs - yield - val tvar = TypeVar(paramRef, state, nestingLevel) + def preciseConstrainedRefs(tp: Type, singletonOnly: Boolean): Set[TypeParamRef] = tp match + case tp: MethodType if tp.isContextualMethod => + val ownBounds = + for + case PreciseConstrained(ref: TypeParamRef, singleton) <- tp.paramInfos + if !singletonOnly || singleton + yield ref + ownBounds.toSet ++ preciseConstrainedRefs(tp.resType, singletonOnly) + case tp: LambdaType => + preciseConstrainedRefs(tp.resType, singletonOnly) + case _ => + Set.empty + + def newTypeVars: List[TypeVar] = + val preciseRefs = preciseConstrainedRefs(added, singletonOnly = false) + for paramRef <- added.paramRefs yield + val tvar = TypeVar(paramRef, state, nestingLevel, precise = preciseRefs.contains(paramRef)) state.ownedVars += tvar tvar - val added = state.constraint.ensureFresh(tl) - val tvars = if addTypeVars then newTypeVars(added) else Nil + val tvars = if addTypeVars then newTypeVars else Nil TypeComparer.addToConstraint(added, tvars) + val singletonRefs = preciseConstrainedRefs(added, singletonOnly = true) + for paramRef <- added.paramRefs do + // Constrain all type parameters [T: Singleton] to T <: Singleton + if singletonRefs.contains(paramRef) then paramRef <:< defn.SingletonType (added, tvars) - } + end constrained def constrained(tl: 
TypeLambda, owningTree: untpd.Tree)(using Context): (TypeLambda, List[TypeVar]) = constrained(tl, owningTree, diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index e152b5e6b9c7..7a5c838848ac 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -170,13 +170,6 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override def addCanThrowCapabilities(expr: untpd.Tree, cases: List[CaseDef])(using Context): untpd.Tree = expr - override def typedUnadapted(tree: untpd.Tree, pt: Type, locked: TypeVars)(using Context): Tree = - try super.typedUnadapted(tree, pt, locked) - catch case NonFatal(ex) if ctx.phase != Phases.typerPhase && ctx.phase != Phases.inliningPhase && !ctx.run.enrichedErrorMessage => - val treeStr = tree.show(using ctx.withPhase(ctx.phase.prev.megaPhase)) - println(ctx.run.enrichErrorMessage(s"exception while retyping $treeStr of class ${tree.className} # ${tree.uniqueId}")) - throw ex - override def inlineExpansion(mdef: DefDef)(using Context): List[Tree] = mdef :: Nil override def inferView(from: Tree, to: Type)(using Context): Implicits.SearchResult = @@ -189,4 +182,5 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override protected def checkEqualityEvidence(tree: tpd.Tree, pt: Type)(using Context): Unit = () override protected def matchingApply(methType: MethodOrPoly, pt: FunProto)(using Context): Boolean = true override protected def typedScala2MacroBody(call: untpd.Tree)(using Context): Tree = promote(call) + override protected def migrate[T](migration: => T, disabled: => T = ()): T = disabled } diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index 173d5e6b1f7e..cb1aea27c444 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ 
b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -52,7 +52,7 @@ object RefChecks { }} for (name <- defaultMethodNames) { - val methods = clazz.info.member(name).alternatives.map(_.symbol) + val methods = clazz.thisType.member(name).alternatives.map(_.symbol) val haveDefaults = methods.filter(_.hasDefaultParams) if (haveDefaults.length > 1) { val owners = haveDefaults map (_.owner) @@ -552,7 +552,11 @@ object RefChecks { overrideError("is an extension method, cannot override a normal method") else if (other.is(ExtensionMethod) && !member.is(ExtensionMethod)) // (1.3) overrideError("is a normal method, cannot override an extension method") - else if !other.is(Deferred) + else if (!other.is(Deferred) + || other.isAllOf(Given | HasDefault) + // deferred givens have flags Given, HasDefault and Deferred set. These + // need to be checked for overriding as if they were concrete members + ) && !member.is(Deferred) && !other.name.is(DefaultGetterName) && !member.isAnyOverride @@ -610,8 +614,13 @@ object RefChecks { overrideError("is not inline, cannot implement an inline method") else if (other.isScala2Macro && !member.isScala2Macro) // (1.11) overrideError("cannot be used here - only Scala-2 macros can override Scala-2 macros") - else if (!compatTypes(memberTp(self), otherTp(self)) && - !compatTypes(memberTp(upwardsSelf), otherTp(upwardsSelf))) + else if !compatTypes(memberTp(self), otherTp(self)) + && !compatTypes(memberTp(upwardsSelf), otherTp(upwardsSelf)) + && !member.is(Tracked) + // Tracked members need to be excluded since they are abstract type members with + // singleton types. Concrete overrides usually have a wider type. + // TODO: Should we exclude all refinements inherited from parents? 
+ then overrideError("has incompatible type", compareTypes = true) else if (member.targetName != other.targetName) if (other.targetName != other.name) @@ -620,7 +629,9 @@ object RefChecks { overrideError("cannot have a @targetName annotation since external names would be different") else if intoOccurrences(memberTp(self)) != intoOccurrences(otherTp(self)) then overrideError("has different occurrences of `into` modifiers", compareTypes = true) - else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) then // (1.12) + else if other.is(ParamAccessor) && !isInheritedAccessor(member, other) + && !member.is(Tracked) // see remark on tracked members above + then // (1.12) report.errorOrMigrationWarning( em"cannot override val parameter ${other.showLocated}", member.srcPos, @@ -670,6 +681,10 @@ object RefChecks { mbr.isType || mbr.isSuperAccessor // not yet synthesized || mbr.is(JavaDefined) && hasJavaErasedOverriding(mbr) + || mbr.is(Tracked) + // Tracked members correspond to existing val parameters, so they don't + // count as deferred. The val parameter could not implement the tracked + // refinement since it usually has a wider type. def isImplemented(mbr: Symbol) = val mbrDenot = mbr.asSeenFrom(clazz.thisType) @@ -891,11 +906,15 @@ object RefChecks { * can assume invariant refinement for case classes in `constrainPatternType`. 
*/ def checkCaseClassInheritanceInvariant() = - for (caseCls <- clazz.info.baseClasses.tail.find(_.is(Case))) - for (baseCls <- caseCls.info.baseClasses.tail) - if (baseCls.typeParams.exists(_.paramVarianceSign != 0)) - for (problem <- variantInheritanceProblems(baseCls, caseCls, "non-variant", "case ")) - report.errorOrMigrationWarning(problem, clazz.srcPos, MigrationVersion.Scala2to3) + for + caseCls <- clazz.info.baseClasses.tail.find(_.is(Case)) + baseCls <- caseCls.info.baseClasses.tail + if baseCls.typeParams.exists(_.paramVarianceSign != 0) + problem <- variantInheritanceProblems(baseCls, caseCls, i"base $baseCls", "case ") + withExplain = problem.appendExplanation: + """Refining a basetype of a case class is not allowed. + |This is a limitation that enables better GADT constraints in case class patterns""".stripMargin + do report.errorOrMigrationWarning(withExplain, clazz.srcPos, MigrationVersion.Scala2to3) checkNoAbstractMembers() if (abstractErrors.isEmpty) checkNoAbstractDecls(clazz) @@ -924,7 +943,7 @@ object RefChecks { for { cls <- clazz.info.baseClasses.tail if cls.paramAccessors.nonEmpty && !mixins.contains(cls) - problem <- variantInheritanceProblems(cls, clazz.asClass.superClass, "parameterized", "super") + problem <- variantInheritanceProblems(cls, clazz.asClass.superClass, i"parameterized base $cls", "super") } report.error(problem, clazz.srcPos) } @@ -947,7 +966,7 @@ object RefChecks { if (combinedBT =:= thisBT) None // ok else Some( - em"""illegal inheritance: $clazz inherits conflicting instances of $baseStr base $baseCls. + em"""illegal inheritance: $clazz inherits conflicting instances of $baseStr. | | Direct basetype: $thisBT | Basetype via $middleStr$middle: $combinedBT""") @@ -1033,8 +1052,7 @@ object RefChecks { * surprising names at runtime. E.g. in neg/i4564a.scala, a private * case class `apply` method would have to be renamed to something else. 
*/ - def checkNoPrivateOverrides(tree: Tree)(using Context): Unit = - val sym = tree.symbol + def checkNoPrivateOverrides(sym: Symbol)(using Context): Unit = if sym.maybeOwner.isClass && sym.is(Private) && (sym.isOneOf(MethodOrLazyOrMutable) || !sym.is(Local)) // in these cases we'll produce a getter later @@ -1100,6 +1118,58 @@ object RefChecks { end checkUnaryMethods + /** Check that an extension method is not hidden, i.e., that it is callable as an extension method. + * + * An extension method is hidden if it does not offer a parameter that is not subsumed + * by the corresponding parameter of the member with the same name (or of all alternatives of an overload). + * + * This check is suppressed if this method is an override. + * + * For example, it is not possible to define a type-safe extension `contains` for `Set`, + * since for any parameter type, the existing `contains` method will compile and would be used. + * + * If the member has a leading implicit parameter list, then the extension method must also have + * a leading implicit parameter list. The reason is that if the implicit arguments are inferred, + * either the member method is used or typechecking fails. If the implicit arguments are supplied + * explicitly and the member method is not applicable, the extension is checked, and its parameters + * must be implicit in order to be applicable. + * + * If the member does not have a leading implicit parameter list, then the argument cannot be explicitly + * supplied with `using`, as typechecking would fail. But the extension method may have leading implicit + * parameters, which are necessarily supplied implicitly in the application. The first non-implicit + * parameters of the extension method must be distinguishable from the member parameters, as described. + * + * If the extension method is nullary, it is always hidden by a member of the same name. + * (Either the member is nullary, or the reference is taken as the eta-expansion of the member.) 
+ */ + def checkExtensionMethods(sym: Symbol)(using Context): Unit = + if sym.is(Extension) && !sym.nextOverriddenSymbol.exists then + extension (tp: Type) + def strippedResultType = Applications.stripImplicit(tp.stripPoly, wildcardOnly = true).resultType + def firstExplicitParamTypes = Applications.stripImplicit(tp.stripPoly, wildcardOnly = true).firstParamTypes + def hasImplicitParams = tp.stripPoly match { case mt: MethodType => mt.isImplicitMethod case _ => false } + val target = sym.info.firstExplicitParamTypes.head // required for extension method, the putative receiver + val methTp = sym.info.strippedResultType // skip leading implicits and the "receiver" parameter + def hidden = + target.nonPrivateMember(sym.name) + .filterWithPredicate: + member => + val memberIsImplicit = member.info.hasImplicitParams + val paramTps = + if memberIsImplicit then methTp.stripPoly.firstParamTypes + else methTp.firstExplicitParamTypes + + paramTps.isEmpty || memberIsImplicit && !methTp.hasImplicitParams || { + val memberParamTps = member.info.stripPoly.firstParamTypes + !memberParamTps.isEmpty + && memberParamTps.lengthCompare(paramTps) == 0 + && memberParamTps.lazyZip(paramTps).forall((m, x) => x frozen_<:< m) + } + .exists + if !target.typeSymbol.denot.isAliasType && !target.typeSymbol.denot.isOpaqueAlias && hidden + then report.warning(ExtensionNullifiedByMember(sym, target.typeSymbol), sym.srcPos) + end checkExtensionMethods + /** Verify that references in the user-defined `@implicitNotFound` message are valid. * (i.e. they refer to a type variable that really occurs in the signature of the annotated symbol.) 
*/ @@ -1181,12 +1251,12 @@ object RefChecks { end checkImplicitNotFoundAnnotation - def checkAnyRefMethodCall(tree: Tree)(using Context) = - if tree.symbol.exists - && defn.topClasses.contains(tree.symbol.owner) - && (!ctx.owner.enclosingClass.exists || ctx.owner.enclosingClass.isPackageObject) then - report.warning(UnqualifiedCallToAnyRefMethod(tree, tree.symbol), tree) - + def checkAnyRefMethodCall(tree: Tree)(using Context): Unit = + if tree.symbol.exists && defn.topClasses.contains(tree.symbol.owner) then + tree.tpe match + case tp: NamedType if tp.prefix.typeSymbol != ctx.owner.enclosingClass => + report.warning(UnqualifiedCallToAnyRefMethod(tree, tree.symbol), tree) + case _ => () } import RefChecks.* @@ -1233,8 +1303,8 @@ class RefChecks extends MiniPhase { thisPhase => override def transformValDef(tree: ValDef)(using Context): ValDef = { if tree.symbol.exists then - checkNoPrivateOverrides(tree) val sym = tree.symbol + checkNoPrivateOverrides(sym) checkVolatile(sym) if (sym.exists && sym.owner.isTerm) { tree.rhs match { @@ -1246,9 +1316,11 @@ class RefChecks extends MiniPhase { thisPhase => } override def transformDefDef(tree: DefDef)(using Context): DefDef = { - checkNoPrivateOverrides(tree) - checkImplicitNotFoundAnnotation.defDef(tree.symbol.denot) - checkUnaryMethods(tree.symbol) + val sym = tree.symbol + checkNoPrivateOverrides(sym) + checkImplicitNotFoundAnnotation.defDef(sym.denot) + checkUnaryMethods(sym) + checkExtensionMethods(sym) tree } diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index c94724faf4d4..6b18540b6551 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -15,6 +15,7 @@ import transform.SyntheticMembers.* import util.Property import ast.Trees.genericEmptyTree import annotation.{tailrec, constructorOnly} +import ast.tpd import ast.tpd.* import Synthesizer.* import 
sbt.ExtractDependencies.* @@ -236,6 +237,23 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): EmptyTreeNoError end synthesizedValueOf + val synthesizedSingleton: SpecialHandler = (formal, span) => formal match + case PreciseConstrained(tp, true) => + if tp.isSingletonBounded(frozen = false) then + withNoErrors: + ref(defn.Compiletime_erasedValue).appliedToType(formal).withSpan(span) + else + withErrors(i"$tp is not a singleton") + case _ => + EmptyTreeNoError + + val synthesizedPrecise: SpecialHandler = (formal, span) => formal match + case PreciseConstrained(tp, false) => + withNoErrors: + ref(defn.Compiletime_erasedValue).appliedToType(formal).withSpan(span) + case _ => + EmptyTreeNoError + /** Create an anonymous class `new Object { type MirroredMonoType = ... }` * and mark it with given attachment so that it is made into a mirror at PostTyper. */ @@ -265,10 +283,10 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): .refinedWith(tpnme.MirroredType, TypeAlias(mirroredType)) .refinedWith(tpnme.MirroredLabel, TypeAlias(ConstantType(Constant(label.toString)))) - /** A path referencing the companion of class type `clsType` */ - private def companionPath(clsType: Type, span: Span)(using Context) = - val ref = pathFor(clsType.mirrorCompanionRef) - assert(ref.symbol.is(Module) && (clsType.classSymbol.is(ModuleClass) || (ref.symbol.companionClass == clsType.classSymbol))) + /** A path referencing the companion of `cls` with prefix `pre` */ + private def companionPath(pre: Type, cls: Symbol, span: Span)(using Context) = + val ref = tpd.ref(TermRef(pre, cls.companionModule)) + assert(ref.symbol.is(Module) && (cls.is(ModuleClass) || ref.symbol.companionClass == cls)) ref.withSpan(span) private def checkFormal(formal: Type)(using Context): Boolean = @@ -379,7 +397,9 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): // avoid type aliases for tuples Right(MirrorSource.GenericTuple(types)) case _ => 
reduce(tp.underlying) - case tp: MatchType => reduce(tp.normalized) + case tp: MatchType => + val n = tp.tryNormalize + if n.exists then reduce(n) else Left(i"its subpart `$tp` is an unreducible match type.") case _ => reduce(tp.superType) case tp @ AndType(l, r) => for @@ -426,7 +446,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): .refinedWith(tpnme.MirroredElemLabels, TypeAlias(elemsLabels)) } val mirrorRef = - if cls.useCompanionAsProductMirror then companionPath(mirroredType, span) + if cls.useCompanionAsProductMirror then companionPath(pre, cls, span) else if defn.isTupleClass(cls) then newTupleMirror(typeElems.size) // TODO: cls == defn.PairClass when > 22 else anonymousMirror(monoType, MirrorImpl.OfProduct(pre), span) withNoErrors(mirrorRef.cast(mirrorType).withSpan(span)) @@ -436,7 +456,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): case Right(msrc) => msrc match case MirrorSource.Singleton(_, tref) => val singleton = tref.termSymbol // prefer alias name over the orignal name - val singletonPath = pathFor(tref).withSpan(span) + val singletonPath = tpd.singleton(tref).withSpan(span) if tref.classSymbol.is(Scala2x) then // could be Scala 3 alias of Scala 2 case object. 
val mirrorType = formal.constrained_& { mirrorCore(defn.Mirror_SingletonProxyClass, mirroredType, mirroredType, singleton.name) @@ -533,7 +553,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): val tparams = poly.paramRefs val variances = childClass.typeParams.map(_.paramVarianceSign) val instanceTypes = tparams.lazyZip(variances).map((tparam, variance) => - TypeComparer.instanceType(tparam, fromBelow = variance < 0, widenUnions = true) + TypeComparer.instanceType(tparam, fromBelow = variance < 0, Widen.Unions) ) val instanceType = resType.substParams(poly, instanceTypes) // this is broken in tests/run/i13332intersection.scala, @@ -569,7 +589,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): .refinedWith(tpnme.MirroredElemLabels, TypeAlias(labels)) } val mirrorRef = - if cls.useCompanionAsSumMirror then companionPath(mirroredType, span) + if cls.useCompanionAsSumMirror then companionPath(pre, cls, span) else anonymousMirror(monoType, MirrorImpl.OfSum(childPres), span) withNoErrors(mirrorRef.cast(mirrorType)) else if acceptableMsg.nonEmpty then @@ -669,7 +689,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): def canManifest(tp: Manifestable, topLevel: Boolean) = val sym = tp.typeSymbol - !sym.isAbstractType + !sym.isAbstractOrParamType && hasStableErasure(tp) && !(topLevel && defn.isBottomClassAfterErasure(sym)) @@ -735,6 +755,8 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): defn.MirrorClass -> synthesizedMirror, defn.ManifestClass -> synthesizedManifest, defn.OptManifestClass -> synthesizedOptManifest, + defn.SingletonClass -> synthesizedSingleton, + defn.PreciseClass -> synthesizedPrecise, ) def tryAll(formal: Type, span: Span)(using Context): TreeWithErrors = diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index 96c5e57dde0e..64722d51708c 100644 --- 
a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -519,9 +519,7 @@ trait TypeAssigner { def assignType(tree: untpd.TypeBoundsTree, lo: Tree, hi: Tree, alias: Tree)(using Context): TypeBoundsTree = tree.withType( if !alias.isEmpty then alias.tpe - else if lo eq hi then - if lo.tpe.isMatch then MatchAlias(lo.tpe) - else TypeAlias(lo.tpe) + else if lo eq hi then AliasingBounds(lo.tpe) else TypeBounds(lo.tpe, hi.tpe)) def assignType(tree: untpd.Bind, sym: Symbol)(using Context): Bind = @@ -533,6 +531,12 @@ trait TypeAssigner { def assignType(tree: untpd.UnApply, proto: Type)(using Context): UnApply = tree.withType(proto) + def assignType(tree: untpd.Splice, expr: Tree)(using Context): Splice = + val tpe = expr.tpe // Quotes ?=> Expr[T] + .baseType(defn.FunctionSymbol(1, isContextual = true)).argTypes.last // Expr[T] + .baseType(defn.QuotedExprClass).argTypes.head // T + tree.withType(tpe) + def assignType(tree: untpd.QuotePattern, proto: Type)(using Context): QuotePattern = tree.withType(proto) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 0b05bcd078ff..2a877a45b550 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -40,8 +40,7 @@ import annotation.tailrec import Implicits.* import util.Stats.record import config.Printers.{gadts, typr} -import config.Feature -import config.Feature.{sourceVersion, migrateTo3} +import config.Feature, Feature.{sourceVersion, migrateTo3, modularity} import config.SourceVersion.* import rewrites.Rewrites, Rewrites.patch import staging.StagingLevel @@ -51,6 +50,7 @@ import NullOpsDecorator.* import cc.{CheckCaptures, isRetainsLike} import config.Config import config.MigrationVersion +import transform.CheckUnused.OriginalName import scala.annotation.constructorOnly @@ -112,6 +112,31 @@ object Typer { def rememberSearchFailure(tree: 
tpd.Tree, fail: SearchFailure) = tree.putAttachment(HiddenSearchFailure, fail :: tree.attachmentOrElse(HiddenSearchFailure, Nil)) + + def tryEither[T](op: Context ?=> T)(fallBack: (T, TyperState) => T)(using Context): T = { + val nestedCtx = ctx.fresh.setNewTyperState() + val result = op(using nestedCtx) + if (nestedCtx.reporter.hasErrors && !nestedCtx.reporter.hasStickyErrors) { + record("tryEither.fallBack") + fallBack(result, nestedCtx.typerState) + } + else { + record("tryEither.commit") + nestedCtx.typerState.commit() + result + } + } + + /** Try `op1`, if there are errors, try `op2`, if `op2` also causes errors, fall back + * to errors and result of `op1`. + */ + def tryAlternatively[T](op1: Context ?=> T)(op2: Context ?=> T)(using Context): T = + tryEither(op1) { (failedVal, failedState) => + tryEither(op2) { (_, _) => + failedState.commit() + failedVal + } + } } /** Typecheck trees, the main entry point is `typed`. * @@ -158,6 +183,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Overridden in derived typers def newLikeThis(nestingLevel: Int): Typer = new Typer(nestingLevel) + /** Apply given migration. Overridden to use `disabled` instead in ReTypers. */ + protected def migrate[T](migration: => T, disabled: => T = ()): T = migration + /** Find the type of an identifier with given `name` in given context `ctx`. * @param name the name of the identifier * @param pt the expected type @@ -662,7 +690,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val selection = untpd.cpy.Select(tree)(qualifier, name) typed(selection, pt) else if rawType.exists then - setType(ensureAccessible(rawType, superAccess = false, tree.srcPos)) + val ref = setType(ensureAccessible(rawType, superAccess = false, tree.srcPos)) + if ref.symbol.name != name then + ref.withAttachment(OriginalName, name) + else ref else if name == nme._scope then // gross hack to support current xml literals. 
// awaiting a better implicits based solution for library-supported xml @@ -706,68 +737,198 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer checkLegalValue(select, pt) ConstFold(select) + // If regular selection is typeable, we are done if checkedType.exists then - finish(tree, qual, checkedType) - else if selName == nme.apply && qual.tpe.widen.isInstanceOf[MethodType] then - // Simplify `m.apply(...)` to `m(...)` - qual - else if couldInstantiateTypeVar(qual.tpe.widen) then + return finish(tree, qual, checkedType) + + // Otherwise, simplify `m.apply(...)` to `m(...)` + if selName == nme.apply && qual.tpe.widen.isInstanceOf[MethodType] then + return qual + + // Otherwise, if there's a simply visible type variable in the result, try again + // with a more defined qualifier type. There's a second trial where we try to instantiate + // all type variables in `qual.tpe.widen`, but that is done only after we search for + // extension methods or conversions. + if couldInstantiateTypeVar(qual.tpe.widen) then // there's a simply visible type variable in the result; try again with a more defined qualifier type // There's a second trial where we try to instantiate all type variables in `qual.tpe.widen`, // but that is done only after we search for extension methods or conversions. - typedSelect(tree, pt, qual) - else if qual.tpe.isSmallGenericTuple then + return typedSelect(tree, pt, qual) + + // Otherwise, try to expand a named tuple selection + val namedTupleElems = qual.tpe.widen.namedTupleElementTypes + val nameIdx = namedTupleElems.indexWhere(_._1 == selName) + if nameIdx >= 0 && Feature.enabled(Feature.namedTuples) then + return typed( + untpd.Apply( + untpd.Select(untpd.TypedSplice(qual), nme.apply), + untpd.Literal(Constant(nameIdx))), + pt) + + // Otherwise, map combinations of A *: B *: .... 
EmptyTuple with nesting levels <= 22 + // to the Tuple class of the right arity and select from that one + if qual.tpe.isSmallGenericTuple then val elems = qual.tpe.widenTermRefExpr.tupleElementTypes.getOrElse(Nil) - typedSelect(tree, pt, qual.cast(defn.tupleType(elems))) - else - val tree1 = { - if selName.isTypeName then EmptyTree - else tryExtensionOrConversion( - tree, pt, IgnoredProto(pt), qual, ctx.typerState.ownedVars, this, inSelect = true) - }.orElse { - if ctx.gadt.isNarrowing then - // try GADT approximation if we're trying to select a member - // Member lookup cannot take GADTs into account b/c of cache, so we - // approximate types based on GADT constraints instead. For an example, - // see MemberHealing in gadt-approximation-interaction.scala. - val wtp = qual.tpe.widen - gadts.println(i"Trying to heal member selection by GADT-approximating $wtp") - val gadtApprox = Inferencing.approximateGADT(wtp) - gadts.println(i"GADT-approximated $wtp ~~ $gadtApprox") - val qual1 = qual.cast(gadtApprox) - val tree1 = cpy.Select(tree0)(qual1, selName) - val checkedType1 = accessibleType(selectionType(tree1, qual1), superAccess = false) - if checkedType1.exists then - gadts.println(i"Member selection healed by GADT approximation") - finish(tree1, qual1, checkedType1) - else if qual1.tpe.isSmallGenericTuple then - gadts.println(i"Tuple member selection healed by GADT approximation") - typedSelect(tree, pt, qual1) - else - tryExtensionOrConversion(tree1, pt, IgnoredProto(pt), qual1, ctx.typerState.ownedVars, this, inSelect = true) - else EmptyTree - } + return typedSelect(tree, pt, qual.cast(defn.tupleType(elems))) + + // Otherwise try an extension or conversion + if selName.isTermName then + val tree1 = tryExtensionOrConversion( + tree, pt, IgnoredProto(pt), qual, ctx.typerState.ownedVars, this, inSelect = true) if !tree1.isEmpty then - tree1 - else if canDefineFurther(qual.tpe.widen) then - typedSelect(tree, pt, qual) - else if 
qual.tpe.derivesFrom(defn.DynamicClass) - && selName.isTermName && !isDynamicExpansion(tree) - then - val tree2 = cpy.Select(tree0)(untpd.TypedSplice(qual), selName) - if pt.isInstanceOf[FunOrPolyProto] || pt == LhsProto then - assignType(tree2, TryDynamicCallType) - else - typedDynamicSelect(tree2, Nil, pt) + return tree1 + + // Otherwise, try a GADT approximation if we're trying to select a member + // Member lookup cannot take GADTs into account b/c of cache, so we + // approximate types based on GADT constraints instead. For an example, + // see MemberHealing in gadt-approximation-interaction.scala. + if ctx.gadt.isNarrowing then + val wtp = qual.tpe.widen + gadts.println(i"Trying to heal member selection by GADT-approximating $wtp") + val gadtApprox = Inferencing.approximateGADT(wtp) + gadts.println(i"GADT-approximated $wtp ~~ $gadtApprox") + val qual1 = qual.cast(gadtApprox) + val tree1 = cpy.Select(tree0)(qual1, selName) + val checkedType1 = accessibleType(selectionType(tree1, qual1), superAccess = false) + if checkedType1.exists then + gadts.println(i"Member selection healed by GADT approximation") + return finish(tree1, qual1, checkedType1) + + if qual1.tpe.isSmallGenericTuple then + gadts.println(i"Tuple member selection healed by GADT approximation") + return typedSelect(tree, pt, qual1) + + val tree2 = tryExtensionOrConversion(tree1, pt, IgnoredProto(pt), qual1, ctx.typerState.ownedVars, this, inSelect = true) + if !tree2.isEmpty then + return tree2 + + // Otherwise, if there are uninstantiated type variables in the qualifier type, + // instantiate them and try again + if canDefineFurther(qual.tpe.widen) then + return typedSelect(tree, pt, qual) + + def dynamicSelect(pt: Type) = + val tree2 = cpy.Select(tree0)(untpd.TypedSplice(qual), selName) + if pt.isInstanceOf[FunOrPolyProto] || pt == LhsProto then + assignType(tree2, TryDynamicCallType) else - assignType(tree, - rawType match - case rawType: NamedType => - inaccessibleErrorType(rawType, 
superAccess, tree.srcPos) + typedDynamicSelect(tree2, Nil, pt) + + // Otherwise, if the qualifier derives from class Dynamic, expand to a + // dynamic dispatch using selectDynamic or applyDynamic + if qual.tpe.derivesFrom(defn.DynamicClass) && selName.isTermName && !isDynamicExpansion(tree) then + return dynamicSelect(pt) + + // Otherwise, if the qualifier derives from class Selectable, + // and the selector name matches one of the element of the `Fields` type member, + // and the selector is neither applied nor assigned to, + // expand to a typed dynamic dispatch using selectDynamic wrapped in a cast + if qual.tpe.derivesFrom(defn.SelectableClass) && !isDynamicExpansion(tree) + && !pt.isInstanceOf[FunOrPolyProto] && pt != LhsProto + then + val fieldsType = qual.tpe.select(tpnme.Fields).dealias.simplified + val fields = fieldsType.namedTupleElementTypes + typr.println(i"try dyn select $qual, $selName, $fields") + fields.find(_._1 == selName) match + case Some((_, fieldType)) => + val dynSelected = dynamicSelect(fieldType) + dynSelected match + case Apply(sel: Select, _) if !sel.denot.symbol.exists => + // Reject corner case where selectDynamic needs annother selectDynamic to be called. E.g. as in neg/unselectable-fields.scala. 
+ report.error(i"Cannot use selectDynamic here since it needs another selectDynamic to be invoked", tree.srcPos) case _ => - notAMemberErrorType(tree, qual, pt)) + return dynSelected.ensureConforms(fieldType) + case _ => + + // Otherwise, if the qualifier is a context bound companion, handle + // by selecting a witness in typedCBSelect + if qual.tpe.typeSymbol == defn.CBCompanion then + val witnessSelection = typedCBSelect(tree0, pt, qual) + if !witnessSelection.isEmpty then return witnessSelection + + // Otherwise, report an error + assignType(tree, + rawType match + case rawType: NamedType => + inaccessibleErrorType(rawType, superAccess, tree.srcPos) + case _ => + notAMemberErrorType(tree, qual, pt)) end typedSelect + /** Expand a selection A.m on a context bound companion A with type + * `[ref_1 | ... | ref_N]` as described by + * Step 3 of the doc comment of annotation.internal.WitnessNames. + * @return the best alternative if it exists, + * or EmptyTree if no witness admits selecting with the given name, + * or EmptyTree and report an ambiguity error of there are several + * possible witnesses and no selection is better than the other + * according to the critera given in Step 3. + */ + def typedCBSelect(tree: untpd.Select, pt: Type, qual: Tree)(using Context): Tree = + + type Alts = List[(/*prev: */Tree, /*prevState: */TyperState, /*prevWitness: */TermRef)] + + /** Compare two alternative selections `alt1` and `alt2` from witness types + * `wit1`, `wit2` according to the 3 criteria in Step 3 of the doc comment + * of annotation.internal.WitnessNames. I.e. 
+ * + * alt1 = qual1.m, alt2 = qual2.m, qual1: wit1, qual2: wit2 + * + * @return 1 if 1st alternative is preferred over 2nd + * -1 if 2nd alternative is preferred over 1st + * 0 if neither alternative is preferred over the other + */ + def compareAlts(alt1: Tree, alt2: Tree, wit1: TermRef, wit2: TermRef): Int = + val cmpPrefix = compare(wit1, wit2, preferGeneral = true) + typr.println(i"compare witnesses $wit1: ${wit1.info}, $wit2: ${wit2.info} = $cmpPrefix") + if cmpPrefix != 0 then cmpPrefix + else (alt1.tpe, alt2.tpe) match + case (tp1: TypeRef, tp2: TypeRef) => + if tp1.dealias == tp2.dealias then 1 else 0 + case (tp1: TermRef, tp2: TermRef) => + if tp1.info.isSingleton && (tp1 frozen_=:= tp2) then 1 + else compare(tp1, tp2, preferGeneral = false) + case (tp1: TermRef, _) => 1 // should not happen, but prefer TermRefs over others + case (_, tp2: TermRef) => -1 + case _ => 0 + + /** Find the set of maximally preferred alternatives among `prevs` and + * alternatives referred to by `witnesses`. + * @param prevs a list of (ref tree, typer state, term ref) tripls that + * represents previously identified alternatives + * @param witnesses a type of the form ref_1 | ... | ref_n containing references + * still to be considered. 
+ */ + def tryAlts(prevs: Alts, witnesses: Type): Alts = witnesses match + case OrType(wit1, wit2) => + tryAlts(tryAlts(prevs, wit1), wit2) + case witness: TermRef => + val altQual = tpd.ref(witness).withSpan(qual.span) + val altCtx = ctx.fresh.setNewTyperState() + val alt = typedSelect(tree, pt, altQual)(using altCtx) + def current = (alt, altCtx.typerState, witness) + if altCtx.reporter.hasErrors then prevs + else + val comparisons = prevs.map: (prevTree, prevState, prevWitness) => + compareAlts(prevTree, alt, prevWitness, witness) + if comparisons.exists(_ == 1) then prevs + else current :: prevs.zip(comparisons).collect{ case (prev, cmp) if cmp != -1 => prev } + + qual.tpe.widen match + case AppliedType(_, arg :: Nil) => + tryAlts(Nil, arg) match + case Nil => EmptyTree + case (best @ (bestTree, bestState, _)) :: Nil => + bestState.commit() + bestTree + case multiAlts => + report.error( + em"""Ambiguous witness reference. None of the following alternatives is more specific than the other: + |${multiAlts.map((alt, _, witness) => i"\n $witness.${tree.name}: ${alt.tpe.widen}")}""", + tree.srcPos) + EmptyTree + end typedCBSelect + def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { record("typedSelect") @@ -922,13 +1083,14 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer import untpd.* var templ1 = templ def isEligible(tp: Type) = - tp.exists - && !tp.typeSymbol.is(Final) - && (!tp.isTopType || tp.isAnyRef) // Object is the only toplevel class that can be instantiated - if (templ1.parents.isEmpty && - isFullyDefined(pt, ForceDegree.flipBottom) && - isSkolemFree(pt) && - isEligible(pt.underlyingClassRef(refinementOK = false))) + tp.exists + && !tp.typeSymbol.is(Final) + && (!tp.isTopType || tp.isAnyRef) // Object is the only toplevel class that can be instantiated + if templ1.parents.isEmpty + && isFullyDefined(pt, ForceDegree.flipBottom) + && isSkolemFree(pt) + && isEligible(pt.underlyingClassRef(refinementOK = 
Feature.enabled(modularity))) + then templ1 = cpy.Template(templ)(parents = untpd.TypeTree(pt) :: Nil) for case parent: RefTree <- templ1.parents do typedAhead(parent, tree => inferTypeParams(typedType(tree), pt)) @@ -981,17 +1143,23 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } if (untpd.isWildcardStarArg(tree)) { - def typedWildcardStarArgExpr = { - // A sequence argument `xs: _*` can be either a `Seq[T]` or an `Array[_ <: T]`, - // irrespective of whether the method we're calling is a Java or Scala method, - // so the expected type is the union `Seq[T] | Array[_ <: T]`. - val ptArg = - // FIXME(#8680): Quoted patterns do not support Array repeated arguments + + def fromRepeated(pt: Type): Type = pt match + case pt: FlexibleType => + pt.derivedFlexibleType(fromRepeated(pt.hi)) + case _ => if ctx.mode.isQuotedPattern then + // FIXME(#8680): Quoted patterns do not support Array repeated arguments pt.translateFromRepeated(toArray = false, translateWildcard = true) else pt.translateFromRepeated(toArray = false, translateWildcard = true) - | pt.translateFromRepeated(toArray = true, translateWildcard = true) + | pt.translateFromRepeated(toArray = true, translateWildcard = true) + + def typedWildcardStarArgExpr = { + // A sequence argument `xs: _*` can be either a `Seq[T]` or an `Array[_ <: T]`, + // irrespective of whether the method we're calling is a Java or Scala method, + // so the expected type is the union `Seq[T] | Array[_ <: T]`. + val ptArg = fromRepeated(pt) val expr0 = typedExpr(tree.expr, ptArg) val expr1 = if ctx.explicitNulls && (!ctx.mode.is(Mode.Pattern)) then if expr0.tpe.isNullType then @@ -1079,7 +1247,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * with annotation contructor, as named arguments are not allowed anywhere else in Java. * Under explicit nulls, the pt could be nullable. We need to strip `Null` type first. 
*/ - val arg1 = pt.stripNull match { + val arg1 = pt.stripNull() match { case AppliedType(a, typ :: Nil) if ctx.isJava && a.isRef(defn.ArrayClass) => tryAlternatively { typed(tree.arg, pt) } { val elemTp = untpd.TypedSplice(TypeTree(typ)) @@ -1841,11 +2009,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => false } - val result = pt match { + val result = pt.underlyingMatchType match { case mt: MatchType if isMatchTypeShaped(mt) => typedDependentMatchFinish(tree, sel1, selType, tree.cases, mt) - case MatchType.InDisguise(mt) if isMatchTypeShaped(mt) => - typedDependentMatchFinish(tree, sel1, selType, tree.cases, mt) case _ => typedMatchFinish(tree, sel1, selType, tree.cases, pt) } @@ -1908,7 +2074,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val case1 = typedCase(cas, sel, wideSelType, tpe)(using caseCtx) caseCtx = Nullables.afterPatternContext(sel, case1.pat) if !alreadyStripped && Nullables.matchesNull(case1) then - wideSelType = wideSelType.stripNull + wideSelType = wideSelType.stripNull() alreadyStripped = true case1 } @@ -1931,7 +2097,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val case1 = typedCase(cas, sel, wideSelType, pt)(using caseCtx) caseCtx = Nullables.afterPatternContext(sel, case1.pat) if !alreadyStripped && Nullables.matchesNull(case1) then - wideSelType = wideSelType.stripNull + wideSelType = wideSelType.stripNull() alreadyStripped = true case1 } @@ -1956,9 +2122,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (bounds != null) sym.info = bounds } b - case t: UnApply if t.symbol.is(Inline) => - assert(!t.symbol.is(Transparent)) - cpy.UnApply(t)(fun = Inlines.inlinedUnapplyFun(t.fun)) // TODO inline these in the inlining phase (see #19382) case t => t } } @@ -2134,7 +2297,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else res def typedSeqLiteral(tree: untpd.SeqLiteral, pt: Type)(using Context): SeqLiteral = { - val 
elemProto = pt.stripNull.elemType match { + val elemProto = pt.stripNull().elemType match { case NoType => WildcardType case bounds: TypeBounds => WildcardType(bounds) case elemtp => elemtp @@ -2201,6 +2364,24 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tree.tpFun(tsyms, vsyms) completeTypeTree(InferredTypeTree(), tp, tree) + def typedContextBoundTypeTree(tree: untpd.ContextBoundTypeTree)(using Context): Tree = + val tycon = typedType(tree.tycon) + def spliced(tree: Tree) = untpd.TypedSplice(tree) + val tparam = untpd.Ident(tree.paramName).withSpan(tree.span) + if tycon.tpe.typeParams.nonEmpty then + val tycon0 = tycon.withType(tycon.tpe.etaCollapse) + typed(untpd.AppliedTypeTree(spliced(tycon0), tparam :: Nil)) + else if Feature.enabled(modularity) && tycon.tpe.member(tpnme.Self).symbol.isAbstractOrParamType then + val tparamSplice = untpd.TypedSplice(typedExpr(tparam)) + typed(untpd.RefinedTypeTree(spliced(tycon), List(untpd.TypeDef(tpnme.Self, tparamSplice)))) + else + def selfNote = + if Feature.enabled(modularity) then + " and\ndoes not have an abstract type member named `Self` either" + else "" + errorTree(tree, + em"Illegal context bound: ${tycon.tpe} does not take type parameters$selfNote.") + def typedSingletonTypeTree(tree: untpd.SingletonTypeTree)(using Context): SingletonTypeTree = { val ref1 = typedExpr(tree.ref, SingletonTypeProto) checkStable(ref1.tpe, tree.srcPos, "singleton type") @@ -2208,12 +2389,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } def typedRefinedTypeTree(tree: untpd.RefinedTypeTree)(using Context): TypTree = { - val tpt1 = if (tree.tpt.isEmpty) TypeTree(defn.ObjectType) else typedAheadType(tree.tpt) + val tpt1 = if tree.tpt == EmptyTree then TypeTree(defn.ObjectType) else typedAheadType(tree.tpt) val refineClsDef = desugar.refinedTypeToClass(tpt1, tree.refinements).withSpan(tree.span) val refineCls = createSymbol(refineClsDef).asClass val TypeDef(_, impl: Template) = 
typed(refineClsDef): @unchecked val refinements1 = impl.body - assert(tree.refinements.hasSameLengthAs(refinements1), i"${tree.refinements}%, % > $refinements1%, %") val seen = mutable.Set[Symbol]() for (refinement <- refinements1) { // TODO: get clarity whether we want to enforce these conditions typr.println(s"adding refinement $refinement") @@ -2389,15 +2569,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer report.error(MatchTypeScrutineeCannotBeHigherKinded(sel1Tpe), sel1.srcPos) val pt1 = if (bound1.isEmpty) pt else bound1.tpe val cases1 = tree.cases.mapconserve(typedTypeCase(_, sel1Tpe, pt1)) - val bound2 = if tree.bound.isEmpty then - val lub = cases1.foldLeft(defn.NothingType: Type): (acc, case1) => - if !acc.exists then NoType - else if case1.body.tpe.isProvisional then NoType - else acc | case1.body.tpe - if lub.exists then TypeTree(lub, inferred = true) - else bound1 - else bound1 - assignType(cpy.MatchTypeTree(tree)(bound2, sel1, cases1), bound2, sel1, cases1) + assignType(cpy.MatchTypeTree(tree)(bound1, sel1, cases1), bound1, sel1, cases1) } def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(using Context): ByNameTypeTree = tree.result match @@ -2433,7 +2605,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer var name = tree.name if (name == nme.WILDCARD && tree.mods.is(Given)) { val Typed(_, tpt) = tree.body: @unchecked - name = desugar.inventGivenOrExtensionName(tpt) + name = desugar.inventGivenName(tpt) } if (name == nme.WILDCARD) body1 else { @@ -2446,7 +2618,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer body1.isInstanceOf[RefTree] && !isWildcardArg(body1) || body1.isInstanceOf[Literal] val symTp = - if isStableIdentifierOrLiteral then pt + if isStableIdentifierOrLiteral || pt.isNamedTupleType then pt + // need to combine tuple element types with expected named type else if isWildcardStarArg(body1) || pt == defn.ImplicitScrutineeTypeRef || body1.tpe <:< pt // There is some strange 
interaction with gadt matching. @@ -2552,33 +2725,48 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if filters == List(MessageFilter.None) then sup.markUsed() ctx.run.nn.suppressions.addSuppression(sup) + /** Run `typed` on `rhs` except if `rhs` is the right hand side of a deferred given, + * in which case the empty tree is returned. + */ + private inline def excludeDeferredGiven( + rhs: untpd.Tree, sym: Symbol)( + inline typed: untpd.Tree => Tree)(using Context): Tree = + rhs match + case rhs: RefTree + if rhs.name == nme.deferred && sym.isAllOf(DeferredGivenFlags, butNot = Param) => + EmptyTree + case _ => + typed(rhs) + def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = { val ValDef(name, tpt, _) = vdef checkNonRootName(vdef.name, vdef.nameSpan) completeAnnotations(vdef, sym) - if (sym.isOneOf(GivenOrImplicit)) checkImplicitConversionDefOK(sym) + if sym.is(Implicit) then checkImplicitConversionDefOK(sym) if sym.is(Module) then checkNoModuleClash(sym) val tpt1 = checkSimpleKinded(typedType(tpt)) - val rhs1 = vdef.rhs match { - case rhs @ Ident(nme.WILDCARD) => rhs withType tpt1.tpe - case rhs => typedExpr(rhs, tpt1.tpe.widenExpr) - } + val rhs1 = vdef.rhs match + case rhs @ Ident(nme.WILDCARD) => + rhs.withType(tpt1.tpe) + case rhs => + excludeDeferredGiven(rhs, sym): + typedExpr(_, tpt1.tpe.widenExpr) val vdef1 = assignType(cpy.ValDef(vdef)(name, tpt1, rhs1), sym) postProcessInfo(vdef1, sym) vdef1.setDefTree } - def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using Context): Tree = { - def canBeInvalidated(sym: Symbol): Boolean = + private def retractDefDef(sym: Symbol)(using Context): Tree = + // it's a discarded method (synthetic case class method or synthetic java record constructor or overridden member), drop it + val canBeInvalidated: Boolean = sym.is(Synthetic) && (desugar.isRetractableCaseClassMethodName(sym.name) || (sym.owner.is(JavaDefined) && sym.owner.derivesFrom(defn.JavaRecordClass) && sym.is(Method))) + 
assert(canBeInvalidated) + sym.owner.info.decls.openForMutations.unlink(sym) + EmptyTree - if !sym.info.exists then - // it's a discarded method (synthetic case class method or synthetic java record constructor or overriden member), drop it - assert(canBeInvalidated(sym)) - sym.owner.info.decls.openForMutations.unlink(sym) - return EmptyTree + def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using Context): Tree = if !sym.info.exists then retractDefDef(sym) else { // TODO: - Remove this when `scala.language.experimental.erasedDefinitions` is no longer experimental. // - Modify signature to `erased def erasedValue[T]: T` @@ -2622,9 +2810,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if sym.isInlineMethod then rhsCtx.addMode(Mode.InlineableBody) if sym.is(ExtensionMethod) then rhsCtx.addMode(Mode.InExtensionMethod) - val rhs1 = PrepareInlineable.dropInlineIfError(sym, - if sym.isScala2Macro then typedScala2MacroBody(ddef.rhs)(using rhsCtx) - else typedExpr(ddef.rhs, tpt1.tpe.widenExpr)(using rhsCtx)) + val rhs1 = excludeDeferredGiven(ddef.rhs, sym): rhs => + PrepareInlineable.dropInlineIfError(sym, + if sym.isScala2Macro then typedScala2MacroBody(rhs)(using rhsCtx) + else typedExpr(rhs, tpt1.tpe.widenExpr)(using rhsCtx)) if sym.isInlineMethod then if StagingLevel.level > 0 then @@ -2697,8 +2886,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typeIndexedLambdaTypeTree(rhs, tparams, body) case rhs => typedType(rhs) - checkFullyAppliedType(rhs1) - if sym.isOpaqueAlias then checkNoContextFunctionType(rhs1) + if sym.isOpaqueAlias then + checkFullyAppliedType(rhs1, "Opaque type alias must be fully applied, but ") + checkNoContextFunctionType(rhs1) assignType(cpy.TypeDef(tdef)(name, rhs1), sym) } @@ -2791,6 +2981,72 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } } + /** Add all parent refinement symbols as declarations to this class */ + def addParentRefinements(body: List[Tree])(using Context): 
List[Tree] = + cdef.getAttachment(ParentRefinements) match + case Some(refinedSyms) => + val refinements = refinedSyms.map: sym => + ( if sym.isType then TypeDef(sym.asType) + else if sym.is(Method) then DefDef(sym.asTerm) + else ValDef(sym.asTerm) + ).withSpan(impl.span.startPos) + body ++ refinements + case None => + body + + /** Implement givens that were declared with a `deferred` rhs. + * The a given value matching the declared type is searched in a + * context directly enclosing the current class, in which all given + * parameters of the current class are also defined. + */ + def implementDeferredGivens(body: List[Tree]): List[Tree] = + if cls.is(Trait) || ctx.isAfterTyper then body + else + def isGivenValue(mbr: TermRef) = + val dcl = mbr.symbol + if dcl.is(Method) then + report.error( + em"""Cannnot infer the implementation of the deferred ${dcl.showLocated} + |since that given is parameterized. An implementing given needs to be written explicitly.""", + cdef.srcPos) + false + else true + + def givenImpl(mbr: TermRef): ValDef = + val dcl = mbr.symbol + val target = dcl.info.asSeenFrom(cls.thisType, dcl.owner) + val constr = cls.primaryConstructor + val usingParamAccessors = cls.paramAccessors.filter(_.is(Given)) + val paramScope = newScopeWith(usingParamAccessors*) + val searchCtx = ctx.outer.fresh.setScope(paramScope) + val rhs = implicitArgTree(target, cdef.span, + where = i"inferring the implementation of the deferred ${dcl.showLocated}" + )(using searchCtx) + + val impl = dcl.copy(cls, + flags = dcl.flags &~ (HasDefault | Deferred) | Final | Override, + info = target, + coord = rhs.span).entered.asTerm + + def anchorParams = new TreeMap: + override def transform(tree: Tree)(using Context): Tree = tree match + case id: Ident if usingParamAccessors.contains(id.symbol) => + cpy.Select(id)(This(cls), id.name) + case _ => + super.transform(tree) + ValDef(impl, anchorParams.transform(rhs)).withSpan(impl.span.endPos) + end givenImpl + + val givenImpls = + 
cls.thisType.implicitMembers + //.showing(i"impl def givens for $cls/$result") + .filter(_.symbol.isAllOf(DeferredGivenFlags, butNot = Param)) + //.showing(i"impl def filtered givens for $cls/$result") + .filter(isGivenValue) + .map(givenImpl) + body ++ givenImpls + end implementDeferredGivens + ensureCorrectSuperClass() completeAnnotations(cdef, cls) val constr1 = typed(constr).asInstanceOf[DefDef] @@ -2811,7 +3067,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cdef.withType(UnspecifiedErrorType) else { val dummy = localDummy(cls, impl) - val body1 = addAccessorDefs(cls, typedStats(impl.body, dummy)(using ctx.inClassContext(self1.symbol))._1) + val body1 = + implementDeferredGivens( + addParentRefinements( + addAccessorDefs(cls, + typedStats(impl.body, dummy)(using ctx.inClassContext(self1.symbol))._1))) checkNoDoubleDeclaration(cls) val impl1 = cpy.Template(impl)(constr1, parents1, Nil, self1, body1) @@ -3006,7 +3266,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Translate infix operation expression `l op r` to * - * l.op(r) if `op` is left-associative + * l.op(r) if `op` is left-associative * { val x = l; r.op(x) } if `op` is right-associative call-by-value and `l` is impure, and not in a quote pattern * r.op(l) if `op` is right-associative call-by-name, or `l` is pure, or in a quote pattern * @@ -3046,37 +3306,32 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } /** Translate tuples of all arities */ - def typedTuple(tree: untpd.Tuple, pt: Type)(using Context): Tree = { - val arity = tree.trees.length - if (arity <= Definitions.MaxTupleArity) - typed(desugar.smallTuple(tree).withSpan(tree.span), pt) - else { - val pts = - pt.tupleElementTypes match - case Some(types) if types.size == arity => types - case _ => List.fill(arity)(defn.AnyType) - val elems = tree.trees.lazyZip(pts).map( + def typedTuple(tree: untpd.Tuple, pt: Type)(using Context): Tree = + val tree1 = desugar.tuple(tree, pt) 
+ if tree1 ne tree then typed(tree1, pt) + else + val arity = tree.trees.length + val pts = pt.stripNamedTuple.tupleElementTypes match + case Some(types) if types.size == arity => types + case _ => List.fill(arity)(defn.AnyType) + val elems = tree.trees.lazyZip(pts).map: if ctx.mode.is(Mode.Type) then typedType(_, _, mapPatternBounds = true) - else typed(_, _)) - if (ctx.mode.is(Mode.Type)) + else typed(_, _) + if ctx.mode.is(Mode.Type) then elems.foldRight(TypeTree(defn.EmptyTupleModule.termRef): Tree)((elemTpt, elemTpts) => AppliedTypeTree(TypeTree(defn.PairClass.typeRef), List(elemTpt, elemTpts))) .withSpan(tree.span) - else { + else val tupleXXLobj = untpd.ref(defn.TupleXXLModule.termRef) val app = untpd.cpy.Apply(tree)(tupleXXLobj, elems.map(untpd.TypedSplice(_))) .withSpan(tree.span) val app1 = typed(app, if ctx.mode.is(Mode.Pattern) then pt else defn.TupleXXLClass.typeRef) - if (ctx.mode.is(Mode.Pattern)) app1 - else { - val elemTpes = elems.lazyZip(pts).map((elem, pt) => - TypeComparer.widenInferred(elem.tpe, pt, widenUnions = true)) + if ctx.mode.is(Mode.Pattern) then app1 + else + val elemTpes = elems.lazyZip(pts).map: (elem, pt) => + TypeComparer.widenInferred(elem.tpe, pt, Widen.Unions) val resTpe = TypeOps.nestedPairs(elemTpes) app1.cast(resTpe) - } - } - } - } /** Retrieve symbol attached to given tree */ protected def retrieveSym(tree: untpd.Tree)(using Context): Symbol = tree.removeAttachment(SymOfTree) match { @@ -3174,6 +3429,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case tree: untpd.UnApply => typedUnApply(tree, pt) case tree: untpd.Tuple => typedTuple(tree, pt) case tree: untpd.InLambdaTypeTree => typedInLambdaTypeTree(tree, pt) + case tree: untpd.ContextBoundTypeTree => typedContextBoundTypeTree(tree) case tree: untpd.InfixOp => typedInfixOp(tree, pt) case tree: untpd.ParsedTry => typedTry(tree, pt) case tree @ untpd.PostfixOp(qual, Ident(nme.WILDCARD)) => typedAsFunction(tree, pt) @@ -3271,7 +3527,7 @@ class 
Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val paramTypes = { val hasWildcard = formals.exists(_.existsPart(_.isInstanceOf[WildcardType], StopAt.Static)) if hasWildcard then formals.map(_ => untpd.TypeTree()) - else formals.map(formal => untpd.TypeTree(formal.loBound)) // about loBound, see tests/pos/i18649.scala + else formals.map(formal => untpd.InferredTypeTree(formal.loBound)) // about loBound, see tests/pos/i18649.scala } val erasedParams = pt match { @@ -3432,31 +3688,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedPattern(tree: untpd.Tree, selType: Type = WildcardType)(using Context): Tree = withMode(Mode.Pattern)(typed(tree, selType)) - def tryEither[T](op: Context ?=> T)(fallBack: (T, TyperState) => T)(using Context): T = { - val nestedCtx = ctx.fresh.setNewTyperState() - val result = op(using nestedCtx) - if (nestedCtx.reporter.hasErrors && !nestedCtx.reporter.hasStickyErrors) { - record("tryEither.fallBack") - fallBack(result, nestedCtx.typerState) - } - else { - record("tryEither.commit") - nestedCtx.typerState.commit() - result - } - } - - /** Try `op1`, if there are errors, try `op2`, if `op2` also causes errors, fall back - * to errors and result of `op1`. - */ - def tryAlternatively[T](op1: Context ?=> T)(op2: Context ?=> T)(using Context): T = - tryEither(op1) { (failedVal, failedState) => - tryEither(op2) { (_, _) => - failedState.commit() - failedVal - } - } - /** Is `pt` a prototype of an `apply` selection, or a parameterless function yielding one? 
*/ def isApplyProto(pt: Type)(using Context): Boolean = pt.revealIgnored match { case pt: SelectionProto => pt.name == nme.apply @@ -3707,7 +3938,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer private def adapt1(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = { assert(pt.exists && !pt.isInstanceOf[ExprType] || ctx.reporter.errorsReported, i"tree: $tree, pt: $pt") - def methodStr = err.refStr(methPart(tree).tpe) def readapt(tree: Tree)(using Context) = adapt(tree, pt, locked) def readaptSimplified(tree: Tree)(using Context) = readapt(simplify(tree, pt, locked)) @@ -3821,7 +4051,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def dummyArg(tp: Type) = untpd.Ident(nme.???).withTypeUnchecked(tp) - def addImplicitArgs(using Context) = { + val origCtx = ctx + + def addImplicitArgs(using Context) = def hasDefaultParams = methPart(tree).symbol.hasDefaultParams def implicitArgs(formals: List[Type], argIndex: Int, pt: Type): List[Tree] = formals match case Nil => Nil @@ -3872,49 +4104,44 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer arg :: inferArgsAfter(arg) end implicitArgs - val args = implicitArgs(wtp.paramInfos, 0, pt) - - def propagatedFailure(args: List[Tree]): Type = args match { - case arg :: args1 => - arg.tpe match { - case ambi: AmbiguousImplicits => - propagatedFailure(args1) match { - case NoType | (_: AmbiguousImplicits) => ambi - case failed => failed - } - case failed: SearchFailureType => failed - case _ => propagatedFailure(args1) - } - case Nil => NoType - } - - val propFail = propagatedFailure(args) - - def issueErrors(): Tree = { - def paramSymWithMethodTree(paramName: TermName) = - if tree.symbol.exists then - tree.symbol.paramSymss.flatten - .map(sym => sym.name -> sym) - .toMap - .get(paramName) - .map((_, tree)) - else - None + /** Reports errors for arguments of `appTree` that have a + * `SearchFailureType`. 
+ */ + def issueErrors(fun: Tree, args: List[Tree]): Tree = + // Prefer other errors over ambiguities. If nested in outer searches a missing + // implicit can be healed by simply dropping this alternative and trying something + // else. But an ambiguity is sticky and propagates outwards. If we have both + // a missing implicit on one argument and an ambiguity on another the whole + // branch should be classified as a missing implicit. + val firstNonAmbiguous = args.tpes.find(tp => tp.isError && !tp.isInstanceOf[AmbiguousImplicits]) + def firstError = args.tpes.find(_.isInstanceOf[SearchFailureType]).getOrElse(NoType) + def firstFailure = firstNonAmbiguous.getOrElse(firstError) + val errorType = + firstFailure match + case tp: AmbiguousImplicits => + AmbiguousImplicits(tp.alt1, tp.alt2, tp.expectedType, tp.argument, nested = true) + case tp => + tp + val res = untpd.Apply(fun, args).withType(errorType) wtp.paramNames.lazyZip(wtp.paramInfos).lazyZip(args).foreach { (paramName, formal, arg) => - arg.tpe match { + arg.tpe match case failure: SearchFailureType => + val methodStr = err.refStr(methPart(fun).tpe) + val paramStr = implicitParamString(paramName, methodStr, fun) + val paramSym = fun.symbol.paramSymss.flatten.find(_.name == paramName) + val paramSymWithMethodCallTree = paramSym.map((_, res)) report.error( - missingArgMsg(arg, formal, implicitParamString(paramName, methodStr, tree), paramSymWithMethodTree(paramName)), - tree.srcPos.endPos - ) + missingArgMsg(arg, formal, paramStr, paramSymWithMethodCallTree), + tree.srcPos.endPos + ) case _ => - } } - untpd.Apply(tree, args).withType(propFail) - } - if (propFail.exists) { + res + + val args = implicitArgs(wtp.paramInfos, 0, pt) + if (args.tpes.exists(_.isInstanceOf[SearchFailureType])) { // If there are several arguments, some arguments might already // have influenced the context, binding variables, but later ones // might fail. 
In that case the constraint and instantiated variables @@ -3923,28 +4150,46 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // If method has default params, fall back to regular application // where all inferred implicits are passed as named args. - if hasDefaultParams && !propFail.isInstanceOf[AmbiguousImplicits] then - val namedArgs = wtp.paramNames.lazyZip(args).flatMap { (pname, arg) => - if (arg.tpe.isError) Nil else untpd.NamedArg(pname, untpd.TypedSplice(arg)) :: Nil - } + if hasDefaultParams then + // Only keep the arguments that don't have an error type, or that + // have an `AmbiguousImplicits` error type. The later ensures that a + // default argument can't override an ambiguous implicit. See tests + // `given-ambiguous-default*` and `19414*`. + val namedArgs = + wtp.paramNames.lazyZip(args) + .filter((_, arg) => !arg.tpe.isError || arg.tpe.isInstanceOf[AmbiguousImplicits]) + .map((pname, arg) => untpd.NamedArg(pname, untpd.TypedSplice(arg))) + val app = cpy.Apply(tree)(untpd.TypedSplice(tree), namedArgs) val needsUsing = wtp.isContextualMethod || wtp.match case MethodType(ContextBoundParamName(_) :: _) => sourceVersion.isAtLeast(`3.4`) case _ => false if needsUsing then app.setApplyKind(ApplyKind.Using) typr.println(i"try with default implicit args $app") - typed(app, pt, locked) - else issueErrors() + val retyped = typed(app, pt, locked) + + // If the retyped tree still has an error type and is an `Apply` + // node, we can report the errors for each argument nicely. + // Otherwise, we don't report anything here. 
+ retyped match + case Apply(tree, args) if retyped.tpe.isError => issueErrors(tree, args) + case _ => retyped + else issueErrors(tree, args) } - else tree match { - case tree: Block => - readaptSimplified(tpd.Block(tree.stats, tpd.Apply(tree.expr, args))) - case tree: NamedArg => - readaptSimplified(tpd.NamedArg(tree.name, tpd.Apply(tree.arg, args))) - case _ => - readaptSimplified(tpd.Apply(tree, args)) - } - } + else + inContext(origCtx): + // Reset context in case it was set to a supercall context before. + // otherwise the invariant for taking another this or super call context is not met. + // Test case is i20483.scala + tree match + case tree: Block => + readaptSimplified(tpd.Block(tree.stats, tpd.Apply(tree.expr, args))) + case tree: NamedArg => + readaptSimplified(tpd.NamedArg(tree.name, tpd.Apply(tree.arg, args))) + case _ => + readaptSimplified(tpd.Apply(tree, args)) + end addImplicitArgs + pt.revealIgnored match { case pt: FunProto if pt.applyKind == ApplyKind.Using => // We can end up here if extension methods are called with explicit given arguments. 
@@ -4047,10 +4292,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer true } + def isRetainsArg(pt: Type) = pt match + case AnnotatedType(arg, annot) => annot.symbol == defn.RetainsArgAnnot + case _ => false + if (implicitFun || caseCompanion) && !isApplyProto(pt) && pt != SingletonTypeProto && pt != LhsProto + && !isRetainsArg(pt) && !ctx.mode.is(Mode.Pattern) && !tree.isInstanceOf[SplicePattern] && !ctx.isAfterTyper @@ -4200,7 +4450,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def adaptToSubType(wtp: Type): Tree = // try converting a constant to the target type - ConstFold(tree).tpe.widenTermRefExpr.normalized match + tree.tpe.widenTermRefExpr.normalized match case ConstantType(x) => val converted = x.convertTo(pt) if converted != null && (converted ne x) then @@ -4270,7 +4520,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _: SelectionProto => tree // adaptations for selections are handled in typedSelect case _ if ctx.mode.is(Mode.ImplicitsEnabled) && tree.tpe.isValueType => - if pt.isRef(defn.AnyValClass, skipRefined = false) + if tree.tpe.widen.isNamedTupleType && pt.derivesFrom(defn.TupleClass) then + readapt(typed(untpd.Select(untpd.TypedSplice(tree), nme.toTuple))) + else if pt.isRef(defn.AnyValClass, skipRefined = false) || pt.isRef(defn.ObjectClass, skipRefined = false) then recover(TooUnspecific(pt)) @@ -4322,6 +4574,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * tree that went unreported. A scenario where this happens is i1802.scala. 
*/ def ensureReported(tp: Type) = tp match { + case err: PreviousErrorType if ctx.usedBestEffortTasty => // do nothing if error was already reported in previous compilation case err: ErrorType if !ctx.reporter.errorsReported => report.error(err.msg, tree.srcPos) case _ => } @@ -4342,7 +4595,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cpy.Ident(qual)(qual.symbol.name.sourceModuleName.toTypeName) case _ => errorTree(tree, em"cannot convert from $tree to an instance creation expression") - val tycon = ctorResultType.underlyingClassRef(refinementOK = false) + val tycon = ctorResultType.underlyingClassRef(refinementOK = Feature.enabled(modularity)) typed( untpd.Select( untpd.New(untpd.TypedSplice(tpt.withType(tycon))), @@ -4381,7 +4634,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case poly: PolyType if !(ctx.mode is Mode.Type) && dummyTreeOfType.unapply(tree).isEmpty => // If we are in a conversion from a TermRef with polymorphic underlying - // type, give up. In this case the typed `null` literal cannot be instantiated. + // type, give up. In this case the typed `null` literal cannot be instantiated. // Test case was but i18695.scala, but it got fixed by a different tweak in #18719. // We leave test for this condition in as a defensive measure in case // it arises somewhere else. 
diff --git a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala index f7afc7a7e0a7..0c63f5b4ecb1 100644 --- a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala +++ b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala @@ -59,7 +59,7 @@ class TyperPhase(addRootImports: Boolean = true) extends Phase { } protected def discardAfterTyper(unit: CompilationUnit)(using Context): Boolean = - (unit.isJava && !ctx.settings.YjavaTasty.value) || unit.suspended + (unit.isJava && !ctx.settings.XjavaTasty.value) || unit.suspended override val subPhases: List[SubPhase] = List( SubPhase("indexing"), SubPhase("typechecking"), SubPhase("checkingJava")) diff --git a/compiler/src/dotty/tools/dotc/util/EnumFlags.scala b/compiler/src/dotty/tools/dotc/util/EnumFlags.scala new file mode 100644 index 000000000000..a833af7632de --- /dev/null +++ b/compiler/src/dotty/tools/dotc/util/EnumFlags.scala @@ -0,0 +1,14 @@ +package dotty.tools.dotc.util + +object EnumFlags: + + opaque type FlagSet[E <: reflect.Enum] = Int + + object FlagSet: + + extension [E <: reflect.Enum](set: FlagSet[E]) + def is(flag: E): Boolean = (set & (1 << flag.ordinal)) != 0 + def |(flag: E): FlagSet[E] = (set | (1 << flag.ordinal)) + + def empty[E <: reflect.Enum]: FlagSet[E] = + 0 diff --git a/compiler/src/dotty/tools/dotc/util/Signatures.scala b/compiler/src/dotty/tools/dotc/util/Signatures.scala index 0bd407261125..3f7d7dd39531 100644 --- a/compiler/src/dotty/tools/dotc/util/Signatures.scala +++ b/compiler/src/dotty/tools/dotc/util/Signatures.scala @@ -196,7 +196,8 @@ object Signatures { fun: tpd.Tree, isTypeApply: Boolean = false )(using Context): (Int, Int, List[Signature]) = - def treeQualifier(tree: tpd.Tree): tpd.Tree = tree match + def treeQualifier(tree: tpd.Tree): tpd.Tree = + tree match case Apply(qual, _) => treeQualifier(qual) case TypeApply(qual, _) => treeQualifier(qual) case AppliedTypeTree(qual, _) => treeQualifier(qual) @@ -247,7 +248,9 @@ 
object Signatures { val alternativeSignatures = alternativesWithTypes .flatMap(toApplySignature(_, findOutermostCurriedApply(untpdPath), safeParamssListIndex)) - val finalParamIndex = currentParamsIndex + previousArgs + val finalParamIndex = + if currentParamsIndex == -1 then -1 + else previousArgs + currentParamsIndex (finalParamIndex, alternativeIndex, alternativeSignatures) else (0, 0, Nil) @@ -495,8 +498,8 @@ object Signatures { case res => List(tpe) def isSyntheticEvidence(name: String) = - if !name.startsWith(NameKinds.ContextBoundParamName.separator) then false else - symbol.paramSymss.flatten.find(_.name.show == name).exists(_.flags.is(Flags.Implicit)) + name.startsWith(NameKinds.ContextBoundParamName.separator) + && symbol.paramSymss.flatten.find(_.name.show == name).exists(_.flags.is(Flags.Implicit)) def toTypeParam(tpe: PolyType): List[Param] = val evidenceParams = (tpe.paramNamess.flatten zip tpe.paramInfoss.flatten).flatMap: diff --git a/compiler/src/dotty/tools/io/AbstractFile.scala b/compiler/src/dotty/tools/io/AbstractFile.scala index 243dc2953d2e..233b1ca8fb62 100644 --- a/compiler/src/dotty/tools/io/AbstractFile.scala +++ b/compiler/src/dotty/tools/io/AbstractFile.scala @@ -97,11 +97,16 @@ abstract class AbstractFile extends Iterable[AbstractFile] { /** Returns the path of this abstract file in a canonical form. */ def canonicalPath: String = if (jpath == null) path else jpath.normalize.toString - /** Checks extension case insensitively. TODO: change to enum */ - def hasExtension(other: String): Boolean = extension == other.toLowerCase + /** Checks extension case insensitively. */ + @deprecated("prefer queries on ext") + def hasExtension(other: String): Boolean = ext.toLowerCase.equalsIgnoreCase(other) - /** Returns the extension of this abstract file. TODO: store as an enum to avoid costly comparisons */ - val extension: String = Path.extension(name) + /** Returns the extension of this abstract file. 
*/ + val ext: FileExtension = Path.fileExtension(name) + + /** Returns the extension of this abstract file as a String. */ + @deprecated("use ext instead.") + def extension: String = ext.toLowerCase /** The absolute file, if this is a relative file. */ def absolute: AbstractFile @@ -129,7 +134,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] { } /** Does this abstract file represent something which can contain classfiles? */ - def isClassContainer: Boolean = isDirectory || (jpath != null && (extension == "jar" || extension == "zip")) + def isClassContainer: Boolean = isDirectory || (jpath != null && ext.isJarOrZip) /** Create a file on disk, if one does not exist already. */ def create(): Unit @@ -258,8 +263,8 @@ abstract class AbstractFile extends Iterable[AbstractFile] { final def resolveSibling(name: String): AbstractFile | Null = container.lookupName(name, directory = false) - final def resolveSiblingWithExtension(extension: String): AbstractFile | Null = - resolveSibling(name.stripSuffix(this.extension) + extension) + final def resolveSiblingWithExtension(extension: FileExtension): AbstractFile | Null = + resolveSibling(Path.fileName(name) + "." + extension) private def fileOrSubdirectoryNamed(name: String, isDir: Boolean): AbstractFile = lookupName(name, isDir) match { diff --git a/compiler/src/dotty/tools/io/ClassPath.scala b/compiler/src/dotty/tools/io/ClassPath.scala index 5344e2cf7e35..01a3f2cc1870 100644 --- a/compiler/src/dotty/tools/io/ClassPath.scala +++ b/compiler/src/dotty/tools/io/ClassPath.scala @@ -56,32 +56,8 @@ trait ClassPath { private[dotty] def list(inPackage: PackageName): ClassPathEntries /** - * Returns the class file and / or source file for a given external name, e.g., "java.lang.String". - * If there is both a class file and source file, the compiler can decide whether to read the - * class file or compile the source file. - * - * Internally this seems to be used only by `ScriptRunner`, but only to call `.isDefined`. 
That - * could probably be implemented differently. - * - * Externally, it is used by sbt's compiler interface: - * https://github.com/sbt/sbt/blob/v0.13.15/compile/interface/src/main/scala/xsbt/CompilerInterface.scala#L249 - * Jason has some improvements for that in the works (https://github.com/scala/bug/issues/10289#issuecomment-310022699) - */ - def findClass(className: String): Option[ClassRepresentation] = { - // A default implementation which should be overridden, if we can create the more efficient - // solution for a given type of ClassPath - val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) - - val packageName = PackageName(pkg) - val foundClassFromClassFiles = classes(packageName).find(_.name == simpleClassName) - def findClassInSources = sources(packageName).find(_.name == simpleClassName) - - foundClassFromClassFiles orElse findClassInSources - } - - /** - * Returns the classfile for an external name, e.g., "java.lang.String". This method does not - * return source files. + * Returns *only* the classfile for an external name, e.g., "java.lang.String". This method does not + * return source files, tasty files,. * * This method is used by the classfile parser. When parsing a Java class, its own inner classes * are entered with a `ClassfileLoader` that parses the classfile returned by this method. 
@@ -176,13 +152,18 @@ object ClassPath { val baseDir = file.parent new Jar(file).classPathElements map (elem => - specToURL(elem) getOrElse (baseDir / elem).toURL + specToURL(elem, baseDir) getOrElse (baseDir / elem).toURL ) } - def specToURL(spec: String): Option[URL] = - try Some(new URI(spec).toURL) - catch case _: MalformedURLException | _: URISyntaxException => None + def specToURL(spec: String, basedir: Directory): Option[URL] = + try + val uri = new URI(spec) + if uri.isAbsolute() then Some(uri.toURL()) + else + Some(basedir.resolve(Path(spec)).toURL) + catch + case _: MalformedURLException | _: URISyntaxException => None def manifests: List[java.net.URL] = { import scala.jdk.CollectionConverters.EnumerationHasAsScala diff --git a/compiler/src/dotty/tools/io/File.scala b/compiler/src/dotty/tools/io/File.scala index 22a0e04b2b48..59e4a2ee451b 100644 --- a/compiler/src/dotty/tools/io/File.scala +++ b/compiler/src/dotty/tools/io/File.scala @@ -39,8 +39,6 @@ object File { */ class File(jpath: JPath)(implicit constructorCodec: Codec) extends Path(jpath) with Streamable.Chars { override val creationCodec: io.Codec = constructorCodec - - override def addExtension(ext: String): File = super.addExtension(ext).toFile override def toAbsolute: File = if (isAbsolute) this else super.toAbsolute.toFile override def toDirectory: Directory = new Directory(jpath) override def toFile: File = this diff --git a/compiler/src/dotty/tools/io/FileExtension.scala b/compiler/src/dotty/tools/io/FileExtension.scala new file mode 100644 index 000000000000..3aeef5b902ce --- /dev/null +++ b/compiler/src/dotty/tools/io/FileExtension.scala @@ -0,0 +1,84 @@ +package dotty.tools.io + +import dotty.tools.uncheckedNN +import dotty.tools.dotc.util.EnumFlags.FlagSet + +enum FileExtension(val toLowerCase: String): + case Tasty extends FileExtension("tasty") + case Betasty extends FileExtension("betasty") + case Class extends FileExtension("class") + case Jar extends FileExtension("jar") + case 
Scala extends FileExtension("scala") + case ScalaScript extends FileExtension("sc") + case Java extends FileExtension("java") + case Zip extends FileExtension("zip") + case Inc extends FileExtension("inc") + case Empty extends FileExtension("") + + /** Fallback extension */ + case External(override val toLowerCase: String) extends FileExtension(toLowerCase) + + /** represents an empty file extension. */ + def isEmpty: Boolean = this == Empty + + override def toString: String = toLowerCase + + /** represents `".tasty"` */ + def isTasty = this == Tasty + /** represents `".betasty"` */ + def isBetasty = this == Betasty + /** represents `".class"` */ + def isClass = this == Class + /** represents `".scala"` */ + def isScala = this == Scala + /** represents `".sc"` */ + def isScalaScript = this == ScalaScript + /** represents `".java"` */ + def isJava = this == Java + /** represents `".jar"` */ + def isJar: Boolean = this == Jar + /** represents `".zip"` */ + def isZip: Boolean = this == Zip + /** represents `".jar"` or `".zip"` */ + def isJarOrZip: Boolean = FileExtension.JarOrZip.is(this) + /** represents `".scala"` or `".java"` */ + def isScalaOrJava: Boolean = FileExtension.ScalaOrJava.is(this) + /** represents `".java"` or `.tasty` */ + def isJavaOrTasty: Boolean = FileExtension.JavaOrTasty.is(this) + +object FileExtension: + + private val JarOrZip: FlagSet[FileExtension] = FlagSet.empty | Zip | Jar + private val ScalaOrJava: FlagSet[FileExtension] = FlagSet.empty | Scala | Java + private val JavaOrTasty: FlagSet[FileExtension] = FlagSet.empty | Java | Tasty + + // this will be optimised to a single hashcode + equality check, and then fallback to slowLookup, + // keep in sync with slowLookup. 
+ private def initialLookup(s: String): FileExtension = s match + case "tasty" => Tasty + case "class" => Class + case "jar" => Jar + case "scala" => Scala + case "sc" => ScalaScript + case "java" => Java + case "zip" => Zip + case "inc" => Inc + case "betasty" => Betasty + case _ => slowLookup(s) + + // slower than initialLookup, keep in sync with initialLookup + private def slowLookup(s: String): FileExtension = + if s.equalsIgnoreCase("tasty") then Tasty + else if s.equalsIgnoreCase("class") then Class + else if s.equalsIgnoreCase("jar") then Jar + else if s.equalsIgnoreCase("scala") then Scala + else if s.equalsIgnoreCase("sc") then ScalaScript + else if s.equalsIgnoreCase("java") then Java + else if s.equalsIgnoreCase("zip") then Zip + else if s.equalsIgnoreCase("inc") then Inc + else if s.equalsIgnoreCase("betasty") then Betasty + else External(s) + + def from(s: String): FileExtension = + if s.isEmpty then Empty + else initialLookup(s) diff --git a/compiler/src/dotty/tools/io/FileWriters.scala b/compiler/src/dotty/tools/io/FileWriters.scala index 4f03194fa4ce..b6338082c696 100644 --- a/compiler/src/dotty/tools/io/FileWriters.scala +++ b/compiler/src/dotty/tools/io/FileWriters.scala @@ -1,8 +1,7 @@ package dotty.tools.io -import dotty.tools.dotc.core.Contexts.* -import dotty.tools.dotc.core.Decorators.em -import dotty.tools.dotc.report +import scala.language.unsafeNulls + import dotty.tools.io.AbstractFile import dotty.tools.io.JarArchive import dotty.tools.io.PlainFile @@ -25,13 +24,148 @@ import java.util.zip.CRC32 import java.util.zip.Deflater import java.util.zip.ZipEntry import java.util.zip.ZipOutputStream -import scala.language.unsafeNulls +import scala.collection.mutable + +import dotty.tools.dotc.core.Contexts, Contexts.Context +import dotty.tools.dotc.core.Decorators.em + +import dotty.tools.dotc.util.{SourcePosition, NoSourcePosition} + +import dotty.tools.dotc.reporting.Message +import dotty.tools.dotc.report -/** Copied from 
`dotty.tools.backend.jvm.ClassfileWriters` but no `PostProcessorFrontendAccess` needed */ +import dotty.tools.backend.jvm.PostProcessorFrontendAccess.BackendReporting +import scala.annotation.constructorOnly +import java.util.concurrent.atomic.AtomicReference +import java.util.concurrent.atomic.AtomicBoolean +import java.util.ConcurrentModificationException + +/** !!!Copied from `dotty.tools.backend.jvm.ClassfileWriters` but no `PostProcessorFrontendAccess` needed. + * this should probably be changed to wrap that class instead. + * + * Until then, any changes to this file should be copied to `dotty.tools.backend.jvm.ClassfileWriters` as well. + */ object FileWriters { type InternalName = String type NullableFile = AbstractFile | Null + inline def ctx(using ReadOnlyContext): ReadOnlyContext = summon[ReadOnlyContext] + + sealed trait DelayedReporter { + def hasErrors: Boolean + def error(message: Context ?=> Message, position: SourcePosition): Unit + def warning(message: Context ?=> Message, position: SourcePosition): Unit + def log(message: String): Unit + + final def toBuffered: Option[BufferingReporter] = this match + case buffered: BufferingReporter => + if buffered.hasReports then Some(buffered) else None + case _: EagerReporter => None + + def error(message: Context ?=> Message): Unit = error(message, NoSourcePosition) + def warning(message: Context ?=> Message): Unit = warning(message, NoSourcePosition) + final def exception(reason: Context ?=> Message, throwable: Throwable): Unit = + error({ + val trace = throwable.getStackTrace().nn.mkString("\n ") + em"An unhandled exception was thrown in the compiler while\n ${reason.message}.\n${throwable}\n $trace" + }, NoSourcePosition) + } + + final class EagerReporter(using captured: Context) extends DelayedReporter: + private var _hasErrors = false + + def hasErrors: Boolean = _hasErrors + + def error(message: Context ?=> Message, position: SourcePosition): Unit = + report.error(message, position) + _hasErrors = true 
+ + def warning(message: Context ?=> Message, position: SourcePosition): Unit = + report.warning(message, position) + + def log(message: String): Unit = report.echo(message) + + final class BufferingReporter extends DelayedReporter { + // We optimise access to the buffered reports for the common case - that there are no warning/errors to report + // We could use a listBuffer etc - but that would be extra allocation in the common case + // buffered logs are updated atomically. + + private val _bufferedReports = AtomicReference(List.empty[Report]) + private val _hasErrors = AtomicBoolean(false) + + enum Report(val relay: Context ?=> BackendReporting => Unit): + case Error(message: Context => Message, position: SourcePosition) extends Report(ctx ?=> _.error(message(ctx), position)) + case Warning(message: Context => Message, position: SourcePosition) extends Report(ctx ?=> _.warning(message(ctx), position)) + case Log(message: String) extends Report(_.log(message)) + + /** Atomically record that an error occurred */ + private def recordError(): Unit = + _hasErrors.set(true) + + /** Atomically add a report to the log */ + private def recordReport(report: Report): Unit = + _bufferedReports.getAndUpdate(report :: _) + + /** atomically extract and clear the buffered reports, must only be called at a synchonization point. 
*/ + private def resetReports(): List[Report] = + val curr = _bufferedReports.get() + if curr.nonEmpty && !_bufferedReports.compareAndSet(curr, Nil) then + throw ConcurrentModificationException("concurrent modification of buffered reports") + else curr + + def hasErrors: Boolean = _hasErrors.get() + def hasReports: Boolean = _bufferedReports.get().nonEmpty + + def error(message: Context ?=> Message, position: SourcePosition): Unit = + recordReport(Report.Error({case given Context => message}, position)) + recordError() + + def warning(message: Context ?=> Message, position: SourcePosition): Unit = + recordReport(Report.Warning({case given Context => message}, position)) + + def log(message: String): Unit = + recordReport(Report.Log(message)) + + /** Should only be called from main compiler thread. */ + def relayReports(toReporting: BackendReporting)(using Context): Unit = + val reports = resetReports() + if reports.nonEmpty then + reports.reverse.foreach(_.relay(toReporting)) + } + + trait ReadOnlySettings: + def jarCompressionLevel: Int + def debug: Boolean + + trait ReadOnlyRun: + def suspendedAtTyperPhase: Boolean + + trait ReadOnlyContext: + val run: ReadOnlyRun + val settings: ReadOnlySettings + val reporter: DelayedReporter + + trait BufferedReadOnlyContext extends ReadOnlyContext: + val reporter: BufferingReporter + + object ReadOnlyContext: + def readSettings(using ctx: Context): ReadOnlySettings = new: + val jarCompressionLevel = ctx.settings.XjarCompressionLevel.value + val debug = ctx.settings.Ydebug.value + + def readRun(using ctx: Context): ReadOnlyRun = new: + val suspendedAtTyperPhase = ctx.run.suspendedAtTyperPhase + + def buffered(using Context): BufferedReadOnlyContext = new: + val settings = readSettings + val reporter = BufferingReporter() + val run = readRun + + def eager(using Context): ReadOnlyContext = new: + val settings = readSettings + val reporter = EagerReporter() + val run = readRun + /** * The interface to writing classfiles. 
GeneratedClassHandler calls these methods to generate the * directory and files that are created, and eventually calls `close` when the writing is complete. @@ -47,7 +181,7 @@ object FileWriters { * * @param name the internal name of the class, e.g. "scala.Option" */ - def writeTasty(name: InternalName, bytes: Array[Byte])(using Context): NullableFile + def writeTasty(name: InternalName, bytes: Array[Byte])(using ReadOnlyContext): NullableFile /** * Close the writer. Behavior is undefined after a call to `close`. @@ -60,7 +194,7 @@ object FileWriters { object TastyWriter { - def apply(output: AbstractFile)(using Context): TastyWriter = { + def apply(output: AbstractFile)(using ReadOnlyContext): TastyWriter = { // In Scala 2 depenening on cardinality of distinct output dirs MultiClassWriter could have been used // In Dotty we always use single output directory @@ -73,7 +207,7 @@ object FileWriters { private final class SingleTastyWriter(underlying: FileWriter) extends TastyWriter { - override def writeTasty(className: InternalName, bytes: Array[Byte])(using Context): NullableFile = { + override def writeTasty(className: InternalName, bytes: Array[Byte])(using ReadOnlyContext): NullableFile = { underlying.writeFile(classToRelativePath(className), bytes) } @@ -83,14 +217,14 @@ object FileWriters { } sealed trait FileWriter { - def writeFile(relativePath: String, bytes: Array[Byte])(using Context): NullableFile + def writeFile(relativePath: String, bytes: Array[Byte])(using ReadOnlyContext): NullableFile def close(): Unit } object FileWriter { - def apply(file: AbstractFile, jarManifestMainClass: Option[String])(using Context): FileWriter = + def apply(file: AbstractFile, jarManifestMainClass: Option[String])(using ReadOnlyContext): FileWriter = if (file.isInstanceOf[JarArchive]) { - val jarCompressionLevel = ctx.settings.YjarCompressionLevel.value + val jarCompressionLevel = ctx.settings.jarCompressionLevel // Writing to non-empty JAR might be an undefined behaviour, 
e.g. in case if other files where // created using `AbstractFile.bufferedOutputStream`instead of JarWritter val jarFile = file.underlyingSource.getOrElse{ @@ -127,7 +261,7 @@ object FileWriters { lazy val crc = new CRC32 - override def writeFile(relativePath: String, bytes: Array[Byte])(using Context): NullableFile = this.synchronized { + override def writeFile(relativePath: String, bytes: Array[Byte])(using ReadOnlyContext): NullableFile = this.synchronized { val entry = new ZipEntry(relativePath) if (storeOnly) { // When using compression method `STORED`, the ZIP spec requires the CRC and compressed/ @@ -155,14 +289,14 @@ object FileWriters { val noAttributes = Array.empty[FileAttribute[?]] private val isWindows = scala.util.Properties.isWin - private def checkName(component: Path)(using Context): Unit = if (isWindows) { + private def checkName(component: Path)(using ReadOnlyContext): Unit = if (isWindows) { val specials = raw"(?i)CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9]".r val name = component.toString - def warnSpecial(): Unit = report.warning(em"path component is special Windows device: ${name}") + def warnSpecial(): Unit = ctx.reporter.warning(em"path component is special Windows device: ${name}") specials.findPrefixOf(name).foreach(prefix => if (prefix.length == name.length || name(prefix.length) == '.') warnSpecial()) } - def ensureDirForPath(baseDir: Path, filePath: Path)(using Context): Unit = { + def ensureDirForPath(baseDir: Path, filePath: Path)(using ReadOnlyContext): Unit = { import java.lang.Boolean.TRUE val parent = filePath.getParent if (!builtPaths.containsKey(parent)) { @@ -192,7 +326,7 @@ object FileWriters { private val fastOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE) private val fallbackOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING) - override def writeFile(relativePath: String, bytes: Array[Byte])(using Context): NullableFile = { + 
override def writeFile(relativePath: String, bytes: Array[Byte])(using ReadOnlyContext): NullableFile = { val path = base.resolve(relativePath) try { ensureDirForPath(base, path) @@ -213,10 +347,10 @@ object FileWriters { os.close() } catch { case e: FileConflictException => - report.error(em"error writing ${path.toString}: ${e.getMessage}") + ctx.reporter.error(em"error writing ${path.toString}: ${e.getMessage}") case e: java.nio.file.FileSystemException => - if (ctx.settings.Ydebug.value) e.printStackTrace() - report.error(em"error writing ${path.toString}: ${e.getClass.getName} ${e.getMessage}") + if (ctx.settings.debug) e.printStackTrace() + ctx.reporter.error(em"error writing ${path.toString}: ${e.getClass.getName} ${e.getMessage}") } AbstractFile.getFile(path) } @@ -241,7 +375,7 @@ object FileWriters { finally out.close() } - override def writeFile(relativePath: String, bytes: Array[Byte])(using Context):NullableFile = { + override def writeFile(relativePath: String, bytes: Array[Byte])(using ReadOnlyContext):NullableFile = { val outFile = getFile(base, relativePath) writeBytes(outFile, bytes) outFile diff --git a/compiler/src/dotty/tools/io/Jar.scala b/compiler/src/dotty/tools/io/Jar.scala index 3e65d2f7635d..dd33b1229610 100644 --- a/compiler/src/dotty/tools/io/Jar.scala +++ b/compiler/src/dotty/tools/io/Jar.scala @@ -165,7 +165,7 @@ object Jar { def isJarOrZip(f: Path): Boolean = isJarOrZip(f, true) def isJarOrZip(f: Path, examineFile: Boolean): Boolean = - f.hasExtension("zip", "jar") || (examineFile && magicNumberIsZip(f)) + f.ext.isJarOrZip || (examineFile && magicNumberIsZip(f)) def create(file: File, sourceDir: Directory, mainClass: String): Unit = { val writer = new Jar(file).jarWriter(Name.MAIN_CLASS -> mainClass) diff --git a/compiler/src/dotty/tools/io/JarArchive.scala b/compiler/src/dotty/tools/io/JarArchive.scala index f42f68e745ed..728f89966af0 100644 --- a/compiler/src/dotty/tools/io/JarArchive.scala +++ 
b/compiler/src/dotty/tools/io/JarArchive.scala @@ -11,7 +11,7 @@ import scala.jdk.CollectionConverters.* * that be can used as the compiler's output directory. */ class JarArchive private (root: Directory) extends PlainDirectory(root) { - def close(): Unit = jpath.getFileSystem().close() + def close(): Unit = this.synchronized(jpath.getFileSystem().close()) override def exists: Boolean = jpath.getFileSystem().isOpen() && super.exists def allFileNames(): Iterator[String] = java.nio.file.Files.walk(jpath).iterator().asScala.map(_.toString) @@ -20,14 +20,14 @@ class JarArchive private (root: Directory) extends PlainDirectory(root) { object JarArchive { /** Create a new jar file. Overwrite if file already exists */ def create(path: Path): JarArchive = { - require(path.extension == "jar") + require(path.ext.isJar) path.delete() open(path, create = true) } /** Create a jar file. */ def open(path: Path, create: Boolean = false): JarArchive = { - require(path.extension == "jar") + require(path.ext.isJar) // creating a new zip file system by using the JAR URL syntax: // https://docs.oracle.com/javase/7/docs/technotes/guides/io/fsp/zipfilesystemprovider.html diff --git a/compiler/src/dotty/tools/io/Path.scala b/compiler/src/dotty/tools/io/Path.scala index c8420c5e381d..6f97e03ca4d7 100644 --- a/compiler/src/dotty/tools/io/Path.scala +++ b/compiler/src/dotty/tools/io/Path.scala @@ -33,17 +33,20 @@ import scala.util.Random.alphanumeric */ object Path { def isExtensionJarOrZip(jpath: JPath): Boolean = isExtensionJarOrZip(jpath.getFileName.toString) - def isExtensionJarOrZip(name: String): Boolean = { - val ext = extension(name) - ext == "jar" || ext == "zip" + def isExtensionJarOrZip(name: String): Boolean = fileExtension(name).isJarOrZip + def fileExtension(name: String): FileExtension = { + val i = name.lastIndexOf('.') + if (i < 0) FileExtension.Empty + else FileExtension.from(name.substring(i + 1)) } - def extension(name: String): String = { - var i = name.length - 1 - 
while (i >= 0 && name.charAt(i) != '.') - i -= 1 + @deprecated("use fileExtension instead.") + def extension(name: String): String = fileExtension(name).toLowerCase - if (i < 0) "" - else name.substring(i + 1).toLowerCase + /** strip anything after and including trailing the extension */ + def fileName(name: String): String = { + val i = name.lastIndexOf('.') + if (i < 0) name + else name.substring(0, i).nn } def onlyDirs(xs: Iterator[Path]): Iterator[Directory] = xs.filter(_.isDirectory).map(_.toDirectory) @@ -160,22 +163,36 @@ class Path private[io] (val jpath: JPath) { val p = parent if (p isSame this) Nil else p :: p.parents } + + def ext: FileExtension = Path.fileExtension(name) + // if name ends with an extension (e.g. "foo.jpg") returns the extension ("jpg"), otherwise "" - def extension: String = Path.extension(name) + @deprecated("use ext instead.") + def extension: String = ext.toLowerCase + // compares against extensions in a CASE INSENSITIVE way. + @deprecated("consider using queries on ext instead.") def hasExtension(ext: String, exts: String*): Boolean = { - val lower = extension.toLowerCase - ext.toLowerCase == lower || exts.exists(_.toLowerCase == lower) + val lower = ext.toLowerCase + lower.equalsIgnoreCase(ext) || exts.exists(lower.equalsIgnoreCase) } // returns the filename without the extension. - def stripExtension: String = name stripSuffix ("." + extension) + def stripExtension: String = Path.fileName(name) // returns the Path with the extension. def addExtension(ext: String): Path = new Path(jpath.resolveSibling(name + ext)) + + // changes the existing extension out for a new one, or adds it + // if the current path has none. + def changeExtension(ext: FileExtension): Path = + changeExtension(ext.toLowerCase) + // changes the existing extension out for a new one, or adds it // if the current path has none. 
def changeExtension(ext: String): Path = - if (extension == "") addExtension(ext) - else new Path(jpath.resolveSibling(stripExtension + "." + ext)) + val name0 = name + val dropExtension = Path.fileName(name0) + if dropExtension eq name0 then addExtension(ext) + else new Path(jpath.resolveSibling(dropExtension + "." + ext)) // conditionally execute def ifFile[T](f: File => T): Option[T] = if (isFile) Some(f(toFile)) else None diff --git a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala index 0d64c88d9228..5bdf175c522c 100644 --- a/compiler/src/dotty/tools/repl/ReplDriver.scala +++ b/compiler/src/dotty/tools/repl/ReplDriver.scala @@ -82,8 +82,8 @@ class ReplDriver(settings: Array[String], /** Create a fresh and initialized context with IDE mode enabled */ private def initialCtx(settings: List[String]) = { val rootCtx = initCtx.fresh.addMode(Mode.ReadPositions | Mode.Interactive) - rootCtx.setSetting(rootCtx.settings.YcookComments, true) - rootCtx.setSetting(rootCtx.settings.YreadComments, true) + rootCtx.setSetting(rootCtx.settings.XcookComments, true) + rootCtx.setSetting(rootCtx.settings.XreadComments, true) setupRootCtx(this.settings ++ settings, rootCtx) } @@ -559,6 +559,7 @@ class ReplDriver(settings: Array[String], private object ReplConsoleReporter extends ConsoleReporter.AbstractConsoleReporter { override def posFileStr(pos: SourcePosition) = "" // omit file paths override def printMessage(msg: String): Unit = out.println(msg) + override def echoMessage(msg: String): Unit = printMessage(msg) override def flush()(using Context): Unit = out.flush() } diff --git a/compiler/src/dotty/tools/tasty/besteffort/BestEffortTastyFormat.scala b/compiler/src/dotty/tools/tasty/besteffort/BestEffortTastyFormat.scala new file mode 100644 index 000000000000..99a24ce5f346 --- /dev/null +++ b/compiler/src/dotty/tools/tasty/besteffort/BestEffortTastyFormat.scala @@ -0,0 +1,45 @@ +package dotty.tools.tasty.besteffort + +import 
dotty.tools.tasty.TastyFormat + +/************************************************************************************* +Best Effort TASTy (.betasty) format extends the TASTy grammar with additional +terminal symbols and productions. Grammar notation is kept from the regular TASTy. +However, the lowercase prefixes describing the semantics (but otherwise not affecting +the grammar) may not always hold. + +The following are the added terminal Symbols to the grammar: + * `ERRORtype` - representing an error from a previous compilation + +The following are the added productions to the grammar: + + Standard-Section: "ASTs" +```none + Type = ERRORtype + Path = ERRORtype +``` +**************************************************************************************/ +object BestEffortTastyFormat { + export TastyFormat.{astTagToString => _, *} + + /** First four bytes of a best effort TASTy file, used instead of the regular header. + * Signifies that the TASTy can only be consumed by the compiler in the best effort mode. + * Other than that, versioning works as usual, disallowing Best Effort Tasty from older minor versions. + */ + final val bestEffortHeader: Array[Int] = Array(0x5C, 0xA1, 0xAB, 0x20) + + /** Natural number. Along with MajorVersion, MinorVersion and ExperimentalVersion + * numbers specifies the Best Effort TASTy format. For now, Best Effort TASTy holds + * no compatibility guarantees, making this a reserved space for when this would have + * to be changed. 
+ */ + final val PatchVersion: Int = 0 + + // added AST tag - Best Effort TASTy only + final val ERRORtype = 50 + + def astTagToString(tag: Int) = tag match { + case ERRORtype => "ERRORtype" + case _ => TastyFormat.astTagToString(tag) + } +} diff --git a/compiler/src/dotty/tools/tasty/besteffort/BestEffortTastyHeaderUnpickler.scala b/compiler/src/dotty/tools/tasty/besteffort/BestEffortTastyHeaderUnpickler.scala new file mode 100644 index 000000000000..4325f55be4a7 --- /dev/null +++ b/compiler/src/dotty/tools/tasty/besteffort/BestEffortTastyHeaderUnpickler.scala @@ -0,0 +1,175 @@ +package dotty.tools.tasty.besteffort + +import java.util.UUID + +import BestEffortTastyFormat.{MajorVersion, MinorVersion, ExperimentalVersion, bestEffortHeader, header} +import dotty.tools.tasty.{UnpicklerConfig, TastyHeaderUnpickler, TastyReader, UnpickleException, TastyFormat, TastyVersion} + +/** + * The Best Effort Tasty Header consists of six fields: + * - uuid + * - contains a hash of the sections of the Best Effort TASTy file + * - majorVersion + * - matching the TASTy format version that last broke backwards compatibility + * - minorVersion + * - matching the TASTy format version that last broke forward compatibility + * - patchVersion + * - specifying the best effort TASTy version. Currently unused, kept as a reserved space. + * Empty if it was serialized as a regular TASTy file with regular tasty header. + * - experimentalVersion + * - 0 for final compiler version + * - positive for between minor versions and forward compatibility + * is broken since the previous stable version. 
+ * - toolingVersion + * - arbitrary string representing the tooling that produced the Best Effort TASTy + */ +sealed abstract case class BestEffortTastyHeader( + uuid: UUID, + majorVersion: Int, + minorVersion: Int, + patchVersion: Option[Int], + experimentalVersion: Int, + toolingVersion: String +) + +class BestEffortTastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { + import BestEffortTastyHeaderUnpickler._ + import reader._ + + def this(reader: TastyReader) = this(UnpicklerConfig.generic, reader) + def this(bytes: Array[Byte]) = this(new TastyReader(bytes)) + + def readHeader(): UUID = + readFullHeader().uuid + + def readFullHeader(): BestEffortTastyHeader = { + val hasBestEffortHeader = { + val readHeader = (for (i <- 0 until header.length) yield readByte()).toArray + + if (readHeader.sameElements(header)) false + else if (readHeader.sameElements(bestEffortHeader)) true + else throw new UnpickleException("not a TASTy or Best Effort TASTy file") + } + + val fileMajor = readNat() + val fileMinor = readNat() + val filePatch = + if hasBestEffortHeader then Some(readNat()) + else None + val fileExperimental = readNat() + val toolingVersion = { + val length = readNat() + val start = currentAddr + val end = start + length + goto(end) + new String(bytes, start.index, length) + } + + checkValidVersion(fileMajor, fileMinor, fileExperimental, toolingVersion, config) + + val uuid = new UUID(readUncompressedLong(), readUncompressedLong()) + new BestEffortTastyHeader(uuid, fileMajor, fileMinor, filePatch, fileExperimental, toolingVersion) {} + } + + private def check(cond: Boolean, msg: => String): Unit = { + if (!cond) throw new UnpickleException(msg) + } +} + +// Copy pasted from dotty.tools.tasty.TastyHeaderUnpickler +// Since that library has strong compatibility guarantees, we do not want +// to add any more methods just to support an experimental feature +// (like best-effort compilation options). 
+object BestEffortTastyHeaderUnpickler { + + private def check(cond: Boolean, msg: => String): Unit = { + if (!cond) throw new UnpickleException(msg) + } + + private def checkValidVersion(fileMajor: Int, fileMinor: Int, fileExperimental: Int, toolingVersion: String, config: UnpicklerConfig) = { + val toolMajor: Int = config.majorVersion + val toolMinor: Int = config.minorVersion + val toolExperimental: Int = config.experimentalVersion + val validVersion = TastyFormat.isVersionCompatible( + fileMajor = fileMajor, + fileMinor = fileMinor, + fileExperimental = fileExperimental, + compilerMajor = toolMajor, + compilerMinor = toolMinor, + compilerExperimental = toolExperimental + ) + check(validVersion, { + // failure means that the TASTy file cannot be read, therefore it is either: + // - backwards incompatible major, in which case the library should be recompiled by the minimum stable minor + // version supported by this compiler + // - any experimental in an older minor, in which case the library should be recompiled by the stable + // compiler in the same minor. + // - older experimental in the same minor, in which case the compiler is also experimental, and the library + // should be recompiled by the current compiler + // - forward incompatible, in which case the compiler must be upgraded to the same version as the file. 
+ val fileVersion = TastyVersion(fileMajor, fileMinor, fileExperimental) + val toolVersion = TastyVersion(toolMajor, toolMinor, toolExperimental) + + val compat = Compatibility.failReason(file = fileVersion, read = toolVersion) + + val what = if (compat < 0) "Backward" else "Forward" + val signature = signatureString(fileVersion, toolVersion, what, tool = Some(toolingVersion)) + val fix = ( + if (compat < 0) { + val newCompiler = + if (compat == Compatibility.BackwardIncompatibleMajor) toolVersion.minStable + else if (compat == Compatibility.BackwardIncompatibleExperimental) fileVersion.nextStable + else toolVersion // recompile the experimental library with the current experimental compiler + recompileFix(newCompiler, config) + } + else upgradeFix(fileVersion, config) + ) + signature + fix + tastyAddendum + }) + } + + private def signatureString( + fileVersion: TastyVersion, toolVersion: TastyVersion, what: String, tool: Option[String]) = { + val optProducedBy = tool.fold("")(t => s", produced by $t") + s"""$what incompatible TASTy file has version ${fileVersion.show}$optProducedBy, + | expected ${toolVersion.validRange}. 
+ |""".stripMargin + } + + private def recompileFix(producerVersion: TastyVersion, config: UnpicklerConfig) = { + val addendum = config.recompileAdditionalInfo + val newTool = config.upgradedProducerTool(producerVersion) + s""" The source of this file should be recompiled by $newTool.$addendum""".stripMargin + } + + private def upgradeFix(fileVersion: TastyVersion, config: UnpicklerConfig) = { + val addendum = config.upgradeAdditionalInfo(fileVersion) + val newTool = config.upgradedReaderTool(fileVersion) + s""" To read this ${fileVersion.kind} file, use $newTool.$addendum""".stripMargin + } + + private def tastyAddendum: String = """ + | Please refer to the documentation for information on TASTy versioning: + | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin + + private object Compatibility { + final val BackwardIncompatibleMajor = -3 + final val BackwardIncompatibleExperimental = -2 + final val ExperimentalRecompile = -1 + final val ExperimentalUpgrade = 1 + final val ForwardIncompatible = 2 + + /** Given that file can't be read, extract the reason */ + def failReason(file: TastyVersion, read: TastyVersion): Int = + if (file.major == read.major && file.minor == read.minor && file.isExperimental && read.isExperimental) { + if (file.experimental < read.experimental) ExperimentalRecompile // recompile library as compiler is too new + else ExperimentalUpgrade // they should upgrade compiler as library is too new + } + else if (file.major < read.major) + BackwardIncompatibleMajor // pre 3.0.0 + else if (file.isExperimental && file.major == read.major && file.minor <= read.minor) + // e.g. 
3.4.0 reading 3.4.0-RC1-NIGHTLY, or 3.3.0 reading 3.0.2-RC1-NIGHTLY + BackwardIncompatibleExperimental + else ForwardIncompatible + } +} diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index 256940645ec3..517adff17991 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -43,7 +43,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler import tpd.* private val xCheckMacro: Boolean = ctx.settings.XcheckMacros.value - private val yDebugMacro: Boolean = ctx.settings.YdebugMacros.value + private val yDebugMacro: Boolean = ctx.settings.XdebugMacros.value extension [T](self: scala.quoted.Expr[T]) def show: String = @@ -396,17 +396,22 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler end TermTypeTest object Term extends TermModule: - def betaReduce(tree: Term): Option[Term] = - tree match - case tpd.Block(Nil, expr) => - for e <- betaReduce(expr) yield tpd.cpy.Block(tree)(Nil, e) - case tpd.Inlined(_, Nil, expr) => - betaReduce(expr) - case _ => - val tree1 = dotc.transform.BetaReduce(tree) - if tree1 eq tree then None - else Some(tree1.withSpan(tree.span)) - + def betaReduce(tree: Term): Option[Term] = + val tree1 = new dotty.tools.dotc.ast.tpd.TreeMap { + override def transform(tree: Tree)(using Context): Tree = tree match { + case tpd.Block(Nil, _) | tpd.Inlined(_, Nil, _) => + super.transform(tree) + case tpd.Apply(sel @ tpd.Select(expr, nme), args) => + val tree1 = cpy.Apply(tree)(cpy.Select(sel)(transform(expr), nme), args) + dotc.transform.BetaReduce(tree1).withSpan(tree.span) + case tpd.Apply(ta @ tpd.TypeApply(sel @ tpd.Select(expr: Apply, nme), tpts), args) => + val tree1 = cpy.Apply(tree)(cpy.TypeApply(ta)(cpy.Select(sel)(transform(expr), nme), tpts), args) + dotc.transform.BetaReduce(tree1).withSpan(tree.span) + case _ => + 
dotc.transform.BetaReduce(tree).withSpan(tree.span) + } + }.transform(tree) + if tree1 == tree then None else Some(tree1) end Term given TermMethods: TermMethods with @@ -2210,6 +2215,12 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler object MethodType extends MethodTypeModule: def apply(paramNames: List[String])(paramInfosExp: MethodType => List[TypeRepr], resultTypeExp: MethodType => TypeRepr): MethodType = Types.MethodType(paramNames.map(_.toTermName))(paramInfosExp, resultTypeExp) + def apply(kind: MethodTypeKind)(paramNames: List[String])(paramInfosExp: MethodType => List[TypeRepr], resultTypeExp: MethodType => TypeRepr): MethodType = + val companion = kind match + case MethodTypeKind.Contextual => Types.ContextualMethodType + case MethodTypeKind.Implicit => Types.ImplicitMethodType + case MethodTypeKind.Plain => Types.MethodType + companion.apply(paramNames.map(_.toTermName))(paramInfosExp, resultTypeExp) def unapply(x: MethodType): (List[String], List[TypeRepr], TypeRepr) = (x.paramNames.map(_.toString), x.paramTypes, x.resType) end MethodType @@ -2218,6 +2229,12 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler extension (self: MethodType) def isErased: Boolean = false def isImplicit: Boolean = self.isImplicitMethod + def isContextual: Boolean = self.isContextualMethod + def methodTypeKind: MethodTypeKind = + self.companion match + case Types.ContextualMethodType => MethodTypeKind.Contextual + case Types.ImplicitMethodType => MethodTypeKind.Implicit + case _ => MethodTypeKind.Plain def param(idx: Int): TypeRepr = self.newParamRef(idx) def erasedParams: List[Boolean] = self.erasedParams @@ -2326,6 +2343,27 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def unapply(x: NoPrefix): true = true end NoPrefix + type FlexibleType = dotc.core.Types.FlexibleType + + object FlexibleTypeTypeTest extends TypeTest[TypeRepr, FlexibleType]: + def unapply(x: TypeRepr): 
Option[FlexibleType & x.type] = x match + case x: (Types.FlexibleType & x.type) => Some(x) + case _ => None + end FlexibleTypeTypeTest + + object FlexibleType extends FlexibleTypeModule: + def apply(tp: TypeRepr): FlexibleType = Types.FlexibleType(tp) + def unapply(x: FlexibleType): Some[TypeRepr] = Some(x.hi) + end FlexibleType + + given FlexibleTypeMethods: FlexibleTypeMethods with + extension (self: FlexibleType) + def underlying: TypeRepr = self.hi + def lo: TypeRepr = self.lo + def hi: TypeRepr = self.hi + end extension + end FlexibleTypeMethods + type Constant = dotc.core.Constants.Constant object Constant extends ConstantModule @@ -2682,8 +2720,9 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def isAliasType: Boolean = self.denot.isAliasType def isAnonymousClass: Boolean = self.denot.isAnonymousClass def isAnonymousFunction: Boolean = self.denot.isAnonymousFunction - def isAbstractType: Boolean = self.denot.isAbstractType + def isAbstractType: Boolean = self.denot.isAbstractOrParamType def isClassConstructor: Boolean = self.denot.isClassConstructor + def isSuperAccessor = self.name.is(dotc.core.NameKinds.SuperAccessorName) def isType: Boolean = self.isType def isTerm: Boolean = self.isTerm def isPackageDef: Boolean = self.is(dotc.core.Flags.Package) diff --git a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala index eac85244d97b..acf66fcf2009 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala @@ -239,6 +239,8 @@ object Extractors { this += "NoPrefix()" case MatchCase(pat, rhs) => this += "MatchCase(" += pat += ", " += rhs += ")" + case FlexibleType(tp) => + this += "FlexibleType(" += tp += ")" } def visitSignature(sig: Signature): this.type = { diff --git a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala 
b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala index 9aec7fc17ed7..9503177ff738 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala @@ -1247,6 +1247,11 @@ object SourceCode { this += " => " printType(rhs) + case FlexibleType(tp) => + this += "(" + printType(tp) + this += ")?" + case _ => cannotBeShownAsSource(tpe.show(using Printer.TypeReprStructure)) } diff --git a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala index b8dfa833c437..115803d79dc1 100644 --- a/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala +++ b/compiler/test-coursier/dotty/tools/coursier/CoursierScalaTests.scala @@ -166,7 +166,7 @@ object CoursierScalaTests: case Nil => args case _ => "--" +: args val newJOpts = jOpts.map(s => s"--java-opt ${s.stripPrefix("-J")}").mkString(" ") - execCmd("./cs", (s"""launch "org.scala-lang:scala3-compiler_3:${sys.env("DOTTY_BOOTSTRAPPED_VERSION")}" $newJOpts --main-class "$entry" --property "scala.usejavacp=true"""" +: newOptions)*)._2 + execCmd("./cs", (s"""launch "org.scala-lang:scala3-compiler_3:${sys.env("DOTTY_BOOTSTRAPPED_VERSION")}" $newJOpts --main-class "$entry" --property "scala.usejavacp=true" --property "scala.use_legacy_launcher=true"""" +: newOptions)*)._2 /** Get coursier script */ @BeforeClass def setup(): Unit = diff --git a/compiler/test-resources/repl/10693 b/compiler/test-resources/repl/10693 new file mode 100644 index 000000000000..ab4d175c1a6f --- /dev/null +++ b/compiler/test-resources/repl/10693 @@ -0,0 +1,16 @@ +scala> def test[A, B](a: A, b: B): A | B = a +def test[A, B](a: A, b: B): A | B +scala> def d0 = test("string", 1) +def d0: String | Int +scala> def d1 = test(1, "string") +def d1: Int | String +scala> def d2 = test(d0, d1) +def d2: String | Int +scala> def d3 = test(d1, d0) +def d3: Int | String +scala> 
def d4 = test(d2, d3) +def d4: String | Int +scala> def d5 = test(d3, d2) +def d5: Int | String +scala> def d6 = test(d4, d5) +def d6: String | Int \ No newline at end of file diff --git a/compiler/test-resources/repl/i18383 b/compiler/test-resources/repl/i18383 new file mode 100644 index 000000000000..81d3c9d5a7fd --- /dev/null +++ b/compiler/test-resources/repl/i18383 @@ -0,0 +1,14 @@ +scala>:settings -Wunused:all + +scala> import scala.collection.* + +scala> class Foo { import scala.util.*; println("foo") } +1 warning found +-- Warning: -------------------------------------------------------------------- +1 | class Foo { import scala.util.*; println("foo") } + | ^ + | unused import +// defined class Foo + +scala> { import scala.util.*; "foo" } +val res0: String = foo diff --git a/compiler/test-resources/repl/i18756 b/compiler/test-resources/repl/i18756 new file mode 100644 index 000000000000..56be353e67f3 --- /dev/null +++ b/compiler/test-resources/repl/i18756 @@ -0,0 +1,17 @@ +scala> object A { val f: ( => Int) => Int = i => i ; f(1) } +// defined object A + +scala> A.f(1) +val res0: Int = 1 + +scala> A.f(1) +val res1: Int = 1 + +scala> object B { val f: ( => Int) => Int = i => i ; f(1) } +// defined object B + +scala> B.f(1) +val res2: Int = 1 + +scala> B.f(1) +val res3: Int = 1 diff --git a/compiler/test-resources/repl/reset-command b/compiler/test-resources/repl/reset-command index 0adf0d93a0d8..3977504d4f5a 100644 --- a/compiler/test-resources/repl/reset-command +++ b/compiler/test-resources/repl/reset-command @@ -12,7 +12,7 @@ scala> def f(thread: Thread) = thread.stop() -- Deprecation Warning: -------------------------------------------------------- 1 | def f(thread: Thread) = thread.stop() | ^^^^^^^^^^^ - |method stop in class Thread is deprecated since : see corresponding Javadoc for more information. + |method stop in class Thread is deprecated: see corresponding Javadoc for more information. 
def f(thread: Thread): Unit scala> def resetNoArgsStillWorks = 1 diff --git a/compiler/test-resources/repl/settings-command b/compiler/test-resources/repl/settings-command index 5e9912384435..dce782b92836 100644 --- a/compiler/test-resources/repl/settings-command +++ b/compiler/test-resources/repl/settings-command @@ -11,7 +11,7 @@ scala> def f(thread: Thread) = thread.stop() -- Deprecation Warning: -------------------------------------------------------- 1 | def f(thread: Thread) = thread.stop() | ^^^^^^^^^^^ - |method stop in class Thread is deprecated since : see corresponding Javadoc for more information. + |method stop in class Thread is deprecated: see corresponding Javadoc for more information. def f(thread: Thread): Unit scala> diff --git a/compiler/test-resources/scripting/argfileClasspath.sc b/compiler/test-resources/scripting/argfileClasspath.sc deleted file mode 100755 index c31371ba8934..000000000000 --- a/compiler/test-resources/scripting/argfileClasspath.sc +++ /dev/null @@ -1,9 +0,0 @@ -#!dist/target/pack/bin/scala @compiler/test-resources/scripting/cpArgumentsFile.txt - -import java.nio.file.Paths - -def main(args: Array[String]): Unit = - val cwd = Paths.get(".").toAbsolutePath.toString.replace('\\', '/').replaceAll("/$", "") - printf("cwd: %s\n", cwd) - printf("classpath: %s\n", sys.props("java.class.path")) - diff --git a/compiler/test-resources/scripting/classpathReport.sc b/compiler/test-resources/scripting/classpathReport_scalacli.sc similarity index 59% rename from compiler/test-resources/scripting/classpathReport.sc rename to compiler/test-resources/scripting/classpathReport_scalacli.sc index a9eacbbba1f7..0b2552b3ac84 100755 --- a/compiler/test-resources/scripting/classpathReport.sc +++ b/compiler/test-resources/scripting/classpathReport_scalacli.sc @@ -1,8 +1,8 @@ -#!bin/scala -classpath 'dist/target/pack/lib/*' - +#!/usr/bin/env bin/scala +// This file is a Scala CLI script. 
import java.nio.file.Paths -def main(args: Array[String]): Unit = +// def main(args: Array[String]): Unit = // MIGRATION: Scala CLI expects `*.sc` files to be straight-line code val cwd = Paths.get(".").toAbsolutePath.normalize.toString.norm printf("cwd: %s\n", cwd) printf("classpath: %s\n", sys.props("java.class.path").norm) diff --git a/compiler/test-resources/scripting/cpArgumentsFile.txt b/compiler/test-resources/scripting/cpArgumentsFile.txt deleted file mode 100755 index 73037eb7d9bc..000000000000 --- a/compiler/test-resources/scripting/cpArgumentsFile.txt +++ /dev/null @@ -1 +0,0 @@ --classpath dist/target/pack/lib/* diff --git a/compiler/test-resources/scripting/envtest.sc b/compiler/test-resources/scripting/envtest.sc index b2fde1b32339..724580449229 100755 --- a/compiler/test-resources/scripting/envtest.sc +++ b/compiler/test-resources/scripting/envtest.sc @@ -1,2 +1,4 @@ +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class + def main(args: Array[String]): Unit = println("Hello " + util.Properties.propOrNull("key")) diff --git a/compiler/test-resources/scripting/envtestNu.sc b/compiler/test-resources/scripting/envtestNu.sc new file mode 100755 index 000000000000..fe4cd7851b0a --- /dev/null +++ b/compiler/test-resources/scripting/envtestNu.sc @@ -0,0 +1,2 @@ +// MIGRATION: Scala CLI expects `*.sc` files to be straight-line code + println("Hello " + util.Properties.propOrNull("key")) diff --git a/compiler/test-resources/scripting/envtest_scalacli.sc b/compiler/test-resources/scripting/envtest_scalacli.sc new file mode 100755 index 000000000000..993ea1691640 --- /dev/null +++ b/compiler/test-resources/scripting/envtest_scalacli.sc @@ -0,0 +1,3 @@ +// This file is a Scala CLI script. 
+ +println("Hello " + util.Properties.propOrNull("key")) diff --git a/compiler/test-resources/scripting/hashBang.sc b/compiler/test-resources/scripting/hashBang.sc index d767bd1a1592..98884bc050c0 100755 --- a/compiler/test-resources/scripting/hashBang.sc +++ b/compiler/test-resources/scripting/hashBang.sc @@ -1,4 +1,4 @@ -#!/usr/bin/env scala +#!/usr/bin/env fake-program-to-test-hashbang-removal # comment STUFF=nada !# diff --git a/compiler/test-resources/scripting/hashBang.scala b/compiler/test-resources/scripting/hashBang.scala index 1aab26269f86..b7bf6b541854 100755 --- a/compiler/test-resources/scripting/hashBang.scala +++ b/compiler/test-resources/scripting/hashBang.scala @@ -1,8 +1,8 @@ -#!/usr/bin/env scala +#!/usr/bin/env fake-program-to-test-hashbang-removal # comment STUFF=nada !# - +// everything above this point should be ignored by the compiler def main(args: Array[String]): Unit = System.err.printf("mainClassFromStack: %s\n",mainFromStack) assert(mainFromStack.contains("hashBang"),s"fromStack[$mainFromStack]") diff --git a/compiler/test-resources/scripting/scriptName.scala b/compiler/test-resources/scripting/scriptName.scala index 21aec32fe0bb..7e479197d567 100755 --- a/compiler/test-resources/scripting/scriptName.scala +++ b/compiler/test-resources/scripting/scriptName.scala @@ -1,4 +1,4 @@ -#!/usr/bin/env scala +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class def main(args: Array[String]): Unit = val name = Option(sys.props("script.name")) match { diff --git a/compiler/test-resources/scripting/scriptPath.sc b/compiler/test-resources/scripting/scriptPath.sc index 46cd5e8a7385..e29e659d09d4 100755 --- a/compiler/test-resources/scripting/scriptPath.sc +++ b/compiler/test-resources/scripting/scriptPath.sc @@ -1,4 +1,4 @@ -#!dist/target/pack/bin/scala +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class def main(args: Array[String]): Unit = 
args.zipWithIndex.foreach { case (arg,i) => printf("arg %d: [%s]\n",i,arg) } diff --git a/compiler/test-resources/scripting/scriptPathNu.sc b/compiler/test-resources/scripting/scriptPathNu.sc new file mode 100755 index 000000000000..bb3e459654b9 --- /dev/null +++ b/compiler/test-resources/scripting/scriptPathNu.sc @@ -0,0 +1,13 @@ +#!/usr/bin/env bin/scala + +// THIS FILE IS RAN WITH SCALA CLI, which wraps scripts exposing scriptPath and args variables + +args.zipWithIndex.foreach { case (arg,i) => printf("arg %d: [%s]\n",i,arg) } + +if !scriptPath.endsWith("scriptPathNu.sc") then + printf( s"incorrect script.path defined as [$scriptPath]") +else + printf("scriptPath: %s\n", scriptPath) // report the value + +extension(s: String) + def norm: String = s.replace('\\', '/') diff --git a/compiler/test-resources/scripting/scriptPath_scalacli.sc b/compiler/test-resources/scripting/scriptPath_scalacli.sc new file mode 100755 index 000000000000..c13888d0e4b1 --- /dev/null +++ b/compiler/test-resources/scripting/scriptPath_scalacli.sc @@ -0,0 +1,13 @@ +#!/usr/bin/env bin/scala + +// THIS FILE IS RAN WITH SCALA CLI, which wraps scripts exposing scriptPath and args variables + +args.zipWithIndex.foreach { case (arg,i) => printf("arg %d: [%s]\n",i,arg) } + +if !scriptPath.endsWith("scriptPath_scalacli.sc") then + printf( s"incorrect script.path defined as [$scriptPath]") +else + printf("scriptPath: %s\n", scriptPath) // report the value + +extension(s: String) + def norm: String = s.replace('\\', '/') diff --git a/compiler/test-resources/scripting/showArgs.sc b/compiler/test-resources/scripting/showArgs.sc index 28f16a9022b3..69d552b9cf5f 100755 --- a/compiler/test-resources/scripting/showArgs.sc +++ b/compiler/test-resources/scripting/showArgs.sc @@ -1,4 +1,4 @@ -#!/usr/bin/env scala +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class // precise output format expected by BashScriptsTests.scala def main(args: Array[String]): 
Unit = diff --git a/compiler/test-resources/scripting/showArgsNu.sc b/compiler/test-resources/scripting/showArgsNu.sc new file mode 100755 index 000000000000..f4c1aa6af257 --- /dev/null +++ b/compiler/test-resources/scripting/showArgsNu.sc @@ -0,0 +1,6 @@ +#!/usr/bin/env bin/scala + +// precise output format expected by BashScriptsTests.scala +// MIGRATION: Scala CLI expects `*.sc` files to be straight-line code +for (a,i) <- args.zipWithIndex do + printf(s"arg %2d:[%s]\n",i,a) diff --git a/compiler/test-resources/scripting/showArgs_scalacli.sc b/compiler/test-resources/scripting/showArgs_scalacli.sc new file mode 100755 index 000000000000..4591ac159345 --- /dev/null +++ b/compiler/test-resources/scripting/showArgs_scalacli.sc @@ -0,0 +1,7 @@ +#!/usr/bin/env bin/scala + +// This file is a Scala CLI script. + +// precise output format expected by BashScriptsTests.scala +for (a,i) <- args.zipWithIndex do + printf(s"arg %2d:[%s]\n",i,a) diff --git a/compiler/test-resources/scripting/sqlDateError.sc b/compiler/test-resources/scripting/sqlDateError.sc index ceff98f40cad..e7c3a623c6c1 100755 --- a/compiler/test-resources/scripting/sqlDateError.sc +++ b/compiler/test-resources/scripting/sqlDateError.sc @@ -1,4 +1,4 @@ -#!bin/scala +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class def main(args: Array[String]): Unit = { println(new java.sql.Date(100L)) diff --git a/compiler/test-resources/scripting/sqlDateErrorNu.sc b/compiler/test-resources/scripting/sqlDateErrorNu.sc new file mode 100755 index 000000000000..a6f1bd50297d --- /dev/null +++ b/compiler/test-resources/scripting/sqlDateErrorNu.sc @@ -0,0 +1,6 @@ +#!/usr/bin/env bin/scala + +// def main(args: Array[String]): Unit = { MIGRATION: Scala CLI expects `*.sc` files to be straight-line code + println(new java.sql.Date(100L)) + System.err.println("SCALA_OPTS="+Option(System.getenv("SCALA_OPTS")).getOrElse("")) +// } diff --git 
a/compiler/test-resources/scripting/sqlDateError_scalacli.sc b/compiler/test-resources/scripting/sqlDateError_scalacli.sc new file mode 100755 index 000000000000..10b58821a6e4 --- /dev/null +++ b/compiler/test-resources/scripting/sqlDateError_scalacli.sc @@ -0,0 +1,6 @@ +#!/usr/bin/env bin/scala + +// This file is a Scala CLI script. + +println(new java.sql.Date(100L)) +System.err.println("SCALA_OPTS="+Option(System.getenv("SCALA_OPTS")).getOrElse("")) diff --git a/compiler/test-resources/scripting/touchFile.sc b/compiler/test-resources/scripting/touchFile.sc index 974f8a64d192..b46b3c99d786 100755 --- a/compiler/test-resources/scripting/touchFile.sc +++ b/compiler/test-resources/scripting/touchFile.sc @@ -1,4 +1,4 @@ -#!/usr/bin/env scala +// this file is intended to be ran as an argument to the dotty.tools.scripting.ScriptingDriver class import java.io.File diff --git a/compiler/test-resources/scripting/unglobClasspath.sc b/compiler/test-resources/scripting/unglobClasspath.sc deleted file mode 100755 index 796697cdedf2..000000000000 --- a/compiler/test-resources/scripting/unglobClasspath.sc +++ /dev/null @@ -1,8 +0,0 @@ -#!bin/scala -classpath 'dist/target/pack/lib/*' - -// won't compile unless the hashbang line sets classpath -import org.jline.terminal.Terminal - -def main(args: Array[String]) = - val cp = sys.props("java.class.path") - printf("unglobbed classpath: %s\n", cp) diff --git a/compiler/test-resources/scripting/unglobClasspath_scalacli.sc b/compiler/test-resources/scripting/unglobClasspath_scalacli.sc new file mode 100755 index 000000000000..ccc4cf667085 --- /dev/null +++ b/compiler/test-resources/scripting/unglobClasspath_scalacli.sc @@ -0,0 +1,9 @@ +// This file is a Scala CLI script. 
+ +import dotty.tools.tasty.TastyFormat +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +// not visible on default classpath, "compiler/test/dotty/tools/scripting/ClasspathTests.scala" +// adds it to classpath via a compiler argument `-classpath 'org/scala-lang/tasty-core_3/$VERSION/*'` + +val cp = sys.props("java.class.path") +printf("unglobbed classpath: %s\n", cp) diff --git a/compiler/test/dotc/neg-best-effort-pickling.blacklist b/compiler/test/dotc/neg-best-effort-pickling.blacklist new file mode 100644 index 000000000000..ff02be107a8a --- /dev/null +++ b/compiler/test/dotc/neg-best-effort-pickling.blacklist @@ -0,0 +1,19 @@ +export-in-extension.scala +i12456.scala +i8623.scala +i1642.scala +i16696.scala +constructor-proxy-values.scala +i9328.scala +i15414.scala +i6796.scala +i14013.scala +toplevel-cyclic +curried-dependent-ift.scala +i17121.scala +illegal-match-types.scala +i13780-1.scala + +# semantic db generation fails in the first compilation +i1642.scala +i15158.scala diff --git a/compiler/test/dotc/neg-best-effort-unpickling.blacklist b/compiler/test/dotc/neg-best-effort-unpickling.blacklist new file mode 100644 index 000000000000..1e22d919f25a --- /dev/null +++ b/compiler/test/dotc/neg-best-effort-unpickling.blacklist @@ -0,0 +1,17 @@ +# cyclic reference crashes +i4368.scala +i827.scala +cycles.scala +i5332.scala +i4369c.scala +i1806.scala +i0091-infpaths.scala +exports.scala +i14834.scala + +# other type related crashes +i4653.scala +overrideClass.scala + +# repeating on a top level type definition +i18750.scala diff --git a/compiler/test/dotc/pos-test-pickling.blacklist b/compiler/test/dotc/pos-test-pickling.blacklist index 32f8cdef1386..d6f962176ecc 100644 --- a/compiler/test/dotc/pos-test-pickling.blacklist +++ b/compiler/test/dotc/pos-test-pickling.blacklist @@ -30,6 +30,7 @@ strict-pattern-bindings-3.0-migration.scala i17186b.scala i11982a.scala i17255 +i17735.scala # Tree is huge and blows stack for printing Text i7034.scala @@ -64,6 +65,8 @@ i17149.scala 
tuple-fold.scala mt-redux-norm.perspective.scala i18211.scala +10867.scala +named-tuples1.scala # Opaque type i5720.scala @@ -100,7 +103,7 @@ i13842.scala # Position change under captureChecking boxmap-paper.scala -# Function types print differnt after unpickling since test mispredicts Feature.preFundsEnabled +# Function types print different after unpickling since test mispredicts Feature.preFundsEnabled caps-universal.scala # GADT cast applied to singleton type difference @@ -114,3 +117,21 @@ java-inherited-type1 # recursion limit exceeded i7445b.scala + +# more aggresive reduce projection makes a difference +i15525.scala +i19955a.scala +i19955b.scala +i20053b.scala + +# alias types at different levels of dereferencing +parsercombinators-givens.scala +parsercombinators-givens-2.scala +parsercombinators-ctx-bounds.scala +parsercombinators-this.scala +parsercombinators-arrow.scala +parsercombinators-new-syntax.scala +hylolib-deferred-given +hylolib-cb +hylolib + diff --git a/compiler/test/dotc/run-test-pickling.blacklist b/compiler/test/dotc/run-test-pickling.blacklist index 954a64db1b66..dacbc63bb520 100644 --- a/compiler/test/dotc/run-test-pickling.blacklist +++ b/compiler/test/dotc/run-test-pickling.blacklist @@ -45,4 +45,5 @@ t6138-2 i12656.scala trait-static-forwarder i17255 +named-tuples-strawman-2.scala diff --git a/compiler/test/dotty/Properties.scala b/compiler/test/dotty/Properties.scala index e085b0de4875..86e0788a3b8f 100644 --- a/compiler/test/dotty/Properties.scala +++ b/compiler/test/dotty/Properties.scala @@ -103,6 +103,9 @@ object Properties { /** scalajs-javalib jar */ def scalaJSJavalib: String = sys.props("dotty.tests.classes.scalaJSJavalib") + /** scalajs-scalalib jar */ + def scalaJSScalalib: String = sys.props("dotty.tests.classes.scalaJSScalalib") + /** scalajs-library jar */ def scalaJSLibrary: String = sys.props("dotty.tests.classes.scalaJSLibrary") } diff --git a/compiler/test/dotty/tools/TestSources.scala 
b/compiler/test/dotty/tools/TestSources.scala index a288e49c5eb9..b2133b2fb182 100644 --- a/compiler/test/dotty/tools/TestSources.scala +++ b/compiler/test/dotty/tools/TestSources.scala @@ -64,6 +64,14 @@ object TestSources { if Properties.usingScalaLibraryTasty then loadList(patmatExhaustivityScala2LibraryTastyBlacklistFile) else Nil + // neg best effort tests lists + + def negBestEffortPicklingBlacklistFile: String = "compiler/test/dotc/neg-best-effort-pickling.blacklist" + def negBestEffortUnpicklingBlacklistFile: String = "compiler/test/dotc/neg-best-effort-unpickling.blacklist" + + def negBestEffortPicklingBlacklisted: List[String] = loadList(negBestEffortPicklingBlacklistFile) + def negBestEffortUnpicklingBlacklisted: List[String] = loadList(negBestEffortUnpicklingBlacklistFile) + // load lists private def loadList(path: String): List[String] = { diff --git a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala index 51390e35b527..f446913d7964 100644 --- a/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/DottyBytecodeTests.scala @@ -1785,6 +1785,64 @@ class DottyBytecodeTests extends DottyBytecodeTest { } } + + @Test def i15098 = { + val source = + """object Main { + | def main(args: Array[String]): Unit = { + | Array(1).foreach { n => + | val x = 123 + | println(n) + | } + | } + |} + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Main$.class", directory = false).input + val clsNode = loadClassNode(clsIn, skipDebugInfo = false) + val method = getMethod(clsNode, "main") + val instructions = instructionsFromMethod(method).filter(_.isInstanceOf[LineNumber]) + + val expected = List( + LineNumber(3, Label(0)), + ) + + assertSameCode(instructions, expected) + } + } + + @Test def i15098_2 = { + val source = + """object Main { + | def main(args: Array[String]): Unit = { + | Array(1).map { n => + | val x = 
123 + | x + n + | }.foreach { n => + | println(n) + | println(n) + | } + | } + |} + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Main$.class", directory = false).input + val clsNode = loadClassNode(clsIn, skipDebugInfo = false) + val method = getMethod(clsNode, "main") + val instructions = instructionsFromMethod(method).filter(_.isInstanceOf[LineNumber]) + + val expected = List( + LineNumber(3, Label(0)), + LineNumber(6, Label(15)), + LineNumber(3, Label(24)), + LineNumber(6, Label(27)), + ) + + assertSameCode(instructions, expected) + } + } } object invocationReceiversTestCode { diff --git a/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala index 6173842e9ad1..b490d55bb43f 100644 --- a/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/InlineBytecodeTests.scala @@ -765,4 +765,56 @@ class InlineBytecodeTests extends DottyBytecodeTest { diffInstructions(instructions1, instructions2)) } } + + @Test def beta_reduce_elide_unit_binding = { + val source = """class Test: + | def test = ((u: Unit) => u).apply(()) + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Test.class", directory = false).input + val clsNode = loadClassNode(clsIn) + + val fun = getMethod(clsNode, "test") + val instructions = instructionsFromMethod(fun) + val expected = List(Op(RETURN)) + + assert(instructions == expected, + "`i was not properly beta-reduced in `test`\n" + diffInstructions(instructions, expected)) + + } + } + + @Test def inline_match_scrutinee_with_side_effect = { + val source = """class Test: + | inline def inlineTest(): Int = + | inline { + | println("scrutinee") + | (1, 2) + | } match + | case (e1, e2) => e1 + e2 + | + | def test: Int = inlineTest() + """.stripMargin + + checkBCode(source) { dir => + val clsIn = dir.lookupName("Test.class", directory = false).input + val clsNode = 
loadClassNode(clsIn) + + val fun = getMethod(clsNode, "test") + val instructions = instructionsFromMethod(fun) + val expected = List( + Field(GETSTATIC, "scala/Predef$", "MODULE$", "Lscala/Predef$;"), + Ldc(LDC, "scrutinee"), + Invoke(INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false), + Op(ICONST_3), + Op(IRETURN), + ) + + assert(instructions == expected, + "`i was not properly inlined in `test`\n" + diffInstructions(instructions, expected)) + + } + } + } diff --git a/compiler/test/dotty/tools/backend/jvm/PublicInBinaryTests.scala b/compiler/test/dotty/tools/backend/jvm/PublicInBinaryTests.scala index eebb2b23247a..a5463b75804e 100644 --- a/compiler/test/dotty/tools/backend/jvm/PublicInBinaryTests.scala +++ b/compiler/test/dotty/tools/backend/jvm/PublicInBinaryTests.scala @@ -42,7 +42,6 @@ class PublicInBinaryTests extends DottyBytecodeTest { override def initCtx = val ctx0 = super.initCtx ctx0.setSetting(ctx0.settings.experimental, true) - ctx0.setSetting(ctx0.settings.YnoExperimental, true) @Test def publicInBinaryDef(): Unit = { diff --git a/compiler/test/dotty/tools/dotc/BestEffortOptionsTests.scala b/compiler/test/dotty/tools/dotc/BestEffortOptionsTests.scala new file mode 100644 index 000000000000..1e7262f5fd8d --- /dev/null +++ b/compiler/test/dotty/tools/dotc/BestEffortOptionsTests.scala @@ -0,0 +1,59 @@ +package dotty +package tools +package dotc + +import scala.concurrent.duration._ +import dotty.tools.vulpix._ +import org.junit.{ Test, AfterClass } +import reporting.TestReporter +import java.io.{File => JFile} + +import scala.language.unsafeNulls + +class BestEffortOptionsTests { + import ParallelTesting._ + import vulpix.TestConfiguration._ + import BestEffortOptionsTests._ + import CompilationTest.aggregateTests + + // Since TASTy and beTASTy files are read in a lazy manner (only when referenced by the source .scala file) + // we test by using the "-from-tasty" option. 
This guarantees that the tasty files will be read + // (and that the Best Effort TASTy reader will be tested), but we unfortunately skip the useful + // interactions a tree derived from beTASTy could have with other frontend phases. + @Test def negTestFromBestEffortTasty: Unit = { + // Can be reproduced with + // > sbt + // > scalac --Ybest-effort -Xsemanticdb + // > scalac --from-tasty -Ywith-best-effort-tasty META_INF/best-effort/ + + implicit val testGroup: TestGroup = TestGroup("negTestFromBestEffortTasty") + compileBestEffortTastyInDir(s"tests${JFile.separator}neg", bestEffortBaselineOptions, + picklingFilter = FileFilter.exclude(TestSources.negBestEffortPicklingBlacklisted), + unpicklingFilter = FileFilter.exclude(TestSources.negBestEffortUnpicklingBlacklisted) + ).checkNoCrash() + } + + // Tests an actual use case of this compilation mode, where symbol definitions of the downstream + // projects depend on the best effort tasty files generated with the Best Effort dir option + @Test def bestEffortIntergrationTest: Unit = { + implicit val testGroup: TestGroup = TestGroup("bestEffortIntegrationTests") + compileBestEffortIntegration(s"tests${JFile.separator}best-effort", bestEffortBaselineOptions) + .noCrashWithCompilingDependencies() + } +} + +object BestEffortOptionsTests extends ParallelTesting { + def maxDuration = 45.seconds + def numberOfSlaves = Runtime.getRuntime.availableProcessors() + def safeMode = Properties.testsSafeMode + def isInteractive = SummaryReport.isInteractive + def testFilter = Properties.testsFilter + def updateCheckFiles: Boolean = Properties.testsUpdateCheckfile + def failedTests = TestReporter.lastRunFailedTests + + implicit val summaryReport: SummaryReporting = new SummaryReport + @AfterClass def tearDown(): Unit = { + super.cleanup() + summaryReport.echoSummary() + } +} diff --git a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala index 
5e9a01a77ca7..a40c1ec1e5b2 100644 --- a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala @@ -138,6 +138,16 @@ class BootstrappedOnlyCompilationTests { aggregateTests(tests*).checkRuns() } + @Test def runScala2LibraryFromTasty: Unit = { + implicit val testGroup: TestGroup = TestGroup("runScala2LibraryFromTasty") + // These tests recompile the entire scala2-library from TASTy, + // they are resource intensive and should not run alongside other tests to avoid timeouts + aggregateTests( + compileFile("tests/run-custom-args/scala2-library-from-tasty-jar.scala", withCompilerOptions), + compileFile("tests/run-custom-args/scala2-library-from-tasty.scala", withCompilerOptions), + ).limitThreads(2).checkRuns() // TODO reduce to limitThreads(1) if it still causes problems, this would be around 50% slower based on local benchmarking + } + @Test def runBootstrappedOnly: Unit = { implicit val testGroup: TestGroup = TestGroup("runBootstrappedOnly") aggregateTests( diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index a96a4ea09102..de3bd02bba6e 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -31,8 +31,8 @@ class CompilationTests { @Test def pos: Unit = { implicit val testGroup: TestGroup = TestGroup("compilePos") var tests = List( - compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Wunused:all", "-Wshadow:private-shadow", "-Wshadow:type-parameter-shadow"), FileFilter.include(TestSources.posLintingAllowlist)), - compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init"), FileFilter.exclude(TestSources.posLintingAllowlist)), + compileFilesInDir("tests/pos", defaultOptions.and("-Wsafe-init", "-Wunused:all", "-Wshadow:private-shadow", "-Wshadow:type-parameter-shadow"), 
FileFilter.include(TestSources.posLintingAllowlist)), + compileFilesInDir("tests/pos", defaultOptions.and("-Wsafe-init"), FileFilter.exclude(TestSources.posLintingAllowlist)), compileFilesInDir("tests/pos-deep-subtype", allowDeepSubtypes), compileFilesInDir("tests/pos-special/sourcepath/outer", defaultOptions.and("-sourcepath", "tests/pos-special/sourcepath")), compileFile("tests/pos-special/sourcepath/outer/nested/Test4.scala", defaultOptions.and("-sourcepath", "tests/pos-special/sourcepath")), @@ -40,9 +40,9 @@ class CompilationTests { compileFilesInDir("tests/pos-custom-args/captures", defaultOptions.and("-language:experimental.captureChecking")), compileFile("tests/pos-special/utf8encoded.scala", defaultOptions.and("-encoding", "UTF8")), compileFile("tests/pos-special/utf16encoded.scala", defaultOptions.and("-encoding", "UTF16")), - compileDir("tests/pos-special/i18589", defaultOptions.and("-Ysafe-init").without("-Ycheck:all")), + compileDir("tests/pos-special/i18589", defaultOptions.and("-Wsafe-init").without("-Ycheck:all")), // Run tests for legacy lazy vals - compileFilesInDir("tests/pos", defaultOptions.and("-Ysafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.posLazyValsAllowlist)), + compileFilesInDir("tests/pos", defaultOptions.and("-Wsafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.posLazyValsAllowlist)), compileDir("tests/pos-special/java-param-names", defaultOptions.withJavacOnlyOptions("-parameters")), ) ::: ( // TODO create a folder for capture checking tests with the stdlib, or use tests/pos-custom-args/captures under this mode? 
@@ -51,7 +51,7 @@ class CompilationTests { ) if scala.util.Properties.isJavaAtLeast("16") then - tests ::= compileFilesInDir("tests/pos-java16+", defaultOptions.and("-Ysafe-init")) + tests ::= compileFilesInDir("tests/pos-java16+", defaultOptions.and("-Wsafe-init")) aggregateTests(tests*).checkCompile() } @@ -75,6 +75,7 @@ class CompilationTests { compileFile("tests/rewrites/i12340.scala", unindentOptions.and("-rewrite")), compileFile("tests/rewrites/i17187.scala", unindentOptions.and("-rewrite")), compileFile("tests/rewrites/i17399.scala", unindentOptions.and("-rewrite")), + compileFile("tests/rewrites/i20002.scala", defaultOptions.and("-indent", "-rewrite")), ).checkRewrites() } @@ -156,11 +157,11 @@ class CompilationTests { @Test def runAll: Unit = { implicit val testGroup: TestGroup = TestGroup("runAll") aggregateTests( - compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init")), + compileFilesInDir("tests/run", defaultOptions.and("-Wsafe-init")), compileFilesInDir("tests/run-deep-subtype", allowDeepSubtypes), compileFilesInDir("tests/run-custom-args/captures", allowDeepSubtypes.and("-language:experimental.captureChecking")), // Run tests for legacy lazy vals. 
- compileFilesInDir("tests/run", defaultOptions.and("-Ysafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.runLazyValsAllowlist)), + compileFilesInDir("tests/run", defaultOptions.and("-Wsafe-init", "-Ylegacy-lazy-vals", "-Ycheck-constraint-deps"), FileFilter.include(TestSources.runLazyValsAllowlist)), ).checkRuns() } @@ -194,22 +195,24 @@ class CompilationTests { @Test def explicitNullsNeg: Unit = { implicit val testGroup: TestGroup = TestGroup("explicitNullsNeg") aggregateTests( - compileFilesInDir("tests/explicit-nulls/neg", defaultOptions and "-Yexplicit-nulls"), - compileFilesInDir("tests/explicit-nulls/unsafe-common", defaultOptions and "-Yexplicit-nulls"), + compileFilesInDir("tests/explicit-nulls/neg", explicitNullsOptions), + compileFilesInDir("tests/explicit-nulls/flexible-types-common", explicitNullsOptions and "-Yno-flexible-types"), + compileFilesInDir("tests/explicit-nulls/unsafe-common", explicitNullsOptions and "-Yno-flexible-types"), ) }.checkExpectedErrors() @Test def explicitNullsPos: Unit = { implicit val testGroup: TestGroup = TestGroup("explicitNullsPos") aggregateTests( - compileFilesInDir("tests/explicit-nulls/pos", defaultOptions and "-Yexplicit-nulls"), - compileFilesInDir("tests/explicit-nulls/unsafe-common", defaultOptions and "-Yexplicit-nulls" and "-language:unsafeNulls"), + compileFilesInDir("tests/explicit-nulls/pos", explicitNullsOptions), + compileFilesInDir("tests/explicit-nulls/flexible-types-common", explicitNullsOptions), + compileFilesInDir("tests/explicit-nulls/unsafe-common", explicitNullsOptions and "-language:unsafeNulls" and "-Yno-flexible-types"), ) }.checkCompile() @Test def explicitNullsRun: Unit = { implicit val testGroup: TestGroup = TestGroup("explicitNullsRun") - compileFilesInDir("tests/explicit-nulls/run", defaultOptions and "-Yexplicit-nulls") + compileFilesInDir("tests/explicit-nulls/run", explicitNullsOptions) }.checkRuns() // initialization tests @@ -222,9 +225,9 @@ 
class CompilationTests { // initialization tests @Test def checkInit: Unit = { implicit val testGroup: TestGroup = TestGroup("checkInit") - val options = defaultOptions.and("-Ysafe-init", "-Xfatal-warnings") + val options = defaultOptions.and("-Wsafe-init", "-Xfatal-warnings") compileFilesInDir("tests/init/neg", options).checkExpectedErrors() - compileFilesInDir("tests/init/warn", defaultOptions.and("-Ysafe-init")).checkWarnings() + compileFilesInDir("tests/init/warn", defaultOptions.and("-Wsafe-init")).checkWarnings() compileFilesInDir("tests/init/pos", options).checkCompile() compileFilesInDir("tests/init/crash", options.without("-Xfatal-warnings")).checkCompile() // The regression test for i12128 has some atypical classpath requirements. diff --git a/compiler/test/dotty/tools/dotc/SettingsTests.scala b/compiler/test/dotty/tools/dotc/SettingsTests.scala index 8125a80f29f8..301dc10ab54e 100644 --- a/compiler/test/dotty/tools/dotc/SettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/SettingsTests.scala @@ -7,14 +7,19 @@ import reporting.StoreReporter import vulpix.TestConfiguration import core.Contexts.{Context, ContextBase} -import dotty.tools.dotc.config.Settings._ -import dotty.tools.dotc.config.ScalaSettingCategories._ +import dotty.tools.dotc.config.Settings.* +import dotty.tools.dotc.config.Settings.Setting.ChoiceWithHelp +import dotty.tools.dotc.config.ScalaSettingCategories.* import dotty.tools.vulpix.TestConfiguration.mkClasspath +import dotty.tools.io.PlainDirectory +import dotty.tools.io.Directory +import dotty.tools.dotc.config.ScalaVersion import java.nio.file._ import org.junit.Test import org.junit.Assert._ +import scala.util.Using class SettingsTests { @@ -199,6 +204,125 @@ class SettingsTests { assertEquals(List("Flag -qux set repeatedly"), summary.warnings) } + @Test def `Output setting is overriding existing jar`: Unit = + val result = Using.resource(Files.createTempFile("myfile", ".jar").nn){ file => + object Settings extends SettingGroup: + 
val defaultDir = new PlainDirectory(Directory(".")) + val testOutput = OutputSetting(RootSetting, "testOutput", "testOutput", "", defaultDir) + + import Settings._ + + Files.write(file, "test".getBytes()) + val fileStateBefore = String(Files.readAllBytes(file)) + + val args = List(s"-testOutput:${file.toString}") + val summary = processArguments(args, processAll = true) + + assertNotEquals(fileStateBefore, String(Files.readAllBytes(file)), "Jar should have been overriden") + + }(Files.deleteIfExists(_)) + + @Test def `Output setting is respecting previous setting`: Unit = + val result = Using.resources( + Files.createTempFile("myfile", ".jar").nn, Files.createTempFile("myfile2", ".jar").nn + ){ (file1, file2) => + object Settings extends SettingGroup: + val defaultDir = new PlainDirectory(Directory(".")) + val testOutput = OutputSetting(RootSetting, "testOutput", "testOutput", "", defaultDir, preferPrevious = true) + + import Settings._ + + Files.write(file1, "test1".getBytes()) + Files.write(file2, "test2".getBytes()) + + val file1StateBefore = String(Files.readAllBytes(file1)) + val file2StateBefore = String(Files.readAllBytes(file2)) + + val creationTime = Files.getLastModifiedTime(file1) + val args = List(s"-testOutput:${file1.toString}", s"-testOutput:${file2.toString}") + val summary = processArguments(args, processAll = true) + + // The output is a new filesystem without information of original path + // We can't check the `testOutput.value` as in other tests. 
+ assertNotEquals(file1StateBefore, String(Files.readAllBytes(file1))) + assertEquals(file2StateBefore, String(Files.readAllBytes(file2))) + + }(Files.deleteIfExists(_), Files.deleteIfExists(_)) + + @Test def `Output side effect is not present when setting is deprecated`: Unit = + val result = Using.resource(Files.createTempFile("myfile", ".jar").nn){ file => + object Settings extends SettingGroup: + val defaultDir = new PlainDirectory(Directory(".")) + val testOutput = OutputSetting(RootSetting, "testOutput", "testOutput", "", defaultDir, preferPrevious = true, deprecation = Deprecation.renamed("XtestOutput")) + + import Settings._ + + Files.write(file, "test".getBytes()) + val fileStateBefore = String(Files.readAllBytes(file)) + + val args = List(s"-testOutput:${file.toString}") + val summary = processArguments(args, processAll = true) + + assertEquals(fileStateBefore, String(Files.readAllBytes(file))) + + }(Files.deleteIfExists(_)) + + @Test def `Arguments of flags are correctly parsed with both ":" and " " separating`: Unit = + object Settings extends SettingGroup: + val booleanSetting = BooleanSetting(RootSetting, "booleanSetting", "booleanSetting", false) + val stringSetting = StringSetting(RootSetting, "stringSetting", "stringSetting", "", "test") + val choiceSetting = ChoiceSetting(RootSetting, "choiceSetting", "choiceSetting", "", List("a", "b"), "a") + val multiChoiceSetting= MultiChoiceSetting(RootSetting, "multiChoiceSetting", "multiChoiceSetting", "", List("a", "b"), List()) + val multiChoiceHelpSetting= MultiChoiceHelpSetting(RootSetting, "multiChoiceHelpSetting", "multiChoiceHelpSetting", "", List(ChoiceWithHelp("a", "a"), ChoiceWithHelp("b", "b")), List()) + val intSetting = IntSetting(RootSetting, "intSetting", "intSetting", 0) + val intChoiceSetting = IntChoiceSetting(RootSetting, "intChoiceSetting", "intChoiceSetting", List(1,2,3), 1) + val multiStringSetting = MultiStringSetting(RootSetting, "multiStringSetting", "multiStringSetting", "", 
List("a", "b"), List()) + val outputSetting = OutputSetting(RootSetting, "outputSetting", "outputSetting", "", new PlainDirectory(Directory("."))) + val pathSetting = PathSetting(RootSetting, "pathSetting", "pathSetting", ".") + val phasesSetting = PhasesSetting(RootSetting, "phasesSetting", "phasesSetting", "all") + val versionSetting= VersionSetting(RootSetting, "versionSetting", "versionSetting") + + import Settings._ + Using.resource(Files.createTempDirectory("testDir")) { dir => + + val args = List( + List("-booleanSetting", "true"), + List("-stringSetting", "newTest"), + List("-choiceSetting", "b"), + List("-multiChoiceSetting", "a,b"), + List("-multiChoiceHelpSetting", "a,b"), + List("-intSetting", "42"), + List("-intChoiceSetting", "2"), + List("-multiStringSetting", "a,b"), + List("-outputSetting", dir.toString), + List("-pathSetting", dir.toString), + List("-phasesSetting", "parser,typer"), + List("-versionSetting", "1.0.0"), + ) + + def testValues(summary: ArgsSummary) = + withProcessedArgs(summary) { + assertEquals(true, booleanSetting.value) + assertEquals("newTest", stringSetting.value) + assertEquals("b", choiceSetting.value) + assertEquals(List("a", "b"), multiChoiceSetting.value) + assertEquals(List("a", "b"), multiChoiceHelpSetting.value) + assertEquals(42, intSetting.value) + assertEquals(2, intChoiceSetting.value) + assertEquals(List("a", "b"), multiStringSetting.value) + assertEquals(dir.toString, outputSetting.value.path) + assertEquals(dir.toString, pathSetting.value) + assertEquals(List("parser", "typer"), phasesSetting.value) + assertEquals(ScalaVersion.parse("1.0.0").get, versionSetting.value) + } + + val summaryColon = processArguments(args.map(_.mkString(":")), processAll = true) + val summaryWhitespace = processArguments(args.flatten, processAll = true) + testValues(summary = summaryColon) + testValues(summary = summaryWhitespace) + + }(Files.deleteIfExists(_)) + private def withProcessedArgs(summary: ArgsSummary)(f: SettingsState ?=> 
Unit) = f(using summary.sstate) extension [T](setting: Setting[T]) diff --git a/compiler/test/dotty/tools/dotc/StringFormatterTest.scala b/compiler/test/dotty/tools/dotc/StringFormatterTest.scala index 4dfc08cc7e9b..b0ff8b8fc03e 100644 --- a/compiler/test/dotty/tools/dotc/StringFormatterTest.scala +++ b/compiler/test/dotty/tools/dotc/StringFormatterTest.scala @@ -23,6 +23,7 @@ class StringFormatterTest extends AbstractStringFormatterTest: @Test def flagsTup = check("(,final)", i"${(JavaStatic, Final)}") @Test def seqOfTup2 = check("(final,given), (private,lazy)", i"${Seq((Final, Given), (Private, Lazy))}%, %") @Test def seqOfTup3 = check("(Foo,given, (right is approximated))", i"${Seq((Foo, Given, TypeComparer.ApproxState.None.addHigh))}%, %") + @Test def tupleNull = check("(1,null)", i"${(1, null: String | Null)}") class StorePrinter extends Printer: var string: String = "" diff --git a/compiler/test/dotty/tools/dotc/classpath/JrtClassPathTest.scala b/compiler/test/dotty/tools/dotc/classpath/JrtClassPathTest.scala index b676bb100320..a06698c1d513 100644 --- a/compiler/test/dotty/tools/dotc/classpath/JrtClassPathTest.scala +++ b/compiler/test/dotty/tools/dotc/classpath/JrtClassPathTest.scala @@ -38,7 +38,6 @@ class JrtClassPathTest { assertEquals("java/lang/Object", AsmUtils.readClass(jl_Object.file.toByteArray).name) assertTrue(cp.list("java.lang").packages.exists(_.name == "java.lang.annotation")) assertTrue(cp.list("java.lang").classesAndSources.exists(_.name == "Object")) - assertTrue(cp.findClass("java.lang.Object").isDefined) assertTrue(cp.findClassFile("java.lang.Object").isDefined) } } diff --git a/compiler/test/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactoryTest.scala b/compiler/test/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactoryTest.scala index 84973b8d3d71..db14ff3b1fb4 100644 --- a/compiler/test/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactoryTest.scala +++ b/compiler/test/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactoryTest.scala 
@@ -27,7 +27,7 @@ class ZipAndJarFileLookupFactoryTest { createZip(f, Array(), "p2/X.class") createZip(f, Array(), "p3/Y.class") val cp1 = createCp - assert(cp1.findClass("p1.C").isDefined) + assert(cp1.findClassFile("p1.C").isDefined) // We expect get a cache hit as the underlying zip hasn't changed val cp2 = createCp @@ -46,8 +46,8 @@ class ZipAndJarFileLookupFactoryTest { val cp3 = createCp assert(cp1 ne cp3, (System.identityHashCode(cp1), System.identityHashCode(cp3))) // And that instance should see D, not C, in package p1. - assert(cp3.findClass("p1.C").isEmpty) - assert(cp3.findClass("p1.D").isDefined) + assert(cp3.findClassFile("p1.C").isEmpty) + assert(cp3.findClassFile("p1.D").isDefined) } finally Files.delete(f) } diff --git a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala index e958a5925fce..3dc4f4e4ec5e 100644 --- a/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala +++ b/compiler/test/dotty/tools/dotc/config/ScalaSettingsTests.scala @@ -7,6 +7,13 @@ import dotty.tools.dotc.config.ScalaSettingCategories._ import org.junit.Test import org.junit.Assert._ import core.Decorators.toMessage +import dotty.tools.io.{Path, PlainFile} + +import java.net.URI +import java.nio.file.Files +import scala.util.Using + +import scala.annotation.nowarn class ScalaSettingsTests: @@ -83,6 +90,105 @@ class ScalaSettingsTests: val nowr = new Diagnostic.Warning("This is a problem.".toMessage, util.NoSourcePosition) assertEquals(Action.Silent, sut.action(nowr)) + @nowarn("cat=deprecation") + @Test def `Deprecated options are correctly mapped to their replacements`: Unit = + def createTestCase(oldSetting: Setting[_], newSetting: Setting[_], value: String = "") = + s"${oldSetting.name}$value" -> newSetting + + val settings = ScalaSettings + List( + createTestCase(settings.YtermConflict , settings.XtermConflict, ":package"), + createTestCase(settings.YnoGenericSig , 
settings.XnoGenericSig), + createTestCase(settings.Ydumpclasses , settings.Xdumpclasses,":./"), + createTestCase(settings.YjarCompressionLevel , settings.XjarCompressionLevel,":0"), + createTestCase(settings.YkindProjector , settings.XkindProjector, ":underscores"), + createTestCase(settings.YdropComments , settings.XdropComments), + createTestCase(settings.YcookComments , settings.XcookComments), + createTestCase(settings.YreadComments , settings.XreadComments), + createTestCase(settings.YnoDecodeStacktraces , settings.XnoDecodeStacktraces), + createTestCase(settings.YnoEnrichErrorMessages, settings.XnoEnrichErrorMessages), + createTestCase(settings.YdebugMacros , settings.XdebugMacros), + // createTestCase(settings.YjavaTasty , settings.XjavaTasty), + // createTestCase(settings.YearlyTastyOutput , settings.XearlyTastyOutput, ":./"), + // createTestCase(settings.YallowOutlineFromTasty, settings.XallowOutlineFromTasty), + createTestCase(settings.YcheckInit , settings.WcheckInit), + // createTestCase(settings.Xlint , settings.Wshadow, ":all"), // this setting is not going to be mapped to replacement. 
Read more in the commit message + ).map: (deprecatedArgument, newSetting) => + val args = List(deprecatedArgument) + val argSummary = ArgsSummary(settings.defaultState, args, errors = Nil, warnings = Nil) + val conf = settings.processArguments(argSummary, processAll = true, skipped = Nil) + assert(!newSetting.isDefaultIn(conf.sstate), s"Setting $deprecatedArgument was not forwarded to ${newSetting.name}") + + @nowarn("cat=deprecation") + @Test def `Deprecated options should not be set if old option was incorrect`: Unit = + def createTestCase(oldSetting: Setting[_], newSetting: Setting[_], value: String = ":illegal") = + s"${oldSetting.name}:$value" -> newSetting + + val settings = ScalaSettings + List( + createTestCase(settings.YtermConflict , settings.XtermConflict), + createTestCase(settings.YnoGenericSig , settings.XnoGenericSig), + createTestCase(settings.Ydumpclasses , settings.Xdumpclasses, ""), + createTestCase(settings.YjarCompressionLevel , settings.XjarCompressionLevel), + createTestCase(settings.YkindProjector , settings.XkindProjector), + createTestCase(settings.YdropComments , settings.XdropComments), + createTestCase(settings.YcookComments , settings.XcookComments), + createTestCase(settings.YreadComments , settings.XreadComments), + createTestCase(settings.YnoDecodeStacktraces , settings.XnoDecodeStacktraces), + createTestCase(settings.YnoEnrichErrorMessages, settings.XnoEnrichErrorMessages), + createTestCase(settings.YdebugMacros , settings.XdebugMacros), + // createTestCase(settings.YjavaTasty , settings.XjavaTasty), + // createTestCase(settings.YearlyTastyOutput , settings.XearlyTastyOutput), + // createTestCase(settings.YallowOutlineFromTasty, settings.XallowOutlineFromTasty), + createTestCase(settings.YcheckInit , settings.WcheckInit), + createTestCase(settings.Xlint , settings.Wshadow), + ).map: (deprecatedArgument, newSetting) => + val args = List(deprecatedArgument) + val argSummary = ArgsSummary(settings.defaultState, args, errors = Nil, 
warnings = Nil) + val conf = settings.processArguments(argSummary, processAll = true, skipped = Nil) + assert(newSetting.isDefaultIn(conf.sstate), s"Setting $deprecatedArgument was forwarded to ${newSetting.name}, when it should be ignored because first option was erroneous") + + // -Xlint was handled in a special way when it was added, making it hard to deprecate it. + // From now on we will retain old behavior, in next version we will emit deprecation warning. + // It is also scheduled for removal in future versions. + @Test def `Make Xlint to ignore invalid args`: Unit = + val settings = ScalaSettings + val args = List("-Xlint:-unused,_") + val argSummary = ArgsSummary(settings.defaultState, args, errors = Nil, warnings = Nil) + val conf = settings.processArguments(argSummary, processAll = true, skipped = Nil) + assert(conf.warnings.contains("Option -Xlint is deprecated: Use -Wshadow to enable shadowing lints. Scheduled for removal.")) + assert(conf.errors.isEmpty) + + @nowarn("cat=deprecation") + @Test def `Deprecated options aliases are correctly mapped to their replacements`: Unit = + def createTestCase(oldSetting: Setting[_], newSetting: Setting[_], value: String = "") = + oldSetting.aliases.map: alias => + s"$alias$value" -> newSetting + + val settings = ScalaSettings + List( + createTestCase(settings.YtermConflict , settings.XtermConflict, ":package"), + createTestCase(settings.YnoGenericSig , settings.XnoGenericSig), + createTestCase(settings.Ydumpclasses , settings.Xdumpclasses,":./"), + createTestCase(settings.YjarCompressionLevel , settings.XjarCompressionLevel,":0"), + createTestCase(settings.YkindProjector , settings.XkindProjector, ":underscores"), + createTestCase(settings.YdropComments , settings.XdropComments), + createTestCase(settings.YcookComments , settings.XcookComments), + createTestCase(settings.YreadComments , settings.XreadComments), + createTestCase(settings.YnoDecodeStacktraces , settings.XnoDecodeStacktraces), + 
createTestCase(settings.YnoEnrichErrorMessages, settings.XnoEnrichErrorMessages), + createTestCase(settings.YdebugMacros , settings.XdebugMacros), + // createTestCase(settings.YjavaTasty , settings.XjavaTasty), + // createTestCase(settings.YearlyTastyOutput , settings.XearlyTastyOutput, ":./"), + // createTestCase(settings.YallowOutlineFromTasty, settings.XallowOutlineFromTasty), + createTestCase(settings.YcheckInit , settings.WcheckInit), + // createTestCase(settings.Xlint , settings.Wshadow, ":all"), // this setting is not going to be mapped to replacement. Read more in the commit message + ).flatten.map: (deprecatedArgument, newSetting) => + val args = List(deprecatedArgument) + val argSummary = ArgsSummary(settings.defaultState, args, errors = Nil, warnings = Nil) + val conf = settings.processArguments(argSummary, processAll = true, skipped = Nil) + assert(!newSetting.isDefaultIn(conf.sstate), s"Setting alias $deprecatedArgument was not forwarded to ${newSetting.name}") + @Test def `i18367 rightmost WConf flags take precedence over flags to the left`: Unit = import reporting.{Action, Diagnostic} val sets = ScalaSettings @@ -96,5 +202,100 @@ class ScalaSettingsTests: assertEquals(Action.Silent, sut.action(depr)) + private def wconfSrcFilterTest(argsStr: String, + warning: reporting.Diagnostic.Warning): Either[List[String], reporting.Action] = + import reporting.Diagnostic + val settings = ScalaSettings + val args = ArgsSummary(settings.defaultState, List(argsStr), errors = Nil, warnings = Nil) + val proc = settings.processArguments(args, processAll = true, skipped = Nil) + val wconfStr = settings.Wconf.valueIn(proc.sstate) + val wconf = reporting.WConf.fromSettings(wconfStr) + wconf.map(_.action(warning)) + + @Test def `WConf src filter silences warnings from a matching path for virtual file`: Unit = + val result = wconfSrcFilterTest( + argsStr = "-Wconf:src=path/.*:s", + warning = reporting.Diagnostic.Warning( + "A warning".toMessage, + util.SourcePosition( + 
source = util.SourceFile.virtual(new URI("file:///some/path/file.scala"), ""), + span = util.Spans.Span(1L) + ) + ) + ) + assertEquals(result, Right(reporting.Action.Silent)) + + @Test def `WConf src filter doesn't silence warnings from a non-matching path`: Unit = + val result = wconfSrcFilterTest( + argsStr = "-Wconf:src=another/.*:s", + warning = reporting.Diagnostic.Warning( + "A warning".toMessage, + util.SourcePosition( + source = util.SourceFile.virtual(new URI("file:///some/path/file.scala"), ""), + span = util.Spans.Span(1L) + ) + ) + ) + assertEquals(result, Right(reporting.Action.Warning)) + + @Test def `WConf src filter silences warnings from a matching path for real file`: Unit = + val result = Using.resource(Files.createTempFile("myfile", ".scala").nn) { file => + wconfSrcFilterTest( + argsStr = "-Wconf:src=myfile.*?\\.scala:s", + warning = reporting.Diagnostic.Warning( + "A warning".toMessage, + util.SourcePosition( + source = util.SourceFile(new PlainFile(Path(file)), "UTF-8"), + span = util.Spans.Span(1L) + ) + ) + ) + }(Files.deleteIfExists(_)) + assertEquals(result, Right(reporting.Action.Silent)) + + @Test def `WConf src filter doesn't silence warnings from a non-matching path for real file`: Unit = + val result = Using.resource(Files.createTempFile("myfile", ".scala").nn) { file => + wconfSrcFilterTest( + argsStr = "-Wconf:src=another.*?\\.scala:s", + warning = reporting.Diagnostic.Warning( + "A warning".toMessage, + util.SourcePosition( + source = util.SourceFile(new PlainFile(Path(file)), "UTF-8"), + span = util.Spans.Span(1L) + ) + ) + ) + }(Files.deleteIfExists(_)) + assertEquals(result, Right(reporting.Action.Warning)) + + @Test def `WConf src filter reports an error on an invalid regex`: Unit = + val result = wconfSrcFilterTest( + argsStr = """-Wconf:src=\:s""", + warning = reporting.Diagnostic.Warning( + "A warning".toMessage, + util.SourcePosition( + source = util.SourceFile.virtual(new URI("file:///some/path/file.scala"), ""), + span = 
util.Spans.Span(1L) + ) + ), + ) + assertTrue( + result.left.exists(errors => + errors.sizeIs == 1 && errors.headOption.exists(_.startsWith("invalid pattern")) + ) + ) + + @Test def `WConf src filter can be mixed with other filters with rightmost taking precedence`: Unit = + val result = wconfSrcFilterTest( + argsStr = "-Wconf:src=.*:s,cat=deprecation:e", + warning = reporting.Diagnostic.DeprecationWarning( + "A warning".toMessage, + util.SourcePosition( + source = util.SourceFile.virtual(new URI("file:///some/path/file.scala"), ""), + span = util.Spans.Span(1L) + ) + ) + ) + assertEquals(result, Right(reporting.Action.Error)) end ScalaSettingsTests diff --git a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala index 4daaf86f2fb0..072944da1349 100644 --- a/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala +++ b/compiler/test/dotty/tools/dotc/core/tasty/CommentPicklingTest.scala @@ -98,7 +98,7 @@ class CommentPicklingTest { Main.process(options.all, reporter) assertFalse("Compilation failed.", reporter.hasErrors) - val tastyFiles = Path.onlyFiles(out.walkFilter(_.extension == "tasty")).toList + val tastyFiles = Path.onlyFiles(out.walkFilter(_.ext.isTasty)).toList val unpicklingOptions = unpickleOptions .withClasspath(out.toAbsolute.toString) .and("dummy") // Need to pass a dummy source file name @@ -110,14 +110,14 @@ class CommentPicklingTest { private class UnpicklingDriver extends Driver { override def initCtx = val ctx = super.initCtx.fresh - ctx.setSetting(ctx.settings.YreadComments, true) + ctx.setSetting(ctx.settings.XreadComments, true) ctx def unpickle[T](args: Array[String], files: List[File])(fn: (List[tpd.Tree], Context) => T): T = { implicit val ctx: Context = setup(args, initCtx).map(_._2).getOrElse(initCtx) ctx.initialize() val trees = files.flatMap { f => - val unpickler = new DottyUnpickler(AbstractFile.getFile(f.jpath), f.toByteArray()) + val 
unpickler = new DottyUnpickler(AbstractFile.getFile(f.jpath), f.toByteArray(), isBestEffortTasty = false) unpickler.enter(roots = Set.empty) unpickler.rootTrees(using ctx) } diff --git a/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala b/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala index 66463e3ff66c..326a2dc87b2a 100644 --- a/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala +++ b/compiler/test/dotty/tools/dotc/core/tasty/PathPicklingTest.scala @@ -48,7 +48,7 @@ class PathPicklingTest { val jar = JarArchive.open(Path(s"$out/out.jar"), create = false) try for file <- jar.iterator() if file.name.endsWith(".tasty") do - sb.append(TastyPrinter.showContents(file.toByteArray, noColor = true)) + sb.append(TastyPrinter.showContents(file.toByteArray, noColor = true, isBestEffortTasty = false)) finally jar.close() sb.toString() diff --git a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala index 2c970e93f573..382c029c86e0 100644 --- a/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala +++ b/compiler/test/dotty/tools/dotc/printing/PrintingTest.scala @@ -25,7 +25,7 @@ import java.io.File class PrintingTest { def options(phase: String, flags: List[String]) = - List(s"-Xprint:$phase", "-color:never", "-classpath", TestConfiguration.basicClasspath) ::: flags + List(s"-Xprint:$phase", "-color:never", "-nowarn", "-classpath", TestConfiguration.basicClasspath) ::: flags private def compileFile(path: JPath, phase: String): Boolean = { val baseFilePath = path.toString.stripSuffix(".scala") @@ -51,7 +51,7 @@ class PrintingTest { def testIn(testsDir: String, phase: String) = val res = Directory(testsDir).list.toList - .filter(f => f.extension == "scala") + .filter(f => f.ext.isScala) .map { f => compileFile(f.jpath, phase) } val failed = res.filter(!_) diff --git a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala 
b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala index a96a2765d56a..953dd16e170b 100644 --- a/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala +++ b/compiler/test/dotty/tools/dotc/reporting/TestReporter.scala @@ -71,10 +71,12 @@ extends Reporter with UniqueMessagePositions with HideNonSensicalMessages with M } if dia.level >= WARNING then - _diagnosticBuf.append(dia) _consoleReporter.doReport(dia) + _diagnosticBuf.append(dia) printMessageAndPos(dia, extra) } + + override def printSummary()(using Context): Unit = () } object TestReporter { diff --git a/compiler/test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala b/compiler/test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala index 489dc0f1759c..49fd3ee68d5f 100644 --- a/compiler/test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala +++ b/compiler/test/dotty/tools/dotc/sbt/ProgressCallbackTest.scala @@ -97,7 +97,11 @@ final class ProgressCallbackTest extends DottyTest: locally: // (4) assert that the final progress recorded is at the target phase, // and progress is equal to the number of phases before the target. - val (befores, target +: next +: _) = runnableSubPhases.span(_ != targetPhase): @unchecked + // + // (4.1) extract the real befores by looking at the runnable phases + val (befores, target +: _) = runnableSubPhases.span(_ != targetPhase): @unchecked + // (4.2) extract the predicted next phase by looking at all phases + val (_, `target` +: next +: _) = allSubPhases.span(_ != targetPhase): @unchecked // (4.1) we expect cancellation to occur *as we enter* the target phase, // so no units should be visited in this phase. Therefore progress // should be equal to the number of phases before the target. 
(as we have 1 unit) diff --git a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala index f538d9534cd9..4ed59db5c10e 100644 --- a/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala +++ b/compiler/test/dotty/tools/dotc/transform/PatmatExhaustivityTest.scala @@ -52,7 +52,7 @@ class PatmatExhaustivityTest { /** A single test with multiple files grouped in a folder */ private def compileDir(path: JPath): Boolean = { val files = Directory(path).list.toList - .filter(f => f.extension == "scala" || f.extension == "java" ) + .filter(_.ext.isScalaOrJava) .map(_.jpath) val actualLines = compile(files) @@ -65,7 +65,7 @@ class PatmatExhaustivityTest { def patmatExhaustivity: Unit = { val blacklisted = TestSources.patmatExhaustivityScala2LibraryTastyBlacklisted.toSet val res = Directory(testsDir).list.toList - .filter(f => f.extension == "scala" || f.isDirectory) + .filter(f => f.ext.isScala || f.isDirectory) .filter { f => val path = if f.isDirectory then f.path + "/" else f.path Properties.testsFilter.isEmpty || Properties.testsFilter.exists(path.contains) @@ -74,7 +74,7 @@ class PatmatExhaustivityTest { .map(f => if f.isDirectory then compileDir(f.jpath) else compileFile(f.jpath)) val failed = res.filter(!_) - val ignored = Directory(testsDir).list.toList.filter(_.extension == "ignore") + val ignored = Directory(testsDir).list.toList.filter(_.ext.toLowerCase.equalsIgnoreCase("ignore")) val msg = s"Total: ${res.length + ignored.length}, Failed: ${failed.length}, Ignored: ${ignored.length}" diff --git a/compiler/test/dotty/tools/io/ClasspathTest.scala b/compiler/test/dotty/tools/io/ClasspathTest.scala index a0fef65afdec..333f2b8062b0 100755 --- a/compiler/test/dotty/tools/io/ClasspathTest.scala +++ b/compiler/test/dotty/tools/io/ClasspathTest.scala @@ -15,6 +15,8 @@ class ClasspathTest { def pathsep = sys.props("path.separator") + def isWindows: Boolean = 
scala.util.Properties.isWin + // // Cope with wildcard classpath entries, exercised with -classpath // @@ -23,7 +25,7 @@ class ClasspathTest { @Test def testWildcards(): Unit = val outDir = Files.createTempDirectory("classpath-test") try - val compilerLib = "dist/target/pack/lib" + val compilerLib = s"${if isWindows then "dist-win-x86_64" else "dist"}/target/pack/lib" val libdir = Paths.get(compilerLib).toFile if libdir.exists then val libjarFiles = libdir.listFiles.toList.take(5) diff --git a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala index ecae111604cf..67e63d0156a5 100644 --- a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala +++ b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala @@ -20,6 +20,14 @@ class ReplCompilerTests extends ReplTest: assertEquals("def foo: 1", storedOutput().trim) } + @Test def i18383NoWarnOnUnusedImport: Unit = { + initially { + run("import scala.collection.*") + } andThen { + println(lines().mkString("* ", "\n * ", "")) + } + } + @Test def compileTwo = initially { run("def foo: 1 = 1") @@ -509,4 +517,3 @@ class ReplHighlightTests extends ReplTest(ReplTest.defaultOptions.filterNot(_.st case class Tree(left: Tree, right: Tree) def deepTree(depth: Int): Tree deepTree(300)""") - diff --git a/compiler/test/dotty/tools/repl/TabcompleteTests.scala b/compiler/test/dotty/tools/repl/TabcompleteTests.scala index e4c3a2557e7d..f719752be353 100644 --- a/compiler/test/dotty/tools/repl/TabcompleteTests.scala +++ b/compiler/test/dotty/tools/repl/TabcompleteTests.scala @@ -122,11 +122,11 @@ class TabcompleteTests extends ReplTest { } @Test def moduleCompletion = initially { - assertEquals(List("Predef"), tabComplete("object Foo { type T = Pre")) + assertEquals(List("Predef"), tabComplete("object Foo { type T = Pred")) } @Test def i6415 = initially { - assertEquals(List("Predef"), tabComplete("object Foo { opaque type T = Pre")) + assertEquals(List("Predef"), 
tabComplete("object Foo { opaque type T = Pred")) } @Test def i6361 = initially { diff --git a/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala b/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala index cc53447cd64b..857f5ef378e7 100644 --- a/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala +++ b/compiler/test/dotty/tools/scripting/BashExitCodeTests.scala @@ -16,7 +16,11 @@ import ScriptTestEnv.* class BashExitCodeTests: private var myTmpDir: String | Null = null private lazy val tmpDir = { myTmpDir = Files.createTempDirectory("exit-code-tests").toFile.absPath; myTmpDir } - @After def cleanup(): Unit = if myTmpDir != null then io.Directory(myTmpDir).deleteRecursively() + @After def cleanup(): Unit = { + if myTmpDir != null then io.Directory(myTmpDir).deleteRecursively() + + cleanupScalaCLIDirs() + } /** Verify the exit code of running `cmd args*`. */ def verifyExit(cmd: String, args: String*)(expectedExitCode: Int): Unit = @@ -28,8 +32,8 @@ class BashExitCodeTests: s"expected $expectedExitCode but got $exitCode${pp("out", stdout)}${pp("err", stderr)}" }, expectedExitCode, exitCode) - // Helpers for running scala, scalac, and scalac without the the output directory ("raw") - def scala(args: String*) = verifyExit(scalaPath, args*) + // Helpers for running scala, scalac, and scalac without the output directory ("raw") + def scala(args: String*) = verifyExit(scalaPath, ("--power" +: args :+ "--offline" :+ "--server=false")*) def scalacRaw(args: String*) = verifyExit(scalacPath, args*) def scalac(args: String*) = scalacRaw(("-d" +: tmpDir +: args)*) @@ -38,12 +42,16 @@ class BashExitCodeTests: Files.write(Files.createTempFile(tmpDir.toPath, getClass.getSimpleName, suffix), body.getBytes(UTF_8)).absPath @Test def neg = scalac(f("@main def Test = prin"))(1) - @Test def run = scalac(f("@main def Test = ???"))(0) & scala("-classpath", tmpDir, "Test")(1) - @Test def pos = scalac(f("@main def Test = ()"))(0) & scala("-classpath", tmpDir, 
"Test")(0) + @Test def run = scalac(f("@main def Test = ???"))(0) & scala("-classpath", tmpDir, "-M", "Test")(1) + @Test def pos = scalac(f("@main def Test = ()"))(0) & scala("-classpath", tmpDir, "-M", "Test")(0) + + @Test def runNeg_script = scala(f("prin", ".sc"))(1) + @Test def runRun_script = scala(f("???", ".sc"))(1) + @Test def runPos_script = scala(f("()", ".sc"))(0) - @Test def runNeg = scala(f("@main def Test = prin", ".sc"))(1) - @Test def runRun = scala(f("@main def Test = ???", ".sc"))(1) - @Test def runPos = scala(f("@main def Test = ()", ".sc"))(0) + @Test def runNeg = scala(f("@main def Test = prin", ".scala"))(1) + @Test def runRun = scala(f("@main def Test = ???", ".scala"))(1) + @Test def runPos = scala(f("@main def Test = ()", ".scala"))(0) @Test def scNeg = scalac("-script", f("@main def Test = prin", ".sc"))(1) @Test def scRun = scalac("-script", f("@main def Test = ???", ".sc"))(1) diff --git a/compiler/test/dotty/tools/scripting/BashScriptsTests.scala b/compiler/test/dotty/tools/scripting/BashScriptsTests.scala index f3f364754e20..6af863f0fccd 100644 --- a/compiler/test/dotty/tools/scripting/BashScriptsTests.scala +++ b/compiler/test/dotty/tools/scripting/BashScriptsTests.scala @@ -5,7 +5,7 @@ package scripting import scala.language.unsafeNulls import java.nio.file.Paths -import org.junit.{Test, AfterClass} +import org.junit.{Test, Ignore, AfterClass} import org.junit.Assert.assertEquals import org.junit.Assume.assumeFalse import org.junit.experimental.categories.Category @@ -25,11 +25,13 @@ object BashScriptsTests: def testFiles = scripts("/scripting") @AfterClass def cleanup: Unit = { + cleanupScalaCLIDirs() + val af = argsfile.toFile - if (af.exists) { + if af.exists then af.delete() - } } + printf("osname[%s]\n", osname) printf("uname[%s]\n", ostypeFull) printf("using JAVA_HOME=%s\n", envJavaHome) @@ -50,7 +52,9 @@ object BashScriptsTests: val testScriptArgs = Seq( "a", "b", "c", "-repl", "-run", "-script", "-debug" ) - val 
showArgsScript = testFiles.find(_.getName == "showArgs.sc").get.absPath + val Seq(showArgsScript, showArgsScalaCli) = Seq("showArgs.sc", "showArgs_scalacli.sc").map { name => + testFiles.find(_.getName == name).get.absPath + } def testFile(name: String): String = val file = testFiles.find(_.getName == name) match { @@ -64,13 +68,13 @@ object BashScriptsTests: } file - val Seq(envtestSc, envtestScala) = Seq("envtest.sc", "envtest.scala").map { testFile(_) } + val Seq(envtestNuSc, envtestScala) = Seq("envtest_scalacli.sc", "envtest.scala").map { testFile(_) } // create command line with given options, execute specified script, return stdout def callScript(tag: String, script: String, keyPre: String): String = val keyArg = s"$keyPre=$tag" printf("pass tag [%s] via [%s] to script [%s]\n", tag, keyArg, script) - val cmd: String = Seq("SCALA_OPTS= ", scalaPath, keyArg, script).mkString(" ") + val cmd: String = Seq("SCALA_OPTS= ", scalaPath, "run", keyArg, "--power", "--offline", "--server=false", script).mkString(" ") printf("cmd: [%s]\n", cmd) val (validTest, exitCode, stdout, stderr) = bashCommand(cmd) stderr.filter { !_.contains("Inappropriate ioctl") }.foreach { System.err.printf("stderr [%s]\n", _) } @@ -84,13 +88,15 @@ class BashScriptsTests: ////////////////////////// begin tests ////////////////////// /* verify that `dist/bin/scala` correctly passes args to the jvm via -J-D for script envtest.sc */ + @Ignore // SCALA CLI does not support `-J` to pass java properties, only things like -Xmx5g @Test def verifyScJProperty = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val tag = "World1" - val stdout = callScript(tag, envtestSc, s"-J-Dkey") + val stdout = callScript(tag, envtestNuSc, s"-J-Dkey") assertEquals( s"Hello $tag", stdout) /* verify that `dist/bin/scala` correctly passes args to the jvm via -J-D for script envtest.scala */ + @Ignore // SCALA CLI does not support `-J` to pass java properties, only things 
like -Xmx5g @Test def verifyScalaJProperty = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val tag = "World2" @@ -101,7 +107,7 @@ class BashScriptsTests: @Test def verifyScDProperty = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) val tag = "World3" - val stdout = callScript(tag, envtestSc, s"-Dkey") + val stdout = callScript(tag, envtestNuSc, s"-Dkey") assertEquals(s"Hello $tag", stdout) /* verify that `dist/bin/scala` can set system properties via -D for envtest.scala */ @@ -114,7 +120,9 @@ class BashScriptsTests: /* verify that `dist/bin/scala` can set system properties via -D when executing compiled script via -jar envtest.jar */ @Test def saveAndRunWithDProperty = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - val commandline = Seq("SCALA_OPTS= ", scalaPath.relpath, "-save", envtestScala.relpath).mkString(" ") + val libOut = envtestScala.relpath.stripSuffix(".scala") + ".jar" + val commandline = Seq( + "SCALA_OPTS= ", scalaPath.relpath, "--power", "package", envtestScala.relpath, "-o", libOut, "--library", "--offline", "--server=false").mkString(" ") val (_, _, _, _) = bashCommand(commandline) // compile jar, discard output val testJar = testFile("envtest.jar") // jar is created by the previous bashCommand() if (testJar.isFile){ @@ -124,7 +132,8 @@ class BashScriptsTests: } val tag = "World5" - val commandline2 = Seq("SCALA_OPTS= ", scalaPath.relpath, s"-Dkey=$tag", testJar.relpath) + val commandline2 = Seq( + "SCALA_OPTS= ", scalaPath.relpath, "run", s"-Dkey=$tag", "-classpath", testJar.relpath, "--power", "--offline", "--server=false") printf("cmd[%s]\n", commandline2.mkString(" ")) val (validTest, exitCode, stdout, stderr) = bashCommand(commandline2.mkString(" ")) assertEquals(s"Hello $tag", stdout.mkString("/n")) @@ -148,7 +157,11 @@ class BashScriptsTests: /* verify `dist/bin/scala` non-interference 
with command line args following script name */ @Test def verifyScalaArgs = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - val commandline = (Seq("SCALA_OPTS= ", scalaPath, showArgsScript) ++ testScriptArgs).mkString(" ") + val commandline = ( + Seq("SCALA_OPTS= ", scalaPath, showArgsScalaCli) + ++ Seq("--power", "--offline", "--server=false") + ++ ("--" +: testScriptArgs) + ).mkString(" ") val (validTest, exitCode, stdout, stderr) = bashCommand(commandline) if verifyValid(validTest) then var fail = false @@ -162,13 +175,13 @@ class BashScriptsTests: assert(stdout == expectedOutput) /* - * verify that scriptPath.sc sees a valid script.path property, - * and that it's value is the path to "scriptPath.sc". + * verify that scriptPath_scalacli.sc sees a valid script.path property, + * and that it's value is the path to "scriptPath_scalacli.sc". */ @Category(Array(classOf[BootstrappedOnlyTests])) @Test def verifyScriptPathProperty = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - val scriptFile = testFiles.find(_.getName == "scriptPath.sc").get + val scriptFile = testFiles.find(_.getName == "scriptPath_scalacli.sc").get val expected = s"${scriptFile.getName}" printf("===> verify valid system property script.path is reported by script [%s]\n", scriptFile.getName) printf("calling scriptFile: %s\n", scriptFile) @@ -177,15 +190,15 @@ class BashScriptsTests: stdout.foreach { printf("stdout: [%s]\n", _) } stderr.foreach { printf("stderr: [%s]\n", _) } val valid = stdout.exists { _.endsWith(expected) } - if valid then printf("# valid script.path reported by [%s]\n", scriptFile.getName) - assert(valid, s"script ${scriptFile.absPath} did not report valid script.path value") + if valid then printf("# valid scriptPath reported by [%s]\n", scriptFile.getName) + assert(valid, s"script ${scriptFile.absPath} did not report valid scriptPath value") /* * verify SCALA_OPTS can 
specify an @argsfile when launching a scala script in `dist/bin/scala`. */ @Test def verifyScalaOpts = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - val scriptFile = testFiles.find(_.getName == "classpathReport.sc").get + val scriptFile = testFiles.find(_.getName == "classpathReport_scalacli.sc").get printf("===> verify SCALA_OPTS='@argsfile' is properly handled by `dist/bin/scala`\n") val envPairs = List(("SCALA_OPTS", s"@$argsfile")) val (validTest, exitCode, stdout, stderr) = bashCommand(scriptFile.absPath, envPairs) @@ -208,7 +221,7 @@ class BashScriptsTests: */ @Test def sqlDateTest = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - val scriptBase = "sqlDateError" + val scriptBase = "sqlDateError_scalacli" val scriptFile = testFiles.find(_.getName == s"$scriptBase.sc").get val testJar = testFile(s"$scriptBase.jar") // jar should not be created when scriptFile runs val tj = Paths.get(testJar).toFile @@ -236,7 +249,6 @@ class BashScriptsTests: printf("===> verify -e is properly handled by `dist/bin/scala`\n") val expected = "9" val expression = s"println(3*3)" - val cmd = s"bin/scala -e $expression" val (validTest, exitCode, stdout, stderr) = bashCommand(s"""bin/scala -e '$expression'""") val result = stdout.filter(_.nonEmpty).mkString("") printf("stdout: %s\n", result) diff --git a/compiler/test/dotty/tools/scripting/ClasspathTests.scala b/compiler/test/dotty/tools/scripting/ClasspathTests.scala index 4fd1211698f6..a946e509aeb3 100755 --- a/compiler/test/dotty/tools/scripting/ClasspathTests.scala +++ b/compiler/test/dotty/tools/scripting/ClasspathTests.scala @@ -11,8 +11,12 @@ import org.junit.{Test, Ignore, AfterClass} import vulpix.TestConfiguration import ScriptTestEnv.* -/** Test java command line generated by bin/scala and bin/scalac */ +object ClasspathTests: + @AfterClass def cleanup: Unit = { + cleanupScalaCLIDirs() + } +/** Test java command line 
generated by bin/scala and bin/scalac */ class ClasspathTests: /* * Test disabled (temporarily). @@ -24,7 +28,7 @@ class ClasspathTests: @Ignore @Test def hashbangClasspathVerifyTest = { // only interested in classpath test scripts - val testScriptName = "classpathReport.sc" + val testScriptName = "classpathReport_scalacli.sc" val testScript = scripts("/scripting").find { _.getName.matches(testScriptName) } match case None => sys.error(s"test script not found: ${testScriptName}") case Some(file) => file @@ -39,7 +43,7 @@ class ClasspathTests: cmd.foreach { printf("[%s]\n", _) } - // classpathReport.sc is expected to produce two lines: + // classpathReport_scalacli.sc is expected to produce two lines: // cwd: // classpath: @@ -51,10 +55,10 @@ class ClasspathTests: // convert scriptCp to a list of files val hashbangJars: List[File] = scriptCp.split(psep).map { _.toFile }.toList val hashbangClasspathJars = hashbangJars.map { _.name }.sorted.distinct // get jar basenames, remove duplicates - val packlibDir = s"$scriptCwd/$packLibDir" // classpathReport.sc specifies a wildcard classpath in this directory + val packlibDir: String = ??? /* ??? 
was s"$scriptCwd/$packLibDir" */ // classpathReport_scalacli.sc specifies a wildcard classpath in this directory val packlibJars: List[File] = listJars(packlibDir) // classpath entries expected to have been reported by the script - printf("%d jar files in dist/target/pack/lib\n", packlibJars.size) + printf(s"%d jar files in $packDir/lib\n", packlibJars.size) printf("%d test script jars in classpath\n", hashbangClasspathJars.size) val (diff: Set[File], msg: String) = if (packlibJars.size > hashbangClasspathJars.size) { @@ -63,7 +67,7 @@ class ClasspathTests: (hashbangJars.toSet -- packlibJars.toSet , "only in hashbang classpath") } // verify that the script hasbang classpath setting was effective at supplementing the classpath - // (a minimal subset of jars below dist/target/pack/lib are always be in the classpath) + // (a minimal subset of jars below dist*/target/pack/lib are always be in the classpath) val missingClasspathEntries = if hashbangClasspathJars.size != packlibJars.size then printf("packlib dir [%s]\n", packlibDir) printf("hashbangClasspathJars: %s\n", hashbangJars.map { _.relpath.norm }.mkString("\n ", "\n ", "")) @@ -77,18 +81,31 @@ class ClasspathTests: /* * verify classpath is unglobbed by MainGenericRunner. 
*/ + @Ignore @Test def unglobClasspathVerifyTest = { - val testScriptName = "unglobClasspath.sc" + val testScriptName = "unglobClasspath_scalacli.sc" val testScript = scripts("/scripting").find { _.name.matches(testScriptName) } match case None => sys.error(s"test script not found: ${testScriptName}") case Some(file) => file val relpath = testScript.toPath.relpath.norm + val scalaCommand = scalaPath.relpath.norm printf("===> unglobClasspathVerifyTest for script [%s]\n", relpath) printf("bash is [%s]\n", bashExe) if packBinScalaExists then - val bashCmdline = s"set +x ; SCALA_OPTS= $relpath" + val sv = packScalaVersion + val tastyDirGlob = s"$packMavenDir/org/scala-lang/tasty-core_3/$sv/*" + // ^^^^^^^^^^^^^ + // the classpath is a glob pattern that should be unglobbed by scala command, + // otherwise the script could not compile because it references a class + // from tasty-core + + val bashCmdline = Seq( + "set +x ;", + "SCALA_OPTS=", + scalaCommand, "run", "--classpath", s"'$tastyDirGlob'", "--power", "--offline", "--server=false", relpath + ).mkString(" ") val cmd = Array(bashExe, "-c", bashCmdline) cmd.foreach { printf("[%s]\n", _) } diff --git a/compiler/test/dotty/tools/scripting/ExpressionTest.scala b/compiler/test/dotty/tools/scripting/ExpressionTest.scala index 6b5248e67f08..bc42860253b0 100755 --- a/compiler/test/dotty/tools/scripting/ExpressionTest.scala +++ b/compiler/test/dotty/tools/scripting/ExpressionTest.scala @@ -44,7 +44,7 @@ class ExpressionTest: assert(success) def getResult(expression: String): String = - val (_, _, stdout, stderr) = bashCommand(s"$scalaPath -e '$expression'") + val (_, _, stdout, stderr) = bashCommand(s"$scalaPath -e '$expression' --power --offline --server=false") printf("stdout: %s\n", stdout.mkString("|")) printf("stderr: %s\n", stderr.mkString("\n", "\n", "")) stdout.filter(_.nonEmpty).mkString("") @@ -55,6 +55,10 @@ class ExpressionTest: object ExpressionTest: + @AfterClass def cleanup(): Unit = { + cleanupScalaCLIDirs() 
+ } + def main(args: Array[String]): Unit = val tests = new ExpressionTest println("\n=== verifyCommandLineExpression ===") diff --git a/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala b/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala index ebae5bfca6be..dd1cc04bb58a 100644 --- a/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala +++ b/compiler/test/dotty/tools/scripting/ScriptTestEnv.scala @@ -5,6 +5,7 @@ package scripting import scala.language.unsafeNulls import java.io.File +import java.util.Locale import java.nio.file.{Path, Paths, Files} import dotty.tools.dotc.config.Properties.* @@ -15,7 +16,7 @@ import scala.jdk.CollectionConverters.* /** * Common Code for supporting scripting tests. * To override the path to the bash executable, set TEST_BASH= - * To specify where `dist/target/pack/bin` resides, set TEST_CWD= + * To specify where `dist[*]/target/pack/bin` resides, set TEST_CWD= * Test scripts run in a bash env, so paths are converted to forward slash via .norm. 
*/ object ScriptTestEnv { @@ -28,6 +29,44 @@ object ScriptTestEnv { def whichJava: String = whichExe("java") def whichBash: String = whichExe("bash") + def cleanupScalaCLIDirs(): Unit = { + val scriptingDir = io.Directory(scriptsDir("/scripting").getPath) + val dottyDir = io.Directory(workingDirectory) + + val residueDirs = Seq( + (scriptingDir / ".bsp"), + (scriptingDir / ".scala-build"), + (dottyDir / ".scala-build") + ) + + for f <- residueDirs do + f.deleteRecursively() + + val bspDir = dottyDir / ".bsp" + (bspDir / "scala.json").delete() + if bspDir.isEmpty then bspDir.delete() + } + + lazy val nativePackDir: Option[String] = { + def nativeDir(os: String, arch: String) = Some(s"dist/$os-$arch/target/pack") + def nativeOs(os: String) = archNorm match + case arch @ ("aarch64" | "x86_64") => nativeDir(os, arch) + case _ => None + + if winshell then nativeDir("win", "x86_64") // assume x86_64 for now + else if linux then nativeOs("linux") + else if mac then nativeOs("mac") + else None + } + + def jvmPackDir() = + println("warning: unknown OS architecture combination, defaulting to JVM launcher.") + "dist/target/pack" + + def packDir: String = nativePackDir.getOrElse(jvmPackDir()) + + def packBinDir: String = s"$packDir/bin" + lazy val workingDirectory: String = { val dirstr = if testCwd.nonEmpty then if verbose then printf("TEST_CWD set to [%s]\n", testCwd) @@ -36,7 +75,7 @@ object ScriptTestEnv { userDir // userDir, if TEST_CWD not set // issue warning if things don't look right - val test = Paths.get(s"$dirstr/dist/target/pack/bin").normalize + val test = Paths.get(s"$dirstr/$packBinDir").normalize if !test.isDirectory then printf("warning: not found below working directory: %s\n", test.norm) @@ -46,7 +85,7 @@ object ScriptTestEnv { def envPath: String = envOrElse("PATH", "") // remove duplicate entries in path - def supplementedPath: String = s"dist/target/pack/bin$psep$envJavaHome/bin$psep$envScalaHome/bin$psep$envPath".norm + def supplementedPath: String = 
s"$packBinDir$psep$envJavaHome/bin$psep$envScalaHome/bin$psep$envPath".norm def adjustedPathEntries: List[String] = supplementedPath.norm.split(psep).toList.distinct def adjustedPath: String = adjustedPathEntries.mkString(psep) def envPathEntries: List[String] = envPath.split(psep).toList.distinct @@ -55,11 +94,18 @@ object ScriptTestEnv { def unameExe = which("uname") def ostypeFull = if unameExe.nonEmpty then exec(unameExe).mkString else "" - def ostype = ostypeFull.toLowerCase.takeWhile{ cc => cc >= 'a' && cc <='z' || cc >= 'A' && cc <= 'Z' } + def ostype = ostypeFull.toLowerCase(Locale.ROOT).takeWhile{ cc => cc >= 'a' && cc <='z' || cc >= 'A' && cc <= 'Z' } + def archFull = if unameExe.nonEmpty then exec(unameExe, "-m").mkString else "" + def archNorm = archFull match + case "arm64" => "aarch64" + case "amd64" => "x86_64" + case id => id def cygwin = ostype == "cygwin" def mingw = ostype == "mingw" def msys = ostype == "msys" + def linux = ostype == "linux" + def mac = ostype == "darwin" def winshell: Boolean = cygwin || mingw || msys def which(str: String) = @@ -124,10 +170,22 @@ object ScriptTestEnv { } yield line - def packBinDir = "dist/target/pack/bin" - def packLibDir = "dist/target/pack/lib" + // def packLibDir = s"$packDir/lib" // replaced by packMavenDir + def packMavenDir = s"$packDir/maven2" + def packVersionFile = s"$packDir/VERSION" def packBinScalaExists: Boolean = Files.exists(Paths.get(s"$packBinDir/scala")) + def packScalaVersion: String = { + val versionFile = Paths.get(packVersionFile) + if Files.exists(versionFile) then + val lines = Files.readAllLines(versionFile).asScala + lines.find { _.startsWith("version:=") } match + case Some(line) => line.drop(9) + case None => sys.error(s"no version:= found in $packVersionFile") + else + sys.error(s"no $packVersionFile found") + } + def listJars(dir: String): List[File] = val packlibDir = Paths.get(dir).toFile if packlibDir.isDirectory then @@ -217,8 +275,10 @@ object ScriptTestEnv { def toUrl: 
String = Paths.get(absPath).toUri.toURL.toString + // Used to be an extension on String // Treat norm paths with a leading '/' as absolute (Windows java.io.File#isAbsolute treats them as relative) - def isAbsolute = p.norm.startsWith("/") || (isWin && p.norm.secondChar == ":") + //@annotation.nowarn // hidden by Path#isAbsolute + //def isAbsolute = p.norm.startsWith("/") || (isWin && p.norm.secondChar == ":") } extension(f: File) { @@ -233,8 +293,8 @@ object ScriptTestEnv { lazy val cwd: Path = Paths.get(".").toAbsolutePath.normalize lazy val (scalacPath: String, scalaPath: String) = { - val scalac = s"$workingDirectory/dist/target/pack/bin/scalac".toPath.normalize - val scala = s"$workingDirectory/dist/target/pack/bin/scala".toPath.normalize + val scalac = s"$workingDirectory/$packBinDir/scalac".toPath.normalize + val scala = s"$workingDirectory/$packBinDir/scala".toPath.normalize (scalac.norm, scala.norm) } @@ -242,7 +302,7 @@ object ScriptTestEnv { // use optional TEST_BASH if defined, otherwise, bash must be in PATH // envScalaHome is: - // dist/target/pack, if present + // dist[*]/target/pack, if present // else, SCALA_HOME if defined // else, not defined lazy val envScalaHome = diff --git a/compiler/test/dotty/tools/scripting/ScriptingTests.scala b/compiler/test/dotty/tools/scripting/ScriptingTests.scala index 5ec417090504..4dc193f0efe4 100644 --- a/compiler/test/dotty/tools/scripting/ScriptingTests.scala +++ b/compiler/test/dotty/tools/scripting/ScriptingTests.scala @@ -17,7 +17,11 @@ import org.junit.Assume.assumeFalse /** Runs all tests contained in `compiler/test-resources/scripting/` */ class ScriptingTests: // classpath tests managed by scripting.ClasspathTests.scala - def testFiles = scripts("/scripting").filter { ! 
_.getName.toLowerCase.contains("classpath") } + def testFiles = scripts("/scripting").filter { sc => + val name = sc.getName.toLowerCase + !name.contains("classpath") + && !name.contains("_scalacli") + } /* * Call .scala scripts without -save option, verify no jar created @@ -47,7 +51,10 @@ class ScriptingTests: */ @Test def scriptingMainTests = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - for (scriptFile, scriptArgs) <- scalaFilesWithArgs(".sc") do + for + (scriptFile, scriptArgs) <- scalaFilesWithArgs(".sc") + if !scriptFile.getName().endsWith("Nu.sc") + do showScriptUnderTest(scriptFile) val unexpectedJar = script2jar(scriptFile) unexpectedJar.delete @@ -66,7 +73,10 @@ class ScriptingTests: */ @Test def scriptingJarTest = assumeFalse("Scripts do not yet support Scala 2 library TASTy", Properties.usingScalaLibraryTasty) - for (scriptFile, scriptArgs) <- scalaFilesWithArgs(".sc") do + for + (scriptFile, scriptArgs) <- scalaFilesWithArgs(".sc") + if !scriptFile.getName().endsWith("Nu.sc") + do showScriptUnderTest(scriptFile) val expectedJar = script2jar(scriptFile) expectedJar.delete diff --git a/compiler/test/dotty/tools/utils.scala b/compiler/test/dotty/tools/utils.scala index a8c480088e08..d17edbaa855e 100644 --- a/compiler/test/dotty/tools/utils.scala +++ b/compiler/test/dotty/tools/utils.scala @@ -20,14 +20,19 @@ import dotc.config.CommandLineParser object Dummy def scripts(path: String): Array[File] = { - val dir = new File(Dummy.getClass.getResource(path).getPath) - assert(dir.exists && dir.isDirectory, "Couldn't load scripts dir") + val dir = scriptsDir(path) dir.listFiles.filter { f => val path = if f.isDirectory then f.getPath + "/" else f.getPath Properties.testsFilter.isEmpty || Properties.testsFilter.exists(path.contains) } } +def scriptsDir(path: String): File = { + val dir = new File(Dummy.getClass.getResource(path).getPath) + assert(dir.exists && dir.isDirectory, "Couldn't load scripts dir") + 
dir +} + extension (f: File) def absPath = f.getAbsolutePath.replace('\\', '/') @@ -101,10 +106,10 @@ def toolArgsParse(lines: List[String], filename: Option[String]): List[(String,S case toolArg(name, args) => List((name, args)) case _ => Nil } ++ - lines.flatMap { + lines.flatMap { case directiveOptionsArg(args) => List(("scalac", args)) case directiveJavacOptions(args) => List(("javac", args)) - case _ => Nil + case _ => Nil } import org.junit.Test diff --git a/compiler/test/dotty/tools/vulpix/FileFilter.scala b/compiler/test/dotty/tools/vulpix/FileFilter.scala index b2aef8af038e..9f62a7db2fb6 100644 --- a/compiler/test/dotty/tools/vulpix/FileFilter.scala +++ b/compiler/test/dotty/tools/vulpix/FileFilter.scala @@ -23,4 +23,8 @@ object FileFilter { object NoFilter extends FileFilter { def accept(file: String) = true } + + object ExcludeDotFiles extends FileFilter { + def accept(file: String) = !file.startsWith(".") + } } diff --git a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala index e9975ed25b6d..09d3614b64a5 100644 --- a/compiler/test/dotty/tools/vulpix/ParallelTesting.scala +++ b/compiler/test/dotty/tools/vulpix/ParallelTesting.scala @@ -158,6 +158,12 @@ trait ParallelTesting extends RunnerOrchestration { self => } } + private sealed trait FromTastyCompilationMode + private case object NotFromTasty extends FromTastyCompilationMode + private case object FromTasty extends FromTastyCompilationMode + private case object FromBestEffortTasty extends FromTastyCompilationMode + private case class WithBestEffortTasty(bestEffortDir: JFile) extends FromTastyCompilationMode + /** A group of files that may all be compiled together, with the same flags * and output directory */ @@ -166,7 +172,7 @@ trait ParallelTesting extends RunnerOrchestration { self => files: Array[JFile], flags: TestFlags, outDir: JFile, - fromTasty: Boolean = false, + fromTasty: FromTastyCompilationMode = NotFromTasty, decompilation: 
Boolean = false ) extends TestSource { def sourceFiles: Array[JFile] = files.filter(isSourceFile) @@ -225,9 +231,11 @@ trait ParallelTesting extends RunnerOrchestration { self => private final def compileTestSource(testSource: TestSource): Try[List[TestReporter]] = Try(testSource match { case testSource @ JointCompilationSource(name, files, flags, outDir, fromTasty, decompilation) => - val reporter = - if (fromTasty) compileFromTasty(flags, outDir) - else compile(testSource.sourceFiles, flags, outDir) + val reporter = fromTasty match + case NotFromTasty => compile(testSource.sourceFiles, flags, outDir) + case FromTasty => compileFromTasty(flags, outDir) + case FromBestEffortTasty => compileFromBestEffortTasty(flags, outDir) + case WithBestEffortTasty(bestEffortDir) => compileWithBestEffortTasty(testSource.sourceFiles, bestEffortDir, flags, outDir) List(reporter) case testSource @ SeparateCompilationSource(_, dir, flags, outDir) => @@ -266,12 +274,11 @@ trait ParallelTesting extends RunnerOrchestration { self => */ final def diffTest(testSource: TestSource, checkFile: JFile, actual: List[String], reporters: Seq[TestReporter], logger: LoggedRunnable) = { for (msg <- FileDiff.check(testSource.title, actual, checkFile.getPath)) { - onFailure(testSource, reporters, logger, Some(msg)) - if (updateCheckFiles) { FileDiff.dump(checkFile.toPath.toString, actual) echo("Updated checkfile: " + checkFile.getPath) } else { + onFailure(testSource, reporters, logger, Some(msg)) val outFile = checkFile.toPath.resolveSibling(s"${checkFile.toPath.getFileName}.out").toString FileDiff.dump(outFile, actual) echo(FileDiff.diffMessage(checkFile.getPath, outFile)) @@ -665,6 +672,31 @@ trait ParallelTesting extends RunnerOrchestration { self => reporter + protected def compileFromBestEffortTasty(flags0: TestFlags, targetDir: JFile): TestReporter = { + val classes = flattenFiles(targetDir).filter(isBestEffortTastyFile).map(_.toString) + val flags = flags0 and "-from-tasty" and 
"-Ywith-best-effort-tasty" + val reporter = mkReporter + val driver = new Driver + + driver.process(flags.all ++ classes, reporter = reporter) + + reporter + } + + protected def compileWithBestEffortTasty(files0: Array[JFile], bestEffortDir: JFile, flags0: TestFlags, targetDir: JFile): TestReporter = { + val flags = flags0 + .and("-Ywith-best-effort-tasty") + .and("-d", targetDir.getPath) + val reporter = mkReporter + val driver = new Driver + + val args = Array("-classpath", flags.defaultClassPath + JFile.pathSeparator + bestEffortDir.toString) ++ flags.options + + driver.process(args ++ files0.map(_.toString), reporter = reporter) + + reporter + } + protected def compileFromTasty(flags0: TestFlags, targetDir: JFile): TestReporter = { val tastyOutput = new JFile(targetDir.getPath + "_from-tasty") tastyOutput.mkdir() @@ -988,6 +1020,22 @@ trait ParallelTesting extends RunnerOrchestration { self => override def maybeFailureMessage(testSource: TestSource, reporters: Seq[TestReporter]): Option[String] = None } + private final class NoBestEffortErrorsTest(testSources: List[TestSource], times: Int, threadLimit: Option[Int], suppressAllOutput: Boolean)(implicit summaryReport: SummaryReporting) + extends Test(testSources, times, threadLimit, suppressAllOutput) { + override def suppressErrors = true + override def maybeFailureMessage(testSource: TestSource, reporters: Seq[TestReporter]): Option[String] = + val unsucceffulBestEffortErrorMsg = "Unsuccessful best-effort compilation." 
+ val failedBestEffortCompilation: Seq[TestReporter] = + reporters.collect{ + case testReporter if testReporter.errors.exists(_.msg.message.startsWith(unsucceffulBestEffortErrorMsg)) => + testReporter + } + if !failedBestEffortCompilation.isEmpty then + Some(failedBestEffortCompilation.flatMap(_.consoleOutput.split("\n")).mkString("\n")) + else + None + } + /** The `CompilationTest` is the main interface to `ParallelTesting`, it * can be instantiated via one of the following methods: @@ -1127,12 +1175,28 @@ trait ParallelTesting extends RunnerOrchestration { self => def checkWarnings()(implicit summaryReport: SummaryReporting): this.type = checkPass(new WarnTest(targets, times, threadLimit, shouldFail || shouldSuppressOutput), "Warn") + /** Creates a "neg" test run, which makes sure that each test manages successful + * best-effort compilation, without any errors related to pickling/unpickling + * of betasty files. + */ + def checkNoBestEffortError()(implicit summaryReport: SummaryReporting): this.type = { + val test = new NoBestEffortErrorsTest(targets, times, threadLimit, shouldFail || shouldSuppressOutput).executeTestSuite() + + cleanup() + + if (test.didFail) { + fail("Best-effort test should not have shown a \"Unsuccessful best-effort compilation\" error, but did") + } + + this + } + /** Creates a "neg" test run, which makes sure that each test generates the * correct number of errors at the correct positions. It also makes sure * that none of these tests crashes the compiler. 
*/ def checkExpectedErrors()(implicit summaryReport: SummaryReporting): this.type = - val test = new NegTest(targets, times, threadLimit, shouldFail || shouldSuppressOutput).executeTestSuite() + val test = new NegTest(targets, times, threadLimit, shouldSuppressOutput).executeTestSuite() cleanup() @@ -1347,7 +1411,7 @@ trait ParallelTesting extends RunnerOrchestration { self => private def compilationTargets(sourceDir: JFile, fileFilter: FileFilter = FileFilter.NoFilter): (List[JFile], List[JFile]) = sourceDir.listFiles.foldLeft((List.empty[JFile], List.empty[JFile])) { case ((dirs, files), f) => if (!fileFilter.accept(f.getName)) (dirs, files) - else if (f.isDirectory) (f :: dirs, files) + else if (f.isDirectory && FileFilter.ExcludeDotFiles.accept(f.getName)) (f :: dirs, files) else if (isSourceFile(f)) (dirs, f :: files) else (dirs, files) } @@ -1504,7 +1568,7 @@ trait ParallelTesting extends RunnerOrchestration { self => flags: TestFlags, outDir: JFile, fromTasty: Boolean = false, - ) extends JointCompilationSource(name, Array(file), flags, outDir, fromTasty) { + ) extends JointCompilationSource(name, Array(file), flags, outDir, if (fromTasty) FromTasty else NotFromTasty) { override def buildInstructions(errors: Int, warnings: Int): String = { val runOrPos = if (file.getPath.startsWith(s"tests${JFile.separator}run${JFile.separator}")) "run" else "pos" @@ -1538,6 +1602,147 @@ trait ParallelTesting extends RunnerOrchestration { self => ) } + /** A two step compilation test for best effort compilation pickling and unpickling. + * + * First, erroring neg test files are compiled with the `-Ybest-effort` option. + * If successful, then the produced Best Effort TASTy is re-compiled with + * '-Ywith-best-effort-tasty' to test the TastyReader for Best Effort TASTy. 
+ */ + def compileBestEffortTastyInDir(f: String, flags: TestFlags, picklingFilter: FileFilter, unpicklingFilter: FileFilter)( + implicit testGroup: TestGroup): BestEffortOptionsTest = { + val bestEffortFlag = "-Ybest-effort" + val semanticDbFlag = "-Xsemanticdb" + assert(!flags.options.contains(bestEffortFlag), "Best effort compilation flag should not be added manually") + + val outDir = defaultOutputDir + testGroup + JFile.separator + val sourceDir = new JFile(f) + checkRequirements(f, sourceDir, outDir) + + val (dirsStep1, filteredPicklingFiles) = compilationTargets(sourceDir, picklingFilter) + val (dirsStep2, filteredUnpicklingFiles) = compilationTargets(sourceDir, unpicklingFilter) + + class BestEffortCompilation( + name: String, + file: JFile, + flags: TestFlags, + outputDir: JFile + ) extends JointCompilationSource(name, Array(file), flags.and(bestEffortFlag).and(semanticDbFlag), outputDir) { + override def buildInstructions(errors: Int, warnings: Int): String = { + s"""| + |Test '$title' compiled with a compiler crash, + |the test can be reproduced by running: + | + | sbt "scalac $bestEffortFlag $semanticDbFlag $file" + | + |These tests can be disabled by adding `${file.getName}` to `compiler${JFile.separator}test${JFile.separator}dotc${JFile.separator}neg-best-effort-pickling.blacklist` + |""".stripMargin + } + } + + class CompilationFromBestEffortTasty( + name: String, + file: JFile, + flags: TestFlags, + bestEffortDir: JFile, + ) extends JointCompilationSource(name, Array(file), flags, bestEffortDir, fromTasty = FromBestEffortTasty) { + + override def buildInstructions(errors: Int, warnings: Int): String = { + def beTastyFiles(file: JFile): Array[JFile] = + file.listFiles.flatMap { innerFile => + if (innerFile.isDirectory) beTastyFiles(innerFile) + else if (isBestEffortTastyFile(innerFile)) Array(innerFile) + else Array.empty[JFile] + } + val beTastyFilesString = beTastyFiles(bestEffortDir).mkString(" ") + s"""| + |Test '$title' compiled with a compiler 
crash, + |the test can be reproduced by running: + | + | sbt "scalac -Ybest-effort $file" + | sbt "scalac --from-tasty -Ywith-best-effort-tasty $beTastyFilesString" + | + |These tests can be disabled by adding `${file.getName}` to `compiler${JFile.separator}test${JFile.separator}dotc${JFile.separator}neg-best-effort-unpickling.blacklist` + | + |""".stripMargin + } + } + + val (bestEffortTargets, targetAndBestEffortDirs) = + filteredPicklingFiles.map { f => + val outputDir = createOutputDirsForFile(f, sourceDir, outDir) + val bestEffortDir = new JFile(outputDir, s"META-INF${JFile.separator}best-effort") + ( + BestEffortCompilation(testGroup.name, f, flags, outputDir), + (f, bestEffortDir) + ) + }.unzip + val (_, bestEffortDirs) = targetAndBestEffortDirs.unzip + val fileToBestEffortDirMap = targetAndBestEffortDirs.toMap + + val picklingSet = filteredPicklingFiles.toSet + val fromTastyTargets = + filteredUnpicklingFiles.filter(picklingSet.contains(_)).map { f => + val bestEffortDir = fileToBestEffortDirMap(f) + new CompilationFromBestEffortTasty(testGroup.name, f, flags, bestEffortDir) + } + + new BestEffortOptionsTest( + new CompilationTest(bestEffortTargets).keepOutput, + new CompilationTest(fromTastyTargets).keepOutput, + bestEffortDirs, + shouldDelete = true + ) + } + + /** A two step integration test for best effort compilation. + * + * Directories found in the directory `f` represent separate tests and must contain + * the 'err' and 'main' directories. First the (erroring) contents of the 'err' + * directory are compiled with the `Ybest-effort` option. + * Then, are the contents of 'main' are compiled with the previous best effort directory + * on the classpath using the option `-Ywith-best-effort-tasty`. 
+ */ + def compileBestEffortIntegration(f: String, flags: TestFlags)(implicit testGroup: TestGroup) = { + val bestEffortFlag = "-Ybest-effort" + val semanticDbFlag = "-Xsemanticdb" + val withBetastyFlag = "-Ywith-best-effort-tasty" + val sourceDir = new JFile(f) + val dirs = sourceDir.listFiles.toList + assert(dirs.forall(_.isDirectory), s"All files in $f have to be directories.") + + val (step1Targets, step2Targets, bestEffortDirs) = dirs.map { dir => + val step1SourceDir = new JFile(dir, "err") + val step2SourceDir = new JFile(dir, "main") + + val step1SourceFiles = step1SourceDir.listFiles + val step2SourceFiles = step2SourceDir.listFiles + + val outDir = defaultOutputDir + testGroup + JFile.separator + dir.getName().toString + JFile.separator + + val step1OutDir = createOutputDirsForDir(step1SourceDir, step1SourceDir, outDir) + val step2OutDir = createOutputDirsForDir(step2SourceDir, step2SourceDir, outDir) + + val step1Compilation = JointCompilationSource( + testGroup.name, step1SourceFiles, flags.and(bestEffortFlag).and(semanticDbFlag), step1OutDir, fromTasty = NotFromTasty + ) + + val bestEffortDir = new JFile(step1OutDir, s"META-INF${JFile.separator}best-effort") + + val step2Compilation = JointCompilationSource( + testGroup.name, step2SourceFiles, flags.and(withBetastyFlag).and(semanticDbFlag), step2OutDir, fromTasty = WithBestEffortTasty(bestEffortDir) + ) + (step1Compilation, step2Compilation, bestEffortDir) + }.unzip3 + + BestEffortOptionsTest( + new CompilationTest(step1Targets).keepOutput, + new CompilationTest(step2Targets).keepOutput, + bestEffortDirs, + true + ) + } + + class TastyCompilationTest(step1: CompilationTest, step2: CompilationTest, shouldDelete: Boolean)(implicit testGroup: TestGroup) { def keepOutput: TastyCompilationTest = @@ -1564,6 +1769,35 @@ trait ParallelTesting extends RunnerOrchestration { self => } } + class BestEffortOptionsTest(step1: CompilationTest, step2: CompilationTest, bestEffortDirs: List[JFile], shouldDelete: 
Boolean)(implicit testGroup: TestGroup) { + + def checkNoCrash()(implicit summaryReport: SummaryReporting): this.type = { + step1.checkNoBestEffortError() // Compile all files to generate the class files with best effort tasty + step2.checkNoBestEffortError() // Compile with best effort tasty + + if (shouldDelete) { + CompilationTest.aggregateTests(step1, step2).delete() + def delete(file: JFile): Unit = { + if (file.isDirectory) file.listFiles.foreach(delete) + try Files.delete(file.toPath) + catch { + case _: NoSuchFileException => // already deleted, everything's fine + } + } + bestEffortDirs.foreach(t => delete(t)) + } + + this + } + + def noCrashWithCompilingDependencies()(implicit summaryReport: SummaryReporting): this.type = { + step1.checkNoBestEffortError() // Compile all files to generate the class files with best effort tasty + step2.checkCompile() // Compile with best effort tasty + + this + } + } + /** This function behaves similar to `compileFilesInDir` but it ignores * sub-directories and as such, does **not** perform separate compilation * tests. 
@@ -1601,4 +1835,7 @@ object ParallelTesting { def isTastyFile(f: JFile): Boolean = f.getName.endsWith(".tasty") + def isBestEffortTastyFile(f: JFile): Boolean = + f.getName.endsWith(".betasty") + } diff --git a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala index 1defe3f4f53d..e97ef47e6fef 100644 --- a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala +++ b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala @@ -52,6 +52,7 @@ object TestConfiguration { lazy val scalaJSClasspath = mkClasspath(List( Properties.scalaJSJavalib, + Properties.scalaJSScalalib, Properties.scalaJSLibrary, Properties.dottyLibraryJS )) @@ -69,6 +70,7 @@ object TestConfiguration { val noYcheckCommonOptions = Array("-indent") ++ checkOptions ++ noCheckOptions val defaultOptions = TestFlags(basicClasspath, commonOptions) val noYcheckOptions = TestFlags(basicClasspath, noYcheckCommonOptions) + val bestEffortBaselineOptions = TestFlags(basicClasspath, noCheckOptions) val unindentOptions = TestFlags(basicClasspath, Array("-no-indent") ++ checkOptions ++ noCheckOptions ++ yCheckOptions) val withCompilerOptions = defaultOptions.withClasspath(withCompilerClasspath).withRunClasspath(withCompilerClasspath) @@ -89,6 +91,8 @@ object TestConfiguration { val picklingWithCompilerOptions = picklingOptions.withClasspath(withCompilerClasspath).withRunClasspath(withCompilerClasspath) + val explicitNullsOptions = defaultOptions and "-Yexplicit-nulls" + /** Default target of the generated class files */ private def defaultTarget: String = { import scala.util.Properties.isJavaAtLeast diff --git a/dist/bin-native-overrides/cli-common-platform b/dist/bin-native-overrides/cli-common-platform new file mode 100644 index 000000000000..1a11c770f91a --- /dev/null +++ b/dist/bin-native-overrides/cli-common-platform @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +if [[ ${cygwin-} || ${mingw-} || ${msys-} ]]; then + SCALA_CLI_VERSION="" + # iterate 
through lines in VERSION_SRC + while IFS= read -r line; do + # if line starts with "version:=" then extract the version + if [[ "$line" == cli_version:=* ]]; then + SCALA_CLI_VERSION="${line#cli_version:=}" + break + fi + done < "$PROG_HOME/EXTRA_PROPERTIES" + SCALA_CLI_CMD_BASH=("\"$PROG_HOME/bin/scala-cli\"" "--cli-version \"$SCALA_CLI_VERSION\"") +else + SCALA_CLI_CMD_BASH=("\"$PROG_HOME/bin/scala-cli\"") +fi diff --git a/dist/bin-native-overrides/cli-common-platform.bat b/dist/bin-native-overrides/cli-common-platform.bat new file mode 100644 index 000000000000..d1c4f1c4716b --- /dev/null +++ b/dist/bin-native-overrides/cli-common-platform.bat @@ -0,0 +1,22 @@ +@echo off + +setlocal enabledelayedexpansion + +set "_SCALA_CLI_VERSION=" +@rem read for cli_version:=_SCALA_CLI_VERSION in EXTRA_PROPERTIES file +FOR /F "usebackq delims=" %%G IN ("%_PROG_HOME%\EXTRA_PROPERTIES") DO ( + SET "line=%%G" + IF "!line:~0,13!"=="cli_version:=" ( + SET "_SCALA_CLI_VERSION=!line:~13!" + GOTO :foundCliVersion + ) +) + +@REM we didn't find it, so we should fail +echo "ERROR: cli_version not found in EXTRA_PROPERTIES file" +exit /b 1 + +:foundCliVersion +endlocal & set "SCALA_CLI_VERSION=%_SCALA_CLI_VERSION%" + +set SCALA_CLI_CMD_WIN="%_PROG_HOME%\bin\scala-cli.exe" "--cli-version" "%SCALA_CLI_VERSION%" diff --git a/dist/bin/cli-common b/dist/bin/cli-common new file mode 100644 index 000000000000..d295d58916da --- /dev/null +++ b/dist/bin/cli-common @@ -0,0 +1,160 @@ +#!/usr/bin/env bash + +#/*-------------------------------------------------------------------------- +# * Credits: This script is based on the script generated by sbt-pack. +# *--------------------------------------------------------------------------*/ + +# save terminal settings +saved_stty=$(stty -g 2>/dev/null) +# clear on error so we don't later try to restore them +if [[ ! $? 
]]; then + saved_stty="" +fi + +# restore stty settings (echo in particular) +function restoreSttySettings() { + stty $saved_stty + saved_stty="" +} + +scala_exit_status=127 +function onExit() { + [[ "$saved_stty" != "" ]] && restoreSttySettings + exit $scala_exit_status +} + +#/*-------------------------------------------------------------------------- +# * SECTION FOR JAVA COMMAND +# *--------------------------------------------------------------------------*/ + +# to reenable echo if we are interrupted before completing. +trap onExit INT TERM EXIT + +unset cygwin mingw msys darwin conemu + +# COLUMNS is used together with command line option '-pageWidth'. +if command -v tput >/dev/null 2>&1; then + export COLUMNS="$(tput -Tdumb cols)" +fi + +case "`uname`" in + CYGWIN*) cygwin=true + ;; + MINGW*) mingw=true + ;; + MSYS*) msys=true + ;; + Darwin*) darwin=true + if [ -z "$JAVA_VERSION" ] ; then + JAVA_VERSION="CurrentJDK" + else + echo "Using Java version: $JAVA_VERSION" 1>&2 + fi + if [ -z "$JAVA_HOME" ] ; then + JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/${JAVA_VERSION}/Home + fi + JAVACMD="`which java`" + ;; +esac + +unset CYGPATHCMD +if [[ ${cygwin-} || ${mingw-} || ${msys-} ]]; then + # ConEmu terminal is incompatible with jna-5.*.jar + [[ (${CONEMUANSI-} || ${ConEmuANSI-}) ]] && conemu=true + # cygpath is used by various windows shells: cygwin, git-sdk, gitbash, msys, etc. + CYGPATHCMD=`which cygpath 2>/dev/null` + case "$TERM" in + rxvt* | xterm* | cygwin*) + stty -icanon min 1 -echo + JAVA_OPTS="$JAVA_OPTS -Djline.terminal=unix" + ;; + esac +fi + +# Resolve JAVA_HOME from javac command path +if [ -z "$JAVA_HOME" ]; then + javaExecutable="`which javac`" + if [ -n "$javaExecutable" -a -f "$javaExecutable" -a ! "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=`which readlink` + if [ ! 
`expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + javaExecutable="`readlink -f \"$javaExecutable\"`" + javaHome="`dirname \"$javaExecutable\"`" + javaHome=`expr "$javaHome" : '\(.*\)/bin'` + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "${JAVACMD-}" ] ; then + if [ -n "${JAVA_HOME-}" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="`which java`" + fi +fi + +if [ ! -x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." + echo " We cannot execute $JAVACMD" + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." +fi + +CLASSPATH_SUFFIX="" +# Path separator used in EXTRA_CLASSPATH +PSEP=":" + +# translate paths to Windows-mixed format before running java +if [ -n "${CYGPATHCMD-}" ]; then + [ -n "${PROG_HOME-}" ] && + PROG_HOME=`"$CYGPATHCMD" -am "$PROG_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`"$CYGPATHCMD" -am "$JAVA_HOME"` + CLASSPATH_SUFFIX=";" + PSEP=";" +elif [[ ${mingw-} || ${msys-} ]]; then + # For Mingw / Msys, convert paths from UNIX format before anything is touched + [ -n "$PROG_HOME" ] && + PROG_HOME="`(cd "$PROG_HOME"; pwd -W | sed 's|/|\\\\|g')`" + [ -n "$JAVA_HOME" ] && + JAVA_HOME="`(cd "$JAVA_HOME"; pwd -W | sed 's|/|\\\\|g')`" + CLASSPATH_SUFFIX=";" + PSEP=";" +fi + +#/*-------------------------------------------------- +# * The code below is for Dotty +# *-------------------------------------------------*/ + +find_lib () { + for lib in "$PROG_HOME"/lib/$1 ; do + if [[ -f "$lib" ]]; then + if [ -n "$CYGPATHCMD" ]; then + "$CYGPATHCMD" -am "$lib" + elif [[ $mingw || $msys ]]; then + echo "$lib" | sed 's|/|\\\\|g' + else + echo "$lib" + fi + return + fi + done +} + +SCALA_CLI_JAR="$PROG_HOME/etc/scala-cli.jar" + +declare -a scala_args + +addScala () { + scala_args+=("'$1'") +} diff --git 
a/dist/bin/cli-common-platform b/dist/bin/cli-common-platform new file mode 100644 index 000000000000..a5906e882bb4 --- /dev/null +++ b/dist/bin/cli-common-platform @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +SCALA_CLI_CMD_BASH=("\"$JAVACMD\"" "-jar \"$PROG_HOME/bin/scala-cli.jar\"") diff --git a/dist/bin/cli-common-platform.bat b/dist/bin/cli-common-platform.bat new file mode 100644 index 000000000000..99103266c1d9 --- /dev/null +++ b/dist/bin/cli-common-platform.bat @@ -0,0 +1,5 @@ +@echo off + +@rem we need to escape % in the java command path, for some reason this doesnt work in common.bat +set "_JAVACMD=!_JAVACMD:%%=%%%%!" +set SCALA_CLI_CMD_WIN="%_JAVACMD%" "-jar" "%_PROG_HOME%\bin\scala-cli.jar" \ No newline at end of file diff --git a/dist/bin/common b/dist/bin/common old mode 100755 new mode 100644 index e3e4253938fb..2de8bdf9f99a --- a/dist/bin/common +++ b/dist/bin/common @@ -1,198 +1,23 @@ #!/usr/bin/env bash -#/*-------------------------------------------------------------------------- -# * Credits: This script is based on the script generated by sbt-pack. -# *--------------------------------------------------------------------------*/ - -# save terminal settings -saved_stty=$(stty -g 2>/dev/null) -# clear on error so we don't later try to restore them -if [[ ! $? ]]; then - saved_stty="" -fi - -# restore stty settings (echo in particular) -function restoreSttySettings() { - stty $saved_stty - saved_stty="" -} - -scala_exit_status=127 -function onExit() { - [[ "$saved_stty" != "" ]] && restoreSttySettings - exit $scala_exit_status -} - -# to reenable echo if we are interrupted before completing. -trap onExit INT TERM EXIT - -unset cygwin mingw msys darwin conemu - -# COLUMNS is used together with command line option '-pageWidth'. 
-if command -v tput >/dev/null 2>&1; then - export COLUMNS="$(tput -Tdumb cols)" -fi - -case "`uname`" in - CYGWIN*) cygwin=true - ;; - MINGW*) mingw=true - ;; - MSYS*) msys=true - ;; - Darwin*) darwin=true - if [ -z "$JAVA_VERSION" ] ; then - JAVA_VERSION="CurrentJDK" - else - echo "Using Java version: $JAVA_VERSION" 1>&2 - fi - if [ -z "$JAVA_HOME" ] ; then - JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/${JAVA_VERSION}/Home - fi - JAVACMD="`which java`" - ;; -esac - -unset CYGPATHCMD -if [[ ${cygwin-} || ${mingw-} || ${msys-} ]]; then - # ConEmu terminal is incompatible with jna-5.*.jar - [[ (${CONEMUANSI-} || ${ConEmuANSI-}) ]] && conemu=true - # cygpath is used by various windows shells: cygwin, git-sdk, gitbash, msys, etc. - CYGPATHCMD=`which cygpath 2>/dev/null` - case "$TERM" in - rxvt* | xterm* | cygwin*) - stty -icanon min 1 -echo - JAVA_OPTS="$JAVA_OPTS -Djline.terminal=unix" - ;; - esac -fi - -# Resolve JAVA_HOME from javac command path -if [ -z "$JAVA_HOME" ]; then - javaExecutable="`which javac`" - if [ -n "$javaExecutable" -a -f "$javaExecutable" -a ! "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then - # readlink(1) is not available as standard on Solaris 10. - readLink=`which readlink` - if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then - javaExecutable="`readlink -f \"$javaExecutable\"`" - javaHome="`dirname \"$javaExecutable\"`" - javaHome=`expr "$javaHome" : '\(.*\)/bin'` - JAVA_HOME="$javaHome" - export JAVA_HOME - fi - fi -fi - -if [ -z "${JAVACMD-}" ] ; then - if [ -n "${JAVA_HOME-}" ] ; then - if [ -x "$JAVA_HOME/jre/sh/java" ] ; then - # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" - else - JAVACMD="$JAVA_HOME/bin/java" - fi - else - JAVACMD="`which java`" - fi -fi - -if [ ! -x "$JAVACMD" ] ; then - echo "Error: JAVA_HOME is not defined correctly." 
- echo " We cannot execute $JAVACMD" - exit 1 -fi - -if [ -z "$JAVA_HOME" ] ; then - echo "Warning: JAVA_HOME environment variable is not set." -fi - -CLASSPATH_SUFFIX="" -# Path separator used in EXTRA_CLASSPATH -PSEP=":" - -# translate paths to Windows-mixed format before running java -if [ -n "${CYGPATHCMD-}" ]; then - [ -n "${PROG_HOME-}" ] && - PROG_HOME=`"$CYGPATHCMD" -am "$PROG_HOME"` - [ -n "$JAVA_HOME" ] && - JAVA_HOME=`"$CYGPATHCMD" -am "$JAVA_HOME"` - CLASSPATH_SUFFIX=";" - PSEP=";" -elif [[ ${mingw-} || ${msys-} ]]; then - # For Mingw / Msys, convert paths from UNIX format before anything is touched - [ -n "$PROG_HOME" ] && - PROG_HOME="`(cd "$PROG_HOME"; pwd -W | sed 's|/|\\\\|g')`" - [ -n "$JAVA_HOME" ] && - JAVA_HOME="`(cd "$JAVA_HOME"; pwd -W | sed 's|/|\\\\|g')`" - CLASSPATH_SUFFIX=";" - PSEP=";" -fi +source "$PROG_HOME/bin/common-shared" #/*-------------------------------------------------- # * The code below is for Dotty # *-------------------------------------------------*/ -find_lib () { - for lib in "$PROG_HOME"/lib/$1 ; do - if [[ -f "$lib" ]]; then - if [ -n "$CYGPATHCMD" ]; then - "$CYGPATHCMD" -am "$lib" - elif [[ $mingw || $msys ]]; then - echo "$lib" | sed 's|/|\\\\|g' - else - echo "$lib" - fi - return - fi - done -} - -DOTTY_COMP=$(find_lib "*scala3-compiler*") -DOTTY_INTF=$(find_lib "*scala3-interfaces*") -DOTTY_LIB=$(find_lib "*scala3-library*") -DOTTY_STAGING=$(find_lib "*scala3-staging*") -DOTTY_TASTY_INSPECTOR=$(find_lib "*scala3-tasty-inspector*") -TASTY_CORE=$(find_lib "*tasty-core*") -SCALA_ASM=$(find_lib "*scala-asm*") -SCALA_LIB=$(find_lib "*scala-library*") -SBT_INTF=$(find_lib "*compiler-interface*") -JLINE_READER=$(find_lib "*jline-reader-3*") -JLINE_TERMINAL=$(find_lib "*jline-terminal-3*") -JLINE_TERMINAL_JNA=$(find_lib "*jline-terminal-jna-3*") - -# jna-5 only appropriate for some combinations -[[ ${conemu-} && ${msys-} ]] || JNA=$(find_lib "*jna-5*") - compilerJavaClasspathArgs () { - # echo "dotty-compiler: 
$DOTTY_COMP" - # echo "dotty-interface: $DOTTY_INTF" - # echo "dotty-library: $DOTTY_LIB" - # echo "tasty-core: $TASTY_CORE" - # echo "scala-asm: $SCALA_ASM" - # echo "scala-lib: $SCALA_LIB" - # echo "sbt-intface: $SBT_INTF" + toolchain="$PROG_HOME/lib/scala.jar" + toolchain_extra="$PROG_HOME/lib/with_compiler.jar" - toolchain="" - toolchain+="$SCALA_LIB$PSEP" - toolchain+="$DOTTY_LIB$PSEP" - toolchain+="$SCALA_ASM$PSEP" - toolchain+="$SBT_INTF$PSEP" - toolchain+="$DOTTY_INTF$PSEP" - toolchain+="$DOTTY_COMP$PSEP" - toolchain+="$TASTY_CORE$PSEP" - toolchain+="$DOTTY_STAGING$PSEP" - toolchain+="$DOTTY_TASTY_INSPECTOR$PSEP" - - # jine - toolchain+="$JLINE_READER$PSEP" - toolchain+="$JLINE_TERMINAL$PSEP" - toolchain+="$JLINE_TERMINAL_JNA$PSEP" - [ -n "${JNA-}" ] && toolchain+="$JNA$PSEP" + if [ -n "$toolchain_extra" ]; then + toolchain+="$PSEP$toolchain_extra" + fi if [ -n "${jvm_cp_args-}" ]; then jvm_cp_args="$toolchain$jvm_cp_args" else - jvm_cp_args="$toolchain$PSEP" + jvm_cp_args="$toolchain" fi } @@ -205,16 +30,12 @@ ReplMain=dotty.tools.repl.Main ScriptingMain=dotty.tools.scripting.Main declare -a java_args -declare -a scala_args declare -a residual_args declare -a script_args addJava () { java_args+=("'$1'") } -addScala () { - scala_args+=("'$1'") -} addResidual () { residual_args+=("'$1'") } diff --git a/dist/bin/common-shared b/dist/bin/common-shared new file mode 100644 index 000000000000..8c85993a5283 --- /dev/null +++ b/dist/bin/common-shared @@ -0,0 +1,139 @@ +#!/usr/bin/env bash + +# Common options for both scala-cli and java based launchers + +#/*-------------------------------------------------------------------------- +# * Credits: This script is based on the script generated by sbt-pack. +# *--------------------------------------------------------------------------*/ + +# save terminal settings +saved_stty=$(stty -g 2>/dev/null) +# clear on error so we don't later try to restore them +if [[ ! $? 
]]; then + saved_stty="" +fi + +# restore stty settings (echo in particular) +function restoreSttySettings() { + stty $saved_stty + saved_stty="" +} + +scala_exit_status=127 +function onExit() { + [[ "$saved_stty" != "" ]] && restoreSttySettings + exit $scala_exit_status +} + +# to reenable echo if we are interrupted before completing. +trap onExit INT TERM EXIT + +unset cygwin mingw msys darwin conemu + +# COLUMNS is used together with command line option '-pageWidth'. +if command -v tput >/dev/null 2>&1; then + export COLUMNS="$(tput -Tdumb cols)" +fi + +case "`uname`" in + CYGWIN*) cygwin=true + ;; + MINGW*) mingw=true + ;; + MSYS*) msys=true + ;; + Darwin*) darwin=true + if [ -z "$JAVA_VERSION" ] ; then + JAVA_VERSION="CurrentJDK" + else + echo "Using Java version: $JAVA_VERSION" 1>&2 + fi + if [ -z "$JAVA_HOME" ] ; then + JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/${JAVA_VERSION}/Home + fi + JAVACMD="`which java`" + ;; +esac + +unset CYGPATHCMD +if [[ ${cygwin-} || ${mingw-} || ${msys-} ]]; then + # ConEmu terminal is incompatible with jna-5.*.jar + [[ (${CONEMUANSI-} || ${ConEmuANSI-}) ]] && conemu=true + # cygpath is used by various windows shells: cygwin, git-sdk, gitbash, msys, etc. + CYGPATHCMD=`which cygpath 2>/dev/null` + case "$TERM" in + rxvt* | xterm* | cygwin*) + stty -icanon min 1 -echo + JAVA_OPTS="$JAVA_OPTS -Djline.terminal=unix" + ;; + esac +fi + +# Resolve JAVA_HOME from javac command path +if [ -z "$JAVA_HOME" ]; then + javaExecutable="`which javac`" + if [ -n "$javaExecutable" -a -f "$javaExecutable" -a ! "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=`which readlink` + if [ ! 
`expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + javaExecutable="`readlink -f \"$javaExecutable\"`" + javaHome="`dirname \"$javaExecutable\"`" + javaHome=`expr "$javaHome" : '\(.*\)/bin'` + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "${JAVACMD-}" ] ; then + if [ -n "${JAVA_HOME-}" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="`which java`" + fi +fi + +if [ ! -x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." + echo " We cannot execute $JAVACMD" + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." +fi + +CLASSPATH_SUFFIX="" +# Path separator used in EXTRA_CLASSPATH +PSEP=":" +PROG_HOME_URI="file://$PROG_HOME" + +# translate paths to Windows-mixed format before running java +if [ -n "${CYGPATHCMD-}" ]; then + [ -n "${PROG_HOME-}" ] && + PROG_HOME=`"$CYGPATHCMD" -am "$PROG_HOME"` + PROG_HOME_URI="file:///$PROG_HOME" # Add extra root dir prefix + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`"$CYGPATHCMD" -am "$JAVA_HOME"` + CLASSPATH_SUFFIX=";" + PSEP=";" +elif [[ ${mingw-} || ${msys-} ]]; then + # For Mingw / Msys, convert paths from UNIX format before anything is touched + [ -n "$PROG_HOME" ] && + PROG_HOME="`(cd "$PROG_HOME"; pwd -W | sed 's|/|\\\\|g')`" + PROG_HOME_URI="file:///$PROG_HOME" # Add extra root dir prefix + [ -n "$JAVA_HOME" ] && + JAVA_HOME="`(cd "$JAVA_HOME"; pwd -W | sed 's|/|\\\\|g')`" + CLASSPATH_SUFFIX=";" + PSEP=";" +fi + +declare -a scala_args +addScala () { + scala_args+=("'$1'") +} diff --git a/dist/bin/common.bat b/dist/bin/common.bat index 7aef606d5509..510771d43b6e 100644 --- a/dist/bin/common.bat +++ b/dist/bin/common.bat @@ -41,17 +41,3 @@ if not defined _PROG_HOME ( set "_LIB_DIR=%_PROG_HOME%\lib" set _PSEP=; - -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-compiler*"') 
do set "_SCALA3_COMP=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-interfaces*"') do set "_SCALA3_INTF=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-library*"') do set "_SCALA3_LIB=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-staging*"') do set "_SCALA3_STAGING=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala3-tasty-inspector*"') do set "_SCALA3_TASTY_INSPECTOR=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*tasty-core*"') do set "_TASTY_CORE=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala-asm*"') do set "_SCALA_ASM=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*scala-library*"') do set "_SCALA_LIB=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*compiler-interface*"') do set "_SBT_INTF=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*jline-reader-3*"') do set "_JLINE_READER=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*jline-terminal-3*"') do set "_JLINE_TERMINAL=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*jline-terminal-jna-3*"') do set "_JLINE_TERMINAL_JNA=%_LIB_DIR%\%%f" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*jna-5*"') do set "_JNA=%_LIB_DIR%\%%f" diff --git a/dist/bin/scala b/dist/bin/scala index bd69d40c2b97..35efdfc38d96 100755 --- a/dist/bin/scala +++ b/dist/bin/scala @@ -26,47 +26,43 @@ if [ -z "${PROG_HOME-}" ] ; then cd "$saveddir" fi -source "$PROG_HOME/bin/common" +source "$PROG_HOME/bin/common-shared" +source "$PROG_HOME/bin/cli-common-platform" +SCALA_VERSION="" +# iterate through lines in VERSION_SRC +while IFS= read -r line; do + # if line starts with "version:=" then extract the version + if [[ "$line" == version:=* ]]; then + SCALA_VERSION="${line#version:=}" + break + fi +done < "$PROG_HOME/VERSION" + +# assert that SCALA_VERSION is not empty +if [ -z "$SCALA_VERSION" ]; 
then + echo "Failed to extract Scala version from $PROG_HOME/VERSION" + exit 1 +fi + +MVN_REPOSITORY="$PROG_HOME_URI/maven2" + +# escape all script arguments while [[ $# -gt 0 ]]; do - case "$1" in - -D*) - # pass to scala as well: otherwise we lose it sometimes when we - # need it, e.g. communicating with a server compiler. - # respect user-supplied -Dscala.usejavacp - addJava "$1" - addScala "$1" - shift - ;; - -J*) - # as with -D, pass to scala even though it will almost - # never be used. - addJava "${1:2}" - addScala "$1" - shift - ;; - -classpath*) - if [ "$1" != "${1##* }" ]; then - # -classpath and its value have been supplied in a single string e.g. "-classpath 'lib/*'" - A=$1 ; shift # consume $1 before adding its substrings back - set -- $A "$@" # split $1 on whitespace and put it back - else - addScala "$1" - shift - fi - ;; - *) - addScala "$1" - shift - ;; - esac + addScala "$1" + shift done # exec here would prevent onExit from being called, leaving terminal in unusable state -compilerJavaClasspathArgs [ -z "${ConEmuPID-}" -o -n "${cygwin-}" ] && export MSYSTEM= PWD= # workaround for #12405 -eval "\"$JAVACMD\"" "${java_args[@]}" "-Dscala.home=\"$PROG_HOME\"" "-classpath \"$jvm_cp_args\"" "dotty.tools.MainGenericRunner" "-classpath \"$jvm_cp_args\"" "${scala_args[@]}" -scala_exit_status=$? +# SCALA_CLI_CMD_BASH is an array, set by cli-common-platform +eval "${SCALA_CLI_CMD_BASH[@]}" \ + "--prog-name scala" \ + "--skip-cli-updates" \ + "--cli-default-scala-version \"$SCALA_VERSION\"" \ + "-r \"$MVN_REPOSITORY\"" \ + "${scala_args[@]}" +scala_exit_status=$? 
onExit diff --git a/dist/bin/scala.bat b/dist/bin/scala.bat index ca908fd340be..7418909da263 100644 --- a/dist/bin/scala.bat +++ b/dist/bin/scala.bat @@ -14,14 +14,17 @@ for %%f in ("%~dp0.") do ( call "%_PROG_HOME%\bin\common.bat" if not %_EXITCODE%==0 goto end -call :args %* - @rem ######################################################################### @rem ## Main -call :compilerJavaClasspathArgs +call :setScalaOpts + +call "%_PROG_HOME%\bin\cli-common-platform.bat" + +@rem SCALA_CLI_CMD_WIN is an array, set in cli-common-platform.bat. +@rem WE NEED TO PASS '--skip-cli-updates' for JVM launchers but we actually don't need it for native launchers +call %SCALA_CLI_CMD_WIN% "--prog-name" "scala" "--skip-cli-updates" "--cli-default-scala-version" "%_SCALA_VERSION%" "-r" "%MVN_REPOSITORY%" %* -call "%_JAVACMD%" %_JAVA_ARGS% "-Dscala.home=%_PROG_HOME%" -classpath "%_JVM_CP_ARGS%" dotty.tools.MainGenericRunner -classpath "%_JVM_CP_ARGS%" %_SCALA_ARGS% if not %ERRORLEVEL%==0 ( set _EXITCODE=1& goto end ) goto end @@ -29,62 +32,31 @@ goto end @rem ######################################################################### @rem ## Subroutines -:args -set _JAVA_ARGS= -set _SCALA_ARGS= -set _SCALA_CPATH= - -:args_loop -if "%~1"=="" goto args_done -set "__ARG=%~1" -if "%__ARG:~0,2%"=="-D" ( - @rem pass to scala as well: otherwise we lose it sometimes when we - @rem need it, e.g. communicating with a server compiler. - set _JAVA_ARGS=!_JAVA_ARGS! "%__ARG%" - set _SCALA_ARGS=!_SCALA_ARGS! "%__ARG%" -) else if "%__ARG:~0,2%"=="-J" ( - @rem as with -D, pass to scala even though it will almost - @rem never be used. - set _JAVA_ARGS=!_JAVA_ARGS! %__ARG:~2% - set _SCALA_ARGS=!_SCALA_ARGS! "%__ARG%" -) else if "%__ARG%"=="-classpath" ( - set "_SCALA_CPATH=%~2" - shift -) else if "%__ARG%"=="-cp" ( - set "_SCALA_CPATH=%~2" - shift -) else ( - set _SCALA_ARGS=!_SCALA_ARGS! 
"%__ARG%" +:setScalaOpts + +@REM sfind the index of the first colon in _PROG_HOME +set "index=0" +set "char=!_PROG_HOME:~%index%,1!" +:findColon +if not "%char%"==":" ( + set /a "index+=1" + set "char=!_PROG_HOME:~%index%,1!" + goto :findColon ) -shift -goto args_loop -:args_done -goto :eof -@rem output parameter: _JVM_CP_ARGS -:compilerJavaClasspathArgs -set __TOOLCHAIN= -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA_LIB%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_LIB%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA_ASM%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SBT_INTF%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_INTF%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_COMP%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_TASTY_CORE%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_STAGING%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_TASTY_INSPECTOR%%_PSEP%" - -@rem # jline -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_READER%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_TERMINAL%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_TERMINAL_JNA%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JNA%%_PSEP%" - -if defined _SCALA_CPATH ( - set "_JVM_CP_ARGS=%__TOOLCHAIN%%_SCALA_CPATH%" -) else ( - set "_JVM_CP_ARGS=%__TOOLCHAIN%" +set "_SCALA_VERSION=" +set "MVN_REPOSITORY=file:///%_PROG_HOME:\=/%/maven2" + +@rem read for version:=_SCALA_VERSION in VERSION_FILE +FOR /F "usebackq delims=" %%G IN ("%_PROG_HOME%\VERSION") DO ( + SET "line=%%G" + IF "!line:~0,9!"=="version:=" ( + SET "_SCALA_VERSION=!line:~9!" 
+ GOTO :foundVersion + ) ) + +:foundVersion goto :eof @rem ######################################################################### diff --git a/dist/bin/scala_legacy b/dist/bin/scala_legacy new file mode 100755 index 000000000000..18fc6d874e34 --- /dev/null +++ b/dist/bin/scala_legacy @@ -0,0 +1,72 @@ +#!/usr/bin/env bash + +# Try to autodetect real location of the script +if [ -z "${PROG_HOME-}" ] ; then + ## resolve links - $0 may be a link to PROG_HOME + PRG="$0" + + # need this for relative symlinks + while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG="`dirname "$PRG"`/$link" + fi + done + + saveddir=`pwd` + + PROG_HOME=`dirname "$PRG"`/.. + + # make it fully qualified + PROG_HOME=`cd "$PROG_HOME" && pwd` + + cd "$saveddir" +fi + +source "$PROG_HOME/bin/common" + +while [[ $# -gt 0 ]]; do + case "$1" in + -D*) + # pass to scala as well: otherwise we lose it sometimes when we + # need it, e.g. communicating with a server compiler. + # respect user-supplied -Dscala.usejavacp + addJava "$1" + addScala "$1" + shift + ;; + -J*) + # as with -D, pass to scala even though it will almost + # never be used. + addJava "${1:2}" + addScala "$1" + shift + ;; + -classpath*) + if [ "$1" != "${1##* }" ]; then + # -classpath and its value have been supplied in a single string e.g. 
"-classpath 'lib/*'" + A=$1 ; shift # consume $1 before adding its substrings back + set -- $A "$@" # split $1 on whitespace and put it back + else + addScala "$1" + shift + fi + ;; + *) + addScala "$1" + shift + ;; + esac +done + +# exec here would prevent onExit from being called, leaving terminal in unusable state +compilerJavaClasspathArgs +[ -z "${ConEmuPID-}" -o -n "${cygwin-}" ] && export MSYSTEM= PWD= # workaround for #12405 +eval "\"$JAVACMD\"" "${java_args[@]}" "-Dscala.home=\"$PROG_HOME\"" "-classpath \"$jvm_cp_args\"" "-Dscala.expandjavacp=true" "dotty.tools.MainGenericRunner" "-classpath \"$jvm_cp_args\"" "${scala_args[@]}" +scala_exit_status=$? + + +onExit diff --git a/dist/bin/scalac b/dist/bin/scalac old mode 100644 new mode 100755 index d9bd21ca425b..a527d9767749 --- a/dist/bin/scalac +++ b/dist/bin/scalac @@ -86,6 +86,7 @@ eval "\"$JAVACMD\"" \ ${JAVA_OPTS:-$default_java_opts} \ "${java_args[@]}" \ "-classpath \"$jvm_cp_args\"" \ + "-Dscala.expandjavacp=true" \ "-Dscala.usejavacp=true" \ "-Dscala.home=\"$PROG_HOME\"" \ "dotty.tools.MainGenericCompiler" \ diff --git a/dist/bin/scalac.bat b/dist/bin/scalac.bat index cb1a76471f70..e2898bdc2890 100644 --- a/dist/bin/scalac.bat +++ b/dist/bin/scalac.bat @@ -21,7 +21,10 @@ call :args %* call :compilerJavaClasspathArgs -call "%_JAVACMD%" %_JAVA_ARGS% -classpath "%_JVM_CP_ARGS%" "-Dscala.usejavacp=true" "-Dscala.home=%_PROG_HOME%" dotty.tools.MainGenericCompiler %_SCALA_ARGS% +@rem we need to escape % in the java command path, for some reason this doesnt work in common.bat +set "_JAVACMD=!_JAVACMD:%%=%%%%!" 
+ +call "%_JAVACMD%" %_JAVA_ARGS% -classpath "%_JVM_CP_ARGS%" "-Dscala.usejavacp=true" "-Dscala.expandjavacp=true" "-Dscala.home=%_PROG_HOME%" dotty.tools.MainGenericCompiler %_SCALA_ARGS% if not %ERRORLEVEL%==0 ( set _EXITCODE=1 goto end @@ -85,29 +88,8 @@ goto :eof @rem output parameter: _JVM_CP_ARGS :compilerJavaClasspathArgs -@rem echo scala3-compiler: %_SCALA3_COMP% -@rem echo scala3-interface: %_SCALA3_INTF% -@rem echo scala3-library: %_SCALA3_LIB% -@rem echo tasty-core: %_TASTY_CORE% -@rem echo scala-asm: %_SCALA_ASM% -@rem echo scala-lib: %_SCALA_LIB% -@rem echo sbt-intface: %_SBT_INTF% - -set "__TOOLCHAIN=%_SCALA_LIB%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_LIB%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA_ASM%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SBT_INTF%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_INTF%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_COMP%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_TASTY_CORE%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_STAGING%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_SCALA3_TASTY_INSPECTOR%%_PSEP%" - -@rem # jline -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_READER%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_TERMINAL%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JLINE_TERMINAL_JNA%%_PSEP%" -set "__TOOLCHAIN=%__TOOLCHAIN%%_JNA%%_PSEP%" +set "__TOOLCHAIN=%_LIB_DIR%\scala.jar" +set "__TOOLCHAIN=%__TOOLCHAIN%%_PSEP%%_LIB_DIR%\with_compiler.jar%" if defined _SCALA_CPATH ( set "_JVM_CP_ARGS=%__TOOLCHAIN%%_SCALA_CPATH%" diff --git a/dist/bin/scaladoc b/dist/bin/scaladoc index 8b9ec41a7f8c..0af5a2b55acb 100755 --- a/dist/bin/scaladoc +++ b/dist/bin/scaladoc @@ -36,6 +36,7 @@ CompilerMain=dotty.tools.dotc.Main DecompilerMain=dotty.tools.dotc.decompiler.Main ReplMain=dotty.tools.repl.Main ScriptingMain=dotty.tools.scripting.Main +JVM_CP_ARGS="$PROG_HOME/lib/scaladoc.jar" PROG_NAME=$CompilerMain @@ -52,67 +53,6 @@ addScrip() { script_args+=("'$1'") } -classpathArgs () { - CLASS_PATH="" - CLASS_PATH+="$(find_lib 
"*scaladoc*")$PSEP" - CLASS_PATH+="$(find_lib "*scala3-compiler*")$PSEP" - CLASS_PATH+="$(find_lib "*scala3-interfaces*")$PSEP" - CLASS_PATH+="$(find_lib "*scala3-library*")$PSEP" - CLASS_PATH+="$(find_lib "*tasty-core*")$PSEP" - CLASS_PATH+="$(find_lib "*scala3-tasty-inspector*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-0*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-anchorlink*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-autolink*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-emoji*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-gfm-strikethrough*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-gfm-tasklist*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-wikilink*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-yaml-front-matter*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-tables*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-ins*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ext-superscript*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-ast*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-data*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-dependency*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-misc*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-format*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-sequence*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-builder*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-collection*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-visitor*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-options*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-util-html*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-formatter*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-ast*")$PSEP" - CLASS_PATH+="$(find_lib "*liqp*")$PSEP" - CLASS_PATH+="$(find_lib "*jsoup*")$PSEP" - CLASS_PATH+="$(find_lib "*jackson-dataformat-yaml*")$PSEP" - CLASS_PATH+="$(find_lib "*jackson-datatype-jsr310*")$PSEP" - 
CLASS_PATH+="$(find_lib "*strftime4j*")$PSEP" - CLASS_PATH+="$(find_lib "*scala-asm*")$PSEP" - CLASS_PATH+="$(find_lib "*compiler-interface*")$PSEP" - CLASS_PATH+="$(find_lib "*jline-reader*")$PSEP" - CLASS_PATH+="$(find_lib "*jline-terminal-3*")$PSEP" - CLASS_PATH+="$(find_lib "*jline-terminal-jna*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-formatter*")$PSEP" - CLASS_PATH+="$(find_lib "*autolink-0.6*")$PSEP" - CLASS_PATH+="$(find_lib "*flexmark-jira-converter*")$PSEP" - CLASS_PATH+="$(find_lib "*antlr4*")$PSEP" - CLASS_PATH+="$(find_lib "*jackson-annotations*")$PSEP" - CLASS_PATH+="$(find_lib "*jackson-core*")$PSEP" - CLASS_PATH+="$(find_lib "*jackson-databind*")$PSEP" - CLASS_PATH+="$(find_lib "*snakeyaml*")$PSEP" - CLASS_PATH+="$(find_lib "*scala-library*")$PSEP" - CLASS_PATH+="$(find_lib "*protobuf-java*")$PSEP" - CLASS_PATH+="$(find_lib "*util-interface*")$PSEP" - CLASS_PATH+="$(find_lib "*jna-5*")$PSEP" - CLASS_PATH+="$(find_lib "*antlr4-runtime*")$PSEP" - - jvm_cp_args="-classpath \"$CLASS_PATH\"" -} - #for A in "$@" ; do echo "A[$A]" ; done ; exit 2 while [[ $# -gt 0 ]]; do @@ -134,12 +74,11 @@ case "$1" in esac done -classpathArgs - eval "\"$JAVACMD\"" \ ${JAVA_OPTS:-$default_java_opts} \ "${java_args[@]}" \ - "${jvm_cp_args-}" \ + -classpath "${JVM_CP_ARGS}" \ + -Dscala.expandjavacp=true \ -Dscala.usejavacp=true \ "dotty.tools.scaladoc.Main" \ "${scala_args[@]}" \ diff --git a/dist/bin/scaladoc.bat b/dist/bin/scaladoc.bat index bcc0d71788a3..b9e4820b006d 100644 --- a/dist/bin/scaladoc.bat +++ b/dist/bin/scaladoc.bat @@ -21,13 +21,16 @@ call :args %* @rem ######################################################################### @rem ## Main -call :classpathArgs - if defined JAVA_OPTS ( set _JAVA_OPTS=%JAVA_OPTS% ) else ( set _JAVA_OPTS=%_DEFAULT_JAVA_OPTS% ) + +@rem we need to escape % in the java command path, for some reason this doesnt work in common.bat +set "_JAVACMD=!_JAVACMD:%%=%%%%!" 
+ call "%_JAVACMD%" %_JAVA_OPTS% %_JAVA_DEBUG% %_JAVA_ARGS% ^ --classpath "%_CLASS_PATH%" ^ +-classpath "%_LIB_DIR%\scaladoc.jar" ^ +-Dscala.expandjavacp=true ^ -Dscala.usejavacp=true ^ dotty.tools.scaladoc.Main %_SCALA_ARGS% %_RESIDUAL_ARGS% if not %ERRORLEVEL%==0 ( @@ -99,65 +102,6 @@ goto :eof set _RESIDUAL_ARGS=%_RESIDUAL_ARGS% %~1 goto :eof -@rem output parameter: _CLASS_PATH -:classpathArgs -set "_LIB_DIR=%_PROG_HOME%\lib" -set _CLASS_PATH= -@rem keep list in sync with bash script `bin\scaladoc` ! -call :updateClasspath "scaladoc" -call :updateClasspath "scala3-compiler" -call :updateClasspath "scala3-interfaces" -call :updateClasspath "scala3-library" -call :updateClasspath "tasty-core" -call :updateClasspath "scala3-tasty-inspector" -call :updateClasspath "flexmark-0" -call :updateClasspath "flexmark-html-parser" -call :updateClasspath "flexmark-ext-anchorlink" -call :updateClasspath "flexmark-ext-autolink" -call :updateClasspath "flexmark-ext-emoji" -call :updateClasspath "flexmark-ext-gfm-strikethrough" -call :updateClasspath "flexmark-ext-gfm-tables" -call :updateClasspath "flexmark-ext-gfm-tasklist" -call :updateClasspath "flexmark-ext-wikilink" -call :updateClasspath "flexmark-ext-yaml-front-matter" -call :updateClasspath "liqp" -call :updateClasspath "jsoup" -call :updateClasspath "jackson-dataformat-yaml" -call :updateClasspath "jackson-datatype-jsr310" -call :updateClasspath "strftime4j" -call :updateClasspath "scala-asm" -call :updateClasspath "compiler-interface" -call :updateClasspath "jline-reader" -call :updateClasspath "jline-terminal-3" -call :updateClasspath "jline-terminal-jna" -call :updateClasspath "flexmark-util" -call :updateClasspath "flexmark-formatter" -call :updateClasspath "autolink-0.6" -call :updateClasspath "flexmark-jira-converter" -call :updateClasspath "antlr4" -call :updateClasspath "jackson-annotations" -call :updateClasspath "jackson-core" -call :updateClasspath "jackson-databind" -call :updateClasspath "snakeyaml" -call 
:updateClasspath "scala-library" -call :updateClasspath "protobuf-java" -call :updateClasspath "util-interface" -call :updateClasspath "jna-5" -call :updateClasspath "flexmark-ext-tables" -call :updateClasspath "flexmark-ext-ins" -call :updateClasspath "flexmark-ext-superscript" -call :updateClasspath "antlr4-runtime" -goto :eof - -@rem input parameter: %1=pattern for library file -@rem output parameter: _CLASS_PATH -:updateClasspath -set "__PATTERN=%~1" -for /f "delims=" %%f in ('dir /a-d /b "%_LIB_DIR%\*%__PATTERN%*" 2^>NUL') do ( - set "_CLASS_PATH=!_CLASS_PATH!%_LIB_DIR%\%%f%_PSEP%" -) -goto :eof - @rem ######################################################################### @rem ## Cleanups diff --git a/docs/_assets/js/api-search.js b/docs/_assets/js/api-search.js index 4950d2067ffe..03981350e81b 100644 --- a/docs/_assets/js/api-search.js +++ b/docs/_assets/js/api-search.js @@ -28,66 +28,48 @@ * } * ``` */ -onmessage = function(e) { - var docs = e.data.docs; - var searchTerm = e.data.search; - - var regexForTerm = function(query) { - var escaped = query.replace(/([\.\*\+\?\|\(\)\[\]\\])/g, '\\$1'); +onmessage = function({ data: { docs, search } }) { + const regexForTerm = (query) => { + const escaped = query.replace(/([\.\*\+\?\|\(\)\[\]\\])/g, '\\$1'); if (query.toLowerCase() != query) { // Regexp that matches CamelCase subbits: "BiSe" is // "[a-z]*Bi[a-z]*Se" and matches "BitSet", "ABitSet", ... 
return new RegExp(escaped.replace(/([A-Z])/g,"[a-z]*$1")); } - else { // if query is all lower case make a normal case insensitive search - return new RegExp(escaped, "i"); - } + // if query is all lower case make a normal case insensitive search + return new RegExp(escaped, "i"); }; - var searchRegex = regexForTerm(searchTerm); + const searchRegex = regexForTerm(search); - var filterPackages = function(entity) { - switch(entity.kind) { - case "val": - case "def": - case "type": - case "package": - return false; - default: - return true; - } - }; + const filterPackages = (entity) => !["val", "def", "type", "package"].includes(entity.kind); - // look at this higher order function, such syntax: - var messageParentIfMatches = function(parent) { - return function(entity) { - var fullName = entity.path.join('.'); + const messageParentIfMatches = (parent) => (entity) => { + const fullName = entity.path.join('.'); - if (searchRegex.test(fullName)) { + if (searchRegex.test(fullName)) { + postMessage({ + type: "entityResult", + package: parent, + entity + }); + } + + entity.members.forEach((member) => { + if (searchRegex.test(member.name)) { postMessage({ - "type": "entityResult", - "package": parent, - "entity": entity + type: "memberResult", + package: parent, + parent: entity, + member }); } - - var searchChild = function(member) { - if (searchRegex.test(member.name)) { - postMessage({ - "type": "memberResult", - "package": parent, - "parent": entity, - "member": member, - }); - } - }; - entity.members.forEach(searchChild); - }; + }); }; - docs.forEach(function(pack) { + docs.forEach((pack) => { pack.members .filter(filterPackages) .forEach(messageParentIfMatches(pack)); }); -} +}; diff --git a/docs/_assets/js/sidebar.js b/docs/_assets/js/sidebar.js index aa377ed8aa0e..2832486c1d6a 100644 --- a/docs/_assets/js/sidebar.js +++ b/docs/_assets/js/sidebar.js @@ -2,5 +2,5 @@ function toggleSection(titleElement) { const title = $(titleElement); 
title.siblings("ul").toggleClass("toggled"); - title.children("i.fas").toggleClass("fa-angle-right").toggleClass("fa-angle-down"); + title.children("i.fas").toggleClass("fa-angle-right fa-angle-down"); } diff --git a/docs/_assets/js/toolbar.js b/docs/_assets/js/toolbar.js index be132e7db4a9..a799ca661dd9 100644 --- a/docs/_assets/js/toolbar.js +++ b/docs/_assets/js/toolbar.js @@ -1,20 +1,26 @@ -$(document).ready(function() { - $("#menu-icon").click(() => { - $(".sidebar").toggleClass("toggled"); - }) - $("#search-icon").click(() => { - $("#searchbar").toggleClass("shown"); - $("#search-api-input").focus(); - }) - const searchInput = $("#search-api-input"); - searchInput.keydown(evt => { - if (evt.which == 13) { - const baseUrl = $("#baseurl-input").val(); - window.location = ( - baseUrl + "/api/search.html?" + - "searchTerm=" + searchInput.val() + - "&previousUrl=" + encodeURI(window.location) - ); +$(function() { + const menuIcon = $("#menu-icon"); + const sidebar = $(".sidebar"); + menuIcon.on("click", () => { + sidebar.toggleClass("toggled"); + }); + + const searchIcon = $("#search-icon"); + const searchbar = $("#searchbar"); + const searchApiInput = $("#search-api-input"); + searchIcon.on("click", () => { + searchbar.toggleClass("shown"); + searchApiInput.focus(); + }); + + const baseurlInput = $("#baseurl-input"); + searchApiInput.keydown(evt => { + if (evt.which === 13) { // Enter + const baseUrl = baseurlInput.val(); + const searchTerm = searchApiInput.val(); + const previousUrl = encodeURI(window.location); + const searchUrl = `${baseUrl}/api/search.html?searchTerm=${searchTerm}&previousUrl=${previousUrl}`; + window.location = searchUrl; } - }) -}) + }); +}); diff --git a/docs/_docs/internals/best-effort-compilation.md b/docs/_docs/internals/best-effort-compilation.md new file mode 100644 index 000000000000..2fed951c3fd8 --- /dev/null +++ b/docs/_docs/internals/best-effort-compilation.md @@ -0,0 +1,88 @@ +--- +layout: doc-page +title: Best Effort 
Compilation +--- + +Best-effort compilation is a compilation mode introduced with the aim of improving IDE integration. It allows to generate +tasty-like artifacts and semanticdb files in erroring programs. + +It is composed of two experimental compiler options: +* `-Ybest-effort` produces Best Effort TASTy (`.betasty`) files to the `META-INF/best-effort` directory +* `-Ywith-best-effort-tasty` allows to read Best Effort TASTy files, and if such file is read from the classpath then +limits compilation to the frontend phases + +This feature aims to force through to the typer phase regardless of errors, and then serialize tasty-like files +obtained from the error trees into the best effort directory (`META-INF/best-effort`) and also serialize semanticdb as normal. + +The exact execution pattern is as follows: + +```none +Parser + │ + │ regardless of errors + ˅ +TyperPhase ────────────────────────────────────┐ + │ │ + │ │ + │ with errors │ no errors + │ │ + │ ˅ + │ Every following frontend pass until semanticdb.ExtractSemanticDB (interrupted in the case of errors) + │ │ + │ │ regardless of errors + ˅ ˅ +semanticdb.ExtractSemanticDB ──────────────────┐ + │ │ + │ with errors │ no errors + │ │ + │ ˅ + │ Every following frontend pass until Pickler (interrupted in the case of errors) + │ │ + │ │ regardless of errors + ˅ ˅ +Pickler (with added printing of best effort tasty to the best effort target directory) + │ │ + │ with errors │ no errors + ˅ ˅ +End compilation Execute latter passes +``` + +This is because the IDE is able to retrieve useful info even when skipping phases like PostTyper. + +This execution structure where we skip phases depending on the errors found is motivated by the desire +to avoid additionally handling errored trees in as many phases as possible, therefore also decreasing +maintenance load. 
This way phases like PostTyper do not have to be continually adjusted to handle trees
+with errors from typer and usually the IDE is able to retrieve enough information with just the typer phase.
+
+An unfortunate consequence of this structure is the fact that we lose access to phases allowing for incremental
+compilation, which is something that could be addressed in the future.
+
+`-Ywith-best-effort-tasty` option allows reading Best Effort TASTy files from classpath. If such file is read, then
+the compiler is disallowed from proceeding to any non-frontend phase. This is to be used either in combination with
+`-Ybest-effort` option to produce Best Effort TASTy using failing dependencies, or in the Presentation Compiler
+to access symbols derived from failing projects.
+
+## Best Effort TASTy format
+
+The Best Effort TASTy (`.betasty`) format is a file format produced by the compiler when the `-Ybest-effort` option
+is used. It is characterised by a different header and an addition of the `ERRORtype` type, which represents errored types in
+the compiler. The Best Effort TASTy format also extends the regular TASTy grammar to allow the handling of as
+large amount of incorrect trees produced by the compiler as possible. The format is defined as part of the
+`dotty.tools.besteffort.BestEffortTastyFormat` object.
+
+Since currently the format holds an experimental status, no compatibility rules are defined for now, and the specification
+may change between the patch compiler versions, if need be.
+
+For performance reasons, if no errors are detected in the frontend phases, a betasty file may be serialized in the format of
+regular TASTy file, characterized by the use of Tasty header instead of Best Effort TASTy header in the `.betasty` file.
+
+## Testing
+
+The testing procedure reuses the `tests/neg` negative tests that are usually meant to produce errors. 
First they are compiled +with the `-Ybest-effort` option (testing the TreePickler for errored trees), then later, the tree is reconstructed using +the previously created Best Effort TASTy, with `-Yread-tasty` and `-Ywith-best-effort-tasty` options. This is to test the +TreeUnpickler for those Best Effort TASTy files. + +One of the goals of this feature is to keep the maintainance cost low, and to not let this feature hinder the pace of the +overall development of the compiler. Because of that, the tests can be freely disabled in `compiler/neg-best-effort.blacklist` +(testing TreePickler) and `compiler/neg-best-effort-from-tasty.blacklist` (testing TreeUnpickler). diff --git a/docs/_docs/internals/syntax.md b/docs/_docs/internals/syntax.md index 10f068e53c7f..dd4a3af403ab 100644 --- a/docs/_docs/internals/syntax.md +++ b/docs/_docs/internals/syntax.md @@ -191,6 +191,7 @@ MatchType ::= InfixType `match` <<< TypeCaseClauses >>> InfixType ::= RefinedType {id [nl] RefinedType} InfixOp(t1, op, t2) RefinedType ::= AnnotType {[nl] Refinement} RefinedTypeTree(t, ds) AnnotType ::= SimpleType {Annotation} Annotated(t, annot) +AnnotType1 ::= SimpleType1 {Annotation} Annotated(t, annot) SimpleType ::= SimpleLiteral SingletonTypeTree(l) | ‘?’ TypeBounds @@ -198,7 +199,7 @@ SimpleType ::= SimpleLiteral SimpleType1 ::= id Ident(name) | Singleton ‘.’ id Select(t, name) | Singleton ‘.’ ‘type’ SingletonTypeTree(p) - | ‘(’ Types ‘)’ Tuple(ts) + | ‘(’ [Types | NamesAndTypes] ‘)’ Tuple(ts) | Refinement RefinedTypeTree(EmptyTree, refinement) | TypeSplice -- deprecated syntax | SimpleType1 TypeArgs AppliedTypeTree(t, args) @@ -220,8 +221,12 @@ IntoTargetType ::= Type TypeArgs ::= ‘[’ Types ‘]’ ts Refinement ::= :<<< [RefineDcl] {semi [RefineDcl]} >>> ds TypeBounds ::= [‘>:’ Type] [‘<:’ Type] TypeBoundsTree(lo, hi) -TypeParamBounds ::= TypeBounds {‘:’ Type} ContextBounds(typeBounds, tps) +TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] ContextBounds(typeBounds, tps) +ContextBounds ::= 
ContextBound | '{' ContextBound {',' ContextBound} '}' +ContextBound ::= Type ['as' id] Types ::= Type {‘,’ Type} +NamesAndTypes ::= NameAndType {‘,’ NameAndType} +NameAndType ::= id ':' Type ``` ### Expressions @@ -290,8 +295,10 @@ TypeSplice ::= spliceId | ‘$’ ‘{’ Block ‘}’ -- unless inside quoted type pattern -- deprecated syntax | ‘$’ ‘{’ Pattern ‘}’ -- when inside quoted type pattern -- deprecated syntax ExprsInParens ::= ExprInParens {‘,’ ExprInParens} + | NamedExprInParens {‘,’ NamedExprInParens} ExprInParens ::= PostfixExpr ‘:’ Type -- normal Expr allows only RefinedType here | Expr +NamedExprInParens ::= id '=' ExprInParens ParArgumentExprs ::= ‘(’ [ExprsInParens] ‘)’ exprs | ‘(’ ‘using’ ExprsInParens ‘)’ | ‘(’ [ExprsInParens ‘,’] PostfixExpr ‘*’ ‘)’ exprs :+ Typed(expr, Ident(wildcardStar)) @@ -343,6 +350,9 @@ SimplePattern1 ::= SimpleRef PatVar ::= varid | ‘_’ Patterns ::= Pattern {‘,’ Pattern} + | NamedPattern {‘,’ NamedPattern} +NamedPattern ::= id '=' Pattern + ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ Apply(fn, pats) | ‘(’ [Patterns ‘,’] PatVar ‘*’ ‘)’ ``` @@ -351,7 +361,7 @@ ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ ```ebnf ClsTypeParamClause::= ‘[’ ClsTypeParam {‘,’ ClsTypeParam} ‘]’ ClsTypeParam ::= {Annotation} [‘+’ | ‘-’] TypeDef(Modifiers, name, tparams, bounds) - id [HkTypeParamClause] TypeParamBounds Bound(below, above, context) + id [HkTypeParamClause] TypeAndCtxBounds Bound(below, above, context) TypTypeParamClause::= ‘[’ TypTypeParam {‘,’ TypTypeParam} ‘]’ TypTypeParam ::= {Annotation} id [HkTypeParamClause] TypeBounds @@ -365,7 +375,7 @@ ClsParamClause ::= [nl] ‘(’ ClsParams ‘)’ | [nl] ‘(’ ‘using’ (ClsParams | FunArgTypes) ‘)’ ClsParams ::= ClsParam {‘,’ ClsParam} ClsParam ::= {Annotation} ValDef(mods, id, tpe, expr) -- point of mods on val/var - [{Modifier} (‘val’ | ‘var’)] Param + [{Modifier | ‘tracked’} (‘val’ | ‘var’)] Param DefParamClauses ::= DefParamClause { DefParamClause } -- and two DefTypeParamClause cannot be adjacent 
DefParamClause ::= DefTypeParamClause @@ -376,7 +386,7 @@ TypelessClause ::= DefTermParamClause | UsingParamClause DefTypeParamClause::= [nl] ‘[’ DefTypeParam {‘,’ DefTypeParam} ‘]’ -DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeParamBounds +DefTypeParam ::= {Annotation} id [HkTypeParamClause] TypeAndCtxBounds DefTermParamClause::= [nl] ‘(’ [DefTermParams] ‘)’ UsingParamClause ::= [nl] ‘(’ ‘using’ (DefTermParams | FunArgTypes) ‘)’ DefImplicitClause ::= [nl] ‘(’ ‘implicit’ DefTermParams ‘)’ @@ -431,6 +441,7 @@ EndMarkerTag ::= id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | ### Definitions ```ebnf RefineDcl ::= ‘val’ ValDcl + | ‘var’ ValDcl | ‘def’ DefDcl | ‘type’ {nl} TypeDef ValDcl ::= ids ‘:’ Type @@ -446,7 +457,7 @@ PatDef ::= ids [‘:’ Type] [‘=’ Expr] DefDef ::= DefSig [‘:’ Type] [‘=’ Expr] DefDef(_, name, paramss, tpe, expr) | ‘this’ TypelessClauses [DefImplicitClause] ‘=’ ConstrExpr DefDef(_, , vparamss, EmptyTree, expr | Block) DefSig ::= id [DefParamClauses] [DefImplicitClause] -TypeDef ::= id [TypeParamClause] {FunParamClause} TypeBounds TypeDefTree(_, name, tparams, bound +TypeDef ::= id [TypeParamClause] {FunParamClause} TypeAndCtxBounds TypeDefTree(_, name, tparams, bound [‘=’ Type] TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef @@ -458,9 +469,13 @@ ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsParamClauses ConstrMods ::= {Annotation} [AccessModifier] ObjectDef ::= id [Template] ModuleDef(mods, name, template) // no constructor EnumDef ::= id ClassConstr InheritClauses EnumBody -GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) -GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present -StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] + +GivenDef ::= [GivenConditional '=>'] GivenSig +GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} +GivenSig ::= GivenType ['as' id] ([‘=’ Expr] | 
TemplateBody) + | ConstrApps ['as' id] TemplateBody +GivenType ::= AnnotType1 {id [nl] AnnotType1} + Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods ExtMethods ::= ExtMethod | [nl] <<< ExtMethod {semi ExtMethod} >>> diff --git a/docs/_docs/internals/type-system.md b/docs/_docs/internals/type-system.md index d2c0cd869e61..e3f02654953e 100644 --- a/docs/_docs/internals/type-system.md +++ b/docs/_docs/internals/type-system.md @@ -36,6 +36,7 @@ Type -+- ProxyType --+- NamedType ----+--- TypeRef | +- TypeVar | +- HKTypeLambda | +- MatchType + | +- FlexibleType | +- GroundType -+- AndType +- OrType diff --git a/docs/_docs/reference/changed-features/compiler-plugins.md b/docs/_docs/reference/changed-features/compiler-plugins.md index 6be8a62c7ac4..c0bfccec8172 100644 --- a/docs/_docs/reference/changed-features/compiler-plugins.md +++ b/docs/_docs/reference/changed-features/compiler-plugins.md @@ -67,7 +67,7 @@ class DivideZero extends StandardPlugin: val name: String = "divideZero" override val description: String = "divide zero check" - def init(options: List[String]): List[PluginPhase] = + override def initialize(options: List[String])(using Context): List[PluginPhase] = (new DivideZeroPhase) :: Nil class DivideZeroPhase extends PluginPhase: @@ -90,7 +90,7 @@ end DivideZeroPhase ``` The plugin main class (`DivideZero`) must extend the trait `StandardPlugin` -and implement the method `init` that takes the plugin's options as argument +and implement the method `initialize` that takes the plugin's options as argument and returns a list of `PluginPhase`s to be inserted into the compilation pipeline. Our plugin adds one compiler phase to the pipeline. 
A compiler phase must extend diff --git a/docs/_docs/reference/changed-features/implicit-resolution.md b/docs/_docs/reference/changed-features/implicit-resolution.md index 1396ed04b6d3..0df8d2d60a7a 100644 --- a/docs/_docs/reference/changed-features/implicit-resolution.md +++ b/docs/_docs/reference/changed-features/implicit-resolution.md @@ -165,7 +165,22 @@ Condition (*) is new. It is necessary to ensure that the defined relation is tra [//]: # todo: expand with precise rules -**9.** The following change is currently enabled in `-source future`: + +**9.** Given disambiguation has changed. When comparing two givens that both match an expected type, we used to pick the most specific one, in alignment with +overloading resolution. From Scala 3.5 on, we pick the most general one instead. Compiling with Scala 3.5-migration will print a warning in all cases where the preference has changed. Example: +```scala +class A +class B extends A +class C extends A + +given A = A() +given B = B() +given C = C() + +summon[A] // was ambiguous, will now return `given_A` +``` + +**10.** The following change is currently enabled in `-source future`: Implicit resolution now avoids generating recursive givens that can lead to an infinite loop at runtime. Here is an example: diff --git a/docs/_docs/reference/experimental/explicit-nulls.md b/docs/_docs/reference/experimental/explicit-nulls.md index 1925b0b3c925..50339c3fa1e4 100644 --- a/docs/_docs/reference/experimental/explicit-nulls.md +++ b/docs/_docs/reference/experimental/explicit-nulls.md @@ -85,7 +85,7 @@ val c = new C() // c.f == "field is null" ``` -The unsoundness above can be caught by the compiler with the option `-Ysafe-init`. +The unsoundness above can be caught by the compiler with the option `-Wsafe-init`. More details can be found in [safe initialization](../other-new-features/safe-initialization.md). 
## Equality @@ -111,17 +111,59 @@ y == x // ok (x: Any) == null // ok ``` -## Java Interoperability +## Java Interoperability and Flexible Types -The Scala compiler can load Java classes in two ways: from source or from bytecode. In either case, -when a Java class is loaded, we "patch" the type of its members to reflect that Java types -remain implicitly nullable. +When dealing with reference types from Java, it's essential to address the implicit nullability of these types. +The most accurate way to represent them in Scala is to use nullable types, though working with lots of nullable types +directly can be annoying. +To streamline interactions with Java libraries, we introduce the concept of flexible types. -Specifically, we patch +The flexible type, denoted by `T?`, functions as an abstract type with unique bounds: `T | Null ... T`, +ensuring that `T | Null <: T? <: T`. +The subtyping rule treats a reference type coming from Java as either nullable or non-nullable depending on the context. +This concept draws inspiration from Kotlin's +[platform types](https://kotlinlang.org/docs/java-interop.html#null-safety-and-platform-types). +By relaxing null checks for such types, Scala aligns its safety guarantees with those of Java. +Notably, flexible types are non-denotable, meaning users cannot explicitly write them in the code; +only the compiler can construct or infer these types. -- the type of fields +Consequently, a value with a flexible type can serve as both a nullable and non-nullable value. +Additionally, both nullable and non-nullable values can be passed as parameters with flexible types during function calls. +Invoking the member functions of a flexible type is allowed, but it can trigger a `NullPointerException` +if the value is indeed `null` during runtime. 
-- the argument type and return type of methods +```scala +// Considering class J is from Java +class J { + // Translates to def f(s: String?): Unit + public void f(String s) { + } + + // Translates to def g(): String? + public String g() { + return ""; + } +} + +// Use J in Scala +def useJ(j: J) = + val x1: String = "" + val x2: String | Null = null + j.f(x1) // Passing String to String? + j.f(x2) // Passing String | Null to String? + j.f(null) // Passing Null to String? + + // Assign String? to String + val y1: String = j.g() + // Assign String? to String | Null + val y2: String | Null = j.g() + + // Calling member functions on flexible types + j.g().trim().length() +``` + +Upon loading a Java class, whether from source or bytecode, the Scala compiler dynamically adjusts the type of its members to reflect nullability. +This adjustment involves adding flexible types to the reference types of fields, as well as the argument types and return types of methods We illustrate the rules with following examples: @@ -138,7 +180,7 @@ We illustrate the rules with following examples: ```scala class C: - val s: String | Null + val s: String? val x: Int ``` @@ -151,15 +193,7 @@ We illustrate the rules with following examples: ==> ```scala - class C[T] { def foo(): T | Null } - ``` - - Notice this is rule is sometimes too conservative, as witnessed by - - ```scala - class InScala: - val c: C[Bool] = ??? // C as above - val b: Bool = c.foo() // no longer typechecks, since foo now returns Bool | Null + class C[T] { def foo(): T? } ``` - We can reduce the number of redundant nullable types we need to add. Consider @@ -172,21 +206,21 @@ We illustrate the rules with following examples: ==> ```scala - class Box[T] { def get(): T | Null } - class BoxFactory[T] { def makeBox(): Box[T] | Null } + class Box[T] { def get(): T? } + class BoxFactory[T] { def makeBox(): Box[T]? } ``` Suppose we have a `BoxFactory[String]`. 
Notice that calling `makeBox()` on it returns a - `Box[String] | Null`, not a `Box[String | Null] | Null`. This seems at first + `Box[T]?`, not a `Box[T?]?`. This seems at first glance unsound ("What if the box itself has `null` inside?"), but is sound because calling - `get()` on a `Box[String]` returns a `String | Null`. + `get()` on a `Box[String]` returns a `String?`. Notice that we need to patch _all_ Java-defined classes that transitively appear in the argument or return type of a field or method accessible from the Scala code being compiled. Absent crazy reflection magic, we think that all such Java classes _must_ be visible to the Typer in the first place, so they will be patched. -- We will append `Null` to the type arguments if the generic class is defined in Scala. +- We will patch the type arguments if the generic class is defined in Scala. ```java class BoxFactory { @@ -199,16 +233,16 @@ We illustrate the rules with following examples: ```scala class BoxFactory[T]: - def makeBox(): Box[T | Null] | Null - def makeCrazyBoxes(): java.util.List[Box[java.util.List[T] | Null]] | Null + def makeBox(): Box[T?]? + def makeCrazyBoxes(): java.util.List[Box[java.util.List[T]?]]? ``` - In this case, since `Box` is Scala-defined, we will get `Box[T | Null] | Null`. + In this case, since `Box` is Scala-defined, we will get `Box[T?]?`. This is needed because our nullability function is only applied (modularly) to the Java classes, but not to the Scala ones, so we need a way to tell `Box` that it contains a nullable value. - The `List` is Java-defined, so we don't append `Null` to its type argument. But we + The `List` is Java-defined, so we don't patch its type argument. But we still need to nullify its inside. 
- We don't nullify _simple_ literal constant (`final`) fields, since they are known to be non-null @@ -234,7 +268,7 @@ We illustrate the rules with following examples: val NAME_GENERATED: String | Null = getNewName() ``` -- We don't append `Null` to a field nor to a return type of a method which is annotated with a +- We don't patch a field nor to a return type of a method which is annotated with a `NotNull` annotation. ```java @@ -250,8 +284,8 @@ We illustrate the rules with following examples: ```scala class C: val name: String - def getNames(prefix: String | Null): java.util.List[String] // we still need to nullify the paramter types - def getBoxedName(): Box[String | Null] // we don't append `Null` to the outmost level, but we still need to nullify inside + def getNames(prefix: String?): java.util.List[String] // we still need to nullify the paramter types + def getBoxedName(): Box[String?] // we don't append `Null` to the outmost level, but we still need to nullify inside ``` The annotation must be from the list below to be recognized as `NotNull` by the compiler. @@ -280,6 +314,9 @@ We illustrate the rules with following examples: "io.reactivex.annotations.NonNull" :: Nil map PreNamedString) ``` +Flexible types can be disabled by using `-Yno-flexible-types` flag. +The ordinary union type `| Null` will be used instead. + ### Override check When we check overriding between Scala classes and Java classes, the rules are relaxed for [`Null`](https://scala-lang.org/api/3.x/scala/Null.html) type with this feature, in order to help users to working with Java libraries. 
diff --git a/docs/_docs/reference/experimental/main-annotation.md b/docs/_docs/reference/experimental/main-annotation.md index 7cc105be06f9..98a8479132da 100644 --- a/docs/_docs/reference/experimental/main-annotation.md +++ b/docs/_docs/reference/experimental/main-annotation.md @@ -4,6 +4,8 @@ title: "MainAnnotation" nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/main-annotation.html --- +> This feature was removed in https://github.com/scala/scala3/pull/19937. It was subsumed by macro annotations. See SIP-63 https://github.com/scala/improvement-proposals/pull/80. + `MainAnnotation` provides a generic way to define main annotations such as `@main`. When a users annotates a method with an annotation that extends `MainAnnotation` a class with a `main` method will be generated. The main method will contain the code needed to parse the command line arguments and run the application. @@ -93,6 +95,6 @@ import scala.util.CommandLineParser.FromString[T] val result = program() println("result: " + result) println("executed program") - + end myMain ``` diff --git a/docs/_docs/reference/experimental/modularity.md b/docs/_docs/reference/experimental/modularity.md new file mode 100644 index 000000000000..a989b71770af --- /dev/null +++ b/docs/_docs/reference/experimental/modularity.md @@ -0,0 +1,189 @@ +--- +layout: doc-page +title: "Modularity Improvements" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/modularity.html +--- + +# Modularity Improvements + +Martin Odersky, 7.1.2024 + +Scala is a language in the SML tradition, in the sense that it has +abstract and alias types as members of modules (which in Scala take the form of objects and classes). This leads to a simple dependently +typed system, where dependencies in types are on paths instead of full terms. + +So far, some key ingredients were lacking which meant that module composition with functors is harder in Scala than in SML. 
In particular, one often needs to resort the infamous `Aux` pattern that lifts type members into type parameters so that they can be tracked across class instantiations. This makes modular, dependently typed programs +much harder to write and read, and makes such programming only accessible to experts. + +In this note I propose some small changes to Scala's dependent typing that makes +modular programming much more straightforward. + +The suggested improvements have been implemented and are available +in source version `future` if the additional experimental language import `modularity` is present. For instance, using the following command: + +``` + scala compile -source:future -language:experimental.modularity +``` + +## Tracked Parameters + +Scala is dependently typed for functions, but unfortunately not for classes. +For instance, consider the following definitions: + +```scala + class C: + type T + ... + + def f(x: C): x.T = ... + + val y: C { type T = Int } +``` +Then `f(y)` would have type `Int`, since the compiler will substitute the +concrete parameter reference `y` for the formal parameter `x` in the result +type of `f`, and `y.T = Int` + +However, if we use a class `F` instead of a method `f`, things go wrong. + +```scala + class F(val x: C): + val result: x.T = ... +``` +Now `F(y).result` would not have type `Int` but instead the rather less useful type `?1.T` where `?1` is a so-called skolem constant of type `C` (a skolem represents an unknown value). + +This shortcoming means that classes cannot really be used for advanced +modularity constructs that rely on dependent typing. + +**Proposal:** Introduce a `tracked` modifier that can be added to +a `val` parameter of a class or trait. For every tracked class parameter of a class `C`, add a refinement in the constructor type of `C` that the class member is the same as the parameter. 
+ +**Example:** In the setting above, assume `F` is instead declared like this: +```scala + class F(tracked val x: C): + val result: x.T = ... +``` +Then the constructor `F` would get roughly the following type: +```scala + F(x1: C): F { val x: x1.type } +``` +_Aside:_ More precisely, both parameter and refinement would apply to the same name `x` but the refinement still refers to the parameter. We unfortunately can't express that in source, however, so we chose the new name `x1` for the parameter in the explanation. + +With the new constructor type, the expression `F(y).result` would now have the type `Int`, as hoped for. The reasoning to get there is as follows: + + - The result of the constructor `F(y)` has type `F { val x: y.type }` by + the standard typing for dependent functions. + - The type of `result` inside `F` is `x.T`. + - Hence, the type of `result` as a member of `F { val x: y.type }` is `y.T`, which is equal to `Int`. + +The addition of tracked parameters makes classes suitable as a fundamental modularity construct supporting dependent typing. Here is an example, taken from issue #3920: + +```scala +trait Ordering: + type T + def compare(t1:T, t2: T): Int + +class SetFunctor(tracked val ord: Ordering): + type Set = List[ord.T] + + def empty: Set = Nil + + extension (s: Set) + def add(x: ord.T): Set = x :: remove(x) + def remove(x: ord.T): Set = s.filter(e => ord.compare(x, e) != 0) + def contains(x: ord.T): Boolean = s.exists(e => ord.compare(x, e) == 0) + +object intOrdering extends Ordering: + type T = Int + def compare(t1: T, t2: T): Int = t1 - t2 + +val IntSet = new SetFunctor(intOrdering) + +@main def Test = + import IntSet.* + val set = IntSet.empty.add(6).add(8).add(23) + assert(!set.contains(7)) + assert(set.contains(8)) +``` +This works as it should now. Without the addition of `tracked` to the +parameter of `SetFunctor` typechecking would immediately lose track of +the element type `T` after an `add`, and would therefore fail. 
+ +**Syntax Change** + +``` +ClsParam ::= {Annotation} [{Modifier | ‘tracked’} (‘val’ | ‘var’)] Param +``` + +The (soft) `tracked` modifier is only allowed for `val` parameters of classes. + +**Discussion** + +Since `tracked` is so useful, why not assume it by default? First, `tracked` makes sense only for `val` parameters. If a class parameter is not also a field declared using `val` then there's nothing to refine in the constructor result type. One could think of at least making all `val` parameters tracked by default, but that would be a backwards incompatible change. For instance, the following code would break: + +```scala +case class Foo(x: Int) +var foo = Foo(1) +if someCondition then foo = Foo(2) +``` +If we assume `tracked` for parameter `x` (which is implicitly a `val`), +then `foo` would get inferred type `Foo { val x: 1 }`, so it could not +be reassigned to a value of type `Foo { val x: 2 }` on the next line. + +Another approach might be to assume `tracked` for a `val` parameter `x` +only if the class refers to a type member of `x`. But it turns out that this +scheme is unimplementable since it would quickly lead to cyclic references +when typechecking recursive class graphs. So an explicit `tracked` looks like the best available option. + +## Allow Class Parents to be Refined Types + +Since `tracked` parameters create refinements in constructor types, +it is now possible that a class has a parent that is a refined type. +Previously such types were not permitted, since we were not quite sure how to handle them. But with tracked parameters it becomes pressing to +admit such types. + +**Proposal** Allow refined types as parent types of classes. All refinements that are inherited in this way become synthetic members of the class. + +**Example** + +```scala +class C: + type T + def m(): T + +type R = C: + type T = Int + def m(): 22 + +class D extends R: + def next(): D +``` +This code now compiles. 
The definition of `D` is expanded as follows: + +```scala +class D extends C: + def next(): D + /*synthetic*/ type T = Int + /*synthetic*/ def m(): 22 +``` +Note how class refinements are moved from the parent constructor of `D` into the body of class `D` itself. + +This change does not entail a syntax change. Syntactically, parent types cannot be refined types themselves. So the following would be illegal: +```scala +class D extends C { type T = Int; def m(): 22 }: // error + def next(): D +``` +If a refined type should be used directly as a parent type of a class, it needs to come in parentheses: +```scala +class D extends (C { type T = Int; def m(): 22 }) // ok + def next(): D +``` + +## A Small Relaxation To Export Rules + +The rules for export forwarders are changed as follows. + +Previously, all export forwarders were declared `final`. Now, only term members are declared `final`. Type aliases are left aside. + +This makes it possible to export the same type member into several traits and then mix these traits in the same class. The test file `tests/pos/typeclass-aggregates.scala` shows why this is essential if we want to combine multiple givens with type members in a new given that aggregates all these givens in an intersection type. + +The change does not lose safety since different type aliases would in any case lead to uninstantiatable classes. \ No newline at end of file diff --git a/docs/_docs/reference/experimental/named-tuples.md b/docs/_docs/reference/experimental/named-tuples.md new file mode 100644 index 000000000000..3867b4d13f15 --- /dev/null +++ b/docs/_docs/reference/experimental/named-tuples.md @@ -0,0 +1,263 @@ +--- +layout: doc-page +title: "Named Tuples" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/named-tuples.html +--- + +The elements of a tuple can now be named. 
Example: +```scala +type Person = (name: String, age: Int) +val Bob: Person = (name = "Bob", age = 33) + +Bob match + case (name, age) => + println(s"$name is $age years old") + +val persons: List[Person] = ... +val minors = persons.filter: p => + p.age < 18 +``` +Named bindings in tuples are similar to function parameters and arguments. We use `name: Type` for element types and `name = value` for element values. It is illegal to mix named and unnamed elements in a tuple, or to use the same +name for two different elements. + +Fields of named tuples can be selected by their name, as in the line `p.age < 18` above. + +### Conformance and Convertibility + +The order of names in a named tuple matters. For instance, the type `Person` above and the type `(age: Int, name: String)` would be different, incompatible types. + +Values of named tuple types can also be defined using regular tuples. For instance: +```scala +val Laura: Person = ("Laura", 25) + +def register(person: Person) = ... +register(person = ("Silvain", 16)) +register(("Silvain", 16)) +``` +This follows since a regular tuple `(T_1, ..., T_n)` is treated as a subtype of a named tuple `(N_1 = T_1, ..., N_n = T_n)` with the same element types. + +In the other direction, one can convert a named tuple to an unnamed tuple with the `toTuple` method. Example: +```scala +val x: (String, Int) = Bob.toTuple // ok +``` +`toTuple` is defined as an extension method in the `NamedTuple` object. +It returns the given tuple unchanged and simply "forgets" the names. + +A `.toTuple` selection is inserted implicitly by the compiler if it encounters a named tuple but the expected type is a regular tuple. So the following works as well: +```scala +val x: (String, Int) = Bob // works, expanded to Bob.toTuple +``` +The difference between subtyping in one direction and automatic `.toTuple` conversions in the other is relatively minor. The main difference is that `.toTuple` conversions don't work inside type constructors.
So the following is OK: +```scala + val names = List("Laura", "Silvain") + val ages = List(25, 16) + val persons: List[Person] = names.zip(ages) +``` +But the following would be illegal. +```scala + val persons: List[Person] = List(Bob, Laura) + val pairs: List[(String, Int)] = persons // error +``` +We would need an explicit `_.toTuple` selection to express this: +```scala + val pairs: List[(String, Int)] = persons.map(_.toTuple) +``` +Note that conformance rules for named tuples are analogous to the rules for named parameters. One can assign parameters by position to a named parameter list. +```scala + def f(param: Int) = ... + f(param = 1) // OK + f(2) // Also OK +``` +But one cannot use a name to pass an argument to an unnamed parameter: +```scala + val f: Int => T + f(2) // OK + f(param = 2) // Not OK +``` +The rules for tuples are analogous. Unnamed tuples conform to named tuple types, but the opposite requires a conversion. + +### Pattern Matching + +When pattern matching on a named tuple, the pattern may be named or unnamed. +If the pattern is named it needs to mention only a subset of the tuple names, and these names can come in any order. So the following are all OK: +```scala +Bob match + case (name, age) => ... + +Bob match + case (name = x, age = y) => ... + +Bob match + case (age = x) => ... + +Bob match + case (age = x, name = y) => ... +``` + +### Expansion + +Named tuples are in essence just a convenient syntax for regular tuples. In the internal representation, a named tuple type is represented at compile time as a pair of two tuples. One tuple contains the names as literal constant string types, the other contains the element types. The runtime representation of a named tuples consists of just the element values, whereas the names are forgotten. 
This is achieved by declaring `NamedTuple` +in package `scala` as an opaque type as follows: +```scala + opaque type NamedTuple[N <: Tuple, +V <: Tuple] >: V = V +``` +For instance, the `Person` type would be represented as the type +```scala +NamedTuple[("name", "age"), (String, Int)] +``` +`NamedTuple` is an opaque type alias of its second, value parameter. The first parameter is a string constant type which determines the name of the element. Since the type is just an alias of its value part, names are erased at runtime, and named tuples and regular tuples have the same representation. + +A `NamedTuple[N, V]` type is publicly known to be a supertype (but not a subtype) of its value parameter `V`, which means that regular tuples can be assigned to named tuples but not _vice versa_. + +The `NamedTuple` object contains a number of extension methods for named tuples that mirror the same functions in `Tuple`. Examples are +`apply`, `head`, `tail`, `take`, `drop`, `++`, `map`, or `zip`. +Similar to `Tuple`, the `NamedTuple` object also contains types such as `Elem`, `Head`, `Concat` +that describe the results of these extension methods. + +The translation of named tuples to instances of `NamedTuple` is fixed by the specification and therefore known to the programmer. This means that: + + - All tuple operations also work with named tuples "out of the box". + - Macro libraries can rely on this expansion. + +### The NamedTuple.From Type + +The `NamedTuple` object contains a type definition +```scala + type From[T] <: AnyNamedTuple +``` +`From` is treated specially by the compiler. When `NamedTuple.From` is applied to +an argument type that is an instance of a case class, the type expands to the named +tuple consisting of all the fields of that case class. +Here, _fields_ means: elements of the first parameter section.
For instance, assuming +```scala +case class City(zip: Int, name: String, population: Int) +``` +then `NamedTuple.From[City]` is the named tuple +```scala +(zip: Int, name: String, population: Int) +``` +The same works for enum cases expanding to case classes, abstract types with case classes as upper bound, alias types expanding to case classes +and singleton types with case classes as underlying type. + +`From` is also defined on named tuples. If `NT` is a named tuple type, then `From[NT] = NT`. + + +### Restrictions + +The following restrictions apply to named tuple elements: + + 1. Either all elements of a tuple are named or none are named. It is illegal to mix named and unnamed elements in a tuple. For instance, the following is in error: + ```scala + val illFormed1 = ("Bob", age = 33) // error + ``` + 2. Each element name in a named tuple must be unique. For instance, the following is in error: + ```scala + val illFormed2 = (name = "", age = 0, name = true) // error + ``` + 3. Named tuples can be matched with either named or regular patterns. But regular tuples and other selector types can only be matched with regular tuple patterns. For instance, the following is in error: + ```scala + (tuple: Tuple) match + case (age = x) => // error + ``` + 4. Regular selector names `_1`, `_2`, ... are not allowed as names in named tuples. + +### Syntax + +The syntax of Scala is extended as follows to support named tuples: +``` +SimpleType ::= ... + | ‘(’ NameAndType {‘,’ NameAndType} ‘)’ +NameAndType ::= id ':' Type + +SimpleExpr ::= ... + | '(' NamedExprInParens {‘,’ NamedExprInParens} ')' +NamedExprInParens ::= id '=' ExprInParens + +Patterns ::= Pattern {‘,’ Pattern} + | NamedPattern {‘,’ NamedPattern} +NamedPattern ::= id '=' Pattern +``` + +### Named Pattern Matching + +We allow named patterns not just for named tuples but also for case classes. 
+For instance: +```scala +city match + case c @ City(name = "London") => println(c.population) + case City(name = n, zip = 1026, population = pop) => println(pop) +``` + +Named constructor patterns are analogous to named tuple patterns. In both cases + + - either all fields are named or none is, + - every name must match the name of some field of the selector, + - names can come in any order, + - not all fields of the selector need to be matched. + +This revives SIP 43, with a much simpler desugaring than originally proposed. +Named patterns are compatible with extensible pattern matching simply because +`unapply` results can be named tuples. + +### Source Incompatibilities + +There are some source incompatibilities involving named tuples of length one. +First, what was previously classified as an assignment could now be interpreted as a named tuple. Example: +```scala +var age: Int +(age = 1) +``` +This was an assignment in parentheses before, and is a named tuple of arity one now. It is however not idiomatic Scala code, since assignments are not usually enclosed in parentheses. + +Second, what was a named argument to an infix operator can now be interpreted as a named tuple. +```scala +class C: + infix def f(age: Int) +val c: C +``` +then +```scala +c f (age = 1) +``` +will now construct a tuple as second operand instead of passing a named parameter. + +### Computed Field Names + +The `Selectable` trait now has a `Fields` type member that can be instantiated +to a named tuple. + +```scala +trait Selectable: + type Fields <: NamedTuple.AnyNamedTuple +``` + +If `Fields` is instantiated in a subclass of `Selectable` to some named tuple type, +then the available fields and their types will be defined by that type. Assume `n: T` +is an element of the `Fields` type in some class `C` that implements `Selectable`, +that `c: C`, and that `n` is not otherwise legal as a name of a selection on `c`.
+Then `c.n` is a legal selection, which expands to `c.selectDynamic("n").asInstanceOf[T]`. + +It is the task of the implementation of `selectDynamic` in `C` to ensure that its +computed result conforms to the predicted type `T` + +As an example, assume we have a query type `Q[T]` defined as follows: + +```scala +trait Q[T] extends Selectable: + type Fields = NamedTuple.Map[NamedTuple.From[T], Q] + def selectDynamic(fieldName: String) = ... +``` + +Assume in the user domain: +```scala +case class City(zipCode: Int, name: String, population: Int) +val city: Q[City] +``` +Then +```scala +city.zipCode +``` +has type `Q[Int]` and it expands to +```scala +city.selectDynamic("zipCode").asInstanceOf[Q[Int]] +``` diff --git a/docs/_docs/reference/experimental/typeclasses.md b/docs/_docs/reference/experimental/typeclasses.md new file mode 100644 index 000000000000..a78e764bbe7d --- /dev/null +++ b/docs/_docs/reference/experimental/typeclasses.md @@ -0,0 +1,790 @@ +--- +layout: doc-page +title: "Better Support for Type Classes" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/typeclasses.html +--- + +Martin Odersky, 8.1.2024, edited 5.4.2024 + +A type class in Scala is a pattern where we define + + - a trait with one type parameter (the _type class_) + - given instances at specific instantiations of that trait, + - using clauses or context bounds abstracting over that trait. + +Type classes as a pattern work overall OK, but if we compare them to native implementations in Haskell, or protocols in Swift, or traits in Rust, then there are some idiosyncrasies and rough corners which in the end make them +a bit cumbersome and limiting for standard generic programming patterns. Much has improved since Scala 2's implicits, but there is still some gap to bridge to get to parity with these languages. 
+ +This note shows that with some fairly small and reasonable tweaks to Scala's syntax and typing rules we can obtain a much better scheme for working with type classes, or do generic programming in general. + +The bulk of the suggested improvements has been implemented and is available +under source version `future` if the additional experimental language import `modularity` is present. For instance, using the following command: + +``` + scala compile -source:future -language:experimental.modularity +``` + +It is intended to turn features described here into proposals under the Scala improvement process. A first installment is SIP 64, which covers some syntactic changes, names for context bounds, multiple context bounds and deferred givens. The order of exposition described in this note is different from the planned proposals of SIPs. This doc is not a guide on how to sequence details, but instead wants to present a vision of what is possible. For instance, we start here with a feature (Self types and `is` syntax) that has turned out to be controversial and that will probably be proposed only late in the sequence of SIPs. + +## Generalizing Context Bounds + + The only place in Scala's syntax where the type class pattern is relevant is + in context bounds. A context bound such as + +```scala + def min[A: Ordering](x: List[A]): A +``` +requires that `Ordering` is a trait or class with a single type parameter (which makes it a type class) and expands to a `using` clause that instantiates that parameter. Here is the expansion of `min`: +```scala + def min[A](x: List[A])(using Ordering[A]): A +``` + +**Proposal** Allow type classes to define an abstract type member named `Self` instead of a type parameter. 
+ +**Example** + +```scala + trait Ord: + type Self + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + object Monoid: + def unit[M](using m: Monoid { type Self = M}): M + + trait Functor: + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) + + def reduce[A: Monoid](xs: List[A]): A = + xs.foldLeft(Monoid.unit)(_ `combine` _) + + trait ParserCombinator: + type Self + type Input + type Result + extension (self: Self) + def parse(input: Input): Option[Result] = ... + + def combine[A: ParserCombinator, B: ParserCombinator { type Input = A.Input }] = ... +``` + +**Advantages** + + - Avoids repetitive type parameters, concentrates on what's essential, namely the type class hierarchy. + - Gives a clear indication of traits intended as type classes. A trait is a type class + if it has type `Self` as a member + - Allows to create aggregate type classes that combine givens via intersection types. + - Allows to use refinements in context bounds (the `combine` example above would be very awkward to express using the old way of context bounds expanding to type constructors). + +`Self`-based context bounds are a better fit for a dependently typed language like Scala than parameter-based ones. The main reason is that we are dealing with proper types, not type constructors. Proper types can be parameterized, intersected, or refined. This makes `Self`-based designs inherently more compositional than parameterized ones. + + + +**Details** + +When a trait has both a type parameter and an abstract `Self` type, we + resolve a context bound to the `Self` type. 
This allows type classes + that carry type parameters, as in + +```scala +trait Sequential[E]: + type Self +``` + +Here, +```scala +[S: Sequential[Int]] +``` +should resolve to: +```scala +[S](using Sequential[Int] { type Self = S }) +``` +and not to: +```scala +[S](using Sequential[S]) +``` + +**Discussion** + + Why not use `This` for the self type? The name `This` suggests that it is the type of `this`. But this is not true for type class traits. `Self` is the name of the type implementing a distinguished _member type_ of the trait in a `given` definition. `Self` is an established term in both Rust and Swift with the meaning used here. + + One possible objection to the `Self` based design is that it does not cover "multi-parameter" type classes. But neither do context bounds! "Multi-parameter" type classes in Scala are simply givens that can be synthesized with the standard mechanisms. Type classes in the strict sense abstract only over a single type, namely the implementation type of a trait. + + +## Auxiliary Type Alias `is` + +We introduce a standard type alias `is` in the Scala package or in `Predef`, defined like this: + +```scala + infix type is[A <: AnyKind, B <: {type Self <: AnyKind}] = B { type Self = A } +``` + +This makes writing instance definitions and using clauses quite pleasant. Examples: + +```scala + given Int is Ord ... + given Int is Monoid ... + + type Reader = [X] =>> Env => X + given Reader is Monad ... + + object Monoid: + def unit[M](using m: M is Monoid): M +``` + +(more examples will follow below) + + + +## Naming Context Bounds + +Context bounds are a convenient and legible abbreviation. A problem so far is that they are always anonymous, +one cannot name the using parameter to which a context bound expands. + +For instance, consider a `reduce` method over `Monoid`s defined like this: + +```scala +def reduce[A : Monoid](xs: List[A]): A = ??? 
+``` +Since we don't have a name for the `Monoid` instance of `A`, we need to resort to `summon` in the body of `reduce`: +```scala +def reduce[A : Monoid](xs: List[A]): A = + xs.foldLeft(summon[Monoid[A]])(_ `combine` _) +``` +That's generally considered too painful to write and read, hence people usually adopt one of two alternatives. Either, eschew context bounds and switch to using clauses: +```scala +def reduce[A](xs: List[A])(using m: Monoid[A]): A = + xs.foldLeft(m)(_ `combine` _) +``` +Or, plan ahead and define a "trampoline" method in `Monoid`'s companion object: +```scala + trait Monoid[A] extends SemiGroup[A]: + def unit: A + object Monoid: + def unit[A](using m: Monoid[A]): A = m.unit + ... + def reduce[A : Monoid](xs: List[A]): A = + xs.foldLeft(Monoid.unit)(_ `combine` _) +``` +This is all accidental complexity which can be avoided by the following proposal. + +**Proposal:** Allow to name a context bound, like this: +```scala + def reduce[A : Monoid as m](xs: List[A]): A = + xs.foldLeft(m.unit)(_ `combine` _) +``` + +We use `as x` after the type to bind the instance to `x`. This is analogous to import renaming, which also introduces a new name for something that comes before. + +**Benefits:** The new syntax is simple and clear. +It avoids the awkward choice between concise context bounds that can't be named and verbose using clauses that can. + +### New Syntax for Aggregate Context Bounds + +Aggregate context bounds like `A : X : Y` are not obvious to read, and it becomes worse when we add names, e.g. `A : X as x : Y as y`. + +**Proposal:** Allow to combine several context bounds inside `{...}`, analogous +to import clauses. Example: + +```scala + trait A: + def showMax[X : {Ordering, Show}](x: X, y: X): String + class B extends A: + def showMax[X : {Ordering as ordering, Show as show}](x: X, y: X): String = + show.asString(ordering.max(x, y)) +``` + +The old syntax with multiple `:` should be phased out over time.
+ +**Benefits:** The new syntax is much clearer than the old one, in particular for newcomers that don't know context bounds well. + +### Better Default Names for Context Bounds + +So far, an unnamed context bound for a type parameter gets a synthesized fresh name. It would be much more useful if it got the name of the constrained type parameter instead, translated to be a term name. This means our `reduce` method over monoids would not even need an `as` binding. We could simply formulate it as follows: +``` + def reduce[A : Monoid](xs: List[A]) = + xs.foldLeft(A.unit)(_ `combine` _) +``` + +In Scala we are already familiar with using one name for two related things where one version names a type and the other an associated value. For instance, we use that convention for classes and companion objects. In retrospect, the idea of generalizing this to also cover type parameters is obvious. It is surprising that it was not brought up before. + +**Proposed Rules** + + 1. The generated evidence parameter for a context bound `A : C as a` has name `a` + 2. The generated evidence for a context bound `A : C` without an `as` binding has name `A` (seen as a term name). So, `A : C` is equivalent to `A : C as A`. + 3. If there are multiple context bounds for a type parameter, as in `A : {C_1, ..., C_n}`, the generated evidence parameter for every context bound `C_i` has a fresh synthesized name, unless the context bound carries an `as` clause, in which case rule (1) applies. + +TODO: Present context bound proxy concept. + +The default naming convention reduces the need for named context bounds. But named context bounds are still essential, for at least two reasons: + + - They are needed to give names to multiple context bounds. + - They give an explanation what a single unnamed context bound expands to. + + +### Expansion of Context Bounds + +Context bounds are currently translated to implicit parameters in the last parameter list of a method or class. 
This is a problem if a context bound is mentioned in one of the preceding parameter types. For example, consider a type class of parsers with associated type members `Input` and `Result` describing the input type on which the parsers operate and the type of results they produce: +```scala +trait Parser[P]: + type Input + type Result +``` +Here is a method `run` that runs a parser on an input of the required type: + +```scala +def run[P : Parser](in: P.Input): P.Result +``` +Or, making clearer what happens by using an explicit name for the context bound: +```scala +def run[P : Parser as p](in: p.Input): p.Result +``` +With the current translation this does not work since it would be expanded to: +```scala + def run[P](x: p.Input)(using p: Parser[P]): p.Result +``` +Note that the `p` in `p.Input` refers to the `p` introduced in the using clause, which comes later. So this is ill-formed. + +This problem would be fixed by changing the translation of context bounds so that they expand to using clauses immediately after the type parameter. But such a change is infeasible, for two reasons: + + 1. It would be a binary-incompatible change. + 2. Putting using clauses earlier can impair type inference. A type in + a using clause can be constrained by term arguments coming before that + clause. Moving the using clause first would miss those constraints, which could cause ambiguities in implicit search. + +But there is an alternative which is feasible: + +**Proposal:** Map the context bounds of a method or class as follows: + + 1. If one of the bounds is referred to by its term name in a subsequent parameter clause, the context bounds are mapped to a using clause immediately preceding the first such parameter clause. + 2. Otherwise, if the last parameter clause is a using (or implicit) clause, merge all parameters arising from context bounds in front of that clause, creating a single using clause. + 3. 
Otherwise, let the parameters arising from context bounds form a new using clause at the end. + +Rules (2) and (3) are the status quo, and match Scala 2's rules. Rule (1) is new but since context bounds so far could not be referred to, it does not apply to legacy code. Therefore, binary compatibility is maintained. + +**Discussion** More refined rules could be envisaged where context bounds are spread over different using clauses so that each comes as late as possible. But it would make matters more complicated and the gain in expressiveness is not clear to me. + +Named (either explicitly, or by default) context bounds in givens that produce classes are mapped to tracked val's of these classes (see #18958). This allows +references to these parameters to be precise, so that information about dependent type members is preserved. + + +## Context Bounds for Type Members + +It's not very orthogonal to allow subtype bounds for both type parameters and abstract type members, but context bounds only for type parameters. What's more, we don't even have the fallback of an explicit using clause for type members. The only alternative is to also introduce a set of abstract givens that get implemented in each subclass. This is extremely heavyweight and opaque to newcomers. + +**Proposal**: Allow context bounds for type members. Example: + +```scala + class Collection: + type Element : Ord +``` + +The question is how these bounds are expanded. Context bounds on type parameters +are expanded into using clauses. But for type members this does not work, since we cannot refer to a member type of a class in a parameter type of that class. What we are after is an equivalent of using parameter clauses but represented as class members. 
+ +**Proposal:** Introduce a new way to implement a given definition in a trait like this: +```scala +given T = deferred +``` +`deferred` is a new method in the `scala.compiletime` package, which can appear only as the right hand side of a given defined in a trait. Any class implementing that trait will provide an implementation of this given. If a definition is not provided explicitly, it will be synthesized by searching for a given of type `T` in the scope of the inheriting class. Specifically, the scope in which this given will be searched is the environment of that class augmented by its parameters but not containing its members (since that would lead to recursive resolutions). If an implementation _is_ provided explicitly, it counts as an override of a concrete definition and needs an `override` modifier. + +Deferred givens allow a clean implementation of context bounds in traits, +as in the following example: +```scala +trait Sorted: + type Element : Ord + +class SortedSet[A : Ord] extends Sorted: + type Element = A +``` +The compiler expands this to the following implementation: +```scala +trait Sorted: + type Element + given Ord[Element] = compiletime.deferred + +class SortedSet[A](using A: Ord[A]) extends Sorted: + type Element = A + override given Ord[Element] = A // i.e. the A defined by the using clause +``` + +The using clause in class `SortedSet` provides an implementation for the deferred given in trait `Sorted`. + +**Benefits:** + + - Better orthogonality, type parameters and abstract type members now accept the same kinds of bounds. + - Better ergonomics, since deferred givens get naturally implemented in inheriting classes, no need for boilerplate to fill in definitions of abstract givens. + +**Alternative:** It was suggested that we use a modifier for a deferred given instead of a `= deferred`. Something like `deferred given C[T]`. 
But a modifier does not suggest the concept that a deferred given will be implemented automatically in subclasses unless an explicit definition is written. In a sense, we can see `= deferred` as the invocation of a magic macro that is provided by the compiler. So from a user's point of view a given with `deferred` right hand side is not abstract. +It is a concrete definition where the compiler will provide the correct implementation. + +## New Given Syntax + +A good language syntax is like a Bach fugue: A small set of motifs is combined in a multitude of harmonic ways. Dissonances and irregularities should be avoided. + +When designing Scala 3, I believe that, by and large, we achieved that goal, except in one area, which is the syntax of givens. There _are_ some glaring dissonances, as seen in this code for defining an ordering on lists: +```scala +given [A](using Ord[A]): Ord[List[A]] with + def compare(x: List[A], y: List[A]) = ... +``` +The `:` feels utterly foreign in this position. It's definitely not a type ascription, so what is its role? Just as bad is the trailing `with`. Everywhere else we use braces or trailing `:` to start a scope of nested definitions, so the need of `with` sticks out like a sore thumb. + +We arrived at that syntax not because of a flight of fancy but because even after trying for about a year to find other solutions it seemed like the least bad alternative. The awkwardness of the given syntax arose because we insisted that givens could be named or anonymous, with the default on anonymous, that we would not use underscore for an anonymous given, and that the name, if present, had to come first, and have the form `name [parameters] :`. In retrospect, that last requirement showed a lack of creativity on our part. + +Sometimes unconventional syntax grows on you and becomes natural after a while. But here it was unfortunately the opposite. 
The longer I used given definitions in this style the more awkward they felt, in particular since the rest of the language seemed so much better put together by comparison. And I believe many others agree with me on this. Since the current syntax is unnatural and esoteric, this means it's difficult to discover and very foreign even after that. This makes it much harder to learn and apply givens than it need be. + +Things become much simpler if we introduce the optional name instead with an `as name` clause at the end, just like we did for context bounds. We can then use a more intuitive syntax for givens like this: +```scala +given String is Ord: + def compare(x: String, y: String) = ... + +given [A : Ord] => List[A] is Ord: + def compare(x: List[A], y: List[A]) = ... + +given Int is Monoid: + extension (x: Int) def combine(y: Int) = x + y + def unit = 0 +``` +Here, the second given can be read as if `A` is an `Ord` then `List[A]` is also an`Ord`. Or: for all `A: Ord`, `List[A]` is `Ord`. The arrow can be seen as an implication, note also the analogy to pattern matching syntax. + +If explicit names are desired, we add them with `as` clauses: +```scala +given String is Ord as intOrd: + def compare(x: String, y: String) = ... + +given [A : Ord] => List[A] is Ord as listOrd: + def compare(x: List[A], y: List[A]) = ... + +given Int is Monoid as intMonoid: + extension (x: Int) def combine(y: Int) = x + y + def unit = 0 +``` + +The underlying principles are: + + - A `given` clause consists of the following elements: + + - An optional _precondition_, which introduces type parameters and/or using clauses and which ends in `=>`, + - the implemented _type_, + - an optional name binding using `as`, + - an implementation which consists of either an `=` and an expression, + or a template body. 
+
+ - Since there is no longer a middle `:` separating name and parameters from the implemented type, we can use a `:` to start the class body without looking unnatural, as is done everywhere else. That eliminates the special case where `with` was used before.
+
+This will be a fairly significant change to the given syntax. I believe there's still a possibility to do this. Not so much code has migrated to new style givens yet, and code that was written can be changed fairly easily. Specifically, there are about 900K definitions of `implicit def`s
+in Scala code on Github and about 10K definitions of `given ... with`. So about 1% of all code uses the Scala 3 syntax, which would have to be changed again.
+
+Changing something introduced just recently in Scala 3 is not fun,
+but I believe these adjustments are preferable to letting bad syntax
+sit there and fester. The cost of changing should be amortized by improved developer experience over time, and better syntax would also help in migrating Scala 2 style implicits to Scala 3. But we should do it quickly before a lot more code
+starts migrating.
+
+Migration to the new syntax is straightforward, and can be supported by automatic rewrites. For a transition period we can support both the old and the new syntax. It would be a good idea to backport the new given syntax to the LTS version of Scala so that code written in this version can already use it. The current LTS would then support old and new-style givens indefinitely, whereas new Scala 3.x versions would phase out the old syntax over time.
+
+
+### Abolish Abstract Givens
+
+Another simplification is possible. So far we have special syntax for abstract givens:
+```scala
+given x: T
+```
+The problem is that this syntax clashes with the quite common case where we want to establish a given without any nested definitions. 
For instance +consider a given that constructs a type tag: +```scala +class Tag[T] +``` +Then this works: +```scala +given Tag[String]() +given Tag[String] with {} +``` +But the following more natural syntax fails: +```scala +given Tag[String] +``` +The last line gives a rather cryptic error: +``` +1 |given Tag[String] + | ^ + | anonymous given cannot be abstract +``` +The problem is that the compiler thinks that the last given is intended to be abstract, and complains since abstract givens need to be named. This is another annoying dissonance. Nowhere else in Scala's syntax does adding a +`()` argument to a class cause a drastic change in meaning. And it's also a violation of the principle that it should be possible to define all givens without providing names for them. + +Fortunately, abstract givens are no longer necessary since they are superseded by the new `deferred` scheme. So we can deprecate that syntax over time. Abstract givens are a highly specialized mechanism with a so far non-obvious syntax. We have seen that this syntax clashes with reasonable expectations of Scala programmers. My estimate is that maybe a dozen people world-wide have used abstract givens in anger so far. + +**Proposal** In the future, let the `= deferred` mechanism be the only way to deliver the functionality of abstract givens. + +This is less of a disruption than it might appear at first: + + - `given T` was illegal before since abstract givens could not be anonymous. + It now means a concrete given of class `T` with no member definitions. + - `given x: T` is legacy syntax for an abstract given. + - `given T as x = deferred` is the analogous new syntax, which is more powerful since + it allows for automatic instantiation. + - `given T = deferred` is the anonymous version in the new syntax, which was not expressible before. + +**Benefits:** + + - Simplification of the language since a feature is dropped + - Eliminate non-obvious and misleading syntax. 
+ + +### Bonus: Fixing Singleton + +We know the current treatment of `Singleton` as a type bound is broken since +`x.type | y.type <: Singleton` holds by the subtyping rules for union types, even though `x.type | y.type` is clearly not a singleton. + +A better approach is to treat `Singleton` as a type class that is interpreted specially by the compiler. + +We can do this in a backwards-compatible way by defining `Singleton` like this: + +```scala +trait Singleton: + type Self +``` + +Then, instead of using an unsound upper bound we can use a context bound: + +```scala +def f[X: Singleton](x: X) = ... +``` + +The context bound is treated specially by the compiler so that no using clause is generated at runtime (this is straightforward, using the erased definitions mechanism). + +### Bonus: Precise Typing + +This approach also presents a solution to the problem how to express precise type variables. We can introduce another special type class `Precise` and use it like this: + +```scala +def f[X: Precise](x: X) = ... +``` +Like a `Singleton` bound, a `Precise` bound disables automatic widening of singleton types or union types in inferred instances of type variable `X`. But there is no requirement that the type argument _must_ be a singleton. + + +## Summary of Syntax Changes + +Here is the complete context-free syntax for all proposed features. +Overall the syntax for givens becomes a lot simpler than what it was before. 
+ +``` +TmplDef ::= 'given' GivenDef +GivenDef ::= [GivenConditional '=>'] GivenSig +GivenConditional ::= [DefTypeParamClause | UsingParamClause] {UsingParamClause} +GivenSig ::= GivenType ['as' id] ([‘=’ Expr] | TemplateBody) + | ConstrApps ['as' id] TemplateBody +GivenType ::= AnnotType {id [nl] AnnotType} + +TypeDef ::= id [TypeParamClause] TypeAndCtxBounds +TypeParamBounds ::= TypeAndCtxBounds +TypeAndCtxBounds ::= TypeBounds [‘:’ ContextBounds] +ContextBounds ::= ContextBound | '{' ContextBound {',' ContextBound} '}' +ContextBound ::= Type ['as' id] +``` + + + +## Examples + + +### Example 1 + +Here are some standard type classes, which were mostly already introduced at the start of this note, now with associated instance givens and some test code: + +```scala + // Type classes + + trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + def <= (y: Self): Boolean = compareTo(y) <= 0 + def >= (y: Self): Boolean = compareTo(y) >= 0 + def max(y: Self): Self = if x < y then y else x + + trait Show: + type Self + extension (x: Self) def show: String + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] // Here, Self is a type constructor with parameter A + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) + + // Instances + + given Int is Ord: + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + + given [T: Ord] => List[T] is Ord: + extension (xs: List[T]) def compareTo(ys: List[T]): Int = + (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + 
val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given List is Monad: + extension [A](xs: List[A]) + def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given [Ctx] => Reader[Ctx] is Monad: + extension [A](r: Ctx => A) + def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + // Usages + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given [T: Ord] => T is Ord as descending: + extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) +``` +The `Reader` type is a bit hairy. It is a type class (written in the parameterized syntax) where we fix a context `Ctx` and then let `Reader` be the polymorphic function type over `X` that takes a context `Ctx` and returns an `X`. Type classes like this are commonly used in monadic effect systems. + + +### Example 2 + +The following contributed code by @LPTK (issue #10929) did _not_ work at first since +references were not tracked correctly. The version below adds explicit tracked parameters which makes the code compile. 
+```scala +infix abstract class TupleOf[T, +A]: + type Mapped[+A] <: Tuple + def map[B](x: T)(f: A => B): Mapped[B] + +object TupleOf: + + given TupleOf[EmptyTuple, Nothing] with + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple](using tracked val tup: Rest TupleOf A): TupleOf[A *: Rest, A] with + type Mapped[+A] = A *: tup.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + f(x.head) *: tup.map(x.tail)(f) +``` + +Note the quite convoluted syntax, which makes the code hard to understand. Here is the same example in the new type class syntax, which also compiles correctly: +```scala +//> using options -language:experimental.modularity -source future + +trait TupleOf[+A]: + type Self + type Mapped[+A] <: Tuple + def map[B](x: Self)(f: A => B): Mapped[B] + +object TupleOf: + + given EmptyTuple is TupleOf[Nothing]: + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple : TupleOf[A]] => A *: Rest is TupleOf[A]: + type Mapped[+A] = A *: Rest.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + f(x.head) *: Rest.map(x.tail)(f) +``` +Note in particular the following points: + + - In the original code, it was not clear that `TupleOf` is a type class, + since it contained two type parameters, one of which played the role + of the instance type `Self`. The new version is much clearer: `TupleOf` is + a type class over `Self` with one additional parameter, the common type of all tuple elements. + - The two given definitions are obfuscated in the old code. Their version + in the new code makes it clear what kind of instances they define: + + - `EmptyTuple` is a tuple of `Nothing`. + - if `Rest` is a tuple of `A`, then `A *: Rest` is also a tuple of `A`. + + - There's no need to introduce names for parameter instances in using clauses; the default naming scheme for context bound evidences works fine, and is more concise. 
+ - There's no need to manually declare implicit parameters as `tracked`, + context bounds provide that automatically. + - Everything in the new code feels like idiomatic Scala 3, whereas the original code exhibits the awkward corner case that requires a `with` in + front of given definitions. + +### Example 3 + +Dimi Racordon tried to [define parser combinators](https://users.scala-lang.org/t/create-an-instance-of-a-type-class-with-methods-depending-on-type-members/9613) in Scala that use dependent type members for inputs and results. It was intended as a basic example of type class constraints, but it did not work in current Scala. + +Here is the problem solved with the new syntax. Note how much clearer that syntax is compared to Dimi's original version, which did not work out in the end. + +```scala +/** A parser combinator */ +trait Combinator: + type Self + + type Input + type Result + + extension (self: Self) + /** Parses and returns an element from input `in` */ + def parse(in: Input): Option[Result] +end Combinator + +case class Apply[I, R](action: I => Option[R]) +case class Combine[A, B](a: A, b: B) + +given [I, R] => Apply[I, R] is Combinator: + type Input = I + type Result = R + extension (self: Apply[I, R]) + def parse(in: I): Option[R] = self.action(in) + +given [A: Combinator, B: Combinator { type Input = A.Input }] + => Combine[A, B] is Combinator: + type Input = A.Input + type Result = (A.Result, B.Result) + extension (self: Combine[A, B]) + def parse(in: Input): Option[Result] = + for + x <- self.a.parse(in) + y <- self.b.parse(in) + yield (x, y) +``` +The example is now as expressed as straightforwardly as it should be: + + - `Combinator` is a type class with two associated types, `Input` and `Result`, and a `parse` method. + - `Apply` and `Combine` are two data constructors representing parser combinators. They are declared to be `Combinators` in the two subsequent `given` declarations. 
+ - `Apply`'s parse method applies the `action` function to the input. + - `Combine[A, B]` is a parser combinator provided `A` and `B` are parser combinators + that process the same type of `Input`, which is also the input type of + `Combine[A, B]`. Its `Result` type is a pair of the `Result` types of `A` and `B`. + Results are produced by a simple for-expression. + +Compared to the original example, which required serious contortions, this is now all completely straightforward. + +_Note 1:_ One could also explore improvements, for instance making this purely functional. But that's not the point of the demonstration here, where I wanted +to take the original example and show how it can be made to work with the new constructs, and be expressed more clearly as well. + +_Note 2:_ One could improve the notation even further by adding equality constraints in the style of Swift, which in turn resemble the _sharing constraints_ of SML. A hypothetical syntax applied to the second given would be: +```scala +given [A: Combinator, B: Combinator with A.Input == B.Input] + => Combine[A, B] is Combinator: +``` +This variant is aesthetically pleasing since it makes the equality constraint symmetric. The original version had to use an asymmetric refinement on the second type parameter bound instead. For now, such constraints are neither implemented nor proposed. This is left as a possibility for future work. Note also the analogy with +the work of @mbovel and @Sporarum on refinement types, where similar `with` clauses can appear for term parameters. If that work goes ahead, we could possibly revisit the issue of `with` clauses also for type parameters. + +### Example 4 + +Dimi Racordon tried to [port some core elements](https://github.com/kyouko-taiga/scala-hylolib) of the type class based [Hylo standard library to Scala](https://github.com/hylo-lang/hylo/tree/main/StandardLibrary/Sources). 
It worked to some degree, but there were some things that could not be expressed, and more things that could be expressed only awkwardly. + +With the improvements proposed here, the library can now be expressed quite clearly and straightforwardly. See tests/pos/hylolib in this PR for details. + +## Suggested Improvement unrelated to Type Classes + +The following improvement would make sense alongside the suggested changes to type classes. But it does not form part of this proposal and is not yet implemented. + + +### Using `as` also in Patterns + +Since we have now more precedents of `as` as a postfix binder, I want to come back to the proposal to use it in patterns as well, in favor of `@`, which should be deprecated. + +Examples: + +```scala + xs match + case (Person(name, age) as p) :: rest => ... + + tp match + case Param(tl, _) :: _ as tparams => ... + + val x :: xs1 as xs = ys.checkedCast +``` + +These would replace the previous syntax using `@`: + +```scala + xs match + case p @ Person(name, age) :: rest => ... + + tp match + case tparams @ (Param(tl, _) :: _) => ... + + val xs @ (x :: xs1) = ys.checkedCast +``` +**Advantages:** No unpronounceable and non-standard symbol like `@`. More regularity. + +Generally, we want to use `as name` to attach a name for some entity that could also have been used stand-alone. + +**Proposed Syntax Change** + +``` +Pattern2 ::= InfixPattern ['as' id] +``` + +## Summary + +I have proposed some tweaks to Scala 3, which would greatly increase its usability for modular, type class based, generic programming. The proposed changes are: + + 1. Allow context bounds over classes that define a `Self` member type. + 1. Allow context bounds to be named with `as`. Use the bound parameter name as a default name for the generated context bound evidence. + 1. Add a new `{...}` syntax for multiple context bounds. + 1. Make context bounds also available for type members, which expand into a new form of deferred given. 
Phase out the previous abstract givens in favor of the new form.
+ 1. Add a predefined type alias `is`.
+ 1. Introduce a new cleaner syntax of given clauses.
+
+It's interesting that givens, which are a very general concept in Scala, were "almost there" when it comes to full support of concepts and generic programming. We only needed to add a few usability tweaks to context bounds,
+alongside two syntactic changes that supersede the previous forms of `given .. with` clauses and abstract givens. Also interesting is that the superseded syntax constructs were the two areas where we collectively felt that the previous solutions were a bit awkward, but we could not think of better ones at the time. It's very nice that more satisfactory solutions are now emerging.
+
+## Conclusion
+
+Generic programming can be expressed in a number of languages. For instance, with
+type classes in Haskell, or with traits in Rust, or with protocols in Swift, or with concepts in C++. Each of these is constructed from a fairly heavyweight set of new constructs, different from expressions and types. By contrast, equivalent solutions in Scala rely on regular types. Type classes are simply traits that define a `Self` type member.
+
+The proposed scheme has similar expressiveness to Protocols in Swift or Traits in Rust. Both of these were largely influenced by Jeremy Siek's PhD thesis "[A language for generic programming](https://scholarworks.iu.edu/dspace/handle/2022/7067)", which was first proposed as a way to implement concepts in C++. C++ did not follow Siek's approach, but Swift and Rust did.
+
+In Siek's thesis and in the formal treatments of Rust and Swift,
+ type class concepts are explained by mapping them to a lower level language of explicit dictionaries with representations for terms and types. Crucially, that lower level is not expressible without loss of granularity in the source language itself, since type representations are mapped to term dictionaries. 
By contrast, the current proposal expands type class concepts into other well-typed Scala constructs, which ultimately map into well-typed DOT programs. Type classes are simply a convenient notation for something that can already be expressed in Scala. In that sense, we stay true to the philosophy of a _scalable language_, where a small core can support a large range of advanced use cases. + diff --git a/docs/_docs/reference/other-new-features/binary-literals.md b/docs/_docs/reference/other-new-features/binary-literals.md new file mode 100644 index 000000000000..ba19fdd3d7f7 --- /dev/null +++ b/docs/_docs/reference/other-new-features/binary-literals.md @@ -0,0 +1,19 @@ +--- +layout: doc-page +title: "Binary Integer Literals" +nightlyOf: https://docs.scala-lang.org/scala3/reference/changed-features/binary-integer-literals.html +--- + +A new syntax for integer literals has been added, it is now possible to do the following: +```scala +val bitmask = 0b0010_0000 // equivalent to 32, 0x20 +``` + +Binary integer literals behave similarly to hex integer literals (`0x...`), for example: +* Both `0b...` and `0B...` are allowed +* `0b`/`0B` on its own is disallowed, possible alternatives: `0`, `0b0`, `0B0` +* Only `0` and `1` are allowed after the b (`b`/`B`) +* Underscores `_` are allowed anywhere between digits, and are ignored: `0b__1 == 0b1` + + +Note: This change has been backported to Scala 2.13.13, it is therefore not technically a changed feature diff --git a/docs/_docs/reference/other-new-features/experimental-defs.md b/docs/_docs/reference/other-new-features/experimental-defs.md index b71b20ecc036..06cc8860281c 100644 --- a/docs/_docs/reference/other-new-features/experimental-defs.md +++ b/docs/_docs/reference/other-new-features/experimental-defs.md @@ -265,8 +265,7 @@ Experimental definitions can only be referenced in an experimental scope. Experi -6. 
Any code compiled using a [_Nightly_](https://search.maven.org/artifact/org.scala-lang/scala3-compiler_3) or _Snapshot_ version of the compiler is considered to be in an experimental scope.
-Can use the `-Yno-experimental` compiler flag to disable it and run as a proper release.
+6. An experimental language feature is imported at the package level. All top-level definitions will be marked as `@experimental`.

In any other situation, a reference to an experimental definition will cause a compilation error.
diff --git a/docs/_docs/reference/other-new-features/export.md b/docs/_docs/reference/other-new-features/export.md
index 98e9a7d3d711..e21d369b6b5e 100644
--- a/docs/_docs/reference/other-new-features/export.md
+++ b/docs/_docs/reference/other-new-features/export.md
@@ -37,7 +37,12 @@ final def print(bits: BitMap): Unit = printUnit.print(bits)
final type PrinterType = printUnit.PrinterType
```

-They can be accessed inside `Copier` as well as from outside:
+With the experimental `modularity` language import, only exported methods and values are final, whereas the generated `PrinterType` would be a simple type alias
+```scala
+ type PrinterType = printUnit.PrinterType
+```
+
+These aliases can be accessed inside `Copier` as well as from outside:
```scala
val copier = new Copier
@@ -90,12 +95,17 @@ export O.*
```
Export aliases copy the type and value parameters of the members they refer to.
-Export aliases are always `final`. Aliases of given instances are again defined as givens (and aliases of old-style implicits are `implicit`). Aliases of extensions are again defined as extensions. Aliases of inline methods or values are again defined `inline`. There are no other modifiers that can be given to an alias. This has the following consequences for overriding:
+Export aliases of term members are always `final`. Aliases of given instances are again defined as givens (and aliases of old-style implicits are `implicit`). 
Aliases of extensions are again defined as extensions. Aliases of inline methods or values are again defined `inline`. There are no other modifiers that can be given to an alias. This has the following consequences for overriding: - - Export aliases cannot be overridden, since they are final. + - Export aliases of methods or fields cannot be overridden, since they are final. - Export aliases cannot override concrete members in base classes, since they are not marked `override`. - However, export aliases can implement deferred members of base classes. + - Export type aliases are normally also final, except when the experimental + language import `modularity` is present. The general + rules for type aliases ensure in any case that if there are several type aliases in a class, + they must agree on their right hand sides, or the class could not be instantiated. + So dropping the `final` for export type aliases is safe. Export aliases for public value definitions that are accessed without referring to private values in the qualifier path diff --git a/docs/_docs/reference/other-new-features/safe-initialization.md b/docs/_docs/reference/other-new-features/safe-initialization.md index 757038eac786..503dbc7bde47 100644 --- a/docs/_docs/reference/other-new-features/safe-initialization.md +++ b/docs/_docs/reference/other-new-features/safe-initialization.md @@ -4,7 +4,7 @@ title: "Safe Initialization" nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/safe-initialization.html --- -Scala 3 implements experimental safe initialization check, which can be enabled by the compiler option `-Ysafe-init`. +Scala 3 implements experimental safe initialization check, which can be enabled by the compiler option `-Wsafe-init`. The design and implementation of the initialization checker is described in the paper _Safe object initialization, abstractly_ [3]. 
diff --git a/docs/_docs/reference/syntax.md b/docs/_docs/reference/syntax.md index 1980bc4e0ab2..66cf5a18fac9 100644 --- a/docs/_docs/reference/syntax.md +++ b/docs/_docs/reference/syntax.md @@ -198,10 +198,10 @@ SimpleType ::= SimpleLiteral | id | Singleton ‘.’ id | Singleton ‘.’ ‘type’ - | ‘(’ Types ‘)’ + | ‘(’ [Types] ‘)’ | Refinement - | SimpleType1 TypeArgs - | SimpleType1 ‘#’ id + | SimpleType TypeArgs + | SimpleType ‘#’ id Singleton ::= SimpleRef | SimpleLiteral | Singleton ‘.’ id @@ -263,7 +263,7 @@ SimpleExpr ::= SimpleRef | quoteId -- only inside splices | ‘new’ ConstrApp {‘with’ ConstrApp} [TemplateBody] | ‘new’ TemplateBody - | ‘(’ ExprsInParens ‘)’ + | ‘(’ [ExprsInParens] ‘)’ | SimpleExpr ‘.’ id | SimpleExpr ‘.’ MatchClause | SimpleExpr TypeArgs @@ -279,8 +279,7 @@ ExprSplice ::= spliceId | ‘$’ ‘{’ Block ‘}’ -- unless inside quoted pattern | ‘$’ ‘{’ Pattern ‘}’ -- when inside quoted pattern ExprsInParens ::= ExprInParens {‘,’ ExprInParens} -ExprInParens ::= PostfixExpr ‘:’ Type - | Expr +ExprInParens ::= PostfixExpr ‘:’ Type | Expr ParArgumentExprs ::= ‘(’ [ExprsInParens] ‘)’ | ‘(’ ‘using’ ExprsInParens ‘)’ | ‘(’ [ExprsInParens ‘,’] PostfixExpr ‘*’ ‘)’ @@ -331,6 +330,7 @@ SimplePattern1 ::= SimpleRef PatVar ::= varid | ‘_’ Patterns ::= Pattern {‘,’ Pattern} + ArgumentPatterns ::= ‘(’ [Patterns] ‘)’ | ‘(’ [Patterns ‘,’] PatVar ‘*’ ‘)’ ``` @@ -392,7 +392,7 @@ LocalModifier ::= ‘abstract’ AccessModifier ::= (‘private’ | ‘protected’) [AccessQualifier] AccessQualifier ::= ‘[’ id ‘]’ -Annotation ::= ‘@’ SimpleType1 {ParArgumentExprs} +Annotation ::= ‘@’ SimpleType {ParArgumentExprs} Import ::= ‘import’ ImportExpr {‘,’ ImportExpr} Export ::= ‘export’ ImportExpr {‘,’ ImportExpr} @@ -415,6 +415,7 @@ EndMarkerTag ::= id | ‘if’ | ‘while’ | ‘for’ | ‘match’ | ``` RefineDcl ::= ‘val’ ValDcl | ‘def’ DefDcl + | ‘var’ ValDcl | ‘type’ {nl} TypeDef ValDcl ::= ids ‘:’ Type DefDcl ::= DefSig ‘:’ Type @@ -443,6 +444,7 @@ ObjectDef ::= id [Template] EnumDef ::= id 
ClassConstr InheritClauses EnumBody GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) GivenSig ::= [id] [DefTypeParamClause] {UsingParamClause} ‘:’ -- one of `id`, `DefTypeParamClause`, `UsingParamClause` must be present +GivenType ::= AnnotType {id [nl] AnnotType} StructuralInstance ::= ConstrApp {‘with’ ConstrApp} [‘with’ WithTemplateBody] Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ {UsingParamClause} ExtMethods @@ -452,7 +454,7 @@ ExtMethod ::= {Annotation [nl]} {Modifier} ‘def’ DefDef Template ::= InheritClauses [TemplateBody] InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] ConstrApps ::= ConstrApp ({‘,’ ConstrApp} | {‘with’ ConstrApp}) -ConstrApp ::= SimpleType1 {Annotation} {ParArgumentExprs} +ConstrApp ::= SimpleType {Annotation} {ParArgumentExprs} ConstrExpr ::= SelfInvocation | <<< SelfInvocation {semi BlockStat} >>> SelfInvocation ::= ‘this’ ArgumentExprs {ArgumentExprs} diff --git a/docs/_spec/01-lexical-syntax.md b/docs/_spec/01-lexical-syntax.md index 7dfcea87bd2d..e1686204116e 100644 --- a/docs/_spec/01-lexical-syntax.md +++ b/docs/_spec/01-lexical-syntax.md @@ -332,9 +332,10 @@ Literal ::= [‘-’] integerLiteral ### Integer Literals ```ebnf -integerLiteral ::= (decimalNumeral | hexNumeral) [‘L’ | ‘l’] +integerLiteral ::= (decimalNumeral | hexNumeral | binaryNumeral) [‘L’ | ‘l’] decimalNumeral ::= ‘0’ | digit [{digit | ‘_’} digit] hexNumeral ::= ‘0’ (‘x’ | ‘X’) hexDigit [{hexDigit | ‘_’} hexDigit] +binaryNumeral ::= ‘0’ (‘b’ | ‘B’) binaryDigit [{binaryDigit | ‘_’} binaryDigit] ``` Values of type `Int` are all integer numbers between $-2\^{31}$ and $2\^{31}-1$, inclusive. @@ -357,7 +358,7 @@ The numeric ranges given by these types are: The digits of a numeric literal may be separated by arbitrarily many underscores for purposes of legibility. 
> ```scala -> 0 21_000 0x7F -42L 0xFFFF_FFFF +> 0 21_000 0x7F -42L 0xFFFF_FFFF 0b0100_0010 > ``` ### Floating Point Literals diff --git a/docs/_spec/03-types.md b/docs/_spec/03-types.md index 407a69b8c8c5..6bc7886c5677 100644 --- a/docs/_spec/03-types.md +++ b/docs/_spec/03-types.md @@ -197,7 +197,7 @@ An argument type of the form ´\Rightarrow T´ represents a [call-by-name parame Function types associate to the right, e.g. ´S \Rightarrow T \Rightarrow R´ is the same as ´S \Rightarrow (T \Rightarrow R)´. -Function types are [covariant](04-basic-definitions.md#variance-annotations) in their result type and [contravariant](04-basic-definitions.md#variance-annotations) in their argument types. +Function types are [covariant](04-basic-definitions.html#variance-annotations) in their result type and [contravariant](04-basic-definitions.html#variance-annotations) in their argument types. Function types translate into internal class types that define an `apply` method. Specifically, the ´n´-ary function type ´(T_1, ..., T_n) \Rightarrow R´ translates to the internal class type `scala.Function´_n´[´T_1´, ..., ´T_n´, ´R´]`. @@ -210,7 +210,7 @@ trait Function´_n´[-´T_1´, ..., -´T_n´, +´R´]: def apply(´x_1´: ´T_1´, ..., ´x_n´: ´T_n´): ´R´ ``` -Their exact supertype and implementation can be consulted in the [function classes section](./12-the-scala-standard-library.md#the-function-classes) of the standard library page in this document. +Their exact supertype and implementation can be consulted in the [function classes section](./12-the-scala-standard-library.html#the-function-classes) of the standard library page in this document. _Dependent function types_ are function types whose parameters are named and can referred to in result types. In the concrete type ´(x_1: T_1, ..., x_n: T_n) \Rightarrow R´, ´R´ can refer to the parameters ´x_i´, notably to form path-dependent types. 
diff --git a/docs/_spec/TODOreference/changed-features/compiler-plugins.md b/docs/_spec/TODOreference/changed-features/compiler-plugins.md index 20bdb7f49836..719e204fc803 100644 --- a/docs/_spec/TODOreference/changed-features/compiler-plugins.md +++ b/docs/_spec/TODOreference/changed-features/compiler-plugins.md @@ -67,7 +67,7 @@ class DivideZero extends StandardPlugin: val name: String = "divideZero" override val description: String = "divide zero check" - def init(options: List[String]): List[PluginPhase] = + override def initialize(options: List[String])(using Context): List[PluginPhase] = (new DivideZeroPhase) :: Nil class DivideZeroPhase extends PluginPhase: @@ -90,7 +90,7 @@ end DivideZeroPhase ``` The plugin main class (`DivideZero`) must extend the trait `StandardPlugin` -and implement the method `init` that takes the plugin's options as argument +and implement the method `initialize` that takes the plugin's options as argument and returns a list of `PluginPhase`s to be inserted into the compilation pipeline. Our plugin adds one compiler phase to the pipeline. A compiler phase must extend diff --git a/docs/_spec/TODOreference/other-new-features/safe-initialization.md b/docs/_spec/TODOreference/other-new-features/safe-initialization.md index 757038eac786..503dbc7bde47 100644 --- a/docs/_spec/TODOreference/other-new-features/safe-initialization.md +++ b/docs/_spec/TODOreference/other-new-features/safe-initialization.md @@ -4,7 +4,7 @@ title: "Safe Initialization" nightlyOf: https://docs.scala-lang.org/scala3/reference/other-new-features/safe-initialization.html --- -Scala 3 implements experimental safe initialization check, which can be enabled by the compiler option `-Ysafe-init`. +Scala 3 implements experimental safe initialization check, which can be enabled by the compiler option `-Wsafe-init`. The design and implementation of the initialization checker is described in the paper _Safe object initialization, abstractly_ [3]. 
diff --git a/docs/_spec/public/scripts/main.js b/docs/_spec/public/scripts/main.js index 9ade9c770f1e..c74c8d0ff9a1 100644 --- a/docs/_spec/public/scripts/main.js +++ b/docs/_spec/public/scripts/main.js @@ -1,34 +1,29 @@ function currentChapter() { - var path = document.location.pathname; - var idx = path.lastIndexOf("/") + 1; - var chap = path.substring(idx, idx + 2); - return parseInt(chap, 10); + return parseInt(document.location.pathname.split('/').pop().substr(0, 2), 10); } function heading(i, heading, $heading) { - var currentLevel = parseInt(heading.tagName.substring(1)); - var result = ""; + const currentLevel = parseInt(heading.tagName.substring(1)); + if (currentLevel === this.headerLevel) { - this.headerCounts[this.headerLevel] += 1; - return "" + this.headerCounts[this.headerLevel] + " " + $heading.text(); + this.headerCounts[this.headerLevel]++; } else if (currentLevel < this.headerLevel) { - while(currentLevel < this.headerLevel) { + while (currentLevel < this.headerLevel) { this.headerCounts[this.headerLevel] = 1; - this.headerLevel -= 1; + this.headerLevel--; } - this.headerCounts[this.headerLevel] += 1; - return "" + this.headerCounts[this.headerLevel]+ " " + $heading.text(); + this.headerCounts[this.headerLevel]++; } else { - while(currentLevel > this.headerLevel) { - this.headerLevel += 1; + while (currentLevel > this.headerLevel) { + this.headerLevel++; this.headerCounts[this.headerLevel] = 1; } - return "" + this.headerCounts[this.headerLevel]+ " " + $heading.text(); } + return `${this.headerCounts[this.headerLevel]} ${$heading.text()}`; } // ignore when using wkhtmltopdf, or it won't work... 
-if(window.jekyllEnv !== 'spec-pdf') { +if (window.jekyllEnv !== 'spec-pdf') { $('#toc').toc( { 'selectors': 'h1,h2,h3', @@ -64,8 +59,6 @@ document.addEventListener("DOMContentLoaded", function() { }); $("#chapters a").each(function (index) { - if (document.location.pathname.endsWith($(this).attr("href"))) - $(this).addClass("chapter-active"); - else - $(this).removeClass("chapter-active"); + const href = $(this).attr("href"); + $(this).toggleClass("chapter-active", document.location.pathname.endsWith(href)); }); diff --git a/docs/sidebar.yml b/docs/sidebar.yml index 65d7ac2f9ee4..efdab80595a6 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -81,6 +81,7 @@ subsection: - page: reference/other-new-features/safe-initialization.md - page: reference/other-new-features/type-test.md - page: reference/other-new-features/experimental-defs.md + - page: reference/other-new-features/binary-literals.md - title: Other Changed Features directory: changed-features index: reference/changed-features/changed-features.md @@ -153,6 +154,9 @@ subsection: - page: reference/experimental/cc.md - page: reference/experimental/purefuns.md - page: reference/experimental/tupled-function.md + - page: reference/experimental/named-tuples.md + - page: reference/experimental/modularity.md + - page: reference/experimental/typeclasses.md - page: reference/syntax.md - title: Language Versions index: reference/language-versions/language-versions.md @@ -216,5 +220,6 @@ subsection: - page: internals/debug-macros.md - page: internals/gadts.md - page: internals/coverage.md + - page: internals/best-effort-compilation.md - page: release-notes-0.1.2.md hidden: true diff --git a/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala b/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala index 3604e38375e7..e878866be81e 100644 --- a/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala +++ 
b/language-server/src/dotty/tools/languageserver/DottyLanguageServer.scala @@ -753,7 +753,7 @@ object DottyLanguageServer { /** Does this sourcefile represent a worksheet? */ private def isWorksheet(sourcefile: SourceFile): Boolean = - sourcefile.file.extension == "sc" + sourcefile.file.ext.isScalaScript /** Wrap the source of a worksheet inside an `object`. */ private def wrapWorksheet(source: String): String = diff --git a/language-server/test/dotty/tools/languageserver/CompletionTest.scala b/language-server/test/dotty/tools/languageserver/CompletionTest.scala index d0ceb37c07ba..d64bb44c1a5d 100644 --- a/language-server/test/dotty/tools/languageserver/CompletionTest.scala +++ b/language-server/test/dotty/tools/languageserver/CompletionTest.scala @@ -954,14 +954,8 @@ class CompletionTest { .noCompletions() @Test def i13624_annotType: Unit = - val expected1 = Set( - ("MyAnnotation", Class, "MyAnnotation"), - ("MyAnnotation", Module, "MyAnnotation"), - ) - val expected2 = Set( - ("MyAnnotation", Class, "Foo.MyAnnotation"), - ("MyAnnotation", Module, "Foo.MyAnnotation"), - ) + val expected1 = Set(("MyAnnotation", Class, "MyAnnotation")) + val expected2 = Set(("MyAnnotation", Class, "Foo.MyAnnotation")) code"""object Foo{ | class MyAnnotation extends annotation.StaticAnnotation |} @@ -984,14 +978,8 @@ class CompletionTest { @Test def i13624_annotation : Unit = code"""@annotation.implicitNot${m1} |@annotation.implicitNotFound @mai${m2}""" - .completion(m1, - ("implicitNotFound", Class, "scala.annotation.implicitNotFound"), - ("implicitNotFound", Module, "scala.annotation.implicitNotFound"), - ) - .completion(m2, - ("main", Class, "main"), - ("main", Module, "main"), - ) + .completion(m1, ("implicitNotFound", Class, "scala.annotation.implicitNotFound")) + .completion(m2, ("main", Class, "main")) @Test def i13623_annotation : Unit = code"""import annot${m1}""" @@ -1489,7 +1477,6 @@ class CompletionTest { ("xDef", Method, "=> Int"), ("xVal", Field, "Int"), ("xObject", 
Module, "Foo.xObject"), - ("xClass", Module, "Foo.xClass"), ("xClass", Class, "Foo.xClass"))) } @@ -1557,9 +1544,7 @@ class CompletionTest { |object T: | extension (x: Test.TestSel$m1) |""" - .completion(m1, Set( - ("TestSelect", Module, "Test.TestSelect"), ("TestSelect", Class, "Test.TestSelect") - )) + .completion(m1, Set(("TestSelect", Class, "Test.TestSelect"))) @Test def extensionDefinitionCompletionsSelectNested: Unit = code"""|object Test: @@ -1568,9 +1553,7 @@ class CompletionTest { |object T: | extension (x: Test.Test2.TestSel$m1) |""" - .completion(m1, Set( - ("TestSelect", Module, "Test.Test2.TestSelect"), ("TestSelect", Class, "Test.Test2.TestSelect") - )) + .completion(m1, Set(("TestSelect", Class, "Test.Test2.TestSelect"))) @Test def extensionDefinitionCompletionsSelectInside: Unit = code"""|object Test: @@ -1721,4 +1704,23 @@ class CompletionTest { .completion(m1, Set( ("getOrElse", Method, "[V1 >: String](key: Int, default: => V1): V1"), )) + + @Test def noEnumCompletionInNewContext: Unit = + code"""|enum TestEnum: + | case TestCase + |object M: + | TestEnu$m1 + | TestEnum.TestCa$m2 + | val x: TestEnu$m3 + | val y: TestEnum.Tes$m4 + | new TestEnu$m5 + | new TestEnum.TestCas$m6 + |""" + .completion(m1, Set(("TestEnum", Module, "TestEnum"))) + .completion(m2, Set(("TestCase", Field, "TestEnum"))) + .completion(m3, Set(("TestEnum", Module, "TestEnum"), ("TestEnum", Class, "TestEnum"))) + .completion(m4, Set(("TestCase", Field, "TestEnum"))) + .completion(m5, Set()) + .completion(m6, Set()) + } diff --git a/library/src/scala/annotation/experimental.scala b/library/src-bootstrapped/scala/annotation/experimental.scala similarity index 64% rename from library/src/scala/annotation/experimental.scala rename to library/src-bootstrapped/scala/annotation/experimental.scala index 69ab5b9c7221..634cfe12db7f 100644 --- a/library/src/scala/annotation/experimental.scala +++ b/library/src-bootstrapped/scala/annotation/experimental.scala @@ -5,5 +5,5 @@ package 
scala.annotation * @see [[https://dotty.epfl.ch/docs/reference/other-new-features/experimental-defs]] * @syntax markdown */ -@deprecatedInheritance("Scheduled for being final in the future", "3.4.0") -class experimental extends StaticAnnotation +final class experimental(message: String) extends StaticAnnotation: + def this() = this("") diff --git a/library/src-non-bootstrapped/scala/annotation/experimental.scala b/library/src-non-bootstrapped/scala/annotation/experimental.scala new file mode 100644 index 000000000000..e879b47e12ff --- /dev/null +++ b/library/src-non-bootstrapped/scala/annotation/experimental.scala @@ -0,0 +1,3 @@ +package scala.annotation + +final class experimental extends StaticAnnotation diff --git a/library/src/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala new file mode 100644 index 000000000000..dc6e6c3144f6 --- /dev/null +++ b/library/src/scala/NamedTuple.scala @@ -0,0 +1,217 @@ +package scala +import annotation.experimental +import compiletime.ops.boolean.* + +@experimental +object NamedTuple: + + /** The type to which named tuples get mapped to. For instance, + * (name: String, age: Int) + * gets mapped to + * NamedTuple[("name", "age"), (String, Int)] + */ + opaque type NamedTuple[N <: Tuple, +V <: Tuple] >: V <: AnyNamedTuple = V + + /** A type which is a supertype of all named tuples */ + opaque type AnyNamedTuple = Any + + def apply[N <: Tuple, V <: Tuple](x: V): NamedTuple[N, V] = x + + def unapply[N <: Tuple, V <: Tuple](x: NamedTuple[N, V]): Some[V] = Some(x) + + extension [V <: Tuple](x: V) + inline def withNames[N <: Tuple]: NamedTuple[N, V] = x + + export NamedTupleDecomposition.{Names, DropNames} + + extension [N <: Tuple, V <: Tuple](x: NamedTuple[N, V]) + + /** The underlying tuple without the names */ + inline def toTuple: V = x + + /** The number of elements in this tuple */ + inline def size: Tuple.Size[V] = toTuple.size + + // This intentionally works for empty named tuples as well. 
I think NonEmptyTuple is a dead end + // and should be reverted, just like NonEmptyList is also appealing at first, but a bad idea + // in the end. + + /** The value (without the name) at index `n` of this tuple */ + inline def apply(n: Int): Tuple.Elem[V, n.type] = + inline toTuple match + case tup: NonEmptyTuple => tup(n).asInstanceOf[Tuple.Elem[V, n.type]] + case tup => tup.productElement(n).asInstanceOf[Tuple.Elem[V, n.type]] + + /** The first element value of this tuple */ + inline def head: Tuple.Elem[V, 0] = apply(0) + + /** The tuple consisting of all elements of this tuple except the first one */ + inline def tail: NamedTuple[Tuple.Tail[N], Tuple.Tail[V]] = + toTuple.drop(1).asInstanceOf[NamedTuple[Tuple.Tail[N], Tuple.Tail[V]]] + + /** The last element value of this tuple */ + inline def last: Tuple.Last[V] = apply(size - 1).asInstanceOf[Tuple.Last[V]] + + /** The tuple consisting of all elements of this tuple except the last one */ + inline def init: NamedTuple[Tuple.Init[N], Tuple.Init[V]] = + toTuple.take(size - 1).asInstanceOf[NamedTuple[Tuple.Init[N], Tuple.Init[V]]] + + /** The tuple consisting of the first `n` elements of this tuple, or all + * elements if `n` exceeds `size`. + */ + inline def take(n: Int): NamedTuple[Tuple.Take[N, n.type], Tuple.Take[V, n.type]] = + toTuple.take(n) + + /** The tuple consisting of all elements of this tuple except the first `n` ones, + * or no elements if `n` exceeds `size`. + */ + inline def drop(n: Int): NamedTuple[Tuple.Drop[N, n.type], Tuple.Drop[V, n.type]] = + toTuple.drop(n) + + /** The tuple `(x.take(n), x.drop(n))` */ + inline def splitAt(n: Int): + (NamedTuple[Tuple.Take[N, n.type], Tuple.Take[V, n.type]], + NamedTuple[Tuple.Drop[N, n.type], Tuple.Drop[V, n.type]]) = + // would be nice if this could have type `Split[NamedTuple[N, V]]` instead, but + // we get a type error then. Similar for other methods here. 
+ toTuple.splitAt(n) + + /** The tuple consisting of all elements of this tuple followed by all elements + * of tuple `that`. The names of the two tuples must be disjoint. + */ + inline def ++ [N2 <: Tuple, V2 <: Tuple](that: NamedTuple[N2, V2])(using Tuple.Disjoint[N, N2] =:= true) + : NamedTuple[Tuple.Concat[N, N2], Tuple.Concat[V, V2]] + = toTuple ++ that.toTuple + + // inline def :* [L] (x: L): NamedTuple[Append[N, ???], Append[V, L] = ??? + // inline def *: [H] (x: H): NamedTuple[??? *: N], H *: V] = ??? + + /** The named tuple consisting of all element values of this tuple mapped by + * the polymorphic mapping function `f`. The names of elements are preserved. + * If `x = (n1 = v1, ..., ni = vi)` then `x.map(f) = `(n1 = f(v1), ..., ni = f(vi))`. + */ + inline def map[F[_]](f: [t] => t => F[t]): NamedTuple[N, Tuple.Map[V, F]] = + toTuple.map(f).asInstanceOf[NamedTuple[N, Tuple.Map[V, F]]] + + /** The named tuple consisting of all elements of this tuple in reverse */ + inline def reverse: NamedTuple[Tuple.Reverse[N], Tuple.Reverse[V]] = + toTuple.reverse + + /** The named tuple consisting of all elements values of this tuple zipped + * with corresponding element values in named tuple `that`. + * If the two tuples have different sizes, + * the extra elements of the larger tuple will be disregarded. + * The names of `x` and `that` at the same index must be the same. + * The result tuple keeps the same names as the operand tuples. 
+ */ + inline def zip[V2 <: Tuple](that: NamedTuple[N, V2]): NamedTuple[N, Tuple.Zip[V, V2]] = + toTuple.zip(that.toTuple) + + /** A list consisting of all element values */ + inline def toList: List[Tuple.Union[V]] = toTuple.toList.asInstanceOf[List[Tuple.Union[V]]] + + /** An array consisting of all element values */ + inline def toArray: Array[Object] = toTuple.toArray + + /** An immutable array consisting of all element values */ + inline def toIArray: IArray[Object] = toTuple.toIArray + + end extension + + /** The size of a named tuple, represented as a literal constant subtype of Int */ + type Size[X <: AnyNamedTuple] = Tuple.Size[DropNames[X]] + + /** The type of the element value at position N in the named tuple X */ + type Elem[X <: AnyNamedTuple, N <: Int] = Tuple.Elem[DropNames[X], N] + + /** The type of the first element value of a named tuple */ + type Head[X <: AnyNamedTuple] = Elem[X, 0] + + /** The type of the last element value of a named tuple */ + type Last[X <: AnyNamedTuple] = Tuple.Last[DropNames[X]] + + /** The type of a named tuple consisting of all elements of named tuple X except the first one */ + type Tail[X <: AnyNamedTuple] = Drop[X, 1] + + /** The type of the initial part of a named tuple without its last element */ + type Init[X <: AnyNamedTuple] = + NamedTuple[Tuple.Init[Names[X]], Tuple.Init[DropNames[X]]] + + /** The type of the named tuple consisting of the first `N` elements of `X`, + * or all elements if `N` exceeds `Size[X]`. + */ + type Take[X <: AnyNamedTuple, N <: Int] = + NamedTuple[Tuple.Take[Names[X], N], Tuple.Take[DropNames[X], N]] + + /** The type of the named tuple consisting of all elements of `X` except the first `N` ones, + * or no elements if `N` exceeds `Size[X]`. + */ + type Drop[X <: AnyNamedTuple, N <: Int] = + NamedTuple[Tuple.Drop[Names[X], N], Tuple.Drop[DropNames[X], N]] + + /** The pair type `(Take(X, N), Drop[X, N]). 
*/ + type Split[X <: AnyNamedTuple, N <: Int] = (Take[X, N], Drop[X, N]) + + /** Type of the concatenation of two tuples `X` and `Y` */ + type Concat[X <: AnyNamedTuple, Y <: AnyNamedTuple] = + NamedTuple[Tuple.Concat[Names[X], Names[Y]], Tuple.Concat[DropNames[X], DropNames[Y]]] + + /** The type of the named tuple `X` mapped with the type-level function `F`. + * If `X = (n1 : T1, ..., ni : Ti)` then `Map[X, F] = `(n1 : F[T1], ..., ni : F[Ti])`. + */ + type Map[X <: AnyNamedTuple, F[_ <: Tuple.Union[DropNames[X]]]] = + NamedTuple[Names[X], Tuple.Map[DropNames[X], F]] + + /** A named tuple with the elements of tuple `X` in reversed order */ + type Reverse[X <: AnyNamedTuple] = + NamedTuple[Tuple.Reverse[Names[X]], Tuple.Reverse[DropNames[X]]] + + /** The type of the named tuple consisting of all element values of + * named tuple `X` zipped with corresponding element values of + * named tuple `Y`. If the two tuples have different sizes, + * the extra elements of the larger tuple will be disregarded. + * The names of `X` and `Y` at the same index must be the same. + * The result tuple keeps the same names as the operand tuples. + * For example, if + * ``` + * X = (n1 : S1, ..., ni : Si) + * Y = (n1 : T1, ..., nj : Tj) where j >= i + * ``` + * then + * ``` + * Zip[X, Y] = (n1 : (S1, T1), ..., ni: (Si, Ti)) + * ``` + * @syntax markdown + */ + type Zip[X <: AnyNamedTuple, Y <: AnyNamedTuple] = + Names[X] match + case Names[Y] => + NamedTuple[Names[X], Tuple.Zip[DropNames[X], DropNames[Y]]] + + /** A type specially treated by the compiler to represent all fields of a + * class argument `T` as a named tuple. Or, if `T` is already a named tyuple, + * `From[T]` is the same as `T`. 
+ */ + type From[T] <: AnyNamedTuple + + /** The type of the empty named tuple */ + type Empty = EmptyTuple.type + + /** The empty named tuple */ + val Empty: Empty = EmptyTuple.asInstanceOf[Empty] + +end NamedTuple + +/** Separate from NamedTuple object so that we can match on the opaque type NamedTuple. */ +@experimental +object NamedTupleDecomposition: + import NamedTuple.* + + /** The names of a named tuple, represented as a tuple of literal string values. */ + type Names[X <: AnyNamedTuple] <: Tuple = X match + case NamedTuple[n, _] => n + + /** The value types of a named tuple represented as a regular tuple. */ + type DropNames[NT <: AnyNamedTuple] <: Tuple = NT match + case NamedTuple[_, x] => x + diff --git a/library/src/scala/Precise.scala b/library/src/scala/Precise.scala new file mode 100644 index 000000000000..aad42ca8950f --- /dev/null +++ b/library/src/scala/Precise.scala @@ -0,0 +1,11 @@ +package scala +import annotation.experimental +import language.experimental.erasedDefinitions + +/** A type class-like trait intended as a context bound for type variables. + * If we have `[X: Precise]`, instances of the type variable `X` are inferred + * in precise mode. This means that singleton types and union types are not + * widened. + */ +@experimental erased trait Precise: + type Self diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index 3738bd05a19b..8074fe3664e5 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -1,6 +1,6 @@ package scala -import annotation.{experimental, showAsInfix} +import annotation.showAsInfix import compiletime.* import compiletime.ops.int.* @@ -82,7 +82,6 @@ sealed trait Tuple extends Product { /** Given a tuple `(a1, ..., am)`, returns the reversed tuple `(am, ..., a1)` * consisting all its elements. 
*/ - @experimental inline def reverse[This >: this.type <: Tuple]: Reverse[This] = runtime.Tuples.reverse(this).asInstanceOf[Reverse[This]] } @@ -201,11 +200,9 @@ object Tuple { type IsMappedBy[F[_]] = [X <: Tuple] =>> X =:= Map[InverseMap[X, F], F] /** Type of the reversed tuple */ - @experimental type Reverse[X <: Tuple] = ReverseOnto[X, EmptyTuple] /** Prepends all elements of a tuple in reverse order onto the other tuple */ - @experimental type ReverseOnto[From <: Tuple, +To <: Tuple] <: Tuple = From match case x *: xs => ReverseOnto[xs, x *: To] case EmptyTuple => To @@ -238,6 +235,25 @@ object Tuple { */ type Union[T <: Tuple] = Fold[T, Nothing, [x, y] =>> x | y] + /** A type level Boolean indicating whether the tuple `X` has an element + * that matches `Y`. + * @pre The elements of `X` are assumed to be singleton types + */ + type Contains[X <: Tuple, Y] <: Boolean = X match + case Y *: _ => true + case _ *: xs => Contains[xs, Y] + case EmptyTuple => false + + /** A type level Boolean indicating whether the type `Y` contains + * none of the elements of `X`. 
+ * @pre The elements of `X` and `Y` are assumed to be singleton types + */ + type Disjoint[X <: Tuple, Y <: Tuple] <: Boolean = X match + case x *: xs => Contains[Y, x] match + case true => false + case false => Disjoint[xs, Y] + case EmptyTuple => true + /** Empty tuple */ def apply(): EmptyTuple = EmptyTuple diff --git a/library/src/scala/annotation/MacroAnnotation.scala b/library/src/scala/annotation/MacroAnnotation.scala index 02e9470f06fd..ffaa0cf88464 100644 --- a/library/src/scala/annotation/MacroAnnotation.scala +++ b/library/src/scala/annotation/MacroAnnotation.scala @@ -45,9 +45,12 @@ trait MacroAnnotation extends StaticAnnotation: * import scala.collection.concurrent * * class memoize extends MacroAnnotation: - * def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + * def transform(using Quotes)( + * definition: quotes.reflect.Definition, + * companion: Option[quotes.reflect.Definition] + * ): List[quotes.reflect.Definition] = * import quotes.reflect.* - * tree match + * definition match * case DefDef(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(rhsTree)) => * (param.tpt.tpe.asType, tpt.tpe.asType) match * case ('[t], '[u]) => @@ -58,16 +61,17 @@ trait MacroAnnotation extends StaticAnnotation: * '{ concurrent.TrieMap.empty[t, u] }.asTerm * val cacheVal = ValDef(cacheSymbol, Some(cacheRhs)) * val newRhs = - * given Quotes = tree.symbol.asQuotes + * given Quotes = definition.symbol.asQuotes * val cacheRefExpr = Ref(cacheSymbol).asExprOf[concurrent.Map[t, u]] * val paramRefExpr = Ref(param.symbol).asExprOf[t] * val rhsExpr = rhsTree.asExprOf[u] * '{ $cacheRefExpr.getOrElseUpdate($paramRefExpr, $rhsExpr) }.asTerm - * val newTree = DefDef.copy(tree)(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(newRhs)) + * val newTree = DefDef.copy(definition)(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(newRhs)) * List(cacheVal, newTree) * case _ => * report.error("Annotation only supported on `def` with 
a single argument are supported") - * List(tree) + * List(definition) + * end transform * ``` * with this macro annotation a user can write * ```scala @@ -102,11 +106,14 @@ trait MacroAnnotation extends StaticAnnotation: * * @experimental * class equals extends MacroAnnotation: - * def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + * def transform(using Quotes)( + * definition: quotes.reflect.Definition, + * companion: Option[quotes.reflect.Definition] + * ): List[quotes.reflect.Definition] = * import quotes.reflect.* - * tree match + * definition match * case ClassDef(className, ctr, parents, self, body) => - * val cls = tree.symbol + * val cls = definition.symbol * * val constructorParameters = ctr.paramss.collect { case clause: TermParamClause => clause } * if constructorParameters.size != 1 || constructorParameters.head.params.isEmpty then @@ -139,10 +146,11 @@ trait MacroAnnotation extends StaticAnnotation: * val hashCodeOverrideDef = DefDef(hashCodeOverrideSym, _ => Some(Ref(hashSym))) * * val newBody = equalsOverrideDef :: hashVal :: hashCodeOverrideDef :: body - * List(ClassDef.copy(tree)(className, ctr, parents, self, newBody)) + * List(ClassDef.copy(definition)(className, ctr, parents, self, newBody)) * case _ => * report.error("Annotation only supports `class`") - * List(tree) + * List(definition) + * end transform * * private def equalsExpr[T: Type](that: Expr[Any], thisFields: List[Expr[Any]])(using Quotes): Expr[Boolean] = * '{ @@ -204,9 +212,10 @@ trait MacroAnnotation extends StaticAnnotation: * override def hashCode(): Int = hash$macro$1 * ``` * - * @param Quotes Implicit instance of Quotes used for tree reflection - * @param tree Tree that will be transformed + * @param Quotes Implicit instance of Quotes used for tree reflection + * @param definition Tree that will be transformed + * @param companion Tree for the companion class or module if the definition is respectively a module or a class * * 
@syntax markdown */ - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] diff --git a/library/src/scala/annotation/MainAnnotation.scala b/library/src/scala/annotation/MainAnnotation.scala deleted file mode 100644 index 29e650e50b74..000000000000 --- a/library/src/scala/annotation/MainAnnotation.scala +++ /dev/null @@ -1,129 +0,0 @@ -package scala.annotation - -/** MainAnnotation provides the functionality for a compiler-generated main class. - * It links a compiler-generated main method (call it compiler-main) to a user - * written main method (user-main). - * The protocol of calls from compiler-main is as follows: - * - * - create a `command` with the command line arguments, - * - for each parameter of user-main, a call to `command.argGetter`, - * or `command.varargGetter` if is a final varargs parameter, - * - a call to `command.run` with the closure of user-main applied to all arguments. 
- * - * Example: - * ```scala sc:nocompile - * /** Sum all the numbers - * * - * * @param first Fist number to sum - * * @param rest The rest of the numbers to sum - * */ - * @myMain def sum(first: Int, second: Int = 0, rest: Int*): Int = first + second + rest.sum - * ``` - * generates - * ```scala sc:nocompile - * object foo { - * def main(args: Array[String]): Unit = { - * val mainAnnot = new myMain() - * val info = new Info( - * name = "foo.main", - * documentation = "Sum all the numbers", - * parameters = Seq( - * new Parameter("first", "scala.Int", hasDefault=false, isVarargs=false, "Fist number to sum"), - * new Parameter("rest", "scala.Int" , hasDefault=false, isVarargs=true, "The rest of the numbers to sum") - * ) - * ) - * val mainArgsOpt = mainAnnot.command(info, args) - * if mainArgsOpt.isDefined then - * val mainArgs = mainArgsOpt.get - * val args0 = mainAnnot.argGetter[Int](info.parameters(0), mainArgs(0), None) // using parser Int - * val args1 = mainAnnot.argGetter[Int](info.parameters(1), mainArgs(1), Some(() => sum$default$1())) // using parser Int - * val args2 = mainAnnot.varargGetter[Int](info.parameters(2), mainArgs.drop(2)) // using parser Int - * mainAnnot.run(() => sum(args0(), args1(), args2()*)) - * } - * } - * ``` - * - * @param Parser The class used for argument string parsing and arguments into a `T` - * @param Result The required result type of the main method. - * If this type is Any or Unit, any type will be accepted. - */ -@experimental -trait MainAnnotation[Parser[_], Result] extends StaticAnnotation: - import MainAnnotation.{Info, Parameter} - - /** Process the command arguments before parsing them. - * - * Return `Some` of the sequence of arguments that will be parsed to be passed to the main method. - * This sequence needs to have the same length as the number of parameters of the main method (i.e. `info.parameters.size`). 
- * If there is a varags parameter, then the sequence must be at least of length `info.parameters.size - 1`. - * - * Returns `None` if the arguments are invalid and parsing and run should be stopped. - * - * @param info The information about the command (name, documentation and info about parameters) - * @param args The command line arguments - */ - def command(info: Info, args: Seq[String]): Option[Seq[String]] - - /** The getter for the `idx`th argument of type `T` - * - * @param idx The index of the argument - * @param defaultArgument Optional lambda to instantiate the default argument - */ - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using Parser[T]): () => T - - /** The getter for a final varargs argument of type `T*` */ - def varargGetter[T](param: Parameter, args: Seq[String])(using Parser[T]): () => Seq[T] - - /** Run `program` if all arguments are valid if all arguments are valid - * - * @param program A function containing the call to the main method and instantiation of its arguments - */ - def run(program: () => Result): Unit - -end MainAnnotation - -@experimental -object MainAnnotation: - - /** Information about the main method - * - * @param name The name of the main method - * @param documentation The documentation of the main method without the `@param` documentation (see Parameter.documentaion) - * @param parameters Information about the parameters of the main method - */ - @experimental // MiMa does not check scope inherited @experimental - final class Info( - val name: String, - val documentation: String, - val parameters: Seq[Parameter], - ): - - /** If the method ends with a varargs parameter */ - def hasVarargs: Boolean = parameters.nonEmpty && parameters.last.isVarargs - - end Info - - /** Information about a parameter of a main method - * - * @param name The name of the parameter - * @param typeName The name of the parameter's type - * @param hasDefault If the parameter has a default argument - * @param 
isVarargs If the parameter is a varargs parameter (can only be true for the last parameter) - * @param documentation The documentation of the parameter (from `@param` documentation in the main method) - * @param annotations The annotations of the parameter that extend `ParameterAnnotation` - */ - @experimental // MiMa does not check scope inherited @experimental - final class Parameter( - val name: String, - val typeName: String, - val hasDefault: Boolean, - val isVarargs: Boolean, - val documentation: String, - val annotations: Seq[ParameterAnnotation], - ) - - /** Marker trait for annotations that will be included in the Parameter annotations. */ - @experimental // MiMa does not check scope inherited @experimental - trait ParameterAnnotation extends StaticAnnotation - -end MainAnnotation diff --git a/library/src/scala/annotation/internal/WitnessNames.scala b/library/src/scala/annotation/internal/WitnessNames.scala new file mode 100644 index 000000000000..3921c2083617 --- /dev/null +++ b/library/src/scala/annotation/internal/WitnessNames.scala @@ -0,0 +1,54 @@ +package scala.annotation +package internal + +/** An annotation that is used for marking type definitions that should get + * context bound companions. The scheme is as follows: + * + * 1. When desugaring a context-bounded type A, add a @WitnessNames(n_1, ... , n_k) + * annotation to the type declaration node, where n_1, ..., n_k are the names of + * all the witnesses generated for the context bounds of A. This annotation will + * be pickled as usual. + * + * 2. During Namer or Unpickling, when encountering a type declaration A with + * a WitnessNames(n_1, ... , n_k) annotation, create a CB companion `val A` with + * type ``[ref_1 | ... | ref_k] where ref_i is a TermRef + * with the same prefix as A and name n_i. Except, don't do this if the type in + * question is a type parameter and there is already a term parameter with name A + * defined for the same method. 
+ * + * ContextBoundCompanion is defined as an internal abstract type like this: + * + * type ``[-Refs] + * + * The context bound companion's variance is negative, so that unions in the + * arguments are joined when encountering multiple definitions and forming a glb. + * + * 3. Add a special case for typing a selection A.m on a value A of type + * ContextBoundCompanion[ref_1, ..., ref_k]. Namely, try to typecheck all + * selections ref_1.m, ..., ref_k.m with the expected type. There must be + * a unique selection ref_i.m that typechecks and such that for all other + * selections ref_j.m that also typecheck one of the following three criteria + * applies: + * + * 1. ref_i.m and ref_j.m are the same. This means: If they are types then + * ref_i.m is an alias of ref_j.m. If they are terms then they are both + * singleton types and ref_i.m =:= ref_j.m. + * 2. The underlying type (under widen) of ref_i is a true supertype of the + * underlying type of ref_j. + * 3. ref_i.m is a term, the underlying type of ref_j is not a strict subtype + * of the underlying type of ref_i, and the underlying type ref_i.m is a + * strict subtype of the underlying type of ref_j.m. + * + * If there is such a selection, map A.m to ref_i.m, otherwise report an error. + * + * (2) might surprise. It is the analogue of given disambiguation, where we also + * pick the most general candidate that matches the expected type. E.g. we have + * context bounds for Functor, Monad, and Applicable. In this case we want to + * select the `map` method of `Functor`. + * + * 4. At PostTyper, issue an error when encountering any reference to a CB companion. 
+ */ +@experimental +class WitnessNames(names: String*) extends StaticAnnotation + + diff --git a/library/src/scala/annotation/newMain.scala b/library/src/scala/annotation/newMain.scala deleted file mode 100644 index 552e4225a648..000000000000 --- a/library/src/scala/annotation/newMain.scala +++ /dev/null @@ -1,389 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.annotation - -import scala.collection.mutable -import scala.util.CommandLineParser.FromString -import scala.annotation.meta.param - -/** - * The annotation that designates a main function. - * Main functions are entry points for Scala programs. They can be called through a command line interface by using - * the `scala` command, followed by their name and, optionally, their parameters. - * - * The parameters of a main function may have any type `T`, as long as there exists a - * `given util.CommandLineParser.FromString[T]` in the scope. It will be used for parsing the string given as input - * into the correct argument type. - * These types already have parsers defined: - * - String, - * - Boolean, - * - Byte, Short, Int, Long, Float, Double. - * - * The parameters of a main function may be passed either by position, or by name. Passing an argument positionally - * means that you give the arguments in the same order as the function's signature. Passing an argument by name means - * that you give the argument right after giving its name. Considering the function - * `@newMain def foo(i: Int, str: String)`, we may have arguments passed: - * - by position: `scala foo 1 abc`, - * - by name: `scala foo -i 1 --str abc` or `scala foo --str abc -i 1`. - * - * A mixture of both is also possible: `scala foo --str abc 1` is equivalent to all previous examples. 
- * - * Note that main function overloading is not currently supported, i.e. you cannot define two main methods that have - * the same name in the same project. - * - * Special arguments are used to display help regarding a main function: `--help` and `-h`. If used as argument, the program - * will display some useful information about the main function. This help directly uses the ScalaDoc comment - * associated with the function, more precisely its description and the description of the parameters documented with - * `@param`. Note that if a parameter is named `help` or `h`, or if one of the parameters has as alias one of those names, - * the help displaying will be disabled for that argument. - * For example, for `@newMain def foo(help: Boolean)`, `scala foo -h` will display the help, but `scala foo --help` will fail, - * as it will expect a Boolean value after `--help`. - * - * Parameters may be given annotations to add functionalities to the main function: - * - `main.alias` adds other names to a parameter. For example, if a parameter `node` has as aliases - * `otherNode` and `n`, it may be addressed using `--node`, `--otherNode` or `-n`. - * - * Here is an example of a main function with annotated parameters: - * `@newMain def foo(@newMain.alias("x") number: Int, @newMain.alias("explanation") s: String)`. The following commands are - * equivalent: - * - `scala foo --number 1 -s abc` - * - `scala foo -x 1 -s abc` - * - `scala foo --number 1 --explanation abc` - * - `scala foo -x 1 --explanation abc` - * - * Boolean parameters are considered flags that do not require the "true" or "false" value to be passed. - * For example, `@newMain def foo(i: Boolean)` can be called as `foo` (where `i=false`) or `foo -i` (where `i=true`). - * - * The special `--` marker can be used to indicate that all following arguments are passed verbatim as positional parameters. 
- * For example, `@newMain def foo(args: String*)` can be called as `scala foo a b -- -c -d` which implies that `args=Seq("a", "b", "-c", "-d")`. - */ -@experimental -final class newMain extends MainAnnotation[FromString, Any]: - import newMain.* - import MainAnnotation.* - - private val longArgRegex = "--[a-zA-Z][a-zA-Z0-9]+".r - private val shortArgRegex = "-[a-zA-Z]".r - // TODO: what should be considered as an invalid argument? - // Consider argument `-3.14`, `--i`, `-name` - private val illFormedName = "--[a-zA-Z]|-[a-zA-Z][a-zA-Z0-9]+".r - /** After this marker, all arguments are positional */ - private inline val positionArgsMarker = "--" - - extension (param: Parameter) - private def aliasNames: Seq[String] = - param.annotations.collect{ case alias: alias => getNameWithMarker(alias.name) } - private def isFlag: Boolean = - param.typeName == "scala.Boolean" - - private def getNameWithMarker(name: String): String = - if name.length > 1 then s"--$name" - else if name.length == 1 then s"-$name" - else assert(false, "invalid name") - - def command(info: Info, args: Seq[String]): Option[Seq[String]] = - val names = Names(info) - if Help.shouldPrintDefaultHelp(names, args) then - Help.printUsage(info) - Help.printExplain(info) - None - else - preProcessArgs(info, names, args).orElse { - Help.printUsage(info) - None - } - end command - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: FromString[T]): () => T = { - if arg.nonEmpty then parse[T](param, arg) - else - assert(param.hasDefault) - - defaultArgument.get - } - - def varargGetter[T](param: Parameter, args: Seq[String])(using p: FromString[T]): () => Seq[T] = { - val getters = args.map(arg => parse[T](param, arg)) - () => getters.map(_()) - } - - def run(execProgram: () => Any): Unit = - if !hasParseErrors then execProgram() - - private def preProcessArgs(info: Info, names: Names, args: Seq[String]): Option[Seq[String]] = - var hasError: Boolean = false - def 
error(msg: String): Unit = { - hasError = true - println(s"Error: $msg") - } - - val (positionalArgs, byNameArgsMap) = - val positionalArgs = List.newBuilder[String] - val byNameArgs = List.newBuilder[(String, String)] - val flagsAdded = mutable.Set.empty[String] - // TODO: once we settle on a spec, we should implement this in a more elegant way - var i = 0 - while i < args.length do - args(i) match - case name @ (longArgRegex() | shortArgRegex()) => - if names.isFlagName(name) then - val canonicalName = names.canonicalName(name).get - flagsAdded += canonicalName - byNameArgs += ((canonicalName, "true")) - else if i == args.length - 1 then // last argument -x ot --xyz - error(s"missing argument for ${name}") - else args(i + 1) match - case longArgRegex() | shortArgRegex() | `positionArgsMarker` => - error(s"missing argument for ${name}") - case value => - names.canonicalName(name) match - case Some(canonicalName) => - byNameArgs += ((canonicalName, value)) - case None => - error(s"unknown argument name: $name") - i += 1 // consume `value` - case name @ illFormedName() => - error(s"ill-formed argument name: $name") - case `positionArgsMarker` => - i += 1 // skip `--` - // all args after `--` are positional args - while i < args.length do - positionalArgs += args(i) - i += 1 - case value => - positionalArgs += value - i += 1 - end while - - // Add "false" for all flags not present in the arguments - for - param <- info.parameters - if param.isFlag - name = getNameWithMarker(param.name) - if !flagsAdded.contains(name) - do - byNameArgs += ((name, "false")) - - (positionalArgs.result(), byNameArgs.result().groupMap(_._1)(_._2)) - - // List of arguments in the order they should be passed to the main function - val orderedArgs: List[String] = - def rec(params: List[Parameter], acc: List[String], remainingArgs: List[String]): List[String] = - params match - case Nil => - for (remainingArg <- remainingArgs) error(s"unused argument: $remainingArg") - acc.reverse - case 
param :: tailParams => - if param.isVarargs then // also last arguments - byNameArgsMap.get(param.name) match - case Some(byNameVarargs) => acc.reverse ::: byNameVarargs.toList ::: remainingArgs - case None => acc.reverse ::: remainingArgs - else byNameArgsMap.get(getNameWithMarker(param.name)) match - case Some(argValues) => - assert(argValues.nonEmpty, s"${param.name} present in byNameArgsMap, but it has no argument value") - if argValues.length > 1 then - error(s"more than one value for ${param.name}: ${argValues.mkString(", ")}") - rec(tailParams, argValues.last :: acc, remainingArgs) - - case None => - remainingArgs match - case arg :: rest => - rec(tailParams, arg :: acc, rest) - case Nil => - if !param.hasDefault then - error(s"missing argument for ${param.name}") - rec(tailParams, "" :: acc, Nil) - rec(info.parameters.toList, Nil, positionalArgs) - - if hasError then None - else Some(orderedArgs) - end preProcessArgs - - private var hasParseErrors: Boolean = false - - private def parse[T](param: Parameter, arg: String)(using p: FromString[T]): () => T = - p.fromStringOption(arg) match - case Some(t) => - () => t - case None => - /** Issue an error, and return an uncallable getter */ - println(s"Error: could not parse argument for `${param.name}` of type ${param.typeName.split('.').last}: $arg") - hasParseErrors = true - () => throw new AssertionError("trying to get invalid argument") - - @experimental // MiMa does not check scope inherited @experimental - private object Help: - - /** The name of the special argument to display the method's help. - * If one of the method's parameters is called the same, will be ignored. - */ - private inline val helpArg = "--help" - - /** The short name of the special argument to display the method's help. - * If one of the method's parameters uses the same short name, will be ignored. 
- */ - private inline val shortHelpArg = "-h" - - private inline val maxUsageLineLength = 120 - - def printUsage(info: Info): Unit = - def argsUsage: Seq[String] = - for (param <- info.parameters) - yield { - val canonicalName = getNameWithMarker(param.name) - val namesPrint = (canonicalName +: param.aliasNames).mkString("[", " | ", "]") - val shortTypeName = param.typeName.split('.').last - if param.isVarargs then s"[<$shortTypeName> [<$shortTypeName> [...]]]" - else if param.hasDefault then s"[$namesPrint <$shortTypeName>]" - else if param.isFlag then s"$namesPrint" - else s"$namesPrint <$shortTypeName>" - } - - def wrapArgumentUsages(argsUsage: Seq[String], maxLength: Int): Seq[String] = { - def recurse(args: Seq[String], currentLine: String, acc: Vector[String]): Seq[String] = - (args, currentLine) match { - case (Nil, "") => acc - case (Nil, l) => (acc :+ l) - case (arg +: t, "") => recurse(t, arg, acc) - case (arg +: t, l) if l.length + 1 + arg.length <= maxLength => recurse(t, s"$l $arg", acc) - case (arg +: t, l) => recurse(t, arg, acc :+ l) - } - - recurse(argsUsage, "", Vector()).toList - } - - val printUsageBeginning = s"Usage: ${info.name} " - val argsOffset = printUsageBeginning.length - val printUsages = wrapArgumentUsages(argsUsage, maxUsageLineLength - argsOffset) - - println(printUsageBeginning + printUsages.mkString("\n" + " " * argsOffset)) - end printUsage - - def printExplain(info: Info): Unit = - def shiftLines(s: Seq[String], shift: Int): String = s.map(" " * shift + _).mkString("\n") - - def wrapLongLine(line: String, maxLength: Int): List[String] = { - def recurse(s: String, acc: Vector[String]): Seq[String] = - val lastSpace = s.trim.nn.lastIndexOf(' ', maxLength) - if ((s.length <= maxLength) || (lastSpace < 0)) - acc :+ s - else { - val (shortLine, rest) = s.splitAt(lastSpace) - recurse(rest.trim.nn, acc :+ shortLine) - } - - recurse(line, Vector()).toList - } - - println() - - if (info.documentation.nonEmpty) - 
println(wrapLongLine(info.documentation, maxUsageLineLength).mkString("\n")) - if (info.parameters.nonEmpty) { - val argNameShift = 2 - val argDocShift = argNameShift + 2 - - println("Arguments:") - for param <- info.parameters do - val canonicalName = getNameWithMarker(param.name) - val otherNames = param.aliasNames match { - case Seq() => "" - case names => names.mkString("(", ", ", ") ") - } - val argDoc = StringBuilder(" " * argNameShift) - argDoc.append(s"$canonicalName $otherNames- ${param.typeName.split('.').last}") - if param.isVarargs then argDoc.append(" (vararg)") - else if param.hasDefault then argDoc.append(" (optional)") - - if (param.documentation.nonEmpty) { - val shiftedDoc = - param.documentation.split("\n").nn - .map(line => shiftLines(wrapLongLine(line.nn, maxUsageLineLength - argDocShift), argDocShift)) - .mkString("\n") - argDoc.append("\n").append(shiftedDoc) - } - - println(argDoc) - } - end printExplain - - def shouldPrintDefaultHelp(names: Names, args: Seq[String]): Boolean = - val helpIsOverridden = names.canonicalName(helpArg).isDefined - val shortHelpIsOverridden = names.canonicalName(shortHelpArg).isDefined - (!helpIsOverridden && args.contains(helpArg)) || - (!shortHelpIsOverridden && args.contains(shortHelpArg)) - - end Help - - @experimental // MiMa does not check scope inherited @experimental - private class Names(info: Info): - - checkNames() - checkFlags() - - private lazy val namesToCanonicalName: Map[String, String] = - info.parameters.flatMap(param => - val canonicalName = getNameWithMarker(param.name) - (canonicalName -> canonicalName) +: param.aliasNames.map(_ -> canonicalName) - ).toMap - - private lazy val canonicalFlagsNames: Set[String] = - info.parameters.collect { - case param if param.isFlag => getNameWithMarker(param.name) - }.toSet - - def canonicalName(name: String): Option[String] = namesToCanonicalName.get(name) - - def isFlagName(name: String): Boolean = - 
namesToCanonicalName.get(name).map(canonicalFlagsNames.contains).contains(true) - - override def toString(): String = s"Names($namesToCanonicalName)" - - private def checkNames(): Unit = - def checkDuplicateNames() = - val nameAndCanonicalName = info.parameters.flatMap { paramInfo => - (getNameWithMarker(paramInfo.name) +: paramInfo.aliasNames).map(_ -> paramInfo.name) - } - val nameToNames = nameAndCanonicalName.groupMap(_._1)(_._2) - for (name, canonicalNames) <- nameToNames if canonicalNames.length > 1 do - throw IllegalArgumentException(s"$name is used for multiple parameters: ${canonicalNames.mkString(", ")}") - def checkValidNames() = - def isValidArgName(name: String): Boolean = - longArgRegex.matches(s"--$name") || shortArgRegex.matches(s"-$name") - for param <- info.parameters do - if !isValidArgName(param.name) then - throw IllegalArgumentException(s"The following argument name is invalid: ${param.name}") - for annot <- param.annotations do - annot match - case alias: alias if !isValidArgName(alias.name) => - throw IllegalArgumentException(s"The following alias is invalid: ${alias.name}") - case _ => - - checkValidNames() - checkDuplicateNames() - - private def checkFlags(): Unit = - for param <- info.parameters if param.isFlag && param.hasDefault do - throw IllegalArgumentException(s"@newMain flag parameters cannot have a default value. `${param.name}` has a default value.") - - end Names - -end newMain - -object newMain: - - /** Alias name for the parameter. - * - * If the name has one character, then it is a short name (e.g. `-i`). - * If the name has more than one characters, then it is a long name (e.g. `--input`). 
- */ - @experimental - final class alias(val name: String) extends MainAnnotation.ParameterAnnotation - -end newMain diff --git a/library/src/scala/annotation/retains.scala b/library/src/scala/annotation/retains.scala index 4fa14e635136..909adc13a1c2 100644 --- a/library/src/scala/annotation/retains.scala +++ b/library/src/scala/annotation/retains.scala @@ -12,12 +12,20 @@ package scala.annotation * non-standard capturing type syntax. */ @experimental -class retains(xs: Any*) extends annotation.StaticAnnotation +class retains(xs: (Any@retainsArg)*) extends annotation.StaticAnnotation -/** Equivalent in meaning to `@retains(cap)`, but consumes less bytecode. +/** Equivalent in meaning to `@retains(cap)`, but consumes less bytecode. */ @experimental class retainsCap() extends annotation.StaticAnnotation // This special case is needed to be able to load standard library modules without // cyclic reference errors. Specifically, load sequences involving IterableOnce. +/** Internal use, only for parameters of `retains` and `retainsByName`. + */ +@experimental +class retainsArg extends annotation.StaticAnnotation + // This annotation prevents argument references to retains and retainsByName from being + // augmented with explicit arguments. That's unsound in general, but necessary + // since a captureRef could have an impure context function type, A ?=> B, but + // we still need to have the unapplied captureRef in the annotation. 
diff --git a/library/src/scala/annotation/retainsByName.scala b/library/src/scala/annotation/retainsByName.scala index 421e0400c4e1..e6e3dafcb752 100644 --- a/library/src/scala/annotation/retainsByName.scala +++ b/library/src/scala/annotation/retainsByName.scala @@ -2,5 +2,5 @@ package scala.annotation /** An annotation that indicates capture of an enclosing by-name type */ -@experimental class retainsByName(xs: Any*) extends annotation.StaticAnnotation +@experimental class retainsByName(xs: (Any@retainsArg)*) extends annotation.StaticAnnotation diff --git a/library/src/scala/compiletime/package.scala b/library/src/scala/compiletime/package.scala index 3eca997554a0..a3896a1eeb06 100644 --- a/library/src/scala/compiletime/package.scala +++ b/library/src/scala/compiletime/package.scala @@ -1,7 +1,7 @@ package scala package compiletime -import annotation.compileTimeOnly +import annotation.{compileTimeOnly, experimental} /** Use this method when you have a type, do not have a value for it but want to * pattern match on it. For example, given a type `Tup <: Tuple`, one can @@ -42,6 +42,20 @@ def erasedValue[T]: T = erasedValue[T] @compileTimeOnly("`uninitialized` can only be used as the right hand side of a mutable field definition") def uninitialized: Nothing = ??? +/** Used as the right hand side of a given in a trait, like this + * + * ``` + * given T = deferred + * ``` + * + * This signifies that the given will get a synthesized definition in all classes + * that implement the enclosing trait and that do not contain an explicit overriding + * definition of that given. + */ +@experimental +@compileTimeOnly("`deferred` can only be used as the right hand side of a given definition in a trait") +def deferred: Nothing = ??? + /** The error method is used to produce user-defined compile errors during inline expansion. * If an inline expansion results in a call error(msgStr) the compiler produces an error message containing the given msgStr. 
* diff --git a/library/src/scala/quoted/Expr.scala b/library/src/scala/quoted/Expr.scala index 525f647eaaac..f1045e5bdaca 100644 --- a/library/src/scala/quoted/Expr.scala +++ b/library/src/scala/quoted/Expr.scala @@ -10,12 +10,45 @@ abstract class Expr[+T] private[scala] () object Expr { /** `e.betaReduce` returns an expression that is functionally equivalent to `e`, - * however if `e` is of the form `((y1, ..., yn) => e2)(e1, ..., en)` - * then it optimizes this the top most call by returning the result of beta-reducing the application. - * Otherwise returns `expr`. + * however if `e` is of the form `((y1, ..., yn) => e2)(e1, ..., en)` + * then it optimizes the top most call by returning the result of beta-reducing the application. + * Similarly, all outermost curried function applications will be beta-reduced, if possible. + * Otherwise returns `expr`. * - * To retain semantics the argument `ei` is bound as `val yi = ei` and by-name arguments to `def yi = ei`. - * Some bindings may be elided as an early optimization. + * To retain semantics the argument `ei` is bound as `val yi = ei` and by-name arguments to `def yi = ei`. + * Some bindings may be elided as an early optimization. + * + * Example: + * ```scala sc:nocompile + * ((a: Int, b: Int) => a + b).apply(x, y) + * ``` + * will be reduced to + * ```scala sc:nocompile + * val a = x + * val b = y + * a + b + * ``` + * + * Generally: + * ```scala sc:nocompile + * ([X1, Y1, ...] => (x1, y1, ...) => ... => [Xn, Yn, ...] => (xn, yn, ...) => f[X1, Y1, ..., Xn, Yn, ...](x1, y1, ..., xn, yn, ...))).apply[Tx1, Ty1, ...](myX1, myY1, ...)....apply[Txn, Tyn, ...](myXn, myYn, ...) + * ``` + * will be reduced to + * ```scala sc:nocompile + * type X1 = Tx1 + * type Y1 = Ty1 + * ... + * val x1 = myX1 + * val y1 = myY1 + * ... + * type Xn = Txn + * type Yn = Tyn + * ... + * val xn = myXn + * val yn = myYn + * ... + * f[X1, Y1, ..., Xn, Yn, ...](x1, y1, ..., xn, yn, ...) 
+ * ``` */ def betaReduce[T](expr: Expr[T])(using Quotes): Expr[T] = import quotes.reflect.* diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index fa96b73551d1..d048d8d728d5 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -211,6 +211,11 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => * +- MatchCase * +- TypeBounds * +- NoPrefix + * +- FlexibleType + * + * +- MethodTypeKind -+- Contextual + * +- Implicit + * +- Plain * * +- Selector -+- SimpleSelector * +- RenameSelector @@ -589,8 +594,8 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => trait DefDefModule { this: DefDef.type => /** Create a method definition `def f[..](...)` with the signature defined in the symbol. * - * The `rhsFn` is a function that receives references to its parameters and should return - * `Some` containing the implementation of the method. Returns `None` the method has no implementation. + * The `rhsFn` is a function that receives references to its parameters, and should return + * `Some` containing the implementation of the method, or `None` if the method has no implementation. * Any definition directly inside the implementation should have `symbol` as owner. * * Use `Symbol.asQuotes` to create the rhs using quoted code. @@ -666,8 +671,8 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => trait ValDefModule { this: ValDef.type => /** Create a value definition `val x`, `var x` or `lazy val x` with the signature defined in the symbol. * - * The `rhs` should return be `Some` containing the implementation of the method. - * Returns `None` the method has no implementation. + * The `rhs` should return `Some` containing the implementation of the method, + * or `None` if the method has no implementation. * Any definition directly inside the implementation should have `symbol` as owner. 
* * Use `Symbol.asQuotes` to create the rhs using quoted code. @@ -774,14 +779,47 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Methods of the module object `val Term` */ trait TermModule { this: Term.type => - /** Returns a term that is functionally equivalent to `t`, + /** Returns a term that is functionally equivalent to `t`, * however if `t` is of the form `((y1, ..., yn) => e2)(e1, ..., en)` - * then it optimizes this the top most call by returning the `Some` - * with the result of beta-reducing the application. + * then it optimizes the top most call by returning `Some` + * with the result of beta-reducing the function application. + * Similarly, all outermost curried function applications will be beta-reduced, if possible. * Otherwise returns `None`. * - * To retain semantics the argument `ei` is bound as `val yi = ei` and by-name arguments to `def yi = ei`. - * Some bindings may be elided as an early optimization. + * To retain semantics the argument `ei` is bound as `val yi = ei` and by-name arguments to `def yi = ei`. + * Some bindings may be elided as an early optimization. + * + * Example: + * ```scala sc:nocompile + * ((a: Int, b: Int) => a + b).apply(x, y) + * ``` + * will be reduced to + * ```scala sc:nocompile + * val a = x + * val b = y + * a + b + * ``` + * + * Generally: + * ```scala sc:nocompile + * ([X1, Y1, ...] => (x1, y1, ...) => ... => [Xn, Yn, ...] => (xn, yn, ...) => f[X1, Y1, ..., Xn, Yn, ...](x1, y1, ..., xn, yn, ...))).apply[Tx1, Ty1, ...](myX1, myY1, ...)....apply[Txn, Tyn, ...](myXn, myYn, ...) + * ``` + * will be reduced to + * ```scala sc:nocompile + * type X1 = Tx1 + * type Y1 = Ty1 + * ... + * val x1 = myX1 + * val y1 = myY1 + * ... + * type Xn = Txn + * type Yn = Tyn + * ... + * val xn = myXn + * val yn = myYn + * ... + * f[X1, Y1, ..., Xn, Yn, ...](x1, y1, ..., xn, yn, ...) 
+ * ``` */ def betaReduce(term: Term): Option[Term] @@ -3201,6 +3239,15 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** `TypeTest` that allows testing at runtime in a pattern match if a `TypeRepr` is a `MethodOrPoly` */ given MethodOrPolyTypeTest: TypeTest[TypeRepr, MethodOrPoly] + /** Type which decides on the kind of parameter list represented by `MethodType`. */ + enum MethodTypeKind: + /** Represents a parameter list without any implicitness of parameters, like (x1: X1, x2: X2, ...) */ + case Plain + /** Represents a parameter list with implicit parameters, like `(implicit X1, ..., Xn)`, `(using X1, ..., Xn)`, `(using x1: X1, ..., xn: Xn)` */ + case Implicit + /** Represents a parameter list of a contextual method, like `(using X1, ..., Xn)` or `(using x1: X1, ..., xn: Xn)` */ + case Contextual + /** Type of the definition of a method taking a single list of parameters. It's return type may be a MethodType. */ type MethodType <: MethodOrPoly @@ -3213,6 +3260,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Methods of the module object `val MethodType` */ trait MethodTypeModule { this: MethodType.type => def apply(paramNames: List[String])(paramInfosExp: MethodType => List[TypeRepr], resultTypeExp: MethodType => TypeRepr): MethodType + def apply(kind: MethodTypeKind)(paramNames: List[String])(paramInfosExp: MethodType => List[TypeRepr], resultTypeExp: MethodType => TypeRepr): MethodType def unapply(x: MethodType): (List[String], List[TypeRepr], TypeRepr) } @@ -3222,8 +3270,12 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Extension methods of `MethodType` */ trait MethodTypeMethods: extension (self: MethodType) - /** Is this the type of using parameter clause `(implicit X1, ..., Xn)`, `(using X1, ..., Xn)` or `(using x1: X1, ..., xn: Xn)` */ + /** Is this the type of parameter clause like `(implicit X1, ..., Xn)`, `(using X1, ..., Xn)` or `(using x1: X1, ..., xn: Xn)` */ 
def isImplicit: Boolean + /** Is this the type of parameter clause like `(using X1, ..., Xn)` or `(using x1: X1, x2: X2, ... )` */ + def isContextual: Boolean + /** Returns a MethodTypeKind object representing the implicitness of the MethodType parameter clause. */ + def methodTypeKind: MethodTypeKind /** Is this the type of erased parameter clause `(erased x1: X1, ..., xn: Xn)` */ @deprecated("Use `hasErasedParams` and `erasedParams`", "3.4") def isErased: Boolean @@ -3377,6 +3429,35 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => def unapply(x: NoPrefix): true } + // ----- Flexible Type -------------------------------------------- + + /** Flexible types for explicit nulls */ + type FlexibleType <: TypeRepr + + /** `TypeTest` that allows testing at runtime in a pattern match if a `TypeRepr` is a `FlexibleType` */ + given FlexibleTypeTypeTest: TypeTest[TypeRepr, FlexibleType] + + /** Module object of `type FlexibleType` */ + val FlexibleType: FlexibleTypeModule + + /** Methods of the module object `val FlexibleType` */ + trait FlexibleTypeModule { this: FlexibleType.type => + def apply(tp: TypeRepr): FlexibleType + def unapply(x: FlexibleType): Option[TypeRepr] + } + + /** Makes extension methods on `FlexibleType` available without any imports */ + given FlexibleTypeMethods: FlexibleTypeMethods + + /** Extension methods of `FlexibleType` */ + trait FlexibleTypeMethods: + extension (self: FlexibleType) + def underlying: TypeRepr + def lo: TypeRepr + def hi: TypeRepr + end extension + end FlexibleTypeMethods + /////////////// // CONSTANTS // /////////////// @@ -3992,12 +4073,15 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Is this symbol an anonymous function? */ def isAnonymousFunction: Boolean - /** Is this symbol an abstract type? */ + /** Is this symbol an abstract type or a type parameter? */ def isAbstractType: Boolean /** Is this the constructor of a class? 
*/ def isClassConstructor: Boolean + /** Is this the super accessor? */ + def isSuperAccessor: Boolean + /** Is this the definition of a type? */ def isType: Boolean diff --git a/library/src/scala/quoted/ToExpr.scala b/library/src/scala/quoted/ToExpr.scala index 042c8ff37a52..6c167c353d87 100644 --- a/library/src/scala/quoted/ToExpr.scala +++ b/library/src/scala/quoted/ToExpr.scala @@ -97,7 +97,7 @@ object ToExpr { /** Default implementation of `ToExpr[Array[T]]` */ given ArrayToExpr[T: Type: ToExpr: ClassTag]: ToExpr[Array[T]] with { def apply(arr: Array[T])(using Quotes): Expr[Array[T]] = - '{ Array[T](${Expr(arr.toSeq)}*)(${Expr(summon[ClassTag[T]])}) } + '{ Array[T](${Expr(arr.toSeq)}*)(using ${Expr(summon[ClassTag[T]])}) } } /** Default implementation of `ToExpr[Array[Boolean]]` */ diff --git a/library/src/scala/runtime/LazyVals.scala b/library/src/scala/runtime/LazyVals.scala index ea369539d021..e38e016f5182 100644 --- a/library/src/scala/runtime/LazyVals.scala +++ b/library/src/scala/runtime/LazyVals.scala @@ -9,7 +9,7 @@ import scala.annotation.* */ object LazyVals { @nowarn - private[this] val unsafe: sun.misc.Unsafe = { + private val unsafe: sun.misc.Unsafe = { def throwInitializationException() = throw new ExceptionInInitializerError( new IllegalStateException("Can't find instance of sun.misc.Unsafe") diff --git a/library/src/scala/runtime/Tuples.scala b/library/src/scala/runtime/Tuples.scala index 41425e8559ba..efb54c54d50b 100644 --- a/library/src/scala/runtime/Tuples.scala +++ b/library/src/scala/runtime/Tuples.scala @@ -1,7 +1,5 @@ package scala.runtime -import scala.annotation.experimental - object Tuples { inline val MaxSpecialized = 22 @@ -505,7 +503,6 @@ object Tuples { } } - @experimental def reverse(self: Tuple): Tuple = (self: Any) match { case xxl: TupleXXL => xxlReverse(xxl) case _ => specialCaseReverse(self) diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index 
09feaf11c31d..77b014b80466 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala @@ -1,5 +1,7 @@ package scala.runtime.stdLibPatches +import scala.annotation.experimental + object Predef: import compiletime.summonFrom @@ -61,4 +63,21 @@ object Predef: inline def ne(inline y: AnyRef | Null): Boolean = !(x eq y) + extension (opt: Option.type) + @experimental + inline def fromNullable[T](t: T | Null): Option[T] = Option(t).asInstanceOf[Option[T]] + + /** A type supporting Self-based type classes. + * + * A is TC + * + * expands to + * + * TC { type Self = A } + * + * which is what is needed for a context bound `[A: TC]`. + */ + @experimental + infix type is[A <: AnyKind, B <: Any{type Self <: AnyKind}] = B { type Self = A } + end Predef diff --git a/library/src/scala/runtime/stdLibPatches/language.scala b/library/src/scala/runtime/stdLibPatches/language.scala index 70d5f2d41907..b9f9d47bb0b1 100644 --- a/library/src/scala/runtime/stdLibPatches/language.scala +++ b/library/src/scala/runtime/stdLibPatches/language.scala @@ -84,13 +84,32 @@ object language: object captureChecking /** Experimental support for automatic conversions of arguments, without requiring - * a langauge import `import scala.language.implicitConversions`. + * a language import `import scala.language.implicitConversions`. * * @see [[https://dotty.epfl.ch/docs/reference/experimental/into-modifier]] */ @compileTimeOnly("`into` can only be used at compile time in import statements") object into + /** Experimental support for named tuples. 
+ * + * @see [[https://dotty.epfl.ch/docs/reference/experimental/into-modifier]] + */ + @compileTimeOnly("`namedTuples` can only be used at compile time in import statements") + object namedTuples + + /** Experimental support for new features for better modularity, including + * - better tracking of dependencies through classes + * - better usability of context bounds + * - better syntax and conventions for type classes + * - ability to merge exported types in intersections + * + * @see [[https://dotty.epfl.ch/docs/reference/experimental/modularity]] + * @see [[https://dotty.epfl.ch/docs/reference/experimental/typeclasses]] + */ + @compileTimeOnly("`modularity` can only be used at compile time in import statements") + object modularity + /** Was needed to add support for relaxed imports of extension methods. * The language import is no longer needed as this is now a standard feature since SIP was accepted. * @see [[http://dotty.epfl.ch/docs/reference/contextual/extension-methods]] @@ -98,6 +117,13 @@ object language: @compileTimeOnly("`relaxedExtensionImports` can only be used at compile time in import statements") @deprecated("The experimental.relaxedExtensionImports language import is no longer needed since the feature is now standard", since = "3.4") object relaxedExtensionImports + + /** Enhance match type extractors to follow aliases and singletons. + * + * @see [[https://github.com/scala/improvement-proposals/pull/84]] + */ + @compileTimeOnly("`betterMatchTypeExtractors` can only be used at compile time in import statements") + object betterMatchTypeExtractors end experimental /** The deprecated object contains features that are no longer officially suypported in Scala. @@ -260,6 +286,35 @@ object language: @compileTimeOnly("`3.5` can only be used at compile time in import statements") object `3.5` + /** Set source version to 3.6-migration. 
+ * + * @see [[https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html]] + */ + @compileTimeOnly("`3.6-migration` can only be used at compile time in import statements") + object `3.6-migration` + + /** Set source version to 3.6 + * + * @see [[https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html]] + */ + @compileTimeOnly("`3.6` can only be used at compile time in import statements") + object `3.6` + + /** Set source version to 3.7-migration. + * + * @see [[https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html]] + */ + @compileTimeOnly("`3.7-migration` can only be used at compile time in import statements") + object `3.7-migration` + + /** Set source version to 3.7 + * + * @see [[https://docs.scala-lang.org/scala3/guides/migration/compatibility-intro.html]] + */ + @compileTimeOnly("`3.7` can only be used at compile time in import statements") + object `3.7` + + // !!! Keep in sync with dotty.tools.dotc.config.SourceVersion !!! 
// Also add tests in `tests/pos/source-import-3-x.scala` and `tests/pos/source-import-3-x-migration.scala` diff --git a/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala b/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala index e86732c3453d..bf814ef682e0 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/AutoImports.scala @@ -13,7 +13,7 @@ import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.util.Spans -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala index 8546bbf62384..ded7845ffa4e 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/AutoImportsProvider.scala @@ -15,7 +15,7 @@ import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourceFile import dotty.tools.pc.AutoImports.* import dotty.tools.pc.completions.CompletionPos -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/CompletionItemResolver.scala b/presentation-compiler/src/main/dotty/tools/pc/CompletionItemResolver.scala index c962617ac7fb..291ffe1fec30 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/CompletionItemResolver.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/CompletionItemResolver.scala @@ -9,7 +9,7 @@ import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.Types.TermRef -import 
dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j.CompletionItem @@ -62,7 +62,7 @@ object CompletionItemResolver extends ItemResolver: if companion == NoSymbol || gsym.is(JavaDefined) then if gsymDoc.isEmpty() then if gsym.isAliasType then - fullDocstring(gsym.info.metalsDealias.typeSymbol, search) + fullDocstring(gsym.info.deepDealias.typeSymbol, search) else if gsym.is(Method) then gsym.info.finalResultType match case tr @ TermRef(_, sym) => diff --git a/presentation-compiler/src/main/dotty/tools/pc/ConvertToNamedArgumentsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/ConvertToNamedArgumentsProvider.scala index 00bfe17cb21b..d4f945760cc0 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ConvertToNamedArgumentsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ConvertToNamedArgumentsProvider.scala @@ -12,7 +12,7 @@ import dotty.tools.dotc.core.Types.MethodType import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourceFile -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala index 55c4e4d9e4b6..4416d0c0d000 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ExtractMethodProvider.scala @@ -23,7 +23,7 @@ import dotty.tools.dotc.util.SourceFile import dotty.tools.dotc.util.SourcePosition import dotty.tools.pc.printer.ShortenedTypePrinter import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j.TextEdit import 
org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala index 6f39b4871a06..fd363dbd37a2 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/HoverProvider.scala @@ -5,6 +5,7 @@ import java.util as ju import scala.meta.internal.metals.Report import scala.meta.internal.metals.ReportContext import scala.meta.internal.pc.ScalaHover +import scala.meta.pc.ContentType import scala.meta.pc.HoverSignature import scala.meta.pc.OffsetParams import scala.meta.pc.SymbolSearch @@ -23,14 +24,15 @@ import dotty.tools.dotc.util.SourceFile import dotty.tools.dotc.util.SourcePosition import dotty.tools.pc.printer.ShortenedTypePrinter import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* object HoverProvider: def hover( params: OffsetParams, driver: InteractiveDriver, - search: SymbolSearch + search: SymbolSearch, + contentType: ContentType )(implicit reportContext: ReportContext): ju.Optional[HoverSignature] = val uri = params.uri().nn val text = params.text().nn @@ -101,12 +103,12 @@ object HoverProvider: skipCheckOnName ) match case Nil => - fallbackToDynamics(path, printer) + fallbackToDynamics(path, printer, contentType) case (symbol, tpe) :: _ if symbol.name == nme.selectDynamic || symbol.name == nme.applyDynamic => - fallbackToDynamics(path, printer) + fallbackToDynamics(path, printer, contentType) case symbolTpes @ ((symbol, tpe) :: _) => - val exprTpw = tpe.widenTermRefExpr.metalsDealias + val exprTpw = tpe.widenTermRefExpr.deepDealias val hoverString = tpw match // https://github.com/scala/scala3/issues/8891 @@ -121,12 +123,12 @@ object HoverProvider: if tpe != NoType then tpe else tpw - printer.hoverSymbol(sym, finalTpe) + printer.hoverSymbol(sym, 
finalTpe.deepDealias) end match end hoverString val docString = symbolTpes - .flatMap(symTpe => search.symbolDocumentation(symTpe._1)) + .flatMap(symTpe => search.symbolDocumentation(symTpe._1, contentType)) .map(_.docstring()) .mkString("\n") printer.expressionType(exprTpw) match @@ -144,7 +146,8 @@ object HoverProvider: symbolSignature = Some(hoverString), docstring = Some(docString), forceExpressionType = forceExpressionType, - contextInfo = printer.getUsedRenamesInfo + contextInfo = printer.getUsedRenamesInfo, + contentType = contentType ) ).nn case _ => @@ -159,7 +162,8 @@ object HoverProvider: private def fallbackToDynamics( path: List[Tree], - printer: ShortenedTypePrinter + printer: ShortenedTypePrinter, + contentType: ContentType )(using Context): ju.Optional[HoverSignature] = path match case SelectDynamicExtractor(sel, n, name) => def findRefinement(tp: Type): Option[HoverSignature] = @@ -178,16 +182,17 @@ object HoverProvider: new ScalaHover( expressionType = Some(tpeString), symbolSignature = Some(s"$valOrDef $name$tpeString"), - contextInfo = printer.getUsedRenamesInfo + contextInfo = printer.getUsedRenamesInfo, + contentType = contentType ) ) case RefinedType(parent, _, _) => findRefinement(parent) case _ => None - val refTpe = sel.typeOpt.widen.metalsDealias match + val refTpe = sel.typeOpt.widen.deepDealias match case r: RefinedType => Some(r) - case t: (TermRef | TypeProxy) => Some(t.termSymbol.info.metalsDealias) + case t: (TermRef | TypeProxy) => Some(t.termSymbol.info.deepDealias) case _ => None refTpe.flatMap(findRefinement).asJava diff --git a/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala b/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala index ad6fe9420a81..6b74e3aa2ec1 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/IndexedContext.scala @@ -13,7 +13,7 @@ import dotty.tools.dotc.core.Types.* import 
dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.typer.ImportInfo import dotty.tools.pc.IndexedContext.Result -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* sealed trait IndexedContext: given ctx: Context @@ -75,7 +75,7 @@ sealed trait IndexedContext: ) private def isTypeAliasOf(alias: Symbol, queriedSym: Symbol): Boolean = - alias.isAliasType && alias.info.metalsDealias.typeSymbol == queriedSym + alias.isAliasType && alias.info.deepDealias.typeSymbol == queriedSym final def isEmpty: Boolean = this match case IndexedContext.Empty => true diff --git a/presentation-compiler/src/main/dotty/tools/pc/InferredTypeProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/InferredTypeProvider.scala index b37b1b6dff6c..d8cdbcd8fe69 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/InferredTypeProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/InferredTypeProvider.scala @@ -24,7 +24,7 @@ import dotty.tools.dotc.util.Spans import dotty.tools.dotc.util.Spans.Span import dotty.tools.pc.printer.ShortenedTypePrinter import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j.TextEdit import org.eclipse.lsp4j as l @@ -101,7 +101,7 @@ final class InferredTypeProvider( case _ => true if isInScope(tpe) then tpe - else tpe.metalsDealias + else tpe.deepDealias val printer = ShortenedTypePrinter( symbolSearch, diff --git a/presentation-compiler/src/main/dotty/tools/pc/MetalsInteractive.scala b/presentation-compiler/src/main/dotty/tools/pc/MetalsInteractive.scala index 381e0eaec6a5..9a541ef69942 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/MetalsInteractive.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/MetalsInteractive.scala @@ -5,7 +5,7 @@ import scala.annotation.tailrec import dotc.* import ast.*, tpd.* -import core.*, 
Contexts.*, Decorators.*, Flags.*, Names.*, Symbols.*, Types.* +import core.*, Contexts.*, Flags.*, Names.*, Symbols.*, Types.* import interactive.* import util.* import util.SourcePosition @@ -240,7 +240,7 @@ object MetalsInteractive: end match end enclosingSymbolsWithExpressionType - import dotty.tools.pc.utils.MtagsEnrichments.* + import dotty.tools.pc.utils.InteractiveEnrichments.* private def recoverError( tree: Tree, diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala index 60def237badb..c447123c8725 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcCollector.scala @@ -26,7 +26,7 @@ import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourceFile import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.util.Spans.Span -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* abstract class PcCollector[T]( driver: InteractiveDriver, diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala index 0de81ec39711..fc97dd1f1176 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala @@ -13,13 +13,13 @@ import dotty.tools.dotc.ast.NavigateAST import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.core.Contexts.Context -import dotty.tools.dotc.core.Flags.ModuleClass +import dotty.tools.dotc.core.Flags.{Exported, ModuleClass} import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourceFile import dotty.tools.dotc.util.SourcePosition -import 
dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j.Location @@ -123,9 +123,12 @@ class PcDefinitionProvider( case symbols @ (sym :: other) => val isLocal = sym.source == pos.source if isLocal then - val defs = - Interactive.findDefinitions(List(sym), driver, false, false).filter(_.source == sym.source) - defs.headOption match + val (exportedDefs, otherDefs) = + Interactive.findDefinitions(List(sym), driver, false, false) + .filter(_.source == sym.source) + .partition(_.tree.symbol.is(Exported)) + + otherDefs.headOption.orElse(exportedDefs.headOption) match case Some(srcTree) => val pos = srcTree.namePos pos.toLocation match diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala index cd4f4919b1ef..d9b94ebb82a3 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcDocumentHighlightProvider.scala @@ -6,7 +6,7 @@ import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourcePosition -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j.DocumentHighlight import org.eclipse.lsp4j.DocumentHighlightKind diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala index 9b40f1e6777a..c4fdb97c0418 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcInlayHintsProvider.scala @@ -4,7 +4,7 @@ package dotty.tools.pc import java.nio.file.Paths import scala.meta.internal.metals.ReportContext -import dotty.tools.pc.utils.MtagsEnrichments.* +import 
dotty.tools.pc.utils.InteractiveEnrichments.* import dotty.tools.pc.printer.ShortenedTypePrinter import scala.meta.internal.pc.InlayHints import scala.meta.internal.pc.LabelPart @@ -24,7 +24,6 @@ import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourceFile import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.util.Spans.Span -import dotty.tools.pc.IndexedContext import org.eclipse.lsp4j.InlayHint import org.eclipse.lsp4j.InlayHintKind @@ -43,6 +42,7 @@ class PcInlayHintsProvider( val source = SourceFile.virtual(filePath.toString, sourceText) driver.run(uri, source) + given InlayHintsParams = params given InferredType.Text = InferredType.Text(text) given ctx: Context = driver.currentCtx @@ -66,7 +66,7 @@ class PcInlayHintsProvider( tree: Tree, ): InlayHints = tree match - case ImplicitConversion(symbol, range) if params.implicitConversions() => + case ImplicitConversion(symbol, range) => val adjusted = adjustPos(range) inlayHints .add( @@ -79,8 +79,7 @@ class PcInlayHintsProvider( LabelPart(")") :: Nil, InlayHintKind.Parameter, ) - case ImplicitParameters(symbols, pos, allImplicit) - if params.implicitParameters() => + case ImplicitParameters(symbols, pos, allImplicit) => val labelParts = symbols.map(s => List(labelPart(s, s.decodedName))) val label = if allImplicit then labelParts.separated("(using ", ", ", ")") @@ -90,14 +89,14 @@ class PcInlayHintsProvider( label, InlayHintKind.Parameter, ) - case ValueOf(label, pos) if params.implicitParameters() => + case ValueOf(label, pos) => inlayHints.add( adjustPos(pos).toLsp, LabelPart("(") :: LabelPart(label) :: List(LabelPart(")")), InlayHintKind.Parameter, ) case TypeParameters(tpes, pos, sel) - if params.typeParameters() && !syntheticTupleApply(sel) => + if !syntheticTupleApply(sel) => val label = tpes.map(toLabelParts(_, pos)).separated("[", ", ", "]") inlayHints.add( adjustPos(pos).endPos.toLsp, @@ -105,7 +104,7 @@ class PcInlayHintsProvider( InlayHintKind.Type, ) case 
InferredType(tpe, pos, defTree) - if params.inferredTypes() && !isErrorTpe(tpe) => + if !isErrorTpe(tpe) => val adjustedPos = adjustPos(pos).endPos if inlayHints.containsDef(adjustedPos.start) then inlayHints else @@ -140,7 +139,7 @@ class PcInlayHintsProvider( isInScope(tycon) && args.forall(isInScope) case _ => true if isInScope(tpe) then tpe - else tpe.metalsDealias(using indexedCtx.ctx) + else tpe.deepDealias(using indexedCtx.ctx) val dealiased = optDealias(tpe) val tpeStr = printer.tpe(dealiased) @@ -192,14 +191,16 @@ class PcInlayHintsProvider( end PcInlayHintsProvider object ImplicitConversion: - def unapply(tree: Tree)(using Context) = - tree match - case Apply(fun: Ident, args) if isSynthetic(fun) => - implicitConversion(fun, args) - case Apply(Select(fun, name), args) - if name == nme.apply && isSynthetic(fun) => - implicitConversion(fun, args) - case _ => None + def unapply(tree: Tree)(using params: InlayHintsParams, ctx: Context) = + if (params.implicitConversions()) { + tree match + case Apply(fun: Ident, args) if isSynthetic(fun) => + implicitConversion(fun, args) + case Apply(Select(fun, name), args) + if name == nme.apply && isSynthetic(fun) => + implicitConversion(fun, args) + case _ => None + } else None private def isSynthetic(tree: Tree)(using Context) = tree.span.isSynthetic && tree.symbol.isOneOf(Flags.GivenOrImplicit) @@ -213,52 +214,64 @@ object ImplicitConversion: end ImplicitConversion object ImplicitParameters: - def unapply(tree: Tree)(using Context) = - tree match - case Apply(fun, args) - if args.exists(isSyntheticArg) && !tree.sourcePos.span.isZeroExtent => - val (implicitArgs, providedArgs) = args.partition(isSyntheticArg) - val allImplicit = providedArgs.isEmpty || providedArgs.forall { - case Ident(name) => name == nme.MISSING - case _ => false - } - val pos = implicitArgs.head.sourcePos - Some(implicitArgs.map(_.symbol), pos, allImplicit) - case _ => None + def unapply(tree: Tree)(using params: InlayHintsParams, ctx: Context) = + 
if (params.implicitParameters()) { + tree match + case Apply(fun, args) + if args.exists(isSyntheticArg) && !tree.sourcePos.span.isZeroExtent => + val (implicitArgs, providedArgs) = args.partition(isSyntheticArg) + val allImplicit = providedArgs.isEmpty || providedArgs.forall { + case Ident(name) => name == nme.MISSING + case _ => false + } + val pos = implicitArgs.head.sourcePos + Some(implicitArgs.map(_.symbol), pos, allImplicit) + case _ => None + } else None private def isSyntheticArg(tree: Tree)(using Context) = tree match case tree: Ident => - tree.span.isSynthetic && tree.symbol.isOneOf(Flags.GivenOrImplicit) + tree.span.isSynthetic && tree.symbol.isOneOf(Flags.GivenOrImplicit) && + !isQuotes(tree) case _ => false + + // Decorations for Quotes are rarely useful + private def isQuotes(tree: Tree)(using Context) = + tree.tpe.typeSymbol == defn.QuotesClass + end ImplicitParameters object ValueOf: - def unapply(tree: Tree)(using Context) = - tree match - case Apply(ta @ TypeApply(fun, _), _) - if fun.span.isSynthetic && isValueOf(fun) => - Some( - "new " + tpnme.valueOf.decoded.capitalize + "(...)", - fun.sourcePos, - ) - case _ => None + def unapply(tree: Tree)(using params: InlayHintsParams, ctx: Context) = + if (params.implicitParameters()) { + tree match + case Apply(ta @ TypeApply(fun, _), _) + if fun.span.isSynthetic && isValueOf(fun) => + Some( + "new " + tpnme.valueOf.decoded.capitalize + "(...)", + fun.sourcePos, + ) + case _ => None + } else None private def isValueOf(tree: Tree)(using Context) = val symbol = tree.symbol.maybeOwner symbol.name.decoded == tpnme.valueOf.decoded.capitalize end ValueOf object TypeParameters: - def unapply(tree: Tree)(using Context) = - tree match - case TypeApply(sel: Select, _) if sel.isForComprehensionMethod => None - case TypeApply(fun, args) if inferredTypeArgs(args) => - val pos = fun match - case sel: Select if sel.isInfix => - sel.sourcePos.withEnd(sel.nameSpan.end) - case _ => fun.sourcePos - val tpes = 
args.map(_.typeOpt.stripTypeVar.widen.finalResultType) - Some((tpes, pos.endPos, fun)) - case _ => None + def unapply(tree: Tree)(using params: InlayHintsParams, ctx: Context) = + if (params.typeParameters()) { + tree match + case TypeApply(sel: Select, _) + if sel.isForComprehensionMethod || sel.isInfix || + sel.symbol.name == nme.unapply => + None + case TypeApply(fun, args) if inferredTypeArgs(args) => + val tpes = args.map(_.tpe.stripTypeVar.widen.finalResultType) + Some((tpes, fun.sourcePos.endPos, fun)) + case _ => None + } else None + private def inferredTypeArgs(args: List[Tree]): Boolean = args.forall { case tt: TypeTree if tt.span.exists && !tt.span.isZeroExtent => true @@ -271,29 +284,35 @@ object InferredType: object Text: def apply(text: Array[Char]): Text = text - def unapply(tree: Tree)(using text: Text, cxt: Context) = - tree match - case vd @ ValDef(_, tpe, _) - if isValidSpan(tpe.span, vd.nameSpan) && - !vd.symbol.is(Flags.Enum) && - !isValDefBind(text, vd) => - if vd.symbol == vd.symbol.sourceSymbol then - Some(tpe.typeOpt, tpe.sourcePos.withSpan(vd.nameSpan), vd) - else None - case vd @ DefDef(_, _, tpe, _) - if isValidSpan(tpe.span, vd.nameSpan) && - tpe.span.start >= vd.nameSpan.end && - !vd.symbol.isConstructor && - !vd.symbol.is(Flags.Mutable) => - if vd.symbol == vd.symbol.sourceSymbol then - Some(tpe.typeOpt, tpe.sourcePos, vd) - else None - case bd @ Bind( - name, - Ident(nme.WILDCARD), - ) => - Some(bd.symbol.info, bd.namePos, bd) - case _ => None + def unapply(tree: Tree)(using params: InlayHintsParams, text: Text, ctx: Context) = + if (params.inferredTypes()) { + tree match + case vd @ ValDef(_, tpe, _) + if isValidSpan(tpe.span, vd.nameSpan) && + !vd.symbol.is(Flags.Enum) && + (isNotInUnapply(vd) || params.hintsInPatternMatch()) && + !isValDefBind(text, vd) => + if vd.symbol == vd.symbol.sourceSymbol then + Some(tpe.tpe, tpe.sourcePos.withSpan(vd.nameSpan), vd) + else None + case vd @ DefDef(_, _, tpe, _) + if isValidSpan(tpe.span, 
vd.nameSpan) && + tpe.span.start >= vd.nameSpan.end && + !vd.symbol.isConstructor && + !vd.symbol.is(Flags.Mutable) => + if vd.symbol == vd.symbol.sourceSymbol then + Some(tpe.tpe, tpe.sourcePos, vd) + else None + case bd @ Bind( + name, + Ident(nme.WILDCARD), + ) if !bd.span.isZeroExtent && bd.symbol.isTerm && params.hintsInPatternMatch() => + Some(bd.symbol.info, bd.namePos, bd) + case _ => None + } else None + + private def isNotInUnapply(vd: ValDef)(using Context) = + vd.rhs.span.exists && vd.rhs.span.start > vd.nameSpan.end private def isValidSpan(tpeSpan: Span, nameSpan: Span): Boolean = tpeSpan.isZeroExtent && diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala b/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala index 39365475a075..38b5e8d0069b 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcInlineValueProviderImpl.scala @@ -17,7 +17,7 @@ import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourcePosition -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala index 8a441e1e385a..94482767f917 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcRenameProvider.scala @@ -8,7 +8,7 @@ import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourcePosition -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import 
org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala index f1a0b6a65aa7..a5332f1e4ff6 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcSemanticTokensProvider.scala @@ -12,7 +12,7 @@ import dotty.tools.dotc.core.Symbols.NoSymbol import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourcePosition -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j.SemanticTokenModifiers import org.eclipse.lsp4j.SemanticTokenTypes diff --git a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala index 378564d90bc1..86aa895cb4fc 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/ScalaPresentationCompiler.scala @@ -360,7 +360,7 @@ case class ScalaPresentationCompiler( params.token() ) { access => val driver = access.compiler() - HoverProvider.hover(params, driver, search) + HoverProvider.hover(params, driver, search, config.hoverContentType()) } end hover diff --git a/presentation-compiler/src/main/dotty/tools/pc/SelectionRangeProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SelectionRangeProvider.scala index 9dce37028bea..7973f4103ff6 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SelectionRangeProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SelectionRangeProvider.scala @@ -12,7 +12,7 @@ import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourceFile import dotty.tools.dotc.util.SourcePosition 
-import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j import org.eclipse.lsp4j.SelectionRange diff --git a/presentation-compiler/src/main/dotty/tools/pc/SemanticdbSymbols.scala b/presentation-compiler/src/main/dotty/tools/pc/SemanticdbSymbols.scala index d298a88fc655..9ef19587948e 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SemanticdbSymbols.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SemanticdbSymbols.scala @@ -7,7 +7,7 @@ import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.semanticdb.* -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* object SemanticdbSymbols: diff --git a/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala index f7797efbfb27..edfd9c95fa84 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SignatureHelpProvider.scala @@ -6,20 +6,14 @@ import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver -import dotty.tools.dotc.parsing.Tokens.closingRegionTokens -import dotty.tools.dotc.reporting.ErrorMessageID -import dotty.tools.dotc.reporting.ExpectedTokenButFound import dotty.tools.dotc.util.Signatures import dotty.tools.dotc.util.SourceFile -import dotty.tools.dotc.util.Spans -import dotty.tools.dotc.util.Spans.Span import dotty.tools.pc.printer.ShortenedTypePrinter import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l import scala.jdk.CollectionConverters.* -import 
scala.jdk.OptionConverters.* import scala.meta.internal.metals.ReportContext import scala.meta.pc.OffsetParams import scala.meta.pc.SymbolDocumentation diff --git a/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala index aa1508f89313..0743361f255d 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/SymbolInformationProvider.scala @@ -12,9 +12,9 @@ import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.Symbols.* -import dotty.tools.pc.utils.MtagsEnrichments.metalsDealias +import dotty.tools.pc.utils.InteractiveEnrichments.deepDealias import dotty.tools.pc.SemanticdbSymbols -import dotty.tools.pc.utils.MtagsEnrichments.allSymbols +import dotty.tools.pc.utils.InteractiveEnrichments.allSymbols class SymbolInformationProvider(using Context): private def toSymbols( @@ -77,7 +77,7 @@ class SymbolInformationProvider(using Context): then classSym.asClass.parentSyms.map(SemanticdbSymbols.symbolName) else Nil val dealisedSymbol = - if sym.isAliasType then sym.info.metalsDealias.typeSymbol else sym + if sym.isAliasType then sym.info.deepDealias.typeSymbol else sym val classOwner = sym.ownersIterator.drop(1).find(s => s.isClass || s.is(Flags.Module)) val overridden = sym.denot.allOverriddenSymbols.toList diff --git a/presentation-compiler/src/main/dotty/tools/pc/TastyUtils.scala b/presentation-compiler/src/main/dotty/tools/pc/TastyUtils.scala index 747f104cfede..62a947aeb50b 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/TastyUtils.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/TastyUtils.scala @@ -21,7 +21,7 @@ object TastyUtils: private def normalTasty(tastyURI: URI): String = val tastyBytes = Files.readAllBytes(Paths.get(tastyURI)) - new 
TastyPrinter(tastyBytes.nn).showContents() + new TastyPrinter(tastyBytes.nn, isBestEffortTasty = false, testPickler = false).showContents() private def htmlTasty( tastyURI: URI, diff --git a/presentation-compiler/src/main/dotty/tools/pc/WorksheetSemanticdbProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/WorksheetSemanticdbProvider.scala index 4c8c92759a3b..bd9efb49d148 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/WorksheetSemanticdbProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/WorksheetSemanticdbProvider.scala @@ -2,7 +2,7 @@ package dotty.tools.pc import java.nio.file.Path -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* trait WorksheetSemanticdbProvider: diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteFileCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteFileCompletions.scala index 31bf7c348119..81337c7d8dcb 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteFileCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteFileCompletions.scala @@ -12,7 +12,7 @@ import dotty.tools.dotc.ast.tpd.Tree import dotty.tools.dotc.ast.untpd.ImportSelector import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.StdNames.* -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteIvyCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteIvyCompletions.scala index ec4a1813a437..718b57cd4828 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteIvyCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/AmmoniteIvyCompletions.scala @@ -4,7 +4,7 @@ import scala.meta.internal.mtags.CoursierComplete import 
dotty.tools.dotc.ast.untpd.ImportSelector import dotty.tools.dotc.core.Contexts.Context -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* object AmmoniteIvyCompletions: def contribute( diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionAffix.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionAffix.scala new file mode 100644 index 000000000000..4ed58c773a7c --- /dev/null +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionAffix.scala @@ -0,0 +1,95 @@ +package dotty.tools.pc.completions + +import org.eclipse.lsp4j.Position +import org.eclipse.lsp4j.Range + +/** + * @param suffixes which we should insert + * @param prefixes which we should insert + * @param snippet which suffix should we insert the snippet $0 + */ +case class CompletionAffix( + suffixes: Set[Suffix], + prefixes: Set[Prefix], + snippet: Suffix, + currentPrefix: Option[String], +): + def addLabelSnippet = suffixes.exists(_.kind == SuffixKind.Bracket) + def hasSnippet = snippet.kind != SuffixKind.NoSuffix + def chain(copyFn: CompletionAffix => CompletionAffix) = copyFn(this) + def withNewSuffix(kind: Suffix) = this.copy(suffixes = suffixes + kind) + def withNewPrefix(kind: Prefix) = this.copy(prefixes = prefixes + kind) + def withCurrentPrefix(currentPrefix: String) = this.copy(currentPrefix = Some(currentPrefix)) + def withNewSuffixSnippet(suffix: Suffix) = + this.copy(suffixes = suffixes + suffix, snippet = suffix) + + def nonEmpty: Boolean = suffixes.nonEmpty || prefixes.nonEmpty + + def toSuffix: String = + def loop(suffixes: List[SuffixKind]): String = + def cursor = if suffixes.head == snippet.kind then "$0" else "" + suffixes match + case SuffixKind.Brace :: tail => s"($cursor)" + loop(tail) + case SuffixKind.Bracket :: tail => s"[$cursor]" + loop(tail) + case SuffixKind.Template :: tail => s" {$cursor}" + loop(tail) + case _ => "" + loop(suffixes.toList.map(_.kind)) + + 
def toSuffixOpt: Option[String] = + val edit = toSuffix + if edit.nonEmpty then Some(edit) else None + + + given Ordering[Position] = Ordering.by(elem => (elem.getLine, elem.getCharacter)) + + def toInsertRange: Option[Range] = + import scala.language.unsafeNulls + + val ranges = prefixes.collect: + case Affix(_, Some(range)) => range + .toList + for + startPos <- ranges.map(_.getStart).minOption + endPos <- ranges.map(_.getEnd).maxOption + yield Range(startPos, endPos) + + private def loopPrefix(prefixes: List[PrefixKind]): String = + prefixes match + case PrefixKind.New :: tail => "new " + loopPrefix(tail) + case _ => "" + + /** + * We need to insert previous prefix, but we don't want to display it in the label i.e. + * ```scala + * scala.util.Tr@@ + * ```` + * should return `new Try[T]: Try[T]` + * but insert `new scala.util.Try` + * + */ + def toInsertPrefix: String = + loopPrefix(prefixes.toList.map(_.kind)) + currentPrefix.getOrElse("") + + def toPrefix: String = + loopPrefix(prefixes.toList.map(_.kind)) + +end CompletionAffix + +object CompletionAffix: + val empty = CompletionAffix( + suffixes = Set.empty, + prefixes = Set.empty, + snippet = Affix(SuffixKind.NoSuffix), + currentPrefix = None, + ) + +enum SuffixKind: + case Brace, Bracket, Template, NoSuffix + +enum PrefixKind: + case New + +type Suffix = Affix[SuffixKind] +type Prefix = Affix[PrefixKind] + +private case class Affix[+T](kind: T, insertRange: Option[Range] = None) diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala index 6e828f8f2058..ad571ff843c3 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionPos.scala @@ -10,7 +10,7 @@ import dotty.tools.dotc.core.Contexts.* import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.util.Spans.* import 
dotty.tools.dotc.interactive.Completion -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala index 2beb4460db56..9cd98de33141 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala @@ -22,7 +22,7 @@ import dotty.tools.pc.AutoImports.AutoImportEdits import dotty.tools.pc.AutoImports.AutoImportsGenerator import dotty.tools.pc.printer.ShortenedTypePrinter import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j.Command import org.eclipse.lsp4j.CompletionItem @@ -153,13 +153,36 @@ class CompletionProvider( val printer = ShortenedTypePrinter(search, IncludeDefaultParam.ResolveLater)(using indexedContext) + val underlyingCompletion = completion match + case CompletionValue.ExtraMethod(_, underlying) => underlying + case other => other + // For overloaded signatures we get multiple symbols, so we need // to recalculate the description - // related issue https://github.com/scala/scala3/issues/11941 - lazy val kind: CompletionItemKind = completion.completionItemKind - val description = completion.description(printer) - val label = completion.labelWithDescription(printer) - val ident = completion.insertText.getOrElse(completion.label) + // related issue https://github.com/lampepfl/scala3/issues/11941 + lazy val kind: CompletionItemKind = underlyingCompletion.completionItemKind + val description = underlyingCompletion.description(printer) + val label = underlyingCompletion.labelWithDescription(printer) + val ident = 
underlyingCompletion.insertText.getOrElse(underlyingCompletion.label) + + lazy val isInStringInterpolation = + path match + // s"My name is $name" + case (_: Ident) :: (_: SeqLiteral) :: (_: Typed) :: Apply( + Select(Apply(Select(Select(_, name), _), _), _), + _ + ) :: _ => + name == StdNames.nme.StringContext + // "My name is $name" + case Literal(Constant(_: String)) :: _ => + true + case _ => + false + + def wrapInBracketsIfRequired(newText: String): String = + if underlyingCompletion.snippetAffix.nonEmpty && isInStringInterpolation then + "{" + newText + "}" + else newText def mkItem( newText: String, @@ -170,25 +193,25 @@ class CompletionProvider( val editRange = if newText.startsWith(oldText) then completionPos.stripSuffixEditRange else completionPos.toEditRange - val textEdit = new TextEdit(range.getOrElse(editRange), newText) + val textEdit = new TextEdit(range.getOrElse(editRange), wrapInBracketsIfRequired(newText)) val item = new CompletionItem(label) item.setSortText(f"${idx}%05d") item.setDetail(description) - item.setFilterText(completion.filterText.getOrElse(completion.label)) + item.setFilterText(underlyingCompletion.filterText.getOrElse(underlyingCompletion.label)) item.setTextEdit(textEdit) - item.setAdditionalTextEdits((completion.additionalEdits ++ additionalEdits).asJava) - completion.insertMode.foreach(item.setInsertTextMode) + item.setAdditionalTextEdits((underlyingCompletion.additionalEdits ++ additionalEdits).asJava) + underlyingCompletion.insertMode.foreach(item.setInsertTextMode) - val data = completion.completionData(buildTargetIdentifier) + val data = underlyingCompletion.completionData(buildTargetIdentifier) item.setData(data.toJson) - item.setTags(completion.lspTags.asJava) + item.setTags(underlyingCompletion.lspTags.asJava) if config.isCompletionSnippetsEnabled() then item.setInsertTextFormat(InsertTextFormat.Snippet) - completion.command.foreach { command => + underlyingCompletion.command.foreach { command => item.setCommand(new 
Command("", command)) } @@ -196,21 +219,8 @@ class CompletionProvider( item end mkItem - val completionTextSuffix = completion.snippetSuffix.toEdit - - lazy val isInStringInterpolation = - path match - // s"My name is $name" - case (_: Ident) :: (_: SeqLiteral) :: (_: Typed) :: Apply( - Select(Apply(Select(Select(_, name), _), _), _), - _ - ) :: _ => - name == StdNames.nme.StringContext - // "My name is $name" - case Literal(Constant(_: String)) :: _ => - true - case _ => - false + val completionTextSuffix = underlyingCompletion.snippetAffix.toSuffix + val completionTextPrefix = underlyingCompletion.snippetAffix.toInsertPrefix lazy val backtickSoftKeyword = path match case (_: Select) :: _ => false @@ -232,7 +242,7 @@ class CompletionProvider( mkItem(nameEdit.getNewText().nn, other.toList, range = Some(nameEdit.getRange().nn)) case _ => mkItem( - v.insertText.getOrElse( ident.backticked(backtickSoftKeyword) + completionTextSuffix), + v.insertText.getOrElse(completionTextPrefix + ident.backticked(backtickSoftKeyword) + completionTextSuffix), edits.edits, range = v.range ) @@ -242,25 +252,25 @@ class CompletionProvider( case IndexedContext.Result.InScope => mkItem( v.insertText.getOrElse( - ident.backticked( - backtickSoftKeyword - ) + completionTextSuffix + completionTextPrefix + ident.backticked(backtickSoftKeyword) + completionTextSuffix ), range = v.range, ) + // Special case when symbol is out of scope, and there is no auto import. 
+ // It means that it will use fully qualified path case _ if isInStringInterpolation => mkItem( - "{" + sym.fullNameBackticked + completionTextSuffix + "}", + "{" + completionTextPrefix + sym.fullNameBackticked + completionTextSuffix + "}", range = v.range ) case _ if v.isExtensionMethod => mkItem( - ident.backticked(backtickSoftKeyword) + completionTextSuffix, + completionTextPrefix + ident.backticked(backtickSoftKeyword) + completionTextSuffix, range = v.range ) case _ => mkItem( - sym.fullNameBackticked( + completionTextPrefix + sym.fullNameBackticked( backtickSoftKeyword ) + completionTextSuffix, range = v.range @@ -270,18 +280,16 @@ class CompletionProvider( end match end mkItemWithImports - completion match + underlyingCompletion match case v: (CompletionValue.Workspace | CompletionValue.Extension | CompletionValue.ImplicitClass) => mkItemWithImports(v) case v: CompletionValue.Interpolator if v.isWorkspace || v.isExtension => mkItemWithImports(v) case _ => - val insert = - completion.insertText.getOrElse(ident.backticked(backtickSoftKeyword)) - mkItem( - insert + completionTextSuffix, - range = completion.range - ) + val nameText = underlyingCompletion.insertText.getOrElse(ident.backticked(backtickSoftKeyword)) + val nameWithAffixes = completionTextPrefix + nameText + completionTextSuffix + mkItem(nameWithAffixes, range = underlyingCompletion.range) + end match end completionItems end CompletionProvider diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionSuffix.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionSuffix.scala deleted file mode 100644 index 580d65089737..000000000000 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionSuffix.scala +++ /dev/null @@ -1,39 +0,0 @@ -package dotty.tools.pc.completions - -/** - * @param suffixes which we should insert - * @param snippet which suffix should we insert the snippet $0 - */ -case class CompletionSuffix( - suffixes: 
Set[SuffixKind], - snippet: SuffixKind, -): - def addLabelSnippet = suffixes.contains(SuffixKind.Bracket) - def hasSnippet = snippet != SuffixKind.NoSuffix - def chain(copyFn: CompletionSuffix => CompletionSuffix) = copyFn(this) - def withNewSuffix(kind: SuffixKind) = - CompletionSuffix(suffixes + kind, snippet) - def withNewSuffixSnippet(kind: SuffixKind) = - CompletionSuffix(suffixes + kind, kind) - def toEdit: String = - def loop(suffixes: List[SuffixKind]): String = - def cursor = if suffixes.head == snippet then "$0" else "" - suffixes match - case SuffixKind.Brace :: tail => s"($cursor)" + loop(tail) - case SuffixKind.Bracket :: tail => s"[$cursor]" + loop(tail) - case SuffixKind.Template :: tail => s" {$cursor}" + loop(tail) - case _ => "" - loop(suffixes.toList) - def toEditOpt: Option[String] = - val edit = toEdit - if edit.nonEmpty then Some(edit) else None -end CompletionSuffix - -object CompletionSuffix: - val empty = CompletionSuffix( - suffixes = Set.empty, - snippet = SuffixKind.NoSuffix, - ) - -enum SuffixKind: - case Brace, Bracket, Template, NoSuffix diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala index 2810fe728b9a..9071b2cd2a23 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionValue.scala @@ -10,7 +10,7 @@ import dotty.tools.dotc.core.StdNames.nme import dotty.tools.dotc.core.Symbols.Symbol import dotty.tools.dotc.core.Types.Type import dotty.tools.pc.printer.ShortenedTypePrinter -import dotty.tools.pc.utils.MtagsEnrichments.decoded +import dotty.tools.pc.utils.InteractiveEnrichments.decoded import org.eclipse.lsp4j.CompletionItemKind import org.eclipse.lsp4j.CompletionItemTag @@ -40,7 +40,7 @@ enum CompletionSource: sealed trait CompletionValue: def label: String def insertText: Option[String] = None - 
def snippetSuffix: CompletionSuffix = CompletionSuffix.empty + def snippetAffix: CompletionAffix = CompletionAffix.empty def additionalEdits: List[TextEdit] = Nil def range: Option[Range] = None def filterText: Option[String] = None @@ -66,7 +66,6 @@ object CompletionValue: sealed trait Symbolic extends CompletionValue: def denotation: Denotation val symbol = denotation.symbol - def isFromWorkspace: Boolean = false override def completionItemDataKind = CompletionItemData.None def isExtensionMethod: Boolean = false @@ -80,6 +79,9 @@ object CompletionValue: ) def importSymbol: Symbol = symbol + override def range: Option[Range] = + snippetAffix.toInsertRange + def completionItemKind(using Context): CompletionItemKind = val symbol = this.symbol if symbol.is(Package) || symbol.is(Module) then @@ -97,20 +99,18 @@ object CompletionValue: override def labelWithDescription( printer: ShortenedTypePrinter )(using Context): String = - if symbol.is(Method) then s"${label}${description(printer)}" - else if symbol.isConstructor then label + if symbol.isConstructor then s"${snippetAffix.toPrefix}${label}${description(printer)}" + else if symbol.is(Method) then s"${label}${description(printer)}" else if symbol.is(Mutable) then s"$label: ${description(printer)}" else if symbol.is(Package) || symbol.is(Module) || symbol.isClass then - if isFromWorkspace then - s"${labelWithSuffix(printer)} -${description(printer)}" - else s"${labelWithSuffix(printer)}${description(printer)}" + s"${labelWithSuffix(printer)}${description(printer)}" else if symbol.isType then labelWithSuffix(printer) else if symbol.isTerm && symbol.info.typeSymbol.is(Module) then s"${label}${description(printer)}" else s"$label: ${description(printer)}" protected def labelWithSuffix(printer: ShortenedTypePrinter)(using Context): String = - if snippetSuffix.addLabelSnippet + if snippetAffix.addLabelSnippet then val printedParams = symbol.info.typeParams.map(p => p.paramName.decoded ++ printer.tpe(p.paramInfo) @@ -126,29 
+126,64 @@ object CompletionValue: case class Compiler( label: String, denotation: Denotation, - override val snippetSuffix: CompletionSuffix + override val snippetAffix: CompletionAffix ) extends Symbolic: override def completionItemDataKind: Integer = CompletionSource.CompilerKind.ordinal + /** + * We need to access original completion in sorting phase. + * This class is only a wrapper to hold both new completion and original completion. + * + * All methods are proxied to @param extraMethod + * + * FIXME Refactor this file to different architercture. At least to somethhing that is easier to modifiy and scale. + * One solution may be a migration to flag based solution. + */ + case class ExtraMethod( + owner: Denotation, + extraMethod: Symbolic, + ) extends Symbolic: + override def additionalEdits: List[TextEdit] = extraMethod.additionalEdits + override def command: Option[String] = extraMethod.command + override def completionData(buildTargetIdentifier: String)(using Context): CompletionItemData = extraMethod.completionData((buildTargetIdentifier)) + override def completionItemKind(using Context): CompletionItemKind = extraMethod.completionItemKind + override def description(printer: ShortenedTypePrinter)(using Context): String = extraMethod.description(printer) + override def labelWithDescription(printer: ShortenedTypePrinter)(using Context): String = extraMethod.labelWithDescription(printer) + override def range: Option[Range] = extraMethod.range + override def denotation: Denotation = extraMethod.denotation + override def label: String = extraMethod.label + override def filterText: Option[String] = extraMethod.filterText + override def importSymbol: Symbol = extraMethod.importSymbol + override def lspTags(using Context): List[CompletionItemTag] = extraMethod.lspTags + override def insertText: Option[String] = extraMethod.insertText + override def isExtensionMethod: Boolean = extraMethod.isExtensionMethod + override def snippetAffix: CompletionAffix = 
extraMethod.snippetAffix + override def insertMode: Option[InsertTextMode] = extraMethod.insertMode + override val symbol: Symbol = extraMethod.symbol + override def completionItemDataKind: Integer = extraMethod.completionItemDataKind + case class Scope( label: String, denotation: Denotation, - override val snippetSuffix: CompletionSuffix, + override val snippetAffix: CompletionAffix, ) extends Symbolic: override def completionItemDataKind: Integer = CompletionSource.ScopeKind.ordinal case class Workspace( label: String, denotation: Denotation, - override val snippetSuffix: CompletionSuffix, + override val snippetAffix: CompletionAffix, override val importSymbol: Symbol ) extends Symbolic: - override def isFromWorkspace: Boolean = true override def completionItemDataKind: Integer = CompletionSource.WorkspaceKind.ordinal override def labelWithDescription(printer: ShortenedTypePrinter)(using Context): String = - if symbol.is(Method) && symbol.name != nme.apply then + if symbol.isConstructor || symbol.name == nme.apply then + s"${snippetAffix.toPrefix}${label}${description(printer)} - ${printer.fullNameString(importSymbol.effectiveOwner)}" + else if symbol.is(Method) then s"${labelWithSuffix(printer)} - ${printer.fullNameString(symbol.effectiveOwner)}" + else if symbol.is(Package) || symbol.is(Module) || symbol.isClass then + s"${labelWithSuffix(printer)} -${description(printer)}" else super.labelWithDescription(printer) /** @@ -157,7 +192,7 @@ object CompletionValue: case class ImplicitClass( label: String, denotation: Denotation, - override val snippetSuffix: CompletionSuffix, + override val snippetAffix: CompletionAffix, override val importSymbol: Symbol, ) extends Symbolic: override def completionItemKind(using Context): CompletionItemKind = @@ -172,7 +207,7 @@ object CompletionValue: case class Extension( label: String, denotation: Denotation, - override val snippetSuffix: CompletionSuffix + override val snippetAffix: CompletionAffix ) extends Symbolic: override 
def completionItemKind(using Context): CompletionItemKind = CompletionItemKind.Method @@ -257,6 +292,7 @@ object CompletionValue: override def completionItemKind(using Context): CompletionItemKind = CompletionItemKind.Folder + // TODO remove this type and return `Compiler`, `Workspace` instead case class Interpolator( denotation: Denotation, label: String, diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala index abb15d45f88a..fb39102399ba 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/Completions.scala @@ -8,15 +8,15 @@ import scala.collection.mutable import scala.meta.internal.metals.Fuzzy import scala.meta.internal.metals.ReportContext import scala.meta.internal.mtags.CoursierComplete -import scala.meta.internal.pc.{IdentifierComparator, MemberOrdering} +import scala.meta.internal.pc.{IdentifierComparator, MemberOrdering, CompletionFuzzy} import scala.meta.pc.* import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.ast.untpd -import dotty.tools.dotc.ast.NavigateAST import dotty.tools.dotc.core.Comments.Comment import dotty.tools.dotc.core.Constants.Constant import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.core.Denotations.SingleDenotation import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.NameOps.* @@ -27,12 +27,13 @@ import dotty.tools.dotc.core.Symbols.* import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.interactive.Completion import dotty.tools.dotc.interactive.Completion.Mode +import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.util.SrcPos import dotty.tools.pc.AutoImports.AutoImportsGenerator -import dotty.tools.pc.completions.OverrideCompletions.OverrideExtractor import 
dotty.tools.pc.buildinfo.BuildInfo -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.completions.OverrideCompletions.OverrideExtractor +import dotty.tools.pc.utils.InteractiveEnrichments.* import dotty.tools.dotc.core.Denotations.SingleDenotation import dotty.tools.dotc.interactive.Interactive @@ -69,13 +70,9 @@ class Completions( false case (_: (Import | Export)) :: _ => false case _ :: (_: (Import | Export)) :: _ => false - case (_: Ident) :: (_: SeqLiteral) :: _ => false case _ => true - private lazy val allowTemplateSuffix: Boolean = - path match - case _ :: New(selectOrIdent: (Select | Ident)) :: _ => true - case _ => false + private lazy val isNew: Boolean = Completion.isInNewContext(adjustedPath) def includeSymbol(sym: Symbol)(using Context): Boolean = def hasSyntheticCursorSuffix: Boolean = @@ -88,7 +85,6 @@ class Completions( val generalExclude = isUninterestingSymbol(sym) || !isNotLocalForwardReference(sym) || - sym.isPackageObject || hasSyntheticCursorSuffix def isWildcardParam(sym: Symbol) = @@ -105,33 +101,38 @@ class Completions( end if end includeSymbol + lazy val fuzzyMatcher: Name => Boolean = name => + if completionMode.is(Mode.Member) then CompletionFuzzy.matchesSubCharacters(completionPos.query, name.toString) + else CompletionFuzzy.matches(completionPos.query, name.toString) + + def enrichedCompilerCompletions(qualType: Type): (List[CompletionValue], SymbolSearch.Result) = + val compilerCompletions = Completion + .rawCompletions(completionPos.originalCursorPosition, completionMode, completionPos.query, path, adjustedPath, Some(fuzzyMatcher)) + + compilerCompletions + .toList + .flatMap(toCompletionValues) + .filterInteresting(qualType) + def completions(): (List[CompletionValue], SymbolSearch.Result) = val (advanced, exclusive) = advancedCompletions(path, completionPos) val (all, result) = if exclusive then (advanced, SymbolSearch.Result.COMPLETE) else - val keywords = - KeywordsCompletions.contribute(path, completionPos, 
comments) + val keywords = KeywordsCompletions.contribute(path, completionPos, comments) val allAdvanced = advanced ++ keywords + path match // should not show completions for toplevel - case Nil | (_: PackageDef) :: _ if completionPos.originalCursorPosition.source.file.extension != "sc" => + case Nil | (_: PackageDef) :: _ if !completionPos.originalCursorPosition.source.file.ext.isScalaScript => (allAdvanced, SymbolSearch.Result.COMPLETE) case Select(qual, _) :: _ if qual.typeOpt.isErroneous => (allAdvanced, SymbolSearch.Result.COMPLETE) case Select(qual, _) :: _ => - val compilerCompletions = Completion.rawCompletions(completionPos.originalCursorPosition, completionMode, completionPos.query, path, adjustedPath) - val (compiler, result) = compilerCompletions - .toList - .flatMap(toCompletionValues) - .filterInteresting(qual.typeOpt.widenDealias) + val (compiler, result) = enrichedCompilerCompletions(qual.typeOpt.widenDealias) (allAdvanced ++ compiler, result) case _ => - val compilerCompletions = Completion.rawCompletions(completionPos.originalCursorPosition, completionMode, completionPos.query, path, adjustedPath) - val (compiler, result) = compilerCompletions - .toList - .flatMap(toCompletionValues) - .filterInteresting() + val (compiler, result) = enrichedCompilerCompletions(defn.AnyType) (allAdvanced ++ compiler, result) end match @@ -147,7 +148,7 @@ class Completions( denots: Seq[SingleDenotation] ): List[CompletionValue] = denots.toList.flatMap: denot => - completionsWithSuffix( + completionsWithAffix( denot, completion.show, (label, denot, suffix) => CompletionValue.Compiler(label, denot, suffix) @@ -157,13 +158,17 @@ class Completions( inline private def undoBacktick(label: String): String = label.stripPrefix("`").stripSuffix("`") + // TODO This has to be refactored to properly split extension methods + // This method has to be fixed even further. The similar problem will be present in shortened type printer. 
private def getParams(symbol: Symbol) = lazy val extensionParam = symbol.extensionParam if symbol.is(Flags.Extension) then symbol.paramSymss.filterNot( _.contains(extensionParam) ) - else symbol.paramSymss + else if symbol.isConstructor then + symbol.owner.paramSymss + else symbol.paramSymss.filter(!_.exists(_.isTypeParam)) private def isAbstractType(symbol: Symbol) = (symbol.info.typeSymbol.is(Trait) // trait A{ def doSomething: Int} @@ -184,20 +189,19 @@ class Completions( ) end isAbstractType - private def findSuffix(symbol: Symbol): CompletionSuffix = - CompletionSuffix.empty + private def findSuffix(symbol: Symbol): CompletionAffix = + CompletionAffix.empty .chain { suffix => // for [] suffix - if shouldAddSnippet && symbol.info.typeParams.nonEmpty - then suffix.withNewSuffixSnippet(SuffixKind.Bracket) + if shouldAddSnippet && symbol.info.typeParams.nonEmpty then + suffix.withNewSuffixSnippet(Affix(SuffixKind.Bracket)) else suffix } .chain { suffix => // for () suffix - if shouldAddSnippet && symbol.is(Flags.Method) - then + if shouldAddSnippet && symbol.is(Flags.Method) then val paramss = getParams(symbol) paramss match case Nil => suffix - case List(Nil) => suffix.withNewSuffix(SuffixKind.Brace) + case List(Nil) => suffix.withNewSuffix(Affix(SuffixKind.Brace)) case _ if config.isCompletionSnippetsEnabled() => val onlyParameterless = paramss.forall(_.isEmpty) lazy val onlyImplicitOrTypeParams = paramss.forall( @@ -205,58 +209,93 @@ class Completions( sym.isType || sym.is(Implicit) || sym.is(Given) } ) - if onlyParameterless then suffix.withNewSuffix(SuffixKind.Brace) + if onlyParameterless then suffix.withNewSuffix(Affix(SuffixKind.Brace)) else if onlyImplicitOrTypeParams then suffix - else if suffix.hasSnippet then - suffix.withNewSuffix(SuffixKind.Brace) - else suffix.withNewSuffixSnippet(SuffixKind.Brace) + else if suffix.hasSnippet then suffix.withNewSuffix(Affix(SuffixKind.Brace)) + else suffix.withNewSuffixSnippet(Affix(SuffixKind.Brace)) case _ => 
suffix end match else suffix } .chain { suffix => // for {} suffix - if shouldAddSnippet && allowTemplateSuffix - && isAbstractType(symbol) - then - if suffix.hasSnippet then suffix.withNewSuffix(SuffixKind.Template) - else suffix.withNewSuffixSnippet(SuffixKind.Template) + if shouldAddSnippet && isNew && isAbstractType(symbol) then + if suffix.hasSnippet then suffix.withNewSuffix(Affix(SuffixKind.Template)) + else suffix.withNewSuffixSnippet(Affix(SuffixKind.Template)) else suffix } end findSuffix - def completionsWithSuffix( + def completionsWithAffix( denot: SingleDenotation, label: String, - toCompletionValue: (String, SingleDenotation, CompletionSuffix) => CompletionValue + toCompletionValue: (String, SingleDenotation, CompletionAffix) => CompletionValue.Symbolic ): List[CompletionValue] = val sym = denot.symbol - // find the apply completion that would need a snippet - val methodDenots: List[SingleDenotation] = - if shouldAddSnippet && completionMode.is(Mode.Term) && - (sym.is(Flags.Module) || sym.isField || sym.isClass && !sym.is(Flags.Trait)) && !sym.is(Flags.JavaDefined) - then - val info = - /* Companion will be added even for normal classes now, - * but it will not show up from classpath. We can suggest - * constructors based on those synthetic applies. 
- */ - if sym.isClass && sym.companionModule.exists then sym.companionModule.info - else denot.info - val applyDenots = info.member(nme.apply).allSymbols.map(_.asSeenFrom(info).asSingleDenotation) - denot :: applyDenots - else denot :: Nil - - methodDenots.map { methodDenot => - val suffix = findSuffix(methodDenot.symbol) + val hasNonSyntheticConstructor = sym.name.isTypeName && sym.isClass + && !sym.is(ModuleClass) && !sym.is(Trait) && !sym.is(Abstract) && !sym.is(Flags.JavaDefined) + + val (extraMethodDenots, skipOriginalDenot): (List[SingleDenotation], Boolean) = + if shouldAddSnippet && isNew && hasNonSyntheticConstructor then + val constructors = sym.info.member(nme.CONSTRUCTOR).allSymbols.map(_.asSingleDenotation) + .filter(_.symbol.isAccessibleFrom(denot.info)) + constructors -> true + + else if shouldAddSnippet && completionMode.is(Mode.Term) && sym.name.isTermName && + !sym.is(Flags.JavaDefined) && (sym.isClass || sym.is(Module) || (sym.isField && denot.info.isInstanceOf[TermRef])) then + + val constructors = if sym.isAllOf(ConstructorProxyModule) then + sym.companionClass.info.member(nme.CONSTRUCTOR).allSymbols + else + val companionApplies = denot.info.member(nme.apply).allSymbols + val classConstructors = if sym.companionClass.exists && !sym.companionClass.isOneOf(AbstractOrTrait) then + sym.companionClass.info.member(nme.CONSTRUCTOR).allSymbols + else Nil + + if companionApplies.exists(_.is(Synthetic)) then + companionApplies ++ classConstructors.filter(!_.isPrimaryConstructor) + else + companionApplies ++ classConstructors + + val result = constructors.map(_.asSeenFrom(denot.info).asSingleDenotation) + .filter(_.symbol.isAccessibleFrom(denot.info)) + + result -> (sym.isAllOf(ConstructorProxyModule) || sym.is(Trait)) + else Nil -> false + + val extraCompletionValues = + val existsApply = extraMethodDenots.exists(_.symbol.name == nme.apply) + + extraMethodDenots.map { methodDenot => + val suffix = findSuffix(methodDenot.symbol) + val affix = if 
methodDenot.symbol.isConstructor && existsApply then + adjustedPath match + case (select @ Select(qual, _)) :: _ => + val start = qual.span.start + val insertRange = select.sourcePos.startPos.withEnd(completionPos.queryEnd).toLsp + + suffix + .withCurrentPrefix(qual.show + ".") + .withNewPrefix(Affix(PrefixKind.New, insertRange = Some(insertRange))) + case _ => + suffix.withNewPrefix(Affix(PrefixKind.New)) + else suffix + val name = undoBacktick(label) + + CompletionValue.ExtraMethod( + owner = denot, + extraMethod = toCompletionValue(name, methodDenot, affix) + ) + } + + if skipOriginalDenot then extraCompletionValues + else + val suffix = findSuffix(denot.symbol) val name = undoBacktick(label) - toCompletionValue( - name, - methodDenot, - suffix - ) - } - end completionsWithSuffix + val denotCompletionValue = toCompletionValue(name, denot, suffix) + denotCompletionValue :: extraCompletionValues + + end completionsWithAffix /** * @return Tuple of completionValues and flag. If the latter boolean value is true @@ -385,7 +424,7 @@ class Completions( // class Fo@@ case (td: TypeDef) :: _ - if Fuzzy.matches( + if CompletionFuzzy.matches( td.symbol.name.decoded.replace(Cursor.value, "").nn, filename ) => @@ -495,13 +534,22 @@ class Completions( val query = completionPos.query if completionMode.is(Mode.Scope) && query.nonEmpty then val visitor = new CompilerSearchVisitor(sym => - if !(sym.is(Flags.ExtensionMethod) || - (sym.maybeOwner.is(Flags.Implicit) && sym.maybeOwner.isClass)) + if Completion.isValidCompletionSymbol(sym, completionMode, isNew) && + !(sym.is(Flags.ExtensionMethod) || (sym.maybeOwner.is(Flags.Implicit) && sym.maybeOwner.isClass)) then indexedContext.lookupSym(sym) match case IndexedContext.Result.InScope => false + case _ if completionMode.is(Mode.ImportOrExport) => + visit( + CompletionValue.Workspace( + label = undoBacktick(sym.decodedName), + denotation = sym, + snippetAffix = CompletionAffix.empty, + importSymbol = sym + ) + ) case _ => - 
completionsWithSuffix( + completionsWithAffix( sym, sym.decodedName, CompletionValue.Workspace(_, _, _, sym) @@ -534,13 +582,13 @@ class Completions( && !sym.isConstructor && !isDefaultVariableSetter if isExtensionMethod then - completionsWithSuffix( + completionsWithAffix( sym, sym.decodedName, CompletionValue.Extension(_, _, _) ).map(visit).forall(_ == true) else if isImplicitClassMember then - completionsWithSuffix( + completionsWithAffix( sym, sym.decodedName, CompletionValue.ImplicitClass(_, _, _, sym.maybeOwner), @@ -569,13 +617,36 @@ class Completions( sym.showFullName + sigString else sym.fullName.stripModuleClassSuffix.show + /** If we try to complete TypeName, we should favor types over terms with same name value and without suffix. + */ + def deduplicateCompletions(completions: List[CompletionValue]): List[CompletionValue] = + val (symbolicCompletions, rest) = completions.partition: + _.isInstanceOf[CompletionValue.Symbolic] + + val symbolicCompletionsMap = symbolicCompletions + .collect { case symbolic: CompletionValue.Symbolic => symbolic } + .groupBy(_.symbol.fullName) // we somehow have to ignore proxy type + + val filteredSymbolicCompletions = symbolicCompletionsMap.filter: (name, denots) => + lazy val existsTypeWithoutSuffix: Boolean = !symbolicCompletionsMap + .get(name.toTypeName) + .forall(_.forall(sym => sym.snippetAffix.suffixes.nonEmpty)) + + (completionMode.is(Mode.Term) && !completionMode.is(Mode.ImportOrExport)) || + // show non synthetic symbols + // companion test should not result TrieMap[K, V] + (name.isTermName && !existsTypeWithoutSuffix) || + name.isTypeName + .toList.unzip._2.flatten + + filteredSymbolicCompletions ++ rest + extension (l: List[CompletionValue]) def filterInteresting( qualType: Type = ctx.definitions.AnyType, enrich: Boolean = true ): (List[CompletionValue], SymbolSearch.Result) = - - val isSeen = mutable.Set.empty[String] + val alreadySeen = mutable.Set.empty[String] val buf = List.newBuilder[CompletionValue] def 
visit(head: CompletionValue): Boolean = val (id, include) = @@ -585,15 +656,13 @@ class Completions( case ck: CompletionValue.CaseKeyword => (ck.label, true) case symOnly: CompletionValue.Symbolic => val sym = symOnly.symbol - val name = SemanticdbSymbols.symbolName(sym) - val nameId = - if sym.isClass || sym.is(Module) then - // drop #|. at the end to avoid duplication - name.substring(0, name.length() - 1).nn - else name + val name = symOnly match + case CompletionValue.ExtraMethod(owner, extraMethod) => + SemanticdbSymbols.symbolName(owner.symbol) + SemanticdbSymbols.symbolName(extraMethod.symbol) + case _ => SemanticdbSymbols.symbolName(sym) val suffix = - if symOnly.snippetSuffix.addLabelSnippet then "[]" else "" - val id = nameId + suffix + if symOnly.snippetAffix.addLabelSnippet then "[]" else "" + val id = name + suffix val include = includeSymbol(sym) (id, include) case kw: CompletionValue.Keyword => (kw.label, true) @@ -604,8 +673,8 @@ class Completions( (fileSysMember.label, true) case ii: CompletionValue.IvyImport => (ii.label, true) - if !isSeen(id) && include then - isSeen += id + if !alreadySeen(id) && include then + alreadySeen += id buf += head true else false @@ -615,12 +684,9 @@ class Completions( if enrich then val searchResult = - enrichWithSymbolSearch(visit, qualType).getOrElse( - SymbolSearch.Result.COMPLETE - ) - (buf.result, searchResult) - else (buf.result, SymbolSearch.Result.COMPLETE) - + enrichWithSymbolSearch(visit, qualType).getOrElse(SymbolSearch.Result.COMPLETE) + (deduplicateCompletions(buf.result), searchResult) + else (deduplicateCompletions(buf.result), SymbolSearch.Result.COMPLETE) end filterInteresting end extension @@ -704,18 +770,24 @@ class Completions( relevance end symbolRelevance + def computeRelevance(sym: Symbol, completionValue: CompletionValue.Symbolic) = + completionValue match + case _: CompletionValue.Override => + var penalty = symbolRelevance(sym) + // show the abstract members first + if !sym.is(Deferred) then 
penalty |= MemberOrdering.IsNotAbstract + penalty + case _: CompletionValue.Workspace => + symbolRelevance(sym) | (IsWorkspaceSymbol + sym.name.show.length()) + case _ => symbolRelevance(sym) + completion match - case ov: CompletionValue.Override => - var penalty = symbolRelevance(ov.symbol) - // show the abstract members first - if !ov.symbol.is(Deferred) then penalty |= MemberOrdering.IsNotAbstract - penalty - case CompletionValue.Workspace(_, denot, _, _) => - symbolRelevance(denot.symbol) | (IsWorkspaceSymbol + denot.name.show.length()) + case CompletionValue.ExtraMethod(owner, extraMethod) => + computeRelevance(owner.symbol, extraMethod) case sym: CompletionValue.Symbolic => - symbolRelevance(sym.symbol) - case _ => - Int.MaxValue + computeRelevance(sym.symbol, sym) + case _ => Int.MaxValue + end computeRelevancePenalty private lazy val isEvilMethod: Set[Name] = Set[Name]( @@ -823,6 +895,7 @@ class Completions( def priority(v: CompletionValue): Int = v match case _: CompletionValue.Compiler => 0 + case CompletionValue.ExtraMethod(_, _: CompletionValue.Compiler) => 0 case _ => 1 priority(o1) - priority(o2) @@ -862,6 +935,23 @@ class Completions( prioritizeCaseKeyword || prioritizeNamed end compareCompletionValue + def methodScore(v: CompletionValue.Symbolic)(using Context): Int = + val sym = v.symbol + val workspacePenalty = v match + case CompletionValue.ExtraMethod(_, _: CompletionValue.Workspace) => 5 + case _: CompletionValue.Workspace => 5 + case _ => 0 + + val isExtraMethod = v.isInstanceOf[CompletionValue.ExtraMethod] + val methodPenalty = + if isNew && sym.isConstructor then -1 + else if isExtraMethod && !sym.isConstructor then 1 + else if isExtraMethod then 2 + else if !sym.isAllOf(SyntheticModule) then 3 + else 4 + + workspacePenalty + methodPenalty + override def compare(o1: CompletionValue, o2: CompletionValue): Int = (o1, o2) match case (o1: CompletionValue.NamedArg, o2: CompletionValue.NamedArg) => @@ -881,32 +971,39 @@ class Completions( val 
byLocalSymbol = compareLocalSymbols(s1, s2) if byLocalSymbol != 0 then byLocalSymbol else - val byRelevance = compareByRelevance(o1, o2) - if byRelevance != 0 then byRelevance + val byFuzzy = Integer.compare( + fuzzyScore(sym1), + fuzzyScore(sym2) + ) + if byFuzzy != 0 then byFuzzy else - val byFuzzy = Integer.compare( - fuzzyScore(sym1), - fuzzyScore(sym2) - ) - if byFuzzy != 0 then byFuzzy + val byRelevance = compareByRelevance(o1, o2) + if byRelevance != 0 then byRelevance else - val byIdentifier = IdentifierComparator.compare( - s1.name.show, - s2.name.show + val byMethodScore = Integer.compare( + methodScore(sym1), + methodScore(sym2) ) - if byIdentifier != 0 then byIdentifier + if byMethodScore != 0 then byMethodScore else - val byOwner = - s1.owner.fullName.toString - .compareTo(s2.owner.fullName.toString) - if byOwner != 0 then byOwner + val byIdentifier = IdentifierComparator.compare( + s1.name.show, + s2.name.show + ) + if byIdentifier != 0 then byIdentifier else - val byParamCount = Integer.compare( - s1.paramSymss.flatten.size, - s2.paramSymss.flatten.size - ) - if byParamCount != 0 then byParamCount - else s1.detailString.compareTo(s2.detailString) + val byOwner = + s1.owner.fullName.toString + .compareTo(s2.owner.fullName.toString) + if byOwner != 0 then byOwner + else + val byParamCount = Integer.compare( + s1.paramSymss.flatten.size, + s2.paramSymss.flatten.size + ) + if byParamCount != 0 then byParamCount + else s1.detailString.compareTo(s2.detailString) + end if end if end if end if diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/FilenameCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/FilenameCompletions.scala index a9dfa55f89bd..8d2e97856e82 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/FilenameCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/FilenameCompletions.scala @@ -4,7 +4,7 @@ package completions import 
dotty.tools.dotc.ast.tpd.TypeDef import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Flags -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* object FilenameCompletions: diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala index 2a8ead70ea33..2e39c17b24b3 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/InterpolatorCompletions.scala @@ -12,12 +12,10 @@ import dotty.tools.dotc.core.Contexts.Context import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Flags.* import dotty.tools.dotc.core.Symbols.Symbol -import dotty.tools.dotc.util.Spans import dotty.tools.dotc.core.Types.Type -import dotty.tools.dotc.util.SourcePosition import dotty.tools.pc.CompilerSearchVisitor import dotty.tools.pc.IndexedContext -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l @@ -112,18 +110,17 @@ object InterpolatorCompletions: buildTargetIdentifier: String )(using Context, ReportContext): List[CompletionValue] = def newText( - name: String, - suffix: Option[String], + label: String, + affix: CompletionAffix , identOrSelect: Ident | Select ): String = - val snippetCursor = suffixEnding(suffix, areSnippetsSupported) + val snippetCursor = suffixEnding(affix.toSuffixOpt, areSnippetsSupported) new StringBuilder() .append('{') - .append( - text.substring(identOrSelect.span.start, identOrSelect.span.end) - ) + .append(affix.toPrefix) // we use toPrefix here, because previous prefix is added in the next step + .append(text.substring(identOrSelect.span.start, identOrSelect.span.end)) .append('.') - .append(name.backticked) + .append(label.backticked) .append(snippetCursor) .append('}') 
.toString @@ -155,14 +152,14 @@ object InterpolatorCompletions: sym.name.toString() ) => val label = sym.name.decoded - completions.completionsWithSuffix( + completions.completionsWithAffix( sym, label, - (name, denot, suffix) => + (name, denot, affix) => CompletionValue.Interpolator( denot.symbol, label, - Some(newText(name, suffix.toEditOpt, identOrSelect)), + Some(newText(name, affix, identOrSelect)), Nil, Some(completionPos.originalCursorPosition.withStart(identOrSelect.span.start).toLsp), // Needed for VS Code which will not show the completion otherwise @@ -252,16 +249,18 @@ object InterpolatorCompletions: interpolatorEdit ++ dollarEdits end additionalEdits - def newText(symbolName: String, suffix: Option[String]): String = + def newText(symbolName: String, affix: CompletionAffix): String = val out = new StringBuilder() val identifier = symbolName.backticked val symbolNeedsBraces = interpolator.needsBraces || identifier.startsWith("`") || - suffix.isDefined + affix.toSuffixOpt.isDefined || + affix.toPrefix.nonEmpty if symbolNeedsBraces && !hasOpeningBrace then out.append('{') + out.append(affix.toInsertPrefix) out.append(identifier) - out.append(suffixEnding(suffix, areSnippetsSupported)) + out.append(suffixEnding(affix.toSuffixOpt, areSnippetsSupported)) if symbolNeedsBraces && !hasClosingBrace then out.append('}') out.toString end newText @@ -286,14 +285,14 @@ object InterpolatorCompletions: sym.name.decoded ) && !sym.isType => val label = sym.name.decoded - completions.completionsWithSuffix( + completions.completionsWithAffix( sym, label, - (name, denot, suffix) => + (name, denot, affix) => CompletionValue.Interpolator( denot.symbol, label, - Some(newText(name, suffix.toEditOpt)), + Some(newText(name, affix)), additionalEdits(), Some(nameRange), None, diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala index 
7f1d92305309..48c6bcfe8317 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/MatchCaseCompletions.scala @@ -15,7 +15,6 @@ import dotty.tools.toOption import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.core.Constants.Constant import dotty.tools.dotc.core.Contexts.Context -import dotty.tools.dotc.core.Definitions import dotty.tools.dotc.core.Denotations.Denotation import dotty.tools.dotc.core.Flags import dotty.tools.dotc.core.Flags.* @@ -34,7 +33,7 @@ import dotty.tools.pc.AutoImports.SymbolImport import dotty.tools.pc.MetalsInteractive.* import dotty.tools.pc.printer.ShortenedTypePrinter import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l @@ -88,15 +87,15 @@ object CaseKeywordCompletion: ) => val args = head.argTypes.init if args.length > 1 then - Some(definitions.tupleType(args).widen.metalsDealias) - else args.headOption.map(_.widen.metalsDealias) + Some(definitions.tupleType(args).widen.deepDealias) + else args.headOption.map(_.widen.deepDealias) case _ => None case _ => None case sel => - Some(sel.tpe.widen.metalsDealias) + Some(sel.tpe.widen.deepDealias) selTpe - .map { selTpe => + .collect { case selTpe if selTpe != NoType => val selectorSym = selTpe.typeSymbol // Special handle case when selector is a tuple or `FunctionN`. if definitions.isTupleClass(selectorSym) || definitions.isFunctionClass( @@ -158,7 +157,7 @@ object CaseKeywordCompletion: indexedContext.scopeSymbols .foreach(s => - val ts = s.info.metalsDealias.typeSymbol + val ts = s.info.deepDealias.typeSymbol if isValid(ts) then visit(autoImportsGen.inferSymbolImport(ts)) ) // Step 2: walk through known subclasses of sealed types. 
@@ -261,8 +260,8 @@ object CaseKeywordCompletion: clientSupportsSnippets ) - val tpeStr = printer.tpe(selector.tpe.widen.metalsDealias.bounds.hi) - val tpe = selector.typeOpt.widen.metalsDealias.bounds.hi match + val tpeStr = printer.tpe(selector.tpe.widen.deepDealias.bounds.hi) + val tpe = selector.typeOpt.widen.deepDealias.bounds.hi match case tr @ TypeRef(_, _) => tr.underlying case t => t diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala index 6f244d9a3414..647b151a635b 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/NamedArgCompletions.scala @@ -2,7 +2,6 @@ package dotty.tools.pc.completions import scala.util.Try -import dotty.tools.dotc.ast.NavigateAST import dotty.tools.dotc.ast.Trees.ValDef import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.ast.untpd @@ -30,7 +29,7 @@ import dotty.tools.dotc.core.Types.TypeBounds import dotty.tools.dotc.core.Types.WildcardType import dotty.tools.dotc.util.SourcePosition import dotty.tools.pc.IndexedContext -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import scala.annotation.tailrec object NamedArgCompletions: @@ -439,4 +438,4 @@ case class JustSymbol(symbol: Symbol)(using Context) extends ParamSymbol: def info: Type = symbol.info case class RefinedSymbol(symbol: Symbol, name: Name, info: Type) - extends ParamSymbol \ No newline at end of file + extends ParamSymbol diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala index 8d96396999da..df0bb70b596c 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala +++ 
b/presentation-compiler/src/main/dotty/tools/pc/completions/OverrideCompletions.scala @@ -29,7 +29,7 @@ import dotty.tools.pc.AutoImports.AutoImport import dotty.tools.pc.AutoImports.AutoImportsGenerator import dotty.tools.pc.printer.ShortenedTypePrinter import dotty.tools.pc.printer.ShortenedTypePrinter.IncludeDefaultParam -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j as l diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/ScalaCliCompletions.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/ScalaCliCompletions.scala index fce35ab69ce3..e2a0a033ee6b 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/ScalaCliCompletions.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/ScalaCliCompletions.scala @@ -4,7 +4,7 @@ import scala.meta.internal.mtags.CoursierComplete import dotty.tools.dotc.ast.tpd.* import dotty.tools.dotc.util.SourcePosition -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* class ScalaCliCompletions( coursierComplete: CoursierComplete, diff --git a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala index a7cf1a703a1f..559e199f3449 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala @@ -29,7 +29,7 @@ import dotty.tools.pc.AutoImports.ImportSel.Rename import dotty.tools.pc.IndexedContext import dotty.tools.pc.IndexedContext.Result import dotty.tools.pc.Params -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.eclipse.lsp4j.TextEdit diff --git a/presentation-compiler/src/main/dotty/tools/pc/utils/MtagsEnrichments.scala 
b/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala similarity index 95% rename from presentation-compiler/src/main/dotty/tools/pc/utils/MtagsEnrichments.scala rename to presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala index e4385392973f..dd2fb3107c49 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/utils/MtagsEnrichments.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala @@ -4,6 +4,7 @@ import scala.annotation.tailrec import scala.meta.internal.jdk.CollectionConverters.* import scala.meta.internal.mtags.CommonMtagsEnrichments import scala.meta.internal.mtags.KeywordWrapper +import scala.meta.pc.ContentType import scala.meta.pc.OffsetParams import scala.meta.pc.RangeParams import scala.meta.pc.SymbolDocumentation @@ -20,8 +21,7 @@ import dotty.tools.dotc.core.Names.* import dotty.tools.dotc.core.StdNames.* import dotty.tools.dotc.core.SymDenotations.NoDenotation import dotty.tools.dotc.core.Symbols.* -import dotty.tools.dotc.core.Types.AppliedType -import dotty.tools.dotc.core.Types.Type +import dotty.tools.dotc.core.Types.* import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.util.SourcePosition @@ -31,7 +31,7 @@ import dotty.tools.pc.SemanticdbSymbols import org.eclipse.lsp4j as l -object MtagsEnrichments extends CommonMtagsEnrichments: +object InteractiveEnrichments extends CommonMtagsEnrichments: extension (driver: InteractiveDriver) @@ -261,7 +261,7 @@ object MtagsEnrichments extends CommonMtagsEnrichments: } extension (search: SymbolSearch) - def symbolDocumentation(symbol: Symbol)(using + def symbolDocumentation(symbol: Symbol, contentType: ContentType = ContentType.MARKDOWN)(using Context ): Option[SymbolDocumentation] = def toSemanticdbSymbol(symbol: Symbol) = @@ -281,6 +281,7 @@ object MtagsEnrichments extends CommonMtagsEnrichments: val documentation = 
search.documentation( sym, () => parentSymbols.iterator.map(toSemanticdbSymbol).toList.asJava, + contentType, ) documentation.nn.toScala end symbolDocumentation @@ -399,11 +400,16 @@ object MtagsEnrichments extends CommonMtagsEnrichments: end extension extension (tpe: Type) - def metalsDealias(using Context): Type = + def deepDealias(using Context): Type = tpe.dealias match case app @ AppliedType(tycon, params) => - // we dealias applied type params by hand, because `dealias` doesn't do it - AppliedType(tycon, params.map(_.metalsDealias)) + AppliedType(tycon, params.map(_.deepDealias)) + case aliasingBounds: AliasingBounds => + aliasingBounds.derivedAlias(aliasingBounds.alias.dealias) + case TypeBounds(lo, hi) => + TypeBounds(lo.dealias, hi.dealias) + case RefinedType(parent, name, refinedInfo) => + RefinedType(parent.dealias, name, refinedInfo.deepDealias) case dealised => dealised -end MtagsEnrichments +end InteractiveEnrichments diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala index 964f6a6894a2..3e5269b947a0 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BaseCompletionSuite.scala @@ -9,7 +9,7 @@ import scala.meta.pc.CancelToken import scala.language.unsafeNulls import dotty.tools.pc.completions.CompletionSource -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import dotty.tools.pc.utils.{TestCompletions, TextEdits} import org.eclipse.lsp4j.{CompletionItem, CompletionList} @@ -123,7 +123,7 @@ abstract class BaseCompletionSuite extends BasePCSuite: if (assertSingleItem && items.length != 1) then fail( - s"expected single completion item, obtained ${items.length} items.\n${items}" + s"expected single completion item, obtained ${items.length} items.\n${items.map(_.getLabel.nn + "\n")}" ) if (items.size <= 
itemIndex) then @@ -207,7 +207,8 @@ abstract class BaseCompletionSuite extends BasePCSuite: includeDetail: Boolean = true, filename: String = "A.scala", filter: String => Boolean = _ => true, - enablePackageWrap: Boolean = true + enablePackageWrap: Boolean = true, + includeCompletionKind: Boolean = false, ): Unit = val out = new StringBuilder() val withPkg = @@ -221,13 +222,14 @@ abstract class BaseCompletionSuite extends BasePCSuite: filteredItems.foreach { item => val label = TestCompletions.getFullyQualifiedLabel(item) val commitCharacter = - if (includeCommitCharacter) + if includeCommitCharacter then Option(item.getCommitCharacters) .getOrElse(Collections.emptyList()) .asScala .mkString(" (commit: '", " ", "')") else "" val documentation = doc(item.getDocumentation) + val completionKind = Option.when(includeCompletionKind)(s" (${item.getKind.toString})").getOrElse("") if (includeDocs && documentation.nonEmpty) { out.append("> ").append(documentation).append("\n") } @@ -244,6 +246,7 @@ abstract class BaseCompletionSuite extends BasePCSuite: "" }) .append(commitCharacter) + .append(completionKind) .append("\n") } val completionSources = filteredItems diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseHoverSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseHoverSuite.scala index 0b8d663f8b33..0f385631d9dc 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BaseHoverSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BaseHoverSuite.scala @@ -5,7 +5,7 @@ import java.nio.file.Paths import scala.meta.internal.metals.{CompilerOffsetParams, CompilerRangeParams} import scala.language.unsafeNulls -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import dotty.tools.pc.utils.{RangeReplace, TestHovers} abstract class BaseHoverSuite diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseInlayHintsSuite.scala 
b/presentation-compiler/test/dotty/tools/pc/base/BaseInlayHintsSuite.scala index 94b00ca82aea..78635e540c43 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BaseInlayHintsSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BaseInlayHintsSuite.scala @@ -18,6 +18,7 @@ class BaseInlayHintsSuite extends BasePCSuite { base: String, expected: String, kind: Option[Int] = None, + hintsInPatternMatch: Boolean = false ): Unit = def pkgWrap(text: String) = if (text.contains("package")) text @@ -35,7 +36,8 @@ class BaseInlayHintsSuite extends BasePCSuite { true, true, true, - true + true, + hintsInPatternMatch ) val inlayHints = presentationCompiler @@ -49,8 +51,8 @@ class BaseInlayHintsSuite extends BasePCSuite { val obtained = TestInlayHints.applyInlayHints(withPkg, inlayHints) assertNoDiff( + pkgWrap(expected), obtained, - pkgWrap(expected) ) } \ No newline at end of file diff --git a/presentation-compiler/test/dotty/tools/pc/base/BasePcDefinitionSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BasePcDefinitionSuite.scala index 8269d4ce1c44..58c2bcdb171c 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BasePcDefinitionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BasePcDefinitionSuite.scala @@ -8,7 +8,7 @@ import scala.language.unsafeNulls import dotty.tools.dotc.util.Spans.Span import dotty.tools.dotc.util.{SourceFile, SourcePosition} -import dotty.tools.pc.utils.MtagsEnrichments.toLsp +import dotty.tools.pc.utils.InteractiveEnrichments.toLsp import dotty.tools.pc.utils.TextEdits import org.eclipse.lsp4j.TextEdit diff --git a/presentation-compiler/test/dotty/tools/pc/base/BaseSignatureHelpSuite.scala b/presentation-compiler/test/dotty/tools/pc/base/BaseSignatureHelpSuite.scala index ca647502fabf..5f73b108e4de 100644 --- a/presentation-compiler/test/dotty/tools/pc/base/BaseSignatureHelpSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/base/BaseSignatureHelpSuite.scala @@ -43,7 +43,7 @@ abstract 
class BaseSignatureHelpSuite extends BasePCSuite: out .append(signature.getLabel) .append("\n") - if (result.getActiveSignature == i && result.getActiveParameter != null && signature.getParameters.size() > 0) { + if (result.getActiveSignature == i && result.getActiveParameter != null && result.getActiveParameter() >= 0 && signature.getParameters.size() > 0) { val param = signature.getParameters.get(result.getActiveParameter) val label = param.getLabel.getLeft() /* We need to find the label of the active parameter and show ^ at that spot diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala index 61239b535e1c..f4bfc806dbb3 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala @@ -96,7 +96,7 @@ class CompletionArgSuite extends BaseCompletionSuite: """|age = : Int |followers = : Int |Main test - |User test + |User(name: String = ..., age: Int = ..., address: String = ..., followers: Int = ...): User |""".stripMargin, topLines = Option(4) ) @@ -130,7 +130,7 @@ class CompletionArgSuite extends BaseCompletionSuite: """|age = : Int |followers = : Int |Main test - |User test + |User(name: String = ..., age: Int = ..., address: String = ..., followers: Int = ...): User |""".stripMargin, topLines = Option(4) ) @@ -1119,4 +1119,4 @@ class CompletionArgSuite extends BaseCompletionSuite: |""".stripMargin, """x: Int |x = : Any""".stripMargin, - ) \ No newline at end of file + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCaseSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCaseSuite.scala index 521880b3a84b..e72ee5221d91 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCaseSuite.scala +++ 
b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCaseSuite.scala @@ -763,14 +763,14 @@ class CompletionCaseSuite extends BaseCompletionSuite: | |object O { | val x: Foo | Bar = ??? - | val y = List(x).map{ ca@@ } + | val y = List(x).map{ca@@ } |}""".stripMargin, s"""|case class Foo(a: Int) |case class Bar(b: Int) | |object O { | val x: Foo | Bar = ??? - | val y = List(x).map{ + | val y = List(x).map{ |\tcase Foo(a) => $$0 |\tcase Bar(b) => | } @@ -779,3 +779,36 @@ class CompletionCaseSuite extends BaseCompletionSuite: filter = _.contains("exhaustive") ) + @Test def summonFrom = + check( + """ + |object A { + | import scala.compiletime.summonFrom + | class A + | + | inline def f: Any = summonFrom { + | case x@@: A => ??? // error: ambiguous givens + | } + |} + |""".stripMargin, + "" + ) + + @Test def summonFrom2 = + check( + """ + |object A { + | import scala.compiletime.summonFrom + | + | class A + | given a1: A = new A + | given a2: A = new A + | + | inline def f: Any = summonFrom { + | case x@@: A => ??? // error: ambiguous givens + | } + |} + |""".stripMargin, + "" + ) + diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionDocSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionDocSuite.scala index 45f07b5fb7b1..ec0b6dc20688 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionDocSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionDocSuite.scala @@ -156,8 +156,8 @@ class CompletionDocSuite extends BaseCompletionSuite: |Found documentation for scala/collection/Iterator. |Iterator scala.collection |""".stripMargin, - - includeDocs = true + includeDocs = true, + topLines = Some(1) ) @Test def `scala5` = @@ -182,10 +182,10 @@ class CompletionDocSuite extends BaseCompletionSuite: |} """.stripMargin, """ - |> Found documentation for scala/util/Try. - |Try scala.util |> Found documentation for scala/util/Try.apply(). 
|Try[T](r: => T): Try[T] + |> Found documentation for scala/util/Try. + |Try scala.util |""".stripMargin, includeDocs = true ) @@ -199,7 +199,7 @@ class CompletionDocSuite extends BaseCompletionSuite: """.stripMargin, """ |> Found documentation for scala/collection/mutable/StringBuilder. - |StringBuilder scala.collection.mutable + |StringBuilder(): StringBuilder |""".stripMargin, includeDocs = true, topLines = Some(1) @@ -213,9 +213,9 @@ class CompletionDocSuite extends BaseCompletionSuite: |} """.stripMargin, """ + |Vector[A](elems: A*): Vector[A] |> Found documentation for scala/package.Vector. |Vector scala.collection.immutable - |Vector[A](elems: A*): Vector[A] |""".stripMargin, includeDocs = true ) @@ -228,11 +228,8 @@ class CompletionDocSuite extends BaseCompletionSuite: |} """.stripMargin, """ - |> ### class Catch - |Found documentation for scala/util/control/Exception.Catch# - |### object Catch - |Found documentation for scala/util/control/Exception.Catch. - |Catch[T] - scala.util.control.Exception + |> Found documentation for scala/util/control/Exception.Catch# + |Catch[T](pf: Catcher[T], fin: Option[Finally] = ..., rethrow: Throwable => Boolean = ...): Catch[T] - scala.util.control.Exception |> ### class Catch |Found documentation for scala/util/control/Exception.Catch# |### object Catch @@ -249,8 +246,8 @@ class CompletionDocSuite extends BaseCompletionSuite: | scala.util.Failure@@ |} """.stripMargin, - """|Failure scala.util - |Failure[T](exception: Throwable): Failure[T] + """|Failure[T](exception: Throwable): Failure[T] + |Failure scala.util |""".stripMargin, includeDocs = true ) @@ -264,16 +261,8 @@ class CompletionDocSuite extends BaseCompletionSuite: |} """.stripMargin, """ - |> ### class DynamicVariable - |Found documentation for scala/util/DynamicVariable# - |### object DynamicVariable - |Found documentation for scala/util/DynamicVariable. 
- |DynamicVariable[T] scala.util - |> ### class DynamicVariable - |Found documentation for scala/util/DynamicVariable# - |### object DynamicVariable - |Found documentation for scala/util/DynamicVariable. - |DynamicVariable scala.util + |> Found documentation for scala/util/DynamicVariable# + |DynamicVariable[T](init: T): DynamicVariable[T] |""".stripMargin, includeDocs = true ) @@ -317,6 +306,5 @@ class CompletionDocSuite extends BaseCompletionSuite: |} """.stripMargin, """|myNumbers: Vector[Int] - |myNumbers(i: Int): Int |""".stripMargin ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtensionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtensionSuite.scala index f48ba06f699c..e67c31329c1c 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtensionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtensionSuite.scala @@ -17,12 +17,14 @@ class CompletionExtensionSuite extends BaseCompletionSuite: |def main = 100.inc@@ |""".stripMargin, """|incr: Int (extension) + |asInstanceOf[X0]: X0 + |isInstanceOf[X0]: Boolean |""".stripMargin ) @Test def `simple-old-syntax` = check( - """|package example + """package example | |object Test: | implicit class TestOps(a: Int): @@ -30,8 +32,9 @@ class CompletionExtensionSuite extends BaseCompletionSuite: | |def main = 100.test@@ |""".stripMargin, - """|testOps(b: Int): String (implicit) - |""".stripMargin + """testOps(b: Int): String (implicit) + |""".stripMargin, + topLines = Some(1) ) @Test def `simple2` = @@ -93,8 +96,10 @@ class CompletionExtensionSuite extends BaseCompletionSuite: | |def main = "foo".iden@@ |""".stripMargin, - """|identity: String (implicit) - |""".stripMargin // identity2 won't be available + """|identity: String (implicit) + |""".stripMargin, // identity2 won't be available + filter = _.contains("(implicit)") + ) @Test def `filter-by-type-subtype` = @@ -152,7 +157,8 
@@ class CompletionExtensionSuite extends BaseCompletionSuite: | def incr: Int = num + 1 | |def main = 100.incr - |""".stripMargin + |""".stripMargin, + assertSingleItem = false ) @Test def `simple-edit-old` = @@ -174,7 +180,8 @@ class CompletionExtensionSuite extends BaseCompletionSuite: | def incr: Int = num + 1 | |def main = 100.incr - |""".stripMargin + |""".stripMargin, + assertSingleItem = false ) @Test def `simple-edit-suffix` = @@ -262,6 +269,8 @@ class CompletionExtensionSuite extends BaseCompletionSuite: | def main = 100.inc@@ |""".stripMargin, """|incr: Int (extension) + |asInstanceOf[X0]: X0 + |isInstanceOf[X0]: Boolean |""".stripMargin ) @@ -276,6 +285,8 @@ class CompletionExtensionSuite extends BaseCompletionSuite: | def main = 100.inc@@ |""".stripMargin, """|incr: Int (implicit) + |asInstanceOf[X0]: X0 + |isInstanceOf[X0]: Boolean |""".stripMargin ) @@ -290,6 +301,8 @@ class CompletionExtensionSuite extends BaseCompletionSuite: | def main = 100.inc@@ |""".stripMargin, """|incr: Int (extension) + |asInstanceOf[X0]: X0 + |isInstanceOf[X0]: Boolean |""".stripMargin ) @@ -304,6 +317,8 @@ class CompletionExtensionSuite extends BaseCompletionSuite: | def main = 100.inc@@ |""".stripMargin, """|incr: Int (implicit) + |asInstanceOf[X0]: X0 + |isInstanceOf[X0]: Boolean |""".stripMargin ) @@ -391,7 +406,8 @@ class CompletionExtensionSuite extends BaseCompletionSuite: |testVal: Int (implicit) |testVar: Int (implicit) |testOps(b: Int): String (implicit) - |""".stripMargin + |""".stripMargin, + topLines = Some(4) ) @Test def `implicit-val-edit` = @@ -413,5 +429,6 @@ class CompletionExtensionSuite extends BaseCompletionSuite: | val testVal: Int = 42 | |def main = 100.testVal - |""".stripMargin + |""".stripMargin, + assertSingleItem = false ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtraConstructorSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtraConstructorSuite.scala new file mode 
100644 index 000000000000..010d0b14fa90 --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionExtraConstructorSuite.scala @@ -0,0 +1,565 @@ +package dotty.tools.pc.tests.completion + +import scala.meta.pc.SymbolDocumentation +import scala.language.unsafeNulls + +import dotty.tools.pc.base.BaseCompletionSuite +import dotty.tools.pc.utils.MockEntries + +import org.junit.Test +import org.junit.Ignore +import scala.collection.immutable.ListMapBuilder + +class CompletionExtraConstructorSuite extends BaseCompletionSuite: + + @Test def `no-extra-new-completions-class-1` = + check( + """|object Wrapper: + | class TestClass(x: Int) + | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int): TestClass (Constructor) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `no-extra-new-completions-class-2` = + check( + """|object Wrapper: + | class TestClass() + | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Constructor) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `no-extra-new-completions-class-3` = + check( + """|object Wrapper: + | class TestClass[T](x: T) + | TestCla@@ + |""".stripMargin, + """|TestClass[T](x: T): TestClass[T] (Constructor) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `no-extra-new-completions-case-class-1` = + check( + """|object Wrapper: + | case class TestClass(x: Int) + | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `no-extra-new-completions-case-class-2` = + check( + """|object Wrapper: + | case class TestClass() + | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `no-extra-new-completions-case-class-3` = + check( + """|object Wrapper: + | case class TestClass[T](x: T) + | TestCla@@ + 
|""".stripMargin, + """|TestClass[T](x: T): TestClass[T] (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(x: Int): TestClass (Constructor) + @Test def `extra-new-completions-abstract-class-1` = + check( + """|object Wrapper: + | abstract class TestClass(x: Int) + | TestCla@@ + |""".stripMargin, + """| + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(): TestClass (Constructor) + @Test def `extra-new-completions-abstract-class-2` = + check( + """|object Wrapper: + | abstract class TestClass() + | TestCla@@ + |""".stripMargin, + """| + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass[T](x: T): TestClass[T] (Constructor) + @Test def `extra-new-completions-abstract-class-3` = + check( + """|object Wrapper: + | abstract class TestClass[T](x: T) + | TestCla@@ + |""".stripMargin, + """| + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass (Constructor) + @Test def `extra-new-completions-trait-1` = + check( + """|object Wrapper: + | trait TestClass + | TestCla@@ + |""".stripMargin, + """| + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `extra-new-completions-class-1` = + check( + """|object Wrapper: + | class TestClass(x: Int) + | object TestClass: + | def apply(x: Int, y: Int): TestClass = TestClass(x + y) + | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int, y: Int): TestClass (Method) + |new TestClass(x: Int): TestClass (Constructor) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def 
`extra-new-completions-class-2` = + check( + """|object Wrapper: + | class TestClass(x: Int) + | object TestClass: + | def apply(x: Int): TestClass = TestClass(x) + | TestCla@@ + |} + |""".stripMargin, + """|TestClass(x: Int): TestClass (Method) + |new TestClass(x: Int): TestClass (Constructor) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `extra-new-completions-class-3` = + check( + """|object Wrapper: + | class TestClass() + | object TestClass: + | def apply(): TestClass = TestClass(1) + | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Method) + |new TestClass(): TestClass (Constructor) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(x: Int): TestClass (Constructor) + @Test def `extra-new-completions-abstract-class-with-companion-1` = + check( + """|object Wrapper: + | abstract class TestClass(x: Int) + | object TestClass: + | def apply(x: Int, y: Int): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int, y: Int): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(x: Int): TestClass (Constructor) + @Test def `extra-new-completions-abstract-class-with-companion-2` = + check( + """|object Wrapper: + | abstract class TestClass(x: Int) + | object TestClass: + | def apply(x: Int): TestClass = ??? 
+ | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(): TestClass (Constructor) + @Test def `extra-new-completions-abstract-class-with-companion-3` = + check( + """|object Wrapper: + | abstract class TestClass() + | object TestClass: + | def apply(): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(x: Int): TestClass (Constructor) + @Test def `extra-new-completions-trait-with-companion-1` = + check( + """|object Wrapper: + | trait TestClass(x: Int) + | object TestClass: + | def apply(x: Int, y: Int): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int, y: Int): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(x: Int): TestClass (Constructor) + @Test def `extra-new-completions-trait-with-companion-2` = + check( + """|object Wrapper: + | trait TestClass(x: Int) + | object TestClass: + | def apply(x: Int): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(): TestClass (Constructor) + @Test def `extra-new-completions-trait-with-companion-3` = + check( + """|object Wrapper: + | trait TestClass() + | object TestClass: + | def apply(): TestClass = ??? 
+ | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + // This test should have new TestClass completion without parentheses. The actual issue is with printer, edit text is correct + // TODO We first need to detect support when to add additional braces / colon + // missing new TestClass(): TestClass (Constructor) + @Test def `extra-new-completions-trait-with-companion-4` = + check( + """|object Wrapper: + | trait TestClass + | object TestClass: + | def apply(): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + checkSnippet( + """|object Wrapper: + | trait TestClass + | object TestClass: + | def apply(): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass() + |TestClass + |""".stripMargin, + ) + + @Test def `multiple-extra-new-constructors-class-1` = + check( + """|object Wrapper: + | class TestClass(): + | def this(x: Int) = this() + | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Constructor) + |TestClass(x: Int): TestClass (Constructor) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `multiple-extra-new-constructors-class-2` = + check( + """|object Wrapper: + | class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Constructor) + |TestClass(x: Int): TestClass (Constructor) + |TestClass(x: Int, y: Int): TestClass (Constructor) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `multiple-extra-new-constructors-with-companion-2` = + check( + """|object Wrapper: + | class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | object TestClass: + | def apply(z: Int): TestClass = ??? 
+ | TestCla@@ + |""".stripMargin, + """|TestClass(z: Int): TestClass (Method) + |new TestClass(): TestClass (Constructor) + |new TestClass(x: Int): TestClass (Constructor) + |new TestClass(x: Int, y: Int): TestClass (Constructor) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `multiple-extra-new-constructors-with-companion-3` = + check( + """|object Wrapper: + | class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | object TestClass: + | def apply(z: Int): TestClass = ??? + | def apply(z: Int, w: Int): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass(z: Int): TestClass (Method) + |TestClass(z: Int, w: Int): TestClass (Method) + |new TestClass(): TestClass (Constructor) + |new TestClass(x: Int): TestClass (Constructor) + |new TestClass(x: Int, y: Int): TestClass (Constructor) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `multiple-extra-new-constructors-with-companion-same-signature-class` = + check( + """|object Wrapper: + | class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | object TestClass: + | def apply(x: Int): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int): TestClass (Method) + |new TestClass(): TestClass (Constructor) + |new TestClass(x: Int): TestClass (Constructor) + |new TestClass(x: Int, y: Int): TestClass (Constructor) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `multiple-extra-new-constructors-with-companion-same-signature-case-class` = + check( + """|object Wrapper: + | case class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | object TestClass: + | def apply(x: Int): TestClass = ??? 
+ | TestCla@@ + |""".stripMargin, + """|TestClass(): TestClass (Method) + |TestClass(x: Int): TestClass (Method) + |new TestClass(x: Int): TestClass (Constructor) + |new TestClass(x: Int, y: Int): TestClass (Constructor) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `multiple-extra-new-constructors-with-companion-same-signature-trait` = + check( + """|object Wrapper: + | trait TestClass + | object TestClass: + | def apply(x: Int): TestClass = ??? + | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + + // TODO We first need to detect support when to add additional braces / colon + // missing: + // new TestClass(): TestClass (Constructor) + // new TestClass(x: Int): TestClass (Constructor) + // new TestClass(x: Int, y: Int): TestClass (Constructor) + @Test def `multiple-extra-new-constructors-with-companion-same-signature-abstract` = + check( + """|object Wrapper: + | abstract class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | object TestClass: + | def apply(x: Int): TestClass = ??? 
+ | TestCla@@ + |""".stripMargin, + """|TestClass(x: Int): TestClass (Method) + |TestClass test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `no-extra-completions-in-type-mode-1` = + check( + """|object Wrapper: + | class TestClass() + | val x: TestCla@@ + |""".stripMargin, + """|TestClass test.Wrapper (Class) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `no-extra-completions-in-type-mode-2` = + check( + """|object Wrapper: + | class TestClass() + | val x: TestCla@@ + |""".stripMargin, + """|TestClass test.Wrapper (Class) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `no-extra-completions-in-type-mode-3` = + check( + """|object Wrapper: + | class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | object TestClass: + | def apply(x: Int): TestClass = ??? + | val x: TestCla@@ + |""".stripMargin, + """|TestClass test.Wrapper (Class) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `workspace-no-extra-completions-in-type-mode-4` = + check( + """|object Wrapper: + | class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | object TestClass: + | def apply(x: Int): TestClass = ??? + |object M { + | val x: TestCla@@ + |} + |""".stripMargin, + """|TestClass - test.Wrapper (Class) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `workspace-multiple-extra-new-constructors` = + check( + """|object Wrapper: + | class TestClass(): + | def this(x: Int) = this() + | def this(x: Int, y: Int) = this() + | object TestClass: + | def apply(x: Int): TestClass = ??? 
+ |object M { + | TestCla@@ + |} + |""".stripMargin, + """|TestClass(x: Int): TestClass - test.Wrapper (Method) + |new TestClass(): TestClass - test.Wrapper (Constructor) + |new TestClass(x: Int): TestClass - test.Wrapper (Constructor) + |new TestClass(x: Int, y: Int): TestClass - test.Wrapper (Constructor) + |TestClass - test.Wrapper (Module) + |""".stripMargin, + includeCompletionKind = true + ) + + @Test def `prepend-new` = + checkSnippet( + """|object Wrapper: + | case class TestClass(x: Int) + | object TestClass: + | def apply(x: Int): TestClass = ??? + |object Main { + | TestClas@@ + |} + |""".stripMargin, + """|TestClass($0) + |new TestClass + |TestClass + |""".stripMargin + ) + + @Test def `prepend-new-fully-qualified-path` = + checkSnippet( + """|object Wrapper: + | case class TestClass(x: Int) + | object TestClass: + | def apply(x: Int): TestClass = ??? + |object Main { + | Wrapper.Test@@ + |} + |""".stripMargin, + """|TestClass($0) + |new Wrapper.TestClass + |TestClass + |""".stripMargin + ) + + @Test def `dont-include-private-members` = + check( + """|object TestObject: + | private def apply(i: Int) = i + |object Main: + | TestObject@@ + |""".stripMargin, + """|TestObject test + |""".stripMargin + ) + diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionInterpolatorSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionInterpolatorSuite.scala index d9dc635ce21a..08cc1535fd56 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionInterpolatorSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionInterpolatorSuite.scala @@ -4,6 +4,7 @@ import dotty.tools.pc.base.BaseCompletionSuite import org.junit.runners.MethodSorters import org.junit.{FixMethodOrder, Test} +import org.junit.Ignore @FixMethodOrder(MethodSorters.NAME_ASCENDING) class CompletionInterpolatorSuite extends BaseCompletionSuite: @@ -542,7 +543,7 @@ class 
CompletionInterpolatorSuite extends BaseCompletionSuite: |} |""".stripMargin, """s"Hello $hello@@"""".stripMargin, - """s"Hello $helloMethod"""".stripMargin, + """s"Hello ${helloMethod($0)}"""".stripMargin, filter = _.contains("a: Int") ) @@ -627,10 +628,10 @@ class CompletionInterpolatorSuite extends BaseCompletionSuite: |} |""".stripMargin, assertSingleItem = false, - // Scala 3 has an additional Paths() completion - itemIndex = 2 + filter = _.contains("java.nio.file") ) + @Test def `auto-imports-prefix-with-interpolator` = checkEdit( """| @@ -644,7 +645,6 @@ class CompletionInterpolatorSuite extends BaseCompletionSuite: | s"this is an interesting ${java.nio.file.Paths}" |} |""".stripMargin, - // Scala 3 has an additional Paths object completion itemIndex = 1, assertSingleItem = false ) @@ -745,7 +745,7 @@ class CompletionInterpolatorSuite extends BaseCompletionSuite: |object Main { | val a = s"${ListBuffer($0)}"" |}""".stripMargin, - filter = _.contains("[A]") + assertSingleItem = false, ) @Test def `dont-show-when-writing-before-dollar` = @@ -780,3 +780,62 @@ class CompletionInterpolatorSuite extends BaseCompletionSuite: |""".stripMargin, "host: String" ) + + @Test def `prepend-new-missing-interpolator` = + checkSnippet( + """|case class TestClass(x: Int) + |object TestClass: + | def apply(x: Int): TestClass = ??? + |object Main: + | "$TestClas@@" + |""".stripMargin, + """|{TestClass($0)} + |{new TestClass$0} + |TestClass$0 + |""".stripMargin + ) + + @Ignore("This case is not yet supported by metals") + @Test def `prepend-new-missing-interpolator-with-prefix` = + checkSnippet( + """|object Wrapper: + | case class TestClass(x: Int) + | object TestClass: + | def apply(x: Int): TestClass = ??? 
+ |object Main: + | "$Wrapper.TestClas@@" + |""".stripMargin, + """|{Wrapper.TestClass($0)} + |{new Wrapper.TestClass$0} + |{Wrapper.TestClass$0} + |""".stripMargin + ) + + @Test def `prepend-new-with-prefix` = + checkSnippet( + """|object Wrapper: + | case class TestClass(x: Int) + | object TestClass: + | def apply(x: Int): TestClass = ??? + |object Main: + | s"$Wrapper.TestClas@@" + |""".stripMargin, + """|{Wrapper.TestClass($0)} + |{new Wrapper.TestClass$0} + |{Wrapper.TestClass$0} + |""".stripMargin + ) + + @Test def `prepend-new-interpolator` = + checkSnippet( + """|case class TestClass(x: Int) + |object TestClass: + | def apply(x: Int): TestClass = ??? + |object Main: + | s"$TestClas@@" + |""".stripMargin, + """|{TestClass($0)} + |{new TestClass} + |TestClass + |""".stripMargin + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala index c828cd4e6e67..bf7077d47b3f 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionKeywordSuite.scala @@ -151,8 +151,7 @@ class CompletionKeywordSuite extends BaseCompletionSuite: """|value: Int |val |var - |varargs(): varargs - |varargs - scala.annotation + |varargs(): varargs - scala.annotation |""".stripMargin ) @@ -169,8 +168,7 @@ class CompletionKeywordSuite extends BaseCompletionSuite: |""".stripMargin, """|val |var - |varargs(): varargs - |varargs - scala.annotation + |varargs(): varargs - scala.annotation |""".stripMargin ) @@ -203,8 +201,7 @@ class CompletionKeywordSuite extends BaseCompletionSuite: |} |""".stripMargin, """|value: Int - |varargs(): varargs - |varargs - scala.annotation""".stripMargin + |varargs(): varargs - scala.annotation""".stripMargin ) @Test def `val-trailing-space` = @@ -364,9 +361,7 @@ class CompletionKeywordSuite extends 
BaseCompletionSuite: | val x: Map[Int, new@@] |} """.stripMargin, - "", - // to avoid newMain annotation - filter = str => !str.contains("newMain") + "" ) // TODO: Should provide empty completions // The issue is that the tree looks the same as for `case @@` (it doesn't see `new`) @@ -382,9 +377,7 @@ class CompletionKeywordSuite extends BaseCompletionSuite: | } |} """.stripMargin, - "", - // to avoid newMain annotation - filter = str => !str.contains("newMain") + "" ) @Test def `super-typeapply` = @@ -698,28 +691,26 @@ class CompletionKeywordSuite extends BaseCompletionSuite: @Test def `derives-with-extends` = check( - """ - |package foo - | - |trait Bar {} - |trait Baz {} - | - |class Foo(x: Int) extends Bar with Baz der@@ - """.stripMargin, + """|package foo + | + |trait Bar {} + |trait Baz {} + | + |class Foo(x: Int) extends Bar with Baz der@@ + |""".stripMargin, """|derives |""".stripMargin ) @Test def `derives-with-constructor-extends` = check( - """ - |package foo - | - |trait Bar {} - |class Baz(b: Int) {} - | - |class Foo(x: Int) extends Bar with Baz(1) der@@ - """.stripMargin, + """|package foo + | + |trait Bar {} + |class Baz(b: Int) {} + | + |class Foo(x: Int) extends Bar with Baz(1) der@@ + |""".stripMargin, """|derives |""".stripMargin ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionOverrideSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionOverrideSuite.scala index 8bc45d344244..94c444b0feb9 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionOverrideSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionOverrideSuite.scala @@ -925,12 +925,15 @@ class CompletionOverrideSuite extends BaseCompletionSuite: | def@@ |} |""".stripMargin, + """|def hello1: Int - |override val hello2: Int |override def equals(x$0: Any): Boolean + |override def hashCode(): Int + |override def toString(): String + |override val hello2: Int 
|""".stripMargin, includeDetail = false, - topLines = Some(3) + topLines = Some(5) ) @Test def `path-dependent` = diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala index 0d86922d4e70..79d35944c84d 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionScalaCliSuite.scala @@ -3,6 +3,7 @@ package dotty.tools.pc.tests.completion import dotty.tools.pc.base.BaseCompletionSuite import org.junit.Test +import org.junit.Ignore class CompletionScalaCliSuite extends BaseCompletionSuite: @@ -28,7 +29,8 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: |// //> using lib ??? |//> using lib io.circe::circe-core_native0.4 |package A - |""".stripMargin + |""".stripMargin, + assertSingleItem = false ) @Test def `version-sort` = @@ -43,6 +45,7 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: |""".stripMargin, ) + @Ignore @Test def `single-colon` = check( """|//> using lib "io.circe:circe-core_na@@ @@ -73,6 +76,7 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: |""".stripMargin, ) + @Ignore @Test def `multiple-libs` = check( """|//> using lib "io.circe::circe-core:0.14.0", "io.circe::circe-core_na@@" @@ -81,6 +85,7 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: "circe-core_native0.4" ) + @Ignore @Test def `script` = check( scriptWrapper( @@ -133,6 +138,7 @@ class CompletionScalaCliSuite extends BaseCompletionSuite: |io.circul""".stripMargin ) + @Ignore @Test def `multiple-deps2` = check( """|//> using libs "io.circe::circe-core:0.14.0", "io.circe::circe-core_na@@" diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetNegSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetNegSuite.scala index 
ccd989d811b5..8cbbad0e6ef2 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetNegSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetNegSuite.scala @@ -16,12 +16,12 @@ class CompletionSnippetNegSuite extends BaseCompletionSuite: @Test def `member` = checkSnippet( - """ - |object Main { - | List.appl@@ - |} - |""".stripMargin, - "apply" + """|object Main { + | List.appl@@ + |} + |""".stripMargin, + """|apply + |unapplySeq""".stripMargin ) @Test def `scope` = diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala index c3e3f374c23d..5769304919ca 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala @@ -15,6 +15,7 @@ class CompletionSnippetSuite extends BaseCompletionSuite: |} |""".stripMargin, """|apply($0) + |unapplySeq($0) |""".stripMargin ) @@ -172,7 +173,6 @@ class CompletionSnippetSuite extends BaseCompletionSuite: |ArrayDequeOps[$0] |ArrayDeque |ArrayDeque - |ArrayDequeOps |""".stripMargin ) @@ -305,15 +305,35 @@ class CompletionSnippetSuite extends BaseCompletionSuite: @Test def `case-class2` = checkSnippet( - s"""|object Main { - | scala.util.Tr@@ + s"""|object wrapper: + | case class Test2(x: Int) + | object Test2: + | def apply(x: Int): Test2 = ??? + |object Main { + | wrapper.Test@@ |} |""".stripMargin, - """|Try - |Try($0) + """|Test2($0) + |new wrapper.Test2 + |Test2 |""".stripMargin ) + @Test def `case-class2-edit` = + checkEditLine( + s"""|object wrapper: + | case class Test2(x: Int) + | object Test2: + | def apply(x: Int): Test2 = ??? 
+ |object Main { + | ___ + |} + |""".stripMargin, + "wrapper.Test@@", + "new wrapper.Test2", + filter = _.contains("new Test2") + ) + @Test def `case-class3` = checkSnippet( s"""|object Main { @@ -322,9 +342,10 @@ class CompletionSnippetSuite extends BaseCompletionSuite: |""".stripMargin, // Note: the class and trait items in here are invalid. So // they are filtered out. - """|Try - |Try($0) - |""".stripMargin + """|Try($0) - [T](r: => T): Try[T] + |Try - scala.util + |""".stripMargin, + includeDetail = true ) @Test def `symbol` = @@ -352,10 +373,10 @@ class CompletionSnippetSuite extends BaseCompletionSuite: | Wi@@ |} |""".stripMargin, - """|Widget - example - |Widget($0) - (name: String): Widget + """|Widget($0) - (name: String): Widget |Widget($0) - (age: Int): Widget |Widget($0) - (name: String, age: Int): Widget + |Widget - example |""".stripMargin, includeDetail = true, topLines = Some(4) @@ -365,18 +386,34 @@ class CompletionSnippetSuite extends BaseCompletionSuite: checkSnippet( s"""|package example | - |object Widget{} + |object TestObject {} |object Main { - | Wi@@ + | TestObjec@@ |} |""".stripMargin, - """|Widget - example - |Window - java.awt - |WindowPeer - java.awt.peer - |WithFilter - scala.collection + """|TestObject - example + |""".stripMargin, + includeDetail = true, + ) + + @Test def `dont-enter-empty-paramlist` = + checkSnippet( + s"""|package example + | + |object Main { + | ListMa@@ + |} + |""".stripMargin, + """|ListMap($0) - [K, V](elems: (K, V)*): ListMap[K, V] + |new ListMap - [K, V]: ListMap[K, V] + |ListMap - scala.collection.immutable + |ListMap($0) - [K, V](elems: (K, V)*): ListMap[K, V] + |new ListMap - [K, V]: ListMap[K, V] + |ListMap - scala.collection.mutable + |ListMapBuilder - [K, V]: ListMapBuilder[K, V] + |ConcurrentSkipListMap - java.util.concurrent |""".stripMargin, includeDetail = true, - topLines = Some(4) ) // https://github.com/scalameta/metals/issues/4004 @@ -393,7 +430,8 @@ class CompletionSnippetSuite extends 
BaseCompletionSuite: | extension (s: String) | def bar = 0 | val bar = "abc".bar - """.stripMargin + """.stripMargin, + filter = _.contains("bar: Int") ) // https://github.com/scalameta/metals/issues/4004 @@ -410,5 +448,6 @@ class CompletionSnippetSuite extends BaseCompletionSuite: | extension (s: String) | def bar() = 0 | val bar = "abc".bar() - """.stripMargin + """.stripMargin, + filter = _.contains("bar: Int") ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala index eadadd484089..b5db258601bc 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala @@ -26,13 +26,12 @@ class CompletionSuite extends BaseCompletionSuite: | Lis@@ |}""".stripMargin, """ - |List scala.collection.immutable |List[A](elems: A*): List[A] + |List scala.collection.immutable |List - java.awt |List - java.util - |ListMap[K, V](elems: (K, V)*): ListMap[K, V] |""".stripMargin, - topLines = Some(5) + topLines = Some(4) ) @Test def member = @@ -179,8 +178,24 @@ class CompletionSuite extends BaseCompletionSuite: |object A { | TrieMap@@ |}""".stripMargin, - """|TrieMap scala.collection.concurrent - |TrieMap[K, V](elems: (K, V)*): TrieMap[K, V] + """|TrieMap[K, V](elems: (K, V)*): TrieMap[K, V] + |new TrieMap[K, V]: TrieMap[K, V] + |new TrieMap[K, V](hashf: Hashing[K], ef: Equiv[K]): TrieMap[K, V] + |TrieMap scala.collection.concurrent + |""".stripMargin + ) + + @Test def `no-companion-apply-in-new`= + check( + """ + |import scala.collection.concurrent._ + |object A { + | new TrieMap@@ + |}""".stripMargin, + // TrieMap should be filtered if it doesn't contain any types that can be constructed in `new` keyword context. 
+ """|TrieMap[K, V]: TrieMap[K, V] + |TrieMap[K, V](hashf: Hashing[K], ef: Equiv[K]): TrieMap[K, V] + |TrieMap scala.collection.concurrent |""".stripMargin ) @@ -216,16 +231,13 @@ class CompletionSuite extends BaseCompletionSuite: """ |import JavaCon@@ |""".stripMargin, - """|AsJavaConverters - scala.collection.convert - |JavaConverters - scala.collection + """|JavaConverters - scala.collection |JavaConversions - scala.concurrent |AsJavaConsumer - scala.jdk.FunctionWrappers + |AsJavaConverters - scala.collection.convert |FromJavaConsumer - scala.jdk.FunctionWrappers |AsJavaBiConsumer - scala.jdk.FunctionWrappers |AsJavaIntConsumer - scala.jdk.FunctionWrappers - |AsJavaLongConsumer - scala.jdk.FunctionWrappers - |FromJavaBiConsumer - scala.jdk.FunctionWrappers - |FromJavaIntConsumer - scala.jdk.FunctionWrappers |""".stripMargin ) @@ -392,6 +404,7 @@ class CompletionSuite extends BaseCompletionSuite: |Function20 scala |Function21 scala |Function22 scala + |PartialFunction scala |""".stripMargin, topLines = Some(25) ) @@ -473,8 +486,7 @@ class CompletionSuite extends BaseCompletionSuite: | |} """.stripMargin, - """|DelayedLazyVal scala.concurrent - |DelayedLazyVal[T](f: () => T, body: => Unit)(exec: ExecutionContext): DelayedLazyVal[T]""".stripMargin + "DelayedLazyVal[T](f: () => T, body: => Unit)(implicit exec: ExecutionContext): DelayedLazyVal[T]" ) @Test def local2 = @@ -520,7 +532,6 @@ class CompletionSuite extends BaseCompletionSuite: |until(end: Long): Exclusive[Long] |until(end: Long, step: Long): Exclusive[Long] |""".stripMargin, - postProcessObtained = _.replace("Float", "Double"), stableOrder = false ) @@ -618,8 +629,8 @@ class CompletionSuite extends BaseCompletionSuite: |} |""".stripMargin, """|Some(value) scala - |Some scala |Some[A](value: A): Some[A] + |Some scala |""".stripMargin ) @@ -630,8 +641,8 @@ class CompletionSuite extends BaseCompletionSuite: | case List(Som@@) |} |""".stripMargin, - """|Some scala - |Some[A](value: A): Some[A] + 
"""|Some[A](value: A): Some[A] + |Some scala |""".stripMargin ) @@ -656,8 +667,8 @@ class CompletionSuite extends BaseCompletionSuite: |} |""".stripMargin, """|Some(value) scala - |Seq scala.collection.immutable - |Set scala.collection.immutable + |Set[A](elems: A*): Set[A] + |Seq[A](elems: A*): Seq[A] |""".stripMargin, topLines = Some(3) ) @@ -784,6 +795,10 @@ class CompletionSuite extends BaseCompletionSuite: |} |""".stripMargin, """|intNumber: Int + |toInt: Int + |instance: Int + |asInstanceOf[X0]: X0 + |isInstanceOf[X0]: Boolean |""".stripMargin ) @@ -1094,7 +1109,8 @@ class CompletionSuite extends BaseCompletionSuite: |} |""".stripMargin, """|first: java.util.List[Int] - |""".stripMargin + |""".stripMargin, + topLines = Some(1) ) @Test def `object-at-type-pos` = @@ -1154,8 +1170,7 @@ class CompletionSuite extends BaseCompletionSuite: |def main = | Testin@@ |""".stripMargin, - """|Testing a - |Testing(): Testing + """|Testing(): Testing |""".stripMargin ) @@ -1168,8 +1183,7 @@ class CompletionSuite extends BaseCompletionSuite: |def main = | Testin@@ |""".stripMargin, - """|Testing a - |Testing(a: Int, b: String): Testing + """|Testing(a: Int, b: String): Testing |""".stripMargin ) @@ -1314,28 +1328,22 @@ class CompletionSuite extends BaseCompletionSuite: |""".stripMargin, """|AClass[A <: Int] test.O |AClass test.O - |AbstractTypeClassManifest - scala.reflect.ClassManifestFactory """.stripMargin ) + val extensionResult = + """|Foo test + |Found - scala.collection.Searching + """.stripMargin + @Test def `extension-definition-scope` = check( """|trait Foo |object T: | extension (x: Fo@@) |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult, + topLines = Some(2) ) 
@Test def `extension-definition-symbol-search` = @@ -1354,18 +1362,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension [A <: Fo@@] |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult, + topLines = Some(2) ) @Test def `extension-definition-type-parameter-symbol-search` = @@ -1384,18 +1382,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension (using Fo@@) |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult, + topLines = Some(2) ) @@ -1405,18 +1393,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension (x: Int)(using Fo@@) |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult, + topLines = Some(2) ) @Test def `extension-definition-mix-2` = @@ -1425,18 +1403,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension (using Fo@@)(x: Int)(using Foo) |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - 
|FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult, + topLines = Some(2) ) @Test def `extension-definition-mix-3` = @@ -1445,18 +1413,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension (using Foo)(x: Int)(using Fo@@) |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult, + topLines = Some(2) ) @Test def `extension-definition-mix-4` = @@ -1465,18 +1423,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension [A](x: Fo@@) |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult, + topLines = Some(2) ) @Test def `extension-definition-mix-5` = @@ -1485,18 +1433,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension [A](using Fo@@)(x: Int) |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult, + topLines = Some(2) ) @Test def `extension-definition-mix-6` = @@ -1505,18 +1443,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension [A](using Foo)(x: Fo@@) |""".stripMargin, - """|Foo test - |Font - java.awt 
- |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult, + topLines = Some(2) ) @Test def `extension-definition-mix-7` = @@ -1525,18 +1453,8 @@ class CompletionSuite extends BaseCompletionSuite: |object T: | extension [A](using Foo)(x: Fo@@)(using Foo) |""".stripMargin, - """|Foo test - |Font - java.awt - |Form - java.text.Normalizer - |Format - java.text - |FontPeer - java.awt.peer - |FormView - javax.swing.text.html - |Formatter - java.util - |Formatter - java.util.logging - |FocusEvent - java.awt.event - |FontMetrics - java.awt - |Found - scala.collection.Searching - |""".stripMargin + extensionResult, + topLines = Some(2) ) @Test def `extension-definition-select` = @@ -1569,7 +1487,6 @@ class CompletionSuite extends BaseCompletionSuite: | extension [T](x: Test.TestSel@@) |""".stripMargin, """|TestSelect[T] test.Test - |TestSelect test.Test |""".stripMargin ) @@ -1581,6 +1498,7 @@ class CompletionSuite extends BaseCompletionSuite: """|object O: | val a = List.apply($0) |""".stripMargin, + assertSingleItem = false ) @Test def `multiline-comment` = @@ -1641,13 +1559,21 @@ class CompletionSuite extends BaseCompletionSuite: assertSingleItem = false ) - @Test def `multi-export` = check( """export scala.collection.{AbstractMap, Set@@} |""".stripMargin, """Set scala.collection |SetOps scala.collection + |AbstractSet scala.collection + |BitSet scala.collection + |BitSetOps scala.collection + |SortedSet scala.collection + |SortedSetFactoryDefaults scala.collection + |SortedSetOps scala.collection + |StrictOptimizedSetOps scala.collection + |StrictOptimizedSortedSetOps scala.collection + |GenSet = scala.collection.Set[X] |""".stripMargin ) @@ -1657,6 +1583,15 @@ class CompletionSuite extends BaseCompletionSuite: 
|""".stripMargin, """Set scala.collection |SetOps scala.collection + |AbstractSet scala.collection + |BitSet scala.collection + |BitSetOps scala.collection + |SortedSet scala.collection + |SortedSetFactoryDefaults scala.collection + |SortedSetOps scala.collection + |StrictOptimizedSetOps scala.collection + |StrictOptimizedSortedSetOps scala.collection + |GenSet = scala.collection.Set[X] |""".stripMargin, ) @@ -1665,11 +1600,11 @@ class CompletionSuite extends BaseCompletionSuite: check( """import scala.collection.{AbstractMap, @@} |""".stripMargin, - """GenIterable scala.collection - |GenMap scala.collection - |GenSeq scala.collection - |GenSet scala.collection - |GenTraversable scala.collection + """+: scala.collection + |:+ scala.collection + |AbstractIndexedSeqView scala.collection + |AbstractIterable scala.collection + |AbstractIterator scala.collection |""".stripMargin, topLines = Some(5) ) @@ -1697,7 +1632,8 @@ class CompletionSuite extends BaseCompletionSuite: | List(1,2,3).tes@@ |""".stripMargin, """|test(p: Int => Boolean): List[Int] - |""".stripMargin + |""".stripMargin, + topLines = Some(1) ) @Test def `old-style-extension-type-variable-inference` = @@ -1709,7 +1645,8 @@ class CompletionSuite extends BaseCompletionSuite: | List(1,2,3).tes@@ |""".stripMargin, """|test(p: Int => Boolean): List[Int] - |""".stripMargin + |""".stripMargin, + topLines = Some(1) ) @Test def `instantiate-type-vars-in-extra-apply-completions` = @@ -1719,7 +1656,6 @@ class CompletionSuite extends BaseCompletionSuite: | foo@@ |""".stripMargin, """|fooBar: List[Int] - |fooBar(n: Int): Int |""".stripMargin ) @@ -1729,7 +1665,13 @@ class CompletionSuite extends BaseCompletionSuite: | List@@ |""".stripMargin, """|List[A](elems: A*): List[A] - |ListMap[K, V](elems: (K, V)*): ListMap[K, V] + |ListSet[A](elems: A*): ListSet[A] - scala.collection.immutable + |ListMap[K, V](elems: (K, V)*): ListMap[K, V] - scala.collection.immutable + |new ListMap[K, V]: ListMap[K, V] - 
scala.collection.immutable + |new ListSet[A]: ListSet[A] - scala.collection.immutable + |ListMap[K, V](elems: (K, V)*): ListMap[K, V] - scala.collection.mutable + |new ListMap[K, V]: ListMap[K, V] - scala.collection.mutable + |LazyList[A](elems: A*): LazyList[A] |""".stripMargin, filter = _.contains("[") ) @@ -1861,3 +1803,108 @@ class CompletionSuite extends BaseCompletionSuite: filter = _ == "Override java.lang" ) + @Test def `fuzzy-search-test` = + check( + """| + |object MyInterface { + | def someMethod(x: Int): Int = ??? + |} + |object Test { + | MyInterface.m@@ + |} + |""".stripMargin, + """|someMethod(x: Int): Int + |""".stripMargin, + topLines = Some(1) + ) + + @Test def `fuzzy-search-test-multiple` = + check( + """| + |trait MyInterface { + | def someMethod(x: Int): Int = ??? + |} + |object Test { + | extension (interface: MyInterface) def someExtMethod(x: Int): Int = ??? + | implicit class MyInterfaceExtension(interface: MyInterface): + | def someOldExtMethod(x: Int): Int = ??? + | val x: MyInterface = ??? 
+ | x.m@@ + |} + |""".stripMargin, + """|someMethod(x: Int): Int + |someExtMethod(x: Int): Int + |someOldExtMethod(x: Int): Int + |""".stripMargin, + topLines = Some(3) + ) + + @Test def `context-bound-in-extension-construct` = + check( + """ + |object x { + | extension [T: Orde@@] + |} + |""".stripMargin, + """Ordered[T] scala.math + |Ordering[T] scala.math + |""".stripMargin, + topLines = Some(2) + ) + + @Test def `context-bounds-in-extension-construct` = + check( + """ + |object x { + | extension [T: Ordering: Orde@@] + |} + |""".stripMargin, + """Ordered[T] scala.math + |Ordering[T] scala.math + |""".stripMargin, + topLines = Some(2) + ) + + @Test def `type-bound-in-extension-construct` = + check( + """ + |object x { + | extension [T <: Orde@@] + |} + |""".stripMargin, + """Ordered[T] scala.math + |Ordering[T] scala.math + |""".stripMargin, + topLines = Some(2) + ) + + @Test def `no-enum-completions-in-new-context` = + check( + """enum TestEnum: + | case TestCase + |object M: + | new TestEnu@@ + |""".stripMargin, + "" + ) + + @Test def `no-enum-case-completions-in-new-context` = + check( + """enum TestEnum: + | case TestCase + |object M: + | new TestEnum.TestCas@@ + |""".stripMargin, + "" + ) + + @Test def `deduplicated-enum-completions` = + check( + """enum TestEnum: + | case TestCase + |object M: + | val x: TestEn@@ + |""".stripMargin, + """TestEnum test + |""".stripMargin, + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala index 52e565a5a78b..c8cfbd178f32 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionWorkspaceSuite.scala @@ -700,7 +700,7 @@ class CompletionWorkspaceSuite extends BaseCompletionSuite: |object Main { | val a = ListBuffer($0) |}""".stripMargin, - filter = 
_.contains("[A]") + filter = _.startsWith("ListBuffer[A]") ) @Test def `type-import` = @@ -811,7 +811,6 @@ class CompletionWorkspaceSuite extends BaseCompletionSuite: |""".stripMargin, """|fooBar: String |fooBar: List[Int] - |fooBar(n: Int): Int |""".stripMargin, ) @@ -827,8 +826,9 @@ class CompletionWorkspaceSuite extends BaseCompletionSuite: | |val j = MyTy@@ |""".stripMargin, - """|MyType(m: Long): MyType - |MyType - demo.other""".stripMargin, + """|MyType(m: Long): MyType - demo.other + |MyType - demo.other + """.stripMargin, ) @Test def `type-apply2` = @@ -843,8 +843,9 @@ class CompletionWorkspaceSuite extends BaseCompletionSuite: | |val j = MyTy@@ |""".stripMargin, - """|MyType(m: Long): MyType - |MyType - demo.other""".stripMargin, + """|MyType(m: Long): MyType - demo.other + |MyType - demo.other + """.stripMargin, ) @Test def `method-name-conflict` = diff --git a/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala index 9636aea77c2e..c7c9b9979404 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/definition/PcDefinitionSuite.scala @@ -274,6 +274,23 @@ class PcDefinitionSuite extends BasePcDefinitionSuite: |""".stripMargin ) + @Test def exportTermExtension = + check( + """|package a + |class Test extends A { + | assert("Hello".fo@@o == "HelloFoo") + |} + | + |trait A { + | export B.* + |} + | + |object B { + | extension (value: String) def <>: String = s"${value}Foo" + |} + |""".stripMargin + ) + @Test def `named-arg-local` = check( """| diff --git a/presentation-compiler/test/dotty/tools/pc/tests/highlight/TypeDocumentHighlightSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/highlight/TypeDocumentHighlightSuite.scala index 35ca8232dc1e..71adb819d7c7 100644 --- 
a/presentation-compiler/test/dotty/tools/pc/tests/highlight/TypeDocumentHighlightSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/highlight/TypeDocumentHighlightSuite.scala @@ -2,7 +2,7 @@ package dotty.tools.pc.tests.highlight import dotty.tools.pc.base.BaseDocumentHighlightSuite -import org.junit.Test +import org.junit.{Test, Ignore} class TypeDocumentHighlightSuite extends BaseDocumentHighlightSuite: @@ -147,7 +147,7 @@ class TypeDocumentHighlightSuite extends BaseDocumentHighlightSuite: |}""".stripMargin ) - @Test def `projection1` = + @Ignore @Test def `projection1` = check( """| |class A { @@ -158,7 +158,7 @@ class TypeDocumentHighlightSuite extends BaseDocumentHighlightSuite: |}""".stripMargin ) - @Test def `projection2` = + @Ignore @Test def `projection2` = check( """| |class A { diff --git a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverDefnSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverDefnSuite.scala index 7a647fa40f5f..f4ce4473e60a 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverDefnSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverDefnSuite.scala @@ -223,13 +223,8 @@ class HoverDefnSuite extends BaseHoverSuite: | <> |} |""".stripMargin, - """|**Expression type**: - |```scala - |Option[Int] - |``` - |**Symbol signature**: - |```scala - |val x: Option[T] + """|```scala + |val x: Option[Int] |``` |""".stripMargin.hover ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverPlainTextSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverPlainTextSuite.scala new file mode 100644 index 000000000000..a69a1ff0f5da --- /dev/null +++ b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverPlainTextSuite.scala @@ -0,0 +1,90 @@ +package dotty.tools.pc.tests.hover + +import dotty.tools.pc.base.BaseHoverSuite + +import org.junit.Test +import dotty.tools.pc.utils.MockEntries +import scala.meta.pc.SymbolDocumentation 
+import scala.meta.internal.pc.PresentationCompilerConfigImpl +import scala.meta.pc.ContentType +import scala.meta.pc.PresentationCompilerConfig + +class HoverPlainTextSuite extends BaseHoverSuite: + + override protected def config: PresentationCompilerConfig = + PresentationCompilerConfigImpl().copy( + snippetAutoIndent = false, + hoverContentType = ContentType.PLAINTEXT + ) + + override protected def mockEntries: MockEntries = new MockEntries: + override def documentations: Set[SymbolDocumentation] = Set( + ScalaMockDocumentation("java/lang/String#substring().", "substring", List(), List(MockParam("beginIndex"))), + ScalaMockDocumentation("java/util/Collections#emptyList().", "emptyList"), + ScalaMockDocumentation("_empty_/Alpha.apply().", "apply", List(), List(MockParam("x"))), + ScalaMockDocumentation("_empty_/Alpha#", "init", List(), List(MockParam("x"))), + ScalaMockDocumentation("scala/collection/LinearSeqOps#headOption().", "headOption"), + ScalaMockDocumentation("scala/Option#fold().", "fold", List(MockParam("B"))), + ) + + @Test def `basic-plaintext` = + check( + """| + |/** + | * Some docstring + | */ + |case class Alpha(x: Int) { + |} + | + |object Main { + | val x = <> + |} + |""".stripMargin, + """|def apply(x: Int): Alpha + | + |Found documentation for _empty_/Alpha.apply(). + | + |""".stripMargin + ) + + + @Test def `head-plaintext` = + check( + """|object a { + | <> + |} + |""".stripMargin, + """|override def headOption: Option[Int] + | + |Found documentation for scala/collection/LinearSeqOps#headOption(). + |""".stripMargin + ) + + @Test def `trait-plaintext` = + check( + """|trait XX + |object Main extends <>{} + |""".stripMargin, + "trait XX: XX", + ) + + @Test def `function-chain4-plaintext` = + check( + """ + |trait Consumer { + | def subConsumer[T](i: T): T + | def consume(value: Int)(n: Int): Unit + |} + | + |object O { + | val consumer: Consumer = ??? 
+ | List(1).foreach(<>.consume(1)) + |} + |""".stripMargin, + """|Expression type: + |Consumer + | + |Symbol signature: + |def subConsumer[T](i: T): T + |""".stripMargin + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala index d1e90241e639..b51974b00fb0 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/hover/HoverTermSuite.scala @@ -653,3 +653,33 @@ class HoverTermSuite extends BaseHoverSuite: |``` |""".stripMargin ) + + @Test def `dealias-type-members-in-structural-types1`: Unit = + check( + """object Obj { + | trait A extends Sup { self => + | type T + | def member : T + | } + | val x: A { type T = Int} = ??? + | + | <> + | + |}""".stripMargin, + """def member: Int""".stripMargin.hover + ) + + @Test def `dealias-type-members-in-structural-types2`: Unit = + check( + """object Obj: + | trait A extends Sup { self => + | type T + | def fun(body: A { type T = self.T} => Unit) = () + | } + | val x: A { type T = Int} = ??? 
+ | + | x.fun: <> => + | () + |""".stripMargin, + """yy: A{type T = Int}""".stripMargin.hover + ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala index da7601e3c746..8ce7cdce4382 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/inlayHints/InlayHintsSuite.scala @@ -170,7 +170,7 @@ class InlayHintsSuite extends BaseInlayHintsSuite { |} |""".stripMargin, """|object O { - | def m/*: List<>[Int<>]*/ = 1 ::/*[Int<>]*/ List/*[Int<>]*/(1) + | def m/*: List<>[Int<>]*/ = 1 :: List/*[Int<>]*/(1) |} |""".stripMargin ) @@ -418,13 +418,16 @@ class InlayHintsSuite extends BaseInlayHintsSuite { @Test def `tuple-unapply` = check( """|object Main { + | val (local, _) = ("", 1.0) | val (fst, snd) = (1, 2) |} |""".stripMargin, """|object Main { + | val (local/*: String<>*/, _) = ("", 1.0) | val (fst/*: Int<>*/, snd/*: Int<>*/) = (1, 2) |} - |""".stripMargin + |""".stripMargin, + hintsInPatternMatch = true ) @Test def `list-unapply` = @@ -434,7 +437,7 @@ class InlayHintsSuite extends BaseInlayHintsSuite { |} |""".stripMargin, """|object Main { - | val hd/*: Int<>*/ ::/*[Int<>]*/ tail/*: List<>[Int<>]*/ = List/*[Int<>]*/(1, 2) + | val hd :: tail = List/*[Int<>]*/(1, 2) |} |""".stripMargin, ) @@ -449,7 +452,7 @@ class InlayHintsSuite extends BaseInlayHintsSuite { |""".stripMargin, """|object Main { | val x/*: Int<>*/ = List/*[Int<>]*/(1, 2) match { - | case hd/*: Int<>*/ ::/*[Int<>]*/ tail/*: List<>[Int<>]*/ => hd + | case hd :: tail => hd | } |} |""".stripMargin, @@ -464,9 +467,10 @@ class InlayHintsSuite extends BaseInlayHintsSuite { |""".stripMargin, """|object Main { |case class Foo[A](x: A, y: A) - | val Foo/*[Int<>]*/(fst/*: Int<>*/, snd/*: Int<>*/) = Foo/*[Int<>]*/(1, 2) + | val Foo(fst/*: Int<>*/, snd/*: Int<>*/) = Foo/*[Int<>]*/(1, 2) |} |""".stripMargin, 
+ hintsInPatternMatch = true ) @Test def `valueOf` = @@ -517,7 +521,7 @@ class InlayHintsSuite extends BaseInlayHintsSuite { |} |""".stripMargin, """|object Main { - | List/*[Int<>]*/(1).collect/*[Int<>]*/ { case x/*: Int<>*/ => x } + | List/*[Int<>]*/(1).collect/*[Int<>]*/ { case x => x } | val x: PartialFunction[Int, Int] = { | case 1 => 2 | } @@ -532,7 +536,7 @@ class InlayHintsSuite extends BaseInlayHintsSuite { |} |""".stripMargin, """|object O { - | val tupleBound @ (one/*: String<>*/, two/*: String<>*/) = ("1", "2") + | val tupleBound @ (one, two) = ("1", "2") |} |""".stripMargin ) @@ -546,7 +550,8 @@ class InlayHintsSuite extends BaseInlayHintsSuite { """|object O { | val tupleBound /* comment */ @ (one/*: String<>*/, two/*: String<>*/) = ("1", "2") |} - |""".stripMargin + |""".stripMargin, + hintsInPatternMatch = true ) @Test def `complex` = @@ -764,4 +769,155 @@ class InlayHintsSuite extends BaseInlayHintsSuite { |} |""".stripMargin ) + + @Test def `pattern-match` = + check( + """|package example + |object O { + | val head :: tail = List(1) + | List(1) match { + | case head :: next => + | case Nil => + | } + | Option(Option(1)) match { + | case Some(Some(value)) => + | case None => + | } + | val (local, _) = ("", 1.0) + | val Some(x) = Option(1) + | for { + | x <- List((1,2)) + | (z, y) = x + | } yield { + | x + | } + |} + |""".stripMargin, + """|package example + |object O { + | val head :: tail = List/*[Int<>]*/(1) + | List/*[Int<>]*/(1) match { + | case head :: next => + | case Nil => + | } + | Option/*[Option<>[Int<>]]*/(Option/*[Int<>]*/(1)) match { + | case Some(Some(value)) => + | case None => + | } + | val (local, _) = ("", 1.0) + | val Some(x) = Option/*[Int<>]*/(1) + | for { + | x <- List/*[(Int<>, Int<>)]*/((1,2)) + | (z, y) = x + | } yield { + | x + | } + |} + |""".stripMargin + ) + + + @Test def `pattern-match1` = + check( + """|package example + |object O { + | val head :: tail = List(1) + | List(1) match { + | case head :: next => + | case 
Nil => + | } + | Option(Option(1)) match { + | case Some(Some(value)) => + | case None => + | } + | val (local, _) = ("", 1.0) + | val Some(x) = Option(1) + | for { + | x <- List((1,2)) + | (z, y) = x + | } yield { + | x + | } + |} + |""".stripMargin, + """|package example + |object O { + | val head/*: Int<>*/ :: tail/*: List<>[Int<>]*/ = List/*[Int<>]*/(1) + | List/*[Int<>]*/(1) match { + | case head/*: Int<>*/ :: next/*: List<>[Int<>]*/ => + | case Nil => + | } + | Option/*[Option<>[Int<>]]*/(Option/*[Int<>]*/(1)) match { + | case Some(Some(value/*: Int<>*/)) => + | case None => + | } + | val (local/*: String<>*/, _) = ("", 1.0) + | val Some(x/*: Int<>*/) = Option/*[Int<>]*/(1) + | for { + | x/*: (Int<>, Int<>)*/ <- List/*[(Int<>, Int<>)]*/((1,2)) + | (z/*: Int<>*/, y/*: Int<>*/) = x + | } yield { + | x + | } + |} + |""".stripMargin, + hintsInPatternMatch = true + ) + + @Test def quotes = + check( + """|package example + |import scala.quoted.* + |object O: + | inline def foo[T]: List[String] = ${fooImpl[T]} + | def fooImpl[T: Type](using Quotes): Expr[List[String]] = ??? + |""".stripMargin, + """|package example + |import scala.quoted.* + |object O: + | inline def foo[T]: List[String] = ${fooImpl[T]} + | def fooImpl[T: Type](using Quotes): Expr[List[String]] = ??? 
+ |""".stripMargin + ) + + @Test def quotes1 = + check( + """|package example + |import scala.quoted.* + |object O: + | def matchTypeImpl[T: Type](param1: Expr[T])(using Quotes) = + | import quotes.reflect.* + | Type.of[T] match + | case '[f] => + | val fr = TypeRepr.of[T] + |""".stripMargin, + """|package example + |import scala.quoted.* + |object O: + | def matchTypeImpl[T: Type](param1: Expr[T])(using Quotes)/*: Unit<>*/ = + | import quotes.reflect.* + | Type.of[T] match + | case '[f] => + | val fr/*: TypeRepr<>*/ = TypeRepr.of[T]/*(using evidence$1<<(3:23)>>)*/ + |""".stripMargin + ) + + + @Test def quotes2 = + check( + """|package example + |import scala.quoted.* + |object O: + | def rec[A : Type](using Quotes): List[String] = + | Type.of[A] match + | case '[field *: fields] => ??? + |""".stripMargin, + """|package example + |import scala.quoted.* + |object O: + | def rec[A : Type](using Quotes): List[String] = + | Type.of[A] match + | case '[field *: fields] => ??? + |""".stripMargin + ) } diff --git a/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpSuite.scala index 9e223cb094e3..2b458ced9683 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/signaturehelp/SignatureHelpSuite.scala @@ -1533,3 +1533,28 @@ class SignatureHelpSuite extends BaseSignatureHelpSuite: |foo(i: Boolean, s: String)(b: Int): Unit |""".stripMargin ) + + @Test def `proper-param-empty-list` = + check( + """ + |object x { + | def foo[K, V](): Unit = ??? + | foo(@@) + |} + |""".stripMargin, + "foo[K, V](): Unit" + ) + + @Test def `proper-param-list-after-param-empty-list` = + check( + """ + |object x { + | def foo[K, V]()(x: Int): Unit = ??? 
+ | foo()(@@) + |} + |""".stripMargin, + """ + |foo[K, V]()(x: Int): Unit + | ^^^^^^ + """.stripMargin + ) diff --git a/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala b/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala index edd339a5e2ed..9015a39ba9e7 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/MockSymbolSearch.scala @@ -7,6 +7,7 @@ import java.util as ju import scala.jdk.CollectionConverters.* import scala.jdk.OptionConverters.* import scala.meta.internal.metals.{ClasspathSearch, WorkspaceSymbolQuery} +import scala.meta.pc.ContentType import scala.meta.pc.SymbolSearch.Result import scala.meta.pc.{ ParentSymbols, @@ -66,6 +67,12 @@ class MockSymbolSearch( override def documentation( symbol: String, parents: ParentSymbols + ) = documentation(symbol, parents, ContentType.MARKDOWN) + + override def documentation( + symbol: String, + parents: ParentSymbols, + contentType: ContentType ): Optional[SymbolDocumentation] = (symbol +: parents.parents().asScala).iterator .map(symbol => mockEntries.documentations.find(_.symbol == symbol)) diff --git a/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala b/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala index 6dfc8acec66c..ef15121c6702 100644 --- a/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/PcAssertions.scala @@ -4,7 +4,7 @@ import scala.language.unsafeNulls import dotty.tools.pc.completions.CompletionSource import dotty.tools.dotc.util.DiffUtil -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import org.hamcrest import org.hamcrest.* diff --git a/presentation-compiler/test/dotty/tools/pc/utils/TestInlayHints.scala b/presentation-compiler/test/dotty/tools/pc/utils/TestInlayHints.scala index 98ebb0852735..a923b76b955c 100644 --- 
a/presentation-compiler/test/dotty/tools/pc/utils/TestInlayHints.scala +++ b/presentation-compiler/test/dotty/tools/pc/utils/TestInlayHints.scala @@ -3,7 +3,7 @@ package dotty.tools.pc.utils import scala.collection.mutable.ListBuffer import scala.meta.internal.jdk.CollectionConverters._ -import dotty.tools.pc.utils.MtagsEnrichments.* +import dotty.tools.pc.utils.InteractiveEnrichments.* import dotty.tools.pc.utils.TextEdits import org.eclipse.lsp4j.InlayHint diff --git a/project/Build.scala b/project/Build.scala index b75bf1778b3f..047f2c0c22ea 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -12,6 +12,8 @@ import pl.project13.scala.sbt.JmhPlugin import pl.project13.scala.sbt.JmhPlugin.JmhKeys.Jmh import sbt.Package.ManifestAttributes import sbt.PublishBinPlugin.autoImport._ +import dotty.tools.sbtplugin.RepublishPlugin +import dotty.tools.sbtplugin.RepublishPlugin.autoImport._ import sbt.plugins.SbtPlugin import sbt.ScriptedPlugin.autoImport._ import xerial.sbt.pack.PackPlugin @@ -26,6 +28,7 @@ import sbttastymima.TastyMiMaPlugin import sbttastymima.TastyMiMaPlugin.autoImport._ import scala.util.Properties.isJavaAtLeast + import org.portablescala.sbtplatformdeps.PlatformDepsPlugin.autoImport._ import org.scalajs.linker.interface.{ModuleInitializer, StandardConfig} @@ -83,9 +86,9 @@ object DottyJSPlugin extends AutoPlugin { object Build { import ScaladocConfigs._ - val referenceVersion = "3.4.1" + val referenceVersion = "3.4.2" - val baseVersion = "3.4.2" + val baseVersion = "3.5.0" // LTS or Next val versionLine = "Next" @@ -97,17 +100,29 @@ object Build { val publishedDottyVersion = referenceVersion val sbtDottyVersion = "0.5.5" - /** Version against which we check binary compatibility. + /** Minor version against which we check binary compatibility. + * + * This must be the earliest published release in the same versioning line. 
+ * For a baseVersion `3.M.P` the mimaPreviousDottyVersion should be set to: + * - `3.M.0` if `P > 0` + * - `3.(M-1).0` if `P = 0` + */ + val mimaPreviousDottyVersion = "3.4.0" + + /** LTS version against which we check binary compatibility. * - * This must be the latest published release in the same versioning line. - * For example, if the next version is going to be 3.1.4, then this must be - * set to 3.1.3. If it is going to be 3.1.0, it must be set to the latest - * 3.0.x release. + * This must be the earliest published release in the LTS versioning line. + * For example, if the latest LTS release is be 3.3.4, then this must be + * set to 3.3.0. */ - val previousDottyVersion = "3.4.1" + val mimaPreviousLTSDottyVersion = "3.3.0" - /** Version against which we check binary compatibility. */ - val ltsDottyVersion = "3.3.0" + /** Version of Scala CLI to download */ + val scalaCliLauncherVersion = "1.4.0" + /** Version of Scala CLI to download (on Windows - last known validated version) */ + val scalaCliLauncherVersionWindows = "1.4.0" + /** Version of Coursier to download for initializing the local maven repo of Scala command */ + val coursierJarVersion = "2.1.10" object CompatMode { final val BinaryCompatible = 0 @@ -129,8 +144,8 @@ object Build { * scala-library. */ def stdlibVersion(implicit mode: Mode): String = mode match { - case NonBootstrapped => "2.13.12" - case Bootstrapped => "2.13.12" + case NonBootstrapped => "2.13.14" + case Bootstrapped => "2.13.14" } /** Version of the scala-library for which we will generate TASTy. @@ -140,7 +155,7 @@ object Build { * We can use nightly versions to tests the future compatibility in development. 
* Nightly versions: https://scala-ci.typesafe.com/ui/native/scala-integration/org/scala-lang */ - val stdlibBootstrappedVersion = "2.13.12" + val stdlibBootstrappedVersion = "2.13.14" val dottyOrganization = "org.scala-lang" val dottyGithubUrl = "https://github.com/scala/scala3" @@ -394,6 +409,7 @@ object Build { "-skip-by-id:scala.runtime.MatchCase", "-skip-by-id:dotty.tools.tasty", "-skip-by-id:dotty.tools.tasty.util", + "-skip-by-id:dotty.tools.tasty.besteffort", "-project-footer", s"Copyright (c) 2002-$currentYear, LAMP/EPFL", "-author", "-groups", @@ -500,7 +516,7 @@ object Build { case cv: Disabled => thisProjectID.name case cv: Binary => s"${thisProjectID.name}_${cv.prefix}3${cv.suffix}" } - (thisProjectID.organization % crossedName % previousDottyVersion) + (thisProjectID.organization % crossedName % mimaPreviousDottyVersion) }, mimaCheckDirection := (compatMode match { @@ -596,8 +612,8 @@ object Build { // Settings shared between scala3-compiler and scala3-compiler-bootstrapped lazy val commonDottyCompilerSettings = Seq( - // Note: bench/profiles/projects.yml should be updated accordingly. - Compile / scalacOptions ++= Seq("-Yexplicit-nulls", "-Ysafe-init"), + // Note: bench/profiles/projects.yml should be updated accordingly. 
+ Compile / scalacOptions ++= Seq("-Yexplicit-nulls"), // Use source 3.3 to avoid fatal migration warnings on scalajs-ir scalacOptions ++= Seq("-source", "3.3"), @@ -874,6 +890,8 @@ object Build { } lazy val nonBootstrappedDottyCompilerSettings = commonDottyCompilerSettings ++ Seq( + // FIXME revert this to commonDottyCompilerSettings, when we bump reference version to 3.5.0 + scalacOptions += "-Ysafe-init", // packageAll packages all and then returns a map with the abs location packageAll := Def.taskDyn { // Use a dynamic task to avoid loops when loading the settings Def.task { @@ -901,6 +919,8 @@ object Build { ) lazy val bootstrappedDottyCompilerSettings = commonDottyCompilerSettings ++ Seq( + // FIXME revert this to commonDottyCompilerSettings, when we bump reference version to 3.5.0 + scalacOptions += "-Wsafe-init", javaOptions ++= { val jars = packageAll.value Seq( @@ -908,6 +928,13 @@ object Build { "-Ddotty.tests.classes.dottyTastyInspector=" + jars("scala3-tasty-inspector"), ) }, + // For compatibility at this moment, both the bootstrapped and the non-bootstrapped + // compilers are compiled without flexible types. + // We should move the flag to commonDottyCompilerSettings once the reference + // compiler is updated. + // Then, the next step is to enable flexible types by default and reduce the use of + // `unsafeNulls`. + scalacOptions ++= Seq("-Yno-flexible-types"), packageAll := { (`scala3-compiler` / packageAll).value ++ Seq( "scala3-compiler" -> (Compile / packageBin).value.getAbsolutePath, @@ -1046,6 +1073,7 @@ object Build { settings(commonBootstrappedSettings). settings(scala2LibraryBootstrappedSettings). settings(moduleName := "scala2-library") + // -Ycheck:all is set in project/scripts/scala2-library-tasty-mima.sh /** Scala 2 library compiled by dotty using the latest published sources of the library. 
* @@ -1069,9 +1097,8 @@ object Build { Compile / doc / scalacOptions += "-Ydocument-synthetic-types", scalacOptions += "-Ycompile-scala2-library", scalacOptions += "-Yscala2Unpickler:never", - scalacOptions += "-Yno-experimental", scalacOptions -= "-Xfatal-warnings", - Compile / compile / logLevel := Level.Error, + Compile / compile / logLevel.withRank(KeyRanks.Invisible) := Level.Error, ivyConfigurations += SourceDeps.hide, transitiveClassifiers := Seq("sources"), libraryDependencies += @@ -1097,19 +1124,23 @@ object Build { IO.createDirectory(trgDir) IO.unzip(scalaLibrarySourcesJar, trgDir) - ((trgDir ** "*.scala") +++ (trgDir ** "*.java")).get.toSet + val (ignoredSources, sources) = + ((trgDir ** "*.scala") +++ (trgDir ** "*.java")).get.toSet + .partition{file => + // sources from https://github.com/scala/scala/tree/2.13.x/src/library-aux + val path = file.getPath.replace('\\', '/') + path.endsWith("scala-library-src/scala/Any.scala") || + path.endsWith("scala-library-src/scala/AnyVal.scala") || + path.endsWith("scala-library-src/scala/AnyRef.scala") || + path.endsWith("scala-library-src/scala/Nothing.scala") || + path.endsWith("scala-library-src/scala/Null.scala") || + path.endsWith("scala-library-src/scala/Singleton.scala") + } + // These sources should be never compiled, filtering them out was not working correctly sometimes + ignoredSources.foreach(_.delete()) + sources } (Set(scalaLibrarySourcesJar)).toSeq }.taskValue, - (Compile / sources) ~= (_.filterNot { file => - // sources from https://github.com/scala/scala/tree/2.13.x/src/library-aux - val path = file.getPath.replace('\\', '/') - path.endsWith("scala-library-src/scala/Any.scala") || - path.endsWith("scala-library-src/scala/AnyVal.scala") || - path.endsWith("scala-library-src/scala/AnyRef.scala") || - path.endsWith("scala-library-src/scala/Nothing.scala") || - path.endsWith("scala-library-src/scala/Null.scala") || - path.endsWith("scala-library-src/scala/Singleton.scala") - }), (Compile / sources) := 
{ val files = (Compile / sources).value val overwrittenSourcesDir = (Compile / scalaSource).value @@ -1141,7 +1172,7 @@ object Build { }, tastyMiMaConfig ~= { _.withMoreProblemFilters(TastyMiMaFilters.StdlibBootstrapped) }, tastyMiMaReportIssues := tastyMiMaReportIssues.dependsOn(Def.task { - val minorVersion = previousDottyVersion.split('.')(1) + val minorVersion = mimaPreviousDottyVersion.split('.')(1) // TODO find a way around this and test in the CI streams.value.log.warn( s"""To allow TASTy-MiMa to read TASTy files generated by this version of the compile you must: @@ -1290,6 +1321,10 @@ object Build { .asScala3PresentationCompiler(NonBootstrapped) lazy val `scala3-presentation-compiler-bootstrapped` = project.in(file("presentation-compiler")) .asScala3PresentationCompiler(Bootstrapped) + .settings( + // Add `-Yno-flexible-types` flag for bootstrap, see comments for `bootstrappedDottyCompilerSettings` + Compile / scalacOptions += "-Yno-flexible-types" + ) def scala3PresentationCompiler(implicit mode: Mode): Project = mode match { case NonBootstrapped => `scala3-presentation-compiler` @@ -1317,8 +1352,8 @@ object Build { BuildInfoPlugin.buildInfoScopedSettings(Test) ++ BuildInfoPlugin.buildInfoDefaultSettings - lazy val presentationCompilerSettings = { - val mtagsVersion = "1.2.2+44-42e0515a-SNAPSHOT" + def presentationCompilerSettings(implicit mode: Mode) = { + val mtagsVersion = "1.3.0+56-a06a024d-SNAPSHOT" Seq( resolvers ++= Resolver.sonatypeOssRepos("snapshots"), @@ -1327,11 +1362,15 @@ object Build { "io.get-coursier" % "interface" % "1.0.18", "org.scalameta" % "mtags-interfaces" % mtagsVersion, ), - libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.12" % mtagsVersion % SourceDeps), + libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.14" % mtagsVersion % SourceDeps), ivyConfigurations += SourceDeps.hide, transitiveClassifiers := Seq("sources"), scalacOptions ++= Seq("-source", "3.3"), // To avoid fatal migration warnings - Compile / 
scalacOptions ++= Seq("-Yexplicit-nulls", "-Ysafe-init"), + // FIXME change this to just Seq("-Yexplicit-nulls, "-Wsafe-init") when reference is set to 3.5.0 + Compile / scalacOptions ++= (mode match { + case Bootstrapped => Seq("-Yexplicit-nulls", "-Wsafe-init") + case NonBootstrapped => Seq("-Yexplicit-nulls", "-Ysafe-init") + }), Compile / sourceGenerators += Def.task { val s = streams.value val cacheDir = s.cacheDirectory @@ -1490,11 +1529,13 @@ object Build { "isNoModule" -> (moduleKind == ModuleKind.NoModule), "isESModule" -> (moduleKind == ModuleKind.ESModule), "isCommonJSModule" -> (moduleKind == ModuleKind.CommonJSModule), - "isFullOpt" -> (stage == FullOptStage), + "usesClosureCompiler" -> linkerConfig.closureCompiler, + "hasMinifiedNames" -> (linkerConfig.closureCompiler || linkerConfig.minify), "compliantAsInstanceOfs" -> (sems.asInstanceOfs == CheckedBehavior.Compliant), "compliantArrayIndexOutOfBounds" -> (sems.arrayIndexOutOfBounds == CheckedBehavior.Compliant), "compliantArrayStores" -> (sems.arrayStores == CheckedBehavior.Compliant), "compliantNegativeArraySizes" -> (sems.negativeArraySizes == CheckedBehavior.Compliant), + "compliantNullPointers" -> (sems.nullPointers == CheckedBehavior.Compliant), "compliantStringIndexOutOfBounds" -> (sems.stringIndexOutOfBounds == CheckedBehavior.Compliant), "compliantModuleInit" -> (sems.moduleInit == CheckedBehavior.Compliant), "strictFloats" -> sems.strictFloats, @@ -1561,6 +1602,8 @@ object Build { (dir / "shared/src/test/scala" ** (("*.scala": FileFilter) -- "ReflectiveCallTest.scala" // uses many forms of structural calls that are not allowed in Scala 3 anymore -- "UTF16Test.scala" // refutable pattern match + -- "CharsetTest.scala" // bogus @tailrec that Scala 2 ignores but Scala 3 flags as an error + -- "ClassDiffersOnlyInCaseTest.scala" // looks like the Scala 3 compiler itself does not deal with that )).get ++ (dir / "shared/src/test/require-sam" ** "*.scala").get @@ -1629,6 +1672,7 @@ object Build { 
Seq( "-Ddotty.tests.classes.dottyLibraryJS=" + dottyLibraryJSJar, "-Ddotty.tests.classes.scalaJSJavalib=" + findArtifactPath(externalJSDeps, "scalajs-javalib"), + "-Ddotty.tests.classes.scalaJSScalalib=" + findArtifactPath(externalJSDeps, "scalajs-scalalib_2.13"), "-Ddotty.tests.classes.scalaJSLibrary=" + findArtifactPath(externalJSDeps, "scalajs-library_2.13"), ) }, @@ -1737,6 +1781,9 @@ object Build { SourceLinksIntegrationTest / scalaSource := baseDirectory.value / "test-source-links", SourceLinksIntegrationTest / test:= ((SourceLinksIntegrationTest / test) dependsOn generateScalaDocumentation.toTask("")).value, ). + settings( + scalacOptions += "-experimental" // workaround use of experimental .info in Scaladoc2AnchorCreator + ). settings( Compile / resourceGenerators ++= Seq( generateStaticAssetsTask.taskValue, @@ -2081,13 +2128,79 @@ object Build { packMain := Map(), publishArtifact := false, packGenerateMakefile := false, - packExpandedClasspath := true, - packArchiveName := "scala3-" + dottyVersion + republishRepo := target.value / "republish", + packResourceDir += (republishRepo.value / "bin" -> "bin"), + packResourceDir += (republishRepo.value / "maven2" -> "maven2"), + packResourceDir += (republishRepo.value / "lib" -> "lib"), + republishCommandLibs += + ("scala" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core")), + republishCommandLibs += + ("with_compiler" -> List("scala3-staging", "scala3-tasty-inspector", "^!scala3-interfaces", "^!scala3-compiler", "^!scala3-library", "^!tasty-core")), + republishCommandLibs += + ("scaladoc" -> List("scala3-interfaces", "scala3-compiler", "scala3-library", "tasty-core", "scala3-tasty-inspector", "scaladoc")), + Compile / pack := republishPack.value, ) lazy val dist = project.asDist(Bootstrapped) .settings( - packResourceDir += (baseDirectory.value / "bin" -> "bin"), + packArchiveName := "scala3-" + dottyVersion, + republishBinDir := baseDirectory.value / "bin", + republishCoursier += + 
("coursier.jar" -> s"https://github.com/coursier/coursier/releases/download/v$coursierJarVersion/coursier.jar"), + republishLaunchers += + ("scala-cli.jar" -> s"https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli.jar"), + ) + + lazy val `dist-mac-x86_64` = project.in(file("dist/mac-x86_64")).asDist(Bootstrapped) + .settings( + republishBinDir := (dist / republishBinDir).value, + packArchiveName := (dist / packArchiveName).value + "-x86_64-apple-darwin", + republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishFetchCoursier := (dist / republishFetchCoursier).value, + republishLaunchers += + ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-x86_64-apple-darwin.gz") + ) + + lazy val `dist-mac-aarch64` = project.in(file("dist/mac-aarch64")).asDist(Bootstrapped) + .settings( + republishBinDir := (dist / republishBinDir).value, + packArchiveName := (dist / packArchiveName).value + "-aarch64-apple-darwin", + republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishFetchCoursier := (dist / republishFetchCoursier).value, + republishLaunchers += + ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-aarch64-apple-darwin.gz") + ) + + lazy val `dist-win-x86_64` = project.in(file("dist/win-x86_64")).asDist(Bootstrapped) + .settings( + republishBinDir := (dist / republishBinDir).value, + packArchiveName := (dist / packArchiveName).value + "-x86_64-pc-win32", + republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishFetchCoursier := (dist / republishFetchCoursier).value, + republishExtraProps += ("cli_version" -> scalaCliLauncherVersion), + mappings += (republishRepo.value / "EXTRA_PROPERTIES" -> "EXTRA_PROPERTIES"), + republishLaunchers += + ("scala-cli.exe" -> 
s"zip+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersionWindows/scala-cli-x86_64-pc-win32.zip!/scala-cli.exe") + ) + + lazy val `dist-linux-x86_64` = project.in(file("dist/linux-x86_64")).asDist(Bootstrapped) + .settings( + republishBinDir := (dist / republishBinDir).value, + packArchiveName := (dist / packArchiveName).value + "-x86_64-pc-linux", + republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishFetchCoursier := (dist / republishFetchCoursier).value, + republishLaunchers += + ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-x86_64-pc-linux.gz") + ) + + lazy val `dist-linux-aarch64` = project.in(file("dist/linux-aarch64")).asDist(Bootstrapped) + .settings( + republishBinDir := (dist / republishBinDir).value, + packArchiveName := (dist / packArchiveName).value + "-aarch64-pc-linux", + republishBinOverrides += (dist / baseDirectory).value / "bin-native-overrides", + republishFetchCoursier := (dist / republishFetchCoursier).value, + republishLaunchers += + ("scala-cli" -> s"gz+https://github.com/VirtusLab/scala-cli/releases/download/v$scalaCliLauncherVersion/scala-cli-aarch64-pc-linux.gz") ) private def customMimaReportBinaryIssues(issueFilterLocation: String) = mimaReportBinaryIssues := { @@ -2104,7 +2217,7 @@ object Build { // FIXME: we do not aggregate `bin` because its tests delete jars, thus breaking other tests def asDottyRoot(implicit mode: Mode): Project = project.withCommonSettings. aggregate(`scala3-interfaces`, dottyLibrary, dottyCompiler, tastyCore, `scala3-sbt-bridge`, scala3PresentationCompiler). - bootstrappedAggregate(`scala2-library-tasty`, `scala3-language-server`, `scala3-staging`, + bootstrappedAggregate(`scala2-library-tasty`, `scala2-library-cc-tasty`, `scala3-language-server`, `scala3-staging`, `scala3-tasty-inspector`, `scala3-library-bootstrappedJS`, scaladoc). dependsOn(tastyCore). 
dependsOn(dottyCompiler). @@ -2150,9 +2263,6 @@ object Build { settings( versionScheme := Some("semver-spec"), libraryDependencies += "org.scala-lang" % "scala-library" % stdlibVersion, - // Make sure we do not refer to experimental features outside an experimental scope. - // In other words, disable NIGHTLY/SNAPSHOT experimental scope. - scalacOptions += "-Yno-experimental", ). settings(dottyLibrarySettings) if (mode == Bootstrapped) { @@ -2171,7 +2281,7 @@ object Build { case cv: Disabled => thisProjectID.name case cv: Binary => s"${thisProjectID.name}_${cv.prefix}3${cv.suffix}" } - (thisProjectID.organization % crossedName % ltsDottyVersion) + (thisProjectID.organization % crossedName % mimaPreviousLTSDottyVersion) }, mimaForwardIssueFilters := MiMaFilters.Scala3Library.ForwardsBreakingChanges, mimaBackwardIssueFilters := MiMaFilters.Scala3Library.BackwardsBreakingChanges, @@ -2219,10 +2329,19 @@ object Build { settings(scala3PresentationCompilerBuildInfo) def asDist(implicit mode: Mode): Project = project. - enablePlugins(PackPlugin). + enablePlugins(PackPlugin, RepublishPlugin). withCommonSettings. - dependsOn(`scala3-interfaces`, dottyCompiler, dottyLibrary, tastyCore, `scala3-staging`, `scala3-tasty-inspector`, scaladoc). settings(commonDistSettings). + dependsOn( + `scala3-interfaces`, + dottyCompiler, + dottyLibrary, + tastyCore, + `scala3-staging`, + `scala3-tasty-inspector`, + scaladoc, + `scala3-sbt-bridge`, // for scala-cli + ). 
bootstrappedSettings( target := baseDirectory.value / "target" // override setting in commonBootstrappedSettings ) @@ -2275,6 +2394,7 @@ object ScaladocConfigs { "scala.runtime.MatchCase", "dotty.tools.tasty", "dotty.tools.tasty.util", + "dotty.tools.tasty.besteffort" )) def projectFooter = ProjectFooter(s"Copyright (c) 2002-$currentYear, LAMP/EPFL") def defaultTemplate = DefaultTemplate("static-site-main") diff --git a/project/MiMaFilters.scala b/project/MiMaFilters.scala index 7565d23b2c1b..18d2e985f844 100644 --- a/project/MiMaFilters.scala +++ b/project/MiMaFilters.scala @@ -7,11 +7,25 @@ object MiMaFilters { val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Additions that require a new minor version of the library - Build.previousDottyVersion -> Seq( + Build.mimaPreviousDottyVersion -> Seq( + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.annotation.experimental.this"), + ProblemFilters.exclude[FinalClassProblem]("scala.annotation.experimental"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Tuple.fromArray"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.Tuple.fromIArray"), + ProblemFilters.exclude[MissingFieldProblem]("scala.Tuple.helpers"), + ProblemFilters.exclude[MissingClassProblem]("scala.Tuple$helpers$"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.Tuples.fromArray"), + ProblemFilters.exclude[DirectMissingMethodProblem]("scala.runtime.Tuples.fromIArray"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.namedTuples"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$namedTuples$"), + ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.stdLibPatches.language#experimental.modularity"), + ProblemFilters.exclude[MissingClassProblem]("scala.runtime.stdLibPatches.language$experimental$modularity$"), + 
ProblemFilters.exclude[DirectMissingMethodProblem]("scala.compiletime.package#package.deferred"), + ProblemFilters.exclude[MissingClassProblem]("scala.annotation.internal.WitnessNames"), ), // Additions since last LTS - Build.ltsDottyVersion -> Seq( + Build.mimaPreviousLTSDottyVersion -> Seq( ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule.ValOrDefDefMethods"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule.ValOrDefDefTypeTest"), ProblemFilters.exclude[DirectMissingMethodProblem]("scala.quoted.Quotes#reflectModule#defnModule.FunctionClass"), @@ -47,10 +61,23 @@ object MiMaFilters { // Only exceptional cases should be added here. // Breaking changes since last reference version - Build.previousDottyVersion -> Seq.empty, // We should never break backwards compatibility + Build.mimaPreviousDottyVersion -> // Seq.empty, // We should never break backwards compatibility + Seq( + // `ReversedMissingMethodProblem`s are acceptable. See comment in `Breaking changes since last LTS`. 
+ ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule.FlexibleType"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule.FlexibleTypeTypeTest"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule.FlexibleTypeMethods"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#SymbolMethods.isSuperAccessor"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule.MethodTypeKind"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeModule.apply"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeMethods.methodTypeKind"), + ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.quoted.Quotes#reflectModule#MethodTypeMethods.isContextual"), + // Change `experimental` annotation to a final class + ProblemFilters.exclude[FinalClassProblem]("scala.annotation.experimental"), + ), // Breaking changes since last LTS - Build.ltsDottyVersion -> Seq( + Build.mimaPreviousLTSDottyVersion -> Seq( // Quotes is assumed to only be implemented by the compiler and on the same version of the library. // It is exceptionally OK to break this compatibility. In these cases, there add new abstract methods that would // potentially not be implemented by others. 
If some other library decides to implement these, @@ -70,34 +97,36 @@ object MiMaFilters { object TastyCore { val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Additions that require a new minor version of tasty core - Build.previousDottyVersion -> Seq( + Build.mimaPreviousDottyVersion -> Seq( + ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.FLEXIBLEtype"), + ProblemFilters.exclude[DirectMissingMethodProblem]("dotty.tools.tasty.TastyFormat.TRACKED"), ), // Additions since last LTS - Build.ltsDottyVersion -> Seq( + Build.mimaPreviousLTSDottyVersion -> Seq( ) ) val BackwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Breaking changes since last LTS - Build.ltsDottyVersion -> Seq.empty // We should never break backwards compatibility + Build.mimaPreviousLTSDottyVersion -> Seq.empty // We should never break backwards compatibility ) } object Interfaces { val ForwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Additions that require a new minor version of interfaces - Build.previousDottyVersion -> Seq( + Build.mimaPreviousDottyVersion -> Seq( ), // Additions since last LTS - Build.ltsDottyVersion -> Seq( + Build.mimaPreviousLTSDottyVersion -> Seq( ) ) val BackwardsBreakingChanges: Map[String, Seq[ProblemFilter]] = Map( // Breaking changes since last LTS - Build.ltsDottyVersion -> Seq.empty // We should never break backwards compatibility + Build.mimaPreviousLTSDottyVersion -> Seq.empty // We should never break backwards compatibility ) } diff --git a/project/Modes.scala b/project/Modes.scala index eddb5a3f1a7b..fcc13dea8a89 100644 --- a/project/Modes.scala +++ b/project/Modes.scala @@ -1,4 +1,4 @@ -import sbt.{Project, ProjectReference, SettingsDefinition} +import sbt.{Project, ProjectReference, SettingsDefinition, Plugins} object Modes { @@ -25,5 +25,9 @@ object Modes { def bootstrappedDependsOn(s: sbt.ClasspathDep[ProjectReference]*)(implicit mode: Mode): Project = if (mode == 
NonBootstrapped) project else project.dependsOn(s: _*) + /** Plugins only if the mode is bootstrapped */ + def bootstrappedEnablePlugins(ns: Plugins*)(implicit mode: Mode): Project = + if (mode == NonBootstrapped) project else project.enablePlugins(ns: _*) + } } diff --git a/project/RepublishPlugin.scala b/project/RepublishPlugin.scala new file mode 100644 index 000000000000..5611af798b33 --- /dev/null +++ b/project/RepublishPlugin.scala @@ -0,0 +1,557 @@ +package dotty.tools.sbtplugin + +import sbt._ +import xerial.sbt.pack.PackPlugin +import xerial.sbt.pack.PackPlugin.autoImport.{packResourceDir, packDir} +import sbt.Keys._ +import sbt.AutoPlugin +import sbt.PublishBinPlugin +import sbt.PublishBinPlugin.autoImport._ +import sbt.io.Using +import sbt.util.CacheImplicits._ + +import scala.collection.mutable +import java.nio.file.Files + +import java.nio.file.attribute.PosixFilePermission +import java.nio.file.{Files, Path} + +import scala.jdk.CollectionConverters._ + +/** This local plugin provides ways of publishing a project classpath and library dependencies to + * .a local repository */ +object RepublishPlugin extends AutoPlugin { + + /** copied from github.com/coursier/coursier */ + private object FileUtil { + + def tryMakeExecutable(path: Path): Boolean = + try { + val perms = Files.getPosixFilePermissions(path).asScala.toSet + + var newPerms = perms + if (perms(PosixFilePermission.OWNER_READ)) + newPerms += PosixFilePermission.OWNER_EXECUTE + if (perms(PosixFilePermission.GROUP_READ)) + newPerms += PosixFilePermission.GROUP_EXECUTE + if (perms(PosixFilePermission.OTHERS_READ)) + newPerms += PosixFilePermission.OTHERS_EXECUTE + + if (newPerms != perms) + Files.setPosixFilePermissions( + path, + newPerms.asJava + ) + + true + } + catch { + case _: UnsupportedOperationException => + false + } + + } + + override def trigger = allRequirements + override def requires = super.requires && PublishBinPlugin && PackPlugin + + object autoImport { + val 
republishProjectRefs = taskKey[Seq[ProjectRef]]("fetch the classpath deps from the project.") + val republishLocalResolved = taskKey[Seq[ResolvedArtifacts]]("resolve local artifacts for distribution.") + val republishAllResolved = taskKey[Seq[ResolvedArtifacts]]("Resolve the dependencies for the distribution") + val republishClasspath = taskKey[Set[File]]("cache the dependencies for the distribution") + val republishFetchLaunchers = taskKey[Set[File]]("cache the launcher deps for the distribution") + val republishFetchCoursier = taskKey[File]("cache the coursier.jar for resolving the local maven repo.") + val republishPrepareBin = taskKey[File]("prepare the bin directory, including launchers and scripts.") + val republishWriteExtraProps = taskKey[Option[File]]("write extra properties for the launchers.") + val republishBinDir = settingKey[File]("where to find static files for the bin dir.") + val republishCoursierDir = settingKey[File]("where to download the coursier launcher jar.") + val republishBinOverrides = settingKey[Seq[File]]("files to override those in bin-dir.") + val republishCommandLibs = settingKey[Seq[(String, List[String])]]("libraries needed for each command.") + val republish = taskKey[File]("cache the dependencies and download launchers for the distribution") + val republishPack = taskKey[File]("do the pack command") + val republishRepo = settingKey[File]("the location to store the republished artifacts.") + val republishLaunchers = settingKey[Seq[(String, String)]]("launchers to download. Sequence of (name, URL).") + val republishCoursier = settingKey[Seq[(String, String)]]("coursier launcher to download. 
Sequence of (name, URL).") + val republishExtraProps = settingKey[Seq[(String, String)]]("extra properties for launchers.") + } + + import autoImport._ + + case class SimpleModuleId(org: String, name: String, revision: String) { + override def toString = s"$org:$name:$revision" + } + case class ResolvedArtifacts(id: SimpleModuleId, jar: Option[File], pom: Option[File]) + + private def republishResolvedArtifacts(resolved: Seq[ResolvedArtifacts], mavenRepo: File, logOpt: Option[Logger]): Set[File] = { + IO.createDirectory(mavenRepo) + resolved.map { ra => + for (log <- logOpt) + log.info(s"[republish] publishing ${ra.id} to $mavenRepo...") + val jarOpt = ra.jar + val pomOpt = ra.pom + + assert(jarOpt.nonEmpty || pomOpt.nonEmpty, s"Neither jar nor pom found for ${ra.id}") + + val pathElems = ra.id.org.split('.').toVector :+ ra.id.name :+ ra.id.revision + val artifactDir = pathElems.foldLeft(mavenRepo)(_ / _) + IO.createDirectory(artifactDir) + for (pom <- pomOpt) IO.copyFile(pom, artifactDir / pom.getName) + for (jar <- jarOpt) IO.copyFile(jar, artifactDir / jar.getName) + artifactDir + }.toSet + } + + private def coursierCmd(jar: File, cache: File): Seq[String] => List[String] = { + val jar0 = jar.getAbsolutePath.toString + val javaHome = sys.props.get("java.home").getOrElse { + throw new MessageOnlyException("java.home property not set") + } + val javaCmd = { + val cmd = if (scala.util.Properties.isWin) "java.exe" else "java" + (file(javaHome) / "bin" / cmd).getAbsolutePath + } + val env = Map("COURSIER_CACHE" -> cache.getAbsolutePath.toString).asJava + val cmdLine0 = Seq(javaCmd, "-jar", jar0) + args => + val cmdLine = cmdLine0 ++ args + // invoke cmdLine with env, but also capture the output + val p = new ProcessBuilder(cmdLine: _*) + .directory(cache) + .inheritIO() + .redirectOutput(ProcessBuilder.Redirect.PIPE) + p.environment().putAll(env) + + val proc = p.start() + val in = proc.getInputStream + val output = { + try { + val src = 
scala.io.Source.fromInputStream(in) + try src.getLines().toList + finally src.close() + } finally { + in.close() + } + } + + proc.waitFor() + + if (proc.exitValue() != 0) + throw new MessageOnlyException(s"Error running coursier.jar with args ${args.mkString(" ")}") + + output + } + + private def resolveMaven2(repo: File): Path = { + java.nio.file.Files.walk(repo.toPath) + .filter(_.getFileName.toString == "maven2") + .findFirst() + .orElseThrow(() => new MessageOnlyException(s"Could not find maven2 directory in $repo")) + .toAbsolutePath() + } + + private def coursierFetch( + coursierJar: File, log: Logger, cacheDir: File, localRepo: File, libs: Seq[String]): Map[String, List[String]] = { + val localRepoPath = localRepo.getAbsolutePath + val localRepoArg = { + val uriPart = { + if (scala.util.Properties.isWin) { + s"/${localRepoPath.replace('\\', '/')}" // extra root slash for Windows paths + } + else { + localRepoPath // no change needed for Unix paths + } + } + s"file://$uriPart" + } + IO.createDirectory(cacheDir) + val cacheDirPath = cacheDir.getAbsolutePath + lazy val maven2RootLocal = resolveMaven2(localRepo) + lazy val maven2RootCache = resolveMaven2(cacheDir) // lazy because cache dir isn't populated until after fetch + val cmd = coursierCmd(coursierJar, cacheDir) + val resolved = for (lib <- libs) yield { + log.info(s"[republish] Fetching $lib with coursier.jar...") + val out = cmd( + Seq( + "fetch", + "--no-default", + "--repository", "central", + "--repository", localRepoArg, + lib + ) + ) + lib -> out.collect { + case s if s.startsWith(localRepoPath) => + maven2RootLocal.relativize(java.nio.file.Paths.get(s)).toString().replace('\\', '/') // format as uri + case s if s.startsWith(cacheDirPath) => + maven2RootCache.relativize(java.nio.file.Paths.get(s)).toString().replace('\\', '/') // format as uri + } + } + resolved.toMap + } + + private def fuzzyFind[V](map: Map[String, V], key: String): V = { + map.collectFirst({ case (k, v) if k.contains(key) => v 
}).getOrElse { + throw new MessageOnlyException(s"Could not find key $key in map $map") + } + } + + /**Resolve the transitive library dependencies of `libs` to `csrCacheDir`. + */ + private def resolveLibraryDeps( + coursierJar: File, + log: Logger, + republishDir: File, + csrCacheDir: File, + localRepo: File, + resolvedLocal: Seq[ResolvedArtifacts], + commandLibs: Seq[(String, List[String])]): Seq[ResolvedArtifacts] = { + + // publish the local artifacts to the local repo, so coursier can resolve them + republishResolvedArtifacts(resolvedLocal, localRepo, logOpt = None) + + val classpaths = coursierFetch(coursierJar, log, csrCacheDir, localRepo, resolvedLocal.map(_.id.toString)) + + if (commandLibs.nonEmpty) { + IO.createDirectory(republishDir / "lib") + for ((command, libs) <- commandLibs) { + val (negated, actual) = libs.partition(_.startsWith("^!")) + val subtractions = negated.map(_.stripPrefix("^!")) + + def compose(libs: List[String]): List[String] = + libs.map(fuzzyFind(classpaths, _)).reduceOption(_ ++ _).map(_.distinct).getOrElse(Nil) + + // Compute the classpath entries + val entries = compose(actual).diff(compose(subtractions)) + // Generate the MANIFEST for the pathing jar + val manifest = new java.util.jar.Manifest(); + manifest.getMainAttributes().put(java.util.jar.Attributes.Name.MANIFEST_VERSION, "1.0"); + manifest.getMainAttributes().put(java.util.jar.Attributes.Name.CLASS_PATH, entries.map(e => s"../maven2/$e").mkString(" ")) + // Write the pathing jar to the Disk + val file = republishDir / "lib" / s"$command.jar" + val jar = new java.util.jar.JarOutputStream(new java.io.FileOutputStream(file), manifest) + jar.close() + } + } + + val maven2Root = resolveMaven2(csrCacheDir) + + def pathToArtifact(p: Path): ResolvedArtifacts = { + // relative path from maven2Root + val relP = maven2Root.relativize(p) + val parts = relP.iterator().asScala.map(_.toString).toVector + val (orgParts :+ name :+ rev :+ artifact) = parts + val id = 
SimpleModuleId(orgParts.mkString("."), name, rev) + if (artifact.endsWith(".jar")) { + ResolvedArtifacts(id, Some(p.toFile), None) + } else { + ResolvedArtifacts(id, None, Some(p.toFile)) + } + } + + java.nio.file.Files.walk(maven2Root) + .filter(p => { + val lastAsString = p.getFileName.toString + lastAsString.endsWith(".pom") || lastAsString.endsWith(".jar") + }) + .map[ResolvedArtifacts](pathToArtifact(_)) + .iterator() + .asScala + .toSeq + } + + private def fetchFilesTask( + libexecT: Def.Initialize[Task[File]], + srcs: SettingKey[Seq[(String, String)]], + strict: Boolean) = Def.task[Set[File]] { + val s = streams.value + val log = s.log + val repoDir = republishRepo.value + val launcherVersions = srcs.value + val libexec = libexecT.value + + val dlCache = s.cacheDirectory / "republish-launchers" + + val store = s.cacheStoreFactory / "versions" + + def work(name: String, dest: File, launcher: String): File = { + val (launcherURL, workFile, prefix, subPart) = { + if (launcher.startsWith("gz+")) { + IO.createDirectory(dlCache) + val launcherURL = url(launcher.stripPrefix("gz+")) + (launcherURL, dlCache / s"$name.gz", "gz", "") + } else if (launcher.startsWith("zip+")) { + IO.createDirectory(dlCache) + val (urlPart, subPath) = launcher.split("!/") match { + case Array(urlPart, subPath) => (urlPart, subPath) + case _ => + throw new MessageOnlyException(s"[republish] Invalid zip+ URL, expected ! 
to mark subpath: $launcher") + } + val launcherURL = url(urlPart.stripPrefix("zip+")) + (launcherURL, dlCache / s"$name.zip", "zip", subPath) + } else { + IO.createDirectory(libexec) + (url(launcher), dest, "", "") + } + } + IO.delete(workFile) + Using.urlInputStream(launcherURL) { in => + log.info(s"[republish] Downloading $launcherURL to $workFile...") + IO.transfer(in, workFile) + log.info(s"[republish] Downloaded $launcherURL to $workFile...") + } + if (prefix == "gz") { + IO.delete(dest) + Using.fileInputStream(workFile) { in => + Using.gzipInputStream(in) { gzIn => + IO.transfer(gzIn, dest) + } + } + log.info(s"[republish] uncompressed gz file $workFile to $dest...") + IO.delete(workFile) + } else if (prefix == "zip") { + IO.delete(dest) + val files = IO.unzip(workFile, dlCache, new ExactFilter(subPart)) + val extracted = files.headOption.getOrElse(throw new MessageOnlyException(s"[republish] No files extracted from $workFile matching $subPart")) + log.info(s"[republish] unzipped $workFile to $extracted...") + IO.move(extracted, dest) + log.info(s"[republish] moved $extracted to $dest...") + IO.delete(workFile) + } + FileUtil.tryMakeExecutable(dest.toPath) + dest + } + + val allLaunchers = { + if (strict && launcherVersions.isEmpty) + throw new MessageOnlyException(s"[republish] No launchers to fetch, check the build configuration for ${srcs.key.label}.") + + for ((name, launcher) <- launcherVersions) yield { + val dest = libexec / name + + val id = name.replaceAll("[^a-zA-Z0-9]", "_") + + val fetchAction = Tracked.inputChanged[String, File](store.make(id)) { (inChanged, launcher) => + if (inChanged || !Files.exists(dest.toPath)) { + work(name, dest, launcher) + } else { + log.info(s"[republish] Using cached $name launcher ($launcher).") + dest + } + } + + fetchAction(launcher) + } + } + allLaunchers.toSet + } + + override val projectSettings: Seq[Def.Setting[_]] = Def.settings( + republishCoursierDir := republishRepo.value / "coursier", + republishLaunchers 
:= Seq.empty, + republishCoursier := Seq.empty, + republishBinOverrides := Seq.empty, + republishExtraProps := Seq.empty, + republishCommandLibs := Seq.empty, + republishLocalResolved / republishProjectRefs := { + val proj = thisProjectRef.value + val deps = buildDependencies.value + + deps.classpathRefs(proj) + }, + republishLocalResolved := Def.taskDyn { + val deps = (republishLocalResolved / republishProjectRefs).value + val publishAllLocalBin = deps.map({ d => ((d / publishLocalBin / packagedArtifacts)) }).join + val resolveId = deps.map({ d => ((d / projectID)) }).join + Def.task { + val published = publishAllLocalBin.value + val ids = resolveId.value + + ids.zip(published).map({ case (id, as) => + val simpleId = { + val name0 = id.crossVersion match { + case cv: CrossVersion.Binary => + // projectID does not add binary suffix + (s"${id.name}_${cv.prefix}${cv.suffix}3") + .ensuring(!id.name.endsWith("_3") && id.revision.startsWith("3.")) + case _ => id.name + } + SimpleModuleId(id.organization, name0, id.revision) + } + var jarOrNull: File = null + var pomOrNull: File = null + as.foreach({ case (a, f) => + if (a.`type` == "jar") { + jarOrNull = f + } else if (a.`type` == "pom") { + pomOrNull = f + } + }) + assert(jarOrNull != null, s"Could not find jar for ${id}") + assert(pomOrNull != null, s"Could not find pom for ${id}") + ResolvedArtifacts(simpleId, Some(jarOrNull), Some(pomOrNull)) + }) + } + }.value, + republishAllResolved := { + val resolvedLocal = republishLocalResolved.value + val coursierJar = republishFetchCoursier.value + val report = (thisProjectRef / updateFull).value + val s = streams.value + val lm = (republishAllResolved / dependencyResolution).value + val cacheDir = republishRepo.value + val commandLibs = republishCommandLibs.value + + val log = s.log + val csrCacheDir = s.cacheDirectory / "csr-cache" + val localRepo = s.cacheDirectory / "localRepo" / "maven2" + + // resolve the transitive dependencies of the local artifacts + val 
resolvedLibs = resolveLibraryDeps( + coursierJar, log, cacheDir, csrCacheDir, localRepo, resolvedLocal, commandLibs) + + // the combination of local artifacts and resolved transitive dependencies + val merged = + (resolvedLocal ++ resolvedLibs).groupBy(_.id).values.map(_.reduce { (ra1, ra2) => + val jar = ra1.jar.orElse(ra2.jar) + val pom = ra1.pom.orElse(ra2.pom) + ResolvedArtifacts(ra1.id, jar, pom) + }) + + merged.toSeq + }, + republishClasspath := { + val s = streams.value + val resolved = republishAllResolved.value + val cacheDir = republishRepo.value + republishResolvedArtifacts(resolved, cacheDir / "maven2", logOpt = Some(s.log)) + }, + republishFetchLaunchers := { + fetchFilesTask(republishPrepareBin, republishLaunchers, strict = true).value + }, + republishFetchCoursier := { + fetchFilesTask(republishCoursierDir.toTask, republishCoursier, strict = true).value.head + }, + republishPrepareBin := { + val baseDir = baseDirectory.value + val srcBin = republishBinDir.value + val overrides = republishBinOverrides.value + val repoDir = republishRepo.value + + val targetBin = repoDir / "bin" + IO.copyDirectory(srcBin, targetBin) + overrides.foreach { dir => + IO.copyDirectory(dir, targetBin, overwrite = true) + } + targetBin + }, + republishWriteExtraProps := { + val s = streams.value + val log = s.log + val extraProps = republishExtraProps.value + if (extraProps.isEmpty) { + log.info("[republish] No extra properties to write.") + None + } + else { + val repoDir = republishRepo.value + val propsFile = repoDir / "EXTRA_PROPERTIES" + log.info(s"[republish] Writing extra properties to $propsFile...") + Using.fileWriter()(propsFile) { writer => + extraProps.foreach { case (k, v) => + writer.write(s"$k:=$v\n") + } + } + Some(propsFile) + } + }, + republish := { + val cacheDir = republishRepo.value + val artifacts = republishClasspath.value + val launchers = republishFetchLaunchers.value + val extraProps = republishWriteExtraProps.value + cacheDir + }, + republishPack := 
{ + val cacheDir = republish.value + val s = streams.value + val log = s.log + val distDir = target.value / packDir.value + val progVersion = version.value + + IO.createDirectory(distDir) + for ((path, dir) <- packResourceDir.value) { + val target = distDir / dir + IO.copyDirectory(path, target) + } + + locally { + // everything in this block is copied from sbt-pack plugin + import scala.util.Try + import java.time.format.DateTimeFormatterBuilder + import java.time.format.SignStyle + import java.time.temporal.ChronoField.* + import java.time.ZoneId + import java.time.Instant + import java.time.ZonedDateTime + import java.time.ZonedDateTime + import java.util.Locale + import java.util.Date + val base: File = new File(".") // Using the working directory as base for readability + + // Copy explicitly added dependencies + val mapped: Seq[(File, String)] = mappings.value + log.info("[republish] Copying explicit dependencies:") + val explicitDepsJars = for ((file, path) <- mapped) yield { + log.info(file.getPath) + val dest = distDir / path + IO.copyFile(file, dest, true) + dest + } + + def write(path: String, content: String) { + val p = distDir / path + IO.write(p, content) + } + + val humanReadableTimestampFormatter = new DateTimeFormatterBuilder() + .parseCaseInsensitive() + .appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral('-') + .appendValue(MONTH_OF_YEAR, 2) + .appendLiteral('-') + .appendValue(DAY_OF_MONTH, 2) + .appendLiteral(' ') + .appendValue(HOUR_OF_DAY, 2) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2) + .appendOffset("+HHMM", "Z") + .toFormatter(Locale.US) + + // Retrieve build time + val systemZone = ZoneId.systemDefault().normalized() + val timestamp = ZonedDateTime.ofInstant(Instant.ofEpochMilli(new Date().getTime), systemZone) + val buildTime = humanReadableTimestampFormatter.format(timestamp) + + // Check the current Git revision + val gitRevision: String = Try { + if 
((base / ".git").exists()) { + log.info("[republish] Checking the git revision of the current project") + sys.process.Process("git rev-parse HEAD").!! + } else { + "unknown" + } + }.getOrElse("unknown").trim + + + // Output the version number and Git revision + write("VERSION", s"version:=${progVersion}\nrevision:=${gitRevision}\nbuildTime:=${buildTime}\n") + } + + + distDir + } + ) +} diff --git a/project/Scala2LibraryBootstrappedMiMaFilters.scala b/project/Scala2LibraryBootstrappedMiMaFilters.scala index bd149d5a910b..102a2a50e9d4 100644 --- a/project/Scala2LibraryBootstrappedMiMaFilters.scala +++ b/project/Scala2LibraryBootstrappedMiMaFilters.scala @@ -78,9 +78,6 @@ object Scala2LibraryBootstrappedMiMaFilters { "scala.collection.IterableOnceOps#Maximized.this", // New in 2.13.11: private inner class "scala.util.Properties.", "scala.util.Sorting.scala$util$Sorting$$mergeSort$default$5", - // New in 2.13.12 -- can be removed once scala/scala#10549 lands in 2.13.13 - // and we take the upgrade here - "scala.collection.immutable.MapNodeRemoveAllSetNodeIterator.next", ).map(ProblemFilters.exclude[DirectMissingMethodProblem]) } ) @@ -175,6 +172,10 @@ object Scala2LibraryBootstrappedMiMaFilters { "scala.collection.mutable.LinkedHashSet.defaultLoadFactor", // private[collection] final def "scala.collection.mutable.LinkedHashSet.defaultinitialSize", // private[collection] final def "scala.collection.mutable.OpenHashMap.nextPositivePowerOfTwo", // private[mutable] def + // New in 2.13.13 + "scala.collection.mutable.ArrayBuffer.resizeUp", // private[mutable] def + // New in 2.13.14 + "scala.util.Properties.consoleIsTerminal", // private[scala] lazy val ).map(ProblemFilters.exclude[DirectMissingMethodProblem]) ++ Seq( // MissingFieldProblem: static field ... in object ... 
does not have a correspondent in other version "scala.Array.UnapplySeqWrapper", diff --git a/project/plugins.sbt b/project/plugins.sbt index c94d4d5afe8d..59e58007a4a0 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -6,7 +6,7 @@ libraryDependencySchemes += "org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always -addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.12.0") +addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.16.0") addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.21") diff --git a/project/resources/referenceReplacements/sidebar.yml b/project/resources/referenceReplacements/sidebar.yml index de0f3d7bec2c..240085b681f2 100644 --- a/project/resources/referenceReplacements/sidebar.yml +++ b/project/resources/referenceReplacements/sidebar.yml @@ -77,6 +77,7 @@ subsection: - page: reference/other-new-features/safe-initialization.md - page: reference/other-new-features/type-test.md - page: reference/other-new-features/experimental-defs.md + - page: reference/other-new-features/binary-literals.md - title: Other Changed Features directory: changed-features index: reference/changed-features/changed-features.md diff --git a/project/scripts/bisect.scala b/project/scripts/bisect.scala index e97fbb3fafd7..c429fe06f81b 100755 --- a/project/scripts/bisect.scala +++ b/project/scripts/bisect.scala @@ -1,3 +1,4 @@ +//> using jvm 17 // Maximal JDK version which can be used with all Scala 3 versions, can be overriden via command line arguments '--jvm=21' /* This script will bisect a problem with the compiler based on success/failure of the validation script passed as an argument. It starts with a fast bisection on released nightly builds. 
@@ -124,6 +125,7 @@ object ValidationScript: def tmpScalaCliScript(command: String, args: Seq[String]): File = tmpScript(s""" |#!/usr/bin/env bash + |export JAVA_HOME=${sys.props("java.home")} |scala-cli ${command} -S "$$1" --server=false ${args.mkString(" ")} |""".stripMargin ) @@ -242,8 +244,10 @@ class CommitBisect(validationScript: File, shouldFail: Boolean, bootstrapped: Bo val bisectRunScript = raw""" |scalaVersion=$$(sbt "print ${scala3CompilerProject}/version" | tail -n1) |rm -rf out - |sbt "clean; set every doc := new File(\"unused\"); set scaladoc/Compile/resourceGenerators := (\`${scala3Project}\`/Compile/resourceGenerators).value; ${scala3Project}/publishLocal" - |${validationCommandStatusModifier}${validationScript.getAbsolutePath} "$$scalaVersion" + |export JAVA_HOME=${sys.props("java.home")} + |(sbt "clean; set every doc := new File(\"unused\"); set scaladoc/Compile/resourceGenerators := (\`${scala3Project}\`/Compile/resourceGenerators).value; ${scala3Project}/publishLocal" \ + | || (echo "Failed to build compiler, skip $$scalaVersion"; git bisect skip) \ + |) && ${validationCommandStatusModifier}${validationScript.getAbsolutePath} "$$scalaVersion" """.stripMargin "git bisect start".! s"git bisect bad $fistBadHash".! 
diff --git a/project/scripts/bootstrappedOnlyCmdTests b/project/scripts/bootstrappedOnlyCmdTests index 4e18e3a1d4a4..11c35a7028cc 100755 --- a/project/scripts/bootstrappedOnlyCmdTests +++ b/project/scripts/bootstrappedOnlyCmdTests @@ -14,32 +14,38 @@ echo "testing scala.quoted.Expr.run from sbt scala" "$SBT" ";scala3-compiler-bootstrapped/scalac -with-compiler tests/run-staging/quote-run.scala; scala3-compiler-bootstrapped/scala -with-compiler Test" > "$tmp" grep -qe "val a: scala.Int = 3" "$tmp" - # setup for `scalac`/`scala` script tests -"$SBT" dist/pack +"$SBT" "$DIST_PROJECT/pack" + +echo "capturing scala version from $DIST_DIR/target/pack/VERSION" +IFS=':=' read -ra versionProps < "$ROOT/$DIST_DIR/target/pack/VERSION" # temporarily set IFS to ':=' to split versionProps +[ ${#versionProps[@]} -eq 3 ] && \ + [ ${versionProps[0]} = "version" ] && \ + [ -n ${versionProps[2]} ] || die "Expected non-empty 'version' property in $ROOT/$DIST_DIR/target/pack/VERSION" +scala_version=${versionProps[2]} # check that `scalac` compiles and `scala` runs it echo "testing ./bin/scalac and ./bin/scala" clear_out "$OUT" ./bin/scalac "$SOURCE" -d "$OUT" -./bin/scala -classpath "$OUT" "$MAIN" > "$tmp" +./bin/scala -classpath "$OUT" -M "$MAIN" > "$tmp" test "$EXPECTED_OUTPUT" = "$(cat "$tmp")" # Test scaladoc based on compiled classes ./bin/scaladoc -project Staging -d "$OUT1" "$OUT" clear_out "$OUT1" -# check that `scalac` and `scala` works for staging +# check that `scalac` and `scala` works for staging. 
clear_out "$OUT" ./bin/scalac tests/run-staging/i4044f.scala -d "$OUT" -./bin/scala -with-compiler -classpath "$OUT" Test > "$tmp" +./bin/scala -with-compiler -classpath "$OUT" -M Test > "$tmp" # check that `scalac -from-tasty` compiles and `scala` runs it echo "testing ./bin/scalac -from-tasty and scala -classpath" clear_out "$OUT1" ./bin/scalac "$SOURCE" -d "$OUT" ./bin/scalac -from-tasty -d "$OUT1" "$OUT/$TASTY" -./bin/scala -classpath "$OUT1" "$MAIN" > "$tmp" +./bin/scala -classpath "$OUT1" -M "$MAIN" > "$tmp" test "$EXPECTED_OUTPUT" = "$(cat "$tmp")" # check that `sbt scalac -decompile` runs @@ -71,7 +77,7 @@ echo "testing sbt scalac with suspension" clear_out "$OUT" "$SBT" "scala3-compiler-bootstrapped/scalac -d $OUT tests/pos-macros/macros-in-same-project-1/Bar.scala tests/pos-macros/macros-in-same-project-1/Foo.scala" > "$tmp" -# echo ":quit" | ./dist/target/pack/bin/scala # not supported by CI +# echo ":quit" | ./$DIST_DIR/target/pack/bin/scala # not supported by CI echo "testing ./bin/scaladoc" clear_out "$OUT1" @@ -91,10 +97,17 @@ clear_out "$OUT" grep -qe "Usage: scalac " "$tmp" ./bin/scala -help > "$tmp" 2>&1 -grep -qe "Usage: scala " "$tmp" +grep -qe "See 'scala --help' to read about a specific subcommand." 
"$tmp" ./bin/scala -d hello.jar tests/run/hello.scala ls hello.jar +clear_cli_dotfiles tests/run + +# check that `scala` runs scripts with args +echo "testing ./bin/scala with arguments" +./bin/scala run project/scripts/echoArgs.sc -- abc true 123 > "$tmp" +test "$EXPECTED_OUTPUT_ARGS" = "$(cat "$tmp")" +clear_cli_dotfiles project/scripts echo "testing i12973" clear_out "$OUT" @@ -102,14 +115,6 @@ clear_out "$OUT" echo "Bug12973().check" | TERM=dumb ./bin/scala -cp "$OUT/out.jar" > "$tmp" 2>&1 grep -qe "Bug12973 is fixed" "$tmp" -echo "capturing scala version from dist/target/pack/VERSION" -cwd=$(pwd) -IFS=':=' read -ra versionProps < "$cwd/dist/target/pack/VERSION" # temporarily set IFS to ':=' to split versionProps -[ ${#versionProps[@]} -eq 3 ] && \ - [ ${versionProps[0]} = "version" ] && \ - [ -n ${versionProps[2]} ] || die "Expected non-empty 'version' property in $cwd/dist/target/pack/VERSION" -scala_version=${versionProps[2]} - echo "testing -sourcepath with incremental compile: inlining changed inline def into a def" # Here we will test that a changed inline method symbol loaded from the sourcepath (-sourcepath compiler option) # will have its `defTree` correctly set when its method body is required for inlining. diff --git a/project/scripts/buildScalaBinary b/project/scripts/buildScalaBinary new file mode 100755 index 000000000000..7fc5275e5d8d --- /dev/null +++ b/project/scripts/buildScalaBinary @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +set -e + +ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/../.." +SBT="$ROOT/project/scripts/sbt" # if run on CI + +# set the $DIST_PROJECT and $DIST_DIR variables +source "$ROOT/bin/common-platform" + +# build the scala/scalac/scaladoc binary, where scala is native for the current platform. 
+"$SBT" "$DIST_PROJECT/pack" diff --git a/project/scripts/cmdTestsCommon.inc.sh b/project/scripts/cmdTestsCommon.inc.sh index a37ab757c057..bccb4aa56ac1 100644 --- a/project/scripts/cmdTestsCommon.inc.sh +++ b/project/scripts/cmdTestsCommon.inc.sh @@ -9,11 +9,15 @@ SOURCE="tests/pos/HelloWorld.scala" MAIN="HelloWorld" TASTY="HelloWorld.tasty" EXPECTED_OUTPUT="hello world" +EXPECTED_OUTPUT_ARGS="[0:abc],[1:true],[2:123]" OUT=$(mktemp -d) OUT1=$(mktemp -d) tmp=$(mktemp) +# set the $DIST_PROJECT and $DIST_DIR variables +source "$ROOT/bin/common-platform" + die () { echo >&2 "$@" exit 1 @@ -24,3 +28,16 @@ clear_out() local out="$1" rm -rf "$out"/* } + +clear_cli_dotfiles() +{ + local out="$1" + rm -rf "$out"/.bsp + rm -rf "$out"/.scala-build + + rm -f "$ROOT"/.bsp/scala.json + if [ -z "$(ls -A "$ROOT"/.bsp)" ]; then + rm -rf "$ROOT"/.bsp + fi + rm -rf "$ROOT"/.scala-build +} diff --git a/project/scripts/echoArgs.sc b/project/scripts/echoArgs.sc new file mode 100644 index 000000000000..cb9acbb6ad2e --- /dev/null +++ b/project/scripts/echoArgs.sc @@ -0,0 +1,6 @@ +// This is a Scala CLI script + +val formatted = + (for (arg, i) <- args.zipWithIndex yield + s"[$i:$arg]").mkString(",") +println(formatted) diff --git a/project/scripts/native-integration/bashTests b/project/scripts/native-integration/bashTests new file mode 100755 index 000000000000..5fb77355238c --- /dev/null +++ b/project/scripts/native-integration/bashTests @@ -0,0 +1,84 @@ +#!/usr/bin/env bash + +set -eux + +#/*---------------*\ +# * SETUP VARS *# +# *---------------*/ + +ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >& /dev/null && pwd)/../../.." 
+ +SBT="$ROOT/project/scripts/sbt" # if run on CI +# SBT="sbt" # if run locally + +# set the $DIST_PROJECT and $DIST_DIR variables +source "$ROOT/bin/common-platform" + +die () { + echo >&2 "$@" + exit 1 +} + +PROG_HOME="$DIST_DIR/target/pack" + +SOURCE="$ROOT/tests/pos/HelloWorld.scala" +SOURCE_VERSION="$ROOT/project/scripts/native-integration/reportScalaVersion.scala" + +clear_cli_dotfiles() +{ + local out="$1" + rm -rf "$out"/.bsp + rm -rf "$out"/.scala-build + + rm -f "$ROOT"/.bsp/scala.json + if [ -z "$(ls -A "$ROOT"/.bsp)" ]; then + rm -rf "$ROOT"/.bsp + fi + rm -rf "$ROOT"/.scala-build +} + +#/*---------------*\ +# * INITIALIZE *# +# *---------------*/ + +# build the distribution +"$SBT" "$DIST_PROJECT/pack" + +SCALA_VERSION="" +# iterate through lines in VERSION_SRC +while IFS= read -r line; do + # if line starts with "version:=" then extract the version + if [[ "$line" == version:=* ]]; then + SCALA_VERSION="${line#version:=}" + break + fi +done < "$PROG_HOME/VERSION" + +if [ -z "$SCALA_VERSION" ]; then + die "Could not find scala version in $PROG_HOME/VERSION" +fi + +#/*-------------------*\ +# * TESTING BEGINS *# +# *-------------------*/ + +echo "assert native launcher matches expected version" +if [ -z "$LAUNCHER_EXPECTED_PROJECT" ]; then + die "LAUNCHER_EXPECTED_PROJECT is not set in the environment" +fi +test "$LAUNCHER_EXPECTED_PROJECT" = "$DIST_PROJECT" + +echo "testing version output (default)" +std_output=$("$PROG_HOME/bin/scala" version --scala-version) +test "$SCALA_VERSION" = "$std_output" + +echo "testing run command" +std_output=$("$PROG_HOME/bin/scala" run "$SOURCE" --power --offline --server=false) +test "hello world" = "$std_output" +clear_cli_dotfiles "$ROOT/tests/pos" + +echo "testing run command (-with-compiler)" +std_output=$("$PROG_HOME/bin/scala" run "$SOURCE_VERSION" -with-compiler --power --offline --server=false) +test "$SCALA_VERSION" = "$std_output" +clear_cli_dotfiles "$ROOT/project/scripts/native-integration" + diff --git 
a/project/scripts/native-integration/reportScalaVersion.scala b/project/scripts/native-integration/reportScalaVersion.scala new file mode 100644 index 000000000000..dc6e93708a48 --- /dev/null +++ b/project/scripts/native-integration/reportScalaVersion.scala @@ -0,0 +1,4 @@ +// To be ran by Scala CLI (requires -with-compiler command line option) + +@main def reportScalaVersion: Unit = + println(dotty.tools.dotc.config.Properties.versionNumberString) diff --git a/project/scripts/native-integration/winTests.bat b/project/scripts/native-integration/winTests.bat new file mode 100755 index 000000000000..a85b2c8c2531 --- /dev/null +++ b/project/scripts/native-integration/winTests.bat @@ -0,0 +1,19 @@ +@echo off +setlocal + +@rem paths are relative to the root project directory +set "_PREFIX=dist\win-x86_64\target\pack" +set "_SOURCE=tests\pos\HelloWorld.scala" +set "_OUT_DIR=out" + +@rem if-tests mimic the non-existing bash instruction 'set -e'. +call "%_PREFIX%\bin\scalac.bat" "@project\scripts\options" "%_SOURCE%" +if not %ERRORLEVEL%==0 endlocal& exit /b 1 + +call "%_PREFIX%\bin\scalac.bat" -d "%_OUT_DIR%" "%_SOURCE%" +if not %ERRORLEVEL%==0 endlocal& exit /b 1 + +call "%_PREFIX%\bin\scala.bat" --power -classpath "%_OUT_DIR%" -M HelloWorld --offline --server=false +if not %ERRORLEVEL%==0 endlocal& exit /b 1 + +endlocal diff --git a/project/scripts/winCmdTests b/project/scripts/winCmdTests index d287b60992b2..fe6a43c7f68f 100644 --- a/project/scripts/winCmdTests +++ b/project/scripts/winCmdTests @@ -1,10 +1,10 @@ #!/usr/bin/env bash set -e -PREFIX="dist/target/pack" +PREFIX="dist/win-x86_64/target/pack" SOURCE="tests/pos/HelloWorld.scala" $PREFIX/bin/scalac @project/scripts/options "$SOURCE" $PREFIX/bin/scalac -d out "$SOURCE" -$PREFIX/bin/scala -classpath out HelloWorld -$PREFIX/bin/scala -classpath out -J-Xmx512m HelloWorld +$PREFIX/bin/scala --power -classpath out -M HelloWorld --offline '--server=false' +$PREFIX/bin/scala --power -classpath out -J -Xmx512m -M 
HelloWorld --offline '--server=false' mkdir -p _site && $PREFIX/bin/scaladoc -d _site -project Hello "$SOURCE" diff --git a/project/scripts/winCmdTests.bat b/project/scripts/winCmdTests.bat index ee9b8237c694..903f74d7ab98 100644 --- a/project/scripts/winCmdTests.bat +++ b/project/scripts/winCmdTests.bat @@ -2,7 +2,7 @@ setlocal @rem paths are relative to the root project directory -set "_PREFIX=dist\target\pack" +set "_PREFIX=dist\win-x86_64\target\pack" set "_SOURCE=tests\pos\HelloWorld.scala" set "_OUT_DIR=out" set "_SITE_DIR=_site" @@ -14,10 +14,10 @@ if not %ERRORLEVEL%==0 endlocal& exit /b 1 call "%_PREFIX%\bin\scalac.bat" -d "%_OUT_DIR%" "%_SOURCE%" if not %ERRORLEVEL%==0 endlocal& exit /b 1 -call "%_PREFIX%\bin\scala.bat" -classpath "%_OUT_DIR%" HelloWorld +call "%_PREFIX%\bin\scala.bat" --power -classpath "%_OUT_DIR%" -M HelloWorld --offline --server=false if not %ERRORLEVEL%==0 endlocal& exit /b 1 -call "%_PREFIX%\bin\scala.bat" -classpath "%_OUT_DIR%" -J-Xmx512m HelloWorld +call "%_PREFIX%\bin\scala.bat" --power -classpath "%_OUT_DIR%" -J -Xmx512m -M HelloWorld --offline --server=false if not %ERRORLEVEL%==0 endlocal& exit /b 1 if not exist "%_SITE_DIR%" mkdir "%_SITE_DIR%" diff --git a/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java b/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java index 3c3d33c1c1fe..6e19c62b10d0 100644 --- a/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java +++ b/sbt-bridge/src/dotty/tools/xsbt/IncrementalCallback.java @@ -57,4 +57,14 @@ public void generatedLocalClass(SourceFile source, java.nio.file.Path classFile) public void generatedNonLocalClass(SourceFile source, java.nio.file.Path classFile, String binaryClassName, String srcClassName) { delegate.generatedNonLocalClass(asVirtualFile.apply(source), classFile, binaryClassName, srcClassName); } + + @Override + public void apiPhaseCompleted() { + delegate.apiPhaseCompleted(); + } + + @Override + public void dependencyPhaseCompleted() { + 
delegate.dependencyPhaseCompleted(); + } } diff --git a/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java b/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java index 597a964eb944..30e25194736d 100644 --- a/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java +++ b/sbt-bridge/src/dotty/tools/xsbt/OldIncrementalCallback.java @@ -71,4 +71,14 @@ public void generatedLocalClass(SourceFile source, java.nio.file.Path classFile) public void generatedNonLocalClass(SourceFile source, java.nio.file.Path classFile, String binaryClassName, String srcClassName) { delegate.generatedNonLocalClass(asJavaFile(source), classFile.toFile(), binaryClassName, srcClassName); } + + @Override + public void apiPhaseCompleted() { + delegate.apiPhaseCompleted(); + } + + @Override + public void dependencyPhaseCompleted() { + delegate.dependencyPhaseCompleted(); + } } diff --git a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala index e47371175de6..0abefe2985c3 100644 --- a/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala +++ b/sbt-bridge/test/xsbt/ExtractUsedNamesSpecification.scala @@ -1,7 +1,6 @@ package xsbt import xsbti.UseScope -import ScalaCompilerForUnitTesting.Callbacks import org.junit.{ Test, Ignore } import org.junit.Assert._ @@ -227,9 +226,9 @@ class ExtractUsedNamesSpecification { def findPatMatUsages(in: String): Set[String] = { val compilerForTesting = new ScalaCompilerForUnitTesting - val (_, Callbacks(callback, _)) = + val output = compilerForTesting.compileSrcs(List(List(sealedClass, in))) - val clientNames = callback.usedNamesAndScopes.view.filterKeys(!_.startsWith("base.")) + val clientNames = output.analysis.usedNamesAndScopes.view.filterKeys(!_.startsWith("base.")) val names: Set[String] = clientNames.flatMap { case (_, usages) => diff --git a/sbt-bridge/test/xsbt/ProductsSpecification.scala b/sbt-bridge/test/xsbt/ProductsSpecification.scala new file mode 100644 index 
000000000000..adee351b5289 --- /dev/null +++ b/sbt-bridge/test/xsbt/ProductsSpecification.scala @@ -0,0 +1,41 @@ +package xsbt + +import org.junit.Assert.* +import org.junit.Ignore +import org.junit.Test + +import java.io.File +import java.nio.file.Path +import java.nio.file.Paths + +class ProductsSpecification { + + @Test + def extractNonLocalClassesNoInc = { + val src = + """package example + | + |class A { + | class B + | def foo = + | class C + |}""".stripMargin + val output = compiler.compileSrcsNoInc(src) + val srcFile = output.srcFiles.head + val (srcNames, binaryNames) = output.analysis.classNames(srcFile).unzip // non local class names + + assertFalse(output.analysis.enabled()) // inc phases are disabled + assertTrue(output.analysis.apis.isEmpty) // extract-api did not run + assertTrue(output.analysis.usedNamesAndScopes.isEmpty) // extract-dependencies did not run + + // note that local class C is not included, classNames only records non local classes + val expectedBinary = Set("example.A", "example.A$B") + assertEquals(expectedBinary, binaryNames.toSet) + + // note that local class C is not included, classNames only records non local classes + val expectedSrc = Set("example.A", "example.A.B") + assertEquals(expectedSrc, srcNames.toSet) + } + + private def compiler = new ScalaCompilerForUnitTesting +} diff --git a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala index f17be692ee50..a5a969ee48b9 100644 --- a/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala +++ b/sbt-bridge/test/xsbt/ScalaCompilerForUnitTesting.scala @@ -3,6 +3,7 @@ package xsbt import xsbti.compile.{CompileProgress, SingleOutput} import java.io.File +import java.nio.file.Path import xsbti._ import sbt.io.IO import xsbti.api.{ ClassLike, Def, DependencyContext } @@ -15,6 +16,8 @@ import dotty.tools.xsbt.CompilerBridge import TestCallback.ExtractedClassDependencies import ScalaCompilerForUnitTesting.Callbacks +case class 
CompileOutput(srcFiles: Seq[VirtualFileRef], classesOutput: Path, analysis: TestCallback, progress: TestCompileProgress) + object ScalaCompilerForUnitTesting: case class Callbacks(analysis: TestCallback, progress: TestCompileProgress) @@ -25,29 +28,24 @@ object ScalaCompilerForUnitTesting: class ScalaCompilerForUnitTesting { def extractEnteredPhases(srcs: String*): Seq[List[String]] = { - val (tempSrcFiles, Callbacks(_, testProgress)) = compileSrcs(srcs*) - val run = testProgress.runs.head - tempSrcFiles.map(src => run.unitPhases(src.id)) + val output = compileSrcs(srcs*) + val run = output.progress.runs.head + output.srcFiles.map(src => run.unitPhases(src.id)) } - def extractTotal(srcs: String*)(extraSourcePath: String*): Int = { - val (tempSrcFiles, Callbacks(_, testProgress)) = compileSrcs(List(srcs.toList), extraSourcePath.toList) - val run = testProgress.runs.head - run.total - } + def extractTotal(srcs: String*)(extraSourcePath: String*): Int = + compileSrcs(List(srcs.toList), extraSourcePath.toList).progress.runs.head.total - def extractProgressPhases(srcs: String*): List[String] = { - val (_, Callbacks(_, testProgress)) = compileSrcs(srcs*) - testProgress.runs.head.phases - } + def extractProgressPhases(srcs: String*): List[String] = + compileSrcs(srcs*).progress.runs.head.phases /** * Compiles given source code using Scala compiler and returns API representation * extracted by ExtractAPI class. */ def extractApiFromSrc(src: String): Seq[ClassLike] = { - val (Seq(tempSrcFile), Callbacks(analysisCallback, _)) = compileSrcs(src) - analysisCallback.apis(tempSrcFile) + val output = compileSrcs(src) + output.analysis.apis(output.srcFiles.head) } /** @@ -55,8 +53,8 @@ class ScalaCompilerForUnitTesting { * extracted by ExtractAPI class. 
*/ def extractApisFromSrcs(srcs: List[String]*): Seq[Seq[ClassLike]] = { - val (tempSrcFiles, Callbacks(analysisCallback, _)) = compileSrcs(srcs.toList) - tempSrcFiles.map(analysisCallback.apis) + val output = compileSrcs(srcs.toList) + output.srcFiles.map(output.analysis.apis) } /** @@ -73,15 +71,16 @@ class ScalaCompilerForUnitTesting { assertDefaultScope: Boolean = true ): Map[String, Set[String]] = { // we drop temp src file corresponding to the definition src file - val (Seq(_, tempSrcFile), Callbacks(analysisCallback, _)) = compileSrcs(definitionSrc, actualSrc) + val output = compileSrcs(definitionSrc, actualSrc) + val analysis = output.analysis if (assertDefaultScope) for { - (className, used) <- analysisCallback.usedNamesAndScopes - analysisCallback.TestUsedName(name, scopes) <- used + (className, used) <- analysis.usedNamesAndScopes + analysis.TestUsedName(name, scopes) <- used } assert(scopes.size() == 1 && scopes.contains(UseScope.Default), s"$className uses $name in $scopes") - val classesInActualSrc = analysisCallback.classNames(tempSrcFile).map(_._1) - classesInActualSrc.map(className => className -> analysisCallback.usedNames(className)).toMap + val classesInActualSrc = analysis.classNames(output.srcFiles.head).map(_._1) + classesInActualSrc.map(className => className -> analysis.usedNames(className)).toMap } /** @@ -91,11 +90,11 @@ class ScalaCompilerForUnitTesting { * Only the names used in the last src file are returned. 
*/ def extractUsedNamesFromSrc(sources: String*): Map[String, Set[String]] = { - val (srcFiles, Callbacks(analysisCallback, _)) = compileSrcs(sources*) - srcFiles + val output = compileSrcs(sources*) + output.srcFiles .map { srcFile => - val classesInSrc = analysisCallback.classNames(srcFile).map(_._1) - classesInSrc.map(className => className -> analysisCallback.usedNames(className)).toMap + val classesInSrc = output.analysis.classNames(srcFile).map(_._1) + classesInSrc.map(className => className -> output.analysis.usedNames(className)).toMap } .reduce(_ ++ _) } @@ -113,15 +112,15 @@ class ScalaCompilerForUnitTesting { * file system-independent way of testing dependencies between source code "files". */ def extractDependenciesFromSrcs(srcs: List[List[String]]): ExtractedClassDependencies = { - val (_, Callbacks(testCallback, _)) = compileSrcs(srcs) + val analysis = compileSrcs(srcs).analysis - val memberRefDeps = testCallback.classDependencies collect { + val memberRefDeps = analysis.classDependencies collect { case (target, src, DependencyByMemberRef) => (src, target) } - val inheritanceDeps = testCallback.classDependencies collect { + val inheritanceDeps = analysis.classDependencies collect { case (target, src, DependencyByInheritance) => (src, target) } - val localInheritanceDeps = testCallback.classDependencies collect { + val localInheritanceDeps = analysis.classDependencies collect { case (target, src, LocalDependencyByInheritance) => (src, target) } ExtractedClassDependencies.fromPairs(memberRefDeps, inheritanceDeps, localInheritanceDeps) @@ -142,12 +141,24 @@ class ScalaCompilerForUnitTesting { * The sequence of temporary files corresponding to passed snippets and analysis * callback is returned as a result. 
*/ - def compileSrcs(groupedSrcs: List[List[String]], sourcePath: List[String] = Nil): (Seq[VirtualFile], Callbacks) = { + def compileSrcs(groupedSrcs: List[List[String]], sourcePath: List[String] = Nil, compileToJar: Boolean = false, incEnabled: Boolean = true): CompileOutput = { val temp = IO.createTemporaryDirectory - val analysisCallback = new TestCallback + val (forceSbtArgs, analysisCallback) = + if (incEnabled) + (Seq("-Yforce-sbt-phases"), new TestCallback) + else + (Seq.empty, new TestCallbackNoInc) val testProgress = new TestCompileProgress - val classesDir = new File(temp, "classes") - classesDir.mkdir() + val classesOutput = + if (compileToJar) { + val jar = new File(temp, "classes.jar") + jar.createNewFile() + jar + } else { + val dir = new File(temp, "classes") + dir.mkdir() + dir + } val bridge = new CompilerBridge @@ -164,16 +175,16 @@ class ScalaCompilerForUnitTesting { } val virtualSrcFiles = srcFiles.toArray - val classesDirPath = classesDir.getAbsolutePath.toString + val classesOutputPath = classesOutput.getAbsolutePath() val output = new SingleOutput: - def getOutputDirectory() = classesDir + def getOutputDirectory() = classesOutput val maybeSourcePath = if extraFiles.isEmpty then Nil else List("-sourcepath", temp.getAbsolutePath.toString) bridge.run( virtualSrcFiles, new TestDependencyChanges, - Array("-Yforce-sbt-phases", "-classpath", classesDirPath, "-usejavacp", "-d", classesDirPath) ++ maybeSourcePath, + (forceSbtArgs ++: Array("-classpath", classesOutputPath, "-usejavacp", "-d", classesOutputPath)) ++ maybeSourcePath, output, analysisCallback, new TestReporter, @@ -185,17 +196,23 @@ class ScalaCompilerForUnitTesting { srcFiles } - (files.flatten.toSeq, Callbacks(analysisCallback, testProgress)) + CompileOutput(files.flatten.toSeq, classesOutput.toPath, analysisCallback, testProgress) } - def compileSrcs(srcs: String*): (Seq[VirtualFile], Callbacks) = { + def compileSrcs(srcs: String*): CompileOutput = { compileSrcs(List(srcs.toList)) } + 
def compileSrcsNoInc(srcs: String*): CompileOutput = { + compileSrcs(List(srcs.toList), incEnabled = false) + } + + def compileSrcsToJar(srcs: String*): CompileOutput = + compileSrcs(List(srcs.toList), compileToJar = true) + private def prepareSrcFile(baseDir: File, fileName: String, src: String): VirtualFile = { val srcFile = new File(baseDir, fileName) IO.write(srcFile, src) new TestVirtualFile(srcFile.toPath) } } - diff --git a/sbt-bridge/test/xsbti/TestCallback.scala b/sbt-bridge/test/xsbti/TestCallback.scala index 3398590b169a..9f6df75d84f0 100644 --- a/sbt-bridge/test/xsbti/TestCallback.scala +++ b/sbt-bridge/test/xsbti/TestCallback.scala @@ -11,6 +11,10 @@ import DependencyContext._ import java.{util => ju} import ju.Optional +class TestCallbackNoInc extends TestCallback { + override def enabled(): Boolean = false +} + class TestCallback extends AnalysisCallback2 { case class TestUsedName(name: String, scopes: ju.EnumSet[UseScope]) diff --git a/sbt-test/java-compat/moduleInfo/A.scala b/sbt-test/java-compat/moduleInfo/A.scala new file mode 100644 index 000000000000..4b46ae7047d6 --- /dev/null +++ b/sbt-test/java-compat/moduleInfo/A.scala @@ -0,0 +1,2 @@ +// Previously, we crashed trying to parse module-info.class in the empty package. 
+class A diff --git a/sbt-test/java-compat/moduleInfo/build.sbt b/sbt-test/java-compat/moduleInfo/build.sbt new file mode 100644 index 000000000000..a0308b6cb83a --- /dev/null +++ b/sbt-test/java-compat/moduleInfo/build.sbt @@ -0,0 +1,5 @@ +scalaVersion := sys.props("plugin.scalaVersion") + +scalacOptions ++= Seq( + "-experimental" +) diff --git a/sbt-test/java-compat/moduleInfo/test b/sbt-test/java-compat/moduleInfo/test new file mode 100644 index 000000000000..5df2af1f3956 --- /dev/null +++ b/sbt-test/java-compat/moduleInfo/test @@ -0,0 +1 @@ +> compile diff --git a/sbt-test/pipelining/Xearly-tasty-output-inline/a/src/main/scala/a/A.scala b/sbt-test/pipelining/Xearly-tasty-output-inline/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..930e0ee78eb9 --- /dev/null +++ b/sbt-test/pipelining/Xearly-tasty-output-inline/a/src/main/scala/a/A.scala @@ -0,0 +1,10 @@ +package a + +import scala.quoted.* + +object A { + inline def power(x: Double, inline n: Int): Double = + inline if (n == 0) 1.0 + else inline if (n % 2 == 1) x * power(x, n - 1) + else power(x * x, n / 2) +} diff --git a/sbt-test/pipelining/Xearly-tasty-output-inline/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Xearly-tasty-output-inline/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..7055d6d2d006 --- /dev/null +++ b/sbt-test/pipelining/Xearly-tasty-output-inline/b/src/main/scala/b/B.scala @@ -0,0 +1,10 @@ +package b + +import a.A + +object B { + @main def run = + assert(A.power(2.0, 2) == 4.0) + assert(A.power(2.0, 3) == 8.0) + assert(A.power(2.0, 4) == 16.0) +} diff --git a/sbt-test/pipelining/Xearly-tasty-output-inline/build.sbt b/sbt-test/pipelining/Xearly-tasty-output-inline/build.sbt new file mode 100644 index 000000000000..8ca1f4df759e --- /dev/null +++ b/sbt-test/pipelining/Xearly-tasty-output-inline/build.sbt @@ -0,0 +1,16 @@ +// NOTE: in this test, we are explictly fixing the classpath of project `b` to be `a-early.jar` +// to manually test pipelining 
without sbt/zinc managing the classpath. + +// defines a inline method. +lazy val a = project.in(file("a")) + .settings( + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-early.jar").toString), + scalacOptions += "-Ycheck:all", + ) + +// uses the inline method, this is fine as there is no macro classloader involved +lazy val b = project.in(file("b")) + .settings( + Compile / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-early.jar"), + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xearly-tasty-output-inline/project/DottyInjectedPlugin.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-annotation/project/DottyInjectedPlugin.scala rename to sbt-test/pipelining/Xearly-tasty-output-inline/project/DottyInjectedPlugin.scala diff --git a/sbt-test/pipelining/Xearly-tasty-output-inline/test b/sbt-test/pipelining/Xearly-tasty-output-inline/test new file mode 100644 index 000000000000..9779d91ce131 --- /dev/null +++ b/sbt-test/pipelining/Xearly-tasty-output-inline/test @@ -0,0 +1,3 @@ +> a/compile +# uses the early output jar of a +> b/run diff --git a/sbt-test/pipelining/Xearly-tasty-output/a/src/main/scala/a/A.scala b/sbt-test/pipelining/Xearly-tasty-output/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..4b10db3eb385 --- /dev/null +++ b/sbt-test/pipelining/Xearly-tasty-output/a/src/main/scala/a/A.scala @@ -0,0 +1,5 @@ +package a + +object A { + val foo: (1,2,3) = (1,2,3) +} diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/b-alt/.keep b/sbt-test/pipelining/Xearly-tasty-output/b-early-out/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-annotation/b-alt/.keep rename to sbt-test/pipelining/Xearly-tasty-output/b-early-out/.keep diff --git a/sbt-test/pipelining/Xearly-tasty-output/b/src/main/scala/b/B.scala 
b/sbt-test/pipelining/Xearly-tasty-output/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..5e6fa369e309 --- /dev/null +++ b/sbt-test/pipelining/Xearly-tasty-output/b/src/main/scala/b/B.scala @@ -0,0 +1,5 @@ +package b + +object B { + val bar: (1,2,3) = (1,2,3) +} diff --git a/sbt-test/pipelining/Xearly-tasty-output/build.sbt b/sbt-test/pipelining/Xearly-tasty-output/build.sbt new file mode 100644 index 000000000000..739f6cc8c31e --- /dev/null +++ b/sbt-test/pipelining/Xearly-tasty-output/build.sbt @@ -0,0 +1,24 @@ +// NOTE: in this test, we are explictly fixing the classpath of project `c` to be `a-early.jar:b-early-out` +// to manually test pipelining without sbt/zinc managing the classpath. + +// early out is a jar +lazy val a = project.in(file("a")) + .settings( + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-early.jar").toString), + scalacOptions += "-Ycheck:all", + ) + +// early out is a directory +lazy val b = project.in(file("b")) + .settings( + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "b-early-out").toString), + scalacOptions += "-Ycheck:all", + ) + +// reads classpaths from early tasty outputs. No need for extra flags as the full tasty is available. 
+lazy val c = project.in(file("c")) + .settings( + Compile / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "a-early.jar"), + Compile / unmanagedClasspath += Attributed.blank((ThisBuild / baseDirectory).value / "b-early-out"), + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/Xearly-tasty-output/c/src/main/scala/c/C.scala b/sbt-test/pipelining/Xearly-tasty-output/c/src/main/scala/c/C.scala new file mode 100644 index 000000000000..fd1876088778 --- /dev/null +++ b/sbt-test/pipelining/Xearly-tasty-output/c/src/main/scala/c/C.scala @@ -0,0 +1,9 @@ +package c + +import a.A +import b.B + +object C { + val f: 2 = A.foo(1) + val g: 3 = B.bar(2) +} diff --git a/sbt-test/pipelining/Yjava-tasty-enum/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xearly-tasty-output/project/DottyInjectedPlugin.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-enum/project/DottyInjectedPlugin.scala rename to sbt-test/pipelining/Xearly-tasty-output/project/DottyInjectedPlugin.scala diff --git a/sbt-test/pipelining/Xearly-tasty-output/test b/sbt-test/pipelining/Xearly-tasty-output/test new file mode 100644 index 000000000000..52d60facc75b --- /dev/null +++ b/sbt-test/pipelining/Xearly-tasty-output/test @@ -0,0 +1,5 @@ +> a/compile +# same as a but with a directory output +> b/compile +# c uses the early output jar of a and b +> c/compile diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/A.java b/sbt-test/pipelining/Xjava-tasty-annotation/a/src/main/scala/a/A.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/A.java rename to sbt-test/pipelining/Xjava-tasty-annotation/a/src/main/scala/a/A.java diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Xjava-tasty-annotation/a/src/main/scala/a/package.scala similarity index 100% rename from 
sbt-test/pipelining/Yjava-tasty-annotation/a/src/main/scala/a/package.scala rename to sbt-test/pipelining/Xjava-tasty-annotation/a/src/main/scala/a/package.scala diff --git a/sbt-test/pipelining/Yjava-tasty-enum/b-alt/.keep b/sbt-test/pipelining/Xjava-tasty-annotation/b-alt/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-enum/b-alt/.keep rename to sbt-test/pipelining/Xjava-tasty-annotation/b-alt/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Xjava-tasty-annotation/b/src/main/scala/b/B.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-annotation/b/src/main/scala/b/B.scala rename to sbt-test/pipelining/Xjava-tasty-annotation/b/src/main/scala/b/B.scala diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt b/sbt-test/pipelining/Xjava-tasty-annotation/build.sbt similarity index 80% rename from sbt-test/pipelining/Yjava-tasty-annotation/build.sbt rename to sbt-test/pipelining/Xjava-tasty-annotation/build.sbt index 18f6b8224968..440ef8eced59 100644 --- a/sbt-test/pipelining/Yjava-tasty-annotation/build.sbt +++ b/sbt-test/pipelining/Xjava-tasty-annotation/build.sbt @@ -1,7 +1,7 @@ lazy val a = project.in(file("a")) .settings( - scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-annotation-java-tasty.jar").toString), + scalacOptions += "-Xjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-annotation-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-annotation-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xjava-tasty-annotation/project/DottyInjectedPlugin.scala 
similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-from-tasty/project/DottyInjectedPlugin.scala rename to sbt-test/pipelining/Xjava-tasty-annotation/project/DottyInjectedPlugin.scala diff --git a/sbt-test/pipelining/Yjava-tasty-annotation/test b/sbt-test/pipelining/Xjava-tasty-annotation/test similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-annotation/test rename to sbt-test/pipelining/Xjava-tasty-annotation/test diff --git a/sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/A.java b/sbt-test/pipelining/Xjava-tasty-enum/a/src/main/scala/a/A.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/A.java rename to sbt-test/pipelining/Xjava-tasty-enum/a/src/main/scala/a/A.java diff --git a/sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Xjava-tasty-enum/a/src/main/scala/a/package.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-enum/a/src/main/scala/a/package.scala rename to sbt-test/pipelining/Xjava-tasty-enum/a/src/main/scala/a/package.scala diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/a_from_tasty/.keep b/sbt-test/pipelining/Xjava-tasty-enum/b-alt/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-from-tasty/a_from_tasty/.keep rename to sbt-test/pipelining/Xjava-tasty-enum/b-alt/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-enum/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Xjava-tasty-enum/b/src/main/scala/b/B.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-enum/b/src/main/scala/b/B.scala rename to sbt-test/pipelining/Xjava-tasty-enum/b/src/main/scala/b/B.scala diff --git a/sbt-test/pipelining/Yjava-tasty-enum/build.sbt b/sbt-test/pipelining/Xjava-tasty-enum/build.sbt similarity index 88% rename from sbt-test/pipelining/Yjava-tasty-enum/build.sbt rename to sbt-test/pipelining/Xjava-tasty-enum/build.sbt index aca2391987e9..5adbe6ec992e 100644 --- 
a/sbt-test/pipelining/Yjava-tasty-enum/build.sbt +++ b/sbt-test/pipelining/Xjava-tasty-enum/build.sbt @@ -1,8 +1,8 @@ lazy val a = project.in(file("a")) .settings( compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler - scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString), + scalacOptions += "-Xjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-enum-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xjava-tasty-enum/project/DottyInjectedPlugin.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/project/DottyInjectedPlugin.scala rename to sbt-test/pipelining/Xjava-tasty-enum/project/DottyInjectedPlugin.scala diff --git a/sbt-test/pipelining/Yjava-tasty-enum/test b/sbt-test/pipelining/Xjava-tasty-enum/test similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-enum/test rename to sbt-test/pipelining/Xjava-tasty-enum/test diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/A.java b/sbt-test/pipelining/Xjava-tasty-from-tasty/a/src/main/scala/a/A.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/A.java rename to sbt-test/pipelining/Xjava-tasty-from-tasty/a/src/main/scala/a/A.java diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Xjava-tasty-from-tasty/a/src/main/scala/a/package.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-from-tasty/a/src/main/scala/a/package.scala 
rename to sbt-test/pipelining/Xjava-tasty-from-tasty/a/src/main/scala/a/package.scala diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/b-alt/.keep b/sbt-test/pipelining/Xjava-tasty-from-tasty/a_from_tasty/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-from-tasty/b-alt/.keep rename to sbt-test/pipelining/Xjava-tasty-from-tasty/a_from_tasty/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a-check/.keep b/sbt-test/pipelining/Xjava-tasty-from-tasty/b-alt/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/a-check/.keep rename to sbt-test/pipelining/Xjava-tasty-from-tasty/b-alt/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Xjava-tasty-from-tasty/b/src/main/scala/b/B.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-from-tasty/b/src/main/scala/b/B.scala rename to sbt-test/pipelining/Xjava-tasty-from-tasty/b/src/main/scala/b/B.scala diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt b/sbt-test/pipelining/Xjava-tasty-from-tasty/build.sbt similarity index 82% rename from sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt rename to sbt-test/pipelining/Xjava-tasty-from-tasty/build.sbt index e4b15d3d9c7e..3876ce28693d 100644 --- a/sbt-test/pipelining/Yjava-tasty-from-tasty/build.sbt +++ b/sbt-test/pipelining/Xjava-tasty-from-tasty/build.sbt @@ -2,22 +2,22 @@ lazy val a = project.in(file("a")) .settings( compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler - scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar").toString), + scalacOptions += "-Xjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar").toString), scalacOptions += 
"-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-pre-classes"), // send classfiles to a different directory ) // recompile `a` with `-from-tasty` flag to test idempotent read/write java signatures. -// Requires -Yjava-tasty to be set in order to read them. +// Requires -Xjava-tasty to be set in order to read them. lazy val a_from_tasty = project.in(file("a_from_tasty")) .settings( Compile / sources := Seq((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar"), Compile / unmanagedClasspath := Seq(Attributed.blank((ThisBuild / baseDirectory).value / "a-pre-java-tasty.jar")), scalacOptions += "-from-tasty", // read the jar file tasties as the source files - scalacOptions += "-Yjava-tasty", - scalacOptions += "-Yallow-outline-from-tasty", // allow outline signatures to be read with -from-tasty - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a_from_tasty-java-tasty.jar").toString), + scalacOptions += "-Xjava-tasty", + scalacOptions += "-Xallow-outline-from-tasty", // allow outline signatures to be read with -from-tasty + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a_from_tasty-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a_from_tasty-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-generic/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xjava-tasty-from-tasty/project/DottyInjectedPlugin.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-generic/project/DottyInjectedPlugin.scala rename to sbt-test/pipelining/Xjava-tasty-from-tasty/project/DottyInjectedPlugin.scala diff --git a/sbt-test/pipelining/Yjava-tasty-from-tasty/test b/sbt-test/pipelining/Xjava-tasty-from-tasty/test similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-from-tasty/test rename to 
sbt-test/pipelining/Xjava-tasty-from-tasty/test diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/b-alt/.keep b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/a-check/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/b-alt/.keep rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/a-check/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/A.java b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/a/src/main/scala/a/A.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/A.java rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/a/src/main/scala/a/A.java diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/AImport.java b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/a/src/main/scala/a/AImport.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/AImport.java rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/a/src/main/scala/a/AImport.java diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/a/src/main/scala/a/package.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/a/src/main/scala/a/package.scala rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/a/src/main/scala/a/package.scala diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/c-alt/.keep b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/b-alt/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/c-alt/.keep rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/b-alt/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/b/src/main/scala/b/B.scala similarity index 100% rename from 
sbt-test/pipelining/Yjava-tasty-fromjavaobject/b/src/main/scala/b/B.scala rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/b/src/main/scala/b/B.scala diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/b/src/main/scala/b/BImport.scala b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/b/src/main/scala/b/BImport.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/b/src/main/scala/b/BImport.scala rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/b/src/main/scala/b/BImport.scala diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/build.sbt b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/build.sbt similarity index 87% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/build.sbt rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/build.sbt index 6738db3016fa..c51a266c2ee9 100644 --- a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/build.sbt +++ b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/build.sbt @@ -1,8 +1,8 @@ lazy val a = project.in(file("a")) .settings( compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler - scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString), + scalacOptions += "-Xjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-enum-classes"), // send classfiles to a different directory ) @@ -13,8 +13,8 @@ lazy val aCheck = project.in(file("a-check")) scalacOptions += "-Ytest-pickler", // check that the pickler is correct Compile / sources := (a / Compile / sources).value, // use the same sources as a compileOrder := CompileOrder.Mixed, // ensure we send java sources to Scala compiler - 
scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty-2.jar").toString), + scalacOptions += "-Xjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-enum-java-tasty-2.jar").toString), Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-enum-classes-2"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-generic/b-alt/.keep b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/c-alt/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-generic/b-alt/.keep rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/c-alt/.keep diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/c/src/main/scala/c/C.scala b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/c/src/main/scala/c/C.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/c/src/main/scala/c/C.scala rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/c/src/main/scala/c/C.scala diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/c/src/main/scala/c/CImport.scala b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/c/src/main/scala/c/CImport.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/c/src/main/scala/c/CImport.scala rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/c/src/main/scala/c/CImport.scala diff --git a/sbt-test/pipelining/Yjava-tasty-paths/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/project/DottyInjectedPlugin.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-paths/project/DottyInjectedPlugin.scala rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/project/DottyInjectedPlugin.scala diff --git a/sbt-test/pipelining/Yjava-tasty-fromjavaobject/test 
b/sbt-test/pipelining/Xjava-tasty-fromjavaobject/test similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-fromjavaobject/test rename to sbt-test/pipelining/Xjava-tasty-fromjavaobject/test diff --git a/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/A.java b/sbt-test/pipelining/Xjava-tasty-generic/a/src/main/scala/a/A.java similarity index 74% rename from sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/A.java rename to sbt-test/pipelining/Xjava-tasty-generic/a/src/main/scala/a/A.java index 1fcb7e78ae3d..c6e7431f0bbe 100644 --- a/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/A.java +++ b/sbt-test/pipelining/Xjava-tasty-generic/a/src/main/scala/a/A.java @@ -1,6 +1,8 @@ // this test ensures that it is possible to read a generic java class from TASTy. package a; +import java.lang.Object; + public abstract class A { private final int _value; @@ -11,4 +13,8 @@ protected A(final int value) { public int value() { return _value; } + + public int hash(Object any) { + return any.hashCode(); + } } diff --git a/sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Xjava-tasty-generic/a/src/main/scala/a/package.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-generic/a/src/main/scala/a/package.scala rename to sbt-test/pipelining/Xjava-tasty-generic/a/src/main/scala/a/package.scala diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/b-alt/.keep b/sbt-test/pipelining/Xjava-tasty-generic/b-alt/.keep similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-result-types/b-alt/.keep rename to sbt-test/pipelining/Xjava-tasty-generic/b-alt/.keep diff --git a/sbt-test/pipelining/Xjava-tasty-generic/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Xjava-tasty-generic/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..62e58aa72f94 --- /dev/null +++ b/sbt-test/pipelining/Xjava-tasty-generic/b/src/main/scala/b/B.scala @@ -0,0 +1,21 @@ +package b + 
+import a.A + +class B[T] { + val inner = new A[T](23) {} +} + +object B { + + val someAny: Any = 23 + + val inner = (new B[Int]).inner + + @main def test = { + val derived: Int = inner.value + assert(derived == 23, s"actually was $derived") + assert(inner.hash(someAny) == someAny.hashCode, s"actually was ${inner.hash(someAny)}") + } +} + diff --git a/sbt-test/pipelining/Yjava-tasty-generic/build.sbt b/sbt-test/pipelining/Xjava-tasty-generic/build.sbt similarity index 87% rename from sbt-test/pipelining/Yjava-tasty-generic/build.sbt rename to sbt-test/pipelining/Xjava-tasty-generic/build.sbt index 07e2ea56fbaa..c043d597c6cc 100644 --- a/sbt-test/pipelining/Yjava-tasty-generic/build.sbt +++ b/sbt-test/pipelining/Xjava-tasty-generic/build.sbt @@ -1,7 +1,7 @@ lazy val a = project.in(file("a")) .settings( - scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-generic-java-tasty.jar").toString), + scalacOptions += "-Xjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-generic-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-generic-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xjava-tasty-generic/project/DottyInjectedPlugin.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-result-types/project/DottyInjectedPlugin.scala rename to sbt-test/pipelining/Xjava-tasty-generic/project/DottyInjectedPlugin.scala diff --git a/sbt-test/pipelining/Yjava-tasty-generic/test b/sbt-test/pipelining/Xjava-tasty-generic/test similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-generic/test rename to sbt-test/pipelining/Xjava-tasty-generic/test diff --git 
a/sbt-test/pipelining/Yjava-tasty-paths/a/InnerClass.java b/sbt-test/pipelining/Xjava-tasty-paths/a/InnerClass.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-paths/a/InnerClass.java rename to sbt-test/pipelining/Xjava-tasty-paths/a/InnerClass.java diff --git a/sbt-test/pipelining/Yjava-tasty-paths/a/InnerClassGen.java b/sbt-test/pipelining/Xjava-tasty-paths/a/InnerClassGen.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-paths/a/InnerClassGen.java rename to sbt-test/pipelining/Xjava-tasty-paths/a/InnerClassGen.java diff --git a/sbt-test/pipelining/Yjava-tasty-paths/a/InnerClassSub.java b/sbt-test/pipelining/Xjava-tasty-paths/a/InnerClassSub.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-paths/a/InnerClassSub.java rename to sbt-test/pipelining/Xjava-tasty-paths/a/InnerClassSub.java diff --git a/sbt-test/pipelining/Yjava-tasty-paths/a/RawTypes.java b/sbt-test/pipelining/Xjava-tasty-paths/a/RawTypes.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-paths/a/RawTypes.java rename to sbt-test/pipelining/Xjava-tasty-paths/a/RawTypes.java diff --git a/sbt-test/pipelining/Yjava-tasty-paths/a/package.scala b/sbt-test/pipelining/Xjava-tasty-paths/a/package.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-paths/a/package.scala rename to sbt-test/pipelining/Xjava-tasty-paths/a/package.scala diff --git a/sbt-test/pipelining/Yjava-tasty-paths/b/Test.scala b/sbt-test/pipelining/Xjava-tasty-paths/b/Test.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-paths/b/Test.scala rename to sbt-test/pipelining/Xjava-tasty-paths/b/Test.scala diff --git a/sbt-test/pipelining/Yjava-tasty-paths/build.sbt b/sbt-test/pipelining/Xjava-tasty-paths/build.sbt similarity index 88% rename from sbt-test/pipelining/Yjava-tasty-paths/build.sbt rename to sbt-test/pipelining/Xjava-tasty-paths/build.sbt index d63d1f9a3f7e..24a6f582647f 100644 --- 
a/sbt-test/pipelining/Yjava-tasty-paths/build.sbt +++ b/sbt-test/pipelining/Xjava-tasty-paths/build.sbt @@ -1,7 +1,7 @@ lazy val a = project.in(file("a")) .settings( - scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-paths-java-tasty.jar").toString), + scalacOptions += "-Xjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-paths-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-paths-classes"), // send classfiles to a different directory ) diff --git a/sbt-test/pipelining/Xjava-tasty-paths/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xjava-tasty-paths/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/Xjava-tasty-paths/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/Yjava-tasty-paths/test b/sbt-test/pipelining/Xjava-tasty-paths/test similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-paths/test rename to sbt-test/pipelining/Xjava-tasty-paths/test diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/a/src/main/scala/a/A.java b/sbt-test/pipelining/Xjava-tasty-result-types/a/src/main/scala/a/A.java similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-result-types/a/src/main/scala/a/A.java rename to sbt-test/pipelining/Xjava-tasty-result-types/a/src/main/scala/a/A.java diff --git 
a/sbt-test/pipelining/Yjava-tasty-result-types/a/src/main/scala/a/package.scala b/sbt-test/pipelining/Xjava-tasty-result-types/a/src/main/scala/a/package.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-result-types/a/src/main/scala/a/package.scala rename to sbt-test/pipelining/Xjava-tasty-result-types/a/src/main/scala/a/package.scala diff --git a/sbt-test/pipelining/Xjava-tasty-result-types/b-alt/.keep b/sbt-test/pipelining/Xjava-tasty-result-types/b-alt/.keep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Xjava-tasty-result-types/b/src/main/scala/b/B.scala similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-result-types/b/src/main/scala/b/B.scala rename to sbt-test/pipelining/Xjava-tasty-result-types/b/src/main/scala/b/B.scala diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/build.sbt b/sbt-test/pipelining/Xjava-tasty-result-types/build.sbt similarity index 87% rename from sbt-test/pipelining/Yjava-tasty-result-types/build.sbt rename to sbt-test/pipelining/Xjava-tasty-result-types/build.sbt index 512344f0635b..f540de2d6599 100644 --- a/sbt-test/pipelining/Yjava-tasty-result-types/build.sbt +++ b/sbt-test/pipelining/Xjava-tasty-result-types/build.sbt @@ -1,7 +1,7 @@ lazy val a = project.in(file("a")) .settings( - scalacOptions += "-Yjava-tasty", // enable pickling of java signatures - scalacOptions ++= Seq("-Yjava-tasty-output", ((ThisBuild / baseDirectory).value / "a-result-types-java-tasty.jar").toString), + scalacOptions += "-Xjava-tasty", // enable pickling of java signatures + scalacOptions ++= Seq("-Xearly-tasty-output", ((ThisBuild / baseDirectory).value / "a-result-types-java-tasty.jar").toString), scalacOptions += "-Ycheck:all", Compile / classDirectory := ((ThisBuild / baseDirectory).value / "a-result-types-classes"), // send classfiles to a different directory ) diff --git 
a/sbt-test/pipelining/Xjava-tasty-result-types/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/Xjava-tasty-result-types/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/Xjava-tasty-result-types/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/Yjava-tasty-result-types/test b/sbt-test/pipelining/Xjava-tasty-result-types/test similarity index 100% rename from sbt-test/pipelining/Yjava-tasty-result-types/test rename to sbt-test/pipelining/Xjava-tasty-result-types/test diff --git a/sbt-test/pipelining/Yjava-tasty-generic/b/src/main/scala/b/B.scala b/sbt-test/pipelining/Yjava-tasty-generic/b/src/main/scala/b/B.scala deleted file mode 100644 index f132e012a5fc..000000000000 --- a/sbt-test/pipelining/Yjava-tasty-generic/b/src/main/scala/b/B.scala +++ /dev/null @@ -1,15 +0,0 @@ -package b - -import a.A - -class B[T] { - val inner = new A[T](23) {} -} - -object B { - @main def test = { - val derived: Int = (new B[Int]).inner.value - assert(derived == 23, s"actually was $derived") - } -} - diff --git a/sbt-test/pipelining/pipelining-cancel/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-cancel/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..35b27f3d4662 --- /dev/null +++ b/sbt-test/pipelining/pipelining-cancel/a/src/main/scala/a/A.scala @@ -0,0 +1,7 @@ +package a + +import scala.util.Success + +object A { + val foo: (1,2,3) = (1,2,3) +} diff --git a/sbt-test/pipelining/pipelining-cancel/a/src/test/scala/a/Hello.scala b/sbt-test/pipelining/pipelining-cancel/a/src/test/scala/a/Hello.scala new file mode 100644 index 
000000000000..629f1c0e6cfe --- /dev/null +++ b/sbt-test/pipelining/pipelining-cancel/a/src/test/scala/a/Hello.scala @@ -0,0 +1,10 @@ +package a + +import org.junit.Test + +class Hello { + + @Test def test(): Unit = { + assert(A.foo == (1,2,3)) + } +} diff --git a/sbt-test/pipelining/pipelining-cancel/b/src/main/scala/b/Hello.scala b/sbt-test/pipelining/pipelining-cancel/b/src/main/scala/b/Hello.scala new file mode 100644 index 000000000000..bbb4eb5ba7f7 --- /dev/null +++ b/sbt-test/pipelining/pipelining-cancel/b/src/main/scala/b/Hello.scala @@ -0,0 +1,9 @@ +package b + +import a.A + +object Hello { + @main def test(): Unit = { + assert(A.foo == (1,2,3)) + } +} diff --git a/sbt-test/pipelining/pipelining-cancel/build.sbt b/sbt-test/pipelining/pipelining-cancel/build.sbt new file mode 100644 index 000000000000..f23e65895c78 --- /dev/null +++ b/sbt-test/pipelining/pipelining-cancel/build.sbt @@ -0,0 +1,12 @@ +ThisBuild / usePipelining := true + +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ystop-after:pickler", // before ExtractAPI is reached, will cancel the pipeline output + ) + +lazy val b = project.in(file("b")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-cancel/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-cancel/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-cancel/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-cancel/test b/sbt-test/pipelining/pipelining-cancel/test new file mode 100644 index 
000000000000..d84f55ca3c31 --- /dev/null +++ b/sbt-test/pipelining/pipelining-cancel/test @@ -0,0 +1,6 @@ +# - Test depending on a project where upstream runs short of reaching backend, +# and cancels pipelined tasty writing. +# - Because `a` finishes compile run before the sending the signal to Zinc +# that pipeline jar is written, sbt will continue to the downstream project anyway. +# - Downstream project `b` will fail as it can't find a.A from upstream. +-> b/compile diff --git a/sbt-test/pipelining/pipelining-changes/build.sbt b/sbt-test/pipelining/pipelining-changes/build.sbt new file mode 100644 index 000000000000..630bd4be5b3e --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/build.sbt @@ -0,0 +1,27 @@ +import sbt.internal.inc.Analysis +import complete.DefaultParsers._ + +ThisBuild / usePipelining := true + +// Reset compiler iterations, necessary because tests run in batch mode +val recordPreviousIterations = taskKey[Unit]("Record previous iterations.") +recordPreviousIterations := { + val log = streams.value.log + CompileState.previousIterations = { + val previousAnalysis = (previousCompile in Compile).value.analysis.asScala + previousAnalysis match { + case None => + log.info("No previous analysis detected") + 0 + case Some(a: Analysis) => a.compilations.allCompilations.size + } + } +} + +val checkIterations = inputKey[Unit]("Verifies the accumulated number of iterations of incremental compilation.") + +checkIterations := { + val expected: Int = (Space ~> NatBasic).parsed + val actual: Int = ((compile in Compile).value match { case a: Analysis => a.compilations.allCompilations.size }) - CompileState.previousIterations + assert(expected == actual, s"Expected $expected compilations, got $actual (previous: ${CompileState.previousIterations})") +} diff --git a/sbt-test/pipelining/pipelining-changes/changes/A1.scala b/sbt-test/pipelining/pipelining-changes/changes/A1.scala new file mode 100644 index 000000000000..db5605e419d1 --- /dev/null +++ 
b/sbt-test/pipelining/pipelining-changes/changes/A1.scala @@ -0,0 +1,5 @@ +package a + +enum A { + case A, B +} diff --git a/sbt-test/pipelining/pipelining-changes/project/CompileState.scala b/sbt-test/pipelining/pipelining-changes/project/CompileState.scala new file mode 100644 index 000000000000..078db9c7bf56 --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/project/CompileState.scala @@ -0,0 +1,4 @@ +// This is necessary because tests are run in batch mode +object CompileState { + @volatile var previousIterations: Int = -1 +} diff --git a/sbt-test/pipelining/pipelining-changes/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-changes/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..1c6c00400f04 --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/project/DottyInjectedPlugin.scala @@ -0,0 +1,11 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + ) +} diff --git a/sbt-test/pipelining/pipelining-changes/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-changes/src/main/scala/a/A.scala new file mode 100644 index 000000000000..4a0eec46ec7e --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/src/main/scala/a/A.scala @@ -0,0 +1,5 @@ +package a + +enum A { + case A +} diff --git a/sbt-test/pipelining/pipelining-changes/src/main/scala/a/App.scala b/sbt-test/pipelining/pipelining-changes/src/main/scala/a/App.scala new file mode 100644 index 000000000000..a9862cea9dc4 --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/src/main/scala/a/App.scala @@ -0,0 +1,11 @@ +package a + +import scala.deriving.Mirror + +object App { + val m = summon[Mirror.SumOf[a.A]] + def size = compiletime.constValue[Tuple.Size[m.MirroredElemTypes]] + + @main def test = + assert(size == 2, s"Expected size 2, got 
$size") +} diff --git a/sbt-test/pipelining/pipelining-changes/test b/sbt-test/pipelining/pipelining-changes/test new file mode 100644 index 000000000000..e6fb01d57f5a --- /dev/null +++ b/sbt-test/pipelining/pipelining-changes/test @@ -0,0 +1,7 @@ +# test the interaction of incremental compilation and pipelining +> compile +> recordPreviousIterations +$ copy-file changes/A1.scala src/main/scala/a/A.scala +# A recompilation should trigger recompilation of App.scala, otherwise test assert will fail +> run +> checkIterations 2 diff --git a/sbt-test/pipelining/pipelining-scala-inline/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-inline/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..c2dfb3e2c886 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-inline/a/src/main/scala/a/A.scala @@ -0,0 +1,8 @@ +package a + +object A { + inline def power(x: Double, inline n: Int): Double = + inline if (n == 0) 1.0 + else inline if (n % 2 == 1) x * power(x, n - 1) + else power(x * x, n / 2) +} diff --git a/sbt-test/pipelining/pipelining-scala-inline/b/src/main/scala/b/B.scala b/sbt-test/pipelining/pipelining-scala-inline/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..7055d6d2d006 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-inline/b/src/main/scala/b/B.scala @@ -0,0 +1,10 @@ +package b + +import a.A + +object B { + @main def run = + assert(A.power(2.0, 2) == 4.0) + assert(A.power(2.0, 3) == 8.0) + assert(A.power(2.0, 4) == 16.0) +} diff --git a/sbt-test/pipelining/pipelining-scala-inline/build.sbt b/sbt-test/pipelining/pipelining-scala-inline/build.sbt new file mode 100644 index 000000000000..cd2a0c4eef07 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-inline/build.sbt @@ -0,0 +1,35 @@ +ThisBuild / usePipelining := true + +// defines a purely inline function, and we always force the early output, this should not be needed in practice +// because pure inline methods do not have a Macro flag. 
+lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + Compile / incOptions := { + val old = (Compile / incOptions).value + val hooks = old.externalHooks + val newHooks = hooks.withExternalLookup( + new sbt.internal.inc.NoopExternalLookup { + // assert that the analysis contains the class `a.A` and that it does not have a macro. + override def shouldDoEarlyOutput(analysis: xsbti.compile.CompileAnalysis): Boolean = { + val internalClasses = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal + val a_A = internalClasses.get("a.A") + assert(a_A.exists(cls => !cls.hasMacro), "`a.A` wasn't found, or it had a macro.") + + // returning true will force the early output ping and activate downstream pipelining, + // this is fine for inline methods, but see `sbt-test/pipelining/pipelining-scala-macro-fail` for how + // we can force a failure by returning true here. + true + } + } + ) + old.withExternalHooks(newHooks) + }, + ) + +// uses the purely inline function +lazy val b = project.in(file("b")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-inline/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-inline/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-inline/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-inline/test b/sbt-test/pipelining/pipelining-scala-inline/test new file mode 100644 index 000000000000..48a2443830b5 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-inline/test @@ 
-0,0 +1 @@ +> b/run diff --git a/sbt-test/pipelining/pipelining-scala-java-basic/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-java-basic/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..4b10db3eb385 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/a/src/main/scala/a/A.scala @@ -0,0 +1,5 @@ +package a + +object A { + val foo: (1,2,3) = (1,2,3) +} diff --git a/sbt-test/pipelining/pipelining-scala-java-basic/b/src/main/scala/b/B.java b/sbt-test/pipelining/pipelining-scala-java-basic/b/src/main/scala/b/B.java new file mode 100644 index 000000000000..7cac88d3cd46 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/b/src/main/scala/b/B.java @@ -0,0 +1,5 @@ +package b; + +public class B { + public static final String VALUE = "B"; +} diff --git a/sbt-test/pipelining/pipelining-scala-java-basic/build.sbt b/sbt-test/pipelining/pipelining-scala-java-basic/build.sbt new file mode 100644 index 000000000000..2b49443ae8f0 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/build.sbt @@ -0,0 +1,17 @@ +ThisBuild / usePipelining := true + +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + ) + +lazy val b = project.in(file("b")) + .settings( + scalacOptions += "-Ycheck:all", + ) + +lazy val c = project.in(file("c")) + .dependsOn(a, b) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-java-basic/c/src/main/scala/c/C.scala b/sbt-test/pipelining/pipelining-scala-java-basic/c/src/main/scala/c/C.scala new file mode 100644 index 000000000000..b8e23e0b5920 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/c/src/main/scala/c/C.scala @@ -0,0 +1,15 @@ +package c + +import a.A +import b.B + +object C { + val c_1: 2 = A.foo(1) + val c_2: "B" = B.VALUE + + @main def run = + assert(A.foo(0) == 1) + assert(A.foo(1) == 2) + assert(A.foo(2) == 3) + assert(B.VALUE == "B") +} diff --git 
a/sbt-test/pipelining/pipelining-scala-java-basic/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-java-basic/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-java-basic/test b/sbt-test/pipelining/pipelining-scala-java-basic/test new file mode 100644 index 000000000000..77f2017c835f --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-java-basic/test @@ -0,0 +1 @@ +> c/run diff --git a/sbt-test/pipelining/pipelining-scala-macro-fail/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-macro-fail/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..d98a9d2c1159 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-fail/a/src/main/scala/a/A.scala @@ -0,0 +1,18 @@ +package a + +import scala.quoted.* + +object A { + + inline def power(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = { + def impl(x: Double, n: Int): Double = + if (n == 0) 1.0 + else if (n % 2 == 1) x * impl(x, n - 1) + else impl(x * x, n / 2) + + Expr(impl(x.valueOrError, n.valueOrError)) + } +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-fail/b/src/main/scala/b/B.scala b/sbt-test/pipelining/pipelining-scala-macro-fail/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..7055d6d2d006 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-fail/b/src/main/scala/b/B.scala @@ -0,0 +1,10 @@ +package b + +import a.A 
+ +object B { + @main def run = + assert(A.power(2.0, 2) == 4.0) + assert(A.power(2.0, 3) == 8.0) + assert(A.power(2.0, 4) == 16.0) +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-fail/build.sbt b/sbt-test/pipelining/pipelining-scala-macro-fail/build.sbt new file mode 100644 index 000000000000..c98e664af507 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-fail/build.sbt @@ -0,0 +1,28 @@ +ThisBuild / usePipelining := true + +// defines a macro, normally this would cause sbt not to write the early output jar, but we force it +// this will cause b to fail to compile due to the missing macro class, +// see `sbt-test/pipelining/pipelining-scala-macro` for how by default sbt does the right thing +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + Compile / incOptions := { + val old = (Compile / incOptions).value + val hooks = old.externalHooks + val newHooks = hooks.withExternalLookup( + new sbt.internal.inc.NoopExternalLookup { + // force early output, this is safe in projects where the macro implementation is not in the same project, + // however in this build, b will now fail as it will not find the macro implementation class. 
+ override def shouldDoEarlyOutput(analysis: xsbti.compile.CompileAnalysis): Boolean = true + } + ) + old.withExternalHooks(newHooks) + }, + ) + +// uses the macro, this will fail because we forced early output ping, causing the missing macro implementation class +lazy val b = project.in(file("b")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-macro-fail/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-macro-fail/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-fail/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-fail/test b/sbt-test/pipelining/pipelining-scala-macro-fail/test new file mode 100644 index 000000000000..13daffd6dfa0 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-fail/test @@ -0,0 +1,2 @@ +> a/compile +-> b/compile diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..520aec03482a --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/A.scala @@ -0,0 +1,13 @@ +package a + +import scala.quoted.* + +object A { + + transparent inline def transparentPower(x: Double, inline n: Int): Double = + ${ macros.MacroImpl.powerCode('x, 'n) } + + inline def power(x: Double, inline n: Int): Double = + ${ macros.MacroImpl.powerCode('x, 'n) } + +} diff --git 
a/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsume.scala b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsume.scala new file mode 100644 index 000000000000..1a4b0c234910 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsume.scala @@ -0,0 +1,5 @@ +package a + +object AConsume { + def sixtyFour: Double = A.power(2.0, 6) // cause a suspension in inlining +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsumeTransparent.scala b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsumeTransparent.scala new file mode 100644 index 000000000000..cbd356047c4d --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/a/src/main/scala/a/AConsumeTransparent.scala @@ -0,0 +1,5 @@ +package a + +object AConsumeTransparent { + def thirtyTwo: Double = A.transparentPower(2.0, 5) // cause a suspension in typer +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/b/src/main/scala/b/B.scala b/sbt-test/pipelining/pipelining-scala-macro-force/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..7955b1d7cfbb --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/b/src/main/scala/b/B.scala @@ -0,0 +1,14 @@ +package b + +import a.A +import a.AConsumeTransparent +import a.AConsume + +object B { + @main def run = + assert(A.power(2.0, 2) == 4.0) + assert(A.power(2.0, 3) == 8.0) + assert(A.power(2.0, 4) == 16.0) + assert(AConsumeTransparent.thirtyTwo == 32.0) // these are not actually suspended in this project + assert(AConsume.sixtyFour == 64.0) // check that suspended definition is still available +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/build.sbt b/sbt-test/pipelining/pipelining-scala-macro-force/build.sbt new file mode 100644 index 000000000000..ee06080d0e76 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/build.sbt @@ -0,0 +1,45 @@ +ThisBuild 
/ usePipelining := true + +// defines just the macro implementations +lazy val macros = project.in(file("macros")) + .settings( + scalacOptions += "-Ycheck:all", + Compile / exportPipelining := false // downstream waits until classfiles are available + ) + +// defines a macro, we need to force sbt to produce the early output jar +// because it will detect macros in the analysis. +// However the classes for the implementation are provided by `macros` +lazy val a = project.in(file("a")) + .dependsOn(macros) + .settings( + scalacOptions += "-Ycheck:all", + scalacOptions += "-Xprint-suspension", + Compile / incOptions := { + val old = (Compile / incOptions).value + val hooks = old.externalHooks + val newHooks = hooks.withExternalLookup( + new sbt.internal.inc.NoopExternalLookup { + // force early output, this is safe because the macro class from `macros` will be available. + override def shouldDoEarlyOutput(analysis: xsbti.compile.CompileAnalysis): Boolean = { + val internalClasses = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal + val a_A = internalClasses.get("a.A") + val a_AConsume = internalClasses.get("a.AConsume") + val a_AConsumeTransparent = internalClasses.get("a.AConsumeTransparent") + assert(a_A.exists(cls => cls.hasMacro), s"`a.A` wasn't found, or it didn't have a macro.") + assert(a_AConsume.isDefined, s"`a.AConsume` wasn't found.") + assert(a_AConsumeTransparent.isDefined, s"`a.AConsumeTransparent` wasn't found.") + true // because `a.A` has macros, normally this would be false + } + } + ) + old.withExternalHooks(newHooks) + }, + ) + +// uses the macro, will still succeed as the macro implementation class is available +lazy val b = project.in(file("b")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/macros/src/main/scala/macros/MacroImpl.scala b/sbt-test/pipelining/pipelining-scala-macro-force/macros/src/main/scala/macros/MacroImpl.scala new file mode 100644 
index 000000000000..d7c03aaf0ae0 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/macros/src/main/scala/macros/MacroImpl.scala @@ -0,0 +1,15 @@ +package macros + +import scala.quoted.* + +object MacroImpl { + + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = { + def impl(x: Double, n: Int): Double = + if (n == 0) 1.0 + else if (n % 2 == 1) x * impl(x, n - 1) + else impl(x * x, n / 2) + + Expr(impl(x.valueOrError, n.valueOrError)) + } +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-macro-force/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-force/test b/sbt-test/pipelining/pipelining-scala-macro-force/test new file mode 100644 index 000000000000..48a2443830b5 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-force/test @@ -0,0 +1 @@ +> b/run diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice-ok/build.sbt b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/build.sbt new file mode 100644 index 000000000000..3162b525fc06 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/build.sbt @@ -0,0 +1,9 @@ +ThisBuild / usePipelining := true + +// m defines a macro depending on b.B, it also tries to use the macro in the same project, +// which will succeed even though B.class is not available when running the macro, +// because compilation can suspend until B is available. 
+lazy val m = project.in(file("m")) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/a/A.scala new file mode 100644 index 000000000000..ded148f5f613 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/a/A.scala @@ -0,0 +1,3 @@ +package a + +class A(val i: Int) diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/b/B.scala b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/b/B.scala new file mode 100644 index 000000000000..6b5337f96212 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/b/B.scala @@ -0,0 +1,26 @@ +package b + +import a.A +import scala.quoted.* + +object B { + + transparent inline def transparentPower(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = { + // this macro will cause a suspension in compilation of C.scala, because it calls + // transparentPower. This will try to invoke the macro but fail because A.class + // is not yet available until the run for A.scala completes. + + // see sbt-test/pipelining/pipelining-scala-macro-splice/m/src/main/scala/b/B.scala + // for a corresponding implementation that uses a class from an upstream project + // instead, and fails because pipelining is turned on for the upstream project. 
+ def impl(x: Double, n: A): Double = + if (n.i == 0) 1.0 + else if (n.i % 2 == 1) x * impl(x, A(n.i - 1)) + else impl(x * x, A(n.i / 2)) + + Expr(impl(x.valueOrError, A(n.valueOrError))) + } +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/c/C.scala b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/c/C.scala new file mode 100644 index 000000000000..c88acf0c2b28 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/c/C.scala @@ -0,0 +1,11 @@ +package c + +import b.B + +object C { + @main def run = { + assert(B.transparentPower(2.0, 2) == 4.0) + assert(B.transparentPower(2.0, 3) == 8.0) + assert(B.transparentPower(2.0, 4) == 16.0) + } +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice-ok/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..1c6c00400f04 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/project/DottyInjectedPlugin.scala @@ -0,0 +1,11 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice-ok/test b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/test new file mode 100644 index 000000000000..78e8e230e0ef --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice-ok/test @@ -0,0 +1,3 @@ +# shows that it is ok to depend on a class, defined in the same project, +# in a macro implementation. Compilation will suspend at typer. 
+> m/run diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-macro-splice/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..ded148f5f613 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice/a/src/main/scala/a/A.scala @@ -0,0 +1,3 @@ +package a + +class A(val i: Int) diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice/a_alt/.keep b/sbt-test/pipelining/pipelining-scala-macro-splice/a_alt/.keep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice/build.sbt b/sbt-test/pipelining/pipelining-scala-macro-splice/build.sbt new file mode 100644 index 000000000000..91186af42ef3 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice/build.sbt @@ -0,0 +1,32 @@ +ThisBuild / usePipelining := true + +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + ) + +// same as a, but does not use pipelining +lazy val a_alt = project.in(file("a_alt")) + .settings( + Compile / sources := (a / Compile / sources).value, + Compile / exportPipelining := false, + ) + + +// m defines a macro depending on a, it also tries to use the macro in the same project, +// which will fail because A.class is not available when running the macro, +// because the dependency on a is pipelined. +lazy val m = project.in(file("m")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) + +// same as m, but depends on a_alt, so it will compile +// because A.class will be available when running the macro. 
+lazy val m_alt = project.in(file("m_alt")) + .dependsOn(a_alt) + .settings( + Compile / sources := (m / Compile / sources).value, + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice/m/src/main/scala/b/B.scala b/sbt-test/pipelining/pipelining-scala-macro-splice/m/src/main/scala/b/B.scala new file mode 100644 index 000000000000..5da498a27355 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice/m/src/main/scala/b/B.scala @@ -0,0 +1,26 @@ +package b + +import a.A +import scala.quoted.* + +object B { + + transparent inline def transparentPower(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = { + // this macro is invoked during compilation of C.scala. When project a is pipelined + // This will fail because A.class will never be available, because the classpath entry + // is the early-output jar. The compiler detects this and aborts macro expansion with an error. + + // see sbt-test/pipelining/pipelining-scala-macro-splice-ok/m/src/main/scala/b/B.scala + // for a corresponding implementation that uses a class from the same project + // instead, but succeeds because it can suspend compilation until classes become available. 
+ def impl(x: Double, n: A): Double = + if (n.i == 0) 1.0 + else if (n.i % 2 == 1) x * impl(x, A(n.i - 1)) + else impl(x * x, A(n.i / 2)) + + Expr(impl(x.valueOrError, A(n.valueOrError))) + } +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice/m/src/main/scala/c/C.scala b/sbt-test/pipelining/pipelining-scala-macro-splice/m/src/main/scala/c/C.scala new file mode 100644 index 000000000000..c88acf0c2b28 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice/m/src/main/scala/c/C.scala @@ -0,0 +1,11 @@ +package c + +import b.B + +object C { + @main def run = { + assert(B.transparentPower(2.0, 2) == 4.0) + assert(B.transparentPower(2.0, 3) == 8.0) + assert(B.transparentPower(2.0, 4) == 16.0) + } +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice/m_alt/.keep b/sbt-test/pipelining/pipelining-scala-macro-splice/m_alt/.keep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-macro-splice/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..1c6c00400f04 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice/project/DottyInjectedPlugin.scala @@ -0,0 +1,11 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-macro-splice/test b/sbt-test/pipelining/pipelining-scala-macro-splice/test new file mode 100644 index 000000000000..db95a0ab56a8 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro-splice/test @@ -0,0 +1,10 @@ +# as described in build.sbt, this will fail to compile. 
+# m defines a macro, depending on a.A, defined in upstream project a +# however because m also tries to run the macro in the same project, +# a/A.class is not available yet, so a reflection error will occur. +# This is caught by the compiler and presents a pretty diagnostic to the user, +# suggesting to disable pipelining in the project defining A. +-> m/compile +# This will run, simulating a user following the suggestion to +# disable pipelining in project a. +> m_alt/run diff --git a/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..9077f0a2e849 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/A.scala @@ -0,0 +1,21 @@ +package a + +import scala.quoted.* + +object A { + + transparent inline def transparentPower(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + + inline def power(x: Double, inline n: Int): Double = + ${ powerCode('x, 'n) } + + def powerCode(x: Expr[Double], n: Expr[Int])(using Quotes): Expr[Double] = { + def impl(x: Double, n: Int): Double = + if (n == 0) 1.0 + else if (n % 2 == 1) x * impl(x, n - 1) + else impl(x * x, n / 2) + + Expr(impl(x.valueOrError, n.valueOrError)) + } +} diff --git a/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendInlining.scala b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendInlining.scala new file mode 100644 index 000000000000..0fa449601d31 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendInlining.scala @@ -0,0 +1,5 @@ +package a + +object ASuspendInlining { + def sixtyFour: Double = A.power(2.0, 6) // cause a suspension in inlining +} diff --git a/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendTyper.scala b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendTyper.scala new file mode 100644 index 
000000000000..2af5139b30bc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/a/src/main/scala/a/ASuspendTyper.scala @@ -0,0 +1,5 @@ +package a + +object ASuspendTyper { + def thirtyTwo: Double = A.transparentPower(2.0, 5) // cause a suspension in typer +} diff --git a/sbt-test/pipelining/pipelining-scala-macro/b/src/main/scala/b/B.scala b/sbt-test/pipelining/pipelining-scala-macro/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..17f72ddf1644 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/b/src/main/scala/b/B.scala @@ -0,0 +1,14 @@ +package b + +import a.A +import a.ASuspendTyper +import a.ASuspendInlining + +object B { + @main def run = + assert(A.power(2.0, 2) == 4.0) + assert(A.power(2.0, 3) == 8.0) + assert(A.power(2.0, 4) == 16.0) + assert(ASuspendTyper.thirtyTwo == 32.0) // check that suspended definition is still available + assert(ASuspendInlining.sixtyFour == 64.0) // check that suspended definition is still available +} diff --git a/sbt-test/pipelining/pipelining-scala-macro/build.sbt b/sbt-test/pipelining/pipelining-scala-macro/build.sbt new file mode 100644 index 000000000000..5f703bb0d815 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/build.sbt @@ -0,0 +1,62 @@ +ThisBuild / usePipelining := true + +// defines a macro, sbt will not force the early output +// because it will detect macros in the analysis, so b will compile fine, +// see `sbt-test/pipelining/pipelining-scala-macro-fail` for how we can +// force a failure by always forcing early output. 
+lazy val a = project.in(file("a")) + .settings( + // scalacOptions += "-Ycheck:all", + scalacOptions += "-Xprint-suspension", + Compile / incOptions := { + val old = (Compile / incOptions).value + val hooks = old.externalHooks + val newHooks = hooks.withExternalLookup( + new sbt.internal.inc.NoopExternalLookup { + @volatile var earlyOutputChecks = 0 + + def didFindMacros(analysis: xsbti.compile.CompileAnalysis) = { + val foundMacros = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal.values.exists(_.hasMacro) + assert(foundMacros, "expected macros to be found in the analysis.") + foundMacros + } + + // force early output, this is safe because the macro class from `macros` will be available. + override def shouldDoEarlyOutput(analysis: xsbti.compile.CompileAnalysis): Boolean = { + earlyOutputChecks += 1 + assert(earlyOutputChecks <= 2, "should only be called twice (apiPhaseCompleted, dependencyPhaseCompleted).") + val internalClasses = analysis.asInstanceOf[sbt.internal.inc.Analysis].apis.internal + val a_A = internalClasses.get("a.A") + val a_ASuspendTyper = internalClasses.get("a.ASuspendTyper") + val a_ASuspendInlining = internalClasses.get("a.ASuspendInlining") + + // both `a.A` and `a.ASuspendInlining` should be found in the analysis. + // even though `a.ASuspendInlining` suspends, it happens at inlining, so we should still + // record API for it in the first run. + assert(a_A.isDefined, s"`a.A` wasn't found.") + assert(a_ASuspendInlining.isDefined, s"`a.ASuspendInlining` wasn't found.") + + // in run 1, `a.ASuspendTyper` would have suspended at typer, and not be present in Analysis. + // Therefore we wouldn't close the early output jar. + // Therefore, because it is present here, we waited to the second run to close the early output jar, + // at which point we recorded API for `a.ASuspendTyper`, and because we closed the early output jar, + // we send the signal to Zinc that the early output was written. 
+ assert(a_ASuspendTyper.isDefined, s"`a.ASuspendTyper` wasn't found.") + + + // do what sbt does typically, + // it will not force early output because macros are found + !didFindMacros(analysis) + } + } + ) + old.withExternalHooks(newHooks) + }, + ) + +// uses the macro, sbt is smart enough to not use pipelining flags when upstream compilation has macros +lazy val b = project.in(file("b")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-macro/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-macro/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-macro/test b/sbt-test/pipelining/pipelining-scala-macro/test new file mode 100644 index 000000000000..48a2443830b5 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-macro/test @@ -0,0 +1 @@ +> b/run diff --git a/sbt-test/pipelining/pipelining-scala-only/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-scala-only/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..4b10db3eb385 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-only/a/src/main/scala/a/A.scala @@ -0,0 +1,5 @@ +package a + +object A { + val foo: (1,2,3) = (1,2,3) +} diff --git a/sbt-test/pipelining/pipelining-scala-only/b/src/main/scala/b/B.scala b/sbt-test/pipelining/pipelining-scala-only/b/src/main/scala/b/B.scala new file mode 100644 index 000000000000..971d07d5656d --- /dev/null +++ 
b/sbt-test/pipelining/pipelining-scala-only/b/src/main/scala/b/B.scala @@ -0,0 +1,12 @@ +package b + +import a.A + +object B { + val b: 2 = A.foo(1) + + @main def run = + assert(A.foo(0) == 1) + assert(A.foo(1) == 2) + assert(A.foo(2) == 3) +} diff --git a/sbt-test/pipelining/pipelining-scala-only/build.sbt b/sbt-test/pipelining/pipelining-scala-only/build.sbt new file mode 100644 index 000000000000..16e182e48801 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-only/build.sbt @@ -0,0 +1,12 @@ +ThisBuild / usePipelining := true + +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + ) + +lazy val b = project.in(file("b")) + .dependsOn(a) + .settings( + scalacOptions += "-Ycheck:all", + ) diff --git a/sbt-test/pipelining/pipelining-scala-only/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-scala-only/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-only/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-scala-only/test b/sbt-test/pipelining/pipelining-scala-only/test new file mode 100644 index 000000000000..48a2443830b5 --- /dev/null +++ b/sbt-test/pipelining/pipelining-scala-only/test @@ -0,0 +1 @@ +> b/run diff --git a/sbt-test/pipelining/pipelining-test/a/src/main/scala/a/A.scala b/sbt-test/pipelining/pipelining-test/a/src/main/scala/a/A.scala new file mode 100644 index 000000000000..4b10db3eb385 --- /dev/null +++ b/sbt-test/pipelining/pipelining-test/a/src/main/scala/a/A.scala @@ -0,0 +1,5 @@ +package a + +object A { + val foo: (1,2,3) = (1,2,3) +} diff --git 
a/sbt-test/pipelining/pipelining-test/a/src/test/scala/a/Hello.scala b/sbt-test/pipelining/pipelining-test/a/src/test/scala/a/Hello.scala new file mode 100644 index 000000000000..1cfa3424bd98 --- /dev/null +++ b/sbt-test/pipelining/pipelining-test/a/src/test/scala/a/Hello.scala @@ -0,0 +1,12 @@ +package a + +import a.A + +import org.junit.Test + +class Hello { + + @Test def test(): Unit = { + assert(A.foo == (1,2,3)) + } +} diff --git a/sbt-test/pipelining/pipelining-test/build.sbt b/sbt-test/pipelining/pipelining-test/build.sbt new file mode 100644 index 000000000000..576ecc793ac6 --- /dev/null +++ b/sbt-test/pipelining/pipelining-test/build.sbt @@ -0,0 +1,7 @@ +ThisBuild / usePipelining := true + +lazy val a = project.in(file("a")) + .settings( + scalacOptions += "-Ycheck:all", + libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % "test", + ) diff --git a/sbt-test/pipelining/pipelining-test/project/DottyInjectedPlugin.scala b/sbt-test/pipelining/pipelining-test/project/DottyInjectedPlugin.scala new file mode 100644 index 000000000000..69f15d168bfc --- /dev/null +++ b/sbt-test/pipelining/pipelining-test/project/DottyInjectedPlugin.scala @@ -0,0 +1,12 @@ +import sbt._ +import Keys._ + +object DottyInjectedPlugin extends AutoPlugin { + override def requires = plugins.JvmPlugin + override def trigger = allRequirements + + override val projectSettings = Seq( + scalaVersion := sys.props("plugin.scalaVersion"), + scalacOptions += "-source:3.0-migration" + ) +} diff --git a/sbt-test/pipelining/pipelining-test/test b/sbt-test/pipelining/pipelining-test/test new file mode 100644 index 000000000000..1c996d6962e8 --- /dev/null +++ b/sbt-test/pipelining/pipelining-test/test @@ -0,0 +1,12 @@ +# run the tests on a project with pipelining +# exercises the fact that -Xjava-tasty and -Xpickle-write +# flags are set twice. 
+# steps: +# - Compile scope is compiled with flags `-Xjava-tasty -Xpickle-write early/a-early-7423784.jar` +# - sbt copies `early/a-early-7423784.jar` to `early/a-early.jar` +# - Test scope is compiled with flags `-Xjava-tasty -Xpickle-write early-test/a-early-963232.jar -Xjava-tasty -Xpickle-write early/a-early.jar -classpath early/a-early.jar` +# i.e. for some reason the classpath has the same `a-early.jar` that +# is passed with `-Xpickle-write`. +# Therefore we MUST avoid even reading the second `-Xpickle-write` setting, +# otherwise we will zero-out `a-early.jar`, causing type errors because its contents are blank. +> a/test diff --git a/sbt-test/sbt-dotty/analyzer-plugin/plugin/Analyzer.scala b/sbt-test/sbt-dotty/analyzer-plugin/plugin/Analyzer.scala index c1fab5c13f42..01aa57d7a971 100644 --- a/sbt-test/sbt-dotty/analyzer-plugin/plugin/Analyzer.scala +++ b/sbt-test/sbt-dotty/analyzer-plugin/plugin/Analyzer.scala @@ -21,7 +21,7 @@ class InitPlugin extends StandardPlugin { val name: String = "initPlugin" override val description: String = "checks that under -Yretain-trees we may get tree for all symbols" - def init(options: List[String]): List[PluginPhase] = + override def initialize(options: List[String])(using Context): List[PluginPhase] = (new SetDefTree) :: (new InitChecker) :: Nil } diff --git a/sbt-test/sbt-dotty/compiler-plugin/plugin/DivideZero.scala b/sbt-test/sbt-dotty/compiler-plugin/plugin/DivideZero.scala index c6fac6b796c0..3d1698250e5d 100644 --- a/sbt-test/sbt-dotty/compiler-plugin/plugin/DivideZero.scala +++ b/sbt-test/sbt-dotty/compiler-plugin/plugin/DivideZero.scala @@ -22,7 +22,8 @@ class DivideZero extends PluginPhase with StandardPlugin { override val runsAfter = Set(Pickler.name) override val runsBefore = Set(Staging.name) - def init(options: List[String]): List[PluginPhase] = this :: Nil + // We keep using deprecated variant here just to ensure it still works correctly + override def init(options: List[String]): List[PluginPhase] = this
:: Nil private def isNumericDivide(sym: Symbol)(implicit ctx: Context): Boolean = { def test(tpe: String): Boolean = diff --git a/sbt-test/source-dependencies/compactify/src/main/scala/Nested.scala b/sbt-test/source-dependencies/compactify/src/main/scala/Nested.scala index 4b1597d287d4..b2d53cedee05 100644 --- a/sbt-test/source-dependencies/compactify/src/main/scala/Nested.scala +++ b/sbt-test/source-dependencies/compactify/src/main/scala/Nested.scala @@ -2,35 +2,35 @@ package test object TopLevelModule1 { - object InnerModule1 - { - object InnerModule2 - { - trait Z { def q = 3 } - def x = 3 - } - } - class InnerClass1 - { - class InnerClass2 - { - val z = new TopLevelModule1.InnerClass2 - } - object InnerModule3 - { - val y = new TopLevel1 with InnerModule1.InnerModule2.Z { val x = 4 } - } - } - class InnerClass2 + object InnerModule1 + { + object InnerModule2 + { + trait Z { def q = 3 } + def x = 3 + } + } + class InnerClass1 + { + class InnerClass2 + { + val z = new TopLevelModule1.InnerClass2 + } + object InnerModule3 + { + val y = new TopLevel1 with InnerModule1.InnerModule2.Z { val x = 4 } + } + } + class InnerClass2 } class TopLevel1 { - object Inner1_1 + object Inner1_1 } object TopLevel1 { - class Inner1_2 - object Inner1_2 + class Inner1_2 + object Inner1_2 } object TopLevel2 @@ -41,3 +41,11 @@ object TopLevel3 class TopLevel4 object TopLevelModuleSuffix$ + +// will generate a package object wrapper +val topLevelVal = 23 + +// explicit package object +package object inner { + val innerVal = 23 +} diff --git a/scala2-library-bootstrapped/src/scala/Array.scala b/scala2-library-bootstrapped/src/scala/Array.scala new file mode 100644 index 000000000000..d2098a76f32f --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/Array.scala @@ -0,0 +1,690 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala + +//import scala.collection.generic._ +import scala.collection.{Factory, immutable, mutable} +import mutable.ArrayBuilder +import immutable.ArraySeq +import scala.language.implicitConversions +import scala.reflect.{ClassTag, classTag} +import scala.runtime.BoxedUnit +import scala.runtime.ScalaRunTime +import scala.runtime.ScalaRunTime.{array_apply, array_update} + +/** Utility methods for operating on arrays. + * For example: + * {{{ + * val a = Array(1, 2) + * val b = Array.ofDim[Int](2) + * val c = Array.concat(a, b) + * }}} + * where the array objects `a`, `b` and `c` have respectively the values + * `Array(1, 2)`, `Array(0, 0)` and `Array(1, 2, 0, 0)`. + */ +object Array { + val emptyBooleanArray = new Array[Boolean](0) + val emptyByteArray = new Array[Byte](0) + val emptyCharArray = new Array[Char](0) + val emptyDoubleArray = new Array[Double](0) + val emptyFloatArray = new Array[Float](0) + val emptyIntArray = new Array[Int](0) + val emptyLongArray = new Array[Long](0) + val emptyShortArray = new Array[Short](0) + val emptyObjectArray = new Array[Object](0) + + /** Provides an implicit conversion from the Array object to a collection Factory */ + implicit def toFactory[A : ClassTag](dummy: Array.type): Factory[A, Array[A]] = new ArrayFactory(dummy) + @SerialVersionUID(3L) + private class ArrayFactory[A : ClassTag](dummy: Array.type) extends Factory[A, Array[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): Array[A] = Array.from[A](it) + def newBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder[A] + } + + /** + * Returns a new [[scala.collection.mutable.ArrayBuilder]]. + */ + def newBuilder[T](implicit t: ClassTag[T]): ArrayBuilder[T] = ArrayBuilder.make[T](using t) + + /** Build an array from the iterable collection. 
+ * + * {{{ + * scala> val a = Array.from(Seq(1, 5)) + * val a: Array[Int] = Array(1, 5) + * + * scala> val b = Array.from(Range(1, 5)) + * val b: Array[Int] = Array(1, 2, 3, 4) + * }}} + * + * @param it the iterable collection + * @return an array consisting of elements of the iterable collection + */ + def from[A : ClassTag](it: IterableOnce[A]): Array[A] = it match { + case it: Iterable[A] => it.toArray[A] + case _ => it.iterator.toArray[A] + } + + private def slowcopy(src : AnyRef, + srcPos : Int, + dest : AnyRef, + destPos : Int, + length : Int): Unit = { + var i = srcPos + var j = destPos + val srcUntil = srcPos + length + while (i < srcUntil) { + array_update(dest, j, array_apply(src, i)) + i += 1 + j += 1 + } + } + + /** Copy one array to another. + * Equivalent to Java's + * `System.arraycopy(src, srcPos, dest, destPos, length)`, + * except that this also works for polymorphic and boxed arrays. + * + * Note that the passed-in `dest` array will be modified by this call. + * + * @param src the source array. + * @param srcPos starting position in the source array. + * @param dest destination array. + * @param destPos starting position in the destination array. + * @param length the number of array elements to be copied. + * + * @see `java.lang.System#arraycopy` + */ + def copy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int): Unit = { + val srcClass = src.getClass + if (srcClass.isArray && dest.getClass.isAssignableFrom(srcClass)) + java.lang.System.arraycopy(src, srcPos, dest, destPos, length) + else + slowcopy(src, srcPos, dest, destPos, length) + } + + /** Copy one array to another, truncating or padding with default values (if + * necessary) so the copy has the specified length. + * + * Equivalent to Java's + * `java.util.Arrays.copyOf(original, newLength)`, + * except that this works for primitive and object arrays in a single method. 
+ * + * @see `java.util.Arrays#copyOf` + */ + def copyOf[A](original: Array[A], newLength: Int): Array[A] = ((original: @unchecked) match { + case x: Array[BoxedUnit] => newUnitArray(newLength).asInstanceOf[Array[A]] + case x: Array[AnyRef] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Int] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Double] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Long] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Float] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Char] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Byte] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Short] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Boolean] => java.util.Arrays.copyOf(x, newLength) + }).asInstanceOf[Array[A]] + + /** Copy one array to another, truncating or padding with default values (if + * necessary) so the copy has the specified length. The new array can have + * a different type than the original one as long as the values are + * assignment-compatible. When copying between primitive and object arrays, + * boxing and unboxing are supported. + * + * Equivalent to Java's + * `java.util.Arrays.copyOf(original, newLength, newType)`, + * except that this works for all combinations of primitive and object arrays + * in a single method. 
+ * + * @see `java.util.Arrays#copyOf` + */ + def copyAs[A](original: Array[_], newLength: Int)(implicit ct: ClassTag[A]): Array[A] = { + val runtimeClass = ct.runtimeClass + if (runtimeClass == Void.TYPE) newUnitArray(newLength).asInstanceOf[Array[A]] + else { + val destClass = runtimeClass.asInstanceOf[Class[A]] + if (destClass.isAssignableFrom(original.getClass.getComponentType)) { + if (destClass.isPrimitive) copyOf[A](original.asInstanceOf[Array[A]], newLength) + else { + val destArrayClass = java.lang.reflect.Array.newInstance(destClass, 0).getClass.asInstanceOf[Class[Array[AnyRef]]] + java.util.Arrays.copyOf(original.asInstanceOf[Array[AnyRef]], newLength, destArrayClass).asInstanceOf[Array[A]] + } + } else { + val dest = new Array[A](newLength) + Array.copy(original, 0, dest, 0, original.length) + dest + } + } + } + + private def newUnitArray(len: Int): Array[Unit] = { + val result = new Array[Unit](len) + java.util.Arrays.fill(result.asInstanceOf[Array[AnyRef]], ()) + result + } + + /** Returns an array of length 0 */ + def empty[T: ClassTag]: Array[T] = new Array[T](0) + + /** Creates an array with given elements. + * + * @param xs the elements to put in the array + * @return an array containing all elements from xs. + */ + // Subject to a compiler optimization in Cleanup. + // Array(e0, ..., en) is translated to { val a = new Array(3); a(i) = ei; a } + def apply[T: ClassTag](xs: T*): Array[T] = { + val len = xs.length + xs match { + case wa: immutable.ArraySeq[_] if wa.unsafeArray.getClass.getComponentType == classTag[T].runtimeClass => + // We get here in test/files/run/sd760a.scala, `Array[T](t)` for + // a specialized type parameter `T`. While we still pay for two + // copies of the array it is better than before when we also boxed + // each element when populating the result. 
+ ScalaRunTime.array_clone(wa.unsafeArray).asInstanceOf[Array[T]] + case _ => + val array = new Array[T](len) + val iterator = xs.iterator + var i = 0 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + } + + /** Creates an array of `Boolean` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Boolean, xs: Boolean*): Array[Boolean] = { + val array = new Array[Boolean](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Byte` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Byte, xs: Byte*): Array[Byte] = { + val array = new Array[Byte](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Short` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Short, xs: Short*): Array[Short] = { + val array = new Array[Short](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Char` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Char, xs: Char*): Array[Char] = { + val array = new Array[Char](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Int` objects */ + // Subject to a compiler optimization in Cleanup, see above. 
+ def apply(x: Int, xs: Int*): Array[Int] = { + val array = new Array[Int](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Long` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Long, xs: Long*): Array[Long] = { + val array = new Array[Long](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Float` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Float, xs: Float*): Array[Float] = { + val array = new Array[Float](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Double` objects */ + // Subject to a compiler optimization in Cleanup, see above. 
+ def apply(x: Double, xs: Double*): Array[Double] = { + val array = new Array[Double](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Unit` objects */ + def apply(x: Unit, xs: Unit*): Array[Unit] = { + val array = new Array[Unit](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates array with given dimensions */ + def ofDim[T: ClassTag](n1: Int): Array[T] = + new Array[T](n1) + /** Creates a 2-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = { + val arr: Array[Array[T]] = (new Array[Array[T]](n1): Array[Array[T]]) + for (i <- 0 until n1) arr(i) = new Array[T](n2) + arr + // tabulate(n1)(_ => ofDim[T](n2)) + } + /** Creates a 3-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] = + tabulate(n1)(_ => ofDim[T](n2, n3)) + /** Creates a 4-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] = + tabulate(n1)(_ => ofDim[T](n2, n3, n4)) + /** Creates a 5-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(_ => ofDim[T](n2, n3, n4, n5)) + + /** Concatenates all arrays into a single array. + * + * @param xss the given arrays + * @return the array created from concatenating `xss` + */ + def concat[T: ClassTag](xss: Array[T]*): Array[T] = { + val b = newBuilder[T] + b.sizeHint(xss.map(_.length).sum) + for (xs <- xss) b ++= xs + b.result() + } + + /** Returns an array that contains the results of some element computation a number + * of times. 
+ * + * Note that this means that `elem` is computed a total of n times: + * {{{ + * scala> Array.fill(3){ math.random } + * res3: Array[Double] = Array(0.365461167592537, 1.550395944913685E-4, 0.7907242137333306) + * }}} + * + * @param n the number of elements desired + * @param elem the element computation + * @return an Array of size n, where each element contains the result of computing + * `elem`. + */ + def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = { + if (n <= 0) { + empty[T] + } else { + val array = new Array[T](n) + var i = 0 + while (i < n) { + array(i) = elem + i += 1 + } + array + } + } + + /** Returns a two-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] = + tabulate(n1)(_ => fill(n2)(elem)) + + /** Returns a three-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int)(elem: => T): Array[Array[Array[T]]] = + tabulate(n1)(_ => fill(n2, n3)(elem)) + + /** Returns a four-dimensional array that contains the results of some element + * computation a number of times. 
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => T): Array[Array[Array[Array[T]]]] = + tabulate(n1)(_ => fill(n2, n3, n4)(elem)) + + /** Returns a five-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => T): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem)) + + /** Returns an array containing values of a given function over a range of integer + * values starting from 0. + * + * @param n The number of elements in the array + * @param f The function computing element values + * @return An `Array` consisting of elements `f(0),f(1), ..., f(n - 1)` + */ + def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = { + if (n <= 0) { + empty[T] + } else { + val array = new Array[T](n) + var i = 0 + while (i < n) { + array(i) = f(i) + i += 1 + } + array + } + } + + /** Returns a two-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. 
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Returns a three-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => T): Array[Array[Array[T]]] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Returns a four-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => T): Array[Array[Array[Array[T]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Returns a five-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. 
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => T): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + + /** Returns an array containing a sequence of increasing integers in a range. + * + * @param start the start value of the array + * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) + * @return the array with values in range `start, start + 1, ..., end - 1` + * up to, but excluding, `end`. + */ + def range(start: Int, end: Int): Array[Int] = range(start, end, 1) + + /** Returns an array containing equally spaced values in some integer interval. + * + * @param start the start value of the array + * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) + * @param step the increment value of the array (may not be zero) + * @return the array with values in `start, start + step, ...` up to, but excluding `end` + */ + def range(start: Int, end: Int, step: Int): Array[Int] = { + if (step == 0) throw new IllegalArgumentException("zero step") + val array = new Array[Int](immutable.Range.count(start, end, step, isInclusive = false)) + + var n = 0 + var i = start + while (if (step < 0) end < i else i < end) { + array(n) = i + i += step + n += 1 + } + array + } + + /** Returns an array containing repeated applications of a function to a start value. 
+ * + * @param start the start value of the array + * @param len the number of elements returned by the array + * @param f the function that is repeatedly applied + * @return the array returning `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = { + if (len > 0) { + val array = new Array[T](len) + var acc = start + var i = 1 + array(0) = acc + + while (i < len) { + acc = f(acc) + array(i) = acc + i += 1 + } + array + } else { + empty[T] + } + } + + /** Compare two arrays per element. + * + * A more efficient version of `xs.sameElements(ys)`. + * + * Note that arrays are invariant in Scala, but it may + * be sound to cast an array of arbitrary reference type + * to `Array[AnyRef]`. Arrays on the JVM are covariant + * in their element type. + * + * `Array.equals(xs.asInstanceOf[Array[AnyRef]], ys.asInstanceOf[Array[AnyRef]])` + * + * @param xs an array of AnyRef + * @param ys an array of AnyRef + * @return true if corresponding elements are equal + */ + def equals(xs: Array[AnyRef], ys: Array[AnyRef]): Boolean = + (xs eq ys) || + (xs.length == ys.length) && { + var i = 0 + while (i < xs.length && xs(i) == ys(i)) i += 1 + i >= xs.length + } + + /** Called in a pattern match like `{ case Array(x,y,z) => println('3 elements')}`. 
+ * + * @param x the selector value + * @return sequence wrapped in a [[scala.Some]], if `x` is an Array, otherwise `None` + */ + def unapplySeq[T](x: Array[T]): UnapplySeqWrapper[T] = new UnapplySeqWrapper(x) + + final class UnapplySeqWrapper[T](private val a: Array[T]) extends AnyVal { + def isEmpty: false = false + def get: UnapplySeqWrapper[T] = this + def lengthCompare(len: Int): Int = a.lengthCompare(len) + def apply(i: Int): T = a(i) + def drop(n: Int): scala.Seq[T] = ArraySeq.unsafeWrapArray(a.drop(n)) // clones the array, also if n == 0 + def toSeq: scala.Seq[T] = a.toSeq // clones the array + } +} + +/** Arrays are mutable, indexed collections of values. `Array[T]` is Scala's representation + * for Java's `T[]`. + * + * {{{ + * val numbers = Array(1, 2, 3, 4) + * val first = numbers(0) // read the first element + * numbers(3) = 100 // replace the 4th array element with 100 + * val biggerNumbers = numbers.map(_ * 2) // multiply all numbers by two + * }}} + * + * Arrays make use of two common pieces of Scala syntactic sugar, shown on lines 2 and 3 of the above + * example code. + * Line 2 is translated into a call to `apply(Int)`, while line 3 is translated into a call to + * `update(Int, T)`. + * + * Two implicit conversions exist in [[scala.Predef]] that are frequently applied to arrays: a conversion + * to [[scala.collection.ArrayOps]] (shown on line 4 of the example above) and a conversion + * to [[scala.collection.mutable.ArraySeq]] (a subtype of [[scala.collection.Seq]]). + * Both types make available many of the standard operations found in the Scala collections API. + * The conversion to `ArrayOps` is temporary, as all operations defined on `ArrayOps` return an `Array`, + * while the conversion to `ArraySeq` is permanent as all operations return a `ArraySeq`. + * + * The conversion to `ArrayOps` takes priority over the conversion to `ArraySeq`. 
For instance, + * consider the following code: + * + * {{{ + * val arr = Array(1, 2, 3) + * val arrReversed = arr.reverse + * val seqReversed : collection.Seq[Int] = arr.reverse + * }}} + * + * Value `arrReversed` will be of type `Array[Int]`, with an implicit conversion to `ArrayOps` occurring + * to perform the `reverse` operation. The value of `seqReversed`, on the other hand, will be computed + * by converting to `ArraySeq` first and invoking the variant of `reverse` that returns another + * `ArraySeq`. + * + * @see [[https://www.scala-lang.org/files/archive/spec/2.13/ Scala Language Specification]], for in-depth information on the transformations the Scala compiler makes on Arrays (Sections 6.6 and 6.15 respectively.) + * @see [[https://docs.scala-lang.org/sips/scala-2-8-arrays.html "Scala 2.8 Arrays"]] the Scala Improvement Document detailing arrays since Scala 2.8. + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/arrays.html "The Scala 2.8 Collections' API"]] section on `Array` by Martin Odersky for more information. 
+ * @hideImplicitConversion scala.Predef.booleanArrayOps + * @hideImplicitConversion scala.Predef.byteArrayOps + * @hideImplicitConversion scala.Predef.charArrayOps + * @hideImplicitConversion scala.Predef.doubleArrayOps + * @hideImplicitConversion scala.Predef.floatArrayOps + * @hideImplicitConversion scala.Predef.intArrayOps + * @hideImplicitConversion scala.Predef.longArrayOps + * @hideImplicitConversion scala.Predef.refArrayOps + * @hideImplicitConversion scala.Predef.shortArrayOps + * @hideImplicitConversion scala.Predef.unitArrayOps + * @hideImplicitConversion scala.LowPriorityImplicits.wrapRefArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapIntArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapDoubleArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapLongArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapFloatArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapCharArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapByteArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapShortArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapBooleanArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapUnitArray + * @hideImplicitConversion scala.LowPriorityImplicits.genericWrapArray + * @define coll array + * @define Coll `Array` + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define collectExample + * @define undefinedorder + */ +final class Array[T](_length: Int) extends java.io.Serializable with java.lang.Cloneable { + + /** The length of the array */ + def length: Int = throw new Error() + + /** The element at given index. + * + * Indices start at `0`; `xs.apply(0)` is the first element of array `xs`. + * Note the indexing syntax `xs(i)` is a shorthand for `xs.apply(i)`. 
+ * + * @param i the index + * @return the element at the given index + * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` + */ + def apply(i: Int): T = throw new Error() + + /** Update the element at given index. + * + * Indices start at `0`; `xs.update(i, x)` replaces the i^th^ element in the array. + * Note the syntax `xs(i) = x` is a shorthand for `xs.update(i, x)`. + * + * @param i the index + * @param x the value to be written at index `i` + * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` + */ + def update(i: Int, x: T): Unit = { throw new Error() } + + /** Clone the Array. + * + * @return A clone of the Array. + */ + override def clone(): Array[T] = throw new Error() +} diff --git a/scala2-library-bootstrapped/src/scala/collection/ArrayOps.scala b/scala2-library-bootstrapped/src/scala/collection/ArrayOps.scala new file mode 100644 index 000000000000..d4659bbb0dba --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/ArrayOps.scala @@ -0,0 +1,1664 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import java.lang.Math.{max, min} +import java.util.Arrays + +import scala.Predef.{ // unimport all array-related implicit conversions to avoid triggering them accidentally + genericArrayOps => _, + booleanArrayOps => _, + byteArrayOps => _, + charArrayOps => _, + doubleArrayOps => _, + floatArrayOps => _, + intArrayOps => _, + longArrayOps => _, + refArrayOps => _, + shortArrayOps => _, + unitArrayOps => _, + genericWrapArray => _, + wrapRefArray => _, + wrapIntArray => _, + wrapDoubleArray => _, + wrapLongArray => _, + wrapFloatArray => _, + wrapCharArray => _, + wrapByteArray => _, + wrapShortArray => _, + wrapBooleanArray => _, + wrapUnitArray => _, + wrapString => _, + copyArrayToImmutableIndexedSeq => _, + _ +} +import scala.collection.Stepper.EfficientSplit +import scala.collection.immutable.Range +import scala.collection.mutable.ArrayBuilder +import scala.math.Ordering +import scala.reflect.ClassTag +import scala.util.Sorting + +object ArrayOps { + + @SerialVersionUID(3L) + private class ArrayView[A](xs: Array[A]) extends AbstractIndexedSeqView[A] { + def length = xs.length + def apply(n: Int) = xs(n) + override def toString: String = immutable.ArraySeq.unsafeWrapArray(xs).mkString("ArrayView(", ", ", ")") + } + + /** A lazy filtered array. No filtering is applied until one of `foreach`, `map` or `flatMap` is called. */ + class WithFilter[A](p: A => Boolean, xs: Array[A]) { + + /** Apply `f` to each element for its side effects. + * Note: [U] parameter needed to help scalac's type inference. + */ + def foreach[U](f: A => U): Unit = { + val len = xs.length + var i = 0 + while(i < len) { + val x = xs(i) + if(p(x)) f(x) + i += 1 + } + } + + /** Builds a new array by applying a function to all elements of this array. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. 
+ * @return a new array resulting from applying the given function + * `f` to each element of this array and collecting the results. + */ + def map[B: ClassTag](f: A => B): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while (i < xs.length) { + val x = xs(i) + if(p(x)) b += f(x) + i = i + 1 + } + b.result() + } + + /** Builds a new array by applying a function to all elements of this array + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given collection-valued function + * `f` to each element of this array and concatenating the results. + */ + def flatMap[B: ClassTag](f: A => IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while(i < xs.length) { + val x = xs(i) + if(p(x)) b ++= f(xs(i)) + i += 1 + } + b.result() + } + + def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + flatMap[B](x => asIterable(f(x))) + + /** Creates a new non-strict filter which combines this filter with the given predicate. 
*/ + def withFilter(q: A => Boolean): WithFilter[A] = new WithFilter[A](a => p(a) && q(a), xs) + } + + @SerialVersionUID(3L) + private[collection] final class ArrayIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[this] var pos = 0 + private[this] val len = xs.length + override def knownSize: Int = len - pos + def hasNext: Boolean = pos < len + def next(): A = { + if (pos >= xs.length) Iterator.empty.next() + val r = xs(pos) + pos += 1 + r + } + override def drop(n: Int): Iterator[A] = { + if (n > 0) { + val newPos = pos + n + pos = + if (newPos < 0 /* overflow */) len + else Math.min(len, newPos) + } + this + } + } + + @SerialVersionUID(3L) + private final class ReverseIterator[@specialized(Specializable.Everything) A](xs: Array[A]) extends AbstractIterator[A] with Serializable { + private[this] var pos = xs.length-1 + def hasNext: Boolean = pos >= 0 + def next(): A = { + if (pos < 0) Iterator.empty.next() + val r = xs(pos) + pos -= 1 + r + } + + override def drop(n: Int): Iterator[A] = { + if (n > 0) pos = Math.max( -1, pos - n) + this + } + } + + @SerialVersionUID(3L) + private final class GroupedIterator[A](xs: Array[A], groupSize: Int) extends AbstractIterator[Array[A]] with Serializable { + private[this] var pos = 0 + def hasNext: Boolean = pos < xs.length + def next(): Array[A] = { + if(pos >= xs.length) throw new NoSuchElementException + val r = new ArrayOps(xs).slice(pos, pos+groupSize) + pos += groupSize + r + } + } + + /** The cut-off point for the array size after which we switch from `Sorting.stableSort` to + * an implementation that copies the data to a boxed representation for use with `Arrays.sort`. + */ + private final val MaxStableSortLength = 300 + + /** Avoid an allocation in [[collect]]. */ + private val fallback: Any => Any = _ => fallback +} + +/** This class serves as a wrapper for `Array`s with many of the operations found in + * indexed sequences. 
Where needed, instances of arrays are implicitly converted + * into this class. There is generally no reason to create an instance explicitly or use + * an `ArrayOps` type. It is better to work with plain `Array` types instead and rely on + * the implicit conversion to `ArrayOps` when calling a method (which does not actually + * allocate an instance of `ArrayOps` because it is a value class). + * + * Neither `Array` nor `ArrayOps` are proper collection types + * (i.e. they do not extend `Iterable` or even `IterableOnce`). `mutable.ArraySeq` and + * `immutable.ArraySeq` serve this purpose. + * + * The difference between this class and `ArraySeq`s is that calling transformer methods such as + * `filter` and `map` will yield an array, whereas an `ArraySeq` will remain an `ArraySeq`. + * + * @tparam A type of the elements contained in this array. + */ +final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { + + @`inline` private[this] implicit def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) + + /** The size of this array. + * + * @return the number of elements in this array. + */ + @`inline` def size: Int = xs.length + + /** The size of this array. + * + * @return the number of elements in this array. + */ + @`inline` def knownSize: Int = xs.length + + /** Tests whether the array is empty. + * + * @return `true` if the array contains no elements, `false` otherwise. + */ + @`inline` def isEmpty: Boolean = xs.length == 0 + + /** Tests whether the array is not empty. + * + * @return `true` if the array contains at least one element, `false` otherwise. + */ + @`inline` def nonEmpty: Boolean = xs.length != 0 + + /** Selects the first element of this array. + * + * @return the first element of this array. + * @throws NoSuchElementException if the array is empty. + */ + def head: A = if (nonEmpty) xs.apply(0) else throw new NoSuchElementException("head of empty array") + + /** Selects the last element. 
+ * + * @return The last element of this array. + * @throws NoSuchElementException If the array is empty. + */ + def last: A = if (nonEmpty) xs.apply(xs.length-1) else throw new NoSuchElementException("last of empty array") + + /** Optionally selects the first element. + * + * @return the first element of this array if it is nonempty, + * `None` if it is empty. + */ + def headOption: Option[A] = if(isEmpty) None else Some(head) + + /** Optionally selects the last element. + * + * @return the last element of this array$ if it is nonempty, + * `None` if it is empty. + */ + def lastOption: Option[A] = if(isEmpty) None else Some(last) + + /** Compares the size of this array to a test value. + * + * @param otherSize the test value that gets compared with the size. + * @return A value `x` where + * {{{ + * x < 0 if this.size < otherSize + * x == 0 if this.size == otherSize + * x > 0 if this.size > otherSize + * }}} + */ + def sizeCompare(otherSize: Int): Int = Integer.compare(xs.length, otherSize) + + /** Compares the length of this array to a test value. + * + * @param len the test value that gets compared with the length. + * @return A value `x` where + * {{{ + * x < 0 if this.length < len + * x == 0 if this.length == len + * x > 0 if this.length > len + * }}} + */ + def lengthCompare(len: Int): Int = Integer.compare(xs.length, len) + + /** Method mirroring [[SeqOps.sizeIs]] for consistency, except it returns an `Int` + * because `size` is known and comparison is constant-time. 
+ * + * These operations are equivalent to [[sizeCompare(Int) `sizeCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.sizeIs < size // this.sizeCompare(size) < 0 + * this.sizeIs <= size // this.sizeCompare(size) <= 0 + * this.sizeIs == size // this.sizeCompare(size) == 0 + * this.sizeIs != size // this.sizeCompare(size) != 0 + * this.sizeIs >= size // this.sizeCompare(size) >= 0 + * this.sizeIs > size // this.sizeCompare(size) > 0 + * }}} + */ + def sizeIs: Int = xs.length + + /** Method mirroring [[SeqOps.lengthIs]] for consistency, except it returns an `Int` + * because `length` is known and comparison is constant-time. + * + * These operations are equivalent to [[lengthCompare(Int) `lengthCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.lengthIs < len // this.lengthCompare(len) < 0 + * this.lengthIs <= len // this.lengthCompare(len) <= 0 + * this.lengthIs == len // this.lengthCompare(len) == 0 + * this.lengthIs != len // this.lengthCompare(len) != 0 + * this.lengthIs >= len // this.lengthCompare(len) >= 0 + * this.lengthIs > len // this.lengthCompare(len) > 0 + * }}} + */ + def lengthIs: Int = xs.length + + /** Selects an interval of elements. The returned array is made up + * of all elements `x` which satisfy the invariant: + * {{{ + * from <= indexOf(x) < until + * }}} + * + * @param from the lowest index to include from this array. + * @param until the lowest index to EXCLUDE from this array. + * @return an array containing the elements greater than or equal to + * index `from` extending up to (but not including) index `until` + * of this array. 
+ */ + def slice(from: Int, until: Int): Array[A] = { + import java.util.Arrays.copyOfRange + val lo = max(from, 0) + val hi = min(until, xs.length) + if (hi > lo) { + (((xs: Array[_]): @unchecked) match { + case x: Array[AnyRef] => copyOfRange(x, lo, hi) + case x: Array[Int] => copyOfRange(x, lo, hi) + case x: Array[Double] => copyOfRange(x, lo, hi) + case x: Array[Long] => copyOfRange(x, lo, hi) + case x: Array[Float] => copyOfRange(x, lo, hi) + case x: Array[Char] => copyOfRange(x, lo, hi) + case x: Array[Byte] => copyOfRange(x, lo, hi) + case x: Array[Short] => copyOfRange(x, lo, hi) + case x: Array[Boolean] => copyOfRange(x, lo, hi) + }).asInstanceOf[Array[A]] + } else new Array[A](0) + } + + /** The rest of the array without its first element. */ + def tail: Array[A] = + if(xs.length == 0) throw new UnsupportedOperationException("tail of empty array") else slice(1, xs.length) + + /** The initial part of the array without its last element. */ + def init: Array[A] = + if(xs.length == 0) throw new UnsupportedOperationException("init of empty array") else slice(0, xs.length-1) + + /** Iterates over the tails of this array. The first value will be this + * array and the final one will be an empty array, with the intervening + * values the results of successive applications of `tail`. + * + * @return an iterator over all the tails of this array + */ + def tails: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).tail) + + /** Iterates over the inits of this array. The first value will be this + * array and the final one will be an empty array, with the intervening + * values the results of successive applications of `init`. + * + * @return an iterator over all the inits of this array + */ + def inits: Iterator[Array[A]] = iterateUntilEmpty(xs => new ArrayOps(xs).init) + + // A helper for tails and inits. 
+ private[this] def iterateUntilEmpty(f: Array[A] => Array[A]): Iterator[Array[A]] = + Iterator.iterate(xs)(f).takeWhile(x => x.length != 0) ++ Iterator.single(Array.empty[A]) + + /** An array containing the first `n` elements of this array. */ + def take(n: Int): Array[A] = slice(0, n) + + /** The rest of the array without its `n` first elements. */ + def drop(n: Int): Array[A] = slice(n, xs.length) + + /** An array containing the last `n` elements of this array. */ + def takeRight(n: Int): Array[A] = drop(xs.length - max(n, 0)) + + /** The rest of the array without its `n` last elements. */ + def dropRight(n: Int): Array[A] = take(xs.length - max(n, 0)) + + /** Takes longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest prefix of this array whose elements all satisfy + * the predicate `p`. + */ + def takeWhile(p: A => Boolean): Array[A] = { + val i = indexWhere(x => !p(x)) + val hi = if(i < 0) xs.length else i + slice(0, hi) + } + + /** Drops longest prefix of elements that satisfy a predicate. + * + * @param p The predicate used to test elements. + * @return the longest suffix of this array whose first element + * does not satisfy the predicate `p`. 
+ */ + def dropWhile(p: A => Boolean): Array[A] = { + val i = indexWhere(x => !p(x)) + val lo = if(i < 0) xs.length else i + slice(lo, xs.length) + } + + def iterator: Iterator[A] = + ((xs: Any @unchecked) match { + case xs: Array[AnyRef] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Int] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Double] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Long] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Float] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Char] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Byte] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Short] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Boolean] => new ArrayOps.ArrayIterator(xs) + case xs: Array[Unit] => new ArrayOps.ArrayIterator(xs) + case null => throw new NullPointerException + }).asInstanceOf[Iterator[A]] + + def stepper[S <: Stepper[_]](implicit shape: StepperShape[A, S]): S with EfficientSplit = { + import convert.impl._ + val s = (shape.shape: @unchecked) match { + case StepperShape.ReferenceShape => (xs: Any) match { + case bs: Array[Boolean] => new BoxedBooleanArrayStepper(bs, 0, xs.length) + case _ => new ObjectArrayStepper[AnyRef](xs.asInstanceOf[Array[AnyRef ]], 0, xs.length) + } + case StepperShape.IntShape => new IntArrayStepper (xs.asInstanceOf[Array[Int ]], 0, xs.length) + case StepperShape.LongShape => new LongArrayStepper (xs.asInstanceOf[Array[Long ]], 0, xs.length) + case StepperShape.DoubleShape => new DoubleArrayStepper (xs.asInstanceOf[Array[Double ]], 0, xs.length) + case StepperShape.ByteShape => new WidenedByteArrayStepper (xs.asInstanceOf[Array[Byte ]], 0, xs.length) + case StepperShape.ShortShape => new WidenedShortArrayStepper (xs.asInstanceOf[Array[Short ]], 0, xs.length) + case StepperShape.CharShape => new WidenedCharArrayStepper (xs.asInstanceOf[Array[Char ]], 0, xs.length) + case StepperShape.FloatShape => new WidenedFloatArrayStepper (xs.asInstanceOf[Array[Float ]], 0, xs.length) + } 
+ s.asInstanceOf[S with EfficientSplit] + } + + /** Partitions elements in fixed size arrays. + * @see [[scala.collection.Iterator]], method `grouped` + * + * @param size the number of elements per group + * @return An iterator producing arrays of size `size`, except the + * last will be less than size `size` if the elements don't divide evenly. + */ + def grouped(size: Int): Iterator[Array[A]] = new ArrayOps.GroupedIterator[A](xs, size) + + /** Splits this array into a prefix/suffix pair according to a predicate. + * + * Note: `c span p` is equivalent to (but more efficient than) + * `(c takeWhile p, c dropWhile p)`, provided the evaluation of the + * predicate `p` does not cause any side-effects. + * + * @param p the test predicate + * @return a pair consisting of the longest prefix of this array whose + * elements all satisfy `p`, and the rest of this array. + */ + def span(p: A => Boolean): (Array[A], Array[A]) = { + val i = indexWhere(x => !p(x)) + val idx = if(i < 0) xs.length else i + (slice(0, idx), slice(idx, xs.length)) + } + + /** Splits this array into two at a given position. + * Note: `c splitAt n` is equivalent to `(c take n, c drop n)`. + * + * @param n the position at which to split. + * @return a pair of arrays consisting of the first `n` + * elements of this array, and the other elements. + */ + def splitAt(n: Int): (Array[A], Array[A]) = (take(n), drop(n)) + + /** A pair of, first, all elements that satisfy predicate `p` and, second, all elements that do not. */ + def partition(p: A => Boolean): (Array[A], Array[A]) = { + val res1, res2 = ArrayBuilder.make[A] + var i = 0 + while(i < xs.length) { + val x = xs(i) + (if(p(x)) res1 else res2) += x + i += 1 + } + (res1.result(), res2.result()) + } + + /** Applies a function `f` to each element of the array and returns a pair of arrays: the first one + * made of those values returned by `f` that were wrapped in [[scala.util.Left]], and the second + * one made of those wrapped in [[scala.util.Right]]. 
+ * + * Example: + * {{{ + * val xs = Array(1, "one", 2, "two", 3, "three") partitionMap { + * case i: Int => Left(i) + * case s: String => Right(s) + * } + * // xs == (Array(1, 2, 3), + * // Array(one, two, three)) + * }}} + * + * @tparam A1 the element type of the first resulting collection + * @tparam A2 the element type of the second resulting collection + * @param f the 'split function' mapping the elements of this array to an [[scala.util.Either]] + * + * @return a pair of arrays: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], + * and the second one made of those wrapped in [[scala.util.Right]]. */ + def partitionMap[A1: ClassTag, A2: ClassTag](f: A => Either[A1, A2]): (Array[A1], Array[A2]) = { + val res1 = ArrayBuilder.make[A1] + val res2 = ArrayBuilder.make[A2] + var i = 0 + while(i < xs.length) { + f(xs(i)) match { + case Left(x) => res1 += x + case Right(x) => res2 += x + } + i += 1 + } + (res1.result(), res2.result()) + } + + /** Returns a new array with the elements in reversed order. */ + @inline def reverse: Array[A] = { + val len = xs.length + val res = new Array[A](len) + var i = 0 + while(i < len) { + res(len-i-1) = xs(i) + i += 1 + } + res + } + + /** An iterator yielding elements in reversed order. + * + * Note: `xs.reverseIterator` is the same as `xs.reverse.iterator` but implemented more efficiently. 
+ * + * @return an iterator yielding the elements of this array in reversed order + */ + def reverseIterator: Iterator[A] = + ((xs: Any @unchecked) match { + case xs: Array[AnyRef] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Int] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Double] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Long] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Float] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Char] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Byte] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Short] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Boolean] => new ArrayOps.ReverseIterator(xs) + case xs: Array[Unit] => new ArrayOps.ReverseIterator(xs) + case null => throw new NullPointerException + }).asInstanceOf[Iterator[A]] + + /** Selects all elements of this array which satisfy a predicate. + * + * @param p the predicate used to test elements. + * @return a new array consisting of all elements of this array that satisfy the given predicate `p`. + */ + def filter(p: A => Boolean): Array[A] = { + val res = ArrayBuilder.make[A] + var i = 0 + while(i < xs.length) { + val x = xs(i) + if(p(x)) res += x + i += 1 + } + res.result() + } + + /** Selects all elements of this array which do not satisfy a predicate. + * + * @param p the predicate used to test elements. + * @return a new array consisting of all elements of this array that do not satisfy the given predicate `p`. + */ + def filterNot(p: A => Boolean): Array[A] = filter(x => !p(x)) + + /** Sorts this array according to an Ordering. + * + * The sort is stable. That is, elements that are equal (as determined by + * `lt`) appear in the same order in the sorted sequence as in the original. + * + * @see [[scala.math.Ordering]] + * + * @param ord the ordering to be used to compare elements. + * @return an array consisting of the elements of this array + * sorted according to the ordering `ord`. 
+ */ + def sorted[B >: A](implicit ord: Ordering[B]): Array[A] = { + val len = xs.length + def boxed = if(len < ArrayOps.MaxStableSortLength) { + val a = xs.clone() + Sorting.stableSort(a)(using ord.asInstanceOf[Ordering[A]]) + a + } else { + val a = Array.copyAs[AnyRef](xs, len)(ClassTag.AnyRef) + Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]) + Array.copyAs[A](a, len) + } + if(len <= 1) xs.clone() + else ((xs: Array[_]) match { + case xs: Array[AnyRef] => + val a = Arrays.copyOf(xs, len); Arrays.sort(a, ord.asInstanceOf[Ordering[AnyRef]]); a + case xs: Array[Int] => + if(ord eq Ordering.Int) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Long] => + if(ord eq Ordering.Long) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Char] => + if(ord eq Ordering.Char) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Byte] => + if(ord eq Ordering.Byte) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Short] => + if(ord eq Ordering.Short) { val a = Arrays.copyOf(xs, len); Arrays.sort(a); a } + else boxed + case xs: Array[Boolean] => + if(ord eq Ordering.Boolean) { val a = Arrays.copyOf(xs, len); Sorting.stableSort(a); a } + else boxed + case xs => boxed + }).asInstanceOf[Array[A]] + } + + /** Sorts this array according to a comparison function. + * + * The sort is stable. That is, elements that are equal (as determined by + * `lt`) appear in the same order in the sorted sequence as in the original. + * + * @param lt the comparison function which tests whether + * its first argument precedes its second argument in + * the desired ordering. + * @return an array consisting of the elements of this array + * sorted according to the comparison function `lt`. 
+ */ + def sortWith(lt: (A, A) => Boolean): Array[A] = sorted(Ordering.fromLessThan(lt)) + + /** Sorts this array according to the Ordering which results from transforming + * an implicitly given Ordering with a transformation function. + * + * @see [[scala.math.Ordering]] + * @param f the transformation function mapping elements + * to some other domain `B`. + * @param ord the ordering assumed on domain `B`. + * @tparam B the target type of the transformation `f`, and the type where + * the ordering `ord` is defined. + * @return an array consisting of the elements of this array + * sorted according to the ordering where `x < y` if + * `ord.lt(f(x), f(y))`. + */ + def sortBy[B](f: A => B)(implicit ord: Ordering[B]): Array[A] = sorted(ord on f) + + /** Creates a non-strict filter of this array. + * + * Note: the difference between `c filter p` and `c withFilter p` is that + * the former creates a new array, whereas the latter only + * restricts the domain of subsequent `map`, `flatMap`, `foreach`, + * and `withFilter` operations. + * + * @param p the predicate used to test elements. + * @return an object of class `ArrayOps.WithFilter`, which supports + * `map`, `flatMap`, `foreach`, and `withFilter` operations. + * All these operations apply to those elements of this array + * which satisfy the predicate `p`. + */ + def withFilter(p: A => Boolean): ArrayOps.WithFilter[A] = new ArrayOps.WithFilter[A](p, xs) + + /** Finds index of first occurrence of some value in this array after or at some start index. + * + * @param elem the element value to search for. + * @param from the start index + * @return the index `>= from` of the first element of this array that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def indexOf(elem: A, from: Int = 0): Int = { + var i = from + while(i < xs.length) { + if(elem == xs(i)) return i + i += 1 + } + -1 + } + + /** Finds index of the first element satisfying some predicate after or at some start index. 
+ * + * @param p the predicate used to test elements. + * @param from the start index + * @return the index `>= from` of the first element of this array that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def indexWhere(@deprecatedName("f", "2.13.3") p: A => Boolean, from: Int = 0): Int = { + var i = from + while(i < xs.length) { + if(p(xs(i))) return i + i += 1 + } + -1 + } + + /** Finds index of last occurrence of some value in this array before or at a given end index. + * + * @param elem the element value to search for. + * @param end the end index. + * @return the index `<= end` of the last element of this array that is equal (as determined by `==`) + * to `elem`, or `-1`, if none exists. + */ + def lastIndexOf(elem: A, end: Int = xs.length - 1): Int = { + var i = min(end, xs.length-1) + while(i >= 0) { + if(elem == xs(i)) return i + i -= 1 + } + -1 + } + + /** Finds index of last element satisfying some predicate before or at given end index. + * + * @param p the predicate used to test elements. + * @return the index `<= end` of the last element of this array that satisfies the predicate `p`, + * or `-1`, if none exists. + */ + def lastIndexWhere(p: A => Boolean, end: Int = xs.length - 1): Int = { + var i = min(end, xs.length-1) + while(i >= 0) { + if(p(xs(i))) return i + i -= 1 + } + -1 + } + + /** Finds the first element of the array satisfying a predicate, if any. + * + * @param p the predicate used to test elements. + * @return an option value containing the first element in the array + * that satisfies `p`, or `None` if none exists. + */ + def find(@deprecatedName("f", "2.13.3") p: A => Boolean): Option[A] = { + val idx = indexWhere(p) + if(idx == -1) None else Some(xs(idx)) + } + + /** Tests whether a predicate holds for at least one element of this array. + * + * @param p the predicate used to test elements. 
+ * @return `true` if the given predicate `p` is satisfied by at least one element of this array, otherwise `false` + */ + def exists(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = indexWhere(p) >= 0 + + /** Tests whether a predicate holds for all elements of this array. + * + * @param p the predicate used to test elements. + * @return `true` if this array is empty or the given predicate `p` + * holds for all elements of this array, otherwise `false`. + */ + def forall(@deprecatedName("f", "2.13.3") p: A => Boolean): Boolean = { + var i = 0 + while(i < xs.length) { + if(!p(xs(i))) return false + i += 1 + } + true + } + + /** Applies a binary operator to a start value and all elements of this array, + * going left to right. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return the result of inserting `op` between consecutive elements of this array, + * going left to right with the start value `z` on the left: + * {{{ + * op(...op(z, x_1), x_2, ..., x_n) + * }}} + * where `x,,1,,, ..., x,,n,,` are the elements of this array. + * Returns `z` if this array is empty. 
+ */ + def foldLeft[B](z: B)(op: (B, A) => B): B = { + def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + val length = xs.length + var v: Any = z + var i = 0 + while(i < length) { + v = op(v, xs(i)) + i += 1 + } + v + } + ((xs: Any @unchecked) match { + case null => throw new NullPointerException // null-check first helps static analysis of instanceOf + case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + }).asInstanceOf[B] + } + + /** Produces an array containing cumulative results of applying the binary + * operator going left to right. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return array with intermediate values. + * + * Example: + * {{{ + * Array(1, 2, 3, 4).scanLeft(0)(_ + _) == Array(0, 1, 3, 6, 10) + * }}} + * + */ + def scanLeft[ B : ClassTag ](z: B)(op: (B, A) => B): Array[B] = { + var v = z + var i = 0 + val res = new Array[B](xs.length + 1) + while(i < xs.length) { + res(i) = v + v = op(v, xs(i)) + i += 1 + } + res(i) = v + res + } + + /** Computes a prefix scan of the elements of the array. + * + * Note: The neutral element `z` may be applied more than once. 
+ * + * @tparam B element type of the resulting array + * @param z neutral element for the operator `op` + * @param op the associative operator for the scan + * + * @return a new array containing the prefix scan of the elements in this array + */ + def scan[B >: A : ClassTag](z: B)(op: (B, B) => B): Array[B] = scanLeft(z)(op) + + /** Produces an array containing cumulative results of applying the binary + * operator going right to left. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return array with intermediate values. + * + * Example: + * {{{ + * Array(4, 3, 2, 1).scanRight(0)(_ + _) == Array(10, 6, 3, 1, 0) + * }}} + * + */ + def scanRight[ B : ClassTag ](z: B)(op: (A, B) => B): Array[B] = { + var v = z + var i = xs.length - 1 + val res = new Array[B](xs.length + 1) + res(xs.length) = z + while(i >= 0) { + v = op(xs(i), v) + res(i) = v + i -= 1 + } + res + } + + /** Applies a binary operator to all elements of this array and a start value, + * going right to left. + * + * @param z the start value. + * @param op the binary operator. + * @tparam B the result type of the binary operator. + * @return the result of inserting `op` between consecutive elements of this array, + * going right to left with the start value `z` on the right: + * {{{ + * op(x_1, op(x_2, ... op(x_n, z)...)) + * }}} + * where `x,,1,,, ..., x,,n,,` are the elements of this array. + * Returns `z` if this array is empty. 
+ */ + def foldRight[B](z: B)(op: (A, B) => B): B = { + def f[@specialized(Specializable.Everything) T](xs: Array[T], op: (Any, Any) => Any, z: Any): Any = { + var v = z + var i = xs.length - 1 + while(i >= 0) { + v = op(xs(i), v) + i -= 1 + } + v + } + ((xs: Any @unchecked) match { + case null => throw new NullPointerException + case xs: Array[AnyRef] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Int] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Double] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Long] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Float] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Char] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Byte] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Short] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Boolean] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + case xs: Array[Unit] => f(xs, op.asInstanceOf[(Any, Any) => Any], z) + }).asInstanceOf[B] + + } + + /** Folds the elements of this array using the specified associative binary operator. + * + * @tparam A1 a type parameter for the binary operator, a supertype of `A`. + * @param z a neutral element for the fold operation; may be added to the result + * an arbitrary number of times, and must not change the result (e.g., `Nil` for list concatenation, + * 0 for addition, or 1 for multiplication). + * @param op a binary operator that must be associative. + * @return the result of applying the fold operator `op` between all the elements, or `z` if this array is empty. + */ + def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op) + + /** Builds a new array by applying a function to all elements of this array. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. 
+ * @return a new array resulting from applying the given function + * `f` to each element of this array and collecting the results. + */ + def map[B](f: A => B)(implicit ct: ClassTag[B]): Array[B] = { + val len = xs.length + val ys = new Array[B](len) + if(len > 0) { + var i = 0 + (xs: Any @unchecked) match { + case xs: Array[AnyRef] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Int] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Double] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Long] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Float] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Char] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Byte] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Short] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Boolean] => while (i < len) { ys(i) = f(xs(i).asInstanceOf[A]); i = i+1 } + } + } + ys + } + + def mapInPlace(f: A => A): Array[A] = { + var i = 0 + while (i < xs.length) { + xs.update(i, f(xs(i))) + i = i + 1 + } + xs + } + + /** Builds a new array by applying a function to all elements of this array + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given collection-valued function + * `f` to each element of this array and concatenating the results. 
+ */ + def flatMap[B : ClassTag](f: A => IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + var i = 0 + while(i < xs.length) { + b ++= f(xs(i)) + i += 1 + } + b.result() + } + + def flatMap[BS, B](f: A => BS)(implicit asIterable: BS => Iterable[B], m: ClassTag[B]): Array[B] = + flatMap[B](x => asIterable(f(x))) + + /** Flattens a two-dimensional array by concatenating all its rows + * into a single array. + * + * @tparam B Type of row elements. + * @param asIterable A function that converts elements of this array to rows - Iterables of type `B`. + * @return An array obtained by concatenating rows of this array. + */ + def flatten[B](implicit asIterable: A => IterableOnce[B], m: ClassTag[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val len = xs.length + var size = 0 + var i = 0 + while(i < len) { + xs(i) match { + case it: IterableOnce[_] => + val k = it.knownSize + if(k > 0) size += k + case a: Array[_] => size += a.length + case _ => + } + i += 1 + } + if(size > 0) b.sizeHint(size) + i = 0 + while(i < len) { + b ++= asIterable(xs(i)) + i += 1 + } + b.result() + } + + /** Builds a new array by applying a partial function to all elements of this array + * on which the function is defined. + * + * @param pf the partial function which filters and maps the array. + * @tparam B the element type of the returned array. + * @return a new array resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. 
+ */ + def collect[B: ClassTag](pf: PartialFunction[A, B]): Array[B] = { + val fallback: Any => Any = ArrayOps.fallback + val b = ArrayBuilder.make[B] + var i = 0 + while (i < xs.length) { + val v = pf.applyOrElse(xs(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) b.addOne(v.asInstanceOf[B]) + i += 1 + } + b.result() + } + + /** Finds the first element of the array for which the given partial function is defined, and applies the + * partial function to it. */ + def collectFirst[B](@deprecatedName("f","2.13.9") pf: PartialFunction[A, B]): Option[B] = { + val fallback: Any => Any = ArrayOps.fallback + var i = 0 + while (i < xs.length) { + val v = pf.applyOrElse(xs(i), fallback) + if (v.asInstanceOf[AnyRef] ne fallback) return Some(v.asInstanceOf[B]) + i += 1 + } + None + } + + /** Returns an array formed from this array and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is longer than the other, its remaining elements are ignored. + * + * @param that The iterable providing the second half of each result pair + * @tparam B the type of the second half of the returned pairs + * @return a new array containing pairs consisting of corresponding elements of this array and `that`. + * The length of the returned array is the minimum of the lengths of this array and `that`. + */ + def zip[B](that: IterableOnce[B]): Array[(A, B)] = { + val b = new ArrayBuilder.ofRef[(A, B)]() + val k = that.knownSize + b.sizeHint(if(k >= 0) min(k, xs.length) else xs.length) + var i = 0 + val it = that.iterator + while(i < xs.length && it.hasNext) { + b += ((xs(i), it.next())) + i += 1 + } + b.result() + } + + /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is + * invoked on the returned `LazyZip2` decorator. 
+ * + * Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of + * constructing and deconstructing intermediary tuples. + * + * {{{ + * val xs = List(1, 2, 3) + * val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d) + * // res == List(4, 8, 12) + * }}} + * + * @param that the iterable providing the second element of each eventual pair + * @tparam B the type of the second element in each eventual pair + * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs + * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. + */ + def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, Array[A]] = new LazyZip2(xs, immutable.ArraySeq.unsafeWrapArray(xs), that) + + /** Returns an array formed from this array and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is shorter than the other, + * placeholder elements are used to extend the shorter collection to the length of the longer. + * + * @param that the iterable providing the second half of each result pair + * @param thisElem the element to be used to fill up the result if this array is shorter than `that`. + * @param thatElem the element to be used to fill up the result if `that` is shorter than this array. + * @return a new array containing pairs consisting of corresponding elements of this array and `that`. + * The length of the returned array is the maximum of the lengths of this array and `that`. + * If this array is shorter than `that`, `thisElem` values are used to pad the result. + * If `that` is shorter than this array, `thatElem` values are used to pad the result. 
+ */ + def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): Array[(A1, B)] = { + val b = new ArrayBuilder.ofRef[(A1, B)]() + val k = that.knownSize + b.sizeHint(max(k, xs.length)) + var i = 0 + val it = that.iterator + while(i < xs.length && it.hasNext) { + b += ((xs(i), it.next())) + i += 1 + } + while(it.hasNext) { + b += ((thisElem, it.next())) + i += 1 + } + while(i < xs.length) { + b += ((xs(i), thatElem)) + i += 1 + } + b.result() + } + + /** Zips this array with its indices. + * + * @return A new array containing pairs consisting of all elements of this array paired with their index. + * Indices start at `0`. + */ + def zipWithIndex: Array[(A, Int)] = { + val b = new Array[(A, Int)](xs.length) + var i = 0 + while(i < xs.length) { + b(i) = ((xs(i), i)) + i += 1 + } + b + } + + /** A copy of this array with an element appended. */ + def appended[B >: A : ClassTag](x: B): Array[B] = { + val dest = Array.copyAs[B](xs, xs.length+1) + dest(xs.length) = x + dest + } + + @`inline` final def :+ [B >: A : ClassTag](x: B): Array[B] = appended(x) + + /** A copy of this array with an element prepended. */ + def prepended[B >: A : ClassTag](x: B): Array[B] = { + val dest = new Array[B](xs.length + 1) + dest(0) = x + Array.copy(xs, 0, dest, 1, xs.length) + dest + } + + @`inline` final def +: [B >: A : ClassTag](x: B): Array[B] = prepended(x) + + /** A copy of this array with all elements of a collection prepended. */ + def prependedAll[B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val k = prefix.knownSize + if(k >= 0) b.sizeHint(k + xs.length) + b.addAll(prefix) + if(k < 0) b.sizeHint(b.length + xs.length) + b.addAll(xs) + b.result() + } + + /** A copy of this array with all elements of an array prepended. 
*/ + def prependedAll[B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = { + val dest = Array.copyAs[B](prefix, prefix.length+xs.length) + Array.copy(xs, 0, dest, prefix.length, xs.length) + dest + } + + @`inline` final def ++: [B >: A : ClassTag](prefix: IterableOnce[B]): Array[B] = prependedAll(prefix) + + @`inline` final def ++: [B >: A : ClassTag](prefix: Array[_ <: B]): Array[B] = prependedAll(prefix) + + /** A copy of this array with all elements of a collection appended. */ + def appendedAll[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = { + val b = ArrayBuilder.make[B] + val k = suffix.knownSize + if(k >= 0) b.sizeHint(k + xs.length) + b.addAll(xs) + b.addAll(suffix) + b.result() + } + + /** A copy of this array with all elements of an array appended. */ + def appendedAll[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = { + val dest = Array.copyAs[B](xs, xs.length+suffix.length) + Array.copy(suffix, 0, dest, xs.length, suffix.length) + dest + } + + @`inline` final def :++ [B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + + @`inline` final def :++ [B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + + @`inline` final def concat[B >: A : ClassTag](suffix: IterableOnce[B]): Array[B] = appendedAll(suffix) + + @`inline` final def concat[B >: A : ClassTag](suffix: Array[_ <: B]): Array[B] = appendedAll(suffix) + + @`inline` final def ++[B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = appendedAll(xs) + + @`inline` final def ++[B >: A : ClassTag](xs: Array[_ <: B]): Array[B] = appendedAll(xs) + + /** Tests whether this array contains a given value as an element. + * + * @param elem the element to test. + * @return `true` if this array has an element that is equal (as + * determined by `==`) to `elem`, `false` otherwise. + */ + def contains(elem: A): Boolean = exists (_ == elem) + + /** Returns a copy of this array with patched values. 
+ * Patching at negative indices is the same as patching starting at 0. + * Patching at indices at or larger than the length of the original array appends the patch to the end. + * If more values are replaced than actually exist, the excess is ignored. + * + * @param from The start index from which to patch + * @param other The patch values + * @param replaced The number of values in the original array that are replaced by the patch. + */ + def patch[B >: A : ClassTag](from: Int, other: IterableOnce[B], replaced: Int): Array[B] = { + val b = ArrayBuilder.make[B] + val k = other.knownSize + val r = if(replaced < 0) 0 else replaced + if(k >= 0) b.sizeHint(xs.length + k - r) + val chunk1 = if(from > 0) min(from, xs.length) else 0 + if(chunk1 > 0) b.addAll(xs, 0, chunk1) + b ++= other + val remaining = xs.length - chunk1 - r + if(remaining > 0) b.addAll(xs, xs.length - remaining, remaining) + b.result() + } + + /** Converts an array of pairs into an array of first elements and an array of second elements. + * + * @tparam A1 the type of the first half of the element pairs + * @tparam A2 the type of the second half of the element pairs + * @param asPair an implicit conversion which asserts that the element type + * of this Array is a pair. + * @param ct1 a class tag for `A1` type parameter that is required to create an instance + * of `Array[A1]` + * @param ct2 a class tag for `A2` type parameter that is required to create an instance + * of `Array[A2]` + * @return a pair of Arrays, containing, respectively, the first and second half + * of each element pair of this Array. 
+ */ + def unzip[A1, A2](implicit asPair: A => (A1, A2), ct1: ClassTag[A1], ct2: ClassTag[A2]): (Array[A1], Array[A2]) = { + val a1 = new Array[A1](xs.length) + val a2 = new Array[A2](xs.length) + var i = 0 + while (i < xs.length) { + val e = asPair(xs(i)) + a1(i) = e._1 + a2(i) = e._2 + i += 1 + } + (a1, a2) + } + + /** Converts an array of triples into three arrays, one containing the elements from each position of the triple. + * + * @tparam A1 the type of the first of three elements in the triple + * @tparam A2 the type of the second of three elements in the triple + * @tparam A3 the type of the third of three elements in the triple + * @param asTriple an implicit conversion which asserts that the element type + * of this Array is a triple. + * @param ct1 a class tag for T1 type parameter that is required to create an instance + * of Array[T1] + * @param ct2 a class tag for T2 type parameter that is required to create an instance + * of Array[T2] + * @param ct3 a class tag for T3 type parameter that is required to create an instance + * of Array[T3] + * @return a triple of Arrays, containing, respectively, the first, second, and third + * elements from each element triple of this Array. + */ + def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3), ct1: ClassTag[A1], ct2: ClassTag[A2], + ct3: ClassTag[A3]): (Array[A1], Array[A2], Array[A3]) = { + val a1 = new Array[A1](xs.length) + val a2 = new Array[A2](xs.length) + val a3 = new Array[A3](xs.length) + var i = 0 + while (i < xs.length) { + val e = asTriple(xs(i)) + a1(i) = e._1 + a2(i) = e._2 + a3(i) = e._3 + i += 1 + } + (a1, a2, a3) + } + + /** Transposes a two dimensional array. + * + * @tparam B Type of row elements. + * @param asArray A function that converts elements of this array to rows - arrays of type `B`. + * @return An array obtained by replacing elements of this arrays with rows the represent. 
+ */ + def transpose[B](implicit asArray: A => Array[B]): Array[Array[B]] = { + val aClass = xs.getClass.getComponentType + val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) + if (xs.length == 0) bb.result() + else { + def mkRowBuilder() = ArrayBuilder.make[B](using ClassTag[B](aClass.getComponentType)) + val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder()) + for (xs <- this) { + var i = 0 + for (x <- new ArrayOps(asArray(xs))) { + bs(i) += x + i += 1 + } + } + for (b <- new ArrayOps(bs)) bb += b.result() + bb.result() + } + } + + /** Apply `f` to each element for its side effects. + * Note: [U] parameter needed to help scalac's type inference. + */ + def foreach[U](f: A => U): Unit = { + val len = xs.length + var i = 0 + (xs: Any @unchecked) match { + case xs: Array[AnyRef] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Int] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Double] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Long] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Float] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Char] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Byte] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Short] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + case xs: Array[Boolean] => while (i < len) { f(xs(i).asInstanceOf[A]); i = i+1 } + } + } + + /** Selects all the elements of this array ignoring the duplicates. + * + * @return a new array consisting of all the elements of this array without duplicates. + */ + def distinct: Array[A] = distinctBy(identity) + + /** Selects all the elements of this array ignoring the duplicates as determined by `==` after applying + * the transforming function `f`. 
+ * + * @param f The transforming function whose result is used to determine the uniqueness of each element + * @tparam B the type of the elements after being transformed by `f` + * @return a new array consisting of all the elements of this array without duplicates. + */ + def distinctBy[B](f: A => B): Array[A] = + ArrayBuilder.make[A].addAll(iterator.distinctBy(f)).result() + + /** A copy of this array with an element value appended until a given target length is reached. + * + * @param len the target length + * @param elem the padding value + * @tparam B the element type of the returned array. + * @return a new array consisting of + * all elements of this array followed by the minimal number of occurrences of `elem` so + * that the resulting collection has a length of at least `len`. + */ + def padTo[B >: A : ClassTag](len: Int, elem: B): Array[B] = { + var i = xs.length + val newlen = max(i, len) + val dest = Array.copyAs[B](xs, newlen) + while(i < newlen) { + dest(i) = elem + i += 1 + } + dest + } + + /** Produces the range of all indices of this sequence. + * + * @return a `Range` value from `0` to one less than the length of this array. + */ + def indices: Range = Range(0, xs.length) + + /** Partitions this array into a map of arrays according to some discriminator function. + * + * @param f the discriminator function. + * @tparam K the type of keys returned by the discriminator function. + * @return A map from keys to arrays such that the following invariant holds: + * {{{ + * (xs groupBy f)(k) = xs filter (x => f(x) == k) + * }}} + * That is, every key `k` is bound to an array of those elements `x` + * for which `f(x)` equals `k`. 
+ */ + def groupBy[K](f: A => K): immutable.Map[K, Array[A]] = { + val m = mutable.Map.empty[K, ArrayBuilder[A]] + val len = xs.length + var i = 0 + while(i < len) { + val elem = xs(i) + val key = f(elem) + val bldr = m.getOrElseUpdate(key, ArrayBuilder.make[A]) + bldr += elem + i += 1 + } + m.view.mapValues(_.result()).toMap + } + + /** + * Partitions this array into a map of arrays according to a discriminator function `key`. + * Each element in a group is transformed into a value of type `B` using the `value` function. + * + * It is equivalent to `groupBy(key).mapValues(_.map(f))`, but more efficient. + * + * {{{ + * case class User(name: String, age: Int) + * + * def namesByAge(users: Array[User]): Map[Int, Array[String]] = + * users.groupMap(_.age)(_.name) + * }}} + * + * @param key the discriminator function + * @param f the element transformation function + * @tparam K the type of keys returned by the discriminator function + * @tparam B the type of values returned by the transformation function + */ + def groupMap[K, B : ClassTag](key: A => K)(f: A => B): immutable.Map[K, Array[B]] = { + val m = mutable.Map.empty[K, ArrayBuilder[B]] + val len = xs.length + var i = 0 + while(i < len) { + val elem = xs(i) + val k = key(elem) + val bldr = m.getOrElseUpdate(k, ArrayBuilder.make[B]) + bldr += f(elem) + i += 1 + } + m.view.mapValues(_.result()).toMap + } + + @`inline` final def toSeq: immutable.Seq[A] = toIndexedSeq + + def toIndexedSeq: immutable.IndexedSeq[A] = + immutable.ArraySeq.unsafeWrapArray(Array.copyOf(xs, xs.length)) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index 0. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B]): Int = copyToArray(xs, 0) + + /** Copy elements of this array to another array. 
+ * Fills the given array `xs` starting at index `start`. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached. + * + * @param xs the array to fill. + * @param start the starting index within the destination array. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B], start: Int): Int = copyToArray(xs, start, Int.MaxValue) + + /** Copy elements of this array to another array. + * Fills the given array `xs` starting at index `start` with at most `len` values. + * Copying will stop once either all the elements of this array have been copied, + * or the end of the array is reached, or `len` elements have been copied. + * + * @param xs the array to fill. + * @param start the starting index within the destination array. + * @param len the maximal number of elements to copy. + * @tparam B the type of the elements of the array. + */ + def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(this.xs.length, xs.length, start, len) + if (copied > 0) { + Array.copy(this.xs, 0, xs, start, copied) + } + copied + } + + /** Create a copy of this array with the specified element type. */ + def toArray[B >: A: ClassTag]: Array[B] = { + val destination = new Array[B](xs.length) + @annotation.unused val copied = copyToArray(destination, 0) + //assert(copied == xs.length) + destination + } + + /** Counts the number of elements in this array which satisfy a predicate */ + def count(p: A => Boolean): Int = { + var i, res = 0 + val len = xs.length + while(i < len) { + if(p(xs(i))) res += 1 + i += 1 + } + res + } + + // can't use a default arg because we already have another overload with a default arg + /** Tests whether this array starts with the given array. */ + @`inline` def startsWith[B >: A](that: Array[B]): Boolean = startsWith(that, 0) + + /** Tests whether this array contains the given array at a given index. 
+ * + * @param that the array to test + * @param offset the index where the array is searched. + * @return `true` if the array `that` is contained in this array at + * index `offset`, otherwise `false`. + */ + def startsWith[B >: A](that: Array[B], offset: Int): Boolean = { + val safeOffset = offset.max(0) + val thatl = that.length + if(thatl > xs.length-safeOffset) thatl == 0 + else { + var i = 0 + while(i < thatl) { + if(xs(i+safeOffset) != that(i)) return false + i += 1 + } + true + } + } + + /** Tests whether this array ends with the given array. + * + * @param that the array to test + * @return `true` if this array has `that` as a suffix, `false` otherwise. + */ + def endsWith[B >: A](that: Array[B]): Boolean = { + val thatl = that.length + val off = xs.length - thatl + if(off < 0) false + else { + var i = 0 + while(i < thatl) { + if(xs(i+off) != that(i)) return false + i += 1 + } + true + } + } + + /** A copy of this array with one single replaced element. + * @param index the position of the replacement + * @param elem the replacing element + * @return a new array which is a copy of this array with the element at position `index` replaced by `elem`. + * @throws IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`. + */ + def updated[B >: A : ClassTag](index: Int, elem: B): Array[B] = { + if(index < 0 || index >= xs.length) throw new IndexOutOfBoundsException(s"$index is out of bounds (min 0, max ${xs.length-1})") + val dest = toArray[B] + dest(index) = elem + dest + } + + @`inline` def view: IndexedSeqView[A] = new ArrayOps.ArrayView[A](xs) + + + /* ************************************************************************************************************ + The remaining methods are provided for completeness but they delegate to mutable.ArraySeq implementations which + may not provide the best possible performance. 
We need them in `ArrayOps` because their return type + mentions `C` (which is `Array[A]` in `StringOps` and `mutable.ArraySeq[A]` in `mutable.ArraySeq`). + ************************************************************************************************************ */ + + + /** Computes the multiset difference between this array and another sequence. + * + * @param that the sequence of elements to remove + * @return a new array which contains all elements of this array + * except some of occurrences of elements that also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form + * part of the result, but any following occurrences will. + */ + def diff[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).diff(that).toArray[A] + + /** Computes the multiset intersection between this array and another sequence. + * + * @param that the sequence of elements to intersect with. + * @return a new array which contains all elements of this array + * which also appear in `that`. + * If an element value `x` appears + * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained + * in the result, but any following occurrences will be omitted. + */ + def intersect[B >: A](that: Seq[B]): Array[A] = mutable.ArraySeq.make(xs).intersect(that).toArray[A] + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in grouped.) + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @param step the distance between the first elements of successive groups + * @return An iterator producing arrays of size `size`, except the + * last element (which may be the only element) will be truncated + * if there are fewer than `size` elements remaining to be grouped. 
+ */ + def sliding(size: Int, step: Int = 1): Iterator[Array[A]] = mutable.ArraySeq.make(xs).sliding(size, step).map(_.toArray[A]) + + /** Iterates over combinations of elements. + * + * A '''combination''' of length `n` is a sequence of `n` elements selected in order of their first index in this sequence. + * + * For example, `"xyx"` has two combinations of length 2. The `x` is selected first: `"xx"`, `"xy"`. + * The sequence `"yx"` is not returned as a combination because it is subsumed by `"xy"`. + * + * If there is more than one way to generate the same combination, only one will be returned. + * + * For example, the result `"xy"` arbitrarily selected one of the `x` elements. + * + * As a further illustration, `"xyxx"` has three different ways to generate `"xy"` because there are three elements `x` + * to choose from. Moreover, there are three unordered pairs `"xx"` but only one is returned. + * + * It is not specified which of these equal combinations is returned. It is an implementation detail + * that should not be relied on. For example, the combination `"xx"` does not necessarily contain + * the first `x` in this sequence. This behavior is observable if the elements compare equal + * but are not identical. + * + * As a consequence, `"xyx".combinations(3).next()` is `"xxy"`: the combination does not reflect the order + * of the original sequence, but the order in which elements were selected, by "first index"; + * the order of each `x` element is also arbitrary. 
+ * + * @return An Iterator which traverses the n-element combinations of this array + * @example {{{ + * Array('a', 'b', 'b', 'b', 'c').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(a, b) + * // Array(a, c) + * // Array(b, b) + * // Array(b, c) + * Array('b', 'a', 'b').combinations(2).map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(b, b) + * // Array(b, a) + * }}} + */ + def combinations(n: Int): Iterator[Array[A]] = mutable.ArraySeq.make(xs).combinations(n).map(_.toArray[A]) + + /** Iterates over distinct permutations of elements. + * + * @return An Iterator which traverses the distinct permutations of this array. + * @example {{{ + * Array('a', 'b', 'b').permutations.map(runtime.ScalaRunTime.stringOf).foreach(println) + * // Array(a, b, b) + * // Array(b, a, b) + * // Array(b, b, a) + * }}} + */ + def permutations: Iterator[Array[A]] = mutable.ArraySeq.make(xs).permutations.map(_.toArray[A]) + + // we have another overload here, so we need to duplicate this method + /** Tests whether this array contains the given sequence at a given index. + * + * @param that the sequence to test + * @param offset the index where the sequence is searched. + * @return `true` if the sequence `that` is contained in this array at + * index `offset`, otherwise `false`. + */ + def startsWith[B >: A](that: IterableOnce[B], offset: Int = 0): Boolean = mutable.ArraySeq.make(xs).startsWith(that, offset) + + // we have another overload here, so we need to duplicate this method + /** Tests whether this array ends with the given sequence. + * + * @param that the sequence to test + * @return `true` if this array has `that` as a suffix, `false` otherwise. 
+ */ + def endsWith[B >: A](that: Iterable[B]): Boolean = mutable.ArraySeq.make(xs).endsWith(that) +} diff --git a/scala2-library-bootstrapped/src/scala/collection/Factory.scala b/scala2-library-bootstrapped/src/scala/collection/Factory.scala new file mode 100644 index 000000000000..6006f292bb19 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/Factory.scala @@ -0,0 +1,784 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.collection.immutable.NumericRange +import scala.language.implicitConversions +import scala.collection.mutable.Builder +import scala.annotation.unchecked.uncheckedVariance +import scala.reflect.ClassTag + +/** + * A factory that builds a collection of type `C` with elements of type `A`. + * + * This is a general form of any factory ([[IterableFactory]], + * [[SortedIterableFactory]], [[MapFactory]] and [[SortedMapFactory]]) whose + * element type is fixed. + * + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + */ +trait Factory[-A, +C] extends Any { + + /** + * @return A collection of type `C` containing the same elements + * as the source collection `it`. + * @param it Source collection + */ + def fromSpecific(it: IterableOnce[A]): C + + /** Get a Builder for the collection. For non-strict collection types this will use an intermediate buffer. + * Building collections with `fromSpecific` is preferred because it can be lazy for lazy collections. 
*/ + def newBuilder: Builder[A, C] +} + +object Factory { + + implicit val stringFactory: Factory[Char, String] = new StringFactory + @SerialVersionUID(3L) + private class StringFactory extends Factory[Char, String] with Serializable { + def fromSpecific(it: IterableOnce[Char]): String = { + val b = new mutable.StringBuilder(scala.math.max(0, it.knownSize)) + b ++= it + b.result() + } + def newBuilder: Builder[Char, String] = new mutable.StringBuilder() + } + + implicit def arrayFactory[A: ClassTag]: Factory[A, Array[A]] = new ArrayFactory[A] + @SerialVersionUID(3L) + private class ArrayFactory[A: ClassTag] extends Factory[A, Array[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): Array[A] = { + val b = newBuilder + b.sizeHint(scala.math.max(0, it.knownSize)) + b ++= it + b.result() + } + def newBuilder: Builder[A, Array[A]] = mutable.ArrayBuilder.make[A] + } + +} + +/** Base trait for companion objects of unconstrained collection types that may require + * multiple traversals of a source collection to build a target collection `CC`. + * + * @tparam CC Collection type constructor (e.g. `List`) + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait IterableFactory[+CC[_]] extends Serializable { + + /** Creates a target $coll from an existing source collection + * + * @param source Source collection + * @tparam A the type of the collection’s elements + * @return a new $coll with the elements of `source` + */ + def from[A](source: IterableOnce[A]): CC[A] + + /** An empty collection + * @tparam A the type of the ${coll}'s elements + */ + def empty[A]: CC[A] + + /** Creates a $coll with the specified elements. 
+ * @tparam A the type of the ${coll}'s elements + * @param elems the elements of the created $coll + * @return a new $coll with elements `elems` + */ + def apply[A](elems: A*): CC[A] = from(elems) + + /** Produces a $coll containing repeated applications of a function to a start value. + * + * @param start the start value of the $coll + * @param len the number of elements contained in the $coll + * @param f the function that's repeatedly applied + * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[A](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) + + /** Produces a $coll that uses a function `f` to produce elements of type `A` + * and update an internal state of type `S`. + * + * @param init State initial value + * @param f Computes the next element (or returns `None` to signal + * the end of the collection) + * @tparam A Type of the elements + * @tparam S Type of the internal state + * @return a $coll that produces elements using `f` until `f` returns `None` + */ + def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f)) + + /** Produces a $coll containing a sequence of increasing of integers. + * + * @param start the first element of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @return a $coll with values `start, start + 1, ..., end - 1` + */ + def range[A : Integral](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one)) + + /** Produces a $coll containing equally spaced values in some integer interval. 
+ * @param start the start value of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @param step the difference between successive elements of the $coll (must be positive or negative) + * @return a $coll with values `start, start + step, ...` up to, but excluding `end` + */ + def range[A : Integral](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) + + /** + * @return A builder for $Coll objects. + * @tparam A the type of the ${coll}’s elements + */ + def newBuilder[A]: Builder[A, CC[A]] + + /** Produces a $coll containing the results of some element computation a number of times. + * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return A $coll that contains the results of `n` evaluations of `elem`. + */ + def fill[A](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) + + /** Produces a two-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. 
+ */ + def fill[A](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. + */ + def fill[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4, n5)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. + * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. 
+ * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. 
+ * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + + /** Concatenates all argument collections into a single $coll. + * + * @param xss the collections that are to be concatenated. + * @return the concatenation of all the collections. + */ + def concat[A](xss: Iterable[A]*): CC[A] = { + from(xss.foldLeft(View.empty[A])(_ ++ _)) + } + + implicit def iterableFactory[A]: Factory[A, CC[A]] = IterableFactory.toFactory(this) +} + +object IterableFactory { + + /** + * Fixes the element type of `factory` to `A` + * @param factory The factory to fix the element type + * @tparam A Type of elements + * @tparam CC Collection type constructor of the factory (e.g. 
`Seq`, `List`) + * @return A [[Factory]] that uses the given `factory` to build a collection of elements + * of type `A` + */ + implicit def toFactory[A, CC[_]](factory: IterableFactory[CC]): Factory[A, CC[A]] = new ToFactory[A, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[A, CC[_]](factory: IterableFactory[CC]) extends Factory[A, CC[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] + } + + implicit def toBuildFrom[A, CC[_]](factory: IterableFactory[CC]): BuildFrom[Any, A, CC[A]] = + new BuildFrom[Any, A, CC[A]] { + def fromSpecific(from: Any)(it: IterableOnce[A]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder + } + + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: IterableFactory[CC]) extends IterableFactory[CC] { + override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) + def empty[A]: CC[A] = delegate.empty + def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] + } +} + +/** + * @tparam CC Collection type constructor (e.g. `List`) + */ +trait SeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends IterableFactory[CC] { + import SeqFactory.UnapplySeqWrapper + final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? 
+} + +object SeqFactory { + @SerialVersionUID(3L) + class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: SeqFactory[CC]) extends SeqFactory[CC] { + override def apply[A](elems: A*): CC[A] = delegate.apply(elems: _*) + def empty[A]: CC[A] = delegate.empty + def from[E](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder[A] + } + + final class UnapplySeqWrapper[A](private val c: SeqOps[A, Seq, Seq[A]]) extends AnyVal { + def isEmpty: false = false + def get: UnapplySeqWrapper[A] = this + def lengthCompare(len: Int): Int = c.lengthCompare(len) + def apply(i: Int): A = c(i) + def drop(n: Int): scala.Seq[A] = c match { + case seq: scala.Seq[A] => seq.drop(n) + case _ => c.view.drop(n).toSeq + } + def toSeq: scala.Seq[A] = c.toSeq + } +} + +trait StrictOptimizedSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends SeqFactory[CC] { + + override def fill[A](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + override def tabulate[A](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + + override def concat[A](xss: Iterable[A]*): CC[A] = { + val b = newBuilder[A] + val knownSizes = xss.view.map(_.knownSize) + if (knownSizes forall (_ >= 0)) { + b.sizeHint(knownSizes.sum) + } + for (xs <- xss) b ++= xs + b.result() + } + +} + +/** + * @tparam A Type of elements (e.g. `Int`, `Boolean`, etc.) + * @tparam C Type of collection (e.g. `List[Int]`, `TreeMap[Int, String]`, etc.) + * @define factoryInfo + * This object provides a set of operations to create $Coll values. 
+ * + * @define coll collection + * @define Coll `Iterable` + */ +trait SpecificIterableFactory[-A, +C] extends Factory[A, C] { + def empty: C + def apply(xs: A*): C = fromSpecific(xs) + def fill(n: Int)(elem: => A): C = fromSpecific(new View.Fill(n)(elem)) + def newBuilder: Builder[A, C] + + implicit def specificIterableFactory: Factory[A, C] = this +} + +/** + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait MapFactory[+CC[_, _]] extends Serializable { + + /** + * An empty Map + */ + def empty[K, V]: CC[K, V] + + /** + * A collection of type Map generated from given iterable object. + */ + def from[K, V](it: IterableOnce[(K, V)]): CC[K, V] + + /** + * A collection of type Map that contains given key/value bindings. + */ + def apply[K, V](elems: (K, V)*): CC[K, V] = from(elems) + + /** + * The default builder for Map objects. + */ + def newBuilder[K, V]: Builder[(K, V), CC[K, V]] + + /** + * The default Factory instance for maps. + */ + implicit def mapFactory[K, V]: Factory[(K, V), CC[K, V]] = MapFactory.toFactory(this) +} + +object MapFactory { + + /** + * Fixes the key and value types of `factory` to `K` and `V`, respectively + * @param factory The factory to fix the key and value types + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor of the factory (e.g. `Map`, `HashMap`, etc.) 
+ * @return A [[Factory]] that uses the given `factory` to build a map with keys of type `K` + * and values of type `V` + */ + implicit def toFactory[K, V, CC[_, _]](factory: MapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[K, V, CC[_, _]](factory: MapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] + } + + implicit def toBuildFrom[K, V, CC[_, _]](factory: MapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = + new BuildFrom[Any, (K, V), CC[K, V]] { + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder[K, V] + } + + @SerialVersionUID(3L) + class Delegate[C[_, _]](delegate: MapFactory[C]) extends MapFactory[C] { + override def apply[K, V](elems: (K, V)*): C[K, V] = delegate.apply(elems: _*) + def from[K, V](it: IterableOnce[(K, V)]): C[K, V] = delegate.from(it) + def empty[K, V]: C[K, V] = delegate.empty + def newBuilder[K, V]: Builder[(K, V), C[K, V]] = delegate.newBuilder + } +} + +/** Base trait for companion objects of collections that require an implicit evidence. + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + * @tparam Ev Unary type constructor for the implicit evidence required for an element type + * (typically `Ordering` or `ClassTag`) + * + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait EvidenceIterableFactory[+CC[_], Ev[_]] extends Serializable { + + def from[E : Ev](it: IterableOnce[E]): CC[E] + + def empty[A : Ev]: CC[A] + + def apply[A : Ev](xs: A*): CC[A] = from(xs) + + /** Produces a $coll containing the results of some element computation a number of times. 
+ * @param n the number of elements contained in the $coll. + * @param elem the element computation + * @return A $coll that contains the results of `n` evaluations of `elem`. + */ + def fill[A : Ev](n: Int)(elem: => A): CC[A] = from(new View.Fill(n)(elem)) + + /** Produces a $coll containing values of a given function over a range of integer values starting from 0. + * @param n The number of elements in the $coll + * @param f The function computing element values + * @return A $coll consisting of elements `f(0), ..., f(n -1)` + */ + def tabulate[A : Ev](n: Int)(f: Int => A): CC[A] = from(new View.Tabulate(n)(f)) + + /** Produces a $coll containing repeated applications of a function to a start value. + * + * @param start the start value of the $coll + * @param len the number of elements contained in the $coll + * @param f the function that's repeatedly applied + * @return a $coll with `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[A : Ev](start: A, len: Int)(f: A => A): CC[A] = from(new View.Iterate(start, len)(f)) + + /** Produces a $coll that uses a function `f` to produce elements of type `A` + * and update an internal state of type `S`. + * + * @param init State initial value + * @param f Computes the next element (or returns `None` to signal + * the end of the collection) + * @tparam A Type of the elements + * @tparam S Type of the internal state + * @return a $coll that produces elements using `f` until `f` returns `None` + */ + def unfold[A : Ev, S](init: S)(f: S => Option[(A, S)]): CC[A] = from(new View.Unfold(init)(f)) + + def newBuilder[A : Ev]: Builder[A, CC[A]] + + implicit def evidenceIterableFactory[A : Ev]: Factory[A, CC[A]] = EvidenceIterableFactory.toFactory(this) +} + +object EvidenceIterableFactory { + + /** + * Fixes the element type of `factory` to `A` + * @param factory The factory to fix the element type + * @tparam A Type of elements + * @tparam CC Collection type constructor of the factory (e.g. 
`TreeSet`) + * @tparam Ev Type constructor of the evidence (usually `Ordering` or `ClassTag`) + * @return A [[Factory]] that uses the given `factory` to build a collection of elements + * of type `A` + */ + implicit def toFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): Factory[A, CC[A]] = new ToFactory[Ev, A, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends Factory[A, CC[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def newBuilder: Builder[A, CC[A]] = factory.newBuilder[A] + } + + implicit def toBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]): BuildFrom[Any, A, CC[A]] = new EvidenceIterableFactoryToBuildFrom(factory) + private class EvidenceIterableFactoryToBuildFrom[Ev[_], A: Ev, CC[_]](factory: EvidenceIterableFactory[CC, Ev]) extends BuildFrom[Any, A, CC[A]] { + def fromSpecific(from: Any)(it: IterableOnce[A]): CC[A] = factory.from[A](it) + def newBuilder(from: Any): Builder[A, CC[A]] = factory.newBuilder[A] + } + + @SerialVersionUID(3L) + class Delegate[CC[_], Ev[_]](delegate: EvidenceIterableFactory[CC, Ev]) extends EvidenceIterableFactory[CC, Ev] { + override def apply[A: Ev](xs: A*): CC[A] = delegate.apply(xs: _*) + def empty[A : Ev]: CC[A] = delegate.empty + def from[E : Ev](it: IterableOnce[E]): CC[E] = delegate.from(it) + def newBuilder[A : Ev]: Builder[A, CC[A]] = delegate.newBuilder[A] + } +} + +/** Base trait for companion objects of collections that require an implicit `Ordering`. + * @tparam CC Collection type constructor (e.g. 
`SortedSet`) + */ +trait SortedIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, Ordering] + +object SortedIterableFactory { + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, Ordering]) + extends EvidenceIterableFactory.Delegate[CC, Ordering](delegate) with SortedIterableFactory[CC] +} + +/** Base trait for companion objects of collections that require an implicit `ClassTag`. + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + */ +trait ClassTagIterableFactory[+CC[_]] extends EvidenceIterableFactory[CC, ClassTag] { + + @`inline` private[this] implicit def ccClassTag[X]: ClassTag[CC[X]] = + ClassTag.AnyRef.asInstanceOf[ClassTag[CC[X]]] // Good enough for boxed vs primitive arrays + + /** Produces a $coll containing a sequence of increasing of integers. + * + * @param start the first element of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @return a $coll with values `start, start + 1, ..., end - 1` + */ + def range[A : Integral : ClassTag](start: A, end: A): CC[A] = from(NumericRange(start, end, implicitly[Integral[A]].one)) + + /** Produces a $coll containing equally spaced values in some integer interval. + * @param start the start value of the $coll + * @param end the end value of the $coll (the first value NOT contained) + * @param step the difference between successive elements of the $coll (must be positive or negative) + * @return a $coll with values `start, start + step, ...` up to, but excluding `end` + */ + def range[A : Integral : ClassTag](start: A, end: A, step: A): CC[A] = from(NumericRange(start, end, step)) + + /** Produces a two-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2` evaluations of `elem`. 
+ */ + def fill[A : ClassTag](n1: Int, n2: Int)(elem: => A): CC[CC[A] @uncheckedVariance] = fill(n1)(fill(n2)(elem)) + + /** Produces a three-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int)(elem: => A): CC[CC[CC[A]] @uncheckedVariance] = fill(n1)(fill(n2, n3)(elem)) + + /** Produces a four-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4` evaluations of `elem`. + */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4)(elem)) + + /** Produces a five-dimensional $coll containing the results of some element computation a number of times. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + * @return A $coll that contains the results of `n1 x n2 x n3 x n4 x n5` evaluations of `elem`. 
+ */ + def fill[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + fill(n1)(fill(n2, n3, n4, n5)(elem)) + + /** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2)` + * for `0 <= i1 < n1` and `0 <= i2 < n2`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int)(f: (Int, Int) => A): CC[CC[A] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Produces a three-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, and `0 <= i3 < n3`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => A): CC[CC[CC[A]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Produces a four-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, and `0 <= i4 < n4`. 
+ */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => A): CC[CC[CC[CC[A]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Produces a five-dimensional $coll containing values of a given function over ranges of integer values starting from 0. + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + * @return A $coll consisting of elements `f(i1, i2, i3, i4, i5)` + * for `0 <= i1 < n1`, `0 <= i2 < n2`, `0 <= i3 < n3`, `0 <= i4 < n4`, and `0 <= i5 < n5`. + */ + def tabulate[A : ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => A): CC[CC[CC[CC[CC[A]]]] @uncheckedVariance] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) +} + +object ClassTagIterableFactory { + @SerialVersionUID(3L) + class Delegate[CC[_]](delegate: EvidenceIterableFactory[CC, ClassTag]) + extends EvidenceIterableFactory.Delegate[CC, ClassTag](delegate) with ClassTagIterableFactory[CC] + + /** An IterableFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be + * sound depending on the use of the `ClassTag` by the collection implementation. 
*/ + @SerialVersionUID(3L) + class AnyIterableDelegate[CC[_]](delegate: ClassTagIterableFactory[CC]) extends IterableFactory[CC] { + def empty[A]: CC[A] = delegate.empty(using ClassTag.Any).asInstanceOf[CC[A]] + def from[A](it: IterableOnce[A]): CC[A] = delegate.from[Any](it)(using ClassTag.Any).asInstanceOf[CC[A]] + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(using ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] + override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(using ClassTag.Any).asInstanceOf[CC[A]] + override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(using ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = delegate.unfold[A, S](init)(f)(using ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end)(using i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A, step: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end, step)(using i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def fill[A](n: Int)(elem: => A): CC[A] = delegate.fill[Any](n)(elem)(using ClassTag.Any).asInstanceOf[CC[A]] + override def tabulate[A](n: Int)(f: Int => A): CC[A] = delegate.tabulate[Any](n)(f)(using ClassTag.Any).asInstanceOf[CC[A]] + } +} + +/** + * @tparam CC Collection type constructor (e.g. `ArraySeq`) + */ +trait ClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagIterableFactory[CC] { + import SeqFactory.UnapplySeqWrapper + final def unapplySeq[A](x: CC[A] @uncheckedVariance): UnapplySeqWrapper[A] = new UnapplySeqWrapper(x) // TODO is uncheckedVariance sound here? 
+} + +object ClassTagSeqFactory { + @SerialVersionUID(3L) + class Delegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) + extends ClassTagIterableFactory.Delegate[CC](delegate) with ClassTagSeqFactory[CC] + + /** A SeqFactory that uses ClassTag.Any as the evidence for every element type. This may or may not be + * sound depending on the use of the `ClassTag` by the collection implementation. */ + @SerialVersionUID(3L) + class AnySeqDelegate[CC[A] <: SeqOps[A, Seq, Seq[A]]](delegate: ClassTagSeqFactory[CC]) + extends ClassTagIterableFactory.AnyIterableDelegate[CC](delegate) with SeqFactory[CC] +} + +trait StrictOptimizedClassTagSeqFactory[+CC[A] <: SeqOps[A, Seq, Seq[A]]] extends ClassTagSeqFactory[CC] { + + override def fill[A : ClassTag](n: Int)(elem: => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += elem + i += 1 + } + b.result() + } + + override def tabulate[A : ClassTag](n: Int)(f: Int => A): CC[A] = { + val b = newBuilder[A] + b.sizeHint(n) + var i = 0 + while (i < n) { + b += f(i) + i += 1 + } + b.result() + } + +} + +/** + * @define factoryInfo + * This object provides a set of operations to create $Coll values. + * + * @define coll collection + * @define Coll `Iterable` + */ +trait SortedMapFactory[+CC[_, _]] extends Serializable { + + def empty[K : Ordering, V]: CC[K, V] + + def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] + + def apply[K : Ordering, V](elems: (K, V)*): CC[K, V] = from(elems) + + def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] + + implicit def sortedMapFactory[K : Ordering, V]: Factory[(K, V), CC[K, V]] = SortedMapFactory.toFactory(this) + +} + +object SortedMapFactory { + + /** + * Implicit conversion that fixes the key and value types of `factory` to `K` and `V`, + * respectively. 
+ * + * @param factory The factory to fix the key and value types + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor of the factory (e.g. `TreeMap`) + * @return A [[Factory]] that uses the given `factory` to build a map with keys of + * type `K` and values of type `V` + */ + implicit def toFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): Factory[(K, V), CC[K, V]] = new ToFactory[K, V, CC](factory) + + @SerialVersionUID(3L) + private[this] class ToFactory[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends Factory[(K, V), CC[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CC[K, V] = factory.from[K, V](it) + def newBuilder: Builder[(K, V), CC[K, V]] = factory.newBuilder[K, V] + } + + implicit def toBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]): BuildFrom[Any, (K, V), CC[K, V]] = new SortedMapFactoryToBuildFrom(factory) + private class SortedMapFactoryToBuildFrom[K : Ordering, V, CC[_, _]](factory: SortedMapFactory[CC]) extends BuildFrom[Any, (K, V), CC[K, V]] { + def fromSpecific(from: Any)(it: IterableOnce[(K, V)]) = factory.from(it) + def newBuilder(from: Any) = factory.newBuilder[K, V] + } + + @SerialVersionUID(3L) + class Delegate[CC[_, _]](delegate: SortedMapFactory[CC]) extends SortedMapFactory[CC] { + override def apply[K: Ordering, V](elems: (K, V)*): CC[K, V] = delegate.apply(elems: _*) + def from[K : Ordering, V](it: IterableOnce[(K, V)]): CC[K, V] = delegate.from(it) + def empty[K : Ordering, V]: CC[K, V] = delegate.empty + def newBuilder[K : Ordering, V]: Builder[(K, V), CC[K, V]] = delegate.newBuilder + } +} diff --git a/scala2-library-bootstrapped/src/scala/collection/Iterable.scala b/scala2-library-bootstrapped/src/scala/collection/Iterable.scala new file mode 100644 index 000000000000..8f9142583b29 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/Iterable.scala @@ -0,0 +1,1043 @@ +/* + * Scala 
(https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package collection + +import scala.annotation.nowarn +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.mutable.Builder +import scala.collection.View.{LeftPartitionMapped, RightPartitionMapped} + +/** Base trait for generic collections. + * + * @tparam A the element type of the collection + * + * @define Coll `Iterable` + * @define coll iterable collection + */ +trait Iterable[+A] extends IterableOnce[A] + with IterableOps[A, Iterable, Iterable[A]] + with IterableFactoryDefaults[A, Iterable] { + + // The collection itself + @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") + final def toIterable: this.type = this + + final protected def coll: this.type = this + + def iterableFactory: IterableFactory[Iterable] = Iterable + + @deprecated("Iterable.seq always returns the iterable itself", "2.13.0") + def seq: this.type = this + + /** Defines the prefix of this object's `toString` representation. + * + * It is recommended to return the name of the concrete collection type, but + * not implementation subclasses. For example, for `ListMap` this method should + * return `"ListMap"`, not `"Map"` (the supertype) or `"Node"` (an implementation + * subclass). + * + * The default implementation returns "Iterable". It is overridden for the basic + * collection kinds "Seq", "IndexedSeq", "LinearSeq", "Buffer", "Set", "Map", + * "SortedSet", "SortedMap" and "View". + * + * @return a string representation which starts the result of `toString` + * applied to this $coll. 
By default the string prefix is the + * simple name of the collection class $coll. + */ + protected[this] def className: String = stringPrefix + + /** Forwarder to `className` for use in `scala.runtime.ScalaRunTime`. + * + * This allows the proper visibility for `className` to be + * published, but provides the exclusive access needed by + * `scala.runtime.ScalaRunTime.stringOf` (and a few tests in + * the test suite). + */ + private[scala] final def collectionClassName: String = className + + @deprecatedOverriding("Override className instead", "2.13.0") + protected[this] def stringPrefix: String = "Iterable" + + /** Converts this $coll to a string. + * + * @return a string representation of this collection. By default this + * string consists of the `className` of this $coll, followed + * by all elements separated by commas and enclosed in parentheses. + */ + override def toString = mkString(className + "(", ", ", ")") + + /** Analogous to `zip` except that the elements in each collection are not consumed until a strict operation is + * invoked on the returned `LazyZip2` decorator. + * + * Calls to `lazyZip` can be chained to support higher arities (up to 4) without incurring the expense of + * constructing and deconstructing intermediary tuples. + * + * {{{ + * val xs = List(1, 2, 3) + * val res = (xs lazyZip xs lazyZip xs lazyZip xs).map((a, b, c, d) => a + b + c + d) + * // res == List(4, 8, 12) + * }}} + * + * @param that the iterable providing the second element of each eventual pair + * @tparam B the type of the second element in each eventual pair + * @return a decorator `LazyZip2` that allows strict operations to be performed on the lazily evaluated pairs + * or chained calls to `lazyZip`. Implicit conversion to `Iterable[(A, B)]` is also supported. 
+ */ + def lazyZip[B](that: Iterable[B]): LazyZip2[A, B, this.type] = new LazyZip2(this, this, that) +} + +/** Base trait for Iterable operations + * + * =VarianceNote= + * + * We require that for all child classes of Iterable the variance of + * the child class and the variance of the `C` parameter passed to `IterableOps` + * are the same. We cannot express this since we lack variance polymorphism. That's + * why we have to resort at some places to write `C[A @uncheckedVariance]`. + * + * @tparam CC type constructor of the collection (e.g. `List`, `Set`). Operations returning a collection + * with a different type of element `B` (e.g. `map`) return a `CC[B]`. + * @tparam C type of the collection (e.g. `List[Int]`, `String`, `BitSet`). Operations returning a collection + * with the same type of element (e.g. `drop`, `filter`) return a `C`. + * + * @define Coll Iterable + * @define coll iterable collection + * @define orderDependent + * + * Note: might return different results for different runs, unless the underlying collection type is ordered. + * @define orderDependentFold + * + * Note: might return different results for different runs, unless the + * underlying collection type is ordered or the operator is associative + * and commutative. + * @define mayNotTerminateInf + * + * Note: may not terminate for infinite-sized collections. + * @define willNotTerminateInf + * + * Note: will not terminate for infinite-sized collections. + * @define undefinedorder + * The order in which operations are performed on elements is unspecified + * and may be nondeterministic. + */ +trait IterableOps[+A, +CC[_], +C] extends Any with IterableOnce[A] with IterableOnceOps[A, CC, C] { + /** + * @return This collection as an `Iterable[A]`. No new collection will be built if `this` is already an `Iterable[A]`. 
+ */ + // Should be `protected def asIterable`, or maybe removed altogether if it's not needed + @deprecated("toIterable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.7") + def toIterable: Iterable[A] + + /** Converts this $coll to an unspecified Iterable. Will return + * the same collection if this instance is already Iterable. + * @return An Iterable containing all elements of this $coll. + */ + @deprecated("toTraversable is internal and will be made protected; its name is similar to `toList` or `toSeq`, but it doesn't copy non-immutable collections", "2.13.0") + final def toTraversable: Traversable[A] = toIterable + + override def isTraversableAgain: Boolean = true + + /** + * @return This collection as a `C`. + */ + protected def coll: C + + @deprecated("Use coll instead of repr in a collection implementation, use the collection value itself from the outside", "2.13.0") + final def repr: C = coll + + /** + * Defines how to turn a given `Iterable[A]` into a collection of type `C`. + * + * This process can be done in a strict way or a non-strict way (ie. without evaluating + * the elements of the resulting collections). In other words, this methods defines + * the evaluation model of the collection. + * + * @note When implementing a custom collection type and refining `C` to the new type, this + * method needs to be overridden (the compiler will issue an error otherwise). In the + * common case where `C =:= CC[A]`, this can be done by mixing in the + * [[scala.collection.IterableFactoryDefaults]] trait, which implements the method using + * [[iterableFactory]]. + * + * @note As witnessed by the `@uncheckedVariance` annotation, using this method + * might be unsound. However, as long as it is called with an + * `Iterable[A]` obtained from `this` collection (as it is the case in the + * implementations of operations where we use a `View[A]`), it is safe. 
+ */ + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): C + + /** The companion object of this ${coll}, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def iterableFactory: IterableFactory[CC] + + @deprecated("Use iterableFactory instead", "2.13.0") + @deprecatedOverriding("Use iterableFactory instead", "2.13.0") + @`inline` def companion: IterableFactory[CC] = iterableFactory + + /** + * @return a strict builder for the same collection type. + * + * Note that in the case of lazy collections (e.g. [[scala.collection.View]] or [[scala.collection.immutable.LazyList]]), + * it is possible to implement this method but the resulting `Builder` will break laziness. + * As a consequence, operations should preferably be implemented with `fromSpecific` + * instead of this method. + * + * @note When implementing a custom collection type and refining `C` to the new type, this + * method needs to be overridden (the compiler will issue an error otherwise). In the + * common case where `C =:= CC[A]`, this can be done by mixing in the + * [[scala.collection.IterableFactoryDefaults]] trait, which implements the method using + * [[iterableFactory]]. + * + * @note As witnessed by the `@uncheckedVariance` annotation, using this method might + * be unsound. However, as long as the returned builder is only fed + * with `A` values taken from `this` instance, it is safe. + */ + protected def newSpecificBuilder: Builder[A @uncheckedVariance, C] + + /** The empty iterable of the same type as this iterable + * + * @return an empty iterable of type `C`. + */ + def empty: C = fromSpecific(Nil) + + /** Selects the first element of this $coll. + * $orderDependent + * @return the first element of this $coll. + * @throws NoSuchElementException if the $coll is empty. 
+ */ + def head: A = iterator.next() + + /** Optionally selects the first element. + * $orderDependent + * @return the first element of this $coll if it is nonempty, + * `None` if it is empty. + */ + def headOption: Option[A] = { + val it = iterator + if (it.hasNext) Some(it.next()) else None + } + + /** Selects the last element. + * $orderDependent + * @return The last element of this $coll. + * @throws NoSuchElementException If the $coll is empty. + */ + def last: A = { + val it = iterator + var lst = it.next() + while (it.hasNext) lst = it.next() + lst + } + + /** Optionally selects the last element. + * $orderDependent + * @return the last element of this $coll$ if it is nonempty, + * `None` if it is empty. + */ + def lastOption: Option[A] = if (isEmpty) None else Some(last) + + /** A view over the elements of this collection. */ + def view: View[A] = View.fromIteratorProvider(() => iterator) + + /** Compares the size of this $coll to a test value. + * + * @param otherSize the test value that gets compared with the size. + * @return A value `x` where + * {{{ + * x < 0 if this.size < otherSize + * x == 0 if this.size == otherSize + * x > 0 if this.size > otherSize + * }}} + * + * The method as implemented here does not call `size` directly; its running time + * is `O(size min otherSize)` instead of `O(size)`. The method should be overridden + * if computing `size` is cheap and `knownSize` returns `-1`. + * + * @see [[sizeIs]] + */ + def sizeCompare(otherSize: Int): Int = { + if (otherSize < 0) 1 + else { + val known = knownSize + if (known >= 0) Integer.compare(known, otherSize) + else { + var i = 0 + val it = iterator + while (it.hasNext) { + if (i == otherSize) return 1 + it.next() + i += 1 + } + i - otherSize + } + } + } + + /** Returns a value class containing operations for comparing the size of this $coll to a test value. 
+ * + * These operations are implemented in terms of [[sizeCompare(Int) `sizeCompare(Int)`]], and + * allow the following more readable usages: + * + * {{{ + * this.sizeIs < size // this.sizeCompare(size) < 0 + * this.sizeIs <= size // this.sizeCompare(size) <= 0 + * this.sizeIs == size // this.sizeCompare(size) == 0 + * this.sizeIs != size // this.sizeCompare(size) != 0 + * this.sizeIs >= size // this.sizeCompare(size) >= 0 + * this.sizeIs > size // this.sizeCompare(size) > 0 + * }}} + */ + @inline final def sizeIs: IterableOps.SizeCompareOps = new IterableOps.SizeCompareOps(this) + + /** Compares the size of this $coll to the size of another `Iterable`. + * + * @param that the `Iterable` whose size is compared with this $coll's size. + * @return A value `x` where + * {{{ + * x < 0 if this.size < that.size + * x == 0 if this.size == that.size + * x > 0 if this.size > that.size + * }}} + * + * The method as implemented here does not call `size` directly; its running time + * is `O(this.size min that.size)` instead of `O(this.size + that.size)`. + * The method should be overridden if computing `size` is cheap and `knownSize` returns `-1`. + */ + def sizeCompare(that: Iterable[_]): Int = { + val thatKnownSize = that.knownSize + + if (thatKnownSize >= 0) this sizeCompare thatKnownSize + else { + val thisKnownSize = this.knownSize + + if (thisKnownSize >= 0) { + val res = that sizeCompare thisKnownSize + // can't just invert the result, because `-Int.MinValue == Int.MinValue` + if (res == Int.MinValue) 1 else -res + } else { + val thisIt = this.iterator + val thatIt = that.iterator + while (thisIt.hasNext && thatIt.hasNext) { + thisIt.next() + thatIt.next() + } + java.lang.Boolean.compare(thisIt.hasNext, thatIt.hasNext) + } + } + } + + /** A view over a slice of the elements of this collection. 
*/ + @deprecated("Use .view.slice(from, until) instead of .view(from, until)", "2.13.0") + def view(from: Int, until: Int): View[A] = view.slice(from, until) + + /** Transposes this $coll of iterable collections into + * a $coll of ${coll}s. + * + * The resulting collection's type will be guided by the + * static type of $coll. For example: + * + * {{{ + * val xs = List( + * Set(1, 2, 3), + * Set(4, 5, 6)).transpose + * // xs == List( + * // List(1, 4), + * // List(2, 5), + * // List(3, 6)) + * + * val ys = Vector( + * List(1, 2, 3), + * List(4, 5, 6)).transpose + * // ys == Vector( + * // Vector(1, 4), + * // Vector(2, 5), + * // Vector(3, 6)) + * }}} + * + * $willForceEvaluation + * + * @tparam B the type of the elements of each iterable collection. + * @param asIterable an implicit conversion which asserts that the + * element type of this $coll is an `Iterable`. + * @return a two-dimensional $coll of ${coll}s which has as ''n''th row + * the ''n''th column of this $coll. + * @throws IllegalArgumentException if all collections in this $coll + * are not of the same size. + */ + def transpose[B](implicit asIterable: A => /*<:<!!!*/ Iterable[B]): CC[CC[B] @uncheckedVariance] = { + if (isEmpty) + return iterableFactory.empty[CC[B]] + + def fail = throw new IllegalArgumentException("transpose requires all collections have the same size") + + val headSize = asIterable(head).size + val bs: scala.collection.immutable.IndexedSeq[Builder[B, CC[B]]] = scala.collection.immutable.IndexedSeq.fill(headSize)(iterableFactory.newBuilder[B]) + for (xs <- this) { + var i = 0 + for (x <- asIterable(xs)) { + if (i >= headSize) fail + bs(i) += x + i += 1 + } + if (i != headSize) + fail + } + iterableFactory.from(bs.map(_.result())) + } + + def filter(pred: A => Boolean): C = fromSpecific(new View.Filter(this, pred, isFlipped = false)) + + def filterNot(pred: A => Boolean): C = fromSpecific(new View.Filter(this, pred, isFlipped = true)) + + /** Creates a non-strict filter of this $coll. + * + * Note: the difference between `c filter p` and `c withFilter p` is that + * the former creates a new collection, whereas the latter only + * restricts the domain of subsequent `map`, `flatMap`, `foreach`, + * and `withFilter` operations. + * $orderDependent + * + * @param p the predicate used to test elements. + * @return an object of class `WithFilter`, which supports + * `map`, `flatMap`, `foreach`, and `withFilter` operations. 
+ * All these operations apply to those elements of this $coll + * which satisfy the predicate `p`. + */ + def withFilter(p: A => Boolean): collection.WithFilter[A, CC] = new IterableOps.WithFilter(this, p) + + /** A pair of, first, all elements that satisfy predicate `p` and, second, + * all elements that do not. Interesting because it splits a collection in two. + * + * The default implementation provided here needs to traverse the collection twice. + * Strict collections have an overridden version of `partition` in `StrictOptimizedIterableOps`, + * which requires only a single traversal. + */ + def partition(p: A => Boolean): (C, C) = { + val first = new View.Filter(this, p, false) + val second = new View.Filter(this, p, true) + (fromSpecific(first), fromSpecific(second)) + } + + override def splitAt(n: Int): (C, C) = (take(n), drop(n)) + + def take(n: Int): C = fromSpecific(new View.Take(this, n)) + + /** Selects the last ''n'' elements. + * $orderDependent + * @param n the number of elements to take from this $coll. + * @return a $coll consisting only of the last `n` elements of this $coll, + * or else the whole $coll, if it has less than `n` elements. + * If `n` is negative, returns an empty $coll. + */ + def takeRight(n: Int): C = fromSpecific(new View.TakeRight(this, n)) + + /** Takes longest prefix of elements that satisfy a predicate. + * $orderDependent + * @param p The predicate used to test elements. + * @return the longest prefix of this $coll whose elements all satisfy + * the predicate `p`. + */ + def takeWhile(p: A => Boolean): C = fromSpecific(new View.TakeWhile(this, p)) + + def span(p: A => Boolean): (C, C) = (takeWhile(p), dropWhile(p)) + + def drop(n: Int): C = fromSpecific(new View.Drop(this, n)) + + /** Selects all elements except last ''n'' ones. + * $orderDependent + * @param n the number of elements to drop from this $coll. 
+ * @return a $coll consisting of all elements of this $coll except the last `n` ones, or else the + * empty $coll, if this $coll has less than `n` elements. + * If `n` is negative, don't drop any elements. + */ + def dropRight(n: Int): C = fromSpecific(new View.DropRight(this, n)) + + def dropWhile(p: A => Boolean): C = fromSpecific(new View.DropWhile(this, p)) + + /** Partitions elements in fixed size ${coll}s. + * @see [[scala.collection.Iterator]], method `grouped` + * + * @param size the number of elements per group + * @return An iterator producing ${coll}s of size `size`, except the + * last will be less than size `size` if the elements don't divide evenly. + */ + def grouped(size: Int): Iterator[C] = + iterator.grouped(size).map(fromSpecific) + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in `grouped`.) + * + * An empty collection returns an empty iterator, and a non-empty + * collection containing fewer elements than the window size returns + * an iterator that will produce the original collection as its only + * element. + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @return An iterator producing ${coll}s of size `size`, except for a + * non-empty collection with less than `size` elements, which + * returns an iterator that produces the source collection itself + * as its only element. + * @example `List().sliding(2) = empty iterator` + * @example `List(1).sliding(2) = Iterator(List(1))` + * @example `List(1, 2).sliding(2) = Iterator(List(1, 2))` + * @example `List(1, 2, 3).sliding(2) = Iterator(List(1, 2), List(2, 3))` + */ + def sliding(size: Int): Iterator[C] = sliding(size, 1) + + /** Groups elements in fixed size blocks by passing a "sliding window" + * over them (as opposed to partitioning them, as is done in grouped.) + * + * The returned iterator will be empty when called on an empty collection. 
+ * The last element the iterator produces may be smaller than the window + * size when the original collection isn't exhausted by the window before + * it and its last element isn't skipped by the step before it. + * + * @see [[scala.collection.Iterator]], method `sliding` + * + * @param size the number of elements per group + * @param step the distance between the first elements of successive + * groups + * @return An iterator producing ${coll}s of size `size`, except the last + * element (which may be the only element) will be smaller + * if there are fewer than `size` elements remaining to be grouped. + * @example `List(1, 2, 3, 4, 5).sliding(2, 2) = Iterator(List(1, 2), List(3, 4), List(5))` + * @example `List(1, 2, 3, 4, 5, 6).sliding(2, 3) = Iterator(List(1, 2), List(4, 5))` + */ + def sliding(size: Int, step: Int): Iterator[C] = + iterator.sliding(size, step).map(fromSpecific) + + /** The rest of the collection without its first element. */ + def tail: C = { + if (isEmpty) throw new UnsupportedOperationException + drop(1) + } + + /** The initial part of the collection without its last element. + * $willForceEvaluation + */ + def init: C = { + if (isEmpty) throw new UnsupportedOperationException + dropRight(1) + } + + def slice(from: Int, until: Int): C = + fromSpecific(new View.Drop(new View.Take(this, until), from)) + + /** Partitions this $coll into a map of ${coll}s according to some discriminator function. + * + * $willForceEvaluation + * + * @param f the discriminator function. + * @tparam K the type of keys returned by the discriminator function. + * @return A map from keys to ${coll}s such that the following invariant holds: + * {{{ + * (xs groupBy f)(k) = xs filter (x => f(x) == k) + * }}} + * That is, every key `k` is bound to a $coll of those elements `x` + * for which `f(x)` equals `k`. 
+ * + */ + def groupBy[K](f: A => K): immutable.Map[K, C] = { + val m = mutable.Map.empty[K, Builder[A, C]] + val it = iterator + while (it.hasNext) { + val elem = it.next() + val key = f(elem) + val bldr = m.getOrElseUpdate(key, newSpecificBuilder) + bldr += elem + } + var result = immutable.HashMap.empty[K, C] + val mapIt = m.iterator + while (mapIt.hasNext) { + val (k, v) = mapIt.next() + result = result.updated(k, v.result()) + } + result + } + + /** + * Partitions this $coll into a map of ${coll}s according to a discriminator function `key`. + * Each element in a group is transformed into a value of type `B` using the `value` function. + * + * It is equivalent to `groupBy(key).mapValues(_.map(f))`, but more efficient. + * + * {{{ + * case class User(name: String, age: Int) + * + * def namesByAge(users: Seq[User]): Map[Int, Seq[String]] = + * users.groupMap(_.age)(_.name) + * }}} + * + * $willForceEvaluation + * + * @param key the discriminator function + * @param f the element transformation function + * @tparam K the type of keys returned by the discriminator function + * @tparam B the type of values returned by the transformation function + */ + def groupMap[K, B](key: A => K)(f: A => B): immutable.Map[K, CC[B]] = { + val m = mutable.Map.empty[K, Builder[B, CC[B]]] + for (elem <- this) { + val k = key(elem) + val bldr = m.getOrElseUpdate(k, iterableFactory.newBuilder[B]) + bldr += f(elem) + } + class Result extends runtime.AbstractFunction1[(K, Builder[B, CC[B]]), Unit] { + var built = immutable.Map.empty[K, CC[B]] + def apply(kv: (K, Builder[B, CC[B]])) = + built = built.updated(kv._1, kv._2.result()) + } + val result = new Result + m.foreach(result) + result.built + } + + /** + * Partitions this $coll into a map according to a discriminator function `key`. All the values that + * have the same discriminator are then transformed by the `f` function and then reduced into a + * single value with the `reduce` function. 
+ * + * It is equivalent to `groupBy(key).mapValues(_.map(f).reduce(reduce))`, but more efficient. + * + * {{{ + * def occurrences[A](as: Seq[A]): Map[A, Int] = + * as.groupMapReduce(identity)(_ => 1)(_ + _) + * }}} + * + * $willForceEvaluation + */ + def groupMapReduce[K, B](key: A => K)(f: A => B)(reduce: (B, B) => B): immutable.Map[K, B] = { + val m = mutable.Map.empty[K, B] + for (elem <- this) { + val k = key(elem) + val v = + m.get(k) match { + case Some(b) => reduce(b, f(elem)) + case None => f(elem) + } + m.put(k, v) + } + m.to(immutable.Map) + } + + /** Computes a prefix scan of the elements of the collection. + * + * Note: The neutral element `z` may be applied more than once. + * + * @tparam B element type of the resulting collection + * @param z neutral element for the operator `op` + * @param op the associative operator for the scan + * + * @return a new $coll containing the prefix scan of the elements in this $coll + */ + def scan[B >: A](z: B)(op: (B, B) => B): CC[B] = scanLeft(z)(op) + + def scanLeft[B](z: B)(op: (B, A) => B): CC[B] = iterableFactory.from(new View.ScanLeft(this, z, op)) + + /** Produces a collection containing cumulative results of applying the operator going right to left. + * The head of the collection is the last cumulative result. 
+ * $willNotTerminateInf + * $orderDependent + * $willForceEvaluation + * + * Example: + * {{{ + * List(1, 2, 3, 4).scanRight(0)(_ + _) == List(10, 9, 7, 4, 0) + * }}} + * + * @tparam B the type of the elements in the resulting collection + * @param z the initial value + * @param op the binary operator applied to the intermediate result and the element + * @return collection with intermediate results + */ + def scanRight[B](z: B)(op: (A, B) => B): CC[B] = { + class Scanner extends runtime.AbstractFunction1[A, Unit] { + var acc = z + var scanned = acc :: immutable.Nil + def apply(x: A) = { + acc = op(x, acc) + scanned ::= acc + } + } + val scanner = new Scanner + reversed.foreach(scanner) + iterableFactory.from(scanner.scanned) + } + + def map[B](f: A => B): CC[B] = iterableFactory.from(new View.Map(this, f)) + + def flatMap[B](f: A => IterableOnce[B]): CC[B] = iterableFactory.from(new View.FlatMap(this, f)) + + def flatten[B](implicit asIterable: A => IterableOnce[B]): CC[B] = flatMap(asIterable) + + def collect[B](pf: PartialFunction[A, B]): CC[B] = + iterableFactory.from(new View.Collect(this, pf)) + + /** Applies a function `f` to each element of the $coll and returns a pair of ${coll}s: the first one + * made of those values returned by `f` that were wrapped in [[scala.util.Left]], and the second + * one made of those wrapped in [[scala.util.Right]]. 
+ * + * Example: + * {{{ + * val xs = $Coll(1, "one", 2, "two", 3, "three") partitionMap { + * case i: Int => Left(i) + * case s: String => Right(s) + * } + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three)) + * }}} + * + * @tparam A1 the element type of the first resulting collection + * @tparam A2 the element type of the second resulting collection + * @param f the 'split function' mapping the elements of this $coll to an [[scala.util.Either]] + * + * @return a pair of ${coll}s: the first one made of those values returned by `f` that were wrapped in [[scala.util.Left]], + * and the second one made of those wrapped in [[scala.util.Right]]. + */ + def partitionMap[A1, A2](f: A => Either[A1, A2]): (CC[A1], CC[A2]) = { + val left: View[A1] = new LeftPartitionMapped(this, f) + val right: View[A2] = new RightPartitionMapped(this, f) + (iterableFactory.from(left), iterableFactory.from(right)) + } + + /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the + * right hand operand. The element type of the $coll is the most specific superclass encompassing + * the element types of the two operands. + * + * @param suffix the iterable to append. + * @tparam B the element type of the returned collection. + * @return a new $coll which contains all elements + * of this $coll followed by all elements of `suffix`. + */ + def concat[B >: A](suffix: IterableOnce[B]): CC[B] = iterableFactory.from(suffix match { + case xs: Iterable[B] => new View.Concat(this, xs) + case xs => iterator ++ suffix.iterator + }) + + /** Alias for `concat` */ + @`inline` final def ++ [B >: A](suffix: IterableOnce[B]): CC[B] = concat(suffix) + + /** Returns a $coll formed from this $coll and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is longer than the other, its remaining elements are ignored. 
+ * + * @param that The iterable providing the second half of each result pair + * @tparam B the type of the second half of the returned pairs + * @return a new $coll containing pairs consisting of corresponding elements of this $coll and `that`. + * The length of the returned collection is the minimum of the lengths of this $coll and `that`. + */ + def zip[B](that: IterableOnce[B]): CC[(A @uncheckedVariance, B)] = iterableFactory.from(that match { // sound bcs of VarianceNote + case that: Iterable[B] => new View.Zip(this, that) + case _ => iterator.zip(that) + }) + + def zipWithIndex: CC[(A @uncheckedVariance, Int)] = iterableFactory.from(new View.ZipWithIndex(this)) + + /** Returns a $coll formed from this $coll and another iterable collection + * by combining corresponding elements in pairs. + * If one of the two collections is shorter than the other, + * placeholder elements are used to extend the shorter collection to the length of the longer. + * + * @param that the iterable providing the second half of each result pair + * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`. + * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll. + * @return a new collection of type `That` containing pairs consisting of + * corresponding elements of this $coll and `that`. The length + * of the returned collection is the maximum of the lengths of this $coll and `that`. + * If this $coll is shorter than `that`, `thisElem` values are used to pad the result. + * If `that` is shorter than this $coll, `thatElem` values are used to pad the result. + */ + def zipAll[A1 >: A, B](that: Iterable[B], thisElem: A1, thatElem: B): CC[(A1, B)] = iterableFactory.from(new View.ZipAll(this, that, thisElem, thatElem)) + + /** Converts this $coll of pairs into two collections of the first and second + * half of each pair. 
+ * + * {{{ + * val xs = $Coll( + * (1, "one"), + * (2, "two"), + * (3, "three")).unzip + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three)) + * }}} + * + * @tparam A1 the type of the first half of the element pairs + * @tparam A2 the type of the second half of the element pairs + * @param asPair an implicit conversion which asserts that the element type + * of this $coll is a pair. + * @return a pair of ${coll}s, containing the first, respectively second + * half of each element pair of this $coll. + */ + def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = { + val first: View[A1] = new View.Map[A, A1](this, asPair(_)._1) + val second: View[A2] = new View.Map[A, A2](this, asPair(_)._2) + (iterableFactory.from(first), iterableFactory.from(second)) + } + + /** Converts this $coll of triples into three collections of the first, second, + * and third element of each triple. + * + * {{{ + * val xs = $Coll( + * (1, "one", '1'), + * (2, "two", '2'), + * (3, "three", '3')).unzip3 + * // xs == ($Coll(1, 2, 3), + * // $Coll(one, two, three), + * // $Coll(1, 2, 3)) + * }}} + * + * @tparam A1 the type of the first member of the element triples + * @tparam A2 the type of the second member of the element triples + * @tparam A3 the type of the third member of the element triples + * @param asTriple an implicit conversion which asserts that the element type + * of this $coll is a triple. + * @return a triple of ${coll}s, containing the first, second, respectively + * third member of each element triple of this $coll. + */ + def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = { + val first: View[A1] = new View.Map[A, A1](this, asTriple(_)._1) + val second: View[A2] = new View.Map[A, A2](this, asTriple(_)._2) + val third: View[A3] = new View.Map[A, A3](this, asTriple(_)._3) + (iterableFactory.from(first), iterableFactory.from(second), iterableFactory.from(third)) + } + + /** Iterates over the tails of this $coll. 
The first value will be this + * $coll and the final one will be an empty $coll, with the intervening + * values the results of successive applications of `tail`. + * + * @return an iterator over all the tails of this $coll + * @example `List(1,2,3).tails = Iterator(List(1,2,3), List(2,3), List(3), Nil)` + */ + def tails: Iterator[C] = iterateUntilEmpty(_.tail) + + /** Iterates over the inits of this $coll. The first value will be this + * $coll and the final one will be an empty $coll, with the intervening + * values the results of successive applications of `init`. + * + * $willForceEvaluation + * + * @return an iterator over all the inits of this $coll + * @example `List(1,2,3).inits = Iterator(List(1,2,3), List(1,2), List(1), Nil)` + */ + def inits: Iterator[C] = iterateUntilEmpty(_.init) + + override def tapEach[U](f: A => U): C = fromSpecific(new View.Map(this, { (a: A) => f(a); a })) + + // A helper for tails and inits. + private[this] def iterateUntilEmpty(f: Iterable[A] => Iterable[A]): Iterator[C] = { + // toIterable ties the knot between `this: IterableOnceOps[A, CC, C]` and `this.tail: C` + // `this.tail.tail` doesn't compile as `C` is unbounded + // `Iterable.from(this)` would eagerly copy non-immutable collections + val it = Iterator.iterate(toIterable: @nowarn("cat=deprecation"))(f).takeWhile(_.nonEmpty) + (it ++ Iterator.single(Iterable.empty)).map(fromSpecific) + } + + @deprecated("Use ++ instead of ++: for collections of type Iterable", "2.13.0") + def ++:[B >: A](that: IterableOnce[B]): CC[B] = iterableFactory.from(that match { + case xs: Iterable[B] => new View.Concat(xs, this) + case _ => that.iterator ++ iterator + }) +} + +object IterableOps { + + /** Operations for comparing the size of a collection to a test value. + * + * These operations are implemented in terms of + * [[scala.collection.IterableOps.sizeCompare(Int) `sizeCompare(Int)`]]. 
+ */ + final class SizeCompareOps private[collection](val it: IterableOps[_, AnyConstr, _]) extends AnyVal { + /** Tests if the size of the collection is less than some value. */ + @inline def <(size: Int): Boolean = it.sizeCompare(size) < 0 + /** Tests if the size of the collection is less than or equal to some value. */ + @inline def <=(size: Int): Boolean = it.sizeCompare(size) <= 0 + /** Tests if the size of the collection is equal to some value. */ + @inline def ==(size: Int): Boolean = it.sizeCompare(size) == 0 + /** Tests if the size of the collection is not equal to some value. */ + @inline def !=(size: Int): Boolean = it.sizeCompare(size) != 0 + /** Tests if the size of the collection is greater than or equal to some value. */ + @inline def >=(size: Int): Boolean = it.sizeCompare(size) >= 0 + /** Tests if the size of the collection is greater than some value. */ + @inline def >(size: Int): Boolean = it.sizeCompare(size) > 0 + } + + /** A trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods + * of trait `Iterable`. + * + * @tparam A Element type (e.g. `Int`) + * @tparam CC Collection type constructor (e.g. 
`List`) + * + * @define coll collection + */ + @SerialVersionUID(3L) + class WithFilter[+A, +CC[_]]( + self: IterableOps[A, CC, _], + p: A => Boolean + ) extends collection.WithFilter[A, CC] with Serializable { + + protected def filtered: Iterable[A] = + new View.Filter(self, p, isFlipped = false) + + def map[B](f: A => B): CC[B] = + self.iterableFactory.from(new View.Map(filtered, f)) + + def flatMap[B](f: A => IterableOnce[B]): CC[B] = + self.iterableFactory.from(new View.FlatMap(filtered, f)) + + def foreach[U](f: A => U): Unit = filtered.foreach(f) + + def withFilter(q: A => Boolean): WithFilter[A, CC] = + new WithFilter(self, (a: A) => p(a) && q(a)) + + } + +} + +@SerialVersionUID(3L) +object Iterable extends IterableFactory.Delegate[Iterable](immutable.Iterable) { + + def single[A](a: A): Iterable[A] = new AbstractIterable[A] { + override def iterator = Iterator.single(a) + override def knownSize = 1 + override def head = a + override def headOption: Some[A] = Some(a) + override def last = a + override def lastOption: Some[A] = Some(a) + override def view: View.Single[A] = new View.Single(a) + override def take(n: Int) = if (n > 0) this else Iterable.empty + override def takeRight(n: Int) = if (n > 0) this else Iterable.empty + override def drop(n: Int) = if (n > 0) Iterable.empty else this + override def dropRight(n: Int) = if (n > 0) Iterable.empty else this + override def tail: Iterable[Nothing] = Iterable.empty + override def init: Iterable[Nothing] = Iterable.empty + } +} + +/** Explicit instantiation of the `Iterable` trait to reduce class file size in subclasses. */ +abstract class AbstractIterable[+A] extends Iterable[A] + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. 
+ * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait IterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { + protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = iterableFactory.from(coll) + protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = iterableFactory.newBuilder[A] + + // overridden for efficiency, since we know CC[A] =:= C + override def empty: CC[A @uncheckedVariance] = iterableFactory.empty +} + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. It is used for collections that have an additional constraint, + * expressed by the `evidenceIterableFactory` method. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait EvidenceIterableFactoryDefaults[+A, +CC[x] <: IterableOps[x, CC, CC[x]], Ev[_]] extends IterableOps[A, CC, CC[A @uncheckedVariance]] { + protected def evidenceIterableFactory: EvidenceIterableFactory[CC, Ev] + implicit protected def iterableEvidence: Ev[A @uncheckedVariance] + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = evidenceIterableFactory.from(coll) + override protected def newSpecificBuilder: Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = evidenceIterableFactory.newBuilder[A] + override def empty: CC[A @uncheckedVariance] = evidenceIterableFactory.empty +} + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. 
It is used for sorted sets. + * + * Note that in sorted sets, the `CC` type of the set is not the same as the `CC` type for the + * underlying iterable (which is fixed to `Set` in [[SortedSetOps]]). This trait has therefore + * two type parameters `CC` and `WithFilterCC`. The `withFilter` method inherited from + * `IterableOps` is overridden with a compatible default implementation. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait SortedSetFactoryDefaults[+A, + +CC[X] <: SortedSet[X] with SortedSetOps[X, CC, CC[X]], + +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Set[x]] extends SortedSetOps[A @uncheckedVariance, CC, CC[A @uncheckedVariance]] { + self: IterableOps[A, WithFilterCC, _] => + + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]): CC[A @uncheckedVariance] = sortedIterableFactory.from(coll)(using ordering) + override protected def newSpecificBuilder: mutable.Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = sortedIterableFactory.newBuilder[A](using ordering) + override def empty: CC[A @uncheckedVariance] = sortedIterableFactory.empty(using ordering) + + override def withFilter(p: A => Boolean): SortedSetOps.WithFilter[A, WithFilterCC, CC] = + new SortedSetOps.WithFilter[A, WithFilterCC, CC](this, p) +} + + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. It is used for maps. + * + * Note that in maps, the `CC` type of the map is not the same as the `CC` type for the + * underlying iterable (which is fixed to `Map` in [[MapOps]]). This trait has therefore + * two type parameters `CC` and `WithFilterCC`. The `withFilter` method inherited from + * `IterableOps` is overridden with a compatible default implementation. 
+ * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. + */ +trait MapFactoryDefaults[K, +V, + +CC[x, y] <: IterableOps[(x, y), Iterable, Iterable[(x, y)]], + +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Iterable[x]] extends MapOps[K, V, CC, CC[K, V @uncheckedVariance]] with IterableOps[(K, V), WithFilterCC, CC[K, V @uncheckedVariance]] { + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]): CC[K, V @uncheckedVariance] = mapFactory.from(coll) + override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = mapFactory.newBuilder[K, V] + override def empty: CC[K, V @uncheckedVariance] = (this: AnyRef) match { + // Implemented here instead of in TreeSeqMap since overriding empty in TreeSeqMap is not forwards compatible (should be moved) + case self: immutable.TreeSeqMap[_, _] => immutable.TreeSeqMap.empty(self.orderedBy).asInstanceOf[CC[K, V]] + case _ => mapFactory.empty + } + + override def withFilter(p: ((K, V)) => Boolean): MapOps.WithFilter[K, V, WithFilterCC, CC] = + new MapOps.WithFilter[K, V, WithFilterCC, CC](this, p) +} + +/** This trait provides default implementations for the factory methods `fromSpecific` and + * `newSpecificBuilder` that need to be refined when implementing a collection type that refines + * the `CC` and `C` type parameters. It is used for sorted maps. + * + * Note that in sorted maps, the `CC` type of the map is not the same as the `CC` type for the + * underlying map (which is fixed to `Map` in [[SortedMapOps]]). This trait has therefore + * three type parameters `CC`, `WithFilterCC` and `UnsortedCC`. The `withFilter` method inherited + * from `IterableOps` is overridden with a compatible default implementation. + * + * The default implementations in this trait can be used in the common case when `CC[A]` is the + * same as `C`. 
+ */ +trait SortedMapFactoryDefaults[K, +V, + +CC[x, y] <: Map[x, y] with SortedMapOps[x, y, CC, CC[x, y]] with UnsortedCC[x, y], + +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Iterable[x], + +UnsortedCC[x, y] <: Map[x, y]] extends SortedMapOps[K, V, CC, CC[K, V @uncheckedVariance]] with MapOps[K, V, UnsortedCC, CC[K, V @uncheckedVariance]] { + self: IterableOps[(K, V), WithFilterCC, _] => + + override def empty: CC[K, V @uncheckedVariance] = sortedMapFactory.empty(using ordering) + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]): CC[K, V @uncheckedVariance] = sortedMapFactory.from(coll)(using ordering) + override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = sortedMapFactory.newBuilder[K, V](using ordering) + + override def withFilter(p: ((K, V)) => Boolean): collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC] = + new collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC](this, p) +} diff --git a/scala2-library-bootstrapped/src/scala/collection/SortedMap.scala b/scala2-library-bootstrapped/src/scala/collection/SortedMap.scala new file mode 100644 index 000000000000..5beb811ed0b2 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/SortedMap.scala @@ -0,0 +1,220 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala +package collection + +import scala.annotation.{implicitNotFound, nowarn} + +/** A Map whose keys are sorted according to a [[scala.math.Ordering]]*/ +trait SortedMap[K, +V] + extends Map[K, V] + with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] + with SortedMapFactoryDefaults[K, V, SortedMap, Iterable, Map]{ + + def unsorted: Map[K, V] = this + + def sortedMapFactory: SortedMapFactory[SortedMap] = SortedMap + + @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") + override protected[this] def stringPrefix: String = "SortedMap" + + override def equals(that: Any): Boolean = that match { + case _ if this eq that.asInstanceOf[AnyRef] => true + case sm: SortedMap[K @unchecked, _] if sm.ordering == this.ordering => + (sm canEqual this) && + (this.size == sm.size) && { + val i1 = this.iterator + val i2 = sm.iterator + var allEqual = true + while (allEqual && i1.hasNext) { + val kv1 = i1.next() + val kv2 = i2.next() + allEqual = ordering.equiv(kv1._1, kv2._1) && kv1._2 == kv2._2 + } + allEqual + } + case _ => super.equals(that) + } +} + +trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends MapOps[K, V, Map, C] + with SortedOps[K, C] { + + /** The companion object of this sorted map, providing various factory methods. + * + * @note When implementing a custom collection type and refining `CC` to the new type, this + * method needs to be overridden to return a factory for the new type (the compiler will + * issue an error otherwise). + */ + def sortedMapFactory: SortedMapFactory[CC] + + /** Similar to `mapFromIterable`, but returns a SortedMap collection type. + * Note that the return type is now `CC[K2, V2]`. 
+ */ + @`inline` protected final def sortedMapFromIterable[K2, V2](it: Iterable[(K2, V2)])(implicit ordering: Ordering[K2]): CC[K2, V2] = sortedMapFactory.from(it) + + def unsorted: Map[K, V] + + /** + * Creates an iterator over all the key/value pairs + * contained in this map having a key greater than or + * equal to `start` according to the ordering of + * this map. x.iteratorFrom(y) is equivalent + * to but often more efficient than x.from(y).iterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def iteratorFrom(start: K): Iterator[(K, V)] + + /** + * Creates an iterator over all the keys(or elements) contained in this + * collection greater than or equal to `start` + * according to the ordering of this collection. x.keysIteratorFrom(y) + * is equivalent to but often more efficient than + * x.from(y).keysIterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def keysIteratorFrom(start: K): Iterator[K] + + /** + * Creates an iterator over all the values contained in this + * map that are associated with a key greater than or equal to `start` + * according to the ordering of this map. x.valuesIteratorFrom(y) is + * equivalent to but often more efficient than + * x.from(y).valuesIterator. + * + * @param start The lower bound (inclusive) + * on the keys to be returned + */ + def valuesIteratorFrom(start: K): Iterator[V] = iteratorFrom(start).map(_._2) + + def firstKey: K = head._1 + def lastKey: K = last._1 + + /** Find the element with smallest key larger than or equal to a given key. + * @param key The given key. + * @return `None` if there is no such node. + */ + def minAfter(key: K): Option[(K, V)] = rangeFrom(key).headOption + + /** Find the element with largest key less than a given key. + * @param key The given key. + * @return `None` if there is no such node. 
+ */ + def maxBefore(key: K): Option[(K, V)] = rangeUntil(key).lastOption + + def rangeTo(to: K): C = { + val i = keySet.rangeFrom(to).iterator + if (i.isEmpty) return coll + val next = i.next() + if (ordering.compare(next, to) == 0) + if (i.isEmpty) coll + else rangeUntil(i.next()) + else + rangeUntil(next) + } + + override def keySet: SortedSet[K] = new KeySortedSet + + /** The implementation class of the set returned by `keySet` */ + protected class KeySortedSet extends SortedSet[K] with GenKeySet with GenKeySortedSet { + def diff(that: Set[K]): SortedSet[K] = fromSpecific(view.filterNot(that)) + def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = { + val map = SortedMapOps.this.rangeImpl(from, until) + new map.KeySortedSet + } + } + + /** A generic trait that is reused by sorted keyset implementations */ + protected trait GenKeySortedSet extends GenKeySet { this: SortedSet[K] => + implicit def ordering: Ordering[K] = SortedMapOps.this.ordering + def iteratorFrom(start: K): Iterator[K] = SortedMapOps.this.keysIteratorFrom(start) + } + + // And finally, we add new overloads taking an ordering + /** Builds a new sorted map by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) + + /** Builds a new sorted map by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. 
+ */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.FlatMap(this, f)) + + /** Builds a new sorted map by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. + */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + sortedMapFactory.from(new View.Collect(this, pf)) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CC[K, V2] = sortedMapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + })(using ordering) + + /** Alias for `concat` */ + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = concat(xs) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(using ordering) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(using ordering) +} + +object SortedMapOps { + private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a SortedMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." 
+ + /** Specializes `MapWithFilter` for sorted Map collections + * + * @define coll sorted map collection + */ + class WithFilter[K, +V, +IterableCC[_], +MapCC[X, Y] <: Map[X, Y], +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _]]( + self: SortedMapOps[K, V, CC, _] with MapOps[K, V, MapCC, _] with IterableOps[(K, V), IterableCC, _], + p: ((K, V)) => Boolean + ) extends MapOps.WithFilter[K, V, IterableCC, MapCC](self, p) { + + def map[K2 : Ordering, V2](f: ((K, V)) => (K2, V2)): CC[K2, V2] = + self.sortedMapFactory.from(new View.Map(filtered, f)) + + def flatMap[K2 : Ordering, V2](f: ((K, V)) => IterableOnce[(K2, V2)]): CC[K2, V2] = + self.sortedMapFactory.from(new View.FlatMap(filtered, f)) + + override def withFilter(q: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC] = + new WithFilter[K, V, IterableCC, MapCC, CC](self, (kv: (K, V)) => p(kv) && q(kv)) + + } + +} + +@SerialVersionUID(3L) +object SortedMap extends SortedMapFactory.Delegate[SortedMap](immutable.SortedMap) diff --git a/scala2-library-bootstrapped/src/scala/collection/StrictOptimizedSortedMapOps.scala b/scala2-library-bootstrapped/src/scala/collection/StrictOptimizedSortedMapOps.scala new file mode 100644 index 000000000000..ad5d67a64635 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/StrictOptimizedSortedMapOps.scala @@ -0,0 +1,46 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection + +import scala.annotation.implicitNotFound + +/** + * Trait that overrides sorted map operations to take advantage of strict builders. 
+ * + * @tparam K Type of keys + * @tparam V Type of values + * @tparam CC Collection type constructor + * @tparam C Collection type + */ +trait StrictOptimizedSortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]] + extends SortedMapOps[K, V, CC, C] + with StrictOptimizedMapOps[K, V, Map, C] { + + override def map[K2, V2](f: ((K, V)) => (K2, V2))(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedMap(sortedMapFactory.newBuilder, f) + + override def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedFlatMap(sortedMapFactory.newBuilder, f) + + override def concat[V2 >: V](xs: IterableOnce[(K, V2)]): CC[K, V2] = + strictOptimizedConcat(xs, sortedMapFactory.newBuilder(using ordering)) + + override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = + strictOptimizedCollect(sortedMapFactory.newBuilder, pf) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = { + val m = ((this + elem1).asInstanceOf[Map[K, V]] + elem2).asInstanceOf[CC[K, V1]] + if(elems.isEmpty) m else m.concat(elems).asInstanceOf[CC[K, V1]] + } +} diff --git a/scala2-library-bootstrapped/src/scala/collection/generic/DefaultSerializationProxy.scala b/scala2-library-bootstrapped/src/scala/collection/generic/DefaultSerializationProxy.scala new file mode 100644 index 000000000000..e794044a1af9 --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/generic/DefaultSerializationProxy.scala @@ -0,0 +1,87 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection.generic + +import java.io.{ObjectInputStream, ObjectOutputStream} + +import scala.collection.{Factory, Iterable} +import scala.collection.mutable.Builder + +/** The default serialization proxy for collection implementations. + * + * This class is `final` and requires an extra `Factory` object rather than leaving the details of creating a `Builder` + * to an abstract method that could be implemented by a subclass. This is necessary because the factory is needed + * for deserializing this class's private state, which happens before any subclass fields would be deserialized. Any + * additional state required to create the proper `Builder` needs to be captured by the `factory`. + */ +@SerialVersionUID(3L) +final class DefaultSerializationProxy[A](factory: Factory[A, Any], @transient private[this] val coll: Iterable[A]) extends Serializable { + + @transient protected var builder: Builder[A, Any] = _ + + private[this] def writeObject(out: ObjectOutputStream): Unit = { + out.defaultWriteObject() + val k = coll.knownSize + out.writeInt(k) + var count = 0 + coll.foreach { x => + out.writeObject(x) + count += 1 + } + if(k >= 0) { + if(count != k) throw new IllegalStateException(s"Illegal size $count of collection, expected $k") + } else out.writeObject(SerializeEnd) + } + + private[this] def readObject(in: ObjectInputStream): Unit = { + in.defaultReadObject() + builder = factory.newBuilder + val k = in.readInt() + if(k >= 0) { + builder.sizeHint(k) + var count = 0 + while(count < k) { + builder += in.readObject().asInstanceOf[A] + count += 1 + } + } else { + while (true) in.readObject match { + case SerializeEnd => return + case a => builder += a.asInstanceOf[A] + } + } + } + + protected[this] def readResolve(): Any = builder.result() +} 
+ +@SerialVersionUID(3L) +private[collection] case object SerializeEnd + +/** Mix-in trait to enable DefaultSerializationProxy for the standard collection types. Depending on the type + * it is mixed into, it will dynamically choose `iterableFactory`, `mapFactory`, `sortedIterableFactory` or + * `sortedMapFactory` for deserialization into the respective `CC` type. Override `writeReplace` or implement + * it directly without using this trait if you need a non-standard factory or if you want to use a different + * serialization scheme. + */ +trait DefaultSerializable extends Serializable { this: scala.collection.Iterable[_] => + protected[this] def writeReplace(): AnyRef = { + val f: Factory[Any, Any] = this match { + case it: scala.collection.SortedMap[_, _] => it.sortedMapFactory.sortedMapFactory[Any, Any](using it.ordering.asInstanceOf[Ordering[Any]]).asInstanceOf[Factory[Any, Any]] + case it: scala.collection.Map[_, _] => it.mapFactory.mapFactory[Any, Any].asInstanceOf[Factory[Any, Any]] + case it: scala.collection.SortedSet[_] => it.sortedIterableFactory.evidenceIterableFactory[Any](using it.ordering.asInstanceOf[Ordering[Any]]) + case it => it.iterableFactory.iterableFactory + } + new DefaultSerializationProxy(f, this) + } +} diff --git a/scala2-library-bootstrapped/src/scala/collection/mutable/ArraySeq.scala b/scala2-library-bootstrapped/src/scala/collection/mutable/ArraySeq.scala new file mode 100644 index 000000000000..ebefa4c3c17a --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/mutable/ArraySeq.scala @@ -0,0 +1,354 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.collection +package mutable +import java.util.Arrays +import scala.collection.Stepper.EfficientSplit +import scala.collection.convert.impl._ +import scala.reflect.ClassTag +import scala.util.hashing.MurmurHash3 + +/** + * A collection representing `Array[T]`. Unlike `ArrayBuffer` it is always backed by the same + * underlying `Array`, therefore it is not growable or shrinkable. + * + * @tparam T type of the elements in this wrapped array. + * + * @define Coll `ArraySeq` + * @define coll wrapped array + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +@SerialVersionUID(3L) +sealed abstract class ArraySeq[T] + extends AbstractSeq[T] + with IndexedSeq[T] + with IndexedSeqOps[T, ArraySeq, ArraySeq[T]] + with StrictOptimizedSeqOps[T, ArraySeq, ArraySeq[T]] + with Serializable { + + override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged + + override protected def fromSpecific(coll: scala.collection.IterableOnce[T]): ArraySeq[T] = { + val b = ArrayBuilder.make(using elemTag).asInstanceOf[ArrayBuilder[T]] + val s = coll.knownSize + if(s > 0) b.sizeHint(s) + b ++= coll + ArraySeq.make(b.result()) + } + override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = ArraySeq.newBuilder(using elemTag).asInstanceOf[Builder[T, ArraySeq[T]]] + override def empty: ArraySeq[T] = ArraySeq.empty(using elemTag.asInstanceOf[ClassTag[T]]) + + /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + def elemTag: ClassTag[_] + + /** Update element at given index */ + def update(@deprecatedName("idx", "2.13.0") index: Int, elem: T): Unit + + /** The underlying array. 
Its element type does not have to be equal to the element type of this ArraySeq. A primitive + * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype + * or subtype of the element type. */ + def array: Array[_] + + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit + + override protected[this] def className = "ArraySeq" + + /** Clones this object, including the underlying Array. */ + override def clone(): ArraySeq[T] = ArraySeq.make(array.clone()).asInstanceOf[ArraySeq[T]] + + override def copyToArray[B >: T](xs: Array[B], start: Int, len: Int): Int = { + val copied = IterableOnce.elemsToCopyToArray(length, xs.length, start, len) + if(copied > 0) { + Array.copy(array, 0, xs, start, copied) + } + copied + } + + override def equals(other: Any): Boolean = other match { + case that: ArraySeq[_] if this.array.length != that.array.length => + false + case _ => + super.equals(other) + } + + override def sorted[B >: T](implicit ord: Ordering[B]): ArraySeq[T] = + ArraySeq.make(array.sorted(ord.asInstanceOf[Ordering[Any]])).asInstanceOf[ArraySeq[T]] + + override def sortInPlace[B >: T]()(implicit ord: Ordering[B]): this.type = { + if (length > 1) scala.util.Sorting.stableSort(array.asInstanceOf[Array[B]]) + this + } +} + +/** A companion object used to create instances of `ArraySeq`. + */ +@SerialVersionUID(3L) +object ArraySeq extends StrictOptimizedClassTagSeqFactory[ArraySeq] { self => + val untagged: SeqFactory[ArraySeq] = new ClassTagSeqFactory.AnySeqDelegate(self) + + // This is reused for all calls to empty. 
+ private[this] val EmptyArraySeq = new ofRef[AnyRef](new Array[AnyRef](0)) + def empty[T : ClassTag]: ArraySeq[T] = EmptyArraySeq.asInstanceOf[ArraySeq[T]] + + def from[A : ClassTag](it: scala.collection.IterableOnce[A]): ArraySeq[A] = make(Array.from[A](it)) + + def newBuilder[A : ClassTag]: Builder[A, ArraySeq[A]] = ArrayBuilder.make[A].mapResult(make) + + /** + * Wrap an existing `Array` into a `ArraySeq` of the proper primitive specialization type + * without copying. + * + * Note that an array containing boxed primitives can be converted to a `ArraySeq` without + * copying. For example, `val a: Array[Any] = Array(1)` is an array of `Object` at runtime, + * containing `Integer`s. An `ArraySeq[Int]` can be obtained with a cast: + * `ArraySeq.make(a).asInstanceOf[ArraySeq[Int]]`. The values are still + * boxed, the resulting instance is an [[ArraySeq.ofRef]]. Writing + * `ArraySeq.make(a.asInstanceOf[Array[Int]])` does not work, it throws a `ClassCastException` + * at runtime. + */ + def make[T](x: Array[T]): ArraySeq[T] = ((x.asInstanceOf[Array[_]]: @unchecked) match { + case null => null + case x: Array[AnyRef] => new ofRef[AnyRef](x) + case x: Array[Int] => new ofInt(x) + case x: Array[Double] => new ofDouble(x) + case x: Array[Long] => new ofLong(x) + case x: Array[Float] => new ofFloat(x) + case x: Array[Char] => new ofChar(x) + case x: Array[Byte] => new ofByte(x) + case x: Array[Short] => new ofShort(x) + case x: Array[Boolean] => new ofBoolean(x) + case x: Array[Unit] => new ofUnit(x) + }).asInstanceOf[ArraySeq[T]] + + @SerialVersionUID(3L) + final class ofRef[T <: AnyRef](val array: Array[T]) extends ArraySeq[T] { + def elemTag: ClassTag[T] = ClassTag[T](array.getClass.getComponentType) + def length: Int = array.length + def apply(index: Int): T = array(index) + def update(index: Int, elem: T): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofRef[_] => 
+ Array.equals( + this.array.asInstanceOf[Array[AnyRef]], + that.array.asInstanceOf[Array[AnyRef]]) + case _ => super.equals(that) + } + override def iterator: Iterator[T] = new ArrayOps.ArrayIterator[T](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[T, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + new ObjectArrayStepper(array, 0, array.length) + else shape.parUnbox(new ObjectArrayStepper(array, 0, array.length).asInstanceOf[AnyStepper[T] with EfficientSplit]) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofByte(val array: Array[Byte]) extends ArraySeq[Byte] { + // Type erases to `ManifestFactory.ByteManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Byte.type = ClassTag.Byte + def length: Int = array.length + def apply(index: Int): Byte = array(index) + def update(index: Int, elem: Byte): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofByte => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Byte] = new ArrayOps.ArrayIterator[Byte](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Byte, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedByteArrayStepper(array, 0, array.length)) + else new WidenedByteArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofShort(val array: Array[Short]) extends ArraySeq[Short] { + // Type erases to `ManifestFactory.ShortManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Short.type = ClassTag.Short + def length: Int = array.length + def apply(index: Int): Short = array(index) + def update(index: Int, elem: Short): Unit = { array(index) = elem } + 
override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofShort => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Short] = new ArrayOps.ArrayIterator[Short](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Short, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedShortArrayStepper(array, 0, array.length)) + else new WidenedShortArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofChar(val array: Array[Char]) extends ArraySeq[Char] { + // Type erases to `ManifestFactory.CharManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Char.type = ClassTag.Char + def length: Int = array.length + def apply(index: Int): Char = array(index) + def update(index: Int, elem: Char): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofChar => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Char] = new ArrayOps.ArrayIterator[Char](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new WidenedCharArrayStepper(array, 0, array.length)) + else new WidenedCharArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + + override def addString(sb: StringBuilder, start: String, sep: String, end: String): sb.type = { + val jsb = sb.underlying + if (start.length != 0) jsb.append(start) + val len = array.length + if (len != 0) { + if (sep.isEmpty) jsb.append(array) + else { + jsb.ensureCapacity(jsb.length + len + end.length + (len - 1) * sep.length) + jsb.append(array(0)) + var 
i = 1 + while (i < len) { + jsb.append(sep) + jsb.append(array(i)) + i += 1 + } + } + } + if (end.length != 0) jsb.append(end) + sb + } + } + + @SerialVersionUID(3L) + final class ofInt(val array: Array[Int]) extends ArraySeq[Int] { + // Type erases to `ManifestFactory.IntManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Int.type = ClassTag.Int + def length: Int = array.length + def apply(index: Int): Int = array(index) + def update(index: Int, elem: Int): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofInt => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Int] = new ArrayOps.ArrayIterator[Int](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Int, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParIntStepper(new IntArrayStepper(array, 0, array.length)) + else new IntArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofLong(val array: Array[Long]) extends ArraySeq[Long] { + // Type erases to `ManifestFactory.LongManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Long.type = ClassTag.Long + def length: Int = array.length + def apply(index: Int): Long = array(index) + def update(index: Int, elem: Long): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofLong => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Long] = new ArrayOps.ArrayIterator[Long](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Long, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParLongStepper(new 
LongArrayStepper(array, 0, array.length)) + else new LongArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofFloat(val array: Array[Float]) extends ArraySeq[Float] { + // Type erases to `ManifestFactory.FloatManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Float.type = ClassTag.Float + def length: Int = array.length + def apply(index: Int): Float = array(index) + def update(index: Int, elem: Float): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofFloat => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Float] = new ArrayOps.ArrayIterator[Float](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Float, S]): S with EfficientSplit = ( + if(shape.shape == StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new WidenedFloatArrayStepper(array, 0, array.length)) + else new WidenedFloatArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofDouble(val array: Array[Double]) extends ArraySeq[Double] { + // Type erases to `ManifestFactory.DoubleManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Double.type = ClassTag.Double + def length: Int = array.length + def apply(index: Int): Double = array(index) + def update(index: Int, elem: Double): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofDouble => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Double] = new ArrayOps.ArrayIterator[Double](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Double, S]): S with EfficientSplit = ( + if(shape.shape == 
StepperShape.ReferenceShape) + AnyStepper.ofParDoubleStepper(new DoubleArrayStepper(array, 0, array.length)) + else new DoubleArrayStepper(array, 0, array.length) + ).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofBoolean(val array: Array[Boolean]) extends ArraySeq[Boolean] { + // Type erases to `ManifestFactory.BooleanManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Boolean.type = ClassTag.Boolean + def length: Int = array.length + def apply(index: Int): Boolean = array(index) + def update(index: Int, elem: Boolean): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofBoolean => Arrays.equals(array, that.array) + case _ => super.equals(that) + } + override def iterator: Iterator[Boolean] = new ArrayOps.ArrayIterator[Boolean](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Boolean, S]): S with EfficientSplit = + new BoxedBooleanArrayStepper(array, 0, array.length).asInstanceOf[S with EfficientSplit] + } + + @SerialVersionUID(3L) + final class ofUnit(val array: Array[Unit]) extends ArraySeq[Unit] { + // Type erases to `ManifestFactory.UnitManifest`, but can't annotate that because it's not accessible + def elemTag: ClassTag.Unit.type = ClassTag.Unit + def length: Int = array.length + def apply(index: Int): Unit = array(index) + def update(index: Int, elem: Unit): Unit = { array(index) = elem } + override def hashCode = MurmurHash3.arraySeqHash(array) + override def equals(that: Any) = that match { + case that: ofUnit => array.length == that.array.length + case _ => super.equals(that) + } + override def iterator: Iterator[Unit] = new ArrayOps.ArrayIterator[Unit](array) + override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Unit, S]): S with EfficientSplit = + new ObjectArrayStepper[AnyRef](array.asInstanceOf[Array[AnyRef]], 0, array.length).asInstanceOf[S 
with EfficientSplit] + } +} diff --git a/scala2-library-bootstrapped/src/scala/collection/mutable/CollisionProofHashMap.scala b/scala2-library-bootstrapped/src/scala/collection/mutable/CollisionProofHashMap.scala new file mode 100644 index 000000000000..36b53d1e433b --- /dev/null +++ b/scala2-library-bootstrapped/src/scala/collection/mutable/CollisionProofHashMap.scala @@ -0,0 +1,888 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.collection +package mutable + +import scala.{unchecked => uc} +import scala.annotation.{implicitNotFound, tailrec, unused} +import scala.annotation.unchecked.uncheckedVariance +import scala.collection.generic.DefaultSerializationProxy +import scala.runtime.Statics + +/** This class implements mutable maps using a hashtable with red-black trees in the buckets for good + * worst-case performance on hash collisions. An `Ordering` is required for the element type. Equality + * as determined by the `Ordering` has to be consistent with `equals` and `hashCode`. Universal equality + * of numeric types is not supported (similar to `AnyRefMap`). + * + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] + * section on `Hash Tables` for more information. 
+ * + * @define Coll `mutable.CollisionProofHashMap` + * @define coll mutable collision-proof hash map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ +final class CollisionProofHashMap[K, V](initialCapacity: Int, loadFactor: Double)(implicit ordering: Ordering[K]) + extends AbstractMap[K, V] + with MapOps[K, V, Map, CollisionProofHashMap[K, V]] //-- + with StrictOptimizedIterableOps[(K, V), Iterable, CollisionProofHashMap[K, V]] + with StrictOptimizedMapOps[K, V, Map, CollisionProofHashMap[K, V]] { //-- + + private[this] final def sortedMapFactory: SortedMapFactory[CollisionProofHashMap] = CollisionProofHashMap + + def this()(implicit ordering: Ordering[K]) = this(CollisionProofHashMap.defaultInitialCapacity, CollisionProofHashMap.defaultLoadFactor)(ordering) + + import CollisionProofHashMap.Node + private[this] type RBNode = CollisionProofHashMap.RBNode[K, V] + private[this] type LLNode = CollisionProofHashMap.LLNode[K, V] + + /** The actual hash table. */ + private[this] var table: Array[Node] = new Array[Node](tableSizeFor(initialCapacity)) + + /** The next size value at which to resize (capacity * load factor). 
*/ + private[this] var threshold: Int = newThreshold(table.length) + + private[this] var contentSize = 0 + + override def size: Int = contentSize + + @`inline` private[this] final def computeHash(o: K): Int = { + val h = if(o.asInstanceOf[AnyRef] eq null) 0 else o.hashCode + h ^ (h >>> 16) + } + + @`inline` private[this] final def index(hash: Int) = hash & (table.length - 1) + + override protected def fromSpecific(coll: IterableOnce[(K, V)] @uncheckedVariance): CollisionProofHashMap[K, V] @uncheckedVariance = CollisionProofHashMap.from(coll) + override protected def newSpecificBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] @uncheckedVariance = CollisionProofHashMap.newBuilder[K, V] + + override def empty: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + + override def contains(key: K): Boolean = findNode(key) ne null + + def get(key: K): Option[V] = findNode(key) match { + case null => None + case nd => Some(nd match { + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value + }) + } + + @throws[NoSuchElementException] + override def apply(key: K): V = findNode(key) match { + case null => default(key) + case nd => nd match { + case nd: LLNode @uc => nd.value + case nd: RBNode @uc => nd.value + } + } + + override def getOrElse[V1 >: V](key: K, default: => V1): V1 = { + val nd = findNode(key) + if (nd eq null) default else nd match { + case nd: LLNode @uc => nd.value + case n => n.asInstanceOf[RBNode].value + } + } + + @`inline` private[this] def findNode(elem: K): Node = { + val hash = computeHash(elem) + table(index(hash)) match { + case null => null + case n: LLNode @uc => n.getNode(elem, hash) + case n => n.asInstanceOf[RBNode].getNode(elem, hash) + } + } + + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) { + if(size == 0) reallocTable(target) + else growTable(target) + } + } + + override def update(key: K, value: V): Unit = put0(key, 
value, false) + + override def put(key: K, value: V): Option[V] = put0(key, value, true) match { + case null => None + case sm => sm + } + + def addOne(elem: (K, V)): this.type = { put0(elem._1, elem._2, false); this } + + @`inline` private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) + } + + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + val res = table(idx) match { + case n: RBNode @uc => + insert(n, idx, key, hash, value) + case _old => + val old: LLNode = _old.asInstanceOf[LLNode] + if(old eq null) { + table(idx) = new LLNode(key, hash, value, null) + } else { + var remaining = CollisionProofHashMap.treeifyThreshold + var prev: LLNode = null + var n = old + while((n ne null) && n.hash <= hash && remaining > 0) { + if(n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return (if(getOld) Some(old) else null) + } + prev = n + n = n.next + remaining -= 1 + } + if(remaining == 0) { + treeify(old, idx) + return put0(key, value, getOld, hash, idx) + } + if(prev eq null) table(idx) = new LLNode(key, hash, value, old) + else prev.next = new LLNode(key, hash, value, prev.next) + } + true + } + if(res) contentSize += 1 + if(res) Some(null.asInstanceOf[V]) else null //TODO + } + + private[this] def treeify(old: LLNode, idx: Int): Unit = { + table(idx) = CollisionProofHashMap.leaf(old.key, old.hash, old.value, red = false, null) + var n: LLNode = old.next + while(n ne null) { + val root = table(idx).asInstanceOf[RBNode] + insertIntoExisting(root, idx, n.key, n.hash, n.value, root) + n = n.next + } + } + + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + val k = xs.knownSize + if(k > 0) sizeHint(contentSize + k) + super.addAll(xs) + } + + // returns the old value or Statics.pfMarker if not found + private[this] def 
remove0(elem: K) : Any = { + val hash = computeHash(elem) + val idx = index(hash) + table(idx) match { + case null => Statics.pfMarker + case t: RBNode @uc => + val v = delete(t, idx, elem, hash) + if(v.asInstanceOf[AnyRef] ne Statics.pfMarker) contentSize -= 1 + v + case nd: LLNode @uc if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + contentSize -= 1 + nd.value + case nd: LLNode @uc => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + contentSize -= 1 + return next.value + } + prev = next + next = next.next + } + Statics.pfMarker + } + } + + private[this] abstract class MapIterator[R] extends AbstractIterator[R] { + protected[this] def extract(node: LLNode): R + protected[this] def extract(node: RBNode): R + + private[this] var i = 0 + private[this] var node: Node = null + private[this] val len = table.length + + def hasNext: Boolean = { + if(node ne null) true + else { + while(i < len) { + val n = table(i) + i += 1 + n match { + case null => + case n: RBNode @uc => + node = CollisionProofHashMap.minNodeNonNull(n) + return true + case n: LLNode @uc => + node = n + return true + } + } + false + } + } + + def next(): R = + if(!hasNext) Iterator.empty.next() + else node match { + case n: RBNode @uc => + val r = extract(n) + node = CollisionProofHashMap.successor(n ) + r + case n: LLNode @uc => + val r = extract(n) + node = n.next + r + } + } + + override def keysIterator: Iterator[K] = { + if (isEmpty) Iterator.empty + else new MapIterator[K] { + protected[this] def extract(node: LLNode) = node.key + protected[this] def extract(node: RBNode) = node.key + } + } + + override def iterator: Iterator[(K, V)] = { + if (isEmpty) Iterator.empty + else new MapIterator[(K, V)] { + protected[this] def extract(node: LLNode) = (node.key, node.value) + protected[this] def extract(node: RBNode) = 
(node.key, node.value) + } + } + + private[this] def growTable(newlen: Int) = { + var oldlen = table.length + table = java.util.Arrays.copyOf(table, newlen) + threshold = newThreshold(table.length) + while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) splitBucket(old, i, i + oldlen, oldlen) + i += 1 + } + oldlen *= 2 + } + } + + @`inline` private[this] def reallocTable(newlen: Int) = { + table = new Array(newlen) + threshold = newThreshold(table.length) + } + + @`inline` private[this] def splitBucket(tree: Node, lowBucket: Int, highBucket: Int, mask: Int): Unit = tree match { + case t: LLNode @uc => splitBucket(t, lowBucket, highBucket, mask) + case t: RBNode @uc => splitBucket(t, lowBucket, highBucket, mask) + } + + private[this] def splitBucket(list: LLNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { + val preLow: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + val preHigh: LLNode = new LLNode(null.asInstanceOf[K], 0, null.asInstanceOf[V], null) + //preLow.next = null + //preHigh.next = null + var lastLow: LLNode = preLow + var lastHigh: LLNode = preHigh + var n = list + while(n ne null) { + val next = n.next + if((n.hash & mask) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(list ne preLow.next) table(lowBucket) = preLow.next + if(preHigh.next ne null) { + table(highBucket) = preHigh.next + lastHigh.next = null + } + } + + private[this] def splitBucket(tree: RBNode, lowBucket: Int, highBucket: Int, mask: Int): Unit = { + var lowCount, highCount = 0 + tree.foreachNode((n: RBNode) => if((n.hash & mask) != 0) highCount += 1 else lowCount += 1) + if(highCount != 0) { + if(lowCount == 0) { + table(lowBucket) = null + table(highBucket) = tree + } else { + table(lowBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) == 0), 
lowCount) + table(highBucket) = fromNodes(new CollisionProofHashMap.RBNodesIterator(tree).filter(n => (n.hash & mask) != 0), highCount) + } + } + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + } + + override def remove(key: K): Option[V] = { + val v = remove0(key) + if(v.asInstanceOf[AnyRef] eq Statics.pfMarker) None else Some(v.asInstanceOf[V]) + } + + def subtractOne(elem: K): this.type = { remove0(elem); this } + + override def knownSize: Int = size + + override def isEmpty: Boolean = size == 0 + + override def foreach[U](f: ((K, V)) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n match { + case n: LLNode @uc => n.foreach(f) + case n: RBNode @uc => n.foreach(f) + } + i += 1 + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + val len = table.length + var i = 0 + while(i < len) { + val n = table(i) + if(n ne null) n match { + case n: LLNode @uc => n.foreachEntry(f) + case n: RBNode @uc => n.foreachEntry(f) + } + i += 1 + } + } + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new CollisionProofHashMap.DeserializationFactory[K, V](table.length, loadFactor, ordering), this) + + override protected[this] def className = "CollisionProofHashMap" + + override def getOrElseUpdate(key: K, defaultValue: => V): V = { + val hash = computeHash(key) + val idx = index(hash) + table(idx) match { + case null => () + case n: LLNode @uc => + val nd = n.getNode(key, hash) + if(nd != null) return nd.value + case n => + val nd = n.asInstanceOf[RBNode].getNode(key, hash) + if(nd != null) return nd.value + } + val table0 = table + val default = defaultValue + if(contentSize + 1 >= threshold) growTable(table.length * 
2) + // Avoid recomputing index if the `defaultValue()` or new element hasn't triggered a table resize. + val newIdx = if (table0 eq table) idx else index(hash) + put0(key, default, false, hash, newIdx) + default + } + + ///////////////////// Overrides code from SortedMapOps + + /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given function + * `f` to each element of this $coll and collecting the results. + */ + def map[K2, V2](f: ((K, V)) => (K2, V2)) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.Map[(K, V), (K2, V2)](this, f)) + + /** Builds a new `CollisionProofHashMap` by applying a function to all elements of this $coll + * and using the elements of the resulting collections. + * + * @param f the function to apply to each element. + * @return a new $coll resulting from applying the given collection-valued function + * `f` to each element of this $coll and concatenating the results. + */ + def flatMap[K2, V2](f: ((K, V)) => IterableOnce[(K2, V2)]) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.FlatMap(this, f)) + + /** Builds a new sorted map by applying a partial function to all elements of this $coll + * on which the function is defined. + * + * @param pf the partial function which filters and maps the $coll. + * @return a new $coll resulting from applying the given partial function + * `pf` to each element on which it is defined and collecting the results. + * The order of the elements is preserved. 
+ */ + def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)]) + (implicit @implicitNotFound(CollisionProofHashMap.ordMsg) ordering: Ordering[K2]): CollisionProofHashMap[K2, V2] = + sortedMapFactory.from(new View.Collect(this, pf)) + + override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = sortedMapFactory.from(suffix match { + case it: Iterable[(K, V2)] => new View.Concat(this, it) + case _ => iterator.concat(suffix.iterator) + }) + + /** Alias for `concat` */ + @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]): CollisionProofHashMap[K, V2] = concat(xs) + + @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") + override def + [V1 >: V](kv: (K, V1)): CollisionProofHashMap[K, V1] = + sortedMapFactory.from(new View.Appended(this, kv)) + + @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CollisionProofHashMap[K, V1] = + sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems)) + + ///////////////////// RedBlackTree code derived from mutable.RedBlackTree: + + @`inline` private[this] def isRed(node: RBNode) = (node ne null) && node.red + @`inline` private[this] def isBlack(node: RBNode) = (node eq null) || !node.red + + @unused @`inline` private[this] def compare(key: K, hash: Int, node: LLNode): Int = { + val i = hash - node.hash + if(i != 0) i else ordering.compare(key, node.key) + } + + @`inline` private[this] def compare(key: K, hash: Int, node: RBNode): Int = { + /*val i = hash - node.hash + if(i != 0) i else*/ ordering.compare(key, node.key) + } + + // ---- insertion ---- + + @tailrec private[this] final def insertIntoExisting(_root: RBNode, bucket: Int, key: K, hash: Int, value: V, x: RBNode): Boolean = { + val cmp = compare(key, hash, x) + if(cmp == 0) { + x.value = value + false + } else { + val next = 
if(cmp < 0) x.left else x.right + if(next eq null) { + val z = CollisionProofHashMap.leaf(key, hash, value, red = true, x) + if (cmp < 0) x.left = z else x.right = z + table(bucket) = fixAfterInsert(_root, z) + return true + } + else insertIntoExisting(_root, bucket, key, hash, value, next) + } + } + + private[this] final def insert(tree: RBNode, bucket: Int, key: K, hash: Int, value: V): Boolean = { + if(tree eq null) { + table(bucket) = CollisionProofHashMap.leaf(key, hash, value, red = false, null) + true + } else insertIntoExisting(tree, bucket, key, hash, value, tree) + } + + private[this] def fixAfterInsert(_root: RBNode, node: RBNode): RBNode = { + var root = _root + var z = node + while (isRed(z.parent)) { + if (z.parent eq z.parent.parent.left) { + val y = z.parent.parent.right + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.right) { + z = z.parent + root = rotateLeft(root, z) + } + z.parent.red = false + z.parent.parent.red = true + root = rotateRight(root, z.parent.parent) + } + } else { // symmetric cases + val y = z.parent.parent.left + if (isRed(y)) { + z.parent.red = false + y.red = false + z.parent.parent.red = true + z = z.parent.parent + } else { + if (z eq z.parent.left) { + z = z.parent + root = rotateRight(root, z) + } + z.parent.red = false + z.parent.parent.red = true + root = rotateLeft(root, z.parent.parent) + } + } + } + root.red = false + root + } + + // ---- deletion ---- + + // returns the old value or Statics.pfMarker if not found + private[this] def delete(_root: RBNode, bucket: Int, key: K, hash: Int): Any = { + var root = _root + val z = root.getNode(key, hash: Int) + if (z ne null) { + val oldValue = z.value + var y = z + var yIsRed = y.red + var x: RBNode = null + var xParent: RBNode = null + + if (z.left eq null) { + x = z.right + root = transplant(root, z, z.right) + xParent = z.parent + } + else if (z.right eq null) { + x = z.left + root 
= transplant(root, z, z.left) + xParent = z.parent + } + else { + y = CollisionProofHashMap.minNodeNonNull(z.right) + yIsRed = y.red + x = y.right + + if (y.parent eq z) xParent = y + else { + xParent = y.parent + root = transplant(root, y, y.right) + y.right = z.right + y.right.parent = y + } + root = transplant(root, z, y) + y.left = z.left + y.left.parent = y + y.red = z.red + } + + if (!yIsRed) root = fixAfterDelete(root, x, xParent) + if(root ne _root) table(bucket) = root + oldValue + } else Statics.pfMarker + } + + private[this] def fixAfterDelete(_root: RBNode, node: RBNode, parent: RBNode): RBNode = { + var root = _root + var x = node + var xParent = parent + while ((x ne root) && isBlack(x)) { + if (x eq xParent.left) { + var w = xParent.right + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + root = rotateLeft(root, xParent) + w = xParent.right + } + if (isBlack(w.left) && isBlack(w.right)) { + w.red = true + x = xParent + } else { + if (isBlack(w.right)) { + w.left.red = false + w.red = true + root = rotateRight(root, w) + w = xParent.right + } + w.red = xParent.red + xParent.red = false + w.right.red = false + root = rotateLeft(root, xParent) + x = root + } + } else { // symmetric cases + var w = xParent.left + // assert(w ne null) + + if (w.red) { + w.red = false + xParent.red = true + root = rotateRight(root, xParent) + w = xParent.left + } + if (isBlack(w.right) && isBlack(w.left)) { + w.red = true + x = xParent + } else { + if (isBlack(w.left)) { + w.right.red = false + w.red = true + root = rotateLeft(root, w) + w = xParent.left + } + w.red = xParent.red + xParent.red = false + w.left.red = false + root = rotateRight(root, xParent) + x = root + } + } + xParent = x.parent + } + if (x ne null) x.red = false + root + } + + // ---- helpers ---- + + @`inline` private[this] def rotateLeft(_root: RBNode, x: RBNode): RBNode = { + var root = _root + val y = x.right + x.right = y.left + + val xp = x.parent + if (y.left ne null) 
y.left.parent = x + y.parent = xp + + if (xp eq null) root = y + else if (x eq xp.left) xp.left = y + else xp.right = y + + y.left = x + x.parent = y + root + } + + @`inline` private[this] def rotateRight(_root: RBNode, x: RBNode): RBNode = { + var root = _root + val y = x.left + x.left = y.right + + val xp = x.parent + if (y.right ne null) y.right.parent = x + y.parent = xp + + if (xp eq null) root = y + else if (x eq xp.right) xp.right = y + else xp.left = y + + y.right = x + x.parent = y + root + } + + /** + * Transplant the node `from` to the place of node `to`. This is done by setting `from` as a child of `to`'s previous + * parent and setting `from`'s parent to the `to`'s previous parent. The children of `from` are left unchanged. + */ + private[this] def transplant(_root: RBNode, to: RBNode, from: RBNode): RBNode = { + var root = _root + if (to.parent eq null) root = from + else if (to eq to.parent.left) to.parent.left = from + else to.parent.right = from + if (from ne null) from.parent = to.parent + root + } + + // building + + def fromNodes(xs: Iterator[Node], size: Int): RBNode = { + val maxUsedDepth = 32 - Integer.numberOfLeadingZeros(size) // maximum depth of non-leaf nodes + def f(level: Int, size: Int): RBNode = size match { + case 0 => null + case 1 => + val nn = xs.next() + val (key, hash, value) = nn match { + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) + } + new RBNode(key, hash, value, level == maxUsedDepth && level != 1, null, null, null) + case n => + val leftSize = (size-1)/2 + val left = f(level+1, leftSize) + val nn = xs.next() + val right = f(level+1, size-1-leftSize) + val (key, hash, value) = nn match { + case nn: LLNode @uc => (nn.key, nn.hash, nn.value) + case nn: RBNode @uc => (nn.key, nn.hash, nn.value) + } + val n = new RBNode(key, hash, value, false, left, right, null) + if(left ne null) left.parent = n + right.parent = n + n + } + f(1, size) + } +} + +/** + * $factoryInfo 
+ * @define Coll `mutable.CollisionProofHashMap` + * @define coll mutable collision-proof hash map + */ +@SerialVersionUID(3L) +object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { + private[collection] final val ordMsg = "No implicit Ordering[${K2}] found to build a CollisionProofHashMap[${K2}, ${V2}]. You may want to upcast to a Map[${K}, ${V}] first by calling `unsorted`." + + def from[K : Ordering, V](it: scala.collection.IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = { + val k = it.knownSize + val cap = if(k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new CollisionProofHashMap[K, V](cap, defaultLoadFactor) ++= it + } + + def empty[K : Ordering, V]: CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V] + + def newBuilder[K : Ordering, V]: Builder[(K, V), CollisionProofHashMap[K, V]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) + + def newBuilder[K : Ordering, V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), CollisionProofHashMap[K, V]] = + new GrowableBuilder[(K, V), CollisionProofHashMap[K, V]](new CollisionProofHashMap[K, V](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 + + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it + def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(using ordering) + } + + @unused @`inline` private def 
compare[K, V](key: K, hash: Int, node: LLNode[K, V])(implicit ord: Ordering[K]): Int = { + val i = hash - node.hash + if(i != 0) i else ord.compare(key, node.key) + } + + @`inline` private def compare[K, V](key: K, hash: Int, node: RBNode[K, V])(implicit ord: Ordering[K]): Int = { + /*val i = hash - node.hash + if(i != 0) i else*/ ord.compare(key, node.key) + } + + private final val treeifyThreshold = 8 + + // Superclass for RBNode and LLNode to help the JIT with optimizing instance checks, but no shared common fields. + // Keeping calls monomorphic where possible and dispatching manually where needed is faster. + sealed abstract class Node + + /////////////////////////// Red-Black Tree Node + + final class RBNode[K, V](var key: K, var hash: Int, var value: V, var red: Boolean, var left: RBNode[K, V], var right: RBNode[K, V], var parent: RBNode[K, V]) extends Node { + override def toString: String = "RBNode(" + key + ", " + hash + ", " + value + ", " + red + ", " + left + ", " + right + ")" + + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): RBNode[K, V] = { + val cmp = compare(k, h, this) + if (cmp < 0) { + if(left ne null) left.getNode(k, h) else null + } else if (cmp > 0) { + if(right ne null) right.getNode(k, h) else null + } else this + } + + def foreach[U](f: ((K, V)) => U): Unit = { + if(left ne null) left.foreach(f) + f((key, value)) + if(right ne null) right.foreach(f) + } + + def foreachEntry[U](f: (K, V) => U): Unit = { + if(left ne null) left.foreachEntry(f) + f(key, value) + if(right ne null) right.foreachEntry(f) + } + + def foreachNode[U](f: RBNode[K, V] => U): Unit = { + if(left ne null) left.foreachNode(f) + f(this) + if(right ne null) right.foreachNode(f) + } + } + + @`inline` private def leaf[A, B](key: A, hash: Int, value: B, red: Boolean, parent: RBNode[A, B]): RBNode[A, B] = + new RBNode(key, hash, value, red, null, null, parent) + + @tailrec private def minNodeNonNull[A, B](node: RBNode[A, B]): RBNode[A, B] = + if (node.left eq 
null) node else minNodeNonNull(node.left) + + /** + * Returns the node that follows `node` in an in-order tree traversal. If `node` has the maximum key (and is, + * therefore, the last node), this method returns `null`. + */ + private def successor[A, B](node: RBNode[A, B]): RBNode[A, B] = { + if (node.right ne null) minNodeNonNull(node.right) + else { + var x = node + var y = x.parent + while ((y ne null) && (x eq y.right)) { + x = y + y = y.parent + } + y + } + } + + private final class RBNodesIterator[A, B](tree: RBNode[A, B])(implicit @unused ord: Ordering[A]) extends AbstractIterator[RBNode[A, B]] { + private[this] var nextNode: RBNode[A, B] = if(tree eq null) null else minNodeNonNull(tree) + + def hasNext: Boolean = nextNode ne null + + @throws[NoSuchElementException] + def next(): RBNode[A, B] = nextNode match { + case null => Iterator.empty.next() + case node => + nextNode = successor(node) + node + } + } + + /////////////////////////// Linked List Node + + private final class LLNode[K, V](var key: K, var hash: Int, var value: V, var next: LLNode[K, V]) extends Node { + override def toString = s"LLNode($key, $value, $hash) -> $next" + + private[this] def eq(a: Any, b: Any): Boolean = + if(a.asInstanceOf[AnyRef] eq null) b.asInstanceOf[AnyRef] eq null else a.asInstanceOf[AnyRef].equals(b) + + @tailrec def getNode(k: K, h: Int)(implicit ord: Ordering[K]): LLNode[K, V] = { + if(h == hash && eq(k, key) /*ord.compare(k, key) == 0*/) this + else if((next eq null) || (hash > h)) null + else next.getNode(k, h) + } + + @tailrec def foreach[U](f: ((K, V)) => U): Unit = { + f((key, value)) + if(next ne null) next.foreach(f) + } + + @tailrec def foreachEntry[U](f: (K, V) => U): Unit = { + f(key, value) + if(next ne null) next.foreachEntry(f) + } + + @tailrec def foreachNode[U](f: LLNode[K, V] => U): Unit = { + f(this) + if(next ne null) next.foreachNode(f) + } + } +} diff --git a/scala2-library-cc/src/scala/Array.scala b/scala2-library-cc/src/scala/Array.scala new file 
mode 100644 index 000000000000..d2098a76f32f --- /dev/null +++ b/scala2-library-cc/src/scala/Array.scala @@ -0,0 +1,690 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala + +//import scala.collection.generic._ +import scala.collection.{Factory, immutable, mutable} +import mutable.ArrayBuilder +import immutable.ArraySeq +import scala.language.implicitConversions +import scala.reflect.{ClassTag, classTag} +import scala.runtime.BoxedUnit +import scala.runtime.ScalaRunTime +import scala.runtime.ScalaRunTime.{array_apply, array_update} + +/** Utility methods for operating on arrays. + * For example: + * {{{ + * val a = Array(1, 2) + * val b = Array.ofDim[Int](2) + * val c = Array.concat(a, b) + * }}} + * where the array objects `a`, `b` and `c` have respectively the values + * `Array(1, 2)`, `Array(0, 0)` and `Array(1, 2, 0, 0)`. 
+ */ +object Array { + val emptyBooleanArray = new Array[Boolean](0) + val emptyByteArray = new Array[Byte](0) + val emptyCharArray = new Array[Char](0) + val emptyDoubleArray = new Array[Double](0) + val emptyFloatArray = new Array[Float](0) + val emptyIntArray = new Array[Int](0) + val emptyLongArray = new Array[Long](0) + val emptyShortArray = new Array[Short](0) + val emptyObjectArray = new Array[Object](0) + + /** Provides an implicit conversion from the Array object to a collection Factory */ + implicit def toFactory[A : ClassTag](dummy: Array.type): Factory[A, Array[A]] = new ArrayFactory(dummy) + @SerialVersionUID(3L) + private class ArrayFactory[A : ClassTag](dummy: Array.type) extends Factory[A, Array[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): Array[A] = Array.from[A](it) + def newBuilder: mutable.Builder[A, Array[A]] = Array.newBuilder[A] + } + + /** + * Returns a new [[scala.collection.mutable.ArrayBuilder]]. + */ + def newBuilder[T](implicit t: ClassTag[T]): ArrayBuilder[T] = ArrayBuilder.make[T](using t) + + /** Build an array from the iterable collection. + * + * {{{ + * scala> val a = Array.from(Seq(1, 5)) + * val a: Array[Int] = Array(1, 5) + * + * scala> val b = Array.from(Range(1, 5)) + * val b: Array[Int] = Array(1, 2, 3, 4) + * }}} + * + * @param it the iterable collection + * @return an array consisting of elements of the iterable collection + */ + def from[A : ClassTag](it: IterableOnce[A]): Array[A] = it match { + case it: Iterable[A] => it.toArray[A] + case _ => it.iterator.toArray[A] + } + + private def slowcopy(src : AnyRef, + srcPos : Int, + dest : AnyRef, + destPos : Int, + length : Int): Unit = { + var i = srcPos + var j = destPos + val srcUntil = srcPos + length + while (i < srcUntil) { + array_update(dest, j, array_apply(src, i)) + i += 1 + j += 1 + } + } + + /** Copy one array to another. 
+ * Equivalent to Java's + * `System.arraycopy(src, srcPos, dest, destPos, length)`, + * except that this also works for polymorphic and boxed arrays. + * + * Note that the passed-in `dest` array will be modified by this call. + * + * @param src the source array. + * @param srcPos starting position in the source array. + * @param dest destination array. + * @param destPos starting position in the destination array. + * @param length the number of array elements to be copied. + * + * @see `java.lang.System#arraycopy` + */ + def copy(src: AnyRef, srcPos: Int, dest: AnyRef, destPos: Int, length: Int): Unit = { + val srcClass = src.getClass + if (srcClass.isArray && dest.getClass.isAssignableFrom(srcClass)) + java.lang.System.arraycopy(src, srcPos, dest, destPos, length) + else + slowcopy(src, srcPos, dest, destPos, length) + } + + /** Copy one array to another, truncating or padding with default values (if + * necessary) so the copy has the specified length. + * + * Equivalent to Java's + * `java.util.Arrays.copyOf(original, newLength)`, + * except that this works for primitive and object arrays in a single method. 
+ * + * @see `java.util.Arrays#copyOf` + */ + def copyOf[A](original: Array[A], newLength: Int): Array[A] = ((original: @unchecked) match { + case x: Array[BoxedUnit] => newUnitArray(newLength).asInstanceOf[Array[A]] + case x: Array[AnyRef] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Int] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Double] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Long] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Float] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Char] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Byte] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Short] => java.util.Arrays.copyOf(x, newLength) + case x: Array[Boolean] => java.util.Arrays.copyOf(x, newLength) + }).asInstanceOf[Array[A]] + + /** Copy one array to another, truncating or padding with default values (if + * necessary) so the copy has the specified length. The new array can have + * a different type than the original one as long as the values are + * assignment-compatible. When copying between primitive and object arrays, + * boxing and unboxing are supported. + * + * Equivalent to Java's + * `java.util.Arrays.copyOf(original, newLength, newType)`, + * except that this works for all combinations of primitive and object arrays + * in a single method. 
+ * + * @see `java.util.Arrays#copyOf` + */ + def copyAs[A](original: Array[_], newLength: Int)(implicit ct: ClassTag[A]): Array[A] = { + val runtimeClass = ct.runtimeClass + if (runtimeClass == Void.TYPE) newUnitArray(newLength).asInstanceOf[Array[A]] + else { + val destClass = runtimeClass.asInstanceOf[Class[A]] + if (destClass.isAssignableFrom(original.getClass.getComponentType)) { + if (destClass.isPrimitive) copyOf[A](original.asInstanceOf[Array[A]], newLength) + else { + val destArrayClass = java.lang.reflect.Array.newInstance(destClass, 0).getClass.asInstanceOf[Class[Array[AnyRef]]] + java.util.Arrays.copyOf(original.asInstanceOf[Array[AnyRef]], newLength, destArrayClass).asInstanceOf[Array[A]] + } + } else { + val dest = new Array[A](newLength) + Array.copy(original, 0, dest, 0, original.length) + dest + } + } + } + + private def newUnitArray(len: Int): Array[Unit] = { + val result = new Array[Unit](len) + java.util.Arrays.fill(result.asInstanceOf[Array[AnyRef]], ()) + result + } + + /** Returns an array of length 0 */ + def empty[T: ClassTag]: Array[T] = new Array[T](0) + + /** Creates an array with given elements. + * + * @param xs the elements to put in the array + * @return an array containing all elements from xs. + */ + // Subject to a compiler optimization in Cleanup. + // Array(e0, ..., en) is translated to { val a = new Array(3); a(i) = ei; a } + def apply[T: ClassTag](xs: T*): Array[T] = { + val len = xs.length + xs match { + case wa: immutable.ArraySeq[_] if wa.unsafeArray.getClass.getComponentType == classTag[T].runtimeClass => + // We get here in test/files/run/sd760a.scala, `Array[T](t)` for + // a specialized type parameter `T`. While we still pay for two + // copies of the array it is better than before when we also boxed + // each element when populating the result. 
+ ScalaRunTime.array_clone(wa.unsafeArray).asInstanceOf[Array[T]] + case _ => + val array = new Array[T](len) + val iterator = xs.iterator + var i = 0 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + } + + /** Creates an array of `Boolean` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Boolean, xs: Boolean*): Array[Boolean] = { + val array = new Array[Boolean](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Byte` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Byte, xs: Byte*): Array[Byte] = { + val array = new Array[Byte](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Short` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Short, xs: Short*): Array[Short] = { + val array = new Array[Short](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Char` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Char, xs: Char*): Array[Char] = { + val array = new Array[Char](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Int` objects */ + // Subject to a compiler optimization in Cleanup, see above. 
+ def apply(x: Int, xs: Int*): Array[Int] = { + val array = new Array[Int](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Long` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Long, xs: Long*): Array[Long] = { + val array = new Array[Long](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Float` objects */ + // Subject to a compiler optimization in Cleanup, see above. + def apply(x: Float, xs: Float*): Array[Float] = { + val array = new Array[Float](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Double` objects */ + // Subject to a compiler optimization in Cleanup, see above. 
+ def apply(x: Double, xs: Double*): Array[Double] = { + val array = new Array[Double](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates an array of `Unit` objects */ + def apply(x: Unit, xs: Unit*): Array[Unit] = { + val array = new Array[Unit](xs.length + 1) + array(0) = x + val iterator = xs.iterator + var i = 1 + while (iterator.hasNext) { + array(i) = iterator.next(); i += 1 + } + array + } + + /** Creates array with given dimensions */ + def ofDim[T: ClassTag](n1: Int): Array[T] = + new Array[T](n1) + /** Creates a 2-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int): Array[Array[T]] = { + val arr: Array[Array[T]] = (new Array[Array[T]](n1): Array[Array[T]]) + for (i <- 0 until n1) arr(i) = new Array[T](n2) + arr + // tabulate(n1)(_ => ofDim[T](n2)) + } + /** Creates a 3-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int): Array[Array[Array[T]]] = + tabulate(n1)(_ => ofDim[T](n2, n3)) + /** Creates a 4-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int): Array[Array[Array[Array[T]]]] = + tabulate(n1)(_ => ofDim[T](n2, n3, n4)) + /** Creates a 5-dimensional array */ + def ofDim[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(_ => ofDim[T](n2, n3, n4, n5)) + + /** Concatenates all arrays into a single array. + * + * @param xss the given arrays + * @return the array created from concatenating `xss` + */ + def concat[T: ClassTag](xss: Array[T]*): Array[T] = { + val b = newBuilder[T] + b.sizeHint(xss.map(_.length).sum) + for (xs <- xss) b ++= xs + b.result() + } + + /** Returns an array that contains the results of some element computation a number + * of times. 
+ * + * Note that this means that `elem` is computed a total of n times: + * {{{ + * scala> Array.fill(3){ math.random } + * res3: Array[Double] = Array(0.365461167592537, 1.550395944913685E-4, 0.7907242137333306) + * }}} + * + * @param n the number of elements desired + * @param elem the element computation + * @return an Array of size n, where each element contains the result of computing + * `elem`. + */ + def fill[T: ClassTag](n: Int)(elem: => T): Array[T] = { + if (n <= 0) { + empty[T] + } else { + val array = new Array[T](n) + var i = 0 + while (i < n) { + array(i) = elem + i += 1 + } + array + } + } + + /** Returns a two-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int)(elem: => T): Array[Array[T]] = + tabulate(n1)(_ => fill(n2)(elem)) + + /** Returns a three-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int)(elem: => T): Array[Array[Array[T]]] = + tabulate(n1)(_ => fill(n2, n3)(elem)) + + /** Returns a four-dimensional array that contains the results of some element + * computation a number of times. 
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(elem: => T): Array[Array[Array[Array[T]]]] = + tabulate(n1)(_ => fill(n2, n3, n4)(elem)) + + /** Returns a five-dimensional array that contains the results of some element + * computation a number of times. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param elem the element computation + */ + def fill[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(elem: => T): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(_ => fill(n2, n3, n4, n5)(elem)) + + /** Returns an array containing values of a given function over a range of integer + * values starting from 0. + * + * @param n The number of elements in the array + * @param f The function computing element values + * @return An `Array` consisting of elements `f(0),f(1), ..., f(n - 1)` + */ + def tabulate[T: ClassTag](n: Int)(f: Int => T): Array[T] = { + if (n <= 0) { + empty[T] + } else { + val array = new Array[T](n) + var i = 0 + while (i < n) { + array(i) = f(i) + i += 1 + } + array + } + } + + /** Returns a two-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. 
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int)(f: (Int, Int) => T): Array[Array[T]] = + tabulate(n1)(i1 => tabulate(n2)(f(i1, _))) + + /** Returns a three-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int)(f: (Int, Int, Int) => T): Array[Array[Array[T]]] = + tabulate(n1)(i1 => tabulate(n2, n3)(f(i1, _, _))) + + /** Returns a four-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. + * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int)(f: (Int, Int, Int, Int) => T): Array[Array[Array[Array[T]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4)(f(i1, _, _, _))) + + /** Returns a five-dimensional array containing values of a given function + * over ranges of integer values starting from `0`. 
+ * + * @param n1 the number of elements in the 1st dimension + * @param n2 the number of elements in the 2nd dimension + * @param n3 the number of elements in the 3rd dimension + * @param n4 the number of elements in the 4th dimension + * @param n5 the number of elements in the 5th dimension + * @param f The function computing element values + */ + def tabulate[T: ClassTag](n1: Int, n2: Int, n3: Int, n4: Int, n5: Int)(f: (Int, Int, Int, Int, Int) => T): Array[Array[Array[Array[Array[T]]]]] = + tabulate(n1)(i1 => tabulate(n2, n3, n4, n5)(f(i1, _, _, _, _))) + + /** Returns an array containing a sequence of increasing integers in a range. + * + * @param start the start value of the array + * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) + * @return the array with values in range `start, start + 1, ..., end - 1` + * up to, but excluding, `end`. + */ + def range(start: Int, end: Int): Array[Int] = range(start, end, 1) + + /** Returns an array containing equally spaced values in some integer interval. + * + * @param start the start value of the array + * @param end the end value of the array, exclusive (in other words, this is the first value '''not''' returned) + * @param step the increment value of the array (may not be zero) + * @return the array with values in `start, start + step, ...` up to, but excluding `end` + */ + def range(start: Int, end: Int, step: Int): Array[Int] = { + if (step == 0) throw new IllegalArgumentException("zero step") + val array = new Array[Int](immutable.Range.count(start, end, step, isInclusive = false)) + + var n = 0 + var i = start + while (if (step < 0) end < i else i < end) { + array(n) = i + i += step + n += 1 + } + array + } + + /** Returns an array containing repeated applications of a function to a start value. 
+ * + * @param start the start value of the array + * @param len the number of elements returned by the array + * @param f the function that is repeatedly applied + * @return the array returning `len` values in the sequence `start, f(start), f(f(start)), ...` + */ + def iterate[T: ClassTag](start: T, len: Int)(f: T => T): Array[T] = { + if (len > 0) { + val array = new Array[T](len) + var acc = start + var i = 1 + array(0) = acc + + while (i < len) { + acc = f(acc) + array(i) = acc + i += 1 + } + array + } else { + empty[T] + } + } + + /** Compare two arrays per element. + * + * A more efficient version of `xs.sameElements(ys)`. + * + * Note that arrays are invariant in Scala, but it may + * be sound to cast an array of arbitrary reference type + * to `Array[AnyRef]`. Arrays on the JVM are covariant + * in their element type. + * + * `Array.equals(xs.asInstanceOf[Array[AnyRef]], ys.asInstanceOf[Array[AnyRef]])` + * + * @param xs an array of AnyRef + * @param ys an array of AnyRef + * @return true if corresponding elements are equal + */ + def equals(xs: Array[AnyRef], ys: Array[AnyRef]): Boolean = + (xs eq ys) || + (xs.length == ys.length) && { + var i = 0 + while (i < xs.length && xs(i) == ys(i)) i += 1 + i >= xs.length + } + + /** Called in a pattern match like `{ case Array(x,y,z) => println('3 elements')}`. 
+ * + * @param x the selector value + * @return sequence wrapped in a [[scala.Some]], if `x` is an Array, otherwise `None` + */ + def unapplySeq[T](x: Array[T]): UnapplySeqWrapper[T] = new UnapplySeqWrapper(x) + + final class UnapplySeqWrapper[T](private val a: Array[T]) extends AnyVal { + def isEmpty: false = false + def get: UnapplySeqWrapper[T] = this + def lengthCompare(len: Int): Int = a.lengthCompare(len) + def apply(i: Int): T = a(i) + def drop(n: Int): scala.Seq[T] = ArraySeq.unsafeWrapArray(a.drop(n)) // clones the array, also if n == 0 + def toSeq: scala.Seq[T] = a.toSeq // clones the array + } +} + +/** Arrays are mutable, indexed collections of values. `Array[T]` is Scala's representation + * for Java's `T[]`. + * + * {{{ + * val numbers = Array(1, 2, 3, 4) + * val first = numbers(0) // read the first element + * numbers(3) = 100 // replace the 4th array element with 100 + * val biggerNumbers = numbers.map(_ * 2) // multiply all numbers by two + * }}} + * + * Arrays make use of two common pieces of Scala syntactic sugar, shown on lines 2 and 3 of the above + * example code. + * Line 2 is translated into a call to `apply(Int)`, while line 3 is translated into a call to + * `update(Int, T)`. + * + * Two implicit conversions exist in [[scala.Predef]] that are frequently applied to arrays: a conversion + * to [[scala.collection.ArrayOps]] (shown on line 4 of the example above) and a conversion + * to [[scala.collection.mutable.ArraySeq]] (a subtype of [[scala.collection.Seq]]). + * Both types make available many of the standard operations found in the Scala collections API. + * The conversion to `ArrayOps` is temporary, as all operations defined on `ArrayOps` return an `Array`, + * while the conversion to `ArraySeq` is permanent as all operations return a `ArraySeq`. + * + * The conversion to `ArrayOps` takes priority over the conversion to `ArraySeq`. 
For instance, + * consider the following code: + * + * {{{ + * val arr = Array(1, 2, 3) + * val arrReversed = arr.reverse + * val seqReversed : collection.Seq[Int] = arr.reverse + * }}} + * + * Value `arrReversed` will be of type `Array[Int]`, with an implicit conversion to `ArrayOps` occurring + * to perform the `reverse` operation. The value of `seqReversed`, on the other hand, will be computed + * by converting to `ArraySeq` first and invoking the variant of `reverse` that returns another + * `ArraySeq`. + * + * @see [[https://www.scala-lang.org/files/archive/spec/2.13/ Scala Language Specification]], for in-depth information on the transformations the Scala compiler makes on Arrays (Sections 6.6 and 6.15 respectively.) + * @see [[https://docs.scala-lang.org/sips/scala-2-8-arrays.html "Scala 2.8 Arrays"]] the Scala Improvement Document detailing arrays since Scala 2.8. + * @see [[https://docs.scala-lang.org/overviews/collections-2.13/arrays.html "The Scala 2.8 Collections' API"]] section on `Array` by Martin Odersky for more information. 
+ * @hideImplicitConversion scala.Predef.booleanArrayOps + * @hideImplicitConversion scala.Predef.byteArrayOps + * @hideImplicitConversion scala.Predef.charArrayOps + * @hideImplicitConversion scala.Predef.doubleArrayOps + * @hideImplicitConversion scala.Predef.floatArrayOps + * @hideImplicitConversion scala.Predef.intArrayOps + * @hideImplicitConversion scala.Predef.longArrayOps + * @hideImplicitConversion scala.Predef.refArrayOps + * @hideImplicitConversion scala.Predef.shortArrayOps + * @hideImplicitConversion scala.Predef.unitArrayOps + * @hideImplicitConversion scala.LowPriorityImplicits.wrapRefArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapIntArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapDoubleArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapLongArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapFloatArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapCharArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapByteArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapShortArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapBooleanArray + * @hideImplicitConversion scala.LowPriorityImplicits.wrapUnitArray + * @hideImplicitConversion scala.LowPriorityImplicits.genericWrapArray + * @define coll array + * @define Coll `Array` + * @define orderDependent + * @define orderDependentFold + * @define mayNotTerminateInf + * @define willNotTerminateInf + * @define collectExample + * @define undefinedorder + */ +final class Array[T](_length: Int) extends java.io.Serializable with java.lang.Cloneable { + + /** The length of the array */ + def length: Int = throw new Error() + + /** The element at given index. + * + * Indices start at `0`; `xs.apply(0)` is the first element of array `xs`. + * Note the indexing syntax `xs(i)` is a shorthand for `xs.apply(i)`. 
+ * + * @param i the index + * @return the element at the given index + * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` + */ + def apply(i: Int): T = throw new Error() + + /** Update the element at given index. + * + * Indices start at `0`; `xs.update(i, x)` replaces the i^th^ element in the array. + * Note the syntax `xs(i) = x` is a shorthand for `xs.update(i, x)`. + * + * @param i the index + * @param x the value to be written at index `i` + * @throws ArrayIndexOutOfBoundsException if `i < 0` or `length <= i` + */ + def update(i: Int, x: T): Unit = { throw new Error() } + + /** Clone the Array. + * + * @return A clone of the Array. + */ + override def clone(): Array[T] = throw new Error() +} diff --git a/scala2-library-cc/src/scala/collection/ArrayOps.scala b/scala2-library-cc/src/scala/collection/ArrayOps.scala index e8548c12751f..72ec66a0bc86 100644 --- a/scala2-library-cc/src/scala/collection/ArrayOps.scala +++ b/scala2-library-cc/src/scala/collection/ArrayOps.scala @@ -590,7 +590,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { val len = xs.length def boxed = if(len < ArrayOps.MaxStableSortLength) { val a = xs.clone() - Sorting.stableSort(a)(ord.asInstanceOf[Ordering[A]]) + Sorting.stableSort(a)(using ord.asInstanceOf[Ordering[A]]) a } else { val a = Array.copyAs[AnyRef](xs, len)(ClassTag.AnyRef) @@ -1300,7 +1300,7 @@ final class ArrayOps[A](private val xs: Array[A]) extends AnyVal { val bb = new ArrayBuilder.ofRef[Array[B]]()(ClassTag[Array[B]](aClass)) if (xs.length == 0) bb.result() else { - def mkRowBuilder() = ArrayBuilder.make[B](ClassTag[B](aClass.getComponentType)) + def mkRowBuilder() = ArrayBuilder.make[B](using ClassTag[B](aClass.getComponentType)) val bs = new ArrayOps(asArray(xs(0))).map((x: B) => mkRowBuilder()) for (xs <- this) { var i = 0 diff --git a/scala2-library-cc/src/scala/collection/Factory.scala b/scala2-library-cc/src/scala/collection/Factory.scala index 99f584b972fc..96f39bafc905 100644 
--- a/scala2-library-cc/src/scala/collection/Factory.scala +++ b/scala2-library-cc/src/scala/collection/Factory.scala @@ -675,16 +675,16 @@ object ClassTagIterableFactory { * sound depending on the use of the `ClassTag` by the collection implementation. */ @SerialVersionUID(3L) class AnyIterableDelegate[CC[_]](delegate: ClassTagIterableFactory[CC]) extends IterableFactory[CC] { - def empty[A]: CC[A] = delegate.empty(ClassTag.Any).asInstanceOf[CC[A]] - def from[A](it: IterableOnce[A]^): CC[A] = delegate.from[Any](it)(ClassTag.Any).asInstanceOf[CC[A]] - def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] - override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(ClassTag.Any).asInstanceOf[CC[A]] - override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = delegate.unfold[A, S](init)(f)(ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def range[A](start: A, end: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def range[A](start: A, end: A, step: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end, step)(i, ClassTag.Any.asInstanceOf[ClassTag[A]]) - override def fill[A](n: Int)(elem: => A): CC[A] = delegate.fill[Any](n)(elem)(ClassTag.Any).asInstanceOf[CC[A]] - override def tabulate[A](n: Int)(f: Int => A): CC[A] = delegate.tabulate[Any](n)(f)(ClassTag.Any).asInstanceOf[CC[A]] + def empty[A]: CC[A] = delegate.empty(using ClassTag.Any).asInstanceOf[CC[A]] + def from[A](it: IterableOnce[A]^): CC[A] = delegate.from[Any](it)(using ClassTag.Any).asInstanceOf[CC[A]] + def newBuilder[A]: Builder[A, CC[A]] = delegate.newBuilder(using ClassTag.Any).asInstanceOf[Builder[A, CC[A]]] + override def apply[A](elems: A*): CC[A] = delegate.apply[Any](elems: _*)(using 
ClassTag.Any).asInstanceOf[CC[A]] + override def iterate[A](start: A, len: Int)(f: A => A): CC[A] = delegate.iterate[A](start, len)(f)(using ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def unfold[A, S](init: S)(f: S => Option[(A, S)]): CC[A] = delegate.unfold[A, S](init)(f)(using ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end)(using i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def range[A](start: A, end: A, step: A)(implicit i: Integral[A]): CC[A] = delegate.range[A](start, end, step)(using i, ClassTag.Any.asInstanceOf[ClassTag[A]]) + override def fill[A](n: Int)(elem: => A): CC[A] = delegate.fill[Any](n)(elem)(using ClassTag.Any).asInstanceOf[CC[A]] + override def tabulate[A](n: Int)(f: Int => A): CC[A] = delegate.tabulate[Any](n)(f)(using ClassTag.Any).asInstanceOf[CC[A]] } } diff --git a/scala2-library-cc/src/scala/collection/Iterable.scala b/scala2-library-cc/src/scala/collection/Iterable.scala index 5afc14f4ceef..6556f31d378d 100644 --- a/scala2-library-cc/src/scala/collection/Iterable.scala +++ b/scala2-library-cc/src/scala/collection/Iterable.scala @@ -985,9 +985,9 @@ trait SortedSetFactoryDefaults[+A, +WithFilterCC[x] <: IterableOps[x, WithFilterCC, WithFilterCC[x]] with Set[x]] extends SortedSetOps[A @uncheckedVariance, CC, CC[A @uncheckedVariance]] { self: IterableOps[A, WithFilterCC, _] => - override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = sortedIterableFactory.from(coll)(ordering) - override protected def newSpecificBuilder: mutable.Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = sortedIterableFactory.newBuilder[A](ordering) - override def empty: CC[A @uncheckedVariance] = sortedIterableFactory.empty(ordering) + override protected def fromSpecific(coll: IterableOnce[A @uncheckedVariance]^): CC[A @uncheckedVariance]^{coll} = sortedIterableFactory.from(coll)(using 
ordering) + override protected def newSpecificBuilder: mutable.Builder[A @uncheckedVariance, CC[A @uncheckedVariance]] = sortedIterableFactory.newBuilder[A](using ordering) + override def empty: CC[A @uncheckedVariance] = sortedIterableFactory.empty(using ordering) override def withFilter(p: A => Boolean): SortedSetOps.WithFilter[A, WithFilterCC, CC]^{p} = new SortedSetOps.WithFilter[A, WithFilterCC, CC](this, p) @@ -1040,9 +1040,9 @@ trait SortedMapFactoryDefaults[K, +V, +UnsortedCC[x, y] <: Map[x, y]] extends SortedMapOps[K, V, CC, CC[K, V @uncheckedVariance]] with MapOps[K, V, UnsortedCC, CC[K, V @uncheckedVariance]] { self: IterableOps[(K, V), WithFilterCC, _] => - override def empty: CC[K, V @uncheckedVariance] = sortedMapFactory.empty(ordering) - override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]^): CC[K, V @uncheckedVariance]^{coll} = sortedMapFactory.from(coll)(ordering) - override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = sortedMapFactory.newBuilder[K, V](ordering) + override def empty: CC[K, V @uncheckedVariance] = sortedMapFactory.empty(using ordering) + override protected def fromSpecific(coll: IterableOnce[(K, V @uncheckedVariance)]^): CC[K, V @uncheckedVariance]^{coll} = sortedMapFactory.from(coll)(using ordering) + override protected def newSpecificBuilder: mutable.Builder[(K, V @uncheckedVariance), CC[K, V @uncheckedVariance]] = sortedMapFactory.newBuilder[K, V](using ordering) override def withFilter(p: ((K, V)) => Boolean): collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC]^{p} = new collection.SortedMapOps.WithFilter[K, V, WithFilterCC, UnsortedCC, CC](this, p) diff --git a/scala2-library-cc/src/scala/collection/SortedMap.scala b/scala2-library-cc/src/scala/collection/SortedMap.scala index 7b9381ebb078..876a83b2709c 100644 --- a/scala2-library-cc/src/scala/collection/SortedMap.scala +++ 
b/scala2-library-cc/src/scala/collection/SortedMap.scala @@ -181,16 +181,16 @@ trait SortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], override def concat[V2 >: V](suffix: IterableOnce[(K, V2)]^): CC[K, V2] = sortedMapFactory.from(suffix match { case it: Iterable[(K, V2)] => new View.Concat(this, it) case _ => iterator.concat(suffix.iterator) - })(ordering) + })(using ordering) /** Alias for `concat` */ @`inline` override final def ++ [V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = concat(xs) @deprecated("Consider requiring an immutable Map or fall back to Map.concat", "2.13.0") - override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(ordering) + override def + [V1 >: V](kv: (K, V1)): CC[K, V1] = sortedMapFactory.from(new View.Appended(this, kv))(using ordering) @deprecated("Use ++ with an explicit collection argument instead of + with varargs", "2.13.0") - override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(ordering) + override def + [V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): CC[K, V1] = sortedMapFactory.from(new View.Concat(new View.Appended(new View.Appended(this, elem1), elem2), elems))(using ordering) } object SortedMapOps { diff --git a/scala2-library-cc/src/scala/collection/StrictOptimizedSortedMapOps.scala b/scala2-library-cc/src/scala/collection/StrictOptimizedSortedMapOps.scala index 9a9e6e367922..411a86c7cc5c 100644 --- a/scala2-library-cc/src/scala/collection/StrictOptimizedSortedMapOps.scala +++ b/scala2-library-cc/src/scala/collection/StrictOptimizedSortedMapOps.scala @@ -34,7 +34,7 @@ trait StrictOptimizedSortedMapOps[K, +V, +CC[X, Y] <: Map[X, Y] with SortedMapOp strictOptimizedFlatMap(sortedMapFactory.newBuilder, f) override def concat[V2 >: V](xs: IterableOnce[(K, V2)]^): CC[K, V2] = - strictOptimizedConcat(xs, 
sortedMapFactory.newBuilder(ordering)) + strictOptimizedConcat(xs, sortedMapFactory.newBuilder(using ordering)) override def collect[K2, V2](pf: PartialFunction[(K, V), (K2, V2)])(implicit @implicitNotFound(SortedMapOps.ordMsg) ordering: Ordering[K2]): CC[K2, V2] = strictOptimizedCollect(sortedMapFactory.newBuilder, pf) diff --git a/scala2-library-cc/src/scala/collection/generic/DefaultSerializationProxy.scala b/scala2-library-cc/src/scala/collection/generic/DefaultSerializationProxy.scala index e36bb77ebdb8..1f0e6164731c 100644 --- a/scala2-library-cc/src/scala/collection/generic/DefaultSerializationProxy.scala +++ b/scala2-library-cc/src/scala/collection/generic/DefaultSerializationProxy.scala @@ -78,9 +78,9 @@ private[collection] case object SerializeEnd trait DefaultSerializable extends Serializable { this: scala.collection.Iterable[_] => protected[this] def writeReplace(): AnyRef = { val f: Factory[Any, Any] = this match { - case it: scala.collection.SortedMap[_, _] => it.sortedMapFactory.sortedMapFactory[Any, Any](it.ordering.asInstanceOf[Ordering[Any]]).asInstanceOf[Factory[Any, Any]] + case it: scala.collection.SortedMap[_, _] => it.sortedMapFactory.sortedMapFactory[Any, Any](using it.ordering.asInstanceOf[Ordering[Any]]).asInstanceOf[Factory[Any, Any]] case it: scala.collection.Map[_, _] => it.mapFactory.mapFactory[Any, Any].asInstanceOf[Factory[Any, Any]] - case it: scala.collection.SortedSet[_] => it.sortedIterableFactory.evidenceIterableFactory[Any](it.ordering.asInstanceOf[Ordering[Any]]) + case it: scala.collection.SortedSet[_] => it.sortedIterableFactory.evidenceIterableFactory[Any](using it.ordering.asInstanceOf[Ordering[Any]]) case it => it.iterableFactory.iterableFactory } new DefaultSerializationProxy(f, this) diff --git a/scala2-library-cc/src/scala/collection/mutable/ArraySeq.scala b/scala2-library-cc/src/scala/collection/mutable/ArraySeq.scala index 70762e5b340d..d1c5b5c9ce72 100644 --- 
a/scala2-library-cc/src/scala/collection/mutable/ArraySeq.scala +++ b/scala2-library-cc/src/scala/collection/mutable/ArraySeq.scala @@ -46,15 +46,15 @@ sealed abstract class ArraySeq[T] override def iterableFactory: scala.collection.SeqFactory[ArraySeq] = ArraySeq.untagged override protected def fromSpecific(coll: scala.collection.IterableOnce[T]^): ArraySeq[T] = { - val b = ArrayBuilder.make(elemTag).asInstanceOf[ArrayBuilder[T]] + val b = ArrayBuilder.make(using elemTag).asInstanceOf[ArrayBuilder[T]] val s = coll.knownSize if(s > 0) b.sizeHint(s) b ++= coll ArraySeq.make(b.result()) } override protected def newSpecificBuilder: Builder[T, ArraySeq[T]] = - ArraySeq.newBuilder[T](elemTag.asInstanceOf[ClassTag[T]]).asInstanceOf[Builder[T, ArraySeq[T]]] - override def empty: ArraySeq[T] = ArraySeq.empty(elemTag.asInstanceOf[ClassTag[T]]) + ArraySeq.newBuilder[T](using elemTag.asInstanceOf[ClassTag[T]]).asInstanceOf[Builder[T, ArraySeq[T]]] + override def empty: ArraySeq[T] = ArraySeq.empty(using elemTag.asInstanceOf[ClassTag[T]]) /** The tag of the element type. This does not have to be equal to the element type of this ArraySeq. 
A primitive * ArraySeq can be backed by an array of boxed values and a reference ArraySeq can be backed by an array of a supertype diff --git a/scala2-library-cc/src/scala/collection/mutable/CollisionProofHashMap.scala b/scala2-library-cc/src/scala/collection/mutable/CollisionProofHashMap.scala index ff3bab1dd818..05c3124a3323 100644 --- a/scala2-library-cc/src/scala/collection/mutable/CollisionProofHashMap.scala +++ b/scala2-library-cc/src/scala/collection/mutable/CollisionProofHashMap.scala @@ -768,7 +768,7 @@ object CollisionProofHashMap extends SortedMapFactory[CollisionProofHashMap] { @SerialVersionUID(3L) private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double, val ordering: Ordering[K]) extends Factory[(K, V), CollisionProofHashMap[K, V]] with Serializable { def fromSpecific(it: IterableOnce[(K, V)]^): CollisionProofHashMap[K, V] = new CollisionProofHashMap[K, V](tableLength, loadFactor)(ordering) ++= it - def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(ordering) + def newBuilder: Builder[(K, V), CollisionProofHashMap[K, V]] = CollisionProofHashMap.newBuilder(tableLength, loadFactor)(using ordering) } @unused @`inline` private def compare[K, V](key: K, hash: Int, node: LLNode[K, V])(implicit ord: Ordering[K]): Int = { diff --git a/scaladoc-testcases/src/tests/hugetype.scala b/scaladoc-testcases/src/tests/hugetype.scala index fe1905cb87cc..3445764e2462 100644 --- a/scaladoc-testcases/src/tests/hugetype.scala +++ b/scaladoc-testcases/src/tests/hugetype.scala @@ -31,7 +31,7 @@ trait E: @deprecated protected implicit def same[A](a: A): A -trait XD extends E: +@experimental trait XD extends E: /** * Some important information :o * diff --git a/scaladoc-testcases/src/tests/implicitConversions.scala b/scaladoc-testcases/src/tests/implicitConversions.scala index 720eab1ccb1a..c3051e653663 100644 --- a/scaladoc-testcases/src/tests/implicitConversions.scala 
+++ b/scaladoc-testcases/src/tests/implicitConversions.scala @@ -6,7 +6,9 @@ given Conversion[A, B] with { def apply(a: A): B = ??? } -extension (a: A) def extended_bar(): String = ??? +extension (a: A) + @annotation.nowarn + def extended_bar(): String = ??? class A { implicit def conversion(c: C): D = ??? @@ -45,7 +47,7 @@ class B { class C { def extensionInCompanion: String = ??? } - +@annotation.nowarn // extensionInCompanion object C { implicit def companionConversion(c: C): B = ??? @@ -70,4 +72,4 @@ package nested { } class Z -} \ No newline at end of file +} diff --git a/scaladoc-testcases/src/tests/inheritedMembers1.scala b/scaladoc-testcases/src/tests/inheritedMembers1.scala index d8fa44607e5e..561e50ceaec2 100644 --- a/scaladoc-testcases/src/tests/inheritedMembers1.scala +++ b/scaladoc-testcases/src/tests/inheritedMembers1.scala @@ -2,6 +2,7 @@ package tests package inheritedMembers1 +/*<-*/@annotation.nowarn/*->*/ class A { def A: String diff --git a/scaladoc-testcases/src/tests/methodsAndConstructors.scala b/scaladoc-testcases/src/tests/methodsAndConstructors.scala index 132d35035b30..cddd0f56e9fe 100644 --- a/scaladoc-testcases/src/tests/methodsAndConstructors.scala +++ b/scaladoc-testcases/src/tests/methodsAndConstructors.scala @@ -1,5 +1,7 @@ package tests.methodsAndConstructors +import scala.language.experimental.clauseInterleaving + class A class B extends A class C @@ -60,8 +62,6 @@ class Methods: def withImplicitParam2(v: String)(implicit ab: Double, a: Int, b: String): String = ??? - import scala.language.experimental.clauseInterleaving - def clauseInterleaving[T](x: T)[U](y: U)(using (T, U)): (T, U) = ??? 
diff --git a/scaladoc-testcases/src/tests/refinedFunctionTypes.scala b/scaladoc-testcases/src/tests/refinedFunctionTypes.scala new file mode 100644 index 000000000000..d978a0ea2264 --- /dev/null +++ b/scaladoc-testcases/src/tests/refinedFunctionTypes.scala @@ -0,0 +1,37 @@ +package tests +package refinedFunctionTypes + +import annotation.experimental + +@experimental +infix type $throws[R, +E <: Exception] = CanThrow[E] ?=> R + +@experimental +infix type $throws2[+E <: Exception] = (c: CanThrow[E]) ?=> c.type + +@experimental +infix type $throws3[+E <: Exception] = [T] => (c: CanThrow[E]) ?=> c.type + +@experimental +infix type $throws4[+E <: Exception] = [T] => (c: CanThrow[E]) ?=> T //expected: infix type $throws4[+E <: Exception] = [T] => CanThrow[E] ?=> T + +type TA1 = (a: Int, b: (Boolean, String)) => List[(a.type, b.type)] + +type TA2 = (a: Int, b: (Boolean, String)) ?=> List[Boolean] + +@experimental +type TB0 = [R, E <: Exception] =>> PolyFunction { def apply[T](c: CanThrow[E]): R; } //expected: type TB0[R, E <: Exception] = [T] => CanThrow[E] => R + +@experimental +type TB1 = [R, E <: Exception] =>> PolyFunction { def apply[T](c: CanThrow[E], y: c.type): R; } //expected: type TB1[R, E <: Exception] = [T] => (c: CanThrow[E], y: c.type) => R + +@experimental +type TB2 = [R, E <: Exception] =>> PolyFunction { def apply[T](using c: CanThrow[E]): c.type; } //expected: type TB2[R, E <: Exception] = [T] => (c: CanThrow[E]) ?=> c.type + +type TC1 = [T] => (a: T) => T //expected: type TC1 = [T] => T => T + +type TC2 = [T] => (a: T) ?=> T //expected: type TC2 = [T] => T ?=> T + +type TC3 = [T] => (a: T) => a.type + +type TC4 = [T] => (a: T) ?=> a.type diff --git a/scaladoc-testcases/src/tests/thisType.scala b/scaladoc-testcases/src/tests/thisType.scala index 942e50af86ec..28cb55fcc49e 100644 --- a/scaladoc-testcases/src/tests/thisType.scala +++ b/scaladoc-testcases/src/tests/thisType.scala @@ -4,5 +4,5 @@ package thisType // issue 16024 class X[Map[_, _[_]]]: - 
inline def map[F[_]](f: [t] => t => F[t]): Map[this.type, F] = //expected: inline def map[F[_]](f: [t] => (x$1: t) => F[t]): Map[this.type, F] - ??? + inline def map[F[_]](f: [t] => t => F[t]): Map[this.type, F] + = ??? diff --git a/scaladoc-testcases/src/tests/typesSignatures.scala b/scaladoc-testcases/src/tests/typesSignatures.scala index e7a29ad8c8e1..5b6281ec7cb5 100644 --- a/scaladoc-testcases/src/tests/typesSignatures.scala +++ b/scaladoc-testcases/src/tests/typesSignatures.scala @@ -28,7 +28,7 @@ class Base // Tests do not support multiline signatures type Elem[X] = X match { case String => Char case Array[t] => t case Iterable[t] => t } - type F = [X] => (x: X) => List[X] + type F = [X] => (x: X) => List[X] //expected: type F = [X] => X => List[X] type G = Int => Int diff --git a/scaladoc/resources/dotty_res/styles/theme/typography.css b/scaladoc/resources/dotty_res/styles/theme/typography.css index cd8730f31dc2..3e75847a3e28 100644 --- a/scaladoc/resources/dotty_res/styles/theme/typography.css +++ b/scaladoc/resources/dotty_res/styles/theme/typography.css @@ -1,4 +1,4 @@ -* { +h1, h2, h3, h4, h5, h6 { /*text-rendering: geometricPrecision;*/ font-weight: initial; } diff --git a/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala b/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala index 7f64ce92ffc8..71b0a1b572ac 100644 --- a/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala +++ b/scaladoc/src/dotty/tools/scaladoc/renderers/SiteRenderer.scala @@ -13,6 +13,7 @@ import java.nio.file.Path import java.nio.file.Files import java.io.File import scala.util.chaining._ +import dotty.tools.scaladoc.util.Escape.escapeFilename case class ResolvedTemplate(template: LoadedTemplate, ctx: StaticSiteContext): val resolved = template.resolveToHtml(ctx) @@ -55,11 +56,16 @@ trait SiteRenderer(using DocContext) extends Locations: val staticSiteRootPath = content.ctx.root.toPath.toAbsolutePath def asValidURL: Option[String] = 
Try(URI(str).toURL).toOption.map(_ => str) def asAsset: Option[String] = Option.when( - Files.exists(staticSiteRootPath.resolve("_assets").resolve(str.stripPrefix("/"))) + Try( + Files.exists(staticSiteRootPath.resolve("_assets").resolve(str.stripPrefix("/"))) + ).getOrElse(false) )( resolveLink(pageDri, str.stripPrefix("/")) ) - def asStaticSite: Option[String] = tryAsDriPlain(str).orElse(tryAsDri(str)) + def asStaticSite: Option[String] = + tryAsDriPlain(str) + .orElse(tryAsDri(str)) + .orElse(tryAsDriPlain(escapeFilename(str))) /* Link resolving checks performs multiple strategies with following priority: 1. We check if the link is a valid URL e.g. http://dotty.epfl.ch diff --git a/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteContext.scala b/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteContext.scala index 7a90a462cba0..a610e41f12f0 100644 --- a/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteContext.scala +++ b/scaladoc/src/dotty/tools/scaladoc/site/StaticSiteContext.scala @@ -7,9 +7,8 @@ import java.nio.file.FileVisitOption import java.nio.file.Path import java.nio.file.Paths -import scala.util.Try import scala.jdk.CollectionConverters._ -import scala.annotation.static +import scala.util.control.NonFatal class StaticSiteContext( val root: File, @@ -75,10 +74,13 @@ class StaticSiteContext( val templateSourceLocation = staticSiteRoot.reverseSiteMappings.get(templateDestLocation) // Check if link is relative or absolute - if link.startsWith("/") - then Seq(root.toPath.resolve(link.drop(1))) - else Seq(templateDestLocation.getParent.resolve(link).normalize) ++ - templateSourceLocation.map(_.getParent.resolve(link).normalize) + try + if link.startsWith("/") + then Seq(root.toPath.resolve(link.drop(1))) + else Seq(templateDestLocation.getParent.resolve(link).normalize) ++ + templateSourceLocation.map(_.getParent.resolve(link).normalize) + catch + case NonFatal(_) => Seq.empty // Try to strip site extension and create all possible file paths val fileNames = 
if siteExtensions.exists(link.endsWith(_)) diff --git a/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompiler.scala b/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompiler.scala index b47b15676c57..1648dbe2917b 100644 --- a/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompiler.scala +++ b/scaladoc/src/dotty/tools/scaladoc/snippets/SnippetCompiler.scala @@ -27,11 +27,10 @@ class SnippetCompiler( object SnippetDriver extends Driver: val currentCtx = val rootCtx = initCtx.fresh.addMode(Mode.ReadPositions).addMode(Mode.Interactive) - rootCtx.setSetting(rootCtx.settings.YnoExperimental, true) rootCtx.setSetting(rootCtx.settings.experimental, true) rootCtx.setSetting(rootCtx.settings.YretainTrees, true) - rootCtx.setSetting(rootCtx.settings.YcookComments, true) - rootCtx.setSetting(rootCtx.settings.YreadComments, true) + rootCtx.setSetting(rootCtx.settings.XcookComments, true) + rootCtx.setSetting(rootCtx.settings.XreadComments, true) rootCtx.setSetting(rootCtx.settings.color, "never") rootCtx.setSetting(rootCtx.settings.XimportSuggestionTimeout, 0) diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala index 1598accf4f40..88d57cdb9853 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/ClassLikeSupport.scala @@ -235,7 +235,7 @@ trait ClassLikeSupport: extension (c: ClassDef) def extractMembers: Seq[Member] = { val inherited = c.getNonTrivialInheritedMemberTrees.collect { - case dd: DefDef if !dd.symbol.isClassConstructor && !(dd.symbol.isSuperBridgeMethod || dd.symbol.isDefaultHelperMethod) => dd + case dd: DefDef if !dd.symbol.isClassConstructor && !(dd.symbol.isSuperAccessor || dd.symbol.isDefaultHelperMethod) => dd case other => other } c.membersToDocument.flatMap(parseMember(c)) ++ diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/SyntheticSupport.scala 
b/scaladoc/src/dotty/tools/scaladoc/tasty/SyntheticSupport.scala index b33d5f61faac..39c1a5d8afd5 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/SyntheticSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/SyntheticSupport.scala @@ -10,8 +10,6 @@ object SyntheticsSupport: import reflect._ s.flags.is(Flags.Synthetic) || s.flags.is(Flags.FieldAccessor) || s.isDefaultHelperMethod - def isSuperBridgeMethod: Boolean = s.name.contains("$super$") - def isDefaultHelperMethod: Boolean = ".*\\$default\\$\\d+$".r.matches(s.name) def isOpaque: Boolean = diff --git a/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala b/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala index 373a26dd0297..855678a091d2 100644 --- a/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala +++ b/scaladoc/src/dotty/tools/scaladoc/tasty/TypesSupport.scala @@ -4,6 +4,7 @@ package tasty import scala.jdk.CollectionConverters._ import scala.quoted._ +import scala.util.control.NonFatal import NameNormalizer._ import SyntheticsSupport._ @@ -124,6 +125,12 @@ trait TypesSupport: ++ keyword(" =>> ").l ++ inner(resType) + case Refinement(parent, "apply", mt : MethodType) if isPolyOrEreased(parent) => + val isCtx = isContextualMethod(mt) + val sym = defn.FunctionClass(mt.paramTypes.length, isCtx) + val at = sym.typeRef.appliedTo(mt.paramTypes :+ mt.resType) + inner(Refinement(at, "apply", mt)) + case r: Refinement => { //(parent, name, info) def getRefinementInformation(t: TypeRepr): List[TypeRepr] = t match { case r: Refinement => getRefinementInformation(r.parent) :+ r @@ -164,16 +171,22 @@ trait TypesSupport: case t: PolyType => val paramBounds = getParamBounds(t) val method = t.resType.asInstanceOf[MethodType] - val paramList = getParamList(method) - val resType = inner(method.resType) - plain("[").l ++ paramBounds ++ plain("]").l ++ keyword(" => ").l ++ paramList ++ keyword(" => ").l ++ resType + val rest = parseDependentFunctionType(method) + plain("[").l ++ paramBounds ++ 
plain("]").l ++ keyword(" => ").l ++ rest case other => noSupported(s"Not supported type in refinement $info") } def parseDependentFunctionType(info: TypeRepr): SSignature = info match { case m: MethodType => - val paramList = getParamList(m) - paramList ++ keyword(" => ").l ++ inner(m.resType) + val isCtx = isContextualMethod(m) + if isDependentMethod(m) then + val paramList = getParamList(m) + val arrow = keyword(if isCtx then " ?=> " else " => ").l + val resType = inner(m.resType) + paramList ++ arrow ++ resType + else + val sym = defn.FunctionClass(m.paramTypes.length, isCtx) + inner(sym.typeRef.appliedTo(m.paramTypes :+ m.resType)) case other => noSupported("Dependent function type without MethodType refinement") } @@ -213,8 +226,9 @@ trait TypesSupport: case Seq(rtpe) => plain("()").l ++ keyword(arrow).l ++ inner(rtpe) case Seq(arg, rtpe) => - val partOfSignature = arg match + val partOfSignature = stripAnnotated(arg) match case _: TermRef | _: TypeRef | _: ConstantType | _: ParamRef => inner(arg) + case at: AppliedType if !isInfix(at) && !at.isFunctionType && !at.isTupleN => inner(arg) case _ => inParens(inner(arg)) partOfSignature ++ keyword(arrow).l ++ inner(rtpe) case args => @@ -385,3 +399,21 @@ trait TypesSupport: case _ => false at.args.size == 2 && (!at.typeSymbol.name.forall(isIdentifierPart) || infixAnnot) + + private def isPolyOrEreased(using Quotes)(tr: reflect.TypeRepr) = + Set("scala.PolyFunction", "scala.runtime.ErasedFunction") + .contains(tr.typeSymbol.fullName) + + private def isContextualMethod(using Quotes)(mt: reflect.MethodType) = + mt.asInstanceOf[dotty.tools.dotc.core.Types.MethodType].isContextualMethod + + private def isDependentMethod(using Quotes)(mt: reflect.MethodType) = + val method = mt.asInstanceOf[dotty.tools.dotc.core.Types.MethodType] + try method.isParamDependent || method.isResultDependent + catch case NonFatal(_) => true + + private def stripAnnotated(using Quotes)(tr: reflect.TypeRepr): reflect.TypeRepr = + import 
reflect.* + tr match + case AnnotatedType(tr, _) => stripAnnotated(tr) + case other => other diff --git a/scaladoc/src/dotty/tools/scaladoc/util/escape.scala b/scaladoc/src/dotty/tools/scaladoc/util/escape.scala index 686d384337c1..5d4bf02e8b38 100644 --- a/scaladoc/src/dotty/tools/scaladoc/util/escape.scala +++ b/scaladoc/src/dotty/tools/scaladoc/util/escape.scala @@ -5,7 +5,24 @@ object Escape: .replace("#","%23") def escapeFilename(filename: String) = + // from compiler/src/dotty/tools/dotc/util/NameTransformer.scala val escaped = filename + .replace("~", "$tilde") + .replace("=", "$eq") + .replace("<", "$less") + .replace(">", "$greater") + .replace("!", "$bang") + .replace("#", "$hash") + .replace("%", "$percent") + .replace("^", "$up") + .replace("&", "$amp") + .replace("|", "$bar") + .replace("*", "$times") .replace("/", "$div") + .replace("+", "$plus") + .replace("-", "$minus") + .replace(":", "$colon") .replace("\\", "$bslash") + .replace("?", "$qmark") + .replace("@", "$at") if escaped != filename then escaped + "$" else escaped diff --git a/scaladoc/src/scala/tasty/inspector/TastyInspector.scala b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala index 14e5f019b433..906578c9d405 100644 --- a/scaladoc/src/scala/tasty/inspector/TastyInspector.scala +++ b/scaladoc/src/scala/tasty/inspector/TastyInspector.scala @@ -52,7 +52,7 @@ object TastyInspector: def inspectAllTastyFiles(tastyFiles: List[String], jars: List[String], dependenciesClasspath: List[String])(inspector: Inspector): Boolean = def checkFile(fileName: String, ext: String): Unit = val file = dotty.tools.io.Path(fileName) - if file.extension != ext then + if !file.ext.toLowerCase.equalsIgnoreCase(ext) then throw new IllegalArgumentException(s"File extension is not `.$ext`: $file") else if !file.exists then throw new IllegalArgumentException(s"File not found: ${file.toAbsolute}") @@ -103,7 +103,7 @@ object TastyInspector: reset() val ctx2 = ctx.fresh .addMode(Mode.ReadPositions) - 
.setSetting(ctx.settings.YreadComments, true) + .setSetting(ctx.settings.XreadComments, true) new TASTYRun(this, ctx2) new InspectorDriver diff --git a/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala b/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala index 2b654d186aef..d60a4d82ff44 100644 --- a/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala +++ b/scaladoc/test/dotty/tools/scaladoc/signatures/TranslatableSignaturesTestCases.scala @@ -120,3 +120,5 @@ class MatchTypeTuple extends SignatureTest("matchTypeTuple", SignatureTest.all) class InfixTypes extends SignatureTest("infixTypes", SignatureTest.all) class ExtendsCall extends SignatureTest("extendsCall", SignatureTest.all) + +class RefinedFunctionTypes extends SignatureTest("refinedFunctionTypes", SignatureTest.all) diff --git a/staging/src/scala/quoted/staging/QuoteCompiler.scala b/staging/src/scala/quoted/staging/QuoteCompiler.scala index cf24b1de369a..dea40cd1035d 100644 --- a/staging/src/scala/quoted/staging/QuoteCompiler.scala +++ b/staging/src/scala/quoted/staging/QuoteCompiler.scala @@ -48,7 +48,9 @@ private class QuoteCompiler extends Compiler: override def newRun(implicit ctx: Context): ExprRun = reset() - new ExprRun(this, ctx.addMode(Mode.ReadPositions)) + val run = new ExprRun(this, ctx.addMode(Mode.ReadPositions)) + run.doNotEnrichErrorMessage + run def outputClassName: TypeName = "Generated$Code$From$Quoted".toTypeName diff --git a/staging/src/scala/quoted/staging/QuoteDriver.scala b/staging/src/scala/quoted/staging/QuoteDriver.scala index e894a7bc40f2..0131a56cd8aa 100644 --- a/staging/src/scala/quoted/staging/QuoteDriver.scala +++ b/staging/src/scala/quoted/staging/QuoteDriver.scala @@ -8,6 +8,7 @@ import dotty.tools.dotc.quoted.QuotesCache import dotty.tools.io.{AbstractFile, Directory, PlainDirectory, VirtualDirectory} import dotty.tools.repl.AbstractFileClassLoader import 
dotty.tools.dotc.reporting._ +import dotty.tools.dotc.config.Settings.Setting.value import dotty.tools.dotc.util.ClasspathFromClassloader import scala.quoted._ import scala.quoted.staging.Compiler @@ -40,7 +41,20 @@ private class QuoteDriver(appClassloader: ClassLoader) extends Driver: setCompilerSettings(ctx1.fresh.setSetting(ctx1.settings.outputDir, outDir), settings) } - new QuoteCompiler().newRun(ctx).compileExpr(exprBuilder) match + val compiledExpr = + try + new QuoteCompiler().newRun(ctx).compileExpr(exprBuilder) + catch case ex: dotty.tools.FatalError => + val enrichedMessage = + s"""An unhandled exception was thrown in the staging compiler. + |This might be caused by using an incorrect classloader + |when creating the `staging.Compiler` instance with `staging.Compiler.make`. + |For details, please refer to the documentation. + |For non-enriched exceptions, compile with -Xno-enrich-error-messages.""".stripMargin + if ctx.settings.XnoEnrichErrorMessages.value(using ctx) then throw ex + else throw new Exception(enrichedMessage, ex) + + compiledExpr match case Right(value) => value.asInstanceOf[T] diff --git a/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala b/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala index e70d2d4f6dc5..7c5476d35940 100644 --- a/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala +++ b/tasty-inspector/src/scala/tasty/inspector/TastyInspector.scala @@ -49,7 +49,7 @@ object TastyInspector: def inspectAllTastyFiles(tastyFiles: List[String], jars: List[String], dependenciesClasspath: List[String])(inspector: Inspector): Boolean = def checkFile(fileName: String, ext: String): Unit = val file = dotty.tools.io.Path(fileName) - if file.extension != ext then + if !file.ext.toLowerCase.equalsIgnoreCase(ext) then throw new IllegalArgumentException(s"File extension is not `.$ext`: $file") else if !file.exists then throw new IllegalArgumentException(s"File not found: ${file.toAbsolute}") @@ -100,7 +100,7 @@ 
object TastyInspector: reset() val ctx2 = ctx.fresh .addMode(Mode.ReadPositions) - .setSetting(ctx.settings.YreadComments, true) + .setSetting(ctx.settings.XreadComments, true) new TASTYRun(this, ctx2) new InspectorDriver diff --git a/tasty/src/dotty/tools/tasty/TastyFormat.scala b/tasty/src/dotty/tools/tasty/TastyFormat.scala index b5ca6f45f594..67beb1ea1d56 100644 --- a/tasty/src/dotty/tools/tasty/TastyFormat.scala +++ b/tasty/src/dotty/tools/tasty/TastyFormat.scala @@ -110,10 +110,14 @@ Standard-Section: "ASTs" TopLevelStat* WHILE Length cond_Term body_Term -- while cond do body REPEATED Length elem_Type elem_Term* -- Varargs argument of type `elem` SELECTouter Length levels_Nat qual_Term underlying_Type -- Follow `levels` outer links, starting from `qual`, with given `underlying` type + QUOTE Length body_Term bodyTpe_Type -- Quoted expression `'{ body }` of a body typed as `bodyTpe` + SPLICE Length expr_Term tpe_Type -- Spliced expression `${ expr }` typed as `tpe` + SPLICEPATTEN Length pat_Term tpe_Type targs_Type* args_Term* -- Pattern splice `${pat}` or `$pat[targs*](args*)` in a quoted pattern of type `tpe`. -- patterns: BIND Length boundName_NameRef patType_Type pat_Term -- name @ pat, wherev `patType` is the type of the bound symbol ALTERNATIVE Length alt_Term* -- alt1 | ... | altn as a pattern UNAPPLY Length fun_Term ImplicitArg* pat_Type pat_Term* -- Unapply node `fun(_: pat_Type)(implicitArgs)` flowing into patterns `pat`. 
+ QUOTEPATTERN Length body_Term quotes_Term pat_Type bindings_Term* -- Quote pattern node `'{ bindings*; body }(using quotes)` -- type trees: IDENTtpt NameRef Type -- Used for all type idents SELECTtpt NameRef qual_Term -- qual.name @@ -176,6 +180,7 @@ Standard-Section: "ASTs" TopLevelStat* ORtype Length left_Type right_Type -- lefgt | right MATCHtype Length bound_Type sel_Type case_Type* -- sel match {cases} with optional upper `bound` MATCHCASEtype Length pat_type rhs_Type -- match cases are MATCHCASEtypes or TYPELAMBDAtypes over MATCHCASEtypes + FLEXIBLEtype Length underlying_Type -- (underlying)? BIND Length boundName_NameRef bounds_Type Modifier* -- boundName @ bounds, for type-variables defined in a type pattern BYNAMEtype underlying_Type -- => underlying PARAMtype Length binder_ASTRef paramNum_Nat -- A reference to parameter # paramNum in lambda type `binder` @@ -223,6 +228,7 @@ Standard-Section: "ASTs" TopLevelStat* EXPORTED -- An export forwarder OPEN -- an open class INVISIBLE -- invisible during typechecking + TRACKED -- a tracked class parameter / a dependent class Annotation Variance = STABLE -- invariant @@ -504,6 +510,7 @@ object TastyFormat { final val INVISIBLE = 44 final val EMPTYCLAUSE = 45 final val SPLITCLAUSE = 46 + final val TRACKED = 47 // Tree Cat. 2: tag Nat final val firstNatTreeTag = SHAREDterm @@ -543,7 +550,6 @@ object TastyFormat { final val EXPLICITtpt = 103 final val ELIDED = 104 - // Tree Cat. 4: tag Nat AST final val firstNatASTTreeTag = IDENT final val IDENT = 110 @@ -609,14 +615,17 @@ object TastyFormat { final val TYPEREFin = 175 final val SELECTin = 176 final val EXPORT = 177 - // final val ??? = 178 - // final val ??? 
= 179 + final val QUOTE = 178 + final val SPLICE = 179 final val METHODtype = 180 final val APPLYsigpoly = 181 + final val QUOTEPATTERN = 182 + final val SPLICEPATTERN = 183 final val MATCHtype = 190 final val MATCHtpt = 191 final val MATCHCASEtype = 192 + final val FLEXIBLEtype = 193 final val HOLE = 255 @@ -648,7 +657,7 @@ object TastyFormat { firstNatTreeTag <= tag && tag <= RENAMED || firstASTTreeTag <= tag && tag <= BOUNDED || firstNatASTTreeTag <= tag && tag <= NAMEDARG || - firstLengthTreeTag <= tag && tag <= MATCHCASEtype || + firstLengthTreeTag <= tag && tag <= FLEXIBLEtype || tag == HOLE def isParamTag(tag: Int): Boolean = tag == PARAM || tag == TYPEPARAM @@ -693,7 +702,8 @@ object TastyFormat { | INVISIBLE | ANNOTATION | PRIVATEqualified - | PROTECTEDqualified => true + | PROTECTEDqualified + | TRACKED => true case _ => false } @@ -850,11 +860,16 @@ object TastyFormat { case MATCHCASEtype => "MATCHCASEtype" case MATCHtpt => "MATCHtpt" case PARAMtype => "PARAMtype" + case FLEXIBLEtype => "FLEXIBLEtype" case ANNOTATION => "ANNOTATION" case PRIVATEqualified => "PRIVATEqualified" case PROTECTEDqualified => "PROTECTEDqualified" case EXPLICITtpt => "EXPLICITtpt" case ELIDED => "ELIDED" + case QUOTE => "QUOTE" + case SPLICE => "SPLICE" + case QUOTEPATTERN => "QUOTEPATTERN" + case SPLICEPATTERN => "SPLICEPATTERN" case HOLE => "HOLE" } diff --git a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala index a51541192321..78c5c0ba72b9 100644 --- a/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala +++ b/tasty/src/dotty/tools/tasty/TastyHeaderUnpickler.scala @@ -103,7 +103,7 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { val fileVersion = TastyVersion(fileMajor, fileMinor, 0) val toolVersion = TastyVersion(toolMajor, toolMinor, toolExperimental) val signature = signatureString(fileVersion, toolVersion, what = "Backward", tool = None) - val fix = 
recompileFix(toolVersion.minStable) + val fix = recompileFix(toolVersion.minStable, config) throw new UnpickleException(signature + fix + tastyAddendum) } else { @@ -117,43 +117,7 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { new String(bytes, start.index, length) } - val validVersion = TastyFormat.isVersionCompatible( - fileMajor = fileMajor, - fileMinor = fileMinor, - fileExperimental = fileExperimental, - compilerMajor = toolMajor, - compilerMinor = toolMinor, - compilerExperimental = toolExperimental - ) - - check(validVersion, { - // failure means that the TASTy file cannot be read, therefore it is either: - // - backwards incompatible major, in which case the library should be recompiled by the minimum stable minor - // version supported by this compiler - // - any experimental in an older minor, in which case the library should be recompiled by the stable - // compiler in the same minor. - // - older experimental in the same minor, in which case the compiler is also experimental, and the library - // should be recompiled by the current compiler - // - forward incompatible, in which case the compiler must be upgraded to the same version as the file. 
- val fileVersion = TastyVersion(fileMajor, fileMinor, fileExperimental) - val toolVersion = TastyVersion(toolMajor, toolMinor, toolExperimental) - - val compat = Compatibility.failReason(file = fileVersion, read = toolVersion) - - val what = if (compat < 0) "Backward" else "Forward" - val signature = signatureString(fileVersion, toolVersion, what, tool = Some(toolingVersion)) - val fix = ( - if (compat < 0) { - val newCompiler = - if (compat == Compatibility.BackwardIncompatibleMajor) toolVersion.minStable - else if (compat == Compatibility.BackwardIncompatibleExperimental) fileVersion.nextStable - else toolVersion // recompile the experimental library with the current experimental compiler - recompileFix(newCompiler) - } - else upgradeFix(fileVersion) - ) - signature + fix + tastyAddendum - }) + checkValidVersion(fileMajor, fileMinor, fileExperimental, toolingVersion, config) val uuid = new UUID(readUncompressedLong(), readUncompressedLong()) new TastyHeader(uuid, fileMajor, fileMinor, fileExperimental, toolingVersion) {} @@ -161,11 +125,56 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { } def isAtEnd: Boolean = reader.isAtEnd +} + +object TastyHeaderUnpickler { private def check(cond: Boolean, msg: => String): Unit = { if (!cond) throw new UnpickleException(msg) } + private def checkValidVersion(fileMajor: Int, fileMinor: Int, fileExperimental: Int, toolingVersion: String, config: UnpicklerConfig) = { + val toolMajor: Int = config.majorVersion + val toolMinor: Int = config.minorVersion + val toolExperimental: Int = config.experimentalVersion + val validVersion = TastyFormat.isVersionCompatible( + fileMajor = fileMajor, + fileMinor = fileMinor, + fileExperimental = fileExperimental, + compilerMajor = toolMajor, + compilerMinor = toolMinor, + compilerExperimental = toolExperimental + ) + check(validVersion, { + // failure means that the TASTy file cannot be read, therefore it is either: + // - backwards incompatible major, in which 
case the library should be recompiled by the minimum stable minor + // version supported by this compiler + // - any experimental in an older minor, in which case the library should be recompiled by the stable + // compiler in the same minor. + // - older experimental in the same minor, in which case the compiler is also experimental, and the library + // should be recompiled by the current compiler + // - forward incompatible, in which case the compiler must be upgraded to the same version as the file. + val fileVersion = TastyVersion(fileMajor, fileMinor, fileExperimental) + val toolVersion = TastyVersion(toolMajor, toolMinor, toolExperimental) + + val compat = Compatibility.failReason(file = fileVersion, read = toolVersion) + + val what = if (compat < 0) "Backward" else "Forward" + val signature = signatureString(fileVersion, toolVersion, what, tool = Some(toolingVersion)) + val fix = ( + if (compat < 0) { + val newCompiler = + if (compat == Compatibility.BackwardIncompatibleMajor) toolVersion.minStable + else if (compat == Compatibility.BackwardIncompatibleExperimental) fileVersion.nextStable + else toolVersion // recompile the experimental library with the current experimental compiler + recompileFix(newCompiler, config) + } + else upgradeFix(fileVersion, config) + ) + signature + fix + tastyAddendum + }) + } + private def signatureString( fileVersion: TastyVersion, toolVersion: TastyVersion, what: String, tool: Option[String]) = { val optProducedBy = tool.fold("")(t => s", produced by $t") @@ -174,13 +183,13 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { |""".stripMargin } - private def recompileFix(producerVersion: TastyVersion) = { + private def recompileFix(producerVersion: TastyVersion, config: UnpicklerConfig) = { val addendum = config.recompileAdditionalInfo val newTool = config.upgradedProducerTool(producerVersion) s""" The source of this file should be recompiled by $newTool.$addendum""".stripMargin } - private def 
upgradeFix(fileVersion: TastyVersion) = { + private def upgradeFix(fileVersion: TastyVersion, config: UnpicklerConfig) = { val addendum = config.upgradeAdditionalInfo(fileVersion) val newTool = config.upgradedReaderTool(fileVersion) s""" To read this ${fileVersion.kind} file, use $newTool.$addendum""".stripMargin @@ -189,9 +198,6 @@ class TastyHeaderUnpickler(config: UnpicklerConfig, reader: TastyReader) { private def tastyAddendum: String = """ | Please refer to the documentation for information on TASTy versioning: | https://docs.scala-lang.org/scala3/reference/language-versions/binary-compatibility.html""".stripMargin -} - -object TastyHeaderUnpickler { private object Compatibility { final val BackwardIncompatibleMajor = -3 diff --git a/tests/best-effort/broken-macro-executed-in-dependency/err/ExecutedMacro.scala b/tests/best-effort/broken-macro-executed-in-dependency/err/ExecutedMacro.scala new file mode 100644 index 000000000000..a6a071c9b85e --- /dev/null +++ b/tests/best-effort/broken-macro-executed-in-dependency/err/ExecutedMacro.scala @@ -0,0 +1,2 @@ +object ExecutedMacro: + val failingMacro = FailingTransparent.execute() diff --git a/tests/best-effort/broken-macro-executed-in-dependency/err/FailingTransparentInline.scala b/tests/best-effort/broken-macro-executed-in-dependency/err/FailingTransparentInline.scala new file mode 100644 index 000000000000..9f9fdc22ee4b --- /dev/null +++ b/tests/best-effort/broken-macro-executed-in-dependency/err/FailingTransparentInline.scala @@ -0,0 +1,11 @@ +object FailingTransparentInline: + sealed trait Foo + case class FooA() extends Foo + case class FooB() extends Foo + + transparent inline def execute(): Foo = ${ executeImpl() } + def executeImpl(using Quotes)() = { + val a = 0 + a.asInstanceOf[String] + FooB() + } diff --git a/tests/best-effort/broken-macro-executed-in-dependency/main/Main.scala b/tests/best-effort/broken-macro-executed-in-dependency/main/Main.scala new file mode 100644 index 000000000000..6603d4ee0cc1 
--- /dev/null +++ b/tests/best-effort/broken-macro-executed-in-dependency/main/Main.scala @@ -0,0 +1,2 @@ +object Main: + ExecutedMacro.failingMacro diff --git a/tests/best-effort/broken-macro-executed-in-dependent/err/BrokenMacros.scala b/tests/best-effort/broken-macro-executed-in-dependent/err/BrokenMacros.scala new file mode 100644 index 000000000000..73d121022b23 --- /dev/null +++ b/tests/best-effort/broken-macro-executed-in-dependent/err/BrokenMacros.scala @@ -0,0 +1,13 @@ +import scala.quoted._ +object BrokenMacros: + transparent inline def macro1() = ${macroImpl()} + def macroImpl(using Quotes)(): Expr[String] = + val a: Int = "str" // source of the error + '{a} + + sealed trait Foo + case class FooA() extends Foo + case class FooB() + transparent inline def macro2(): Foo = ${macro2Impl()} + def macro2Impl(using Quotes)(): Expr[Foo] = + '{FooB()} diff --git a/tests/best-effort/broken-macro-executed-in-dependent/main/Main.scala b/tests/best-effort/broken-macro-executed-in-dependent/main/Main.scala new file mode 100644 index 000000000000..d382bd4aabd7 --- /dev/null +++ b/tests/best-effort/broken-macro-executed-in-dependent/main/Main.scala @@ -0,0 +1,3 @@ +object Main + val a = BrokenMacros.macro1() + val b = BrokenMacros.macro2() diff --git a/tests/best-effort/mirrors-in-dependency/err/MirrorTypes.scala b/tests/best-effort/mirrors-in-dependency/err/MirrorTypes.scala new file mode 100644 index 000000000000..280805ba8ab9 --- /dev/null +++ b/tests/best-effort/mirrors-in-dependency/err/MirrorTypes.scala @@ -0,0 +1,2 @@ +object MirrorTypes: + case class BrokenType(a: NonExistent, b: Int) diff --git a/tests/best-effort/mirrors-in-dependency/main/MirrorExec.scala b/tests/best-effort/mirrors-in-dependency/main/MirrorExec.scala new file mode 100644 index 000000000000..12052a27b57d --- /dev/null +++ b/tests/best-effort/mirrors-in-dependency/main/MirrorExec.scala @@ -0,0 +1,7 @@ +import scala.deriving.Mirror + +object MirrorExec: + transparent inline def 
getNames[T](using m: Mirror.Of[T]): m.MirroredElemTypes = + scala.compiletime.erasedValue[m.MirroredElemTypes] + + val ab = getNames[MirrorTypes.BrokenType] diff --git a/tests/best-effort/simple-type-error/err/SimpleTypeError.scala b/tests/best-effort/simple-type-error/err/SimpleTypeError.scala new file mode 100644 index 000000000000..cf9ad8c8d56a --- /dev/null +++ b/tests/best-effort/simple-type-error/err/SimpleTypeError.scala @@ -0,0 +1,2 @@ +object SimpleTypeError: + def foo: Int = "string" diff --git a/tests/best-effort/simple-type-error/main/Main.scala b/tests/best-effort/simple-type-error/main/Main.scala new file mode 100644 index 000000000000..c1e821d790e7 --- /dev/null +++ b/tests/best-effort/simple-type-error/main/Main.scala @@ -0,0 +1,2 @@ +object Main: + SimpleTypeError.foo diff --git a/tests/cmdTest-sbt-tests/sourcepath-with-inline/src/main/scala/a/zz.scala b/tests/cmdTest-sbt-tests/sourcepath-with-inline/src/main/scala/a/zz.scala new file mode 100644 index 000000000000..17a7488ccb1a --- /dev/null +++ b/tests/cmdTest-sbt-tests/sourcepath-with-inline/src/main/scala/a/zz.scala @@ -0,0 +1,6 @@ +package a + +object Foo: // note that `Foo` is defined in `zz.scala` + class Local + inline def foo(using Local): Nothing = + ??? 
diff --git a/tests/coverage/run/erased/test.scala b/tests/coverage/run/erased/test.scala index 15a067e9ed50..6645020cac80 100644 --- a/tests/coverage/run/erased/test.scala +++ b/tests/coverage/run/erased/test.scala @@ -2,7 +2,7 @@ import scala.language.experimental.erasedDefinitions erased def parameterless: String = "y" -erased def e(x: String): String = "x" +erased def e(erased x: String): String = "x" def foo(erased a: String)(b: String): String = println(s"foo(a)($b)") b diff --git a/tests/coverage/run/erased/test.scoverage.check b/tests/coverage/run/erased/test.scoverage.check index dedf5689c490..aed6338099aa 100644 --- a/tests/coverage/run/erased/test.scoverage.check +++ b/tests/coverage/run/erased/test.scoverage.check @@ -25,8 +25,8 @@ test$package Object .test$package foo -181 -203 +188 +210 7 println Apply @@ -42,8 +42,8 @@ test$package Object .test$package foo -189 -202 +196 +209 7 s Apply @@ -59,8 +59,8 @@ test$package Object .test$package foo -132 139 +146 6 foo DefDef @@ -76,8 +76,8 @@ test$package Object .test$package identity -245 -269 +252 +276 11 println Apply @@ -93,8 +93,8 @@ test$package Object .test$package identity -253 -268 +260 +275 11 s Apply @@ -110,8 +110,8 @@ test$package Object .test$package identity -209 -221 +216 +228 10 identity DefDef @@ -127,8 +127,8 @@ test$package Object .test$package Test -300 -323 +307 +330 16 foo Apply @@ -144,8 +144,8 @@ test$package Object .test$package Test -326 -342 +333 +349 17 foo Apply @@ -161,8 +161,8 @@ test$package Object .test$package Test -345 -374 +352 +381 18 foo Apply @@ -178,8 +178,8 @@ test$package Object .test$package Test -357 -373 +364 +380 18 identity Apply @@ -195,8 +195,8 @@ test$package Object .test$package Test -275 -289 +282 +296 15 Test DefDef diff --git a/tests/explicit-nulls/flexible-types-common/i7883.scala b/tests/explicit-nulls/flexible-types-common/i7883.scala new file mode 100644 index 000000000000..9ee92553b60d --- /dev/null +++ 
b/tests/explicit-nulls/flexible-types-common/i7883.scala @@ -0,0 +1,9 @@ +import scala.util.matching.Regex + +object Test extends App { + def head(s: String, r: Regex): Option[(String, String)] = + s.trim match { + case r(hd, tl) => Some((hd, tl)) // error // error // error + case _ => None + } +} \ No newline at end of file diff --git a/tests/explicit-nulls/neg/interop-array-src/J.java b/tests/explicit-nulls/flexible-types-common/interop-array-src/J.java similarity index 100% rename from tests/explicit-nulls/neg/interop-array-src/J.java rename to tests/explicit-nulls/flexible-types-common/interop-array-src/J.java diff --git a/tests/explicit-nulls/neg/interop-array-src/S.scala b/tests/explicit-nulls/flexible-types-common/interop-array-src/S.scala similarity index 100% rename from tests/explicit-nulls/neg/interop-array-src/S.scala rename to tests/explicit-nulls/flexible-types-common/interop-array-src/S.scala diff --git a/tests/explicit-nulls/flexible-types-common/interop-chain.scala b/tests/explicit-nulls/flexible-types-common/interop-chain.scala new file mode 100644 index 000000000000..27a2d507801e --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-chain.scala @@ -0,0 +1,9 @@ +// With flexible types, we can select a member of its underlying type. 
+ +class Foo { + import java.util.ArrayList + import java.util.Iterator + + val x3 = new ArrayList[ArrayList[ArrayList[String]]]() + val x4: Int = x3.get(0).get(0).get(0).length() // error +} diff --git a/tests/explicit-nulls/pos/interop-enum-src/Day.java b/tests/explicit-nulls/flexible-types-common/interop-enum-src/Day.java similarity index 100% rename from tests/explicit-nulls/pos/interop-enum-src/Day.java rename to tests/explicit-nulls/flexible-types-common/interop-enum-src/Day.java diff --git a/tests/explicit-nulls/neg/interop-enum-src/Planet.java b/tests/explicit-nulls/flexible-types-common/interop-enum-src/Planet.java similarity index 100% rename from tests/explicit-nulls/neg/interop-enum-src/Planet.java rename to tests/explicit-nulls/flexible-types-common/interop-enum-src/Planet.java diff --git a/tests/explicit-nulls/pos/interop-enum-src/S.scala b/tests/explicit-nulls/flexible-types-common/interop-enum-src/S.scala similarity index 60% rename from tests/explicit-nulls/pos/interop-enum-src/S.scala rename to tests/explicit-nulls/flexible-types-common/interop-enum-src/S.scala index 75e4654869a4..ce0935271d11 100644 --- a/tests/explicit-nulls/pos/interop-enum-src/S.scala +++ b/tests/explicit-nulls/flexible-types-common/interop-enum-src/S.scala @@ -3,4 +3,5 @@ class S { val d: Day = Day.MON val p: Planet = Planet.MARS + val p2: Planet = p.next() // error: expected Planet but got Planet|Null } diff --git a/tests/explicit-nulls/neg/interop-generics/J.java b/tests/explicit-nulls/flexible-types-common/interop-generics/J.java similarity index 100% rename from tests/explicit-nulls/neg/interop-generics/J.java rename to tests/explicit-nulls/flexible-types-common/interop-generics/J.java diff --git a/tests/explicit-nulls/neg/interop-generics/S.scala b/tests/explicit-nulls/flexible-types-common/interop-generics/S.scala similarity index 100% rename from tests/explicit-nulls/neg/interop-generics/S.scala rename to 
tests/explicit-nulls/flexible-types-common/interop-generics/S.scala diff --git a/tests/explicit-nulls/flexible-types-common/interop-implicit.scala b/tests/explicit-nulls/flexible-types-common/interop-implicit.scala new file mode 100644 index 000000000000..4bbba8f11cab --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-implicit.scala @@ -0,0 +1,10 @@ +class S { + locally { + // OfType Implicits + + import java.nio.charset.StandardCharsets + import scala.io.Codec + + val c: Codec = StandardCharsets.UTF_8 // error + } +} \ No newline at end of file diff --git a/tests/explicit-nulls/flexible-types-common/interop-java-call/J.java b/tests/explicit-nulls/flexible-types-common/interop-java-call/J.java new file mode 100644 index 000000000000..554b91749889 --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-java-call/J.java @@ -0,0 +1,17 @@ +public class J { + public String f1() { + return ""; + } + + public int f2() { + return 0; + } + + public T g1() { + return null; + } +} + +class J2 { + public T x = null; +} \ No newline at end of file diff --git a/tests/explicit-nulls/flexible-types-common/interop-java-call/S.scala b/tests/explicit-nulls/flexible-types-common/interop-java-call/S.scala new file mode 100644 index 000000000000..acdbbafc3fab --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-java-call/S.scala @@ -0,0 +1,37 @@ +// Check Java calls have been cast to non-nullable. 
+ +val j: J = new J + +val s1: String = j.f1() // error + +val s1n: String | Null = j.f1() + +val i1: Int = j.f2() + +val s2: String = j.g1[String]() // error + +val s2n: String | Null = j.g1[String]() + +// val s3: String = j.g1[String | Null]() error + +val s3n: String | Null = j.g1[String | Null]() + +val i2: Int = j.g1[Int]() // error + +val a1: Any = j.g1[Any]() + +val ar1: AnyRef = j.g1[AnyRef]() // error + +val n1: Null = j.g1[Null]() + +// val ar2: AnyRef = j.g1[Null]() error + +def clo1[T]: T = j.g1[T]() // error + +def clo2[T <: AnyRef]: T = j.g1[T]() // error + +def clo3[T >: Null <: AnyRef | Null]: T = j.g1[T]() + +def testJ2[T]: T = + val j2: J2[T] = new J2 + j2.x // error diff --git a/tests/explicit-nulls/flexible-types-common/interop-java-chain/J.java b/tests/explicit-nulls/flexible-types-common/interop-java-chain/J.java new file mode 100644 index 000000000000..bd266bae13d9 --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-java-chain/J.java @@ -0,0 +1,7 @@ +class J1 { + J2 getJ2() { return new J2(); } +} + +class J2 { + J1 getJ1() { return new J1(); } +} \ No newline at end of file diff --git a/tests/explicit-nulls/flexible-types-common/interop-java-chain/S.scala b/tests/explicit-nulls/flexible-types-common/interop-java-chain/S.scala new file mode 100644 index 000000000000..9fe5aa3f08ce --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-java-chain/S.scala @@ -0,0 +1,4 @@ +class S { + val j: J2 = new J2() + j.getJ1().getJ2().getJ1().getJ2().getJ1().getJ2() // error +} diff --git a/tests/explicit-nulls/pos/interop-java-varargs-src/Names.java b/tests/explicit-nulls/flexible-types-common/interop-java-varargs-src/Names.java similarity index 100% rename from tests/explicit-nulls/pos/interop-java-varargs-src/Names.java rename to tests/explicit-nulls/flexible-types-common/interop-java-varargs-src/Names.java diff --git a/tests/explicit-nulls/pos/interop-java-varargs-src/S.scala 
b/tests/explicit-nulls/flexible-types-common/interop-java-varargs-src/S.scala similarity index 64% rename from tests/explicit-nulls/pos/interop-java-varargs-src/S.scala rename to tests/explicit-nulls/flexible-types-common/interop-java-varargs-src/S.scala index e867202e506d..ef0b702b0006 100644 --- a/tests/explicit-nulls/pos/interop-java-varargs-src/S.scala +++ b/tests/explicit-nulls/flexible-types-common/interop-java-varargs-src/S.scala @@ -16,4 +16,14 @@ class S { // Multiple arguments, some null. Names.setNames(null, null, "hello", "world", null) + + val arg1: Array[String] = ??? + val arg2: Array[String | Null] = ??? + val arg3: Array[String] | Null = ??? + val arg4: Array[String | Null] | Null = ??? + + Names.setNames(arg1*) + Names.setNames(arg2*) + Names.setNames(arg3*) // error + Names.setNames(arg4*) // error } diff --git a/tests/explicit-nulls/flexible-types-common/interop-java-varargs.scala b/tests/explicit-nulls/flexible-types-common/interop-java-varargs.scala new file mode 100644 index 000000000000..9ec27cb090a1 --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-java-varargs.scala @@ -0,0 +1,38 @@ +import java.nio.file.Paths + +def test1 = { + Paths.get("") + Paths.get("", null) + Paths.get("", "") + Paths.get("", "", null) + + val x1: String = ??? + val x2: String | Null = ??? + + Paths.get("", x1) + Paths.get("", x2) +} + +def test2 = { + val xs1: Seq[String] = ??? + val xs2: Seq[String | Null] = ??? + val xs3: Seq[String | Null] | Null = ??? + val xs4: Seq[String] | Null = ??? + + val ys1: Array[String] = ??? + val ys2: Array[String | Null] = ??? + val ys3: Array[String | Null] | Null = ??? + val ys4: Array[String] | Null = ??? 
+ + Paths.get("", xs1*) + Paths.get("", xs2*) + Paths.get("", xs3*) // error + Paths.get("", xs4*) // error + + Paths.get("", ys1*) + Paths.get("", ys2*) + Paths.get("", ys3*) // error + Paths.get("", ys4*) // error + + Paths.get("", null*) // error +} \ No newline at end of file diff --git a/tests/explicit-nulls/neg/interop-method-src/J.java b/tests/explicit-nulls/flexible-types-common/interop-method-src/J.java similarity index 100% rename from tests/explicit-nulls/neg/interop-method-src/J.java rename to tests/explicit-nulls/flexible-types-common/interop-method-src/J.java diff --git a/tests/explicit-nulls/neg/interop-method-src/S.scala b/tests/explicit-nulls/flexible-types-common/interop-method-src/S.scala similarity index 100% rename from tests/explicit-nulls/neg/interop-method-src/S.scala rename to tests/explicit-nulls/flexible-types-common/interop-method-src/S.scala diff --git a/tests/explicit-nulls/flexible-types-common/interop-propagate.scala b/tests/explicit-nulls/flexible-types-common/interop-propagate.scala new file mode 100644 index 000000000000..40eb12dd287c --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-propagate.scala @@ -0,0 +1,18 @@ + class Foo { + import java.util.ArrayList + + // Test that type mapping works with flexible types. 
+ val ll: ArrayList[ArrayList[ArrayList[String]]] = new ArrayList[ArrayList[ArrayList[String]]] + val level1: ArrayList[ArrayList[String]] = ll.get(0) // error + val level2: ArrayList[String] = ll.get(0).get(0) // error + val level3: String = ll.get(0).get(0).get(0) // error + + val lb = new ArrayList[ArrayList[ArrayList[String]]] + val levelA = lb.get(0) + val levelB = lb.get(0).get(0) // error + val levelC = lb.get(0).get(0).get(0) // error + + val x = levelA.get(0) // error + val y = levelB.get(0) + val z: String = levelA.get(0).get(0) // error +} diff --git a/tests/explicit-nulls/flexible-types-common/interop-select-type-member.scala b/tests/explicit-nulls/flexible-types-common/interop-select-type-member.scala new file mode 100644 index 000000000000..ddd402545edb --- /dev/null +++ b/tests/explicit-nulls/flexible-types-common/interop-select-type-member.scala @@ -0,0 +1,7 @@ +import java.util.ArrayList + +def f[T]: ArrayList[T] = { + val cz = Class.forName("java.util.ArrayList") + val o = cz.newInstance() // error: T of Class[?] 
| Null + o.asInstanceOf[ArrayList[T]] +} \ No newline at end of file diff --git a/tests/explicit-nulls/neg/from-nullable.scala b/tests/explicit-nulls/neg/from-nullable.scala new file mode 100644 index 000000000000..ab4ab7f63e8e --- /dev/null +++ b/tests/explicit-nulls/neg/from-nullable.scala @@ -0,0 +1,6 @@ +import scala.annotation.experimental + +@experimental def testFromNullable = + val s: String | Null = "abc" + val sopt1: Option[String] = Option(s) // error + val sopt2: Option[String] = Option.fromNullable(s) // ok \ No newline at end of file diff --git a/tests/explicit-nulls/neg/i7883.check b/tests/explicit-nulls/neg/i7883.check index e37285332359..f14e5d4e7481 100644 --- a/tests/explicit-nulls/neg/i7883.check +++ b/tests/explicit-nulls/neg/i7883.check @@ -1,19 +1,19 @@ --- [E134] Type Error: tests/explicit-nulls/neg/i7883.scala:6:11 -------------------------------------------------------- -6 | case r(hd, tl) => Some((hd, tl)) // error // error // error +-- [E134] Type Error: tests/explicit-nulls/neg/i7883.scala:8:11 -------------------------------------------------------- +8 | case r(hd, tl) => Some((hd, tl)) // error // error // error | ^ | None of the overloaded alternatives of method unapplySeq in class Regex with types | (m: scala.util.matching.Regex.Match): Option[List[String]] | (c: Char): Option[List[Char]] | (s: CharSequence): Option[List[String]] | match arguments (String | Null) --- [E006] Not Found Error: tests/explicit-nulls/neg/i7883.scala:6:30 --------------------------------------------------- -6 | case r(hd, tl) => Some((hd, tl)) // error // error // error +-- [E006] Not Found Error: tests/explicit-nulls/neg/i7883.scala:8:30 --------------------------------------------------- +8 | case r(hd, tl) => Some((hd, tl)) // error // error // error | ^^ | Not found: hd | | longer explanation available when compiling with `-explain` --- [E006] Not Found Error: tests/explicit-nulls/neg/i7883.scala:6:34 --------------------------------------------------- 
-6 | case r(hd, tl) => Some((hd, tl)) // error // error // error +-- [E006] Not Found Error: tests/explicit-nulls/neg/i7883.scala:8:34 --------------------------------------------------- +8 | case r(hd, tl) => Some((hd, tl)) // error // error // error | ^^ | Not found: tl | diff --git a/tests/explicit-nulls/neg/i7883.scala b/tests/explicit-nulls/neg/i7883.scala index 7938c92dce1e..10d2a6231dca 100644 --- a/tests/explicit-nulls/neg/i7883.scala +++ b/tests/explicit-nulls/neg/i7883.scala @@ -1,3 +1,5 @@ +//> using options -Yno-flexible-types + import scala.util.matching.Regex object Test extends App { diff --git a/tests/explicit-nulls/neg/interop-enum-src/S.scala b/tests/explicit-nulls/neg/interop-enum-src/S.scala deleted file mode 100644 index 99e92cedc68d..000000000000 --- a/tests/explicit-nulls/neg/interop-enum-src/S.scala +++ /dev/null @@ -1,6 +0,0 @@ -// Verify that enum values aren't nullified. - -class S { - val p: Planet = Planet.MARS // ok: accessing static member - val p2: Planet = p.next() // error: expected Planet but got Planet|Null -} diff --git a/tests/explicit-nulls/neg/interop-propagate.scala b/tests/explicit-nulls/neg/interop-propagate.scala deleted file mode 100644 index 6af7ee182cac..000000000000 --- a/tests/explicit-nulls/neg/interop-propagate.scala +++ /dev/null @@ -1,10 +0,0 @@ - class Foo { - import java.util.ArrayList - - // Test that the nullability is propagated to nested containers. 
- val ll = new ArrayList[ArrayList[ArrayList[String]]] - val level1: ArrayList[ArrayList[String]] = ll.get(0) // error - val level2: ArrayList[String] = ll.get(0).get(0) // error - val level3: String = ll.get(0).get(0).get(0) // error - val ok: String = ll.get(0).get(0).get(0) // error -} diff --git a/tests/explicit-nulls/neg/interop-return.scala b/tests/explicit-nulls/neg/interop-return.scala index 1d6df4da93bc..422d37882179 100644 --- a/tests/explicit-nulls/neg/interop-return.scala +++ b/tests/explicit-nulls/neg/interop-return.scala @@ -1,3 +1,5 @@ +//> using options -Yno-flexible-types + // Test that the return type of Java methods as well as the type of Java fields is marked as nullable. class Foo { diff --git a/tests/explicit-nulls/neg/notnull/S.scala b/tests/explicit-nulls/neg/notnull/S.scala index eada60eea6e7..a10bdaabc77c 100644 --- a/tests/explicit-nulls/neg/notnull/S.scala +++ b/tests/explicit-nulls/neg/notnull/S.scala @@ -1,3 +1,5 @@ +//> using options -Yno-flexible-types + // Test that NotNull annotations not in the list are not working in Java files. 
class S { diff --git a/tests/explicit-nulls/pos/i14682.scala b/tests/explicit-nulls/pos/i14682.scala index 318de6094a88..76ae621a406e 100644 --- a/tests/explicit-nulls/pos/i14682.scala +++ b/tests/explicit-nulls/pos/i14682.scala @@ -1,4 +1,4 @@ -//> using options -Ysafe-init +//> using options -Wsafe-init class C1: sealed abstract class Name { @@ -26,4 +26,4 @@ class C2: val localName = LocalName() println(localName) - var count = 0 \ No newline at end of file + var count = 0 diff --git a/tests/explicit-nulls/pos/interop-applied-types/J.java b/tests/explicit-nulls/pos/interop-applied-types/J.java new file mode 100644 index 000000000000..c85a921a81b9 --- /dev/null +++ b/tests/explicit-nulls/pos/interop-applied-types/J.java @@ -0,0 +1,3 @@ +public class J { + public J j = this; +} \ No newline at end of file diff --git a/tests/explicit-nulls/pos/interop-applied-types/S.scala b/tests/explicit-nulls/pos/interop-applied-types/S.scala new file mode 100644 index 000000000000..8ff50ab63840 --- /dev/null +++ b/tests/explicit-nulls/pos/interop-applied-types/S.scala @@ -0,0 +1,14 @@ +def test1[T](x: J[T]): J[T] = + x match { + case y: J[_] => y + } + +def test2[T](x: J[T]): J[T] = + x match { + case y: J[_] => y.j + } + +def test3[T](x: J[T]): J[T] = + x.j match { + case y: J[_] => y.j + } \ No newline at end of file diff --git a/tests/explicit-nulls/pos/interop-constructor-src/S.scala b/tests/explicit-nulls/pos/interop-constructor-src/S.scala index 3defd73f3945..be87b6052699 100644 --- a/tests/explicit-nulls/pos/interop-constructor-src/S.scala +++ b/tests/explicit-nulls/pos/interop-constructor-src/S.scala @@ -3,4 +3,5 @@ class S { val x1: J = new J("hello") val x2: J = new J(null) val x3: J = new J(null, null, null) + val x4: J = new J("hello", null, "world") } diff --git a/tests/explicit-nulls/pos/interop-enum-src/Planet.java b/tests/explicit-nulls/pos/interop-enum-src/Planet.java deleted file mode 100644 index 287aed6aecc5..000000000000 --- 
a/tests/explicit-nulls/pos/interop-enum-src/Planet.java +++ /dev/null @@ -1,19 +0,0 @@ -public enum Planet { - MERCURY (3.303e+23, 2.4397e6), - VENUS (4.869e+24, 6.0518e6), - EARTH (5.976e+24, 6.37814e6), - MARS (6.421e+23, 3.3972e6), - JUPITER (1.9e+27, 7.1492e7), - SATURN (5.688e+26, 6.0268e7), - URANUS (8.686e+25, 2.5559e7), - NEPTUNE (1.024e+26, 2.4746e7); - - private final double mass; // in kilograms - private final double radius; // in meters - Planet(double mass, double radius) { - this.mass = mass; - this.radius = radius; - } - private double mass() { return mass; } - private double radius() { return radius; } -} diff --git a/tests/explicit-nulls/pos/interop-generics/J.java b/tests/explicit-nulls/pos/interop-generics/J.java deleted file mode 100644 index 4bbdbd4cf319..000000000000 --- a/tests/explicit-nulls/pos/interop-generics/J.java +++ /dev/null @@ -1,13 +0,0 @@ - -class I {} - -class J { - I foo(T x) { - return new I(); - } - - I[] bar(T x) { - Object[] r = new Object[]{new I()}; - return (I[]) r; - } -} diff --git a/tests/explicit-nulls/pos/interop-generics/S.scala b/tests/explicit-nulls/pos/interop-generics/S.scala deleted file mode 100644 index 10a0572b0edf..000000000000 --- a/tests/explicit-nulls/pos/interop-generics/S.scala +++ /dev/null @@ -1,6 +0,0 @@ -class S { - val j = new J() - // Check that the inside of a Java generic isn't nullified - val x: I[String] | Null = j.foo("hello") - val y: Array[I[String] | Null] | Null = j.bar[String](null) -} diff --git a/tests/explicit-nulls/pos/interop-nn-src/S.scala b/tests/explicit-nulls/pos/interop-nn-src/S.scala index 6250c4c3c961..3f6cddb4731b 100644 --- a/tests/explicit-nulls/pos/interop-nn-src/S.scala +++ b/tests/explicit-nulls/pos/interop-nn-src/S.scala @@ -1,7 +1,7 @@ class S { val j = new J() - // Test that the `nn` extension method can be used to strip away - // nullability from a type. + + // Test that the `nn` extension method should work with flexible types. 
val s: String = j.foo.nn val a: Array[String | Null] = j.bar.nn diff --git a/tests/explicit-nulls/pos/interop-ortype-src/J.java b/tests/explicit-nulls/pos/interop-ortype-src/J.java new file mode 100644 index 000000000000..b0d767bccf3e --- /dev/null +++ b/tests/explicit-nulls/pos/interop-ortype-src/J.java @@ -0,0 +1,3 @@ +class J { + public static T foo(T t) { return null; } +} diff --git a/tests/explicit-nulls/pos/interop-ortype-src/S.scala b/tests/explicit-nulls/pos/interop-ortype-src/S.scala new file mode 100644 index 000000000000..8576ee0895ed --- /dev/null +++ b/tests/explicit-nulls/pos/interop-ortype-src/S.scala @@ -0,0 +1,7 @@ +// Tests that member finding works on (FlexibleType(T) | S) +class S { + def foo(a: J | String) = (a match { + case x: J => J.foo(x: J) + case y: String => "" + }).asInstanceOf[J] +} diff --git a/tests/explicit-nulls/pos/interop-poly-src/S.scala b/tests/explicit-nulls/pos/interop-poly-src/S.scala index 1fea277efe90..8aed9e99b689 100644 --- a/tests/explicit-nulls/pos/interop-poly-src/S.scala +++ b/tests/explicit-nulls/pos/interop-poly-src/S.scala @@ -9,12 +9,29 @@ class Test { // because JavaCat, being a Java class, _already_ nullifies its // fields. val jc: JavaCat[String]|Null = J.getJavaCat[String]() + val jc2: JavaCat[String] = J.getJavaCat[String]() // ScalaCat is Scala-defined, so we need the inner |Null. 
val sc: ScalaCat[String|Null]|Null = J.getScalaCat[String]() + val sc2: ScalaCat[String]|Null = J.getScalaCat[String]() + val sc3: ScalaCat[String|Null] = J.getScalaCat[String]() + val sc4: ScalaCat[String] = J.getScalaCat[String]() import java.util.List val las: List[Array[String|Null]]|Null = J.getListOfStringArray() + val las2: List[Array[String|Null]] = J.getListOfStringArray() + val las3: List[Array[String]]|Null = J.getListOfStringArray() + val las4: List[Array[String]] = J.getListOfStringArray() val als: Array[List[String]|Null]|Null = J.getArrayOfStringList() + val als2: Array[List[String]|Null] = J.getArrayOfStringList() + val als3: Array[List[String]]|Null = J.getArrayOfStringList() + val als4: Array[List[String]] = J.getArrayOfStringList() val css: List[Array[List[Array[String|Null]]|Null]]|Null = J.getComplexStrings() + val css2: List[Array[List[Array[String]]|Null]]|Null = J.getComplexStrings() + val css3: List[Array[List[Array[String|Null]]]]|Null = J.getComplexStrings() + val css4: List[Array[List[Array[String|Null]]|Null]] = J.getComplexStrings() + val css5: List[Array[List[Array[String|Null]]]] = J.getComplexStrings() + val css6: List[Array[List[Array[String]]]]|Null = J.getComplexStrings() + val css7: List[Array[List[Array[String]]|Null]] = J.getComplexStrings() + val css8: List[Array[List[Array[String]]]] = J.getComplexStrings() } diff --git a/tests/explicit-nulls/pos/interop-static-src/S.scala b/tests/explicit-nulls/pos/interop-static-src/S.scala index 3db9c3f6d281..7e0e4a34898e 100644 --- a/tests/explicit-nulls/pos/interop-static-src/S.scala +++ b/tests/explicit-nulls/pos/interop-static-src/S.scala @@ -1,5 +1,7 @@ class S { // Java static methods are also nullified val x: Int = J.foo(null) + val x2: Int = J.foo("hello") val y: String | Null = J.bar(0) + val y2: String = J.bar(0) } diff --git a/tests/explicit-nulls/pos/match-with-applied-types.scala.scala b/tests/explicit-nulls/pos/match-with-applied-types.scala.scala new file mode 100644 index 
000000000000..7b9886ca60ed --- /dev/null +++ b/tests/explicit-nulls/pos/match-with-applied-types.scala.scala @@ -0,0 +1,7 @@ +class A + +def test = + val xs: java.util.LinkedHashMap[String, A | List[A]] = ??? + xs.get("a") match + case a: A => ??? + case as: List[A] => ??? \ No newline at end of file diff --git a/tests/explicit-nulls/pos/sam-parameter-javadefined/injava.java b/tests/explicit-nulls/pos/sam-parameter-javadefined/injava.java new file mode 100644 index 000000000000..28925b3c492a --- /dev/null +++ b/tests/explicit-nulls/pos/sam-parameter-javadefined/injava.java @@ -0,0 +1,6 @@ +class injava { + static void overloaded(Runnable r) {} + static void overloaded(int i) {} + + static void notoverloaded(Runnable r) {} +} diff --git a/tests/explicit-nulls/pos/sam-parameter-javadefined/sam-test.scala b/tests/explicit-nulls/pos/sam-parameter-javadefined/sam-test.scala new file mode 100644 index 000000000000..d3573f590713 --- /dev/null +++ b/tests/explicit-nulls/pos/sam-parameter-javadefined/sam-test.scala @@ -0,0 +1,23 @@ +def foo = { + def unit: Unit = () + + injava.overloaded({ () => unit } : Runnable ) + injava.overloaded({ () => unit } ) + + injava.notoverloaded({ () => unit } : Runnable ) + injava.notoverloaded({ () => unit } ) + + val list = new java.util.Vector[Int]() + java.util.Collections.sort[Int](list, { (a,b) => a - b } : java.util.Comparator[Int] ) + java.util.Collections.sort[Int](list, { (a,b) => a - b }) + + new Thread({ () => unit } : Runnable ) + new Thread({ () => unit } ) + + // See cats.effect.kernel.AsyncPlatform + val cf = new java.util.concurrent.CompletableFuture[String] + cf.handle[Unit]({ + case (string, null) => unit + case (string, throwable) => unit + }) +} diff --git a/tests/explicit-nulls/pos/widen-nullable-union.scala b/tests/explicit-nulls/pos/widen-nullable-union.scala index 9ffa767b84e5..f87b61f781ae 100644 --- a/tests/explicit-nulls/pos/widen-nullable-union.scala +++ b/tests/explicit-nulls/pos/widen-nullable-union.scala @@ 
-39,4 +39,16 @@ class Test { val y = x val _: (A & B) | Null = y } + + def test1(s: String): String = + val ss = if !s.isEmpty() then s.trim() else s + ss + "!" + + def test2(s: String): String = + val ss = if !s.isEmpty() then s.trim().nn else s + ss + "!" + + def test3(s: String): String = + val ss: String = if !s.isEmpty() then s.trim().nn else s + ss + "!" } diff --git a/tests/explicit-nulls/run/from-nullable.check b/tests/explicit-nulls/run/from-nullable.check new file mode 100644 index 000000000000..43d418e64a03 --- /dev/null +++ b/tests/explicit-nulls/run/from-nullable.check @@ -0,0 +1,2 @@ +hello +None diff --git a/tests/explicit-nulls/run/from-nullable.scala b/tests/explicit-nulls/run/from-nullable.scala new file mode 100644 index 000000000000..6f01e402e790 --- /dev/null +++ b/tests/explicit-nulls/run/from-nullable.scala @@ -0,0 +1,17 @@ +object Test: + import scala.annotation.experimental + + @experimental def main(args: Array[String]): Unit = + val s1: String | Null = "hello" + val s2: String | Null = null + + val opts1: Option[String] = Option.fromNullable(s1) + val opts2: Option[String] = Option.fromNullable(s2) + + opts1 match + case Some(s) => println(s) + case None => println("None") + + opts2 match + case Some(s) => println(s) + case None => println("None") diff --git a/tests/explicit-nulls/run/tasty-flexible-type.check b/tests/explicit-nulls/run/tasty-flexible-type.check new file mode 100644 index 000000000000..dcb4db1129d0 --- /dev/null +++ b/tests/explicit-nulls/run/tasty-flexible-type.check @@ -0,0 +1,7 @@ +Inlined(None, Nil, Literal(StringConstant("hello"))) +ConstantType(StringConstant("hello")) + +Inlined(None, Nil, Apply(Select(Literal(StringConstant("world")), "trim"), Nil)) +FlexibleType(TypeRef(ThisType(TypeRef(NoPrefix(), "lang")), "String")) + +FlexibleType(TypeRef(TermRef(ThisType(TypeRef(NoPrefix(), "java")), "lang"), "String")) diff --git a/tests/explicit-nulls/run/tasty-flexible-type/quoted_1.scala 
b/tests/explicit-nulls/run/tasty-flexible-type/quoted_1.scala new file mode 100644 index 000000000000..782eec53602a --- /dev/null +++ b/tests/explicit-nulls/run/tasty-flexible-type/quoted_1.scala @@ -0,0 +1,34 @@ +import scala.quoted.* + +object Macros { + + inline def printTree[T](inline x: T): Unit = + ${ impl('x) } + + def impl[T](x: Expr[T])(using Quotes): Expr[Unit] = { + import quotes.reflect.* + + val tree = x.asTerm + val treeStr = Expr(tree.show(using Printer.TreeStructure)) + val treeTpeStr = Expr(tree.tpe.show(using Printer.TypeReprStructure)) + + '{ + println(${treeStr}) + println(${treeTpeStr}) + println() + } + } + + inline def theTestBlock: Unit = ${ theTestBlockImpl } + + def theTestBlockImpl(using Quotes): Expr[Unit] = { + import quotes.reflect.* + + val ft1 = FlexibleType(TypeRepr.of[String]) + val ft1e = Expr(ft1.show(using Printer.TypeReprStructure)) + + '{ + println(${ft1e}) + } + } +} diff --git a/tests/explicit-nulls/run/tasty-flexible-type/quoted_2.scala b/tests/explicit-nulls/run/tasty-flexible-type/quoted_2.scala new file mode 100644 index 000000000000..7b22c541ee58 --- /dev/null +++ b/tests/explicit-nulls/run/tasty-flexible-type/quoted_2.scala @@ -0,0 +1,10 @@ + +import Macros.* + +object Test { + def main(args: Array[String]): Unit = { + printTree("hello") + printTree("world".trim()) + theTestBlock + } +} diff --git a/tests/explicit-nulls/unsafe-common/unsafe-java-varargs-src/S.scala b/tests/explicit-nulls/unsafe-common/unsafe-java-varargs-src/S.scala index e27b0dcaacbf..67fa583a7b66 100644 --- a/tests/explicit-nulls/unsafe-common/unsafe-java-varargs-src/S.scala +++ b/tests/explicit-nulls/unsafe-common/unsafe-java-varargs-src/S.scala @@ -12,8 +12,8 @@ class S { val arg3: Array[String] | Null = ??? val arg4: Array[String | Null] | Null = ??? 
- j.foo(arg1: _*) - j.foo(arg2: _*) - j.foo(arg3: _*) // error - j.foo(arg4: _*) // error + j.foo(arg1*) + j.foo(arg2*) + j.foo(arg3*) // error + j.foo(arg4*) // error } \ No newline at end of file diff --git a/tests/init-global/pos/EmptyMap.scala b/tests/init-global/pos/EmptyMap.scala new file mode 100644 index 000000000000..776fa81763d6 --- /dev/null +++ b/tests/init-global/pos/EmptyMap.scala @@ -0,0 +1,6 @@ +import scala.collection.immutable.HashMap + +object O { + val emptyMap: HashMap[Int, Int] = HashMap.empty + val key = emptyMap.get(0) +} diff --git a/tests/init-global/pos/EmptyMap2.scala b/tests/init-global/pos/EmptyMap2.scala new file mode 100644 index 000000000000..b66c92f449da --- /dev/null +++ b/tests/init-global/pos/EmptyMap2.scala @@ -0,0 +1,4 @@ +import scala.collection.immutable.HashMap + +object A: + val a = HashMap.empty[Int, Int].updated(1, 2) diff --git a/tests/init-global/pos/EmptySet.scala b/tests/init-global/pos/EmptySet.scala new file mode 100644 index 000000000000..7966d01a0aac --- /dev/null +++ b/tests/init-global/pos/EmptySet.scala @@ -0,0 +1,6 @@ +import scala.collection.immutable.HashSet + +object O { + val emptySet = HashSet.empty + val emptySetSize = emptySet.size +} diff --git a/tests/init-global/pos/EmptySet2.scala b/tests/init-global/pos/EmptySet2.scala new file mode 100644 index 000000000000..f2945c050eba --- /dev/null +++ b/tests/init-global/pos/EmptySet2.scala @@ -0,0 +1,4 @@ +import scala.collection.immutable.HashSet + +object A: + val a = HashSet.empty[Int] + 1 diff --git a/tests/init-global/pos/EmptyVectorIterator.scala b/tests/init-global/pos/EmptyVectorIterator.scala new file mode 100644 index 000000000000..40fcce8d7d3e --- /dev/null +++ b/tests/init-global/pos/EmptyVectorIterator.scala @@ -0,0 +1,7 @@ +import scala.collection.immutable.Vector + +object O { + val emptyVector = Vector.empty + val emptyVectorIterator = emptyVector.iterator + val hasNext = emptyVectorIterator.hasNext +} diff --git 
a/tests/init-global/pos/LazyList.scala b/tests/init-global/pos/LazyList.scala new file mode 100644 index 000000000000..fea70b5a8a54 --- /dev/null +++ b/tests/init-global/pos/LazyList.scala @@ -0,0 +1,4 @@ +import scala.collection.immutable.LazyList + +object A: + val a = LazyList.empty[Int] :+ 1 \ No newline at end of file diff --git a/tests/init-global/pos/global-region1.scala b/tests/init-global/pos/global-region1.scala index 0797d22aa5bf..85e925967337 100644 --- a/tests/init-global/pos/global-region1.scala +++ b/tests/init-global/pos/global-region1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.init.region diff --git a/tests/init-global/pos/list-colon-colon-next.scala b/tests/init-global/pos/list-colon-colon-next.scala new file mode 100644 index 000000000000..5a87cb311bea --- /dev/null +++ b/tests/init-global/pos/list-colon-colon-next.scala @@ -0,0 +1,5 @@ +object A: + val a: List[Int] = List(1, 2, 3) + +object B: + val b = A.a.size diff --git a/tests/init-global/pos/scala2-library.scala b/tests/init-global/pos/scala2-library.scala new file mode 100644 index 000000000000..8fa9245aebe0 --- /dev/null +++ b/tests/init-global/pos/scala2-library.scala @@ -0,0 +1,2 @@ +//> using options -Ycompile-scala2-library +case class UninitializedFieldError(msg: String) extends RuntimeException(msg) diff --git a/tests/init-global/warn/i18628_3.scala b/tests/init-global/warn/i18628_3.scala index e7a057ded0d8..84ab1d9f6ef3 100644 --- a/tests/init-global/warn/i18628_3.scala +++ b/tests/init-global/warn/i18628_3.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.init.widen diff --git a/tests/neg-custom-args/captures/byname.check b/tests/neg-custom-args/captures/byname.check index 226bee2cd0e5..e06a3a1f8268 100644 --- a/tests/neg-custom-args/captures/byname.check +++ b/tests/neg-custom-args/captures/byname.check @@ -9,7 +9,7 
@@ | Found: (x$0: Int) ->{cap2} Int | Required: (x$0: Int) -> Int | - | Note that the expected type Int => Int + | Note that the expected type Int ->{} Int | is the previously inferred result type of method test | which is also the type seen in separately compiled sources. | The new inferred type (x$0: Int) ->{cap2} Int diff --git a/tests/neg-custom-args/captures/effect-swaps.check b/tests/neg-custom-args/captures/effect-swaps.check new file mode 100644 index 000000000000..bda3509645d1 --- /dev/null +++ b/tests/neg-custom-args/captures/effect-swaps.check @@ -0,0 +1,21 @@ +-- [E007] Type Mismatch Error: tests/neg-custom-args/captures/effect-swaps.scala:64:8 ---------------------------------- +63 | Result: +64 | Future: // error, escaping label from Result + | ^ + | Found: Result.Ok[box Future[box T^?]^{fr, contextual$1}] + | Required: Result[Future[T], Nothing] +65 | fr.await.ok + |-------------------------------------------------------------------------------------------------------------------- + |Inline stack trace + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + |This location contains code that was inlined from effect-swaps.scala:41 +41 | boundary(Ok(body)) + | ^^^^^^^^ + -------------------------------------------------------------------------------------------------------------------- + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/effect-swaps.scala:68:15 ------------------------------------------------------ +68 | Result.make: //lbl ?=> // error, escaping label from Result + | ^^^^^^^^^^^ + |local reference contextual$9 from (using contextual$9: boundary.Label[Result[box Future[box T^?]^{fr, contextual$9}, box E^?]]^): + | box Future[box T^?]^{fr, contextual$9} leaks into outer capture set of type parameter T of method make in object Result diff --git a/tests/neg-custom-args/captures/effect-swaps.scala 
b/tests/neg-custom-args/captures/effect-swaps.scala new file mode 100644 index 000000000000..1d72077bb8da --- /dev/null +++ b/tests/neg-custom-args/captures/effect-swaps.scala @@ -0,0 +1,70 @@ +import annotation.capability + +object boundary: + + @capability final class Label[-T] + + /** Abort current computation and instead return `value` as the value of + * the enclosing `boundary` call that created `label`. + */ + def break[T](value: T)(using label: Label[T]): Nothing = ??? + + def apply[T](body: Label[T] ?=> T): T = ??? +end boundary + +import boundary.{Label, break} + +@capability trait Async +object Async: + def blocking[T](body: Async ?=> T): T = ??? + +class Future[+T]: + this: Future[T]^ => + def await(using Async): T = ??? +object Future: + def apply[T](op: Async ?=> T)(using Async): Future[T]^{op} = ??? + +enum Result[+T, +E]: + case Ok[+T](value: T) extends Result[T, Nothing] + case Err[+E](error: E) extends Result[Nothing, E] + + +object Result: + extension [T, E](r: Result[T, E]^)(using Label[Err[E]]) + + /** `_.ok` propagates Err to current Label */ + def ok: T = r match + case Ok(value) => value + case Err(value) => break[Err[E]](Err(value)) + + transparent inline def apply[T, E](inline body: Label[Result[T, E]] ?=> T): Result[T, E] = + boundary(Ok(body)) + + // same as apply, but not an inline method + def make[T, E](body: Label[Result[T, E]] ?=> T): Result[T, E] = + boundary(Ok(body)) + +end Result + +def test[T, E](using Async) = + import Result.* + Async.blocking: async ?=> + val good1: List[Future[Result[T, E]]] => Future[Result[List[T], E]] = frs => + Future: + Result: + frs.map(_.await.ok) // OK + + val good2: Result[Future[T], E] => Future[Result[T, E]] = rf => + Future: + Result: + rf.ok.await // OK, Future argument has type Result[T] + + def fail3(fr: Future[Result[T, E]]^) = + Result: + Future: // error, escaping label from Result + fr.await.ok + + def fail4[T, E](fr: Future[Result[T, E]]^) = + Result.make: //lbl ?=> // error, escaping 
label from Result + Future: fut ?=> + fr.await.ok diff --git a/tests/neg-custom-args/captures/extending-cap-classes.scala b/tests/neg-custom-args/captures/extending-cap-classes.scala new file mode 100644 index 000000000000..17497e415a1e --- /dev/null +++ b/tests/neg-custom-args/captures/extending-cap-classes.scala @@ -0,0 +1,15 @@ +import annotation.capability + +class C1 +@capability class C2 extends C1 +class C3 extends C2 + +def test = + val x1: C1 = new C1 + val x2: C1 = new C2 // error + val x3: C1 = new C3 // error + + val y1: C2 = new C2 + val y2: C2 = new C3 + + val z1: C3 = new C3 \ No newline at end of file diff --git a/tests/neg-custom-args/captures/extending-impure-function.scala b/tests/neg-custom-args/captures/extending-impure-function.scala new file mode 100644 index 000000000000..e491b31caed5 --- /dev/null +++ b/tests/neg-custom-args/captures/extending-impure-function.scala @@ -0,0 +1,30 @@ +class F1 extends (Int => Unit) { + def apply(x: Int): Unit = () +} + +class F2 extends (Int -> Unit) { + def apply(x: Int): Unit = () +} + +def test = + val x1 = new (Int => Unit) { + def apply(x: Int): Unit = () + } + + val x2: Int -> Unit = new (Int => Unit) { // error + def apply(x: Int): Unit = () + } + + val x3: Int -> Unit = new (Int -> Unit) { + def apply(x: Int): Unit = () + } + + val y1: Int => Unit = new F1 + val y2: Int -> Unit = new F1 // error + val y3: Int => Unit = new F2 + val y4: Int -> Unit = new F2 + + val z1 = () => () + val z2: () -> Unit = () => () + val z3: () -> Unit = z1 + val z4: () => Unit = () => () diff --git a/tests/neg-custom-args/captures/i16725.scala b/tests/neg-custom-args/captures/i16725.scala new file mode 100644 index 000000000000..ff06b3be78a7 --- /dev/null +++ b/tests/neg-custom-args/captures/i16725.scala @@ -0,0 +1,15 @@ +import language.experimental.captureChecking +@annotation.capability +class IO: + def brewCoffee(): Unit = ??? +def usingIO[T](op: IO => T): T = ??? 
+ +type Wrapper[T] = [R] -> (f: T => R) -> R +def mk[T](x: T): Wrapper[T] = [R] => f => f(x) +def useWrappedIO(wrapper: Wrapper[IO]): () -> Unit = + () => + wrapper: io => // error + io.brewCoffee() +def main(): Unit = + val escaped = usingIO(io => useWrappedIO(mk(io))) + escaped() // boom diff --git a/tests/neg-custom-args/captures/i20169.scala b/tests/neg-custom-args/captures/i20169.scala new file mode 100644 index 000000000000..13e8c87f8e0c --- /dev/null +++ b/tests/neg-custom-args/captures/i20169.scala @@ -0,0 +1,8 @@ +case class Box[T](x: T): + def foreach(f: T => Unit): Unit = f(x) + +def runOps(ops: Box[() => Unit]): () -> Unit = + val applyFn: (() => Unit) -> Unit = f => f() + val fn: () -> Unit = () => + ops.foreach(applyFn) // error + fn diff --git a/tests/neg-custom-args/captures/leaking-iterators.check b/tests/neg-custom-args/captures/leaking-iterators.check index 0481a9a4d9e2..2f47a26e894a 100644 --- a/tests/neg-custom-args/captures/leaking-iterators.check +++ b/tests/neg-custom-args/captures/leaking-iterators.check @@ -1,4 +1,4 @@ -- Error: tests/neg-custom-args/captures/leaking-iterators.scala:56:2 -------------------------------------------------- 56 | usingLogFile: log => // error | ^^^^^^^^^^^^ - | local reference log leaks into outer capture set of type parameter R of method usingLogFile + | local reference log leaks into outer capture set of type parameter R of method usingLogFile in package cctest diff --git a/tests/neg-custom-args/captures/reaches.check b/tests/neg-custom-args/captures/reaches.check index ee628a212ce7..a1c5a56369e9 100644 --- a/tests/neg-custom-args/captures/reaches.check +++ b/tests/neg-custom-args/captures/reaches.check @@ -41,3 +41,8 @@ | Required: File^{id*} | | longer explanation available when compiling with `-explain` +-- Error: tests/neg-custom-args/captures/reaches.scala:77:5 ------------------------------------------------------------ +77 | ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * + | ^^^^^^ + 
| Reach capability cap and universal capability cap cannot both + | appear in the type [B](f: ((box A ->{ps*} A, box A ->{ps*} A)) => B): List[B] of this expression diff --git a/tests/neg-custom-args/captures/reaches.scala b/tests/neg-custom-args/captures/reaches.scala index 8ba7f60d6335..de5e4362cdf2 100644 --- a/tests/neg-custom-args/captures/reaches.scala +++ b/tests/neg-custom-args/captures/reaches.scala @@ -59,3 +59,21 @@ def attack2 = val leaked = usingFile[File^{id*}]: f => val f1: File^{id*} = id(f) // error f1 + +class List[+A]: + def head: A = ??? + def tail: List[A] = ??? + def map[B](f: A => B): List[B] = ??? + def nonEmpty: Boolean = ??? + +extension [A](x: A) def :: (xs: List[A]): List[A] = ??? + +object Nil extends List[Nothing] + +def compose1[A, B, C](f: A => B, g: B => C): A ->{f, g} C = + z => g(f(z)) + +def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] = + ps.map((x, y) => compose1(x, y)) // error: cannot mix cap and * + + diff --git a/tests/neg-custom-args/captures/usingLogFile.check b/tests/neg-custom-args/captures/usingLogFile.check index ef0c5d1e77c9..bf5c1dc4f83a 100644 --- a/tests/neg-custom-args/captures/usingLogFile.check +++ b/tests/neg-custom-args/captures/usingLogFile.check @@ -1,12 +1,12 @@ -- Error: tests/neg-custom-args/captures/usingLogFile.scala:23:14 ------------------------------------------------------ 23 | val later = usingLogFile { f => () => f.write(0) } // error | ^^^^^^^^^^^^ - | local reference f leaks into outer capture set of type parameter T of method usingLogFile + | local reference f leaks into outer capture set of type parameter T of method usingLogFile in object Test2 -- Error: tests/neg-custom-args/captures/usingLogFile.scala:28:23 ------------------------------------------------------ 28 | private val later2 = usingLogFile { f => Cell(() => f.write(0)) } // error | ^^^^^^^^^^^^ - | local reference f leaks into outer capture set of type parameter T of method usingLogFile + | local reference f leaks 
into outer capture set of type parameter T of method usingLogFile in object Test2 -- Error: tests/neg-custom-args/captures/usingLogFile.scala:44:16 ------------------------------------------------------ 44 | val later = usingFile("out", f => (y: Int) => xs.foreach(x => f.write(x + y))) // error | ^^^^^^^^^ - | local reference f leaks into outer capture set of type parameter T of method usingFile + | local reference f leaks into outer capture set of type parameter T of method usingFile in object Test3 diff --git a/tests/neg-deep-subtype/interop-polytypes.scala b/tests/neg-deep-subtype/interop-polytypes.scala index 90922b63f7d0..987e4720bf13 100644 --- a/tests/neg-deep-subtype/interop-polytypes.scala +++ b/tests/neg-deep-subtype/interop-polytypes.scala @@ -1,4 +1,4 @@ -//> using options -Yexplicit-nulls +//> using options -Yexplicit-nulls -Yno-flexible-types class Foo { import java.util.ArrayList diff --git a/tests/neg-macros/annot-accessIndirect/Macro_1.scala b/tests/neg-macros/annot-accessIndirect/Macro_1.scala index 8679edcfc0c3..7f4136e10652 100644 --- a/tests/neg-macros/annot-accessIndirect/Macro_1.scala +++ b/tests/neg-macros/annot-accessIndirect/Macro_1.scala @@ -3,9 +3,9 @@ import scala.quoted._ @experimental class hello extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ val helloSymbol = Symbol.newVal(Symbol.spliceOwner, Symbol.freshName("hello"), TypeRepr.of[String], Flags.EmptyFlags, Symbol.noSymbol) val helloVal = ValDef(helloSymbol, Some(Literal(StringConstant("Hello, World!")))) - List(helloVal, tree) + List(helloVal, definition) } diff --git a/tests/neg-macros/annot-accessIndirect/Macro_2.scala b/tests/neg-macros/annot-accessIndirect/Macro_2.scala index d069175ce166..5fa2ba1aa2bf 100644 --- 
a/tests/neg-macros/annot-accessIndirect/Macro_2.scala +++ b/tests/neg-macros/annot-accessIndirect/Macro_2.scala @@ -3,16 +3,16 @@ import scala.quoted._ @experimental class foo extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ val s = '{@hello def foo1(x: Int): Int = x + 1;()}.asTerm val fooDef = s.asInstanceOf[Inlined].body.asInstanceOf[Block].statements.head.asInstanceOf[DefDef] val hello = Ref(Symbol.spliceOwner.declaredFields("hello").head).asExprOf[String] // error - tree match + definition match case DefDef(name, params, tpt, Some(t)) => val rhs = '{ ${t.asExprOf[String]} + $hello }.asTerm - val newDef = DefDef.copy(tree)(name, params, tpt, Some(rhs)) + val newDef = DefDef.copy(definition)(name, params, tpt, Some(rhs)) List(fooDef, newDef) } diff --git a/tests/neg-macros/annot-crash/Macro_1.scala b/tests/neg-macros/annot-crash/Macro_1.scala index f3d5b3f602f8..06fb08062181 100644 --- a/tests/neg-macros/annot-crash/Macro_1.scala +++ b/tests/neg-macros/annot-crash/Macro_1.scala @@ -3,6 +3,6 @@ import scala.quoted._ @experimental class crash extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = ??? 
} diff --git a/tests/neg-macros/annot-empty-result.check b/tests/neg-macros/annot-empty-result.check index 6d43c19664cb..5a0e637837e4 100644 --- a/tests/neg-macros/annot-empty-result.check +++ b/tests/neg-macros/annot-empty-result.check @@ -2,12 +2,12 @@ -- Error: tests/neg-macros/annot-empty-result/Test_2.scala:5:2 --------------------------------------------------------- 5 | @nilAnnot // error | ^^^^^^^^^ - | Unexpected `Nil` returned by `(new nilAnnot()).transform(..)` during macro expansion + | Transformed tree for method f2 was not return by `(new nilAnnot()).transform(..)` during macro expansion -- Error: tests/neg-macros/annot-empty-result/Test_2.scala:9:4 --------------------------------------------------------- 9 | @nilAnnot // error | ^^^^^^^^^ - | Unexpected `Nil` returned by `(new nilAnnot()).transform(..)` during macro expansion + | Transformed tree for method f3 was not return by `(new nilAnnot()).transform(..)` during macro expansion -- Error: tests/neg-macros/annot-empty-result/Test_2.scala:1:0 --------------------------------------------------------- 1 |@nilAnnot // error |^^^^^^^^^ - |Unexpected `Nil` returned by `(new nilAnnot()).transform(..)` during macro expansion + |Transformed tree for method f1 was not return by `(new nilAnnot()).transform(..)` during macro expansion diff --git a/tests/neg-macros/annot-empty-result/Macro_1.scala b/tests/neg-macros/annot-empty-result/Macro_1.scala index ff3be61c05d2..9b22027ce929 100644 --- a/tests/neg-macros/annot-empty-result/Macro_1.scala +++ b/tests/neg-macros/annot-empty-result/Macro_1.scala @@ -3,6 +3,6 @@ import scala.quoted._ @experimental class nilAnnot extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = Nil } diff --git a/tests/neg-macros/annot-error-annot/Macro_1.scala 
b/tests/neg-macros/annot-error-annot/Macro_1.scala index d54b69903e02..6597a11be7d2 100644 --- a/tests/neg-macros/annot-error-annot/Macro_1.scala +++ b/tests/neg-macros/annot-error-annot/Macro_1.scala @@ -3,7 +3,7 @@ import scala.quoted._ @experimental class error extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = - quotes.reflect.report.error("MACRO ERROR", tree.pos) - List(tree) + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = + quotes.reflect.report.error("MACRO ERROR", definition.pos) + List(definition) } diff --git a/tests/neg-macros/annot-ill-abort/Macro_1.scala b/tests/neg-macros/annot-ill-abort/Macro_1.scala index 446ce0a5331b..4689ffbe2f2c 100644 --- a/tests/neg-macros/annot-ill-abort/Macro_1.scala +++ b/tests/neg-macros/annot-ill-abort/Macro_1.scala @@ -3,6 +3,6 @@ import scala.quoted._ @experimental class crash extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = throw new scala.quoted.runtime.StopMacroExpansion } diff --git a/tests/neg-macros/annot-mod-class-add-top-method/Macro_1.scala b/tests/neg-macros/annot-mod-class-add-top-method/Macro_1.scala index b5c49695ad2a..7dc8cb2c4479 100644 --- a/tests/neg-macros/annot-mod-class-add-top-method/Macro_1.scala +++ b/tests/neg-macros/annot-mod-class-add-top-method/Macro_1.scala @@ -4,14 +4,14 @@ import scala.collection.mutable @experimental class addTopLevelMethod extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): 
List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => val methType = MethodType(Nil)(_ => Nil, _ => TypeRepr.of[Int]) val methSym = Symbol.newMethod(Symbol.spliceOwner, Symbol.freshName("toLevelMethod"), methType, Flags.EmptyFlags, Symbol.noSymbol) val methDef = DefDef(methSym, _ => Some(Literal(IntConstant(1)))) - List(methDef, tree) + List(methDef, definition) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/neg-macros/annot-mod-class-add-top-val/Macro_1.scala b/tests/neg-macros/annot-mod-class-add-top-val/Macro_1.scala index c6f21e181879..12443d329108 100644 --- a/tests/neg-macros/annot-mod-class-add-top-val/Macro_1.scala +++ b/tests/neg-macros/annot-mod-class-add-top-val/Macro_1.scala @@ -4,13 +4,13 @@ import scala.collection.mutable @experimental class addTopLevelVal extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => val valSym = Symbol.newVal(Symbol.spliceOwner, Symbol.freshName("toLevelVal"), TypeRepr.of[Int], Flags.EmptyFlags, Symbol.noSymbol) val valDef = ValDef(valSym, Some(Literal(IntConstant(1)))) - List(valDef, tree) + List(valDef, definition) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/neg-macros/annot-mod-top-method-add-top-method/Macro_1.scala b/tests/neg-macros/annot-mod-top-method-add-top-method/Macro_1.scala index 45679b65c03b..8609af376ad7 100644 --- a/tests/neg-macros/annot-mod-top-method-add-top-method/Macro_1.scala +++ b/tests/neg-macros/annot-mod-top-method-add-top-method/Macro_1.scala @@ -5,9 +5,9 @@ import 
scala.collection.mutable @experimental // Assumes annotation is on top level def or val class addTopLevelMethodOutsidePackageObject extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ val methType = MethodType(Nil)(_ => Nil, _ => TypeRepr.of[Int]) val methSym = Symbol.newMethod(Symbol.spliceOwner.owner, Symbol.freshName("toLevelMethod"), methType, Flags.EmptyFlags, Symbol.noSymbol) val methDef = DefDef(methSym, _ => Some(Literal(IntConstant(1)))) - List(methDef, tree) + List(methDef, definition) diff --git a/tests/neg-macros/annot-mod-top-method-add-top-val/Macro_1.scala b/tests/neg-macros/annot-mod-top-method-add-top-val/Macro_1.scala index c6c4c32afcb8..a9ae0efd76b4 100644 --- a/tests/neg-macros/annot-mod-top-method-add-top-val/Macro_1.scala +++ b/tests/neg-macros/annot-mod-top-method-add-top-val/Macro_1.scala @@ -5,8 +5,8 @@ import scala.collection.mutable @experimental // Assumes annotation is on top level def or val class addTopLevelValOutsidePackageObject extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ val valSym = Symbol.newVal(Symbol.spliceOwner.owner, Symbol.freshName("toLevelVal"), TypeRepr.of[Int], Flags.EmptyFlags, Symbol.noSymbol) val valDef = ValDef(valSym, Some(Literal(IntConstant(1)))) - List(valDef, tree) + List(valDef, definition) diff --git a/tests/neg-macros/annot-on-type/Macro_1.scala b/tests/neg-macros/annot-on-type/Macro_1.scala index 7468c5a200a6..631a5bcc201d 100644 --- a/tests/neg-macros/annot-on-type/Macro_1.scala +++ 
b/tests/neg-macros/annot-on-type/Macro_1.scala @@ -3,6 +3,6 @@ import scala.quoted._ @experimental class voidAnnot extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = - List(tree) + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = + List(definition) } diff --git a/tests/neg-macros/annot-result-owner/Macro_1.scala b/tests/neg-macros/annot-result-owner/Macro_1.scala index 34f7541f726b..af3d8dbb6656 100644 --- a/tests/neg-macros/annot-result-owner/Macro_1.scala +++ b/tests/neg-macros/annot-result-owner/Macro_1.scala @@ -3,9 +3,9 @@ import scala.quoted._ @experimental class insertVal extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ // Use of wrong owner - val valSym = Symbol.newVal(tree.symbol, Symbol.freshName("definitionWithWrongOwner"), TypeRepr.of[Unit], Flags.Private, Symbol.noSymbol) + val valSym = Symbol.newVal(definition.symbol, Symbol.freshName("definitionWithWrongOwner"), TypeRepr.of[Unit], Flags.Private, Symbol.noSymbol) val valDef = ValDef(valSym, Some('{}.asTerm)) - List(valDef, tree) + List(valDef, definition) diff --git a/tests/neg-macros/annot-suspend-cycle.check b/tests/neg-macros/annot-suspend-cycle.check index 437398f1d668..7b87943be2af 100644 --- a/tests/neg-macros/annot-suspend-cycle.check +++ b/tests/neg-macros/annot-suspend-cycle.check @@ -1,9 +1,9 @@ --- [E129] Potential Issue Warning: tests/neg-macros/annot-suspend-cycle/Macro.scala:7:4 -------------------------------- -7 | new Foo - | ^^^^^^^ - | A pure expression does nothing in statement position - | - | longer explanation available when compiling with `-explain` +-- [E129] 
Potential Issue Warning: tests/neg-macros/annot-suspend-cycle/Macro.scala:10:6 ------------------------------- +10 | new Foo + | ^^^^^^^ + | A pure expression does nothing in statement position + | + | longer explanation available when compiling with `-explain` Cyclic macro dependencies in tests/neg-macros/annot-suspend-cycle/Test.scala. Compilation stopped since no further progress can be made. diff --git a/tests/neg-macros/annot-suspend-cycle/Macro.scala b/tests/neg-macros/annot-suspend-cycle/Macro.scala index 4143e2c32062..9106c3e2a37e 100644 --- a/tests/neg-macros/annot-suspend-cycle/Macro.scala +++ b/tests/neg-macros/annot-suspend-cycle/Macro.scala @@ -3,7 +3,11 @@ import scala.quoted._ @experimental class cycle extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = - new Foo - List(tree) + def transform(using Quotes)( + definition: quotes.reflect.Definition, + companion: Option[quotes.reflect.Definition] + ): List[quotes.reflect.Definition] = + new Foo + List(definition) + end transform } diff --git a/tests/neg-macros/i11795.scala b/tests/neg-macros/i11795.scala deleted file mode 100644 index 2a7f89831e0e..000000000000 --- a/tests/neg-macros/i11795.scala +++ /dev/null @@ -1,10 +0,0 @@ -import scala.quoted._ -import scala.deriving._ - -def blah[P <: Product] - (m: Mirror.ProductOf[P]) - (using Quotes, Type[m.MirroredElemLabels], Type[m.MirroredElemTypes]) = { - type z = Tuple.Zip[m.MirroredElemLabels, m.MirroredElemTypes] - Type.of[z] // error - () -} diff --git a/tests/neg-macros/i14772.check b/tests/neg-macros/i14772.check index 94b4a3445b01..5c1836811b03 100644 --- a/tests/neg-macros/i14772.check +++ b/tests/neg-macros/i14772.check @@ -5,6 +5,7 @@ | | The error occurred while trying to compute the signature of method $anonfun | which required to compute the signature of method impl + | which required to type the right hand side of method impl since no explicit type was given | | Run with 
both -explain-cyclic and -Ydebug-cyclic to see full stack trace. | diff --git a/tests/neg-macros/i16582.check b/tests/neg-macros/i16582.check index 548a4491ed24..546d0b7efaf3 100644 --- a/tests/neg-macros/i16582.check +++ b/tests/neg-macros/i16582.check @@ -6,7 +6,9 @@ | dotty.tools.dotc.core.CyclicReference: Recursive value o2 needs type | | The error occurred while trying to compute the signature of method test + | which required to type the right hand side of method test since no explicit type was given | which required to compute the signature of value o2 + | which required to type the right hand side of value o2 since no explicit type was given | which required to compute the signature of value o2 | | Run with both -explain-cyclic and -Ydebug-cyclic to see full stack trace. diff --git a/tests/neg-macros/i18677-a.check b/tests/neg-macros/i18677-a.check index 963affc47181..d190ce36318a 100644 --- a/tests/neg-macros/i18677-a.check +++ b/tests/neg-macros/i18677-a.check @@ -7,10 +7,10 @@ |The tree does not conform to the compiler's tree invariants. 
| |Macro was: - |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-a/Test_2.scala") @scala.annotation.experimental @extendFoo class AFoo() + |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-a/Test_2.scala") @scala.annotation.experimental("Added by -experimental") @extendFoo class AFoo() | |The macro returned: - |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-a/Test_2.scala") @scala.annotation.experimental @extendFoo class AFoo() extends Foo + |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-a/Test_2.scala") @scala.annotation.experimental("Added by -experimental") @extendFoo class AFoo() extends Foo | |Error: |assertion failed: Parents of class symbol differs from the parents in the tree for class AFoo diff --git a/tests/neg-macros/i18677-a/Macro_1.scala b/tests/neg-macros/i18677-a/Macro_1.scala index c3df616ed4e6..69f2a521e012 100644 --- a/tests/neg-macros/i18677-a/Macro_1.scala +++ b/tests/neg-macros/i18677-a/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import annotation.MacroAnnotation import quoted.* @@ -6,13 +6,17 @@ import quoted.* trait Foo class extendFoo extends MacroAnnotation : - override def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = - import quotes.reflect.* - tree match - case ClassDef(name, ctr, p, self, body) => - val parents = List(TypeTree.of[Foo]) - val newTree = ClassDef.copy(tree)(name, ctr, parents, self, body) - newTree :: Nil - case _ => - report.error("@extendFoo can only annotate class definitions") - tree :: Nil \ No newline at end of file + def transform(using Quotes)( + definition: quotes.reflect.Definition, + companion: Option[quotes.reflect.Definition] + ): List[quotes.reflect.Definition] = + import quotes.reflect.* + definition match + case ClassDef(name, ctr, p, self, body) => + val parents = List(TypeTree.of[Foo]) + val newTree = 
ClassDef.copy(definition)(name, ctr, parents, self, body) + newTree :: Nil + case _ => + report.error("@extendFoo can only annotate class definitions") + definition :: Nil + end transform diff --git a/tests/neg-macros/i18677-a/Test_2.scala b/tests/neg-macros/i18677-a/Test_2.scala index 5fb6680df0ca..511cad28f4fb 100644 --- a/tests/neg-macros/i18677-a/Test_2.scala +++ b/tests/neg-macros/i18677-a/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @extendFoo class AFoo // error \ No newline at end of file diff --git a/tests/neg-macros/i18677-b.check b/tests/neg-macros/i18677-b.check index b8fecde2e36d..eca2bdcde726 100644 --- a/tests/neg-macros/i18677-b.check +++ b/tests/neg-macros/i18677-b.check @@ -7,10 +7,10 @@ |The tree does not conform to the compiler's tree invariants. | |Macro was: - |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-b/Test_2.scala") @scala.annotation.experimental @extendFoo class AFoo() + |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-b/Test_2.scala") @scala.annotation.experimental("Added by -experimental") @extendFoo class AFoo() | |The macro returned: - |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-b/Test_2.scala") @scala.annotation.experimental @extendFoo class AFoo() extends Foo + |@scala.annotation.internal.SourceFile("tests/neg-macros/i18677-b/Test_2.scala") @scala.annotation.experimental("Added by -experimental") @extendFoo class AFoo() extends Foo | |Error: |assertion failed: Parents of class symbol differs from the parents in the tree for class AFoo diff --git a/tests/neg-macros/i18677-b/Macro_1.scala b/tests/neg-macros/i18677-b/Macro_1.scala index 9e1b9be5e696..02c683b00b4b 100644 --- a/tests/neg-macros/i18677-b/Macro_1.scala +++ b/tests/neg-macros/i18677-b/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import annotation.MacroAnnotation import quoted.* @@ 
-6,13 +6,17 @@ import quoted.* class Foo class extendFoo extends MacroAnnotation : - override def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = - import quotes.reflect.* - tree match - case ClassDef(name, ctr, p, self, body) => - val parents = List(TypeTree.of[Foo]) - val newTree = ClassDef.copy(tree)(name, ctr, parents, self, body) - newTree :: Nil - case _ => - report.error("@extendFoo can only annotate class definitions") - tree :: Nil \ No newline at end of file + def transform(using Quotes)( + definition: quotes.reflect.Definition, + companion: Option[quotes.reflect.Definition] + ): List[quotes.reflect.Definition] = + import quotes.reflect.* + definition match + case ClassDef(name, ctr, p, self, body) => + val parents = List(TypeTree.of[Foo]) + val newTree = ClassDef.copy(definition)(name, ctr, parents, self, body) + newTree :: Nil + case _ => + report.error("@extendFoo can only annotate class definitions") + definition :: Nil + end transform diff --git a/tests/neg-macros/i18677-b/Test_2.scala b/tests/neg-macros/i18677-b/Test_2.scala index 5fb6680df0ca..511cad28f4fb 100644 --- a/tests/neg-macros/i18677-b/Test_2.scala +++ b/tests/neg-macros/i18677-b/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @extendFoo class AFoo // error \ No newline at end of file diff --git a/tests/neg-macros/i18825/Macro_1.scala b/tests/neg-macros/i18825/Macro_1.scala index c099954f3858..adeb320c1403 100644 --- a/tests/neg-macros/i18825/Macro_1.scala +++ b/tests/neg-macros/i18825/Macro_1.scala @@ -4,16 +4,16 @@ import scala.quoted.* @experimental class toString extends MacroAnnotation : - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect.* - tree match + 
definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val toStringSym = Symbol.requiredMethod("java.lang.Object.toString") val toStringOverrideSym = Symbol.newMethod(cls, "toString", toStringSym.info, Flags.Override, Symbol.noSymbol) val toStringDef = DefDef(toStringOverrideSym, _ => Some(Literal(StringConstant("Hello from macro")))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, toStringDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, toStringDef :: body) List(newClassDef) case _ => report.error("@toString can only be annotated on class definitions") - tree :: Nil + definition :: Nil diff --git a/tests/neg-macros/i19676.check b/tests/neg-macros/i19676.check new file mode 100644 index 000000000000..ca6b89a55087 --- /dev/null +++ b/tests/neg-macros/i19676.check @@ -0,0 +1,5 @@ + +-- Error: tests/neg-macros/i19676/Test_2.scala:3:0 --------------------------------------------------------------------- +3 |@buggy // error + |^^^^^^ + |Transformed companion for class Foo was returned more than once by `(new buggy()).transform(..)` during macro expansion diff --git a/tests/neg-macros/i19676/Macro_1.scala b/tests/neg-macros/i19676/Macro_1.scala new file mode 100644 index 000000000000..44988760f535 --- /dev/null +++ b/tests/neg-macros/i19676/Macro_1.scala @@ -0,0 +1,19 @@ +//> using options -experimental + +import scala.annotation.MacroAnnotation +import scala.quoted.* + +class buggy extends MacroAnnotation: + + def transform(using Quotes) + (definition: quotes.reflect.Definition, + companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = + + import quotes.reflect.* + companion match + case Some(companion) => + List(definition, companion, companion) + case None => + report.error("The goal of this test is to return the companion more than once to trigger a compilation error") + List(definition) + end transform \ No newline at end of 
file diff --git a/tests/neg-macros/i19676/Test_2.scala b/tests/neg-macros/i19676/Test_2.scala new file mode 100644 index 000000000000..b80589adaf35 --- /dev/null +++ b/tests/neg-macros/i19676/Test_2.scala @@ -0,0 +1,4 @@ +//> using options -experimental + +@buggy // error +case class Foo() \ No newline at end of file diff --git a/tests/neg-macros/i19842-a.check b/tests/neg-macros/i19842-a.check index af5dbc604f93..30b295cd05a5 100644 --- a/tests/neg-macros/i19842-a.check +++ b/tests/neg-macros/i19842-a.check @@ -8,7 +8,7 @@ |Parents in tree: [trait Serializer] | | at scala.runtime.Scala3RunTime$.assertFailed(Scala3RunTime.scala:8) - | at dotty.tools.dotc.transform.TreeChecker$.checkParents(TreeChecker.scala:209) + | at dotty.tools.dotc.transform.TreeChecker$.checkParents(TreeChecker.scala:210) | at scala.quoted.runtime.impl.QuotesImpl$reflect$ClassDef$.module(QuotesImpl.scala:257) | at scala.quoted.runtime.impl.QuotesImpl$reflect$ClassDef$.module(QuotesImpl.scala:256) | at Macros$.makeSerializer(Macro.scala:25) diff --git a/tests/neg-macros/i19842-a/Macro.scala b/tests/neg-macros/i19842-a/Macro.scala index 936e08b02592..18a1bc16045f 100644 --- a/tests/neg-macros/i19842-a/Macro.scala +++ b/tests/neg-macros/i19842-a/Macro.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, targetName} import scala.quoted.* diff --git a/tests/neg-macros/i19842-b.check b/tests/neg-macros/i19842-b.check index 95a71fb9f98d..d84d916acb66 100644 --- a/tests/neg-macros/i19842-b.check +++ b/tests/neg-macros/i19842-b.check @@ -8,7 +8,7 @@ |Parents in tree: [class Object, trait Serializer, trait Foo] | | at scala.runtime.Scala3RunTime$.assertFailed(Scala3RunTime.scala:8) - | at dotty.tools.dotc.transform.TreeChecker$.checkParents(TreeChecker.scala:209) + | at dotty.tools.dotc.transform.TreeChecker$.checkParents(TreeChecker.scala:210) | at 
scala.quoted.runtime.impl.QuotesImpl$reflect$ClassDef$.module(QuotesImpl.scala:257) | at scala.quoted.runtime.impl.QuotesImpl$reflect$ClassDef$.module(QuotesImpl.scala:256) | at Macros$.makeSerializer(Macro.scala:27) diff --git a/tests/neg-macros/i19842-b/Macro.scala b/tests/neg-macros/i19842-b/Macro.scala index d46ff758342b..f1399d328f49 100644 --- a/tests/neg-macros/i19842-b/Macro.scala +++ b/tests/neg-macros/i19842-b/Macro.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, targetName} import scala.quoted.* diff --git a/tests/neg-macros/macro-experimental.scala b/tests/neg-macros/macro-experimental.scala index eaa822d4b541..6dc552d74102 100644 --- a/tests/neg-macros/macro-experimental.scala +++ b/tests/neg-macros/macro-experimental.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.quoted.* import scala.annotation.experimental diff --git a/tests/neg-macros/newClassExtendsNoParents/Macro_1.scala b/tests/neg-macros/newClassExtendsNoParents/Macro_1.scala index 663e5e959e87..0a64bf320b72 100644 --- a/tests/neg-macros/newClassExtendsNoParents/Macro_1.scala +++ b/tests/neg-macros/newClassExtendsNoParents/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git a/tests/neg-macros/newClassExtendsNoParents/Test_2.scala b/tests/neg-macros/newClassExtendsNoParents/Test_2.scala index 733879441e91..da8b3d84b522 100644 --- a/tests/neg-macros/newClassExtendsNoParents/Test_2.scala +++ b/tests/neg-macros/newClassExtendsNoParents/Test_2.scala @@ -1,3 +1,3 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental def test: Any = makeClass("foo") // error diff --git a/tests/neg-macros/newClassExtendsOnlyTrait/Macro_1.scala b/tests/neg-macros/newClassExtendsOnlyTrait/Macro_1.scala index a6a4753185ba..6ced5bf34462 100644 --- 
a/tests/neg-macros/newClassExtendsOnlyTrait/Macro_1.scala +++ b/tests/neg-macros/newClassExtendsOnlyTrait/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git a/tests/neg-macros/newClassExtendsOnlyTrait/Test_2.scala b/tests/neg-macros/newClassExtendsOnlyTrait/Test_2.scala index b9aaa2a87c60..7bb465eae617 100644 --- a/tests/neg-macros/newClassExtendsOnlyTrait/Test_2.scala +++ b/tests/neg-macros/newClassExtendsOnlyTrait/Test_2.scala @@ -1,3 +1,3 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental def test: Foo = makeClass("foo") // error diff --git a/tests/neg-macros/toexproftuple.scala b/tests/neg-macros/toexproftuple.scala index 7b69c578be70..f33bfd5f6dfb 100644 --- a/tests/neg-macros/toexproftuple.scala +++ b/tests/neg-macros/toexproftuple.scala @@ -1,22 +1,10 @@ import scala.quoted._, scala.deriving.* inline def mcr: Any = ${mcrImpl} - def mcrImpl(using ctx: Quotes): Expr[Any] = { - val tpl: (Expr[1], Expr[2], Expr[3]) = ('{1}, '{2}, '{3}) '{val res: (1, 3, 3) = ${Expr.ofTuple(tpl)}; res} // error - // ^^^^^^^^^^^^^^^^^ - // Found: quoted.Expr[(1 : Int) *: (2 : Int) *: (3 : Int) *: EmptyTuple] - // Required: quoted.Expr[((1 : Int), (3 : Int), (3 : Int))] val tpl2: (Expr[1], 2, Expr[3]) = ('{1}, 2, '{3}) '{val res = ${Expr.ofTuple(tpl2)}; res} // error - // ^ - // Cannot prove that (quoted.Expr[(1 : Int)], (2 : Int), quoted.Expr[(3 : Int)]) =:= scala.Tuple.Map[ - // scala.Tuple.InverseMap[ - // (quoted.Expr[(1 : Int)], (2 : Int), quoted.Expr[(3 : Int)]) - // , quoted.Expr] - // , quoted.Expr]. 
- } diff --git a/tests/neg-macros/wrong-owner.check b/tests/neg-macros/wrong-owner.check index ca8751d0fe1c..26316d3fc687 100644 --- a/tests/neg-macros/wrong-owner.check +++ b/tests/neg-macros/wrong-owner.check @@ -17,6 +17,6 @@ | |Error: |assertion failed: bad owner; method toString has owner class String, expected was class Foo - |owner chain = method toString, class String, package java.lang, package java, package , ctxOwners = class Foo, class Foo, package , package , package , package , package , package , package , package , package , , , , , + |owner chain = method toString, class String, package java.lang, package java, package , ctxOwners = class Foo, class Foo, package , package , package , package , package , package , package , package , package , package , , , , , | |stacktrace available when compiling with `-Ydebug` diff --git a/tests/neg-macros/wrong-owner/Macro_1.scala b/tests/neg-macros/wrong-owner/Macro_1.scala index 85127b701f81..2e101f849802 100644 --- a/tests/neg-macros/wrong-owner/Macro_1.scala +++ b/tests/neg-macros/wrong-owner/Macro_1.scala @@ -4,16 +4,16 @@ import scala.quoted.* @experimental class wrongOwner extends MacroAnnotation : - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect.* - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val toStringSym = Symbol.requiredMethod("java.lang.Object.toString") val toStringOverrideSym = Symbol.newMethod(Symbol.classSymbol("java.lang.String"), "toString", toStringSym.info, Flags.Override, Symbol.noSymbol) val toStringDef = DefDef(toStringOverrideSym, _ => Some(Literal(StringConstant("Hello from macro")))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, toStringDef :: body) + val 
newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, toStringDef :: body) List(newClassDef) case _ => report.error("@toString can only be annotated on class definitions") - tree :: Nil + definition :: Nil diff --git a/tests/neg-scalajs/jsexport-on-non-toplevel-class-object.scala b/tests/neg-scalajs/jsexport-on-non-toplevel-class-object.scala deleted file mode 100644 index 7d127a5654ae..000000000000 --- a/tests/neg-scalajs/jsexport-on-non-toplevel-class-object.scala +++ /dev/null @@ -1,30 +0,0 @@ -import scala.scalajs.js -import scala.scalajs.js.annotation.* - -class A { - @JSExport // error - class A1 { - @JSExport // error - def this(x: Int) = this() - } - - @JSExport // error - class A2 extends js.Object - - @JSExport // error - object A3 - - @JSExport // error - object A4 extends js.Object -} - -object B { - @JSExport // error - class B1 { - @JSExport // error - def this(x: Int) = this() - } - - @JSExport // error - class B2 extends js.Object -} diff --git a/tests/neg/10349.scala b/tests/neg/10349.scala index 4ea683f6a8fb..b591c1a79abb 100644 --- a/tests/neg/10349.scala +++ b/tests/neg/10349.scala @@ -4,7 +4,7 @@ object Firsts: case Map[_, v] => First[Option[v]] def first[X](x: X): First[X] = x match - case x: Map[_, _] => first(x.values.headOption) // error + case x: Map[_, _] => first(x.values.headOption) @main def runFirsts2(): Unit = diff --git a/tests/neg/10747.scala b/tests/neg/10747.scala index a299f2a6590c..5275ebc84121 100644 --- a/tests/neg/10747.scala +++ b/tests/neg/10747.scala @@ -2,4 +2,5 @@ type Foo[A] = A match { case Int => String } -type B = Foo[Boolean] // error +type B = Foo[Boolean] +val _: B = "hello" // error diff --git a/tests/neg/14034.scala b/tests/neg/14034.scala index bdb09a011777..bbb633726523 100644 --- a/tests/neg/14034.scala +++ b/tests/neg/14034.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental diff --git a/tests/neg/16463.scala b/tests/neg/16463.scala new file mode 100644 
index 000000000000..80a84cf02bc8 --- /dev/null +++ b/tests/neg/16463.scala @@ -0,0 +1,43 @@ +//> using scala "3.2.1" + +import scala.compiletime.ops.int._ + +object TupleOps { + import Tuple._ + + type Reduce[T <: NonEmptyTuple, F[_, _]] = + Fold[Tuple.Tail[T], Tuple.Head[T], F] + + type Maximum[T <: NonEmptyTuple] = Reduce[ + T, + [A, B] =>> (A, B) match { + case (Int, Int) => A `Max` B + } + ] + + type IndexOfRec[T <: Tuple, Elem, I <: Int] = Tuple.Elem[T, I] match { + case Elem => I + case _ => IndexOfRec[T, Elem, I + 1] + } + + type IndexOf[T <: Tuple, Elem] = IndexOfRec[T, Elem, 0] + + type DropLargest[T <: NonEmptyTuple] = + T `IndexOf` Maximum[T] match { + case Int => + ( + (T `Take` (T `IndexOf` Maximum[T])) `Concat` + (T `Drop` ((T `IndexOf` Maximum[T]) + 1)) + ) *: EmptyTuple + } + + type BubbleSort[T <: Tuple] = T match { + case EmptyTuple => EmptyTuple + case NonEmptyTuple => + BubbleSort[DropLargest[T]] `Concat` (Maximum[T] *: EmptyTuple) + } +} + +object demo extends App { + println(compiletime.constValue[TupleOps.BubbleSort[(1, 2)]]) // error: Recursion limit exceeded +} diff --git a/tests/neg/19414-desugared.check b/tests/neg/19414-desugared.check new file mode 100644 index 000000000000..c21806e16c2c --- /dev/null +++ b/tests/neg/19414-desugared.check @@ -0,0 +1,14 @@ +-- [E172] Type Error: tests/neg/19414-desugared.scala:22:34 ------------------------------------------------------------ +22 | summon[BodySerializer[JsObject]] // error: Ambiguous given instances + | ^ + |No best given instance of type BodySerializer[JsObject] was found for parameter x of method summon in object Predef. 
+ |I found: + | + | given_BodySerializer_B[B]( + | writer = + | /* ambiguous: both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B] */ + | summon[Writer[B]] + | , + | this.given_BodySerializer_B$default$2[B]) + | + |But both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B]. diff --git a/tests/neg/19414-desugared.scala b/tests/neg/19414-desugared.scala new file mode 100644 index 000000000000..9fc16e2249a2 --- /dev/null +++ b/tests/neg/19414-desugared.scala @@ -0,0 +1,22 @@ +trait JsValue +trait JsObject extends JsValue + +trait Writer[T] +trait BodySerializer[-B] + +class Printer + +given Writer[JsValue] = ??? +given Writer[JsObject] = ??? + +// This is not an exact desugaring of the original code: currently the compiler +// actually changes the modifier of the parameter list from `using` to +// `implicit` when desugaring the context-bound `B: Writer` to `implicit writer: +// Writer[B]`, but we can't write it in user code as this is not valid syntax. +given [B](using + writer: Writer[B], + printer: Printer = new Printer +): BodySerializer[B] = ??? + +def f: Unit = + summon[BodySerializer[JsObject]] // error: Ambiguous given instances diff --git a/tests/neg/19414.check b/tests/neg/19414.check new file mode 100644 index 000000000000..6804546df037 --- /dev/null +++ b/tests/neg/19414.check @@ -0,0 +1,14 @@ +-- [E172] Type Error: tests/neg/19414.scala:15:34 ---------------------------------------------------------------------- +15 | summon[BodySerializer[JsObject]] // error: Ambiguous given instances + | ^ + |No best given instance of type BodySerializer[JsObject] was found for parameter x of method summon in object Predef. 
+ |I found: + | + | given_BodySerializer_B[B]( + | evidence$1 = + | /* ambiguous: both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B] */ + | summon[Writer[B]] + | , + | this.given_BodySerializer_B$default$2[B]) + | + |But both given instance given_Writer_JsValue and given instance given_Writer_JsObject match type Writer[B]. diff --git a/tests/neg/19414.scala b/tests/neg/19414.scala new file mode 100644 index 000000000000..bb275ad943b7 --- /dev/null +++ b/tests/neg/19414.scala @@ -0,0 +1,15 @@ +trait JsValue +trait JsObject extends JsValue + +trait Writer[T] +trait BodySerializer[-B] + +class Printer + +given Writer[JsValue] = ??? +given Writer[JsObject] = ??? + +given [B: Writer](using printer: Printer = new Printer): BodySerializer[B] = ??? + +def f: Unit = + summon[BodySerializer[JsObject]] // error: Ambiguous given instances diff --git a/tests/neg/19680.check b/tests/neg/19680.check new file mode 100644 index 000000000000..5bdaaad99c2a --- /dev/null +++ b/tests/neg/19680.check @@ -0,0 +1,23 @@ +-- [E007] Type Mismatch Error: tests/neg/19680.scala:9:67 -------------------------------------------------------------- +9 |def renderWidget(using Config): Unit = renderWebsite("/tmp")(Config()) // error: found Config, required Int + | ^^^^^^^^ + | Found: Config + | Required: Int + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | Tree: new Config() + | I tried to show that + | Config + | conforms to + | Int + | but none of the attempts shown below succeeded: + | + | ==> Config <: Int = false + | + | The tests were made under the empty constraint + | + | The required type comes from a parameter of the automatically + | inserted `apply` method of `scala.collection.StringOps`. 
+ --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/19680.scala b/tests/neg/19680.scala new file mode 100644 index 000000000000..57fdd851dc54 --- /dev/null +++ b/tests/neg/19680.scala @@ -0,0 +1,9 @@ +//> using options -explain + +// Tests that the error message indicates that the required type `Int` comes +// from the automatically inserted `apply` method of `String`. This note is +// inserted by `insertedApplyNote` in `Applications`. + +class Config() +def renderWebsite(path: String)(using config: Config): String = ??? +def renderWidget(using Config): Unit = renderWebsite("/tmp")(Config()) // error: found Config, required Int diff --git a/tests/neg/19680b.check b/tests/neg/19680b.check new file mode 100644 index 000000000000..06ff26ee3289 --- /dev/null +++ b/tests/neg/19680b.check @@ -0,0 +1,24 @@ +-- [E007] Type Mismatch Error: tests/neg/19680b.scala:2:21 ------------------------------------------------------------- +2 |def Test = List(1,2)("hello") // error: found String, required Int + | ^^^^^^^ + | Found: ("hello" : String) + | Required: Int + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | + | Tree: "hello" + | I tried to show that + | ("hello" : String) + | conforms to + | Int + | but none of the attempts shown below succeeded: + | + | ==> ("hello" : String) <: Int + | ==> String <: Int = false + | + | The tests were made under the empty constraint + | + | The required type comes from a parameter of the automatically + | inserted `apply` method of `List[Int]`. 
+ --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/19680b.scala b/tests/neg/19680b.scala new file mode 100644 index 000000000000..a089d23e6a32 --- /dev/null +++ b/tests/neg/19680b.scala @@ -0,0 +1,2 @@ +//> using options -explain +def Test = List(1,2)("hello") // error: found String, required Int diff --git a/tests/neg/binaryLiterals.scala b/tests/neg/binaryLiterals.scala new file mode 100644 index 000000000000..5d5f0b4986fc --- /dev/null +++ b/tests/neg/binaryLiterals.scala @@ -0,0 +1,8 @@ + +object Test: + val x = 0b1__0000_0000_0000_0000__0000_0000_0000_0000 // error: number too large + val X = 0B1__0000_0000_0000_0000__0000_0000_0000_0000 // error: number too large + val y = 0b1__0000_0000_0000_0000__0000_0000_0000_0000__0000_0000_0000_0000__0000_0000_0000_0000L // error: number too large + val Y = 0B1__0000_0000_0000_0000__0000_0000_0000_0000__0000_0000_0000_0000__0000_0000_0000_0000L // error: number too large + 0b // error: invalid literal number + 0b2 // error: invalid literal number diff --git a/tests/neg/cb-companion-leaks.check b/tests/neg/cb-companion-leaks.check new file mode 100644 index 000000000000..560561e0e261 --- /dev/null +++ b/tests/neg/cb-companion-leaks.check @@ -0,0 +1,66 @@ +-- [E195] Type Error: tests/neg/cb-companion-leaks.scala:9:23 ---------------------------------------------------------- +9 | def foo[A: {C, D}] = A // error + | ^ + | context bound companion value A cannot be used as a value + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A context bound companion is a symbol made up by the compiler to represent the + | witness or witnesses generated for the context bound(s) of a type parameter or type. 
+ | For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + | there is just a type `A` declared but not a value `A`. Nevertheless, one can write + | the selection `A.unit`, which works because the compiler created a context bound + | companion value with the (term-)name `A`. However, these context bound companions + | are not values themselves, they can only be referred to in selections. + --------------------------------------------------------------------------------------------------------------------- +-- [E195] Type Error: tests/neg/cb-companion-leaks.scala:13:10 --------------------------------------------------------- +13 | val x = A // error + | ^ + | context bound companion value A cannot be used as a value + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A context bound companion is a symbol made up by the compiler to represent the + | witness or witnesses generated for the context bound(s) of a type parameter or type. + | For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + | there is just a type `A` declared but not a value `A`. Nevertheless, one can write + | the selection `A.unit`, which works because the compiler created a context bound + | companion value with the (term-)name `A`. However, these context bound companions + | are not values themselves, they can only be referred to in selections. + -------------------------------------------------------------------------------------------------------------------- +-- [E195] Type Error: tests/neg/cb-companion-leaks.scala:15:9 ---------------------------------------------------------- +15 | val y: A.type = ??? 
// error + | ^ + | context bound companion value A cannot be used as a value + |-------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | A context bound companion is a symbol made up by the compiler to represent the + | witness or witnesses generated for the context bound(s) of a type parameter or type. + | For instance, in + | + | class Monoid extends SemiGroup: + | type Self + | def unit: Self + | + | type A: Monoid + | + | there is just a type `A` declared but not a value `A`. Nevertheless, one can write + | the selection `A.unit`, which works because the compiler created a context bound + | companion value with the (term-)name `A`. However, these context bound companions + | are not values themselves, they can only be referred to in selections. + -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/cb-companion-leaks.scala b/tests/neg/cb-companion-leaks.scala new file mode 100644 index 000000000000..07155edb05dc --- /dev/null +++ b/tests/neg/cb-companion-leaks.scala @@ -0,0 +1,16 @@ +//> using options -language:experimental.modularity -source future -explain + +class C[Self] + +class D[Self] + +trait Test: + + def foo[A: {C, D}] = A // error + + type A: C + + val x = A // error + + val y: A.type = ??? 
// error + diff --git a/tests/neg/cyclic.check b/tests/neg/cyclic.check index 19eedac04f1c..d9afb91975f8 100644 --- a/tests/neg/cyclic.check +++ b/tests/neg/cyclic.check @@ -4,9 +4,13 @@ | Overloaded or recursive method f needs return type | | The error occurred while trying to compute the signature of method f + | which required to type the right hand side of method f since no explicit type was given | which required to compute the signature of method g + | which required to type the right hand side of method g since no explicit type was given | which required to compute the signature of method h + | which required to type the right hand side of method h since no explicit type was given | which required to compute the signature of method i + | which required to type the right hand side of method i since no explicit type was given | which required to compute the signature of method f | | Run with both -explain-cyclic and -Ydebug-cyclic to see full stack trace. diff --git a/tests/neg/deferred-givens-2.check b/tests/neg/deferred-givens-2.check new file mode 100644 index 000000000000..4a29141cc48b --- /dev/null +++ b/tests/neg/deferred-givens-2.check @@ -0,0 +1,12 @@ +-- [E172] Type Error: tests/neg/deferred-givens-2.scala:17:6 ----------------------------------------------------------- +17 |class SortedIntWrong1 extends Sorted: // error + |^ + |No given instance of type Ord{type Self = SortedIntWrong1.this.Element} was found for inferring the implementation of the deferred given instance given_Ord_Element in trait Sorted +18 | type Element = Int +19 | override given (Element is Ord)() +-- [E172] Type Error: tests/neg/deferred-givens-2.scala:21:6 ----------------------------------------------------------- +21 |class SortedIntWrong2 extends Sorted: // error + |^ + |No given instance of type Ord{type Self = SortedIntWrong2.this.Element} was found for inferring the implementation of the deferred given instance given_Ord_Element in trait Sorted +22 | type Element = Int 
+23 | override given (Int is Ord)() diff --git a/tests/neg/deferred-givens-2.scala b/tests/neg/deferred-givens-2.scala new file mode 100644 index 000000000000..4e75ceb08728 --- /dev/null +++ b/tests/neg/deferred-givens-2.scala @@ -0,0 +1,23 @@ +//> using options -language:experimental.modularity -source future +trait Ord: + type Self + +trait Sorted: + type Element: Ord + +object Scoped: + given (Int is Ord)() + class SortedIntCorrect extends Sorted: + type Element = Int + +class SortedIntCorrect2 extends Sorted: + type Element = Int + override given (Int is Ord)() as given_Ord_Element + +class SortedIntWrong1 extends Sorted: // error + type Element = Int + override given (Element is Ord)() + +class SortedIntWrong2 extends Sorted: // error + type Element = Int + override given (Int is Ord)() \ No newline at end of file diff --git a/tests/neg/deferred-givens.check b/tests/neg/deferred-givens.check new file mode 100644 index 000000000000..cc15901d087f --- /dev/null +++ b/tests/neg/deferred-givens.check @@ -0,0 +1,13 @@ +-- [E172] Type Error: tests/neg/deferred-givens.scala:11:6 ------------------------------------------------------------- +11 |class B extends A // error + |^^^^^^^^^^^^^^^^^ + |No given instance of type Ctx was found for inferring the implementation of the deferred given instance ctx in trait A +-- [E172] Type Error: tests/neg/deferred-givens.scala:13:15 ------------------------------------------------------------ +13 |abstract class C extends A // error + |^^^^^^^^^^^^^^^^^^^^^^^^^^ + |No given instance of type Ctx was found for inferring the implementation of the deferred given instance ctx in trait A +-- Error: tests/neg/deferred-givens.scala:26:8 ------------------------------------------------------------------------- +26 | class E extends A2 // error, can't summon polymorphic given + | ^^^^^^^^^^^^^^^^^^ + | Cannnot infer the implementation of the deferred given instance given_Ctx3_T in trait A2 + | since that given is parameterized. 
An implementing given needs to be written explicitly. diff --git a/tests/neg/deferred-givens.scala b/tests/neg/deferred-givens.scala new file mode 100644 index 000000000000..7ff67d784714 --- /dev/null +++ b/tests/neg/deferred-givens.scala @@ -0,0 +1,30 @@ +//> using options -language:experimental.modularity -source future +import compiletime.deferred + +class Ctx +class Ctx2 + +trait A: + given Ctx as ctx = deferred + given Ctx2 = deferred + +class B extends A // error + +abstract class C extends A // error + +class D extends A: + given Ctx as ctx = Ctx() // ok, was implemented + given Ctx2 = Ctx2() // ok + +class Ctx3[T] + +trait A2: + given [T] => Ctx3[T] = deferred + +object O: + given [T] => Ctx3[T] = Ctx3[T]() + class E extends A2 // error, can't summon polymorphic given + +class E extends A2: + given [T] => Ctx3[T] = Ctx3[T]() // ok + diff --git a/tests/neg/deferredSummon.check b/tests/neg/deferredSummon.check new file mode 100644 index 000000000000..bd76ad73467e --- /dev/null +++ b/tests/neg/deferredSummon.check @@ -0,0 +1,17 @@ +-- Error: tests/neg/deferredSummon.scala:4:26 -------------------------------------------------------------------------- +4 | given Int = compiletime.deferred // error + | ^^^^^^^^^^^^^^^^^^^^ + | `deferred` can only be used as the right hand side of a given definition in a trait +-- Error: tests/neg/deferredSummon.scala:7:26 -------------------------------------------------------------------------- +7 | given Int = compiletime.deferred // error + | ^^^^^^^^^^^^^^^^^^^^ + | `deferred` can only be used as the right hand side of a given definition in a trait +-- Error: tests/neg/deferredSummon.scala:12:16 ------------------------------------------------------------------------- +12 | given Int = deferred // error + | ^^^^^^^^ + | `deferred` can only be used as the right hand side of a given definition in a trait +-- Error: tests/neg/deferredSummon.scala:16:14 ------------------------------------------------------------------------- 
+16 | given Int = defered // error + | ^^^^^^^ + |`deferred` can only be used as the right hand side of a given definition in a trait. + |Note that `deferred` can only be used under its own name when implementing a given in a trait; `defered` is not accepted. diff --git a/tests/neg/deferredSummon.scala b/tests/neg/deferredSummon.scala new file mode 100644 index 000000000000..cddde82535fb --- /dev/null +++ b/tests/neg/deferredSummon.scala @@ -0,0 +1,19 @@ +//> using options -language:experimental.modularity + +object Test: + given Int = compiletime.deferred // error + +abstract class C: + given Int = compiletime.deferred // error + +trait A: + import compiletime.deferred + locally: + given Int = deferred // error + +trait B: + import compiletime.deferred as defered + given Int = defered // error + + + diff --git a/tests/neg/depfuns.scala b/tests/neg/depfuns.scala index ac96915a78b5..989aa72be820 100644 --- a/tests/neg/depfuns.scala +++ b/tests/neg/depfuns.scala @@ -1,5 +1,7 @@ +import language.experimental.erasedDefinitions + object Test { - type T = (x: Int) + type T = (erased x: Int) } // error: `=>' expected diff --git a/tests/neg/erased-1.scala b/tests/neg/erased-1.scala index 62a1024e80f5..deaa2a6d750e 100644 --- a/tests/neg/erased-1.scala +++ b/tests/neg/erased-1.scala @@ -12,25 +12,14 @@ object Test { }) foo1(a) // OK foo2( // error - a // error - ) - foo3( // error - a + a // Ok ) a // error } - erased def foo2(a: Int): Int = { - foo0(a) // OK - foo1(a) // OK - foo2(a) // OK - foo3(a) // OK - a // OK - } - erased def foo3(erased a: Int): Int = { + erased def foo2(erased a: Int): Int = { foo0(a) // OK foo1(a) // OK foo2(a) // OK - foo3(a) // OK a // OK } -} \ No newline at end of file +} diff --git a/tests/neg/erased-2.scala b/tests/neg/erased-2.scala index 02e4b56e11ac..3b51d9a4a40b 100644 --- a/tests/neg/erased-2.scala +++ b/tests/neg/erased-2.scala @@ -8,39 +8,26 @@ object Test { ) foo1(u) // OK foo2( // error - u // error - ) - foo3( // error - u + u // 
Ok ) u // error u // error } - erased def foo2(a: Int): Int = { - foo0(u) // OK - foo1(u) // OK - foo2(u) // OK - foo3(u) // OK - u // warn - u // OK - } - erased def foo3(erased a: Int): Int = { + erased def foo2(erased a: Int): Int = { foo0(u) // OK foo1(u) // OK foo2(u) // OK - foo3(u) // OK u // warn u // OK } - erased val foo4: Int = { + erased val foo3: Int = { foo0(u) // OK foo1(u) // OK foo2(u) // OK - foo3(u) // OK u // warn u // OK } erased def u: Int = 42 -} \ No newline at end of file +} diff --git a/tests/neg/erased-3.scala b/tests/neg/erased-3.scala index 5c6a31860b11..7b33794791b6 100644 --- a/tests/neg/erased-3.scala +++ b/tests/neg/erased-3.scala @@ -8,40 +8,27 @@ object Test { ) foo1(u()) // OK foo2( // error - u() // error - ) - foo3( // error - u() + u() // Ok ) u() // error u() // error } - erased def foo2(a: Int): Int = { - foo0(u()) // OK - foo1(u()) // OK - foo2(u()) // OK - foo3(u()) // OK - u() // warn - u() // OK - } - erased def foo3(erased a: Int): Int = { + erased def foo2(erased a: Int): Int = { foo0(u()) // OK foo1(u()) // OK foo2(u()) // OK - foo3(u()) // OK u() // warn u() // OK } - erased val foo4: Int = { + erased val foo3: Int = { foo0(u()) // OK foo1(u()) // OK foo2(u()) // OK - foo3(u()) // OK println() u() // warn u() // OK } erased def u(): Int = 42 -} \ No newline at end of file +} diff --git a/tests/neg/erased-args-lifted.scala b/tests/neg/erased-args-lifted.scala index 2deee749ed3d..dfa7b74ee3d4 100644 --- a/tests/neg/erased-args-lifted.scala +++ b/tests/neg/erased-args-lifted.scala @@ -2,7 +2,7 @@ object Test { def foo(a: Int)(b: Int, c: Int) = 42 - erased def bar(i: Int): Int = { + erased def bar(erased i: Int): Int = { println(1) 42 } diff --git a/tests/neg/erased-params.scala b/tests/neg/erased-params.scala new file mode 100644 index 000000000000..2315b6bdf54d --- /dev/null +++ b/tests/neg/erased-params.scala @@ -0,0 +1,12 @@ +import scala.language.experimental.erasedDefinitions + +erased def test1(x: Int): Int = x // 
error +erased def test2(erased x: Int): Int = x +erased def test3(erased x: Int, erased y: Int): Int = x +erased def test4(erased x: Int, y: Int): Int = x // error +erased def test5(x: Int, erased y: Int): Int = y // error +erased def test6(x: Int, y: Int): Int = y // error // error +erased def test7(erased x: Int)(erased y: Int): Int = x +erased def test8(erased x: Int)(y: Int): Int = x // error +erased def test9(x: Int)(erased y: Int): Int = y // error +erased def test10(x: Int)(y: Int): Int = y // error // error diff --git a/tests/neg/expeimental-flag-with-lang-feature-1.scala b/tests/neg/expeimental-flag-with-lang-feature-1.scala deleted file mode 100644 index a5ece729fa3d..000000000000 --- a/tests/neg/expeimental-flag-with-lang-feature-1.scala +++ /dev/null @@ -1,5 +0,0 @@ -//> using options -Yno-experimental - -import scala.language.experimental.erasedDefinitions - -erased def erasedFun(erased x: Int): Int = x // error // error diff --git a/tests/neg/expeimental-flag.scala b/tests/neg/expeimental-flag.scala index 8b2e729ea8da..dff7efa92c50 100644 --- a/tests/neg/expeimental-flag.scala +++ b/tests/neg/expeimental-flag.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimental-2.scala b/tests/neg/experimental-2.scala index f2d0262d83f5..38e814247031 100644 --- a/tests/neg/experimental-2.scala +++ b/tests/neg/experimental-2.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + class Test7 { import scala.language.experimental diff --git a/tests/neg/experimental-erased.scala b/tests/neg/experimental-erased.scala deleted file mode 100644 index 3619d0059a95..000000000000 --- a/tests/neg/experimental-erased.scala +++ /dev/null @@ -1,11 +0,0 @@ -//> using options -Yno-experimental - -import language.experimental.erasedDefinitions -import annotation.experimental - -@experimental -erased class CanThrow[-E <: Exception] - -erased class CanThrow2[-E <: Exception] // error - -def other = 1 
diff --git a/tests/neg/experimental-imports.scala b/tests/neg/experimental-imports.scala index 3a672ac65a22..e3a91be45f08 100644 --- a/tests/neg/experimental-imports.scala +++ b/tests/neg/experimental-imports.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental @@ -14,7 +14,7 @@ object Object2: import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions + import language.experimental.erasedDefinitions // error erased def f = 1 @experimental @@ -29,7 +29,7 @@ object Class2: import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions + import language.experimental.erasedDefinitions // error erased def f = 1 @experimental @@ -44,5 +44,5 @@ def fun2 = import language.experimental.fewerBraces // error import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions + import language.experimental.erasedDefinitions // error erased def f = 1 diff --git a/tests/neg/experimental-message-experimental-flag.check b/tests/neg/experimental-message-experimental-flag.check new file mode 100644 index 000000000000..69174eaa789f --- /dev/null +++ b/tests/neg/experimental-message-experimental-flag.check @@ -0,0 +1,10 @@ + +-- Error: tests/neg/experimental-message-experimental-flag/Test_2.scala:3:10 ------------------------------------------- +3 |def g() = f() // error + | ^ + | method f is marked @experimental: Added by -experimental + | + | Experimental definition may only be used under experimental mode: + | 1. in a definition marked as @experimental, or + | 2. an experimental feature is imported at the package level, or + | 3. 
compiling with the -experimental compiler flag. diff --git a/tests/neg/experimental-message-experimental-flag/Lib_1.scala b/tests/neg/experimental-message-experimental-flag/Lib_1.scala new file mode 100644 index 000000000000..d95aaadf2fe5 --- /dev/null +++ b/tests/neg/experimental-message-experimental-flag/Lib_1.scala @@ -0,0 +1,3 @@ +//> using options -experimental + +def f() = ??? diff --git a/tests/neg/experimental-message-experimental-flag/Test_2.scala b/tests/neg/experimental-message-experimental-flag/Test_2.scala new file mode 100644 index 000000000000..a039d32081bc --- /dev/null +++ b/tests/neg/experimental-message-experimental-flag/Test_2.scala @@ -0,0 +1,3 @@ + + +def g() = f() // error diff --git a/tests/neg/experimental-message.check b/tests/neg/experimental-message.check new file mode 100644 index 000000000000..ea26a6c50b51 --- /dev/null +++ b/tests/neg/experimental-message.check @@ -0,0 +1,27 @@ +-- Error: tests/neg/experimental-message.scala:15:2 -------------------------------------------------------------------- +15 | f1() // error + | ^^ + | method f1 is marked @experimental + | + | Experimental definition may only be used under experimental mode: + | 1. in a definition marked as @experimental, or + | 2. an experimental feature is imported at the package level, or + | 3. compiling with the -experimental compiler flag. +-- Error: tests/neg/experimental-message.scala:16:2 -------------------------------------------------------------------- +16 | f2() // error + | ^^ + | method f2 is marked @experimental + | + | Experimental definition may only be used under experimental mode: + | 1. in a definition marked as @experimental, or + | 2. an experimental feature is imported at the package level, or + | 3. compiling with the -experimental compiler flag. 
+-- Error: tests/neg/experimental-message.scala:17:2 -------------------------------------------------------------------- +17 | f3() // error + | ^^ + | method f3 is marked @experimental: not yet stable + | + | Experimental definition may only be used under experimental mode: + | 1. in a definition marked as @experimental, or + | 2. an experimental feature is imported at the package level, or + | 3. compiling with the -experimental compiler flag. diff --git a/tests/neg/experimental-message.scala b/tests/neg/experimental-message.scala new file mode 100644 index 000000000000..8521873a6800 --- /dev/null +++ b/tests/neg/experimental-message.scala @@ -0,0 +1,17 @@ + + +import scala.annotation.experimental + +@experimental +def f1() = ??? + +@experimental() +def f2() = ??? + +@experimental("not yet stable") +def f3() = ??? + +def g() = + f1() // error + f2() // error + f3() // error diff --git a/tests/neg/experimental-nested-imports-2.scala b/tests/neg/experimental-nested-imports-2.scala index 4aac719a81d6..f969a2a4b5a6 100644 --- a/tests/neg/experimental-nested-imports-2.scala +++ b/tests/neg/experimental-nested-imports-2.scala @@ -1,31 +1,31 @@ -//> using options -Yno-experimental + import annotation.experimental class Class1: import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error @experimental def f = 1 def g = 1 object Object1: import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error @experimental def f = 1 def g = 1 def fun1 = import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import 
language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error @experimental def f = 1 def g = 1 val value1 = import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error @experimental def f = 1 def g = 1 \ No newline at end of file diff --git a/tests/neg/experimental-nested-imports-3.scala b/tests/neg/experimental-nested-imports-3.scala index 39b548b2586b..c3fbbc3676a5 100644 --- a/tests/neg/experimental-nested-imports-3.scala +++ b/tests/neg/experimental-nested-imports-3.scala @@ -1,23 +1,23 @@ -//> using options -Yno-experimental + import annotation.experimental class Class1: import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error object Object1: import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error def fun1 = import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error val value1 = import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error diff --git 
a/tests/neg/experimental-nested-imports.scala b/tests/neg/experimental-nested-imports.scala index 91fe3bfeb27b..b2a8f29d76eb 100644 --- a/tests/neg/experimental-nested-imports.scala +++ b/tests/neg/experimental-nested-imports.scala @@ -1,27 +1,27 @@ -//> using options -Yno-experimental + import annotation.experimental class Class1: import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error @experimental def f = 1 object Object1: import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error @experimental def f = 1 def fun1 = import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error @experimental def f = 1 val value1 = import language.experimental.namedTypeArguments // error import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.erasedDefinitions // error @experimental def f = 1 diff --git a/tests/neg/experimental.scala b/tests/neg/experimental.scala index efca9a26ec14..f35a7ca19d7f 100644 --- a/tests/neg/experimental.scala +++ b/tests/neg/experimental.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + class Test0 { import language.experimental.namedTypeArguments // error @@ -9,7 +9,7 @@ class Test0 { } class Test1 { - import scala.language.experimental.erasedDefinitions + import scala.language.experimental.erasedDefinitions // error 
import scala.compiletime.erasedValue type UnivEq[A] object UnivEq: diff --git a/tests/neg/experimentalAnnot.scala b/tests/neg/experimentalAnnot.scala index e50d9165b5a4..e14b35fffc5d 100644 --- a/tests/neg/experimentalAnnot.scala +++ b/tests/neg/experimentalAnnot.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalCaseClass.scala b/tests/neg/experimentalCaseClass.scala index 383824954041..1d72738c6f91 100644 --- a/tests/neg/experimentalCaseClass.scala +++ b/tests/neg/experimentalCaseClass.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalDefaultParams.scala b/tests/neg/experimentalDefaultParams.scala index fb9ffa282e60..f2648e0eaae6 100644 --- a/tests/neg/experimentalDefaultParams.scala +++ b/tests/neg/experimentalDefaultParams.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalEnum.scala b/tests/neg/experimentalEnum.scala index 14ced7ca22bb..9ab7013a147e 100644 --- a/tests/neg/experimentalEnum.scala +++ b/tests/neg/experimentalEnum.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalErased.scala b/tests/neg/experimentalErased.scala deleted file mode 100644 index 99de8048c261..000000000000 --- a/tests/neg/experimentalErased.scala +++ /dev/null @@ -1,24 +0,0 @@ -//> using options -Yno-experimental - -import language.experimental.erasedDefinitions -import annotation.experimental - -@experimental -erased class Foo - -erased class Bar // error - -@experimental -erased def foo = 2 - -erased def bar = 2 // error - -@experimental -erased val foo2 = 2 - -erased val bar2 = 2 // error - -@experimental -def foo3(erased a: Int) = 2 - -def bar3(erased a: Int) = 2 // error diff --git a/tests/neg/experimentalExperimental.scala 
b/tests/neg/experimentalExperimental.scala new file mode 100644 index 000000000000..9011a3e49225 --- /dev/null +++ b/tests/neg/experimentalExperimental.scala @@ -0,0 +1 @@ +class MyExperimentalAnnot extends scala.annotation.experimental // error diff --git a/tests/neg/experimentalInline.scala b/tests/neg/experimentalInline.scala index b837ad498492..ffe4f5271749 100644 --- a/tests/neg/experimentalInline.scala +++ b/tests/neg/experimentalInline.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalInline2.scala b/tests/neg/experimentalInline2.scala index 8bf6a82fba2e..575947e3216c 100644 --- a/tests/neg/experimentalInline2.scala +++ b/tests/neg/experimentalInline2.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalMembers.scala b/tests/neg/experimentalMembers.scala index fe4adbfcf44d..aae0e59bb9e5 100644 --- a/tests/neg/experimentalMembers.scala +++ b/tests/neg/experimentalMembers.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalOverloads.scala b/tests/neg/experimentalOverloads.scala index 7adaf0b78840..1bc40bbe7631 100644 --- a/tests/neg/experimentalOverloads.scala +++ b/tests/neg/experimentalOverloads.scala @@ -1,3 +1,5 @@ + + import scala.annotation.experimental trait A: diff --git a/tests/neg/experimentalOverride.scala b/tests/neg/experimentalOverride.scala index adc8b919dc6b..2372242f6577 100644 --- a/tests/neg/experimentalOverride.scala +++ b/tests/neg/experimentalOverride.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalRHS.scala b/tests/neg/experimentalRHS.scala index cffa35ed4ba9..a682a64ef1e0 100644 --- a/tests/neg/experimentalRHS.scala +++ b/tests/neg/experimentalRHS.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import 
scala.annotation.experimental diff --git a/tests/neg/experimentalSam.scala b/tests/neg/experimentalSam.scala index ab86cabc6816..e262e3202261 100644 --- a/tests/neg/experimentalSam.scala +++ b/tests/neg/experimentalSam.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalSignature.scala b/tests/neg/experimentalSignature.scala index 479f9140d0a0..3c8dd3d97d33 100644 --- a/tests/neg/experimentalSignature.scala +++ b/tests/neg/experimentalSignature.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalTerms.scala b/tests/neg/experimentalTerms.scala index 10776e78bae7..6a9aa7a7e95b 100644 --- a/tests/neg/experimentalTerms.scala +++ b/tests/neg/experimentalTerms.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalTests.scala b/tests/neg/experimentalTests.scala index a45809c099c4..df8dcf370c93 100644 --- a/tests/neg/experimentalTests.scala +++ b/tests/neg/experimentalTests.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalType.scala b/tests/neg/experimentalType.scala index 22bdecf415e3..b380d1d6a36f 100644 --- a/tests/neg/experimentalType.scala +++ b/tests/neg/experimentalType.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalTypeRHS.scala b/tests/neg/experimentalTypeRHS.scala index f5801ea2f11d..e995e0736902 100644 --- a/tests/neg/experimentalTypeRHS.scala +++ b/tests/neg/experimentalTypeRHS.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalTypes2.scala b/tests/neg/experimentalTypes2.scala index 3d042792c4de..c175eab4c389 100644 --- a/tests/neg/experimentalTypes2.scala +++ 
b/tests/neg/experimentalTypes2.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/experimentalUnapply.scala b/tests/neg/experimentalUnapply.scala index 049577bf63fd..89a2e2950f45 100644 --- a/tests/neg/experimentalUnapply.scala +++ b/tests/neg/experimentalUnapply.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/neg/fieldsOf.scala b/tests/neg/fieldsOf.scala new file mode 100644 index 000000000000..d3539070b556 --- /dev/null +++ b/tests/neg/fieldsOf.scala @@ -0,0 +1,11 @@ +case class Person(name: String, age: Int) +class Anon(name: String, age: Int) +def foo[T](): NamedTuple.From[T] = ??? + +def test = + var x: NamedTuple.From[Person] = ??? + x = foo[Person]() // ok + x = foo[Anon]() // error + x = foo() // error + + diff --git a/tests/neg/given-ambiguous-1.check b/tests/neg/given-ambiguous-1.check new file mode 100644 index 000000000000..ed64164b351f --- /dev/null +++ b/tests/neg/given-ambiguous-1.check @@ -0,0 +1,9 @@ +-- [E172] Type Error: tests/neg/given-ambiguous-1.scala:12:23 ---------------------------------------------------------- +12 |def f: Unit = summon[B] // error: Ambiguous given instances + | ^ + | No best given instance of type B was found for parameter x of method summon in object Predef. + | I found: + | + | given_B(/* ambiguous: both given instance a1 and given instance a2 match type A */summon[A]) + | + | But both given instance a1 and given instance a2 match type A. diff --git a/tests/neg/given-ambiguous-1.scala b/tests/neg/given-ambiguous-1.scala new file mode 100644 index 000000000000..0ce4f566e615 --- /dev/null +++ b/tests/neg/given-ambiguous-1.scala @@ -0,0 +1,12 @@ +class A +class B +given a1: A = ??? +given a2: A = ??? +given (using a: A): B = ??? 
+ +// In this case, the ambiguous given instance is not directly the argument of +// `summon`; it is the argument of `given_B` which is needed for the argument of +// `summon`. This is a nested ambiguous implicit, thus we report an error in +// the style "I found ... but". See `given-ambiguous-2` for a direct ambiguous +// implicit error. +def f: Unit = summon[B] // error: Ambiguous given instances diff --git a/tests/neg/given-ambiguous-2.check b/tests/neg/given-ambiguous-2.check new file mode 100644 index 000000000000..ec84b750e691 --- /dev/null +++ b/tests/neg/given-ambiguous-2.check @@ -0,0 +1,4 @@ +-- [E172] Type Error: tests/neg/given-ambiguous-2.scala:10:15 ---------------------------------------------------------- +10 |def f: Unit = g // error: Ambiguous given instances + | ^ + | Ambiguous given instances: both given instance a1 and given instance a2 match type A of parameter a of method g diff --git a/tests/neg/given-ambiguous-2.scala b/tests/neg/given-ambiguous-2.scala new file mode 100644 index 000000000000..2c3c52f1ccb0 --- /dev/null +++ b/tests/neg/given-ambiguous-2.scala @@ -0,0 +1,10 @@ +class A +class B +given a1: A = ??? +given a2: A = ??? +def g(using a: A): B = ??? + +// In this case, the ambiguous given instance is directly the argument of +// `summon`. This is a direct ambiguous implicit, thus we report the error +// directly. See `given-ambiguous-1` for a nested ambiguous implicit error. 
+def f: Unit = g // error: Ambiguous given instances diff --git a/tests/neg/given-ambiguous-default-1.check b/tests/neg/given-ambiguous-default-1.check new file mode 100644 index 000000000000..1a5006c23055 --- /dev/null +++ b/tests/neg/given-ambiguous-default-1.check @@ -0,0 +1,9 @@ +-- [E172] Type Error: tests/neg/given-ambiguous-default-1.scala:18:23 -------------------------------------------------- +18 |def f: Unit = summon[B] // error: Ambiguous given instances + | ^ + | No best given instance of type B was found for parameter x of method summon in object Predef. + | I found: + | + | given_B(a = /* ambiguous: both given instance a1 and given instance a2 match type A */summon[A]) + | + | But both given instance a1 and given instance a2 match type A. diff --git a/tests/neg/given-ambiguous-default-1.scala b/tests/neg/given-ambiguous-default-1.scala new file mode 100644 index 000000000000..140736e9eee3 --- /dev/null +++ b/tests/neg/given-ambiguous-default-1.scala @@ -0,0 +1,18 @@ +/** This test checks that provided ambiguous given instances take precedence + * over default given arguments. In the following code, the compiler must + * report an "Ambiguous implicits" error for the parameter `a`, and must not + * use its default value. + * + * See also: + * - tests/neg/19414.scala + * - tests/neg/19414-desugared.scala + * - tests/neg/given-ambiguous-default-2.scala + */ + +class A +class B +given a1: A = ??? +given a2: A = ??? +given (using a: A = A()): B = ??? 
+ +def f: Unit = summon[B] // error: Ambiguous given instances diff --git a/tests/neg/given-ambiguous-default-2.check b/tests/neg/given-ambiguous-default-2.check new file mode 100644 index 000000000000..cbe8b972a389 --- /dev/null +++ b/tests/neg/given-ambiguous-default-2.check @@ -0,0 +1,9 @@ +-- [E172] Type Error: tests/neg/given-ambiguous-default-2.scala:18:23 -------------------------------------------------- +18 |def f: Unit = summon[C] // error: Ambiguous given instances + | ^ + |No best given instance of type C was found for parameter x of method summon in object Predef. + |I found: + | + | given_C(a = /* ambiguous: both given instance a1 and given instance a2 match type A */summon[A], this.given_C$default$2) + | + |But both given instance a1 and given instance a2 match type A. diff --git a/tests/neg/given-ambiguous-default-2.scala b/tests/neg/given-ambiguous-default-2.scala new file mode 100644 index 000000000000..9e639b66f3d1 --- /dev/null +++ b/tests/neg/given-ambiguous-default-2.scala @@ -0,0 +1,18 @@ +/** This test checks that provided given instances take precedence over default + * given arguments, even when there are multiple default arguments. Before the + * fix for issue #19414, this code would compile without errors. + * + * See also: + * - tests/neg/given-ambiguous-default-1.scala + * - tests/neg/19414.scala + * - tests/neg/19414-desugared.scala + */ + +class A +class B +class C +given a1: A = ??? +given a2: A = ??? +given (using a: A = A(), b: B = B()): C = ??? 
+ +def f: Unit = summon[C] // error: Ambiguous given instances diff --git a/tests/neg/given-loop-prevention.check b/tests/neg/given-loop-prevention.check new file mode 100644 index 000000000000..460adf03be49 --- /dev/null +++ b/tests/neg/given-loop-prevention.check @@ -0,0 +1,14 @@ +-- Error: tests/neg/given-loop-prevention.scala:10:36 ------------------------------------------------------------------ +10 | given List[Foo] = List(summon[Foo]) // error + | ^ + | Result of implicit search for Foo will change. + | Current result Baz.given_Foo will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: No Matching Implicit. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that Baz.given_Foo comes earlier, + | - use an explicit argument. 
diff --git a/tests/neg/given-loop-prevention.scala b/tests/neg/given-loop-prevention.scala new file mode 100644 index 000000000000..9d404b8c6d8e --- /dev/null +++ b/tests/neg/given-loop-prevention.scala @@ -0,0 +1,12 @@ + +class Foo + +object Bar { + given Foo with {} + given List[Foo] = List(summon[Foo]) // ok +} + +object Baz { + given List[Foo] = List(summon[Foo]) // error + given Foo with {} +} diff --git a/tests/neg/given-triangle.check b/tests/neg/given-triangle.check new file mode 100644 index 000000000000..f366c18e78f0 --- /dev/null +++ b/tests/neg/given-triangle.check @@ -0,0 +1,12 @@ +-- [E172] Type Error: tests/neg/given-triangle.scala:15:18 ------------------------------------------------------------- +15 |@main def Test = f // error + | ^ + |Ambiguous given instances: both given instance given_B and given instance given_C match type A of parameter a of method f + | + |Note: Given search preference for A between alternatives + | (given_B : B) + |and + | (given_A : A) + |will change. 
+ |Current choice : the first alternative + |New choice from Scala 3.7: the second alternative diff --git a/tests/neg/given-triangle.scala b/tests/neg/given-triangle.scala new file mode 100644 index 000000000000..4842c5314f51 --- /dev/null +++ b/tests/neg/given-triangle.scala @@ -0,0 +1,15 @@ +//> using options -source 3.6 +class A +class B extends A +class C extends A + +given A = A() +given B = B() +given C = C() + +def f(using a: A, b: B, c: C) = + println(a.getClass) + println(b.getClass) + println(c.getClass) + +@main def Test = f // error diff --git a/tests/neg/i0248-inherit-refined.check b/tests/neg/i0248-inherit-refined.check new file mode 100644 index 000000000000..4e14c3c6f14b --- /dev/null +++ b/tests/neg/i0248-inherit-refined.check @@ -0,0 +1,12 @@ +-- [E170] Type Error: tests/neg/i0248-inherit-refined.scala:8:18 ------------------------------------------------------- +8 | class C extends Y // error + | ^ + | test.A & test.B is not a class type + | + | longer explanation available when compiling with `-explain` +-- [E170] Type Error: tests/neg/i0248-inherit-refined.scala:10:18 ------------------------------------------------------ +10 | class D extends Z // error + | ^ + | test.A | test.B is not a class type + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i0248-inherit-refined.scala b/tests/neg/i0248-inherit-refined.scala index 97b6f5cdab73..f7cd6375afc9 100644 --- a/tests/neg/i0248-inherit-refined.scala +++ b/tests/neg/i0248-inherit-refined.scala @@ -1,10 +1,12 @@ +//> using options -source future -language:experimental.modularity + object test { class A { type T } type X = A { type T = Int } - class B extends X // error + class B extends X // was error, now OK type Y = A & B class C extends Y // error type Z = A | B class D extends Z // error - abstract class E extends ({ val x: Int }) // error + abstract class E extends ({ val x: Int }) // was error, now OK } diff --git a/tests/neg/i12049.check 
b/tests/neg/i12049.check index b44eb612f627..e0c2d498f119 100644 --- a/tests/neg/i12049.check +++ b/tests/neg/i12049.check @@ -15,22 +15,39 @@ | case B => String | | longer explanation available when compiling with `-explain` --- [E184] Type Error: tests/neg/i12049.scala:14:23 --------------------------------------------------------------------- +-- [E007] Type Mismatch Error: tests/neg/i12049.scala:14:17 ------------------------------------------------------------ 14 |val y3: String = ??? : Last[Int *: Int *: Boolean *: String *: EmptyTuple] // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Match type reduction failed since selector EmptyTuple - | matches none of the cases + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | Found: Last[EmptyTuple] + | Required: String | - | case _ *: _ *: t => Last[t] - | case t *: EmptyTuple => t --- [E184] Type Error: tests/neg/i12049.scala:22:26 --------------------------------------------------------------------- + | Note: a match type could not be fully reduced: + | + | trying to reduce Last[EmptyTuple] + | failed since selector EmptyTuple + | matches none of the cases + | + | case _ *: _ *: t => Last[t] + | case t *: EmptyTuple => t + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/i12049.scala:22:20 ------------------------------------------------------------ 22 |val z3: (A, B, A) = ??? 
: Reverse[(A, B, A)] // error - | ^^^^^^^^^^^^^^^^^^ - | Match type reduction failed since selector A *: EmptyTuple.type - | matches none of the cases + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | Found: Tuple.Concat[Reverse[A *: EmptyTuple.type], (B, A)] + | Required: (A, B, A) + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Tuple.Concat[Reverse[A *: EmptyTuple.type], (B, A)] + | trying to reduce Reverse[A *: EmptyTuple.type] + | failed since selector A *: EmptyTuple.type + | matches none of the cases | - | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] - | case EmptyTuple => EmptyTuple + | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] + | case EmptyTuple => EmptyTuple + | + | longer explanation available when compiling with `-explain` -- [E172] Type Error: tests/neg/i12049.scala:24:20 --------------------------------------------------------------------- 24 |val _ = summon[M[B]] // error | ^ @@ -45,22 +62,33 @@ | Therefore, reduction cannot advance to the remaining case | | case B => String --- [E184] Type Error: tests/neg/i12049.scala:25:26 --------------------------------------------------------------------- +-- [E172] Type Error: tests/neg/i12049.scala:25:78 --------------------------------------------------------------------- 25 |val _ = summon[String =:= Last[Int *: Int *: Boolean *: String *: EmptyTuple]] // error - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | Match type reduction failed since selector EmptyTuple - | matches none of the cases + | ^ + | Cannot prove that String =:= Last[EmptyTuple]. 
+ | + | Note: a match type could not be fully reduced: + | + | trying to reduce Last[EmptyTuple] + | failed since selector EmptyTuple + | matches none of the cases | - | case _ *: _ *: t => Last[t] - | case t *: EmptyTuple => t --- [E184] Type Error: tests/neg/i12049.scala:26:29 --------------------------------------------------------------------- + | case _ *: _ *: t => Last[t] + | case t *: EmptyTuple => t +-- [E172] Type Error: tests/neg/i12049.scala:26:48 --------------------------------------------------------------------- 26 |val _ = summon[(A, B, A) =:= Reverse[(A, B, A)]] // error - | ^^^^^^^^^^^^^^^^^^ - | Match type reduction failed since selector A *: EmptyTuple.type - | matches none of the cases + | ^ + | Cannot prove that (A, B, A) =:= Tuple.Concat[Reverse[A *: EmptyTuple.type], (B, A)]. + | + | Note: a match type could not be fully reduced: + | + | trying to reduce Tuple.Concat[Reverse[A *: EmptyTuple.type], (B, A)] + | trying to reduce Reverse[A *: EmptyTuple.type] + | failed since selector A *: EmptyTuple.type + | matches none of the cases | - | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] - | case EmptyTuple => EmptyTuple + | case t1 *: t2 *: ts => Tuple.Concat[Reverse[ts], (t2, t1)] + | case EmptyTuple => EmptyTuple -- [E008] Not Found Error: tests/neg/i12049.scala:28:21 ---------------------------------------------------------------- 28 |val _ = (??? 
: M[B]).length // error | ^^^^^^^^^^^^^^^^^^^ diff --git a/tests/neg/i12348.check b/tests/neg/i12348.check index ccc2b9f7ed00..55806fa5ca1b 100644 --- a/tests/neg/i12348.check +++ b/tests/neg/i12348.check @@ -2,7 +2,3 @@ 2 | given inline x: Int = 0 // error | ^ | 'with' expected, but identifier found --- [E040] Syntax Error: tests/neg/i12348.scala:3:10 -------------------------------------------------------------------- -3 |} // error - | ^ - | '}' expected, but eof found diff --git a/tests/neg/i12348.scala b/tests/neg/i12348.scala index 69fc77fb532e..bd8bf63994e6 100644 --- a/tests/neg/i12348.scala +++ b/tests/neg/i12348.scala @@ -1,3 +1,2 @@ object A { given inline x: Int = 0 // error -} // error \ No newline at end of file diff --git a/tests/neg/i12456.scala b/tests/neg/i12456.scala index b9fb0283dcd7..c1a3ada5a420 100644 --- a/tests/neg/i12456.scala +++ b/tests/neg/i12456.scala @@ -1 +1 @@ -object F { type T[G[X] <: X, F <: G[F]] } // error // error +object F { type T[G[X] <: X, F <: G[F]] } // error diff --git a/tests/neg/i13044.check b/tests/neg/i13044.check index 1fbe978a49b8..e504b14185da 100644 --- a/tests/neg/i13044.check +++ b/tests/neg/i13044.check @@ -1,5 +1,5 @@ --- Error: tests/neg/i13044.scala:65:40 --------------------------------------------------------------------------------- -65 | implicit def typeSchema: Schema[A] = Schema.gen // error // error +-- Error: tests/neg/i13044.scala:61:40 --------------------------------------------------------------------------------- +61 | implicit def typeSchema: Schema[A] = Schema.gen // error // error | ^^^^^^^^^^ | given instance gen is declared as `inline`, but was not inlined | @@ -12,11 +12,11 @@ | ^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 @@ -24,11 +24,11 @@ | ^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 @@ -36,11 +36,11 @@ | ^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 @@ -48,11 +48,11 @@ | ^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 @@ -64,15 +64,15 @@ | ^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:17 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ -------------------------------------------------------------------------------------------------------------------- --- Error: tests/neg/i13044.scala:65:40 --------------------------------------------------------------------------------- -65 | implicit def typeSchema: Schema[A] = Schema.gen // error // error +-- Error: tests/neg/i13044.scala:61:40 
--------------------------------------------------------------------------------- +61 | implicit def typeSchema: Schema[A] = Schema.gen // error // error | ^^^^^^^^^^ | method recurse is declared as `inline`, but was not inlined | @@ -85,11 +85,11 @@ | ^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 @@ -97,11 +97,11 @@ | ^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 @@ -109,11 +109,11 @@ | ^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - |This location contains code that was inlined from i13044.scala:18 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 @@ -121,11 +121,11 @@ | ^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 @@ -137,10 +137,10 @@ | ^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location contains code that was inlined from i13044.scala:18 -31 | lazy val fields = recurse[m.MirroredElemTypes] +29 | lazy val fields = recurse[m.MirroredElemTypes] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |This location 
contains code that was inlined from i13044.scala:18 -37 | inline given gen[A]: Schema[A] = derived +33 | inline given gen[A]: Schema[A] = derived | ^^^^^^^ -------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i13044.scala b/tests/neg/i13044.scala index 42417a9096f9..2b00fc188f8c 100644 --- a/tests/neg/i13044.scala +++ b/tests/neg/i13044.scala @@ -23,15 +23,11 @@ trait SchemaDerivation { inline summonInline[Mirror.Of[A]] match { case m: Mirror.SumOf[A] => lazy val subTypes = recurse[m.MirroredElemTypes] - new Schema[A] { - def build: A = ??? - } + ??? case m: Mirror.ProductOf[A] => lazy val fields = recurse[m.MirroredElemTypes] - new Schema[A] { - def build: A = ??? - } + ??? } inline given gen[A]: Schema[A] = derived diff --git a/tests/neg/i13091.check b/tests/neg/i13091.check new file mode 100644 index 000000000000..5cd793a9cfcb --- /dev/null +++ b/tests/neg/i13091.check @@ -0,0 +1,15 @@ +-- [E190] Potential Issue Warning: tests/neg/i13091.scala:7:17 --------------------------------------------------------- +7 |def test: Unit = new Foo // error: class Foo is marked @experimental ... + | ^^^^^^^ + | Discarded non-Unit value of type Foo. You may want to use `()`. + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg/i13091.scala:7:21 ---------------------------------------------------------------------------------- +7 |def test: Unit = new Foo // error: class Foo is marked @experimental ... + | ^^^ + | class Foo is marked @experimental + | + | Experimental definition may only be used under experimental mode: + | 1. in a definition marked as @experimental, or + | 2. an experimental feature is imported at the package level, or + | 3. compiling with the -experimental compiler flag. 
diff --git a/tests/neg/i13091.scala b/tests/neg/i13091.scala index 208d05d6886d..549fdf6d0fae 100644 --- a/tests/neg/i13091.scala +++ b/tests/neg/i13091.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental diff --git a/tests/neg/i13703.check b/tests/neg/i13703.check index eb782c982295..a02bbdf407f7 100644 --- a/tests/neg/i13703.check +++ b/tests/neg/i13703.check @@ -1,10 +1,5 @@ --- Error: tests/neg/i13703.scala:3:17 ---------------------------------------------------------------------------------- -3 |val f: Foo { var i: Int } = new Foo { var i: Int = 0 } // error - | ^^^^^^^^^^ - | refinement cannot be a mutable var. - | You can use an explicit getter i and setter i_= instead --- [E007] Type Mismatch Error: tests/neg/i13703.scala:5:78 ------------------------------------------------------------- -5 |val f2: Foo { val i: Int; def i_=(x: Int): Unit } = new Foo { var i: Int = 0 } // error +-- [E007] Type Mismatch Error: tests/neg/i13703.scala:3:78 ------------------------------------------------------------- +3 |val f2: Foo { val i: Int; def i_=(x: Int): Unit } = new Foo { var i: Int = 0 } // error | ^ | Found: Object with Foo {...} | Required: Foo{val i: Int; def i_=(x: Int): Unit} diff --git a/tests/neg/i13703.scala b/tests/neg/i13703.scala index e8e54db8807d..6616b4f2e11c 100644 --- a/tests/neg/i13703.scala +++ b/tests/neg/i13703.scala @@ -1,7 +1,5 @@ trait Foo extends reflect.Selectable -val f: Foo { var i: Int } = new Foo { var i: Int = 0 } // error - val f2: Foo { val i: Int; def i_=(x: Int): Unit } = new Foo { var i: Int = 0 } // error val f3: Foo { def i: Int; def i_=(x: Int): Unit } = new Foo { var i: Int = 0 } // OK diff --git a/tests/neg/i13757-match-type-anykind.scala b/tests/neg/i13757-match-type-anykind.scala index a80e8b2b289b..998c54292b15 100644 --- a/tests/neg/i13757-match-type-anykind.scala +++ b/tests/neg/i13757-match-type-anykind.scala @@ -8,7 +8,7 @@ object Test: type AnyKindMatchType3[X <: AnyKind] = X 
match // error: the scrutinee of a match type cannot be higher-kinded case _ => Int - type AnyKindMatchType4[X <: Option] = X match // error // error: the scrutinee of a match type cannot be higher-kinded + type AnyKindMatchType4[X <: Option] = X match // error: the scrutinee of a match type cannot be higher-kinded case _ => Int type AnyKindMatchType5[X[_]] = X match // error: the scrutinee of a match type cannot be higher-kinded diff --git a/tests/neg/i13848.scala b/tests/neg/i13848.scala index fcc519c47592..298985e4da9e 100644 --- a/tests/neg/i13848.scala +++ b/tests/neg/i13848.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental diff --git a/tests/neg/i15264.scala b/tests/neg/i15264.scala new file mode 100644 index 000000000000..d690eccf23f3 --- /dev/null +++ b/tests/neg/i15264.scala @@ -0,0 +1,59 @@ +import language.`3.7` +object priority: + // lower number = higher priority + class Prio0 extends Prio1 + object Prio0 { given Prio0() } + + class Prio1 extends Prio2 + object Prio1 { given Prio1() } + + class Prio2 + object Prio2 { given Prio2() } + +object repro: + // analogous to cats Eq, Hash, Order: + class A[V] + class B[V] extends A[V] + class C[V] extends A[V] + + class Q[V] + + object context: + // prios work here, which is cool + given[V](using priority.Prio0): C[V] = new C[V] + given[V](using priority.Prio1): B[V] = new B[V] + given[V](using priority.Prio2): A[V] = new A[V] + + object exports: + // so will these exports + export context.given + + // if you import these don't import from 'context' above + object qcontext: + // base defs, like what you would get from cats + given gb: B[Int] = new B[Int] + given gc: C[Int] = new C[Int] + + // these seem like they should work but don't + given gcq[V](using p0: priority.Prio0)(using c: C[V]): C[Q[V]] = new C[Q[V]] + given gbq[V](using p1: priority.Prio1)(using b: B[V]): B[Q[V]] = new B[Q[V]] + given gaq[V](using p2: priority.Prio2)(using a: A[V]): A[Q[V]] = new A[Q[V]] + 
+object test1: + import repro.* + import repro.exports.given + + // these will work + val a = summon[A[Int]] + +object test2: + import repro.* + import repro.qcontext.given + + // This one will fail as ambiguous - prios aren't having an effect. + // Priorities indeed don't have an effect if the result is already decided + // without using clauses, they onyl act as a tie breaker. + // With the new resolution rules, it's ambiguous since we pick `gaq` for + // summon, and that needs an A[Int], but there are only the two competing choices + // qb and qc. + val a = summon[A[Q[Int]]] // error: ambiguous between qb and qc for A[Int] diff --git a/tests/neg/i15474.check b/tests/neg/i15474.check index 3205f703cd50..9fa8fa6c722a 100644 --- a/tests/neg/i15474.check +++ b/tests/neg/i15474.check @@ -13,7 +13,6 @@ | - rearrange definitions so that Test2.c comes earlier, | - use an explicit conversion, | - use an import to get extension method into scope. - | This will be an error in Scala 3.5 and later. -- Error: tests/neg/i15474.scala:12:56 --------------------------------------------------------------------------------- 12 | given Ordering[Price] = summon[Ordering[BigDecimal]] // error | ^ @@ -28,4 +27,3 @@ | - use a `given ... with` clause as the enclosing given, | - rearrange definitions so that Prices.Price.given_Ordering_Price comes earlier, | - use an explicit argument. - | This will be an error in Scala 3.5 and later. 
diff --git a/tests/neg/i15741.scala b/tests/neg/i15741.scala index 2d536c515f76..45d6c3bed16d 100644 --- a/tests/neg/i15741.scala +++ b/tests/neg/i15741.scala @@ -1,15 +1,15 @@ def get(using Int): String = summon[Int].toString - def pf2: PartialFunction[String, Int ?=> String] = { + def pf2: PartialFunction[String, Int ?=> String] = { // error case "hoge" => get case "huga" => get - } // error + } type IS = Int ?=> String - def pf3: PartialFunction[String, IS] = { + def pf3: PartialFunction[String, IS] = { // error case "hoge" => get case "huga" => get - } // error + } diff --git a/tests/neg/i16438.scala b/tests/neg/i16438.scala index a2b88080c2cd..793e6518ea71 100644 --- a/tests/neg/i16438.scala +++ b/tests/neg/i16438.scala @@ -1,4 +1,4 @@ -//> using options -Ysafe-init +//> using options -Wsafe-init trait ATrait(val string: String, val int: Int) trait AnotherTrait( override val string: String, override val int: Int) extends ATrait case class ACaseClass(override val string: String) extends AnotherTrait(string, 3) // error diff --git a/tests/neg/i17292.scala b/tests/neg/i17292.scala index 9cf7fc7b1c30..e99cb5e9a75e 100644 --- a/tests/neg/i17292.scala +++ b/tests/neg/i17292.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental diff --git a/tests/neg/i17292b.scala b/tests/neg/i17292b.scala index b89a64439699..08fbda18a886 100644 --- a/tests/neg/i17292b.scala +++ b/tests/neg/i17292b.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental type A[T] = Int diff --git a/tests/neg/i17944.check b/tests/neg/i17944.check index 80dfaac8c4c8..c969edccb46b 100644 --- a/tests/neg/i17944.check +++ b/tests/neg/i17944.check @@ -14,33 +14,3 @@ | Therefore, reduction cannot advance to the remaining case | | case _ *: t => test.FindField0[t, ("i" : String), scala.compiletime.ops.int.S[(0 : Int)]] - | trying to reduce test.FindField[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String)] - | trying to 
reduce test.FindField0[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String), (0 : Int)] - | failed since selector (("s" : String) ->> String, ("i" : String) ->> Int) - | does not match case (("i" : String) ->> f) *: _ => (f, (0 : Int)) - | and cannot be shown to be disjoint from it either. - | Therefore, reduction cannot advance to the remaining case - | - | case _ *: t => test.FindField0[t, ("i" : String), scala.compiletime.ops.int.S[(0 : Int)]] - | trying to reduce test.FindField0[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String), (0 : Int)] - | failed since selector (("s" : String) ->> String, ("i" : String) ->> Int) - | does not match case (("i" : String) ->> f) *: _ => (f, (0 : Int)) - | and cannot be shown to be disjoint from it either. - | Therefore, reduction cannot advance to the remaining case - | - | case _ *: t => test.FindField0[t, ("i" : String), scala.compiletime.ops.int.S[(0 : Int)]] - | trying to reduce test.FindField[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String)] - | trying to reduce test.FindField0[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String), (0 : Int)] - | failed since selector (("s" : String) ->> String, ("i" : String) ->> Int) - | does not match case (("i" : String) ->> f) *: _ => (f, (0 : Int)) - | and cannot be shown to be disjoint from it either. - | Therefore, reduction cannot advance to the remaining case - | - | case _ *: t => test.FindField0[t, ("i" : String), scala.compiletime.ops.int.S[(0 : Int)]] - | trying to reduce test.FindField0[(("s" : String) ->> String, ("i" : String) ->> Int), ("i" : String), (0 : Int)] - | failed since selector (("s" : String) ->> String, ("i" : String) ->> Int) - | does not match case (("i" : String) ->> f) *: _ => (f, (0 : Int)) - | and cannot be shown to be disjoint from it either. 
- | Therefore, reduction cannot advance to the remaining case - | - | case _ *: t => test.FindField0[t, ("i" : String), scala.compiletime.ops.int.S[(0 : Int)]] diff --git a/tests/neg/i18552.check b/tests/neg/i18552.check new file mode 100644 index 000000000000..a7a04ed78b47 --- /dev/null +++ b/tests/neg/i18552.check @@ -0,0 +1,13 @@ +-- Error: tests/neg/i18552.scala:9:6 ----------------------------------------------------------------------------------- +9 |class MB(id:Int) extends MA(id) with M[B] // error + | ^ + | illegal inheritance: class MB inherits conflicting instances of base trait M. + | + | Direct basetype: M[B] + | Basetype via case class MA: M[A] + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Refining a basetype of a case class is not allowed. 
+ | This is a limitation that enables better GADT constraints in case class patterns + --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/neg/i18552.scala b/tests/neg/i18552.scala new file mode 100644 index 000000000000..29f928e1dcfa --- /dev/null +++ b/tests/neg/i18552.scala @@ -0,0 +1,9 @@ +//> using options -explain + +trait A +trait B extends A + +trait M[+T] + +case class MA(id:Int) extends M[A] +class MB(id:Int) extends MA(id) with M[B] // error diff --git a/tests/neg/i18632.check b/tests/neg/i18632.check new file mode 100644 index 000000000000..a0fa733cf8e3 --- /dev/null +++ b/tests/neg/i18632.check @@ -0,0 +1,5 @@ +-- [E176] Potential Issue Warning: tests/neg/i18632.scala:12:2 --------------------------------------------------------- +12 | bar // warn + | ^^^ + | unused value of type String +No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/i18632.scala b/tests/neg/i18632.scala new file mode 100644 index 000000000000..9253951b166a --- /dev/null +++ b/tests/neg/i18632.scala @@ -0,0 +1,14 @@ +//> using options -Wnonunit-statement -Werror + +class Context + +object Foo { + def run(program: Context ?=> String): Unit = ??? +} + +def bar(using Context): String = ??? 
+ +@main def run = Foo.run: + bar // warn + bar +// nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/i19198.scala b/tests/neg/i19198.scala new file mode 100644 index 000000000000..be4fc1602697 --- /dev/null +++ b/tests/neg/i19198.scala @@ -0,0 +1,13 @@ +import deriving.Mirror +import compiletime.summonInline + +type DoesNotReduce[T] = T match + case String => Any + +type DoesNotReduce2[T] <: T = T match + case String => T + +class Foo +@main def Test: Unit = + summonInline[Mirror.Of[DoesNotReduce[Option[Int]]]] // error + summonInline[Mirror.Of[DoesNotReduce2[Option[Int]]]] // error diff --git a/tests/neg/i19351a.check b/tests/neg/i19351a.check index 3c1353811f3d..10789c2db5aa 100644 --- a/tests/neg/i19351a.check +++ b/tests/neg/i19351a.check @@ -1,12 +1,4 @@ -- Error: tests/neg/i19351a/Test.scala:8:34 ---------------------------------------------------------------------------- -8 |inline def not(b: Bool): Bool = ${notMacro('b)} // error // error +8 |inline def not(b: Bool): Bool = ${notMacro('b)} // error | ^ |Cyclic macro dependency; macro refers to a toplevel symbol in tests/neg/i19351a/Test.scala from which the macro is called --- [E046] Cyclic Error: tests/neg/i19351a/Test.scala:8:46 -------------------------------------------------------------- -8 |inline def not(b: Bool): Bool = ${notMacro('b)} // error // error - | ^ - | Cyclic reference involving method $anonfun - | - | Run with -explain-cyclic for more details. 
- | - | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i19351a/Test.scala b/tests/neg/i19351a/Test.scala index 51f608aa46ea..84fb6ca4ae78 100644 --- a/tests/neg/i19351a/Test.scala +++ b/tests/neg/i19351a/Test.scala @@ -5,7 +5,7 @@ type Bool = [R] => (R, R) => R val True: Bool = [R] => (t: R, _: R) => t val False: Bool = [R] => (_: R, f: R) => f -inline def not(b: Bool): Bool = ${notMacro('b)} // error // error +inline def not(b: Bool): Bool = ${notMacro('b)} // error inline def show(b: Bool): String = ${showMacro('b)} //inline def not(b: Bool): Bool = ${foldMacro('b, 'False, 'True)} //inline def show(b: Bool): String = ${foldMacro('b, '{"TRUE"}, '{"FALSE"})} diff --git a/tests/neg/i19746.check b/tests/neg/i19746.check new file mode 100644 index 000000000000..6be8700bb550 --- /dev/null +++ b/tests/neg/i19746.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg/i19746.scala:9:30 ------------------------------------------------------------- +9 | def asX(w: W[Any]): w.X = self // error: Type Mismatch + | ^^^^ + | Found: (self : Any) + | Required: w.X + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i19746.scala b/tests/neg/i19746.scala new file mode 100644 index 000000000000..b2969d4d3fef --- /dev/null +++ b/tests/neg/i19746.scala @@ -0,0 +1,15 @@ +trait V: + type X = this.type match + case W[x] => x + +trait W[+Y] extends V + +object Test: + extension (self: Any) def as[T]: T = + def asX(w: W[Any]): w.X = self // error: Type Mismatch + asX(new W[T] {}) + + def main(args: Array[String]): Unit = + val b = 0.as[Boolean] // java.lang.ClassCastException if the code is allowed to compile + println(b) +end Test diff --git a/tests/neg/i19809.check b/tests/neg/i19809.check new file mode 100644 index 000000000000..269eacd18fd2 --- /dev/null +++ b/tests/neg/i19809.check @@ -0,0 +1,10 @@ +-- [E120] Naming Error: tests/neg/i19809.scala:3:6 
--------------------------------------------------------------------- +3 | def x_=(x: Int): Unit // error + | ^ + | Double definition: + | def x_=(x$1: Int): Unit in trait at line 2 and + | def x_=(x: Int): Unit in trait at line 3 + | have the same type after erasure. + | + | Consider adding a @targetName annotation to one of the conflicting definitions + | for disambiguation. diff --git a/tests/neg/i19809.scala b/tests/neg/i19809.scala new file mode 100644 index 000000000000..02eb9b08faf5 --- /dev/null +++ b/tests/neg/i19809.scala @@ -0,0 +1,4 @@ +type A = Any { + var x : Int + def x_=(x: Int): Unit // error +} diff --git a/tests/neg/i19949.scala b/tests/neg/i19949.scala new file mode 100644 index 000000000000..96a22e42e079 --- /dev/null +++ b/tests/neg/i19949.scala @@ -0,0 +1,9 @@ + +trait T[N]: + type M = N match + case 0 => Any + +val t: T[Double] = new T[Double] {} +val x: t.M = "hello" // error + +val z: T[Double]#M = "hello" // error diff --git a/tests/neg/i20071a.scala b/tests/neg/i20071a.scala new file mode 100644 index 000000000000..2d3dd5fe17d1 --- /dev/null +++ b/tests/neg/i20071a.scala @@ -0,0 +1,28 @@ + +trait Scope +object Scope: + given i: Int = ??? + +type ReferencesScope[S] >: Int <: Int + +type ScopeToInt[Why] = Why match + case Scope => Int + +def foo[T](using d: ReferencesScope[T]): Any = ??? + +def bar[T](using d: ScopeToInt[T]): Any = ??? + +def test: Unit = + foo[Scope] // ok + bar[Scope] // error + + import Scope.i + bar[Scope] // ok + + /* + Before the changes: + `ScopeToInt[Scope]` may or may not be reduced before implicit search, + thereby impacting the scope considered for the search. `Scope.i` is included + iff `Scope` still appears in the type, which is the case only before reduction. + In contrast, `ReferencesScope[Scope]` is ok since it will never lose the anchor. 
+ */ diff --git a/tests/neg/i20071b/A_1.scala b/tests/neg/i20071b/A_1.scala new file mode 100644 index 000000000000..ea3aa97c6a6f --- /dev/null +++ b/tests/neg/i20071b/A_1.scala @@ -0,0 +1,13 @@ + +trait Scope +object Scope: + given i: Int = ??? + +type ReferencesScope[S] >: Int <: Int + +type ScopeToInt[Why] = Why match + case Scope => Int + +def foo[T](using d: ReferencesScope[T]): Any = ??? + +def bar[T](using d: ScopeToInt[T]): Any = ??? diff --git a/tests/neg/i20071b/B_2.scala b/tests/neg/i20071b/B_2.scala new file mode 100644 index 000000000000..0b5169cf901c --- /dev/null +++ b/tests/neg/i20071b/B_2.scala @@ -0,0 +1,8 @@ + +def test: Unit = + foo[Scope] // ok + bar[Scope] // error + + import Scope.i + bar[Scope] // ok + diff --git a/tests/neg/i20079/Lib_1.scala b/tests/neg/i20079/Lib_1.scala new file mode 100644 index 000000000000..6d72042464ce --- /dev/null +++ b/tests/neg/i20079/Lib_1.scala @@ -0,0 +1,5 @@ +object Foo: + def xyz[A, CC[X] <: Iterable[X]](coll: CC[A]): Unit = () + +object Bar: + export Foo.xyz diff --git a/tests/neg/i20079/Test_2.scala b/tests/neg/i20079/Test_2.scala new file mode 100644 index 000000000000..c19d98b55bd8 --- /dev/null +++ b/tests/neg/i20079/Test_2.scala @@ -0,0 +1,6 @@ +object Test: + val ints = List(1) + Foo.xyz[Int, List](ints) + Foo.xyz[Int, scala.collection.View](ints) // error + Bar.xyz[Int, List](ints) + Bar.xyz[Int, scala.collection.View](ints) // error \ No newline at end of file diff --git a/tests/neg/i20127.check b/tests/neg/i20127.check new file mode 100644 index 000000000000..933dd0437eb5 --- /dev/null +++ b/tests/neg/i20127.check @@ -0,0 +1,8 @@ +-- [E172] Type Error: tests/neg/i20127.scala:13:9 ---------------------------------------------------------------------- +13 | Foo.foo // error + | ^ + | foo! +-- [E172] Type Error: tests/neg/i20127.scala:14:14 --------------------------------------------------------------------- +14 | FooClone.foo // error + | ^ + | foo! 
diff --git a/tests/neg/i20127.scala b/tests/neg/i20127.scala new file mode 100644 index 000000000000..a21e10a13e75 --- /dev/null +++ b/tests/neg/i20127.scala @@ -0,0 +1,14 @@ +import scala.annotation.* + +trait X + +object Foo: + def foo(using @implicitNotFound("foo!") x: X) = "foo" + +object FooClone: + export Foo.foo + +object Main: + val n = 10 + Foo.foo // error + FooClone.foo // error \ No newline at end of file diff --git a/tests/neg/i20245.check b/tests/neg/i20245.check new file mode 100644 index 000000000000..565bde7678b7 --- /dev/null +++ b/tests/neg/i20245.check @@ -0,0 +1,17 @@ + +-- [E046] Cyclic Error: tests/neg/i20245/Typer_2.scala:16:57 ----------------------------------------------------------- +16 | private[typer] val unification = new Unification(using this) // error + | ^ + | Cyclic reference involving class Context + | + | The error occurred while trying to compute the base classes of class Context + | which required to compute the base classes of trait TyperOps + | which required to compute the signature of trait TyperOps + | which required to elaborate the export clause export unification.requireSubtype + | which required to compute the signature of value unification + | which required to type the right hand side of value unification since no explicit type was given + | which required to compute the base classes of class Context + | + | Run with both -explain-cyclic and -Ydebug-cyclic to see full stack trace. 
+ | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i20245/Context_1.scala b/tests/neg/i20245/Context_1.scala new file mode 100644 index 000000000000..a38d8fe7531d --- /dev/null +++ b/tests/neg/i20245/Context_1.scala @@ -0,0 +1,12 @@ +package effekt +package context + +import effekt.typer.TyperOps + + +abstract class Context extends TyperOps { + + // bring the context itself in scope + implicit val context: Context = this + +} diff --git a/tests/neg/i20245/Messages_1.scala b/tests/neg/i20245/Messages_1.scala new file mode 100644 index 000000000000..c8cc8267d44c --- /dev/null +++ b/tests/neg/i20245/Messages_1.scala @@ -0,0 +1,8 @@ +package effekt +package util + +object messages { + trait ErrorReporter { + + } +} diff --git a/tests/neg/i20245/Tree_1.scala b/tests/neg/i20245/Tree_1.scala new file mode 100644 index 000000000000..54a2a5cc1a64 --- /dev/null +++ b/tests/neg/i20245/Tree_1.scala @@ -0,0 +1,18 @@ +package effekt +package source + +import effekt.context.Context + +object Resolvable { + + // There need to be two resolve extension methods for the error to show up + // They also need to take an implicit Context + extension (n: Int) { + def resolve(using C: Context): Unit = ??? + } + + extension (b: Boolean) { + def resolve(using C: Context): Unit = ??? 
+ } +} +export Resolvable.resolve diff --git a/tests/neg/i20245/Typer_1.scala b/tests/neg/i20245/Typer_1.scala new file mode 100644 index 000000000000..0a61346ecaef --- /dev/null +++ b/tests/neg/i20245/Typer_1.scala @@ -0,0 +1,28 @@ +package effekt +package typer + +import effekt.util.messages.ErrorReporter + +import effekt.context.{ Context } + +// This import is also NECESSARY for the cyclic error +import effekt.source.{ resolve } + + +trait TyperOps extends ErrorReporter { self: Context => + + // passing `this` as ErrorReporter here is also NECESSARY for the cyclic error + private[typer] val unification = new Unification(using this) + + // this export is NECESSARY for the cyclic error + export unification.{ requireSubtype } + + println(1) + + // vvvvvvvv insert a line here, save, and run `compile` again vvvvvvvvvv +} + + + + + diff --git a/tests/neg/i20245/Typer_2.scala b/tests/neg/i20245/Typer_2.scala new file mode 100644 index 000000000000..ed7f05de80d0 --- /dev/null +++ b/tests/neg/i20245/Typer_2.scala @@ -0,0 +1,27 @@ +//> using options -explain-cyclic +package effekt +package typer + +import effekt.util.messages.ErrorReporter + +import effekt.context.{ Context } + +// This import is also NECESSARY for the cyclic error +import effekt.source.{ resolve } + + +trait TyperOps extends ErrorReporter { self: Context => + + // passing `this` as ErrorReporter here is also NECESSARY for the cyclic error + private[typer] val unification = new Unification(using this) // error + + // this export is NECESSARY for the cyclic error + export unification.{ requireSubtype } + + // vvvvvvvv insert a line here, save, and run `compile` again vvvvvvvvvv +} + + + + + diff --git a/tests/neg/i20245/Unification_1.scala b/tests/neg/i20245/Unification_1.scala new file mode 100644 index 000000000000..406ab1b93b00 --- /dev/null +++ b/tests/neg/i20245/Unification_1.scala @@ -0,0 +1,11 @@ +package effekt +package typer + +import effekt.util.messages.ErrorReporter + + +class 
Unification(using C: ErrorReporter) { + + def requireSubtype(): Unit = () + +} diff --git a/tests/neg/i21212.check b/tests/neg/i21212.check new file mode 100644 index 000000000000..06740af36d77 --- /dev/null +++ b/tests/neg/i21212.check @@ -0,0 +1,4 @@ +-- [E172] Type Error: tests/neg/i21212.scala:9:52 ---------------------------------------------------------------------- +9 | def test2(using a2: A)(implicit b2: B) = summon[A] // error: ambiguous + | ^ + |Ambiguous given instances: both parameter b2 and parameter a2 match type Minimization.A of parameter x of method summon in object Predef diff --git a/tests/neg/i21212.scala b/tests/neg/i21212.scala new file mode 100644 index 000000000000..3b030cefcdc7 --- /dev/null +++ b/tests/neg/i21212.scala @@ -0,0 +1,12 @@ +//> using options -source 3.7 + +object Minimization: + + trait A + trait B extends A + + def test1(using a1: A)(using b1: B) = summon[A] // picks (most general) a1 + def test2(using a2: A)(implicit b2: B) = summon[A] // error: ambiguous + def test3(implicit a3: A, b3: B) = summon[A] // picks (most specific) b3 + +end Minimization diff --git a/tests/neg/i21303/JavaEnum.java b/tests/neg/i21303/JavaEnum.java new file mode 100644 index 000000000000..e626d5070626 --- /dev/null +++ b/tests/neg/i21303/JavaEnum.java @@ -0,0 +1 @@ +public enum JavaEnum { ABC, DEF, GHI } diff --git a/tests/neg/i21303/Test.scala b/tests/neg/i21303/Test.scala new file mode 100644 index 000000000000..25d43dac344e --- /dev/null +++ b/tests/neg/i21303/Test.scala @@ -0,0 +1,33 @@ +//> using options -source 3.7-migration +import scala.deriving.Mirror +import scala.compiletime.* +import scala.reflect.ClassTag +import scala.annotation.implicitNotFound + + +trait TSType[T] +object TSType extends DefaultTSTypes with TSTypeMacros + +trait TSNamedType[T] extends TSType[T] + +trait DefaultTSTypes extends JavaTSTypes +trait JavaTSTypes { + given javaEnumTSType[E <: java.lang.Enum[E]: ClassTag]: TSNamedType[E] = ??? 
+} +object DefaultTSTypes extends DefaultTSTypes +trait TSTypeMacros { + inline given [T: Mirror.Of]: TSType[T] = derived[T] + inline def derived[T](using m: Mirror.Of[T]): TSType[T] = { + val elemInstances = summonAll[m.MirroredElemTypes] + ??? + } + + private inline def summonAll[T <: Tuple]: List[TSType[_]] = { + inline erasedValue[T] match { + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[TSType[t]] :: summonAll[ts] + } + } +} + +@main def Test = summon[TSType[JavaEnum]] // error \ No newline at end of file diff --git a/tests/neg/i2887b.check b/tests/neg/i2887b.check index 7b85d1a0223b..5bd5f570fbf7 100644 --- a/tests/neg/i2887b.check +++ b/tests/neg/i2887b.check @@ -4,7 +4,7 @@ | Recursion limit exceeded. | Maybe there is an illegal cyclic reference? | If that's not the case, you could also try to increase the stacksize using the -Xss JVM option. - | For the unprocessed stack trace, compile with -Yno-decode-stacktraces. + | For the unprocessed stack trace, compile with -Xno-decode-stacktraces. | A recurring operation is (inner to outer): | | try to instantiate Z[Z] diff --git a/tests/neg/i2974.scala b/tests/neg/i2974.scala new file mode 100644 index 000000000000..0bff2da1f3ba --- /dev/null +++ b/tests/neg/i2974.scala @@ -0,0 +1,16 @@ + +trait Foo[-T] +trait Bar[-T] extends Foo[T] + +object Test { + + locally: + implicit val fa: Foo[Int] = ??? + implicit val ba: Bar[Int] = ??? + summon[Foo[Int]] // ok + + locally: + implicit val fa: Foo[Int] = ??? + implicit val ba: Bar[Any] = ??? 
+ summon[Foo[Int]] // error: ambiguous +} diff --git a/tests/neg/i3964.scala b/tests/neg/i3964.scala new file mode 100644 index 000000000000..eaf3953bc230 --- /dev/null +++ b/tests/neg/i3964.scala @@ -0,0 +1,12 @@ +//> using options -source future -language:experimental.modularity +trait Animal +class Dog extends Animal +class Cat extends Animal + +object Test1: + + abstract class Bar { val x: Animal } + val bar: Bar { val x: Cat } = new Bar { val x = new Cat } // error, but should work + + trait Foo { val x: Animal } + val foo: Foo { val x: Cat } = new Foo { val x = new Cat } // error, but should work diff --git a/tests/neg/i4496b.scala b/tests/neg/i4496b.scala index e84c29fd9347..b19d4915474f 100644 --- a/tests/neg/i4496b.scala +++ b/tests/neg/i4496b.scala @@ -5,7 +5,7 @@ trait Foo2 { def a: Int } trait Foo3 { var a: Int } object TestStructuralVar { - type T0 = {var a: Int} // error + type T0 = {var a: Int} object TestStructuralVar { type T = {val a: Int; def a_=(x: Int): Unit} def upcast1(v: Foo1): T = v // error diff --git a/tests/neg/i4986c.check b/tests/neg/i4986c.check index 8befc30f5a60..0517ae10e427 100644 --- a/tests/neg/i4986c.check +++ b/tests/neg/i4986c.check @@ -61,4 +61,4 @@ -- [E172] Type Error: tests/neg/i4986c.scala:62:19 --------------------------------------------------------------------- 62 | i.m[Option[Long]] // error | ^ - | String; List; [A, _] =>> List[Option[?]]; Int; Option[Long]; + | String; List; [A, _] =>> List[Option[?]]; Int; Option[Long]; ?XX diff --git a/tests/neg/i6716.check b/tests/neg/i6716.check index 4684842e73fe..0144f539f53c 100644 --- a/tests/neg/i6716.check +++ b/tests/neg/i6716.check @@ -1,5 +1,5 @@ --- Warning: tests/neg/i6716.scala:12:39 -------------------------------------------------------------------------------- -12 | given Monad[Bar] = summon[Monad[Foo]] // warn +-- Error: tests/neg/i6716.scala:11:39 ---------------------------------------------------------------------------------- +11 | given Monad[Bar] = 
summon[Monad[Foo]] // error | ^ | Result of implicit search for Monad[Foo] will change. | Current result Bar.given_Monad_Bar will be no longer eligible @@ -12,5 +12,3 @@ | - use a `given ... with` clause as the enclosing given, | - rearrange definitions so that Bar.given_Monad_Bar comes earlier, | - use an explicit argument. - | This will be an error in Scala 3.5 and later. -No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/i6716.scala b/tests/neg/i6716.scala index 311209fd9006..8b37d4e223ac 100644 --- a/tests/neg/i6716.scala +++ b/tests/neg/i6716.scala @@ -1,4 +1,3 @@ -//> using options -Xfatal-warnings trait Monad[T]: def id: String @@ -9,11 +8,10 @@ object Foo { opaque type Bar = Foo object Bar { - given Monad[Bar] = summon[Monad[Foo]] // warn + given Monad[Bar] = summon[Monad[Foo]] // error } object Test extends App { println(summon[Monad[Foo]].id) println(summon[Monad[Bar]].id) } -// nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) \ No newline at end of file diff --git a/tests/neg/i7045.scala b/tests/neg/i7045.scala new file mode 100644 index 000000000000..b4c6d60cd35a --- /dev/null +++ b/tests/neg/i7045.scala @@ -0,0 +1,7 @@ +trait Bar { type Y } +trait Foo { type X } + +class Test: + given a1(using b: Bar): Foo = new Foo { type X = b.Y } // ok + given a2(using b: Bar): (Foo { type X = b.Y }) = new Foo { type X = b.Y } // ok + given a3(using b: Bar): Foo { type X = b.Y } = new Foo { type X = b.Y } // error \ No newline at end of file diff --git a/tests/neg/i7092.check b/tests/neg/i7092.check new file mode 100644 index 000000000000..bd4bdc783a08 --- /dev/null +++ b/tests/neg/i7092.check @@ -0,0 +1,32 @@ +-- [E172] Type Error: tests/neg/i7092.scala:24:19 ---------------------------------------------------------------------- +24 | summon[F[String]] // error + | ^ + | Not found for String +-- [E172] Type Error: tests/neg/i7092.scala:25:19 
---------------------------------------------------------------------- +25 | summon[G[String]] // error + | ^ + | Not found for String +-- [E172] Type Error: tests/neg/i7092.scala:26:16 ---------------------------------------------------------------------- +26 | summon[H[Int]] // error + | ^ + | Not found for Int, ?B +-- [E172] Type Error: tests/neg/i7092.scala:27:23 ---------------------------------------------------------------------- +27 | summon[H[Int][Float]] // error + | ^ + | Not found for Int, Float +-- [E172] Type Error: tests/neg/i7092.scala:28:18 ---------------------------------------------------------------------- +28 | summon[AAA[Int]] // error + | ^ + | Not found for Int +-- [E172] Type Error: tests/neg/i7092.scala:29:25 ---------------------------------------------------------------------- +29 | summon[AAA[Int][Float]] // error + | ^ + | Not found for Int +-- [E172] Type Error: tests/neg/i7092.scala:30:19 ---------------------------------------------------------------------- +30 | summon[op.F[Int]] // error + | ^ + | Could not find Int +-- [E172] Type Error: tests/neg/i7092.scala:31:28 ---------------------------------------------------------------------- +31 | summon[String =!:= String] // error + | ^ + | Cannot proof type inequality because types are equal: String =:= String diff --git a/tests/neg/i7092.scala b/tests/neg/i7092.scala new file mode 100644 index 000000000000..9481008b9d70 --- /dev/null +++ b/tests/neg/i7092.scala @@ -0,0 +1,31 @@ +import scala.annotation.implicitNotFound +import scala.util.NotGiven + +@implicitNotFound("Not found for ${A}") +type F[A] + +@implicitNotFound("Not found for ${A}") +trait G[A] + +@implicitNotFound("Not found for ${A}, ${B}") +type H = [A] =>> [B] =>> (A, B) + +@implicitNotFound("Not found for ${A}") +type AAA = [A] =>> [A] =>> A + +object op: + @implicitNotFound("Could not find ${A}") + opaque type F[A] = A + +@implicitNotFound("Cannot proof type inequality because types are equal: ${A} =:= ${B}") +type 
=!:=[A, B] = NotGiven[A =:= B] + +object Test: + summon[F[String]] // error + summon[G[String]] // error + summon[H[Int]] // error + summon[H[Int][Float]] // error + summon[AAA[Int]] // error + summon[AAA[Int][Float]] // error + summon[op.F[Int]] // error + summon[String =!:= String] // error diff --git a/tests/neg/i7247.scala b/tests/neg/i7247.scala index 9172f90fad07..3514f20c47fe 100644 --- a/tests/neg/i7247.scala +++ b/tests/neg/i7247.scala @@ -1,2 +1,2 @@ val x = "foo" match - case _: (a *: (b: Any)) => ??? // error \ No newline at end of file + case _: (a *: (b: Any)) => ??? // error, now OK since (b: Any) is a named tuple \ No newline at end of file diff --git a/tests/neg/i7294-a.check b/tests/neg/i7294-a.check deleted file mode 100644 index c33735258ad0..000000000000 --- a/tests/neg/i7294-a.check +++ /dev/null @@ -1,27 +0,0 @@ --- [E007] Type Mismatch Error: tests/neg/i7294-a.scala:10:20 ----------------------------------------------------------- -10 | case x: T => x.g(10) // error - | ^^^^^^^ - | Found: Any - | Required: T - | - | where: T is a type in given instance f with bounds <: foo.Foo - | - | longer explanation available when compiling with `-explain` --- Warning: tests/neg/i7294-a.scala:10:12 ------------------------------------------------------------------------------ -10 | case x: T => x.g(10) // error - | ^ - | Result of implicit search for scala.reflect.TypeTest[Nothing, T] will change. - | Current result foo.Test.f will be no longer eligible - | because it is not defined before the search position. - | Result with new rules: No Matching Implicit. - | To opt into the new rules, compile with `-source future` or use - | the `scala.language.future` language import. - | - | To fix the problem without the language import, you could try one of the following: - | - use a `given ... with` clause as the enclosing given, - | - rearrange definitions so that foo.Test.f comes earlier, - | - use an explicit argument. 
- | This will be an error in Scala 3.5 and later. - | - | where: T is a type in given instance f with bounds <: foo.Foo -No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/i7294-a.scala b/tests/neg/i7294-a.scala deleted file mode 100644 index a5193097e941..000000000000 --- a/tests/neg/i7294-a.scala +++ /dev/null @@ -1,14 +0,0 @@ -//> using options -Xfatal-warnings - -package foo - -trait Foo { def g(x: Int): Any } - -object Test: - - inline given f[T <: Foo]: T = ??? match { - case x: T => x.g(10) // error - } - - @main def Test = f -// nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/i7294.check b/tests/neg/i7294.check new file mode 100644 index 000000000000..d6e559997f78 --- /dev/null +++ b/tests/neg/i7294.check @@ -0,0 +1,25 @@ +-- Error: tests/neg/i7294.scala:7:10 ----------------------------------------------------------------------------------- +7 | case x: T => x.g(10) // error // error + | ^ + | Result of implicit search for scala.reflect.TypeTest[Nothing, T] will change. + | Current result foo.f will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: No Matching Implicit. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that foo.f comes earlier, + | - use an explicit argument. 
+ | + | where: T is a type in given instance f with bounds <: foo.Foo +-- [E007] Type Mismatch Error: tests/neg/i7294.scala:7:18 -------------------------------------------------------------- +7 | case x: T => x.g(10) // error // error + | ^^^^^^^ + | Found: Any + | Required: T + | + | where: T is a type in given instance f with bounds <: foo.Foo + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/i7294-b.scala b/tests/neg/i7294.scala similarity index 57% rename from tests/neg/i7294-b.scala rename to tests/neg/i7294.scala index ba12239af039..fbb00f9b7e89 100644 --- a/tests/neg/i7294-b.scala +++ b/tests/neg/i7294.scala @@ -1,4 +1,3 @@ -//> using options -Xfatal-warnings package foo @@ -9,4 +8,3 @@ inline given f[T <: Foo]: T = ??? match { } @main def Test = f -// nopos-error: No warnings can be incurred under -Werror (or -Xfatal-warnings) diff --git a/tests/neg/i7751.scala b/tests/neg/i7751.scala index 978ed860574f..fd66e7d451be 100644 --- a/tests/neg/i7751.scala +++ b/tests/neg/i7751.scala @@ -1,3 +1,3 @@ import language.`3.3` -val a = Some(a=a,)=> // error // error +val a = Some(a=a,)=> // error // error // error // error val a = Some(x=y,)=> diff --git a/tests/neg/i9299.scala b/tests/neg/i9299.scala index 6c23d11553ff..c3ae55ab9d18 100644 --- a/tests/neg/i9299.scala +++ b/tests/neg/i9299.scala @@ -1,4 +1,4 @@ type F <: F = 1 match { // error - case _ => foo.foo // error // error + case _ => foo.foo // error } def foo(a: Int): Unit = ??? 
diff --git a/tests/neg/i9328.scala b/tests/neg/i9328.scala index dabde498e1dc..c13d33e103b9 100644 --- a/tests/neg/i9328.scala +++ b/tests/neg/i9328.scala @@ -3,7 +3,7 @@ type Id[T] = T match { case _ => T } -class Foo2[T <: Id[T]] // error // error +class Foo2[T <: Id[T]] // error object Foo { // error object Foo { } diff --git a/tests/neg/i9330.scala b/tests/neg/i9330.scala index ca25582ef7e8..6ba57c033473 100644 --- a/tests/neg/i9330.scala +++ b/tests/neg/i9330.scala @@ -1,4 +1,4 @@ val x = { - () == "" // error + () == "" implicit def foo[A: A] // error // error // error } diff --git a/tests/neg/illegal-refinements.scala b/tests/neg/illegal-refinements.scala index 4a170bc345d8..374d3dca7a84 100644 --- a/tests/neg/illegal-refinements.scala +++ b/tests/neg/illegal-refinements.scala @@ -2,6 +2,5 @@ trait x0 { type T = String { val x: Int = 1 } // error: illegal refinement type U = String { def x(): Int = 1 } // error: illegal refinement - type V = String { var x: Int } // error: illegal refinement - + type V = String { var x: Int = 1 } // error: illegal refinement } diff --git a/tests/neg/inline-unstable-accessors.scala b/tests/neg/inline-unstable-accessors.scala index d40f69819a2e..c02097f1921a 100644 --- a/tests/neg/inline-unstable-accessors.scala +++ b/tests/neg/inline-unstable-accessors.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental -Werror -WunstableInlineAccessors -explain +//> using options -experimental -Werror -WunstableInlineAccessors -explain package foo import scala.annotation.publicInBinary diff --git a/tests/neg/kind-projector-underscores.scala b/tests/neg/kind-projector-underscores.scala index 76aada871fae..e2cdee917e81 100644 --- a/tests/neg/kind-projector-underscores.scala +++ b/tests/neg/kind-projector-underscores.scala @@ -1,4 +1,4 @@ -//> using options -Ykind-projector:underscores +//> using options -Xkind-projector:underscores package kind_projector_neg diff --git a/tests/neg/kind-projector.scala 
b/tests/neg/kind-projector.scala index a7fc24c70b93..a03dd83a1945 100644 --- a/tests/neg/kind-projector.scala +++ b/tests/neg/kind-projector.scala @@ -1,4 +1,4 @@ -//> using options -Ykind-projector +//> using options -Xkind-projector package kind_projector_neg diff --git a/tests/neg/looping-givens.check b/tests/neg/looping-givens.check new file mode 100644 index 000000000000..1e7ee08d79df --- /dev/null +++ b/tests/neg/looping-givens.check @@ -0,0 +1,48 @@ +-- Error: tests/neg/looping-givens.scala:9:22 -------------------------------------------------------------------------- +9 | given aa: A = summon // error + | ^ + | Result of implicit search for T will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: a. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. + | + | where: T is a type variable with constraint <: A +-- Error: tests/neg/looping-givens.scala:10:22 ------------------------------------------------------------------------- +10 | given bb: B = summon // error + | ^ + | Result of implicit search for T will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: b. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. 
+ | + | where: T is a type variable with constraint <: B +-- Error: tests/neg/looping-givens.scala:11:28 ------------------------------------------------------------------------- +11 | given ab: (A & B) = summon // error + | ^ + | Result of implicit search for T will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: Search Failure: joint(ab, ab). + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. + | + | where: T is a type variable with constraint <: A & B diff --git a/tests/neg/looping-givens.scala b/tests/neg/looping-givens.scala index 357a417f0ed9..57dc95f99aab 100644 --- a/tests/neg/looping-givens.scala +++ b/tests/neg/looping-givens.scala @@ -1,4 +1,4 @@ -//> using options -Xfatal-warnings +//> options -source 3.4 class A class B diff --git a/tests/neg/main-annotation-currying.scala b/tests/neg/main-annotation-currying.scala deleted file mode 100644 index fa8e9593849c..000000000000 --- a/tests/neg/main-annotation-currying.scala +++ /dev/null @@ -1,8 +0,0 @@ -import scala.annotation.newMain - -object myProgram: - - @newMain def add(num: Int)(inc: Int): Unit = // error - println(s"$num + $inc = ${num + inc}") - -end myProgram diff --git a/tests/neg/main-annotation-generic.scala b/tests/neg/main-annotation-generic.scala deleted file mode 100644 index 6f951056f1b2..000000000000 --- a/tests/neg/main-annotation-generic.scala +++ /dev/null @@ -1,8 +0,0 @@ -import scala.annotation.newMain - -object myProgram: - - @newMain def nop[T](t: T): T = // error - t - -end myProgram diff --git a/tests/neg/main-annotation-implicit-given.scala b/tests/neg/main-annotation-implicit-given.scala 
deleted file mode 100644 index 2a7d8202acf5..000000000000 --- a/tests/neg/main-annotation-implicit-given.scala +++ /dev/null @@ -1,13 +0,0 @@ -import scala.annotation.newMain - -object myProgram: - implicit val x: Int = 2 - given Int = 3 - - @newMain def showImplicit(implicit num: Int): Unit = // error - println(num) - - @newMain def showUsing(using num: Int): Unit = // error - println(num) - -end myProgram diff --git a/tests/neg/main-annotation-mainannotation.scala b/tests/neg/main-annotation-mainannotation.scala deleted file mode 100644 index 21e37d1779af..000000000000 --- a/tests/neg/main-annotation-mainannotation.scala +++ /dev/null @@ -1,3 +0,0 @@ -import scala.annotation.MainAnnotation - -@MainAnnotation def f(i: Int, n: Int) = () // error diff --git a/tests/neg/main-annotation-multiple-annot.scala b/tests/neg/main-annotation-multiple-annot.scala deleted file mode 100644 index faec8162e9c4..000000000000 --- a/tests/neg/main-annotation-multiple-annot.scala +++ /dev/null @@ -1,8 +0,0 @@ -import scala.annotation.newMain - -object myProgram: - - @newMain @newMain def add1(num: Int, inc: Int): Unit = // error - println(s"$num + $inc = ${num + inc}") - -end myProgram diff --git a/tests/neg/main-annotation-nonmethod.scala b/tests/neg/main-annotation-nonmethod.scala deleted file mode 100644 index 2e46098a9ac5..000000000000 --- a/tests/neg/main-annotation-nonmethod.scala +++ /dev/null @@ -1,11 +0,0 @@ -import scala.annotation.newMain - -object myProgram: - - @newMain val n = 2 // error - - @newMain class A // error - - @newMain val f = ((s: String) => println(s)) // error - -end myProgram diff --git a/tests/neg/main-annotation-nonstatic.scala b/tests/neg/main-annotation-nonstatic.scala deleted file mode 100644 index 68d3ba2b3569..000000000000 --- a/tests/neg/main-annotation-nonstatic.scala +++ /dev/null @@ -1,4 +0,0 @@ -import scala.annotation.newMain - -class A: - @newMain def foo(bar: Int) = () // error diff --git a/tests/neg/main-annotation-unknown-parser-1.scala 
b/tests/neg/main-annotation-unknown-parser-1.scala deleted file mode 100644 index 75ff2ceac444..000000000000 --- a/tests/neg/main-annotation-unknown-parser-1.scala +++ /dev/null @@ -1,12 +0,0 @@ -import scala.annotation.newMain - -class MyNumber(val value: Int) { - def +(other: MyNumber): MyNumber = MyNumber(value + other.value) -} - -object myProgram: - - @newMain def add(num: MyNumber, inc: MyNumber): Unit = // error - println(s"$num + $inc = ${num + inc}") - -end myProgram diff --git a/tests/neg/main-annotation-unknown-parser-2.scala b/tests/neg/main-annotation-unknown-parser-2.scala deleted file mode 100644 index a5681c39419b..000000000000 --- a/tests/neg/main-annotation-unknown-parser-2.scala +++ /dev/null @@ -1,27 +0,0 @@ -import scala.annotation.newMain -import scala.util.CommandLineParser.FromString - -object myProgram: - - @newMain def add(num: Test.MyNumber, inc: Test.MyNumber): Unit = // error - val numV = Test.value(num) - val incV = Test.value(inc) - println(s"$numV + $incV = ${numV + incV}") - -end myProgram - - -object Test: - opaque type MyNumber = Int - - def create(n: Int): MyNumber = n - def value(n: MyNumber): Int = n - - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "3")) -end Test diff --git a/tests/neg/matchtype-seq.check b/tests/neg/matchtype-seq.check index 980329d585dc..1e786b6714c6 100644 --- a/tests/neg/matchtype-seq.check +++ b/tests/neg/matchtype-seq.check @@ -1,19 +1,35 @@ --- [E184] Type Error: tests/neg/matchtype-seq.scala:9:11 --------------------------------------------------------------- +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:9:18 ------------------------------------------------------ 9 | identity[T1[3]]("") // error - | ^^^^^ - | Match type reduction failed since selector (3 : Int) - | matches none of the cases + | ^^ + | 
Found: ("" : String) + | Required: Test.T1[(3 : Int)] | - | case (1 : Int) => Int - | case (2 : Int) => String --- [E184] Type Error: tests/neg/matchtype-seq.scala:10:11 -------------------------------------------------------------- + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T1[(3 : Int)] + | failed since selector (3 : Int) + | matches none of the cases + | + | case (1 : Int) => Int + | case (2 : Int) => String + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:10:18 ----------------------------------------------------- 10 | identity[T1[3]](1) // error - | ^^^^^ - | Match type reduction failed since selector (3 : Int) - | matches none of the cases + | ^ + | Found: (1 : Int) + | Required: Test.T1[(3 : Int)] | - | case (1 : Int) => Int - | case (2 : Int) => String + | Note: a match type could not be fully reduced: + | + | trying to reduce Test.T1[(3 : Int)] + | failed since selector (3 : Int) + | matches none of the cases + | + | case (1 : Int) => Int + | case (2 : Int) => String + | + | longer explanation available when compiling with `-explain` -- [E007] Type Mismatch Error: tests/neg/matchtype-seq.scala:11:20 ----------------------------------------------------- 11 | identity[T1[Int]]("") // error | ^^ diff --git a/tests/neg/mt-deskolemize-2.scala b/tests/neg/mt-deskolemize-2.scala new file mode 100644 index 000000000000..90d506a42e6f --- /dev/null +++ b/tests/neg/mt-deskolemize-2.scala @@ -0,0 +1,60 @@ +//> using options -language:experimental.betterMatchTypeExtractors + +trait Expr: + type Value +object Expr: + type Of[V] = Expr { type Value = V } + type ExtractValue[F <: Expr] = F match + case Expr.Of[v] => v +import Expr.ExtractValue + +class SimpleLoop1 extends Expr: + type Value = ExtractValue[SimpleLoop2] + +class SimpleLoop2 extends Expr: + type Value = ExtractValue[SimpleLoop1] + +object Test1: + val x: ExtractValue[SimpleLoop1] = 1 // 
error + +trait Description: + type Elem <: Tuple + +class PrimBroken extends Expr: + type Value = Alias + type Alias = Value // error + +class Prim extends Expr: + type Value = BigInt + +class VecExpr[E <: Expr] extends Expr: + type Value = Vector[ExtractValue[E]] + +trait ProdExpr extends Expr: + val description: Description + type Value = Tuple.Map[description.Elem, [X] =>> ExtractValue[X & Expr]] + + +class MyExpr1 extends ProdExpr: + final val description = new Description: + type Elem = (VecExpr[Prim], MyExpr2) + +class MyExpr2 extends ProdExpr: + final val description = new Description: + type Elem = (VecExpr[VecExpr[MyExpr1]], Prim) + +trait Constable[E <: Expr]: + def lit(v: ExtractValue[E]): E +object Constable: + given [E <: Expr]: Constable[E] = ??? + +object Test2: + def fromLiteral[E <: Expr : Constable](v: ExtractValue[E]): E = + summon[Constable[E]].lit(v) + val x0: ExtractValue[Prim] = "" // error + val x1: ExtractValue[PrimBroken] = 1 // error + + val foo: MyExpr2 = new MyExpr2 + val v: foo.Value = (Vector(Vector()), 1) // error: Recursion limit exceeded + val c: MyExpr2 = fromLiteral: + (Vector(Vector()), 1) // error: Recursion limit exceeded diff --git a/tests/neg/mt-deskolemize.scala b/tests/neg/mt-deskolemize.scala new file mode 100644 index 000000000000..0a58d5db7bc4 --- /dev/null +++ b/tests/neg/mt-deskolemize.scala @@ -0,0 +1,16 @@ +trait Expr: + type Value +object Expr: + type Of[V] = Expr { type Value = V } + type ExtractValue[F <: Expr] = F match + case Expr.Of[v] => v +import Expr.ExtractValue + +class SimpleLoop1 extends Expr: + type Value = ExtractValue[SimpleLoop2] + +class SimpleLoop2 extends Expr: + type Value = ExtractValue[SimpleLoop1] + +object Test1: + val x: ExtractValue[SimpleLoop1] = 1 // error diff --git a/tests/neg/named-tuples-2.check b/tests/neg/named-tuples-2.check new file mode 100644 index 000000000000..0a52d5f3989b --- /dev/null +++ b/tests/neg/named-tuples-2.check @@ -0,0 +1,8 @@ +-- Error: 
tests/neg/named-tuples-2.scala:5:9 --------------------------------------------------------------------------- +5 | case (name, age) => () // error + | ^ + | this case is unreachable since type (String, Int, Boolean) is not a subclass of class Tuple2 +-- Error: tests/neg/named-tuples-2.scala:6:9 --------------------------------------------------------------------------- +6 | case (n, a, m, x) => () // error + | ^ + | this case is unreachable since type (String, Int, Boolean) is not a subclass of class Tuple4 diff --git a/tests/neg/named-tuples-2.scala b/tests/neg/named-tuples-2.scala new file mode 100644 index 000000000000..0507891e0549 --- /dev/null +++ b/tests/neg/named-tuples-2.scala @@ -0,0 +1,6 @@ +import language.experimental.namedTuples +def Test = + val person = (name = "Bob", age = 33, married = true) + person match + case (name, age) => () // error + case (n, a, m, x) => () // error diff --git a/tests/neg/named-tuples-3.check b/tests/neg/named-tuples-3.check new file mode 100644 index 000000000000..2091c36191c0 --- /dev/null +++ b/tests/neg/named-tuples-3.check @@ -0,0 +1,7 @@ +-- [E007] Type Mismatch Error: tests/neg/named-tuples-3.scala:7:16 ----------------------------------------------------- +7 |val p: Person = f // error + | ^ + | Found: NamedTuple.NamedTuple[(Int, Any), (Int, String)] + | Required: Person + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/named-tuples-3.scala b/tests/neg/named-tuples-3.scala new file mode 100644 index 000000000000..0f1215338b0a --- /dev/null +++ b/tests/neg/named-tuples-3.scala @@ -0,0 +1,7 @@ +import language.experimental.namedTuples + +def f: NamedTuple.NamedTuple[(Int, Any), (Int, String)] = ??? 
+ +type Person = (name: Int, age: String) + +val p: Person = f // error diff --git a/tests/neg/named-tuples.check b/tests/neg/named-tuples.check new file mode 100644 index 000000000000..db3cc703722f --- /dev/null +++ b/tests/neg/named-tuples.check @@ -0,0 +1,103 @@ +-- Error: tests/neg/named-tuples.scala:9:19 ---------------------------------------------------------------------------- +9 | val illformed = (_2 = 2) // error + | ^^^^^^ + | _2 cannot be used as the name of a tuple element because it is a regular tuple selector +-- Error: tests/neg/named-tuples.scala:10:20 --------------------------------------------------------------------------- +10 | type Illformed = (_1: Int) // error + | ^^^^^^^ + | _1 cannot be used as the name of a tuple element because it is a regular tuple selector +-- Error: tests/neg/named-tuples.scala:11:40 --------------------------------------------------------------------------- +11 | val illformed2 = (name = "", age = 0, name = true) // error + | ^^^^^^^^^^^ + | Duplicate tuple element name +-- Error: tests/neg/named-tuples.scala:12:45 --------------------------------------------------------------------------- +12 | type Illformed2 = (name: String, age: Int, name: Boolean) // error + | ^^^^^^^^^^^^^ + | Duplicate tuple element name +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:20:20 ------------------------------------------------------ +20 | val _: NameOnly = person // error + | ^^^^^^ + | Found: (Test.person : (name : String, age : Int)) + | Required: Test.NameOnly + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:21:18 ------------------------------------------------------ +21 | val _: Person = nameOnly // error + | ^^^^^^^^ + | Found: (Test.nameOnly : (name : String)) + | Required: Test.Person + | + | longer explanation available when compiling with `-explain` +-- [E172] Type Error: tests/neg/named-tuples.scala:22:41 
--------------------------------------------------------------- +22 | val _: Person = (name = "") ++ nameOnly // error + | ^ + | Cannot prove that Tuple.Disjoint[Tuple1[("name" : String)], Tuple1[("name" : String)]] =:= (true : Boolean). +-- [E008] Not Found Error: tests/neg/named-tuples.scala:23:9 ----------------------------------------------------------- +23 | person._1 // error + | ^^^^^^^^^ + | value _1 is not a member of (name : String, age : Int) +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:25:36 ------------------------------------------------------ +25 | val _: (age: Int, name: String) = person // error + | ^^^^^^ + | Found: (Test.person : (name : String, age : Int)) + | Required: (age : Int, name : String) + | + | longer explanation available when compiling with `-explain` +-- Error: tests/neg/named-tuples.scala:27:17 --------------------------------------------------------------------------- +27 | val (name = x, agee = y) = person // error + | ^^^^^^^^ + | No element named `agee` is defined in selector type (name : String, age : Int) +-- Error: tests/neg/named-tuples.scala:30:10 --------------------------------------------------------------------------- +30 | case (name = n, age = a) => () // error // error + | ^^^^^^^^ + | No element named `name` is defined in selector type (String, Int) +-- Error: tests/neg/named-tuples.scala:30:20 --------------------------------------------------------------------------- +30 | case (name = n, age = a) => () // error // error + | ^^^^^^^ + | No element named `age` is defined in selector type (String, Int) +-- [E172] Type Error: tests/neg/named-tuples.scala:32:27 --------------------------------------------------------------- +32 | val pp = person ++ (1, 2) // error + | ^ + | Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), Tuple] =:= (true : Boolean). 
+-- [E172] Type Error: tests/neg/named-tuples.scala:35:18 --------------------------------------------------------------- +35 | person ++ (1, 2) match // error + | ^ + | Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), Tuple] =:= (true : Boolean). +-- Error: tests/neg/named-tuples.scala:38:17 --------------------------------------------------------------------------- +38 | val bad = ("", age = 10) // error + | ^^^^^^^^ + | Illegal combination of named and unnamed tuple elements +-- Error: tests/neg/named-tuples.scala:41:20 --------------------------------------------------------------------------- +41 | case (name = n, age) => () // error + | ^^^ + | Illegal combination of named and unnamed tuple elements +-- Error: tests/neg/named-tuples.scala:42:16 --------------------------------------------------------------------------- +42 | case (name, age = a) => () // error + | ^^^^^^^ + | Illegal combination of named and unnamed tuple elements +-- Error: tests/neg/named-tuples.scala:45:10 --------------------------------------------------------------------------- +45 | case (age = x) => // error + | ^^^^^^^ + | No element named `age` is defined in selector type Tuple +-- [E172] Type Error: tests/neg/named-tuples.scala:47:27 --------------------------------------------------------------- +47 | val p2 = person ++ person // error + | ^ + |Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), (("name" : String), ("age" : String))] =:= (true : Boolean). +-- [E172] Type Error: tests/neg/named-tuples.scala:48:43 --------------------------------------------------------------- +48 | val p3 = person ++ (first = 11, age = 33) // error + | ^ + |Cannot prove that Tuple.Disjoint[(("name" : String), ("age" : String)), (("first" : String), ("age" : String))] =:= (true : Boolean). 
+-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:50:22 ------------------------------------------------------ +50 | val p5 = person.zip((first = 11, age = 33)) // error + | ^^^^^^^^^^^^^^^^^^^^^^ + | Found: (first : Int, age : Int) + | Required: NamedTuple.NamedTuple[(("name" : String), ("age" : String)), Tuple] + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/named-tuples.scala:61:32 ------------------------------------------------------ +61 | val typo: (name: ?, age: ?) = (name = "he", ag = 1) // error + | ^^^^^^^^^^^^^^^^^^^^^ + | Found: (name : String, ag : Int) + | Required: (name : ?, age : ?) + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/named-tuples.scala b/tests/neg/named-tuples.scala new file mode 100644 index 000000000000..8f78f7915206 --- /dev/null +++ b/tests/neg/named-tuples.scala @@ -0,0 +1,61 @@ +import annotation.experimental +import language.experimental.namedTuples + +@experimental object Test: + + type Person = (name: String, age: Int) + val person = (name = "Bob", age = 33): (name: String, age: Int) + + val illformed = (_2 = 2) // error + type Illformed = (_1: Int) // error + val illformed2 = (name = "", age = 0, name = true) // error + type Illformed2 = (name: String, age: Int, name: Boolean) // error + + type NameOnly = (name: String) + + val nameOnly = (name = "Louis") + + val y: (String, Int) = person // ok, conversion + val _: (String, Int) = (name = "", age = 0) // ok, conversion + val _: NameOnly = person // error + val _: Person = nameOnly // error + val _: Person = (name = "") ++ nameOnly // error + person._1 // error + + val _: (age: Int, name: String) = person // error + + val (name = x, agee = y) = person // error + + ("Ives", 2) match + case (name = n, age = a) => () // error // error + + val pp = person ++ (1, 2) // error + val qq = ("a", true) ++ (1, 2) + + person ++ (1, 2) match // error + case _ => + + val bad = 
("", age = 10) // error + + person match + case (name = n, age) => () // error + case (name, age = a) => () // error + + (??? : Tuple) match + case (age = x) => // error + + val p2 = person ++ person // error + val p3 = person ++ (first = 11, age = 33) // error + val p4 = person.zip(person) // ok + val p5 = person.zip((first = 11, age = 33)) // error + // Note: this one depends on the details of the conversion named -> unnamed + // we do a conversion only of the expected type is a tuple. If we used a + // regular implicit conversion, then (first = 11, age = 33) would be converted + // to (Int, Int) and that would be upcast to (name: Int, age: Int), which + // would hide an error. So we have be careful that the "downwards" conversion + // is specific and does not apply to a different "upwards" type. + // The same problem happens if we assume named <: unnamed. In that case we would first + // upcast (first: Int, age: Int) to (Int, Int), and then use the downwards + // conversion to (name: Int, age: Int). This one would be harder to guard against. + + val typo: (name: ?, age: ?) 
= (name = "he", ag = 1) // error diff --git a/tests/neg/namedTypeParams.check b/tests/neg/namedTypeParams.check index 3f6f9f7913e8..5e0672f20f25 100644 --- a/tests/neg/namedTypeParams.check +++ b/tests/neg/namedTypeParams.check @@ -24,16 +24,16 @@ 19 | f[X = Int, String](1, "") // error // error | ^ | '=' expected, but ']' found --- Error: tests/neg/namedTypeParams.scala:6:8 -------------------------------------------------------------------------- +-- Error: tests/neg/namedTypeParams.scala:6:4 -------------------------------------------------------------------------- 6 | f[X = Int, Y = Int](1, 2) // error: experimental // error: experimental - | ^^^ - | Named type arguments are experimental, - | they must be enabled with a `experimental.namedTypeArguments` language import or setting --- Error: tests/neg/namedTypeParams.scala:6:17 ------------------------------------------------------------------------- + | ^^^^^^^ + | Named type arguments are experimental, + | they must be enabled with a `experimental.namedTypeArguments` language import or setting +-- Error: tests/neg/namedTypeParams.scala:6:13 ------------------------------------------------------------------------- 6 | f[X = Int, Y = Int](1, 2) // error: experimental // error: experimental - | ^^^ - | Named type arguments are experimental, - | they must be enabled with a `experimental.namedTypeArguments` language import or setting + | ^^^^^^^ + | Named type arguments are experimental, + | they must be enabled with a `experimental.namedTypeArguments` language import or setting -- [E006] Not Found Error: tests/neg/namedTypeParams.scala:11:11 ------------------------------------------------------- 11 | val x: C[T = Int] = // error: ']' expected, but `=` found // error | ^ diff --git a/tests/neg/overrides.scala b/tests/neg/overrides.scala index ff83b91d26be..8016f5646d09 100644 --- a/tests/neg/overrides.scala +++ b/tests/neg/overrides.scala @@ -1,3 +1,5 @@ +//> using options -experimental + class Foo { type A = Int 
type B >: Int <: Int diff --git a/tests/neg/parent-refinement-access.check b/tests/neg/parent-refinement-access.check new file mode 100644 index 000000000000..5cde9d51558f --- /dev/null +++ b/tests/neg/parent-refinement-access.check @@ -0,0 +1,7 @@ +-- [E164] Declaration Error: tests/neg/parent-refinement-access.scala:6:6 ---------------------------------------------- +6 |trait Year2(private[Year2] val value: Int) extends (Gen { val x: Int }) // error + | ^ + | error overriding value x in trait Year2 of type Int; + | value x in trait Gen of type Any has weaker access privileges; it should be public + | (Note that value x in trait Year2 of type Int is abstract, + | and is therefore overridden by concrete value x in trait Gen of type Any) diff --git a/tests/neg/parent-refinement-access.scala b/tests/neg/parent-refinement-access.scala new file mode 100644 index 000000000000..57d45f4fb201 --- /dev/null +++ b/tests/neg/parent-refinement-access.scala @@ -0,0 +1,6 @@ +//> using options -source future -language:experimental.modularity + +trait Gen: + private[Gen] val x: Any = () + +trait Year2(private[Year2] val value: Int) extends (Gen { val x: Int }) // error diff --git a/tests/neg/parent-refinement.check b/tests/neg/parent-refinement.check index 550430bd35a7..cf9a57bc7821 100644 --- a/tests/neg/parent-refinement.check +++ b/tests/neg/parent-refinement.check @@ -1,4 +1,25 @@ --- Error: tests/neg/parent-refinement.scala:5:2 ------------------------------------------------------------------------ -5 | with Ordered[Year] { // error - | ^^^^ - | end of toplevel definition expected but 'with' found +-- Error: tests/neg/parent-refinement.scala:11:6 ----------------------------------------------------------------------- +11 |class Bar extends IdOf[Int], (X { type Value = String }) // error + | ^^^ + |class Bar cannot be instantiated since it has a member Value with possibly conflicting bounds Int | String <: ... 
<: Int & String +-- [E007] Type Mismatch Error: tests/neg/parent-refinement.scala:15:17 ------------------------------------------------- +15 | val x: Value = 0 // error + | ^ + | Found: (0 : Int) + | Required: Baz.this.Value + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/parent-refinement.scala:21:6 -------------------------------------------------- +21 | foo(2) // error + | ^ + | Found: (2 : Int) + | Required: Boolean + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/parent-refinement.scala:17:22 ------------------------------------------------- +17 |val x: IdOf[Int] = Baz() // error + | ^^^^^ + | Found: Baz + | Required: IdOf[Int] + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/parent-refinement.scala b/tests/neg/parent-refinement.scala index ca2b88a75fd8..868747faba57 100644 --- a/tests/neg/parent-refinement.scala +++ b/tests/neg/parent-refinement.scala @@ -1,7 +1,21 @@ +//> using options -source future -language:experimental.modularity trait Id { type Value } +trait X { type Value } +type IdOf[T] = Id { type Value = T } + case class Year(value: Int) extends AnyVal - with Id { type Value = Int } - with Ordered[Year] { // error + with (Id { type Value = Int }) + with Ordered[Year] + +class Bar extends IdOf[Int], (X { type Value = String }) // error + +class Baz extends IdOf[Int]: + type Value = String + val x: Value = 0 // error + +val x: IdOf[Int] = Baz() // error -} \ No newline at end of file +object Clash extends ({ def foo(x: Int): Int }): + def foo(x: Boolean): Int = 1 + foo(2) // error diff --git a/tests/neg/parser-stability-12.scala b/tests/neg/parser-stability-12.scala index 78ff178d010c..17a611d70e34 100644 --- a/tests/neg/parser-stability-12.scala +++ b/tests/neg/parser-stability-12.scala @@ -1,4 +1,4 @@ trait x0[]: // error - trait x1[x1 <:x0] // error: type x0 takes type parameters + 
trait x1[x1 <:x0] extends x1[ // error // error \ No newline at end of file diff --git a/tests/neg/publicInBinaryOverride.scala b/tests/neg/publicInBinaryOverride.scala index 342e4773c56f..6529bf09736a 100644 --- a/tests/neg/publicInBinaryOverride.scala +++ b/tests/neg/publicInBinaryOverride.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.publicInBinary diff --git a/tests/neg/scala-uri.check b/tests/neg/scala-uri.check new file mode 100644 index 000000000000..b6d52d6fffd0 --- /dev/null +++ b/tests/neg/scala-uri.check @@ -0,0 +1,14 @@ +-- [E172] Type Error: tests/neg/scala-uri.scala:31:59 ------------------------------------------------------------------ +31 |@main def Test = summon[QueryKeyValue[(String, None.type)]] // error + | ^ + |No best given instance of type QueryKeyValue[(String, None.type)] was found for parameter x of method summon in object Predef. + |I found: + | + | QueryKeyValue.tuple2QueryKeyValue[String, None.type](QueryKey.stringQueryKey, + | QueryValue.optionQueryValue[A]( + | /* ambiguous: both given instance stringQueryValue in trait QueryValueInstances1 and given instance noneQueryValue in trait QueryValueInstances1 match type QueryValue[A] */ + | summon[QueryValue[A]] + | ) + | ) + | + |But both given instance stringQueryValue in trait QueryValueInstances1 and given instance noneQueryValue in trait QueryValueInstances1 match type QueryValue[A]. diff --git a/tests/neg/scala-uri.scala b/tests/neg/scala-uri.scala new file mode 100644 index 000000000000..f3bff269234f --- /dev/null +++ b/tests/neg/scala-uri.scala @@ -0,0 +1,31 @@ +//> using options -source:3.6 +import scala.language.implicitConversions + +trait QueryKey[A] +object QueryKey extends QueryKeyInstances +sealed trait QueryKeyInstances: + given stringQueryKey: QueryKey[String] = ??? 
+ +trait QueryValue[-A] +object QueryValue extends QueryValueInstances +sealed trait QueryValueInstances1: + given stringQueryValue: QueryValue[String] = ??? + given noneQueryValue: QueryValue[None.type] = ??? + // The noneQueryValue makes no sense at this priority. Since QueryValue + // is contravariant, QueryValue[None.type] is always better than QueryValue[Option[A]] + // no matter whether it's old or new resolution. So taking both owner and type + // score into account, it's always a draw. With the new disambiguation, we prefer + // the optionQueryValue[A], which gives an ambiguity down the road, because we don't + // know what the wrapped type A is. Previously, we preferred QueryValue[None.type] + // because it is unconditional. The solution is to put QueryValue[None.type] in the + // same trait as QueryValue[Option[A]], as is shown in pos/scala-uri.scala. + +sealed trait QueryValueInstances extends QueryValueInstances1: + given optionQueryValue[A: QueryValue]: QueryValue[Option[A]] = ??? + +trait QueryKeyValue[A] +object QueryKeyValue: + given tuple2QueryKeyValue[K: QueryKey, V: QueryValue]: QueryKeyValue[(K, V)] = ??? 
+ + +@main def Test = summon[QueryKeyValue[(String, None.type)]] // error diff --git a/tests/neg/singleton-ctx-bound.check b/tests/neg/singleton-ctx-bound.check new file mode 100644 index 000000000000..785123c0e680 --- /dev/null +++ b/tests/neg/singleton-ctx-bound.check @@ -0,0 +1,34 @@ +-- [E007] Type Mismatch Error: tests/neg/singleton-ctx-bound.scala:7:5 ------------------------------------------------- +7 | f1(someInt) // error + | ^^^^^^^ + | Found: Int + | Required: Singleton + | + | longer explanation available when compiling with `-explain` +-- [E007] Type Mismatch Error: tests/neg/singleton-ctx-bound.scala:12:5 ------------------------------------------------ +12 | f2(someInt) // error + | ^^^^^^^ + | Found: Int + | Required: Singleton + | + | longer explanation available when compiling with `-explain` +-- [E172] Type Error: tests/neg/singleton-ctx-bound.scala:13:26 -------------------------------------------------------- +13 | f2(if ??? then 1 else 2) // error + | ^ + |No given instance of type (1 : Int) | (2 : Int) is Singleton was found for parameter x$2 of method f2 in object Test. Failed to synthesize an instance of type (1 : Int) | (2 : Int) is Singleton: (1 : Int) | (2 : Int) is not a singleton +-- [E007] Type Mismatch Error: tests/neg/singleton-ctx-bound.scala:17:5 ------------------------------------------------ +17 | f3(someInt) // error + | ^^^^^^^ + | Found: Int + | Required: Singleton + | + | longer explanation available when compiling with `-explain` +-- [E172] Type Error: tests/neg/singleton-ctx-bound.scala:18:26 -------------------------------------------------------- +18 | f3(if ??? then 1 else 2) // error + | ^ + |No given instance of type Singleton{type Self = (1 : Int) | (2 : Int)} was found for a context parameter of method f3 in object Test. 
Failed to synthesize an instance of type Singleton{type Self = (1 : Int) | (2 : Int)}: (1 : Int) | (2 : Int) is not a singleton +-- [E172] Type Error: tests/neg/singleton-ctx-bound.scala:33:6 --------------------------------------------------------- +33 |class D extends A: // error + |^ + |No given instance of type Singleton{type Self = D.this.Elem} was found for inferring the implementation of the deferred given instance given_Singleton_Elem in trait A. Failed to synthesize an instance of type Singleton{type Self = D.this.Elem}: D.this.Elem is not a singleton +34 | type Elem = Int diff --git a/tests/neg/singleton-ctx-bound.scala b/tests/neg/singleton-ctx-bound.scala new file mode 100644 index 000000000000..e061ec54bb16 --- /dev/null +++ b/tests/neg/singleton-ctx-bound.scala @@ -0,0 +1,35 @@ +//> using options -language:experimental.modularity -source future +object Test: + + def someInt = 1 + + def f1[T <: Singleton](x: T): T = x + f1(someInt) // error + f1(if ??? then 1 else 2) // OK, but should be error + f1(3 * 2) // OK + + def f2[T](x: T)(using T is Singleton): T = x + f2(someInt) // error + f2(if ??? then 1 else 2) // error + f2(3 * 2) // OK + + def f3[T: Singleton](x: T): T = x + f3(someInt) // error + f3(if ??? 
then 1 else 2) // error + f3(3 * 2) // OK + f3(6) // OK + +import compiletime.* + +trait A: + type Elem: Singleton + +class B extends A: + type Elem = 1 // OK + +class C[X: Singleton] extends A: + type Elem = X // OK + +class D extends A: // error + type Elem = Int + diff --git a/tests/neg/structural.scala b/tests/neg/structural.scala index de70092c0396..e8fad254a801 100644 --- a/tests/neg/structural.scala +++ b/tests/neg/structural.scala @@ -11,7 +11,7 @@ object Test3 { type A = { def foo(x: Int): Unit; def foo(x: String): Unit } // error: overloaded definition // error: overloaded definition type B = { val foo: Int; def foo: Int } // error: duplicate foo - type C = { var foo: Int } // error: refinements cannot have vars + type C = { var foo: Int } trait Entry { type Key; val key: Key } type D = { def foo(e: Entry, k: e.Key): Unit } diff --git a/tests/neg/tracked.check b/tests/neg/tracked.check new file mode 100644 index 000000000000..ae734e7aa0b4 --- /dev/null +++ b/tests/neg/tracked.check @@ -0,0 +1,50 @@ +-- Error: tests/neg/tracked.scala:2:16 --------------------------------------------------------------------------------- +2 |class C(tracked x: Int) // error + | ^ + | `val` or `var` expected +-- [E040] Syntax Error: tests/neg/tracked.scala:7:18 ------------------------------------------------------------------- +7 | def foo(tracked a: Int) = // error + | ^ + | ':' expected, but identifier found +-- Error: tests/neg/tracked.scala:8:12 --------------------------------------------------------------------------------- +8 | tracked val b: Int = 2 // error + | ^^^ + | end of statement expected but 'val' found +-- Error: tests/neg/tracked.scala:11:10 -------------------------------------------------------------------------------- +11 | tracked object Foo // error // error + | ^^^^^^ + | end of statement expected but 'object' found +-- Error: tests/neg/tracked.scala:14:10 -------------------------------------------------------------------------------- +14 | tracked 
class D // error // error + | ^^^^^ + | end of statement expected but 'class' found +-- Error: tests/neg/tracked.scala:17:10 -------------------------------------------------------------------------------- +17 | tracked type T = Int // error // error + | ^^^^ + | end of statement expected but 'type' found +-- Error: tests/neg/tracked.scala:20:29 -------------------------------------------------------------------------------- +20 | given g2(using tracked val x: Int): C = C(x) // error + | ^^^^^^^^^^^^^^^^^^ + | method parameter x may not be a `val` +-- Error: tests/neg/tracked.scala:4:21 --------------------------------------------------------------------------------- +4 |class C2(tracked var x: Int) // error + | ^ + | mutable variables may not be `tracked` +-- [E006] Not Found Error: tests/neg/tracked.scala:11:2 ---------------------------------------------------------------- +11 | tracked object Foo // error // error + | ^^^^^^^ + | Not found: tracked + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/tracked.scala:14:2 ---------------------------------------------------------------- +14 | tracked class D // error // error + | ^^^^^^^ + | Not found: tracked + | + | longer explanation available when compiling with `-explain` +-- [E006] Not Found Error: tests/neg/tracked.scala:17:2 ---------------------------------------------------------------- +17 | tracked type T = Int // error // error + | ^^^^^^^ + | Not found: tracked + | + | longer explanation available when compiling with `-explain` diff --git a/tests/neg/tracked.scala b/tests/neg/tracked.scala new file mode 100644 index 000000000000..8d315a7b89ac --- /dev/null +++ b/tests/neg/tracked.scala @@ -0,0 +1,20 @@ +//> using options -source future -language:experimental.modularity +class C(tracked x: Int) // error + +class C2(tracked var x: Int) // error + +object A: + def foo(tracked a: Int) = // error + tracked val b: Int = 2 // error + +object B: + tracked 
object Foo // error // error + +object C: + tracked class D // error // error + +object D: + tracked type T = Int // error // error + +object E: + given g2(using tracked val x: Int): C = C(x) // error diff --git a/tests/neg/tracked2.scala b/tests/neg/tracked2.scala new file mode 100644 index 000000000000..2e6fa8cf6045 --- /dev/null +++ b/tests/neg/tracked2.scala @@ -0,0 +1 @@ +class C(tracked val x: Int) // error diff --git a/tests/neg/unapplied-types.scala b/tests/neg/unapplied-types.scala deleted file mode 100644 index 2f2339baa026..000000000000 --- a/tests/neg/unapplied-types.scala +++ /dev/null @@ -1,7 +0,0 @@ -trait T { - type L[X] = List[X] - type T1 <: L // error: takes type parameters - type T2 = L // error: takes type parameters - type T3 = List // error: takes type parameters - type T4 <: List // error: takes type parameters -} diff --git a/tests/neg/unselectable-fields.check b/tests/neg/unselectable-fields.check new file mode 100644 index 000000000000..f7f0bf51a6bc --- /dev/null +++ b/tests/neg/unselectable-fields.check @@ -0,0 +1,4 @@ +-- Error: tests/neg/unselectable-fields.scala:4:13 --------------------------------------------------------------------- +4 |val _ = foo1.xyz // error + | ^^^^^^^^ + | Cannot use selectDynamic here since it needs another selectDynamic to be invoked diff --git a/tests/neg/unselectable-fields.scala b/tests/neg/unselectable-fields.scala new file mode 100644 index 000000000000..7abe49d24764 --- /dev/null +++ b/tests/neg/unselectable-fields.scala @@ -0,0 +1,6 @@ +val foo1 = new Selectable: + type Fields = (xyz: Int) + def selectDynamic(name: String): Any = 23 +val _ = foo1.xyz // error + + diff --git a/tests/neg/unsound-reach-2.scala b/tests/neg/unsound-reach-2.scala new file mode 100644 index 000000000000..27742d72557b --- /dev/null +++ b/tests/neg/unsound-reach-2.scala @@ -0,0 +1,25 @@ +import language.experimental.captureChecking +trait Consumer[-T]: + def apply(x: T): Unit + +trait File: + def close(): Unit + +def 
withFile[R](path: String)(op: Consumer[File]): R = ??? + +trait Foo[+X]: + def use(x: File^)(op: Consumer[X]): Unit +class Bar extends Foo[File^]: + def use(x: File^)(op: Consumer[File^]): Unit = op.apply(x) + +def bad(): Unit = + val backdoor: Foo[File^] = new Bar + val boom: Foo[File^{backdoor*}] = backdoor + + var escaped: File^{backdoor*} = null + withFile("hello.txt"): f => + boom.use(f): // error + new Consumer[File^{backdoor*}]: + def apply(f1: File^{backdoor*}) = + escaped = f1 + diff --git a/tests/neg/unsound-reach-3.scala b/tests/neg/unsound-reach-3.scala new file mode 100644 index 000000000000..71c27fe5007d --- /dev/null +++ b/tests/neg/unsound-reach-3.scala @@ -0,0 +1,21 @@ +import language.experimental.captureChecking +trait File: + def close(): Unit + +def withFile[R](path: String)(op: File^ => R): R = ??? + +trait Foo[+X]: + def use(x: File^): X +class Bar extends Foo[File^]: + def use(x: File^): File^ = x + +def bad(): Unit = + val backdoor: Foo[File^] = new Bar + val boom: Foo[File^{backdoor*}] = backdoor + + var escaped: File^{backdoor*} = null + withFile("hello.txt"): f => + escaped = boom.use(f) // error + // boom.use: (x: File^) -> File^{backdoor*}, it is a selection so reach capabilities are allowed + // f: File^, so there is no reach capabilities + diff --git a/tests/neg/unsound-reach-4.check b/tests/neg/unsound-reach-4.check new file mode 100644 index 000000000000..47256baf408a --- /dev/null +++ b/tests/neg/unsound-reach-4.check @@ -0,0 +1,5 @@ +-- Error: tests/neg/unsound-reach-4.scala:20:19 ------------------------------------------------------------------------ +20 | escaped = boom.use(f) // error + | ^^^^^^^^ + | Reach capability backdoor* and universal capability cap cannot both + | appear in the type (x: F): box File^{backdoor*} of this expression diff --git a/tests/neg/unsound-reach-4.scala b/tests/neg/unsound-reach-4.scala new file mode 100644 index 000000000000..fa395fa117ca --- /dev/null +++ b/tests/neg/unsound-reach-4.scala @@ 
-0,0 +1,20 @@ +import language.experimental.captureChecking +trait File: + def close(): Unit + +def withFile[R](path: String)(op: File^ => R): R = ??? + +type F = File^ + +trait Foo[+X]: + def use(x: F): X +class Bar extends Foo[File^]: + def use(x: F): File^ = x + +def bad(): Unit = + val backdoor: Foo[File^] = new Bar + val boom: Foo[File^{backdoor*}] = backdoor + + var escaped: File^{backdoor*} = null + withFile("hello.txt"): f => + escaped = boom.use(f) // error diff --git a/tests/neg/unsound-reach.check b/tests/neg/unsound-reach.check new file mode 100644 index 000000000000..fd5c401416d1 --- /dev/null +++ b/tests/neg/unsound-reach.check @@ -0,0 +1,5 @@ +-- Error: tests/neg/unsound-reach.scala:18:9 --------------------------------------------------------------------------- +18 | boom.use(f): (f1: File^{backdoor*}) => // error + | ^^^^^^^^ + | Reach capability backdoor* and universal capability cap cannot both + | appear in the type (x: File^)(op: box File^{backdoor*} => Unit): Unit of this expression diff --git a/tests/neg/unsound-reach.scala b/tests/neg/unsound-reach.scala new file mode 100644 index 000000000000..468730168019 --- /dev/null +++ b/tests/neg/unsound-reach.scala @@ -0,0 +1,20 @@ +import language.experimental.captureChecking +trait File: + def close(): Unit + +def withFile[R](path: String)(op: File^ => R): R = ??? 
+ +trait Foo[+X]: + def use(x: File^)(op: X => Unit): Unit +class Bar extends Foo[File^]: + def use(x: File^)(op: File^ => Unit): Unit = op(x) + +def bad(): Unit = + val backdoor: Foo[File^] = new Bar + val boom: Foo[File^{backdoor*}] = backdoor + + var escaped: File^{backdoor*} = null + withFile("hello.txt"): f => + boom.use(f): (f1: File^{backdoor*}) => // error + escaped = f1 + diff --git a/tests/neg/use-experimental-def.check b/tests/neg/use-experimental-def.check index 66c4a7a305b5..a3d9d93f8a7a 100644 --- a/tests/neg/use-experimental-def.check +++ b/tests/neg/use-experimental-def.check @@ -1,10 +1,9 @@ -- Error: tests/neg/use-experimental-def.scala:7:15 -------------------------------------------------------------------- 7 |def bar: Int = foo // error | ^^^ + | method foo is marked @experimental + | | Experimental definition may only be used under experimental mode: | 1. in a definition marked as @experimental, or - | 2. compiling with the -experimental compiler flag, or - | 3. with a nightly or snapshot version of the compiler. - | - | method foo is marked @experimental - | + | 2. an experimental feature is imported at the package level, or + | 3. compiling with the -experimental compiler flag. 
diff --git a/tests/neg/use-experimental-def.scala b/tests/neg/use-experimental-def.scala index 68ce0d6987ab..ff7b95567a60 100644 --- a/tests/neg/use-experimental-def.scala +++ b/tests/neg/use-experimental-def.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/new/test.scala b/tests/new/test.scala index e6bfc29fd808..16a823547553 100644 --- a/tests/new/test.scala +++ b/tests/new/test.scala @@ -1,2 +1,9 @@ -object Test: - def f: Any = 1 +import language.experimental.namedTuples + +type Person = (name: String, age: Int) + +def test = + val bob = (name = "Bob", age = 33): (name: String, age: Int) + + val silly = bob match + case (name = n, age = a) => n.length + a diff --git a/tests/new/tracked-mixin-traits.scala b/tests/new/tracked-mixin-traits.scala new file mode 100644 index 000000000000..21d890d44f42 --- /dev/null +++ b/tests/new/tracked-mixin-traits.scala @@ -0,0 +1,16 @@ +trait A: + type T +object a extends A: + type T = Int + +trait B(tracked val b: A): + type T = b.T + +trait C(tracked val c: A): + type T = c.T + +class D extends B(a), C(a): + val x: T = 2 + + + diff --git a/tests/pending/pos/cbproxy-default.scala b/tests/pending/pos/cbproxy-default.scala new file mode 100644 index 000000000000..e8f12ceeae75 --- /dev/null +++ b/tests/pending/pos/cbproxy-default.scala @@ -0,0 +1,4 @@ +def f[S: Monad]( + initial: S.Self = S.unit // error +) = + S.unit // works \ No newline at end of file diff --git a/tests/pending/pos/singleton-infer.scala b/tests/pending/pos/singleton-infer.scala new file mode 100644 index 000000000000..72e00baf3aab --- /dev/null +++ b/tests/pending/pos/singleton-infer.scala @@ -0,0 +1,8 @@ +//> using options -Xprint:typer -language:experimental.modularity -source future + +def f1[S, T <: S : Singleton](x: S) = () +def f2[S, T >: S : Singleton](x: S) = () + +def Test = + f1(42) // f1[Int, Singleton & Int] // should infer (42 : Int) or throw an error? 
+ f2(42) // f2[(42 : Int), (42 : Int)] \ No newline at end of file diff --git a/tests/plugins/custom/analyzer/Analyzer_1.scala b/tests/plugins/custom/analyzer/Analyzer_1.scala index 0e1cc53290d0..d611972e0e48 100644 --- a/tests/plugins/custom/analyzer/Analyzer_1.scala +++ b/tests/plugins/custom/analyzer/Analyzer_1.scala @@ -52,7 +52,7 @@ class InitChecker extends PluginPhase with StandardPlugin { override val runsAfter = Set(SetDefTree.name) override val runsBefore = Set(FirstTransform.name) - def init(options: List[String]): List[PluginPhase] = this :: (new SetDefTree) :: Nil + override def initialize(options: List[String])(using Context): List[PluginPhase] = this :: (new SetDefTree) :: Nil private def checkDef(tree: Tree)(implicit ctx: Context): Tree = { if (tree.symbol.defTree.isEmpty) diff --git a/tests/plugins/neg/divideZero/plugin_1.scala b/tests/plugins/neg/divideZero/plugin_1.scala index ef8e077fd14d..68b2a8eae478 100644 --- a/tests/plugins/neg/divideZero/plugin_1.scala +++ b/tests/plugins/neg/divideZero/plugin_1.scala @@ -20,7 +20,7 @@ class DivideZero extends PluginPhase with StandardPlugin { override val runsAfter = Set(Pickler.name) override val runsBefore = Set(PickleQuotes.name) - override def init(options: List[String]): List[PluginPhase] = this :: Nil + override def initialize(options: List[String])(using Context): List[PluginPhase] = this :: Nil private def isNumericDivide(sym: Symbol)(implicit ctx: Context): Boolean = { def test(tpe: String): Boolean = diff --git a/tests/pos-custom-args/captures/captureRef-separate-compilation/Future_1.scala b/tests/pos-custom-args/captures/captureRef-separate-compilation/Future_1.scala new file mode 100644 index 000000000000..c01aeadd9360 --- /dev/null +++ b/tests/pos-custom-args/captures/captureRef-separate-compilation/Future_1.scala @@ -0,0 +1,5 @@ +class Future[T] +object Future: + class Collector[T](fs: (Future[T]^)*) + class MutableCollector[T](val futures: (Future[T]^)*) extends Collector[T](futures*): + 
def add(future: Future[T]^{futures*}) = ??? diff --git a/tests/pos-custom-args/captures/captureRef-separate-compilation/test_2.scala b/tests/pos-custom-args/captures/captureRef-separate-compilation/test_2.scala new file mode 100644 index 000000000000..35714090a9ac --- /dev/null +++ b/tests/pos-custom-args/captures/captureRef-separate-compilation/test_2.scala @@ -0,0 +1,5 @@ +def test = + val collector: Future.MutableCollector[Int] = Future.MutableCollector() + collector.add(???) + + diff --git a/tests/pos-custom-args/captures/curried-closures.scala b/tests/pos-custom-args/captures/curried-closures.scala index baea8b15075c..0ad729375b3c 100644 --- a/tests/pos-custom-args/captures/curried-closures.scala +++ b/tests/pos-custom-args/captures/curried-closures.scala @@ -1,3 +1,5 @@ +//> using options -experimental + object Test: def map2(xs: List[Int])(f: Int => Int): List[Int] = xs.map(f) val f1 = map2 diff --git a/tests/pos-custom-args/captures/dep-reach.scala b/tests/pos-custom-args/captures/dep-reach.scala new file mode 100644 index 000000000000..56343fbf8e53 --- /dev/null +++ b/tests/pos-custom-args/captures/dep-reach.scala @@ -0,0 +1,21 @@ +object Test: + class C + type Proc = () => Unit + + def f(c: C^, d: C^): () ->{c, d} Unit = + def foo(xs: Proc*): () ->{xs*} Unit = + xs.head + val a: () ->{c} Unit = () => () + val b: () ->{d} Unit = () => () + val xx = foo(a, b) + xx + + def g(c: C^, d: C^): () ->{c, d} Unit = + + def foo(xs: Seq[() => Unit]): () ->{xs*} Unit = + xs.head + + val a: () ->{c} Unit = () => () + val b: () ->{d} Unit = () => () + val xx = foo(Seq(a, b)) + xx diff --git a/tests/pos-custom-args/captures/i20231.scala b/tests/pos-custom-args/captures/i20231.scala new file mode 100644 index 000000000000..5557bc9929e6 --- /dev/null +++ b/tests/pos-custom-args/captures/i20231.scala @@ -0,0 +1,4 @@ +class Async +class C(val x: Async ?=> Unit) +def foo(x: Async ?=> Unit): C^{x} = C(x) +def foo(x: Async ?=> Unit)(using Async): C^{x} = C(x) \ No newline at 
end of file diff --git a/tests/pos-custom-args/captures/reaches.scala b/tests/pos-custom-args/captures/reaches.scala index 0f7df02e13b1..f17c25712c39 100644 --- a/tests/pos-custom-args/captures/reaches.scala +++ b/tests/pos-custom-args/captures/reaches.scala @@ -10,6 +10,16 @@ class Ref[T](init: T): def get: T = x def set(y: T) = { x = y } +class List[+A]: + def head: A = ??? + def tail: List[A] = ??? + def map[B](f: A -> B): List[B] = ??? + def nonEmpty: Boolean = ??? + +extension [A](x: A) def :: (xs: List[A]): List[A] = ??? + +object Nil extends List[Nothing] + def runAll(xs: List[Proc]): Unit = var cur: List[() ->{xs*} Unit] = xs // OK, by revised VAR while cur.nonEmpty do @@ -36,7 +46,7 @@ def compose2[A, B, C](f: A => B, g: B => C): A => C = z => g(f(z)) def mapCompose[A](ps: List[(A => A, A => A)]): List[A ->{ps*} A] = - ps.map((x, y) => compose1(x, y)) + ps.map((x, y) => compose1(x, y)) // Does not work if map takes an impure function, see reaches in neg @annotation.capability class IO diff --git a/tests/pos-custom-args/captures/tablediff.scala b/tests/pos-custom-args/captures/tablediff.scala new file mode 100644 index 000000000000..244ee1a46a23 --- /dev/null +++ b/tests/pos-custom-args/captures/tablediff.scala @@ -0,0 +1,11 @@ +import language.experimental.captureChecking + +trait Seq[+A]: + def zipAll[A1 >: A, B](that: Seq[B]^, thisElem: A1, thatElem: B): Seq[(A1, B)]^{this, that} + def map[B](f: A => B): Seq[B]^{this, f} + +def zipAllOption[X](left: Seq[X], right: Seq[X]) = + left.map(Option(_)).zipAll(right.map(Option(_)), None, None) + +def fillRow[T](headRow: Seq[T], tailRow: Seq[T]) = + val paddedZip = zipAllOption(headRow, tailRow) diff --git a/tests/pos-deep-subtype/CollectionStrawMan6.scala b/tests/pos-deep-subtype/CollectionStrawMan6.scala index 9f189afbcf3a..99f634a66622 100644 --- a/tests/pos-deep-subtype/CollectionStrawMan6.scala +++ b/tests/pos-deep-subtype/CollectionStrawMan6.scala @@ -754,11 +754,11 @@ object CollectionStrawMan6 extends 
LowPriority { def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) - protected def fromIterableWithSameElemType(coll: Iterable[A]): Array[A] = coll.toArray[A](elemTag) + protected def fromIterableWithSameElemType(coll: Iterable[A]): Array[A] = coll.toArray[A](using elemTag) def fromIterable[B: ClassTag](coll: Iterable[B]): Array[B] = coll.toArray[B] - protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(elemTag)) + protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(using elemTag)) override def knownSize = xs.length diff --git a/tests/pos-macros/annot-dependency-between-modules/Macro_1.scala b/tests/pos-macros/annot-dependency-between-modules/Macro_1.scala new file mode 100644 index 000000000000..94a5d44c0777 --- /dev/null +++ b/tests/pos-macros/annot-dependency-between-modules/Macro_1.scala @@ -0,0 +1,11 @@ +import scala.annotation.* +import scala.quoted.* + +@experimental +class void extends MacroAnnotation: + def transform(using Quotes)( + definition: quotes.reflect.Definition, + companion: Option[quotes.reflect.Definition] + ) : List[quotes.reflect.Definition] = + definition +: companion.toList + end transform diff --git a/tests/pos-macros/annot-dependency-between-modules/Test_2.scala b/tests/pos-macros/annot-dependency-between-modules/Test_2.scala new file mode 100644 index 000000000000..a3c9d1e86f06 --- /dev/null +++ b/tests/pos-macros/annot-dependency-between-modules/Test_2.scala @@ -0,0 +1,4 @@ +//> using options -experimental + +@void @void +class Foo \ No newline at end of file diff --git a/tests/pos-macros/annot-in-object/Macro_1.scala b/tests/pos-macros/annot-in-object/Macro_1.scala index 143bd46b8ecc..cc2727fc6cce 100644 --- a/tests/pos-macros/annot-in-object/Macro_1.scala +++ b/tests/pos-macros/annot-in-object/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ 
@@ -6,9 +6,11 @@ import scala.quoted._ object Foo: @experimental class void extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = List(tree) + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = + List(definition) object Bar: @experimental class void extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = List(tree) + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = + List(definition) diff --git a/tests/pos-macros/annot-in-object/Test_2.scala b/tests/pos-macros/annot-in-object/Test_2.scala index 146b426b0bf8..382685873adb 100644 --- a/tests/pos-macros/annot-in-object/Test_2.scala +++ b/tests/pos-macros/annot-in-object/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @Foo.void @Foo.Bar.void diff --git a/tests/pos-macros/annot-suspend/Macro_1.scala b/tests/pos-macros/annot-suspend/Macro_1.scala index 3c391a1a041f..7a7cbdb10f01 100644 --- a/tests/pos-macros/annot-suspend/Macro_1.scala +++ b/tests/pos-macros/annot-suspend/Macro_1.scala @@ -1,9 +1,9 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @experimental class void extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = - List(tree) + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = + List(definition) diff --git a/tests/pos-macros/annot-suspend/Test_2.scala b/tests/pos-macros/annot-suspend/Test_2.scala index 8014af03235c..4fdbcf919373 100644 --- 
a/tests/pos-macros/annot-suspend/Test_2.scala +++ b/tests/pos-macros/annot-suspend/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @void def test = 0 diff --git a/tests/pos-macros/annot-then-inline/Macro_1.scala b/tests/pos-macros/annot-then-inline/Macro_1.scala index 99fece18299a..aef00d9cce58 100644 --- a/tests/pos-macros/annot-then-inline/Macro_1.scala +++ b/tests/pos-macros/annot-then-inline/Macro_1.scala @@ -1,18 +1,18 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @experimental class useInlinedIdentity extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect.* - tree match + definition match case DefDef(name, params, tpt, Some(rhs)) => val newRhs = - given Quotes = tree.symbol.asQuotes + given Quotes = definition.symbol.asQuotes '{ inlinedIdentity(${rhs.asExpr}) }.asTerm - List(DefDef.copy(tree)(name, params, tpt, Some(newRhs))) + List(DefDef.copy(definition)(name, params, tpt, Some(newRhs))) } inline def inlinedIdentity(x: Any): x.type = x diff --git a/tests/pos-macros/annot-then-inline/Test_2.scala b/tests/pos-macros/annot-then-inline/Test_2.scala index 99cb2e4e4d5b..1798fde62676 100644 --- a/tests/pos-macros/annot-then-inline/Test_2.scala +++ b/tests/pos-macros/annot-then-inline/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @useInlinedIdentity def test = 0 diff --git a/tests/pos-macros/erasedArgs/Test_2.scala b/tests/pos-macros/erasedArgs/Test_2.scala index 19f0364d3f71..56a7fe3383fe 100644 --- a/tests/pos-macros/erasedArgs/Test_2.scala +++ 
b/tests/pos-macros/erasedArgs/Test_2.scala @@ -1 +1,3 @@ +//> using options -experimental + def test: "1abc" = mcr diff --git a/tests/pos-macros/i11795.scala b/tests/pos-macros/i11795.scala index 32eaccf2f4e2..26d1c4da1417 100644 --- a/tests/pos-macros/i11795.scala +++ b/tests/pos-macros/i11795.scala @@ -1,7 +1,17 @@ import scala.quoted._ import scala.deriving._ -def blah2[P <: Product, MEL <: Tuple: Type, MET <: Tuple: Type](m: Mirror.ProductOf[P] { type MirroredElemLabels = MEL; type MirroredElemTypes = MET})(using Quotes) = { +def blah[P <: Product] + (m: Mirror.ProductOf[P]) + (using Quotes, Type[m.MirroredElemLabels], Type[m.MirroredElemTypes]) = { + type z = Tuple.Zip[m.MirroredElemLabels, m.MirroredElemTypes] + Type.of[z] // error + () +} + +def blah2[P <: Product, MEL <: Tuple: Type, MET <: Tuple: Type] + (m: Mirror.ProductOf[P] { type MirroredElemLabels = MEL; type MirroredElemTypes = MET}) + (using Quotes) = { Type.of[Tuple.Zip[MEL, MET]] () } diff --git a/tests/pos-macros/i13021/DFBits.scala b/tests/pos-macros/i13021/DFBits.scala new file mode 100644 index 000000000000..0ab76f1687ac --- /dev/null +++ b/tests/pos-macros/i13021/DFBits.scala @@ -0,0 +1,6 @@ +object DFBits: + opaque type Token[W <: Int] <: DFToken.Of[Int] = DFToken.Of[Int] + extension [W <: Int](token: Token[W]) + def data: Int = + token.asIR + 1 diff --git a/tests/pos-macros/i13021/DFToken.scala b/tests/pos-macros/i13021/DFToken.scala new file mode 100644 index 000000000000..ce8e2f11b733 --- /dev/null +++ b/tests/pos-macros/i13021/DFToken.scala @@ -0,0 +1,14 @@ +trait Token: + val data: Any + +opaque type DFToken = Token +object DFToken: + extension (of: DFToken) def asIR: Token = ??? + + opaque type Of[D] <: DFToken = DFToken + object Of: + extension [D](token: Of[D]) def width(using w: Width[?]): Int = ??? 
+ +def getWidth[W <: Int](token: DFBits.Token[W]): Int = token.width +def getData[W <: Int](token: DFBits.Token[W]): Int = + token.data //error here diff --git a/tests/pos-macros/i13021/Width.scala b/tests/pos-macros/i13021/Width.scala new file mode 100644 index 000000000000..a163e1b5ebf1 --- /dev/null +++ b/tests/pos-macros/i13021/Width.scala @@ -0,0 +1,12 @@ +import scala.quoted.* + +trait Width[T]: + type Out <: Int +object Width: + transparent inline given [T]: Width[T] = ${ getWidthMacro[T] } + def getWidthMacro[T](using Quotes, Type[T]): Expr[Width[T]] = + '{ + new Width[T] { + type Out = Int + } + } diff --git a/tests/pos/i13532/Bar.scala b/tests/pos-macros/i13532/Bar.scala similarity index 100% rename from tests/pos/i13532/Bar.scala rename to tests/pos-macros/i13532/Bar.scala diff --git a/tests/pos/i13532/Foo.scala b/tests/pos-macros/i13532/Foo.scala similarity index 100% rename from tests/pos/i13532/Foo.scala rename to tests/pos-macros/i13532/Foo.scala diff --git a/tests/pos/i13532/TestMacro.scala b/tests/pos-macros/i13532/TestMacro.scala similarity index 100% rename from tests/pos/i13532/TestMacro.scala rename to tests/pos-macros/i13532/TestMacro.scala diff --git a/tests/pos-macros/i15413/Macro_1.scala b/tests/pos-macros/i15413/Macro_1.scala index 6166a5d6f55d..f451742dff9e 100644 --- a/tests/pos-macros/i15413/Macro_1.scala +++ b/tests/pos-macros/i15413/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental -Werror -WunstableInlineAccessors +//> using options -experimental -Werror -WunstableInlineAccessors import scala.quoted.* import scala.annotation.publicInBinary diff --git a/tests/pos-macros/i15413/Test_2.scala b/tests/pos-macros/i15413/Test_2.scala index 7f8c90161c66..0cdfb8fc8186 100644 --- a/tests/pos-macros/i15413/Test_2.scala +++ b/tests/pos-macros/i15413/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental def test = new Macro().foo diff --git 
a/tests/pos-macros/i15413b/Macro_1.scala b/tests/pos-macros/i15413b/Macro_1.scala index b8a91eaba0fe..df27b6267915 100644 --- a/tests/pos-macros/i15413b/Macro_1.scala +++ b/tests/pos-macros/i15413b/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental -Werror -WunstableInlineAccessors +//> using options -experimental -Werror -WunstableInlineAccessors package bar diff --git a/tests/pos-macros/i15413b/Test_2.scala b/tests/pos-macros/i15413b/Test_2.scala index f03559b9adb9..6c5b86487cd6 100644 --- a/tests/pos-macros/i15413b/Test_2.scala +++ b/tests/pos-macros/i15413b/Test_2.scala @@ -1,3 +1,3 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental def test = bar.foo diff --git a/tests/pos/i16331/Macro.scala b/tests/pos-macros/i16331/Macro.scala similarity index 100% rename from tests/pos/i16331/Macro.scala rename to tests/pos-macros/i16331/Macro.scala diff --git a/tests/pos/i16331/Main.scala b/tests/pos-macros/i16331/Main.scala similarity index 100% rename from tests/pos/i16331/Main.scala rename to tests/pos-macros/i16331/Main.scala diff --git a/tests/pos-macros/i17009/Macro_1.scala b/tests/pos-macros/i17009/Macro_1.scala new file mode 100644 index 000000000000..0535220420e5 --- /dev/null +++ b/tests/pos-macros/i17009/Macro_1.scala @@ -0,0 +1,6 @@ +import scala.quoted._ + +object Macro { + transparent inline def transform[T](inline expr: T): T = ${ transformImpl[T]('expr) } + def transformImpl[T: Type](f: Expr[T])(using Quotes): Expr[T] = f +} diff --git a/tests/pos-macros/i17009/Main_2.scala b/tests/pos-macros/i17009/Main_2.scala new file mode 100644 index 000000000000..a32b032e8b9d --- /dev/null +++ b/tests/pos-macros/i17009/Main_2.scala @@ -0,0 +1,6 @@ +def processLine(line: String): Unit = { + Macro.transform { + line.split(" ").nn + ??? 
+ } +} diff --git a/tests/pos-macros/i17506/Macro_1.scala b/tests/pos-macros/i17506/Macro_1.scala new file mode 100644 index 000000000000..a66428a126be --- /dev/null +++ b/tests/pos-macros/i17506/Macro_1.scala @@ -0,0 +1,80 @@ +class Foo +class Bar +class Baz + +import scala.quoted._ + +def assertBetaReduction(using Quotes)(applied: Expr[Any], expected: String): quotes.reflect.Term = + import quotes.reflect._ + val reducedMaybe = Term.betaReduce(applied.asTerm) + assert(reducedMaybe.isDefined) + val reduced = reducedMaybe.get + assert(reduced.show == expected,s"obtained: ${reduced.show}, expected: ${expected}") + reduced + +inline def regularCurriedCtxFun2BetaReduceTest(inline f: Foo ?=> Bar ?=> Int): Unit = + ${regularCurriedCtxFun2BetaReduceTestImpl('f)} +def regularCurriedCtxFun2BetaReduceTestImpl(f: Expr[Foo ?=> Bar ?=> Int])(using Quotes): Expr[Int] = + val expected = + """|{ + | val contextual$3: Bar = new Bar() + | val contextual$2: Foo = new Foo() + | 123 + |}""".stripMargin + val applied = '{$f(using new Foo())(using new Bar())} + assertBetaReduction(applied, expected).asExprOf[Int] + +inline def regularCurriedFun2BetaReduceTest(inline f: Foo => Bar => Int): Int = + ${regularCurriedFun2BetaReduceTestImpl('f)} +def regularCurriedFun2BetaReduceTestImpl(f: Expr[Foo => Bar => Int])(using Quotes): Expr[Int] = + val expected = + """|{ + | val b: Bar = new Bar() + | val f: Foo = new Foo() + | 123 + |}""".stripMargin + val applied = '{$f(new Foo())(new Bar())} + assertBetaReduction(applied, expected).asExprOf[Int] + +inline def typeParamCurriedFun2BetaReduceTest(inline f: [A] => A => [B] => B => Unit): Unit = + ${typeParamCurriedFun2BetaReduceTestImpl('f)} +def typeParamCurriedFun2BetaReduceTestImpl(f: Expr[[A] => (a: A) => [B] => (b: B) => Unit])(using Quotes): Expr[Unit] = + val expected = + """|{ + | type Y = Bar + | val y: Bar = new Bar() + | type X = Foo + | val x: Foo = new Foo() + | typeParamFun2[Y, X](y, x) + |}""".stripMargin + val applied = 
'{$f.apply[Foo](new Foo()).apply[Bar](new Bar())} + assertBetaReduction(applied, expected).asExprOf[Unit] + +inline def regularCurriedFun3BetaReduceTest(inline f: Foo => Bar => Baz => Int): Int = + ${regularCurriedFun3BetaReduceTestImpl('f)} +def regularCurriedFun3BetaReduceTestImpl(f: Expr[Foo => Bar => Baz => Int])(using Quotes): Expr[Int] = + val expected = + """|{ + | val i: Baz = new Baz() + | val b: Bar = new Bar() + | val f: Foo = new Foo() + | 123 + |}""".stripMargin + val applied = '{$f(new Foo())(new Bar())(new Baz())} + assertBetaReduction(applied, expected).asExprOf[Int] + +inline def typeParamCurriedFun3BetaReduceTest(inline f: [A] => A => [B] => B => [C] => C => Unit): Unit = + ${typeParamCurriedFun3BetaReduceTestImpl('f)} +def typeParamCurriedFun3BetaReduceTestImpl(f: Expr[[A] => A => [B] => B => [C] => C => Unit])(using Quotes): Expr[Unit] = + val expected = + """|{ + | type Z = Baz + | val z: Baz = new Baz() + | type Y = Bar + | val y: Bar = new Bar() + | type X = Foo + | val x: Foo = new Foo() + | typeParamFun3[Z, Y, X](z, y, x) + |}""".stripMargin + val applied = '{$f.apply[Foo](new Foo()).apply[Bar](new Bar()).apply[Baz](new Baz())} + assertBetaReduction(applied, expected).asExprOf[Unit] diff --git a/tests/pos-macros/i17506/Test_2.scala b/tests/pos-macros/i17506/Test_2.scala new file mode 100644 index 000000000000..97a146ecba93 --- /dev/null +++ b/tests/pos-macros/i17506/Test_2.scala @@ -0,0 +1,11 @@ +@main def run() = + def typeParamFun2[A, B](a: A, b: B): Unit = println(a.toString + " " + b.toString) + def typeParamFun3[A, B, C](a: A, b: B, c: C): Unit = println(a.toString + " " + b.toString) + + regularCurriedCtxFun2BetaReduceTest((f: Foo) ?=> (b: Bar) ?=> 123) + regularCurriedCtxFun2BetaReduceTest(123) + regularCurriedFun2BetaReduceTest(((f: Foo) => (b: Bar) => 123)) + typeParamCurriedFun2BetaReduceTest([X] => (x: X) => [Y] => (y: Y) => typeParamFun2[Y, X](y, x)) + + regularCurriedFun3BetaReduceTest((f: Foo) => (b: Bar) => (i: Baz) => 123) + 
typeParamCurriedFun3BetaReduceTest([X] => (x: X) => [Y] => (y: Y) => [Z] => (z: Z) => typeParamFun3[Z, Y, X](z, y, x)) diff --git a/tests/pos-macros/i19526b/Test.scala b/tests/pos-macros/i19526b/Test.scala index ede2db53c5b0..96274091218f 100644 --- a/tests/pos-macros/i19526b/Test.scala +++ b/tests/pos-macros/i19526b/Test.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental package crash.test diff --git a/tests/pos-macros/i19537/Macro_1.scala b/tests/pos-macros/i19537/Macro_1.scala index 932994657d24..a44c212599d3 100644 --- a/tests/pos-macros/i19537/Macro_1.scala +++ b/tests/pos-macros/i19537/Macro_1.scala @@ -3,4 +3,5 @@ import scala.quoted.* @experimental class annotation extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition) = List(tree) + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = + List(definition) diff --git a/tests/pos-macros/i19539/Macro_1.scala b/tests/pos-macros/i19539/Macro_1.scala index 932994657d24..a44c212599d3 100644 --- a/tests/pos-macros/i19539/Macro_1.scala +++ b/tests/pos-macros/i19539/Macro_1.scala @@ -3,4 +3,5 @@ import scala.quoted.* @experimental class annotation extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition) = List(tree) + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = + List(definition) diff --git a/tests/pos/i19604/ZSet.scala b/tests/pos-macros/i19604/ZSet.scala similarity index 100% rename from tests/pos/i19604/ZSet.scala rename to tests/pos-macros/i19604/ZSet.scala diff --git a/tests/pos/i19604/core.scala b/tests/pos-macros/i19604/core.scala similarity index 100% rename from tests/pos/i19604/core.scala rename to tests/pos-macros/i19604/core.scala diff --git a/tests/pos/i19604/macro.scala 
b/tests/pos-macros/i19604/macro.scala similarity index 100% rename from tests/pos/i19604/macro.scala rename to tests/pos-macros/i19604/macro.scala diff --git a/tests/pos/i19604/prelude.scala b/tests/pos-macros/i19604/prelude.scala similarity index 100% rename from tests/pos/i19604/prelude.scala rename to tests/pos-macros/i19604/prelude.scala diff --git a/tests/pos-macros/i19767.scala b/tests/pos-macros/i19767.scala new file mode 100644 index 000000000000..2fb655b58a1b --- /dev/null +++ b/tests/pos-macros/i19767.scala @@ -0,0 +1,7 @@ +import scala.quoted.* + +class ICons[K <: Singleton](val key: K) + +def filterX(using Quotes): Unit = + (??? : Expr[Any]) match + case '{ $y : ICons[k1] } => '{ ICons($y.key) } \ No newline at end of file diff --git a/tests/pos-macros/i20286/Macro_1.scala b/tests/pos-macros/i20286/Macro_1.scala new file mode 100644 index 000000000000..d582d33a1198 --- /dev/null +++ b/tests/pos-macros/i20286/Macro_1.scala @@ -0,0 +1,24 @@ +import scala.quoted.* + +type P[+T] = ParsingRun[T] +trait ParsingRun[+T] { + var successValue: Any + def freshSuccessUnit(): ParsingRun[Unit] + +} + +object MacroInlineImpls { + inline def flatMapXInline[T, V]( + lhs: ParsingRun[T] + )(inline f: T => ParsingRun[V]): ParsingRun[V] = { + f(lhs.successValue.asInstanceOf[T]) + } + + def parsedSequence0[T: Type, V: Type, R: Type]( + lhs: Expr[ParsingRun[T]], + rhs: Expr[ParsingRun[V]] + )(using quotes: Quotes): Expr[ParsingRun[R]] = { + import quotes.reflect.* + '{ $rhs.asInstanceOf[ParsingRun[R]] } + } +} diff --git a/tests/pos-macros/i20286/Test_2.scala b/tests/pos-macros/i20286/Test_2.scala new file mode 100644 index 000000000000..b60a5682c051 --- /dev/null +++ b/tests/pos-macros/i20286/Test_2.scala @@ -0,0 +1,17 @@ +implicit inline def LiteralStr(s: String)(implicit ctx: P[Any]): P[Unit] = ??? + +extension [T](inline parse0: P[T]) { + inline def ~[V, R](inline other: P[V])(using + ctx: P[?] 
+ ): P[R] = ${ MacroInlineImpls.parsedSequence0[T, V, R]('parse0, 'other) } + + inline def flatMapX[V](inline f: T => P[V]): P[V] = + MacroInlineImpls.flatMapXInline[T, V](parse0)(f) +} + +def deeper[$: P]: P[Int] = ??? +def newline[$: P]: P[Unit] = ??? +def blockBody[p: P]: P[Seq[Int]] = newline ~ deeper.flatMapX { i => + val y = LiteralStr("")(using ???) + ??? +} diff --git a/tests/pos-macros/i8325/Macro_1.scala b/tests/pos-macros/i8325/Macro_1.scala index 18466e17b3df..92a54d21b00a 100644 --- a/tests/pos-macros/i8325/Macro_1.scala +++ b/tests/pos-macros/i8325/Macro_1.scala @@ -3,7 +3,7 @@ package a import scala.quoted.* -object A: +object O: inline def transform[A](inline expr: A): A = ${ transformImplExpr('expr) @@ -15,7 +15,7 @@ object A: import quotes.reflect.* expr.asTerm match { case Inlined(x,y,z) => transformImplExpr(z.asExpr.asInstanceOf[Expr[A]]) - case Apply(fun,args) => '{ A.pure(${Apply(fun,args).asExpr.asInstanceOf[Expr[A]]}) } + case Apply(fun,args) => '{ O.pure(${Apply(fun,args).asExpr.asInstanceOf[Expr[A]]}) } case other => expr } } diff --git a/tests/pos-macros/i8325/Test_2.scala b/tests/pos-macros/i8325/Test_2.scala index 8b0a74b11a08..90e88dfee341 100644 --- a/tests/pos-macros/i8325/Test_2.scala +++ b/tests/pos-macros/i8325/Test_2.scala @@ -3,7 +3,7 @@ package a class Test1 { def t1(): Unit = { - A.transform( + O.transform( s"a ${1} ${2}") } diff --git a/tests/pos-macros/i8325b/Macro_1.scala b/tests/pos-macros/i8325b/Macro_1.scala index 181efa260f9b..139abed94078 100644 --- a/tests/pos-macros/i8325b/Macro_1.scala +++ b/tests/pos-macros/i8325b/Macro_1.scala @@ -3,7 +3,7 @@ package a import scala.quoted.* -object A: +object O: inline def transform[A](inline expr: A): A = ${ transformImplExpr('expr) @@ -16,7 +16,7 @@ object A: expr.asTerm match { case Inlined(x,y,z) => transformImplExpr(z.asExpr.asInstanceOf[Expr[A]]) case r@Apply(fun,args) => '{ - A.pure(${r.asExpr.asInstanceOf[Expr[A]]}) } + O.pure(${r.asExpr.asInstanceOf[Expr[A]]}) } case 
other => expr } } diff --git a/tests/pos-macros/i8325b/Test_2.scala b/tests/pos-macros/i8325b/Test_2.scala index 8b0a74b11a08..90e88dfee341 100644 --- a/tests/pos-macros/i8325b/Test_2.scala +++ b/tests/pos-macros/i8325b/Test_2.scala @@ -3,7 +3,7 @@ package a class Test1 { def t1(): Unit = { - A.transform( + O.transform( s"a ${1} ${2}") } diff --git a/tests/pos-macros/macro-annot-with-companion/Macro_1.scala b/tests/pos-macros/macro-annot-with-companion/Macro_1.scala new file mode 100644 index 000000000000..29b76a47be80 --- /dev/null +++ b/tests/pos-macros/macro-annot-with-companion/Macro_1.scala @@ -0,0 +1,14 @@ +//> using options -experimental + +import scala.annotation.MacroAnnotation + +import scala.quoted.* + +class transform extends MacroAnnotation: + override def transform(using Quotes)( + tree: quotes.reflect.Definition, + companion: Option[quotes.reflect.Definition] + ) : List[quotes.reflect.Definition] = { + import quotes.reflect.* + List(tree) + } diff --git a/tests/pos-macros/macro-annot-with-companion/Test_2.scala b/tests/pos-macros/macro-annot-with-companion/Test_2.scala new file mode 100644 index 000000000000..4ae6d05fcdcf --- /dev/null +++ b/tests/pos-macros/macro-annot-with-companion/Test_2.scala @@ -0,0 +1,16 @@ +//> using options -experimental + +@transform +class Foo + +@transform +class Bar + +@transform +object Foo + +@transform +class A + +@transform +object B \ No newline at end of file diff --git a/tests/pos-with-compiler-cc/dotc/reporting/messages.scala b/tests/pos-with-compiler-cc/dotc/reporting/messages.scala index 142ac63af0f3..cd5c5f67ff41 100644 --- a/tests/pos-with-compiler-cc/dotc/reporting/messages.scala +++ b/tests/pos-with-compiler-cc/dotc/reporting/messages.scala @@ -873,7 +873,7 @@ class MatchableWarning(tp: Type, pattern: Boolean)(using DetachedContext) extends TypeMsg(MatchableWarningID) { def msg(using Context) = val kind = if pattern then "pattern selector" else "value" - i"""${kind} should be an instance of Matchable,, + 
i"""${kind} should be an instance of Matchable, |but it has unmatchable type $tp instead""" def explain(using Context) = diff --git a/tests/pos/13633.scala b/tests/pos/13633.scala index 8883ef98d0be..ca0f7e68e81e 100644 --- a/tests/pos/13633.scala +++ b/tests/pos/13633.scala @@ -21,7 +21,7 @@ object Sums extends App: type Reverse[A] = ReverseLoop[A, EmptyTuple] - type PlusTri[A, B, C] <: Tuple = (A, B, C) match + type PlusTri[A, B, C] = (A, B, C) match case (false, false, false) => (false, false) case (true, false, false) | (false, true, false) | (false, false, true) => (false, true) case (true, true, false) | (true, false, true) | (false, true, true) => (true, false) diff --git a/tests/pos/20088.scala b/tests/pos/20088.scala new file mode 100644 index 000000000000..308c5a0f0a91 --- /dev/null +++ b/tests/pos/20088.scala @@ -0,0 +1,6 @@ +trait Foo +trait Bar + +given (using foo: Foo = new {}): Bar with {} + +def Test = summon[Bar] diff --git a/tests/pos/20088b.scala b/tests/pos/20088b.scala new file mode 100644 index 000000000000..8cbf79d16959 --- /dev/null +++ b/tests/pos/20088b.scala @@ -0,0 +1,6 @@ +trait Foo +class Bar + +given (using foo: Foo = new {}): Bar() + +def Test = summon[Bar] diff --git a/tests/pos/FromString-cb-companion.scala b/tests/pos/FromString-cb-companion.scala new file mode 100644 index 000000000000..d086420761ee --- /dev/null +++ b/tests/pos/FromString-cb-companion.scala @@ -0,0 +1,14 @@ +//> using options -language:experimental.modularity -source future + +trait FromString[Self]: + def fromString(s: String): Self + +given FromString[Int] = _.toInt + +given FromString[Double] = _.toDouble + +def add[N: {FromString, Numeric as num}](a: String, b: String): N = + N.plus( + num.plus(N.fromString(a), N.fromString(b)), + N.fromString(a) + ) \ No newline at end of file diff --git a/tests/pos/FromString-named.scala b/tests/pos/FromString-named.scala new file mode 100644 index 000000000000..efa0882ae347 --- /dev/null +++ 
b/tests/pos/FromString-named.scala @@ -0,0 +1,11 @@ +//> using options -language:experimental.modularity -source future + +trait FromString[A]: + def fromString(s: String): A + +given FromString[Int] = _.toInt + +given FromString[Double] = _.toDouble + +def add[N: {FromString as N, Numeric as num}](a: String, b: String): N = + num.plus(N.fromString(a), N.fromString(b)) diff --git a/tests/pos/FromString-typeparam.scala b/tests/pos/FromString-typeparam.scala new file mode 100644 index 000000000000..893bcfd3decc --- /dev/null +++ b/tests/pos/FromString-typeparam.scala @@ -0,0 +1,13 @@ +//> using options -language:experimental.modularity -source future + +trait FromString[A]: + def fromString(s: String): A + +given FromString[Int] = _.toInt + +given FromString[Double] = _.toDouble + +def add[N: {FromString, Numeric}](a: String, b: String): N = + val num = summon[Numeric[N]] + val N = summon[FromString[N]] + num.plus(N.fromString(a), N.fromString(b)) diff --git a/tests/pos/FromString.scala b/tests/pos/FromString.scala new file mode 100644 index 000000000000..333a4c002989 --- /dev/null +++ b/tests/pos/FromString.scala @@ -0,0 +1,15 @@ +//> using options -language:experimental.modularity -source future + +trait FromString: + type Self + def fromString(s: String): Self + +given Int is FromString = _.toInt + +given Double is FromString = _.toDouble + +def add[N: {FromString, Numeric as num}](a: String, b: String): N = + N.plus( + num.plus(N.fromString(a), N.fromString(b)), + N.fromString(a) + ) \ No newline at end of file diff --git a/tests/pos/Tuple.Drop.scala b/tests/pos/Tuple.Drop.scala deleted file mode 100644 index 9b88cc227966..000000000000 --- a/tests/pos/Tuple.Drop.scala +++ /dev/null @@ -1,7 +0,0 @@ -import compiletime.ops.int.* - -type Drop[T <: Tuple, N <: Int] <: Tuple = N match - case 0 => T - case S[n1] => T match - case EmptyTuple => EmptyTuple - case x *: xs => Drop[xs, n1] diff --git a/tests/pos/Tuple.Elem.scala b/tests/pos/Tuple.Elem.scala deleted file 
mode 100644 index 81494485c321..000000000000 --- a/tests/pos/Tuple.Elem.scala +++ /dev/null @@ -1,7 +0,0 @@ -import compiletime.ops.int.* - -type Elem[T <: Tuple, I <: Int] = T match - case h *: tail => - I match - case 0 => h - case S[j] => Elem[tail, j] diff --git a/tests/pos/TupleReverse.scala b/tests/pos/TupleReverse.scala index 9b83280afcf1..6d70e6759e19 100644 --- a/tests/pos/TupleReverse.scala +++ b/tests/pos/TupleReverse.scala @@ -1,3 +1,5 @@ +//> using options -experimental + import scala.Tuple.* def test[T1, T2, T3, T4] = summon[Reverse[EmptyTuple] =:= EmptyTuple] @@ -12,5 +14,6 @@ def test[T1, T2, T3, T4] = def test2[Tup <: Tuple] = summon[Reverse[Tup] =:= Reverse[Tup]] -def test3[T1, T2, T3, T4](tup1: (T1, T2, T3, T4)) = - summon[Reverse[tup1.type] =:= (T4, T3, T2, T1)] +def test3[T1, T2, T3, T4](tup1: (T1, T2, T3, T4)): Unit = + val tup11: (T1, T2, T3, T4) = tup1 + summon[Reverse[tup11.type] =:= (T4, T3, T2, T1)] diff --git a/tests/pos/TupleReverseOnto.scala b/tests/pos/TupleReverseOnto.scala index 09d5a323cb29..4d4a40f60cec 100644 --- a/tests/pos/TupleReverseOnto.scala +++ b/tests/pos/TupleReverseOnto.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.Tuple.* @@ -13,6 +13,7 @@ def test2[Tup1 <: Tuple, Tup2 <: Tuple] = summon[ReverseOnto[EmptyTuple, Tup1] =:= Tup1] summon[ReverseOnto[Tup1, EmptyTuple] =:= Reverse[Tup1]] -def test3[T1, T2, T3, T4](tup1: (T1, T2), tup2: (T3, T4)) = - summon[ReverseOnto[tup1.type, tup2.type] <:< (T2, T1, T3, T4)] - summon[ReverseOnto[tup1.type, tup2.type] =:= T2 *: T1 *: tup2.type] +def test3[T1, T2, T3, T4](tup1: (T1, T2), tup2: (T3, T4)): Unit = + val tup11: (T1, T2) = tup1 + summon[ReverseOnto[tup11.type, tup2.type] <:< (T2, T1, T3, T4)] + summon[ReverseOnto[tup11.type, tup2.type] =:= T2 *: T1 *: tup2.type] diff --git a/tests/pos/bson/Test.scala b/tests/pos/bson/Test.scala new file mode 100644 index 000000000000..78b6687adabf --- /dev/null +++ 
b/tests/pos/bson/Test.scala @@ -0,0 +1,5 @@ +//> using options -source 3.5 +import bson.* + +def stringMapHandler[V](using writer: BSONWriter[Map[String, V]]): BSONHandler[Map[String, V]] = ??? +def typedMapHandler[K, V: BSONHandler] = stringMapHandler[V] // warn diff --git a/tests/pos/bson/bson.scala b/tests/pos/bson/bson.scala new file mode 100644 index 000000000000..d901ee3e3a4f --- /dev/null +++ b/tests/pos/bson/bson.scala @@ -0,0 +1,29 @@ +package bson + +trait BSONWriter[T] +trait BSONDocumentWriter[T] extends BSONWriter[T] +object BSONWriter extends BSONWriterInstances + +trait BSONHandler[T] extends BSONWriter[T] + +private[bson] trait BSONWriterInstances { + given mapWriter[V](using BSONWriter[V]): BSONDocumentWriter[Map[String, V]] = bson.mapWriter[V] + export bson.collectionWriter +} + +final class ¬[A, B] +object ¬ { + implicit def defaultEvidence[A, B]: ¬[A, B] = new ¬[A, B]() + @annotation.implicitAmbiguous("Could not prove type ${A} is not (¬) ${A}") + implicit def ambiguousEvidence1[A]: ¬[A, A] = null + implicit def ambiguousEvidence2[A]: ¬[A, A] = null +} + +private[bson] trait DefaultBSONHandlers extends LowPriorityHandlers +private[bson] trait LowPriorityHandlers{ + given collectionWriter[T, Repr <: Iterable[T]](using BSONWriter[T], Repr ¬ Option[T]): BSONWriter[Repr] = ??? + private[bson] def mapWriter[V](implicit valueWriter: BSONWriter[V]): BSONDocumentWriter[Map[String, V]] = ??? 
+} + +// --- +package object bson extends DefaultBSONHandlers \ No newline at end of file diff --git a/tests/pos/cb-companion-joins.scala b/tests/pos/cb-companion-joins.scala new file mode 100644 index 000000000000..97e0a8a7e4ac --- /dev/null +++ b/tests/pos/cb-companion-joins.scala @@ -0,0 +1,21 @@ +import language.experimental.modularity +import language.future + +trait M[Self]: + extension (x: Self) def combine (y: Self): String + def unit: Self + +trait Num[Self]: + def zero: Self + +trait A extends M[A] +trait B extends M[A] + +trait AA: + type X: M +trait BB: + type X: Num +class CC[X1: {M, Num}] extends AA, BB: + type X = X1 + X.zero + X.unit diff --git a/tests/pos/cbproxy-expansion.scala b/tests/pos/cbproxy-expansion.scala new file mode 100644 index 000000000000..ee145b62d4ed --- /dev/null +++ b/tests/pos/cbproxy-expansion.scala @@ -0,0 +1,16 @@ +//> using options -language:experimental.modularity -source future +trait TC[T]: + type Self + +def f1[S, T: TC[S] as tc](x: S, y: tc.Self) = () +def f2[S, T: TC[S]](x: S, y: T.Self) = () +def f3[S, T: TC[S]](x: S, y: Int) = () + +given TC[String] with + type Self = Int + def unit = 42 + +def main = + f1("hello", 23) + f2("hello", 23) + f3("hello", 23) diff --git a/tests/pos/cc-experimental.scala b/tests/pos/cc-experimental.scala index 4ee1f6732356..4027779a9036 100644 --- a/tests/pos/cc-experimental.scala +++ b/tests/pos/cc-experimental.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + package scala.runtime diff --git a/tests/pos/constfold.scala b/tests/pos/constfold.scala index a45400d7f259..5a76d414032d 100644 --- a/tests/pos/constfold.scala +++ b/tests/pos/constfold.scala @@ -15,4 +15,12 @@ object Test extends App { Console.println(A.y); Console.println(A.z); Console.println(A.s); + + def f(x: 12): Int = 1 + def f(x: Int): Double = 2 + val x = f(12) + val _: Int = x + val y = f(2 * 6) + val _: Int = x + } diff --git a/tests/pos/deferred-givens-singletons.scala 
b/tests/pos/deferred-givens-singletons.scala new file mode 100644 index 000000000000..60a881340b75 --- /dev/null +++ b/tests/pos/deferred-givens-singletons.scala @@ -0,0 +1,13 @@ +//> using options -language:experimental.modularity -source future +import compiletime.* + +trait A: + type Elem: Singleton + +class B extends A: + type Elem = 1 + +class C[X: Singleton] extends A: + type Elem = X + + diff --git a/tests/pos/deferred-givens.scala b/tests/pos/deferred-givens.scala new file mode 100644 index 000000000000..b9018c97e151 --- /dev/null +++ b/tests/pos/deferred-givens.scala @@ -0,0 +1,36 @@ +//> using options -language:experimental.modularity -source future +import compiletime.* +class Ord[Elem] +given Ord[Double] + +trait A: + type Elem : Ord + def foo = summon[Ord[Elem]] + +class AC extends A: + type Elem = Double + override given Ord[Elem] = ??? + +class AD extends A: + type Elem = Double + +trait B: + type Elem + given Ord[Elem] = deferred + def foo = summon[Ord[Elem]] + +class C extends B: + type Elem = String + override given Ord[Elem] = ??? 
+ +def bar(using Ord[String]) = 1 + +class D(using Ord[String]) extends B: + type Elem = String + +class E(using x: Ord[String]) extends B: + type Elem = String + override given Ord[Elem] = x + +class F[X: Ord] extends B: + type Elem = X diff --git a/tests/pos/deferredSummon.scala b/tests/pos/deferredSummon.scala new file mode 100644 index 000000000000..f8252576d81a --- /dev/null +++ b/tests/pos/deferredSummon.scala @@ -0,0 +1,48 @@ +//> using options -language:experimental.modularity -source future +import compiletime.deferred + +trait Ord: + type Self + def less(x: Self, y: Self): Boolean + +trait A: + type Elem + given Elem is Ord = deferred + def foo = summon[Elem is Ord] + +trait B: + type Elem: Ord + def foo = summon[Elem is Ord] + +object Inst: + given Int is Ord: + def less(x: Int, y: Int) = x < y + +object Test1: + import Inst.given + class C extends A: + type Elem = Int + object E extends A: + type Elem = Int + given A: + type Elem = Int + +class D1[T: Ord] extends B: + type Elem = T + +object Test2: + import Inst.given + class C extends B: + type Elem = Int + object E extends B: + type Elem = Int + given B: + type Elem = Int + +class D2[T: Ord] extends B: + type Elem = T + + + + + diff --git a/tests/pos/dep-context-bounds.scala b/tests/pos/dep-context-bounds.scala new file mode 100644 index 000000000000..c724d92e9809 --- /dev/null +++ b/tests/pos/dep-context-bounds.scala @@ -0,0 +1,17 @@ +//> using options -language:experimental.modularity -source future +trait A: + type Self + +object Test1: + def foo[X: A](x: X.Self) = ??? + + def bar[X: A](a: Int) = ??? + + def baz[X: A](a: Int)(using String) = ??? + +object Test2: + def foo[X: A as x](a: x.Self) = ??? + + def bar[X: A as x](a: Int) = ??? + + def baz[X: A as x](a: Int)(using String) = ??? 
diff --git a/tests/pos/depclass-1.scala b/tests/pos/depclass-1.scala new file mode 100644 index 000000000000..38daef85ae98 --- /dev/null +++ b/tests/pos/depclass-1.scala @@ -0,0 +1,19 @@ +//> using options -source future -language:experimental.modularity +class A(tracked val source: String) + +class B(x: Int, tracked val source1: String) extends A(source1) + +class C(tracked val source2: String) extends B(1, source2) + +//class D(source1: String) extends C(source1) +val x = C("hello") +val _: A{ val source: "hello" } = x + +class Vec[Elem](tracked val size: Int) +class Vec8 extends Vec[Float](8) + +val v = Vec[Float](10) +val v2 = Vec8() +val xx: 10 = v.size +val x2: 8 = v2.size + diff --git a/tests/pos/dotty-experimental.scala b/tests/pos/dotty-experimental.scala index 9cffddc0b8ba..ee9a84a1b497 100644 --- a/tests/pos/dotty-experimental.scala +++ b/tests/pos/dotty-experimental.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import language.experimental.captureChecking object test { diff --git a/tests/neg/expeimental-flag-with-lang-feature-2.scala b/tests/pos/expeimental-flag-with-lang-feature-2.scala similarity index 55% rename from tests/neg/expeimental-flag-with-lang-feature-2.scala rename to tests/pos/expeimental-flag-with-lang-feature-2.scala index 3e0b9359711a..7985f9db3a29 100644 --- a/tests/neg/expeimental-flag-with-lang-feature-2.scala +++ b/tests/pos/expeimental-flag-with-lang-feature-2.scala @@ -1,6 +1,6 @@ -//> using options -Yno-experimental -import scala.language.experimental.namedTypeArguments // error + +import scala.language.experimental.namedTypeArguments def namedTypeArgumentsFun[T, U]: Int = namedTypeArgumentsFun[T = Int, U = Int] diff --git a/tests/pos/expeimental-flag-with-lang-feature.scala b/tests/pos/expeimental-flag-with-lang-feature.scala index 9cfb716b1015..96069c332e02 100644 --- a/tests/pos/expeimental-flag-with-lang-feature.scala +++ b/tests/pos/expeimental-flag-with-lang-feature.scala @@ -1,4 +1,4 @@ -//> using options 
-experimental -Yno-experimental +//> using options -experimental import scala.language.experimental.erasedDefinitions import scala.language.experimental.namedTypeArguments diff --git a/tests/pos/expeimental-flag.scala b/tests/pos/experimental-flag.scala similarity index 85% rename from tests/pos/expeimental-flag.scala rename to tests/pos/experimental-flag.scala index 9d3daf12fddc..e5d6274c4c80 100644 --- a/tests/pos/expeimental-flag.scala +++ b/tests/pos/experimental-flag.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.experimental diff --git a/tests/pos/experimental-import-with-top-level-val-underscore.scala b/tests/pos/experimental-import-with-top-level-val-underscore.scala new file mode 100644 index 000000000000..306979634d71 --- /dev/null +++ b/tests/pos/experimental-import-with-top-level-val-underscore.scala @@ -0,0 +1,8 @@ + +import language.experimental.erasedDefinitions + +def test() = () + +val _ = + test() + 42 diff --git a/tests/pos/experimental-imports-empty.scala b/tests/pos/experimental-imports-empty.scala index 18d83839e7e7..8729905494ff 100644 --- a/tests/pos/experimental-imports-empty.scala +++ b/tests/pos/experimental-imports-empty.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental import language.experimental.namedTypeArguments diff --git a/tests/pos/experimental-imports-top.scala b/tests/pos/experimental-imports-top.scala index 16f44e48eb32..9ba2b5cd2c99 100644 --- a/tests/pos/experimental-imports-top.scala +++ b/tests/pos/experimental-imports-top.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import language.experimental.erasedDefinitions import annotation.experimental diff --git a/tests/neg/experimental-package-imports.scala b/tests/pos/experimental-package-imports.scala similarity index 56% rename from tests/neg/experimental-package-imports.scala rename to tests/pos/experimental-package-imports.scala index 
7a4b04606b9d..b816130c18d7 100644 --- a/tests/neg/experimental-package-imports.scala +++ b/tests/pos/experimental-package-imports.scala @@ -1,14 +1,14 @@ -//> using options -Yno-experimental + import annotation.experimental package foo { - import language.experimental.namedTypeArguments // error - import language.experimental.genericNumberLiterals // error - import language.experimental.erasedDefinitions // ok: only check at erased definition + import language.experimental.namedTypeArguments + import language.experimental.genericNumberLiterals + import language.experimental.erasedDefinitions package bar { - def foo = 1 + def foo = 1 // marked as @experimental because of the language imports } } diff --git a/tests/pos/experimentalExperimental.scala b/tests/pos/experimentalExperimental.scala deleted file mode 100644 index 4b57e5b94346..000000000000 --- a/tests/pos/experimentalExperimental.scala +++ /dev/null @@ -1 +0,0 @@ -class MyExperimentalAnnot extends scala.annotation.experimental diff --git a/tests/pos/export-param-flags/A_1.scala b/tests/pos/export-param-flags/A_1.scala new file mode 100644 index 000000000000..1ac8d10ba930 --- /dev/null +++ b/tests/pos/export-param-flags/A_1.scala @@ -0,0 +1,5 @@ +object A: + def defaultParam(x: Int = 1) = x + +object Exported: + export A.* diff --git a/tests/pos/export-param-flags/B_2.scala b/tests/pos/export-param-flags/B_2.scala new file mode 100644 index 000000000000..0387f66d7aa7 --- /dev/null +++ b/tests/pos/export-param-flags/B_2.scala @@ -0,0 +1,2 @@ +object B: + val x = Exported.defaultParam() diff --git a/tests/pos/ext-override.scala b/tests/pos/ext-override.scala new file mode 100644 index 000000000000..d08439e13c9a --- /dev/null +++ b/tests/pos/ext-override.scala @@ -0,0 +1,12 @@ +//> using options -Xfatal-warnings + +trait Foo[T]: + extension (x: T) + def hi: String + +class Bla: + def hi: String = "hi" +object Bla: + given Foo[Bla] with + extension (x: Bla) + def hi: String = x.hi diff --git 
a/tests/pos/extmethods.scala b/tests/pos/extmethods.scala index 368b4f439916..40683c56c694 100644 --- a/tests/pos/extmethods.scala +++ b/tests/pos/extmethods.scala @@ -17,7 +17,7 @@ object CollectionStrawMan { def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) - protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(elemTag)) + protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(using elemTag)) } } diff --git a/tests/pos/fieldsOf.scala b/tests/pos/fieldsOf.scala new file mode 100644 index 000000000000..2594dae2cbf7 --- /dev/null +++ b/tests/pos/fieldsOf.scala @@ -0,0 +1,18 @@ +import language.experimental.namedTuples + +case class Person(name: String, age: Int) + +type PF = NamedTuple.From[Person] + +def foo[T]: NamedTuple.From[T] = ??? + +class Anon(name: String, age: Int) + +def test = + var x: NamedTuple.From[Person] = ??? + val y: (name: String, age: Int) = x + x = y + x = foo[Person] + //x = foo[Anon] // error + + diff --git a/tests/pos/first-class-patterns.scala b/tests/pos/first-class-patterns.scala new file mode 100644 index 000000000000..98d7faf4d8e7 --- /dev/null +++ b/tests/pos/first-class-patterns.scala @@ -0,0 +1,23 @@ + + // Trait of all extractors with unapply methods + trait Matcher[A, B]: + def unapply(x: A): Option[B] + + // An extractor defined by an unapply method + object Even extends Matcher[Int, Int]: + def unapply(x: Int): Option[Int] = + if x % 2 == 0 then Some(x) else None + + // Method using a given extractor in pattern position + def collect[A, B](xs: List[A], m: Matcher[A, B]): List[B] = + xs match + case Nil => Nil + case m(x) :: xs1 => x :: collect(xs1, m) + case _ :: xs1 => collect(xs1, m) + + @main def test = + val xs = List(1, 2, 3, 4) + val ys = collect(xs, Even) + println(ys) + + diff --git a/tests/pos/given-loop-prevention.scala b/tests/pos/given-loop-prevention.scala deleted file mode 100644 index 0bae0bb24fed..000000000000 --- a/tests/pos/given-loop-prevention.scala +++ 
/dev/null @@ -1,14 +0,0 @@ -//> using options -Xfatal-warnings - -class Foo - -object Bar { - given Foo with {} - given List[Foo] = List(summon[Foo]) // ok -} - -object Baz { - @annotation.nowarn - given List[Foo] = List(summon[Foo]) // gives a warning, which is suppressed - given Foo with {} -} diff --git a/tests/pos/given-priority.scala b/tests/pos/given-priority.scala new file mode 100644 index 000000000000..048e063eff35 --- /dev/null +++ b/tests/pos/given-priority.scala @@ -0,0 +1,24 @@ +/* These tests show various mechanisms available for implicit prioritization. + */ +import language.`3.6` + +class A // The type for which we infer terms below +class B extends A + +/* First, two schemes that require a pre-planned architecture for how and + * where given instances are defined. + * + * Traditional scheme: prioritize with location in class hierarchy + */ +class LowPriorityImplicits: + given g1: A() + +object NormalImplicits extends LowPriorityImplicits: + given g2: B() + +def test1 = + import NormalImplicits.given + val x = summon[A] + val _: B = x + val y = summon[B] + val _: B = y diff --git a/tests/pos/hylolib-cb-extract.scala b/tests/pos/hylolib-cb-extract.scala new file mode 100644 index 000000000000..b80a88485a2b --- /dev/null +++ b/tests/pos/hylolib-cb-extract.scala @@ -0,0 +1,18 @@ +//> using options -language:experimental.modularity -source future +package hylotest +import compiletime.deferred + +trait Value[Self] + +/** A collection of elements accessible by their position. */ +trait Collection[Self]: + + /** The type of the elements in the collection. */ + type Element: Value + +class BitArray + +given Value[Boolean] {} + +given Collection[BitArray] with + type Element = Boolean diff --git a/tests/pos/hylolib-cb/AnyCollection.scala b/tests/pos/hylolib-cb/AnyCollection.scala new file mode 100644 index 000000000000..50f4313e46ce --- /dev/null +++ b/tests/pos/hylolib-cb/AnyCollection.scala @@ -0,0 +1,66 @@ +package hylo + +/** A type-erased collection. 
+ * + * A `AnyCollection` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyCollection[Element] private ( + val _start: () => AnyValue, + val _end: () => AnyValue, + val _after: (AnyValue) => AnyValue, + val _at: (AnyValue) => Element +) + +object AnyCollection { + + /** Creates an instance forwarding its operations to `base`. */ + def apply[Base: Collection as b](base: Base): AnyCollection[b.Element] = + // NOTE: This evidence is redefined so the compiler won't report ambiguity between `intIsValue` + // and `anyValueIsValue` when the method is called on a collection of `Int`s. None of these + // choices is even correct! Note also that the ambiguity is suppressed if the constructor of + // `AnyValue` is declared with a context bound rather than an implicit parameter. + given Value[b.Position] = b.positionIsValue + + def start(): AnyValue = + AnyValue(base.startPosition) + + def end(): AnyValue = + AnyValue(base.endPosition) + + def after(p: AnyValue): AnyValue = + AnyValue(base.positionAfter(p.unsafelyUnwrappedAs[b.Position])) + + def at(p: AnyValue): b.Element = + base.at(p.unsafelyUnwrappedAs[b.Position]) + + new AnyCollection[b.Element]( + _start = start, + _end = end, + _after = after, + _at = at + ) + +} + +given anyCollectionIsCollection[T: Value]: Collection[AnyCollection[T]] with { + + type Element = T + type Position = AnyValue + + extension (self: AnyCollection[T]) { + + def startPosition = + self._start() + + def endPosition = + self._end() + + def positionAfter(p: Position) = + self._after(p) + + def at(p: Position) = + self._at(p) + + } + +} diff --git a/tests/pos/hylolib-cb/AnyValue.scala b/tests/pos/hylolib-cb/AnyValue.scala new file mode 100644 index 000000000000..b9d39869c09a --- /dev/null +++ b/tests/pos/hylolib-cb/AnyValue.scala @@ -0,0 +1,76 @@ +package hylo + +/** A wrapper around an object providing a reference API. 
*/ +private final class Ref[T](val value: T) { + + override def toString: String = + s"Ref($value)" + +} + +/** A type-erased value. + * + * An `AnyValue` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyValue private ( + private val wrapped: AnyRef, + private val _copy: (AnyRef) => AnyValue, + private val _eq: (AnyRef, AnyRef) => Boolean, + private val _hashInto: (AnyRef, Hasher) => Hasher +) { + + /** Returns a copy of `this`. */ + def copy(): AnyValue = + _copy(this.wrapped) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: AnyValue): Boolean = + _eq(this.wrapped, other.wrapped) + + /** Hashes the salient parts of `this` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + _hashInto(this.wrapped, hasher) + + /** Returns the value wrapped in `this` as an instance of `T`. */ + def unsafelyUnwrappedAs[T]: T = + wrapped.asInstanceOf[Ref[T]].value + + /** Returns a textual description of `this`. */ + override def toString: String = + wrapped.toString + +} + +object AnyValue { + + /** Creates an instance wrapping `wrapped`. 
*/ + def apply[T](using Value[T])(wrapped: T): AnyValue = + def copy(a: AnyRef): AnyValue = + AnyValue(a.asInstanceOf[Ref[T]].value.copy()) + + def eq(a: AnyRef, b: AnyRef): Boolean = + a.asInstanceOf[Ref[T]].value `eq` b.asInstanceOf[Ref[T]].value + + def hashInto(a: AnyRef, hasher: Hasher): Hasher = + a.asInstanceOf[Ref[T]].value.hashInto(hasher) + + new AnyValue(Ref(wrapped), copy, eq, hashInto) + +} + +given anyValueIsValue: Value[AnyValue] with { + + extension (self: AnyValue) { + + def copy(): AnyValue = + self.copy() + + def eq(other: AnyValue): Boolean = + self `eq` other + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + + } + +} diff --git a/tests/pos/hylolib-cb/BitArray.scala b/tests/pos/hylolib-cb/BitArray.scala new file mode 100644 index 000000000000..3a0b4658f747 --- /dev/null +++ b/tests/pos/hylolib-cb/BitArray.scala @@ -0,0 +1,372 @@ +package hylo + +import scala.collection.mutable + +/** An array of bit values represented as Booleans, where `true` indicates that the bit is on. */ +final class BitArray private ( + private var _bits: HyArray[Int], + private var _count: Int +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** The number of bits that the array can contain before allocating new storage. */ + def capacity: Int = + _bits.capacity << 5 + + /** Reserves enough storage to store `n` elements in `this`. */ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): BitArray = + if (n == 0) { + this + } else { + val k = 1 + ((n - 1) >> 5) + if (assumeUniqueness) { + _bits = _bits.reserveCapacity(k, assumeUniqueness) + this + } else { + new BitArray(_bits.reserveCapacity(k), _count) + } + } + + /** Adds a new element at the end of the array. 
*/ + def append(bit: Boolean, assumeUniqueness: Boolean = false): BitArray = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + val p = BitArray.Position(count) + if (p.bucket >= _bits.count) { + result._bits = _bits.append(if bit then 1 else 0) + } else { + result.setValue(bit, p) + } + result._count += 1 + result + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (BitArray, Option[Boolean]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + val bit = result.at(BitArray.Position(count)) + result._count -= 1 + (result, Some(bit)) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): BitArray = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + result._bits.removeAll(keepStorage, assumeUniqueness = true) + result._count = 0 + result + } else { + BitArray() + } + + /** Returns `true` iff all elements in `this` are `false`. */ + def allFalse: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == 0 + } else if (_bits.at(i) != 0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns `true` iff all elements in `this` are `true`. */ + def allTrue: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == m + } else if (_bits.at(i) != ~0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns the bitwise OR of `this` and `other`. 
*/ + def | (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ | _, assumeUniqueness = true) + + /** Returns the bitwise AND of `this` and `other`. */ + def & (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ & _, assumeUniqueness = true) + + /** Returns the bitwise XOR of `this` and `other`. */ + def ^ (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ ^ _, assumeUniqueness = true) + + /** Assigns each bits in `this` to the result of `operation` applied on those bits and their + * corresponding bits in `other`. + * + * @requires + * `self.count == other.count`. + */ + private def applyBitwise( + other: BitArray, + operation: (Int, Int) => Int, + assumeUniqueness: Boolean = false + ): BitArray = + require(this.count == other.count) + if (isEmpty) { + this + } else { + val result = if assumeUniqueness then this else copy() + var u = assumeUniqueness + val k = (count - 1) >> 5 + + for (i <- 0 until k) { + result._bits = result._bits.modifyAt( + i, (n) => operation(n, other._bits.at(n)), + assumeUniqueness = u + ) + u = true + } + val m = (1 << (count & 31)) - 1 + result._bits = result._bits.modifyAt( + k, (n) => operation(n & m, other._bits.at(k) & m), + assumeUniqueness = u + ) + + result + } + + /** Returns the position of `this`'s first element', or `endPosition` if `this` is empty. + * + * @complexity + * O(1). + */ + def startPosition: BitArray.Position = + BitArray.Position(0) + + /** Returns the "past the end" position in `this`, that is, the position immediately after the + * last element in `this`. + * + * @complexity + * O(1). + */ + def endPosition: BitArray.Position = + BitArray.Position(count) + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). 
+ */ + def positionAfter(p: BitArray.Position): BitArray.Position = + if (p.offsetInBucket == 63) { + BitArray.Position(p.bucket + 1, 0) + } else { + BitArray.Position(p.bucket, p.offsetInBucket + 1) + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: BitArray.Position): Boolean = + val m = 1 << p.offsetInBucket + val b: Int = _bits.at(p.bucket) + (b & m) == m + + /** Accesses the `i`-th element of `this`. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def atIndex(i: Int): Boolean = + at(BitArray.Position(i)) + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: BitArray.Position, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + val result = if assumeUniqueness then this else copy() + result.setValue(transform(result.at(p)), p) + result + + /** Calls `transform` on `i`-th element of `this` to update its value. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def modifyAtIndex( + i: Int, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + modifyAt(BitArray.Position(i), transform, assumeUniqueness) + + /** Returns an independent copy of `this`. */ + def copy(minimumCapacity: Int = 0): BitArray = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val k = 1 + ((minimumCapacity - 1) >> 5) + val newBits = _bits.copy(k) + new BitArray(newBits, _count) + } + + /** Returns a textual description of `this`. 
*/ + override def toString: String = + _bits.toString + + /** Sets the value `b` for the bit at position `p`. + * + * @requires + * `this` is uniquely referenced and `p` is a valid position in `this`. + */ + private def setValue(b: Boolean, p: BitArray.Position): Unit = + val m = 1 << p.offsetInBucket + _bits = _bits.modifyAt( + p.bucket, + (e) => if b then e | m else e & ~m, + assumeUniqueness = true + ) + +} + +object BitArray { + + /** A position in a `BitArray`. + * + * @param bucket + * The bucket containing `this`. + * @param offsetInBucket + * The offset of `this` in its containing bucket. + */ + final class Position( + private[BitArray] val bucket: Int, + private[BitArray] val offsetInBucket: Int + ) { + + /** Creates a position from an index. */ + private[BitArray] def this(index: Int) = + this(index >> 5, index & 31) + + /** Returns the index corresponding to this position. */ + private def index: Int = + (bucket >> 5) + offsetInBucket + + /** Returns a copy of `this`. */ + def copy(): Position = + new Position(bucket, offsetInBucket) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: Position): Boolean = + (this.bucket == other.bucket) && (this.offsetInBucket == other.offsetInBucket) + + /** Hashes the salient parts of `self` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + hasher.combine(bucket) + hasher.combine(offsetInBucket) + + } + + /** Creates an array with the given `bits`. 
*/ + def apply[T](bits: Boolean*): BitArray = + var result = new BitArray(HyArray[Int](), 0) + for (b <- bits) result = result.append(b, assumeUniqueness = true) + result + +} + +given bitArrayPositionIsValue: Value[BitArray.Position] with { + + extension (self: BitArray.Position) { + + def copy(): BitArray.Position = + self.copy() + + def eq(other: BitArray.Position): Boolean = + self.eq(other) + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + + } + +} + +given bitArrayIsCollection: Collection[BitArray] with { + + type Element = Boolean + type Position = BitArray.Position + + extension (self: BitArray) { + + override def count: Int = + self.count + + def startPosition: BitArray.Position = + self.startPosition + + def endPosition: BitArray.Position = + self.endPosition + + def positionAfter(p: BitArray.Position): BitArray.Position = + self.positionAfter(p) + + def at(p: BitArray.Position): Boolean = + self.at(p) + + } + +} + +given bitArrayIsStringConvertible: StringConvertible[BitArray] with { + + extension (self: BitArray) + override def description: String = + var contents = mutable.StringBuilder() + self.forEach((e) => { contents += (if e then '1' else '0'); true }) + contents.mkString + +} diff --git a/tests/pos/hylolib-cb/Collection.scala b/tests/pos/hylolib-cb/Collection.scala new file mode 100644 index 000000000000..2fc04f02b9ac --- /dev/null +++ b/tests/pos/hylolib-cb/Collection.scala @@ -0,0 +1,278 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A collection of elements accessible by their position. */ +trait Collection[Self] { + + /** The type of the elements in the collection. */ + type Element: Value + + /** The type of a position in the collection. */ + type Position: Value as positionIsValue + + extension (self: Self) { + + /** Returns `true` iff `self` is empty. */ + def isEmpty: Boolean = + startPosition `eq` endPosition + + /** Returns the number of elements in `self`. 
+ * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def count: Int = + val e = endPosition + def _count(p: Position, n: Int): Int = + if p `eq` e then n else _count(self.positionAfter(p), n + 1) + _count(startPosition, 0) + + /** Returns the position of `self`'s first element, or `endPosition` if `self` is empty. + * + * @complexity + * O(1) + */ + def startPosition: Position + + /** Returns the "past the end" position in `self`, that is, the position immediately after the + * last element in `self`. + * + * @complexity + * O(1). + */ + def endPosition: Position + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: Position): Position + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Position): Element + + /** Returns `true` iff `i` precedes `j`. + * + * @requires + * `i` and `j` are valid positions in `self`. + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def isBefore(i: Position, j: Position): Boolean = + val e = self.endPosition + if (i.eq(e)) { + false + } else if (j.eq(e)) { + true + } else { + def _isBefore(n: Position): Boolean = + if (n.eq(j)) { + true + } else if (n.eq(e)) { + false + } else { + _isBefore(self.positionAfter(n)) + } + _isBefore(self.positionAfter(i)) + } + + } + +} + +extension [Self: Collection as s](self: Self) { + + /** Returns the first element of `self` along with a slice containing the suffix after this + * element, or `None` if `self` is empty. 
+ * + * @complexity + * O(1) + */ + def headAndTail: Option[(s.Element, Slice[Self])] = + if (self.isEmpty) { + None + } else { + val p = self.startPosition + val q = self.positionAfter(p) + val t = Slice(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + } + + /** Applies `combine` on `partialResult` and each element of `self`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def reduce[T](partialResult: T, combine: (T, s.Element) => T): T = + val e = self.endPosition + def loop(p: s.Position, r: T): T = + if (p.eq(e)) { + r + } else { + loop(self.positionAfter(p), combine(r, self.at(p))) + } + loop(self.startPosition, partialResult) + + /** Applies `action` on each element of `self`, in order, until `action` returns `false`, and + * returns `false` iff `action` did. + * + * You can return `false` from `action` to emulate a `continue` statement as found in traditional + * imperative languages (e.g., C). + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def forEach(action: (s.Element) => Boolean): Boolean = + val e = self.endPosition + def loop(p: s.Position): Boolean = + if (p.eq(e)) { + true + } else if (!action(self.at(p))) { + false + } else { + loop(self.positionAfter(p)) + } + loop(self.startPosition) + + /** Returns a collection with the elements of `self` transformed by `transform`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def map[T: Value](transform: (s.Element) => T): HyArray[T] = + self.reduce( + HyArray[T](), + (r, e) => r.append(transform(e), assumeUniqueness = true) + ) + + /** Returns a collection with the elements of `self` satisfying `isInclude`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def filter(isIncluded: (s.Element) => Boolean): HyArray[s.Element] = + self.reduce( + HyArray[s.Element](), + (r, e) => if (isIncluded(e)) then r.append(e, assumeUniqueness = true) else r + ) + + /** Returns `true` if `self` contains an element satisfying `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def containsWhere(predicate: (s.Element) => Boolean): Boolean = + self.firstPositionWhere(predicate) != None + + /** Returns `true` if all elements in `self` satisfy `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def allSatisfy(predicate: (s.Element) => Boolean): Boolean = + self.firstPositionWhere(predicate) == None + + /** Returns the position of the first element of `self` satisfying `predicate`, or `None` if no + * such element exists. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def firstPositionWhere(predicate: (s.Element) => Boolean): Option[s.Position] = + val e = self.endPosition + def loop(p: s.Position): Option[s.Position] = + if (p.eq(e)) { + None + } else if (predicate(self.at(p))) { + Some(p) + } else { + loop(self.positionAfter(p)) + } + loop(self.startPosition) + + /** Returns the minimum element in `self`, using `isLessThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement(isLessThan: (s.Element, s.Element) => Boolean): Option[s.Element] = + self.leastElement(isLessThan) + + // NOTE: I can't find a reasonable way to call this method. + /** Returns the minimum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement()(using Comparable[s.Element]): Option[s.Element] = + self.minElement(isLessThan = _ `lt` _) + + /** Returns the maximum element in `self`, using `isGreaterThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def maxElement(isGreaterThan: (s.Element, s.Element) => Boolean): Option[s.Element] = + self.leastElement(isGreaterThan) + + /** Returns the maximum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement()(using Comparable[s.Element]): Option[s.Element] = + self.maxElement(isGreaterThan = _ `gt` _) + + /** Returns the maximum element in `self`, using `isOrderedBefore` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def leastElement(isOrderedBefore: (s.Element, s.Element) => Boolean): Option[s.Element] = + if (self.isEmpty) { + None + } else { + val e = self.endPosition + def _least(p: s.Position, least: s.Element): s.Element = + if (p.eq(e)) { + least + } else { + val x = self.at(p) + val y = if isOrderedBefore(x, least) then x else least + _least(self.positionAfter(p), y) + } + + val b = self.startPosition + Some(_least(self.positionAfter(b), self.at(b))) + } + +} + +extension [Self: Collection as s](self: Self)(using + Value[s.Element] +) { + + /** Returns `true` if `self` contains the same elements as `other`, in the same order. */ + def elementsEqual[T](using o: Collection[T] { type Element = s.Element })(other: T): Boolean = + def loop(i: s.Position, j: o.Position): Boolean = + if (i `eq` self.endPosition) { + j `eq` other.endPosition + } else if (j `eq` other.endPosition) { + false + } else if (self.at(i) `neq` other.at(j)) { + false + } else { + loop(self.positionAfter(i), other.positionAfter(j)) + } + loop(self.startPosition, other.startPosition) + +} diff --git a/tests/pos/hylolib-cb/CoreTraits.scala b/tests/pos/hylolib-cb/CoreTraits.scala new file mode 100644 index 000000000000..01b2c5242af9 --- /dev/null +++ b/tests/pos/hylolib-cb/CoreTraits.scala @@ -0,0 +1,57 @@ +package hylo + +/** A type whose instance can be treated as independent values. 
+ * + * The data structure of and algorithms of Hylo's standard library operate "notional values" rather + * than arbitrary references. This trait defines the basis operations of all values. + */ +trait Value[Self] { + + extension (self: Self) { + + /** Returns a copy of `self`. */ + def copy(): Self + + /** Returns `true` iff `self` and `other` have an equivalent value. */ + def eq(other: Self): Boolean + + /** Hashes the salient parts of `self` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher + + } + +} + +extension [Self: Value](self: Self) def neq(other: Self): Boolean = !self.eq(other) + +// ---------------------------------------------------------------------------- +// Comparable +// ---------------------------------------------------------------------------- + +trait Comparable[Self] extends Value[Self] { + + extension (self: Self) { + + /** Returns `true` iff `self` is ordered before `other`. */ + def lt(other: Self): Boolean + + /** Returns `true` iff `self` is ordered after `other`. */ + def gt(other: Self): Boolean = other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered before `other`. */ + def le(other: Self): Boolean = !other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered after `other`. */ + def ge(other: Self): Boolean = !self.lt(other) + + } + +} + +/** Returns the lesser of `x` and `y`. */ +def min[T: Comparable](x: T, y: T): T = + if y.lt(x) then y else x + +/** Returns the greater of `x` and `y`. */ +def max[T: Comparable](x: T, y: T): T = + if x.lt(y) then y else x diff --git a/tests/pos/hylolib-cb/Hasher.scala b/tests/pos/hylolib-cb/Hasher.scala new file mode 100644 index 000000000000..ef6813df6b60 --- /dev/null +++ b/tests/pos/hylolib-cb/Hasher.scala @@ -0,0 +1,38 @@ +package hylo + +import scala.util.Random + +/** A universal hash function. */ +final class Hasher private (private val hash: Int = Hasher.offsetBasis) { + + /** Returns the computed hash value. 
*/ + def finalizeHash(): Int = + hash + + /** Adds `n` to the computed hash value. */ + def combine(n: Int): Hasher = + var h = hash + h = h ^ n + h = h * Hasher.prime + new Hasher(h) +} + +object Hasher { + + private val offsetBasis = 0x811c9dc5 + private val prime = 0x01000193 + + /** A random seed ensuring different hashes across multiple runs. */ + private lazy val seed = scala.util.Random.nextInt() + + /** Creates an instance with the given `seed`. */ + def apply(): Hasher = + val h = new Hasher() + h.combine(seed) + h + + /** Returns the hash of `v`. */ + def hash[T: Value](v: T): Int = + v.hashInto(Hasher()).finalizeHash() + +} diff --git a/tests/pos/hylolib-cb/HyArray.scala b/tests/pos/hylolib-cb/HyArray.scala new file mode 100644 index 000000000000..0fff45e744ec --- /dev/null +++ b/tests/pos/hylolib-cb/HyArray.scala @@ -0,0 +1,220 @@ +//> using options -language:experimental.modularity -source future +package hylo + +import java.util.Arrays +import scala.collection.mutable + +/** An ordered, random-access collection. */ +final class HyArray[Element: Value as elementIsCValue]( + private var _storage: scala.Array[AnyRef | Null] | Null, + private var _count: Int // NOTE: where do I document private fields +) { + + // NOTE: The fact that we need Array[AnyRef] is diappointing and difficult to discover + // The compiler error sent me on a wild goose chase with ClassTag. + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** Returns the number of elements that `this` can contain before allocating new storage. */ + def capacity: Int = + if _storage == null then 0 else _storage.length + + /** Reserves enough storage to store `n` elements in `this`. 
*/ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): HyArray[Element] = + if (n <= capacity) { + this + } else { + var newCapacity = max(1, capacity) + while (newCapacity < n) { newCapacity = newCapacity << 1 } + + val newStorage = new scala.Array[AnyRef | Null](newCapacity) + val s = _storage.asInstanceOf[scala.Array[AnyRef | Null]] + var i = 0 + while (i < count) { + newStorage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + + if (assumeUniqueness) { + _storage = newStorage + this + } else { + new HyArray(newStorage, count) + } + } + + /** Adds a new element at the end of the array. */ + def append(source: Element, assumeUniqueness: Boolean = false): HyArray[Element] = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + result._storage(count) = source.asInstanceOf[AnyRef] + result._count += 1 + result + + // NOTE: Can't refine `C.Element` without renaming the generic parameter of `HyArray`. + // /** Adds the contents of `source` at the end of the array. */ + // def appendContents[C](using + // s: Collection[C] + // )( + // source: C { type Element = Element }, + // assumeUniqueness: Boolean = false + // ): HyArray[Element] = + // val result = if (assumeUniqueness) { this } else { copy(count + source.count) } + // source.reduce(result, (r, e) => r.append(e, assumeUniqueness = true)) + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (HyArray[Element], Option[Element]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + result._count -= 1 + (result, Some(result._storage(result._count).asInstanceOf[Element])) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. 
*/ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + Arrays.fill(result._storage, null) + result._count = 0 + result + } else { + HyArray() + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Int): Element = + _storage(p).asInstanceOf[Element] + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: Int, + transform: (Element) => Element, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if assumeUniqueness then this else copy() + result._storage(p) = transform(at(p)).asInstanceOf[AnyRef] + result + + /** Returns a textual description of `this`. */ + override def toString: String = + var s = "[" + var i = 0 + while (i < count) { + if (i > 0) { s += ", " } + s += s"${at(i)}" + i += 1 + } + s + "]" + + /** Returns an independent copy of `this`, capable of storing `minimumCapacity` elements before + * allocating new storage. + */ + def copy(minimumCapacity: Int = 0): HyArray[Element] = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val clone = HyArray[Element]().reserveCapacity(max(minimumCapacity, count)) + var i = 0 + while (i < count) { + clone._storage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + clone._count = count + clone + } + +} + +object HyArray { + + /** Creates an array with the given `elements`. 
*/ + def apply[T: Value](elements: T*): HyArray[T] = + var a = new HyArray[T](null, 0) + for (e <- elements) a = a.append(e, assumeUniqueness = true) + a + +} + +given [T: Value] => Value[HyArray[T]] with { + + extension (self: HyArray[T]) { + + def copy(): HyArray[T] = + self.copy() + + def eq(other: HyArray[T]): Boolean = + self.elementsEqual(other) + + def hashInto(hasher: Hasher): Hasher = + self.reduce(hasher, (h, e) => e.hashInto(h)) + + } + +} + +given [T: Value] => Collection[HyArray[T]] with { + + type Element = T + type Position = Int + + extension (self: HyArray[T]) { + + // NOTE: Having to explicitly override means that primary declaration can't automatically + // specialize trait requirements. + override def isEmpty: Boolean = self.isEmpty + + override def count: Int = self.count + + def startPosition = 0 + + def endPosition = self.count + + def positionAfter(p: Int) = p + 1 + + def at(p: Int) = self.at(p) + + } + +} + +// NOTE: This should work. +// given hyArrayIsStringConvertible[T](using +// tIsValue: Value[T], +// tIsStringConvertible: StringConvertible[T] +// ): StringConvertible[HyArray[T]] with { +// +// given Collection[HyArray[T]] = hyArrayIsCollection[T] +// +// extension (self: HyArray[T]) +// override def description: String = +// var contents = mutable.StringBuilder() +// self.forEach((e) => { contents ++= e.description; true }) +// s"[${contents.mkString(", ")}]" +// +// } diff --git a/tests/pos/hylolib-cb/Integers.scala b/tests/pos/hylolib-cb/Integers.scala new file mode 100644 index 000000000000..b9bc203a88ea --- /dev/null +++ b/tests/pos/hylolib-cb/Integers.scala @@ -0,0 +1,58 @@ +package hylo + +given booleanIsValue: Value[Boolean] with { + + extension (self: Boolean) { + + def copy(): Boolean = + // Note: Scala's `Boolean` has value semantics already. 
+ self + + def eq(other: Boolean): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(if self then 1 else 0) + + } + +} + +given intIsValue: Value[Int] with { + + extension (self: Int) { + + def copy(): Int = + // Note: Scala's `Int` has value semantics already. + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + } + +} + +given intIsComparable: Comparable[Int] with { + + extension (self: Int) { + + def copy(): Int = + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + def lt(other: Int): Boolean = self < other + + } + +} + +given intIsStringConvertible: StringConvertible[Int] with {} diff --git a/tests/pos/hylolib-cb/Range.scala b/tests/pos/hylolib-cb/Range.scala new file mode 100644 index 000000000000..1f597652ead1 --- /dev/null +++ b/tests/pos/hylolib-cb/Range.scala @@ -0,0 +1,37 @@ +package hylo + +/** A half-open interval from a lower bound up to, but not including, an uppor bound. */ +final class Range[Bound] private (val lowerBound: Bound, val upperBound: Bound) { + + /** Returns a textual description of `this`. */ + override def toString: String = + s"[${lowerBound}, ${upperBound})" + +} + +object Range { + + /** Creates a half-open interval [`lowerBound`, `upperBound`), using `isLessThanOrEqual` to ensure + * that the bounds are well-formed. + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound]( + lowerBound: Bound, + upperBound: Bound, + isLessThanOrEqual: (Bound, Bound) => Boolean + ) = + require(isLessThanOrEqual(lowerBound, upperBound)) + new Range(lowerBound, upperBound) + + /** Creates a half-open interval [`lowerBound`, `upperBound`). + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. 
+ */ + def apply[Bound](lowerBound: Bound, upperBound: Bound)(using Comparable[Bound]) = + require(lowerBound `le` upperBound) + new Range(lowerBound, upperBound) + +} diff --git a/tests/pos/hylolib-cb/Slice.scala b/tests/pos/hylolib-cb/Slice.scala new file mode 100644 index 000000000000..b577ceeb3739 --- /dev/null +++ b/tests/pos/hylolib-cb/Slice.scala @@ -0,0 +1,44 @@ +package hylo + +/** A view into a collection. */ +final class Slice[Base: Collection as b]( + val base: Base, + val bounds: Range[b.Position] +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: b.Position = + bounds.lowerBound + + def endPosition: b.Position = + bounds.upperBound + + def positionAfter(p: b.Position): b.Position = + base.positionAfter(p) + + def at(p: b.Position): b.Element = + base.at(p) + +} + +given sliceIsCollection[T: Collection as c]: Collection[Slice[T]] with { + + type Element = c.Element + type Position = c.Position + + extension (self: Slice[T]) { + + def startPosition = self.bounds.lowerBound.asInstanceOf[Position] // NOTE: Ugly hack + + def endPosition = self.bounds.upperBound.asInstanceOf[Position] + + def positionAfter(p: Position) = self.base.positionAfter(p) + + def at(p: Position) = self.base.at(p) + + } + +} diff --git a/tests/pos/hylolib-cb/StringConvertible.scala b/tests/pos/hylolib-cb/StringConvertible.scala new file mode 100644 index 000000000000..0702f79f2794 --- /dev/null +++ b/tests/pos/hylolib-cb/StringConvertible.scala @@ -0,0 +1,14 @@ +package hylo + +/** A type whose instances can be described by a character string. */ +trait StringConvertible[Self] { + + extension (self: Self) { + + /** Returns a textual description of `self`. 
*/ + def description: String = + self.toString + + } + +} diff --git a/tests/pos/hylolib-deferred-given-extract.scala b/tests/pos/hylolib-deferred-given-extract.scala new file mode 100644 index 000000000000..02d889dc9aac --- /dev/null +++ b/tests/pos/hylolib-deferred-given-extract.scala @@ -0,0 +1,19 @@ +//> using options -language:experimental.modularity -source future +package hylotest +import compiletime.deferred + +trait Value[Self] + +/** A collection of elements accessible by their position. */ +trait Collection[Self]: + + /** The type of the elements in the collection. */ + type Element + given elementIsValue: Value[Element] = compiletime.deferred + +class BitArray + +given Value[Boolean] {} + +given Collection[BitArray] with + type Element = Boolean diff --git a/tests/pos/hylolib-deferred-given/AnyCollection.scala b/tests/pos/hylolib-deferred-given/AnyCollection.scala new file mode 100644 index 000000000000..55e453d6dc87 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/AnyCollection.scala @@ -0,0 +1,69 @@ +package hylo + +/** A type-erased collection. + * + * A `AnyCollection` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyCollection[Element] private ( + val _start: () => AnyValue, + val _end: () => AnyValue, + val _after: (AnyValue) => AnyValue, + val _at: (AnyValue) => Element +) + +object AnyCollection { + + /** Creates an instance forwarding its operations to `base`. */ + def apply[Base](using b: Collection[Base])(base: Base): AnyCollection[b.Element] = + // NOTE: This evidence is redefined so the compiler won't report ambiguity between `intIsValue` + // and `anyValueIsValue` when the method is called on a collection of `Int`s. None of these + // choices is even correct! Note also that the ambiguity is suppressed if the constructor of + // `AnyValue` is declared with a context bound rather than an implicit parameter. 
+ given Value[b.Position] = b.positionIsValue + + def start(): AnyValue = + AnyValue(base.startPosition) + + def end(): AnyValue = + AnyValue(base.endPosition) + + def after(p: AnyValue): AnyValue = + AnyValue(base.positionAfter(p.unsafelyUnwrappedAs[b.Position])) + + def at(p: AnyValue): b.Element = + base.at(p.unsafelyUnwrappedAs[b.Position]) + + new AnyCollection[b.Element]( + _start = start, + _end = end, + _after = after, + _at = at + ) + +} + +given anyCollectionIsCollection[T](using tIsValue: Value[T]): Collection[AnyCollection[T]] with { + + type Element = T + //given elementIsValue: Value[Element] = tIsValue + + type Position = AnyValue + given positionIsValue: Value[Position] = anyValueIsValue + + extension (self: AnyCollection[T]) { + + def startPosition = + self._start() + + def endPosition = + self._end() + + def positionAfter(p: Position) = + self._after(p) + + def at(p: Position) = + self._at(p) + + } + +} diff --git a/tests/pos/hylolib-deferred-given/AnyValue.scala b/tests/pos/hylolib-deferred-given/AnyValue.scala new file mode 100644 index 000000000000..21f2965e102e --- /dev/null +++ b/tests/pos/hylolib-deferred-given/AnyValue.scala @@ -0,0 +1,76 @@ +package hylo + +/** A wrapper around an object providing a reference API. */ +private final class Ref[T](val value: T) { + + override def toString: String = + s"Ref($value)" + +} + +/** A type-erased value. + * + * An `AnyValue` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyValue private ( + private val wrapped: AnyRef, + private val _copy: (AnyRef) => AnyValue, + private val _eq: (AnyRef, AnyRef) => Boolean, + private val _hashInto: (AnyRef, Hasher) => Hasher +) { + + /** Returns a copy of `this`. */ + def copy(): AnyValue = + _copy(this.wrapped) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: AnyValue): Boolean = + _eq(this.wrapped, other.wrapped) + + /** Hashes the salient parts of `this` into `hasher`. 
*/ + def hashInto(hasher: Hasher): Hasher = + _hashInto(this.wrapped, hasher) + + /** Returns the value wrapped in `this` as an instance of `T`. */ + def unsafelyUnwrappedAs[T]: T = + wrapped.asInstanceOf[Ref[T]].value + + /** Returns a textual description of `this`. */ + override def toString: String = + wrapped.toString + +} + +object AnyValue { + + /** Creates an instance wrapping `wrapped`. */ + def apply[T: Value](wrapped: T): AnyValue = + def copy(a: AnyRef): AnyValue = + AnyValue(a.asInstanceOf[Ref[T]].value.copy()) + + def eq(a: AnyRef, b: AnyRef): Boolean = + a.asInstanceOf[Ref[T]].value `eq` b.asInstanceOf[Ref[T]].value + + def hashInto(a: AnyRef, hasher: Hasher): Hasher = + a.asInstanceOf[Ref[T]].value.hashInto(hasher) + + new AnyValue(Ref(wrapped), copy, eq, hashInto) + +} + +given anyValueIsValue: Value[AnyValue] with { + + extension (self: AnyValue) { + + def copy(): AnyValue = + self.copy() + + def eq(other: AnyValue): Boolean = + self `eq` other + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + + } + +} diff --git a/tests/pos/hylolib-deferred-given/BitArray.scala b/tests/pos/hylolib-deferred-given/BitArray.scala new file mode 100644 index 000000000000..485f30472847 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/BitArray.scala @@ -0,0 +1,375 @@ +package hylo + +import scala.collection.mutable + +/** An array of bit values represented as Booleans, where `true` indicates that the bit is on. */ +final class BitArray private ( + private var _bits: HyArray[Int], + private var _count: Int +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** The number of bits that the array can contain before allocating new storage. */ + def capacity: Int = + _bits.capacity << 5 + + /** Reserves enough storage to store `n` elements in `this`. 
*/ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): BitArray = + if (n == 0) { + this + } else { + val k = 1 + ((n - 1) >> 5) + if (assumeUniqueness) { + _bits = _bits.reserveCapacity(k, assumeUniqueness) + this + } else { + new BitArray(_bits.reserveCapacity(k), _count) + } + } + + /** Adds a new element at the end of the array. */ + def append(bit: Boolean, assumeUniqueness: Boolean = false): BitArray = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + val p = BitArray.Position(count) + if (p.bucket >= _bits.count) { + result._bits = _bits.append(if bit then 1 else 0) + } else { + result.setValue(bit, p) + } + result._count += 1 + result + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (BitArray, Option[Boolean]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + val bit = result.at(BitArray.Position(count)) + result._count -= 1 + (result, Some(bit)) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): BitArray = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + result._bits.removeAll(keepStorage, assumeUniqueness = true) + result._count = 0 + result + } else { + BitArray() + } + + /** Returns `true` iff all elements in `this` are `false`. */ + def allFalse: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == 0 + } else if (_bits.at(i) != 0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns `true` iff all elements in `this` are `true`. 
*/ + def allTrue: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == m + } else if (_bits.at(i) != ~0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns the bitwise OR of `this` and `other`. */ + def | (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ | _, assumeUniqueness = true) + + /** Returns the bitwise AND of `this` and `other`. */ + def & (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ & _, assumeUniqueness = true) + + /** Returns the bitwise XOR of `this` and `other`. */ + def ^ (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ ^ _, assumeUniqueness = true) + + /** Assigns each bits in `this` to the result of `operation` applied on those bits and their + * corresponding bits in `other`. + * + * @requires + * `self.count == other.count`. + */ + private def applyBitwise( + other: BitArray, + operation: (Int, Int) => Int, + assumeUniqueness: Boolean = false + ): BitArray = + require(this.count == other.count) + if (isEmpty) { + this + } else { + val result = if assumeUniqueness then this else copy() + var u = assumeUniqueness + val k = (count - 1) >> 5 + + for (i <- 0 until k) { + result._bits = result._bits.modifyAt( + i, (n) => operation(n, other._bits.at(n)), + assumeUniqueness = u + ) + u = true + } + val m = (1 << (count & 31)) - 1 + result._bits = result._bits.modifyAt( + k, (n) => operation(n & m, other._bits.at(k) & m), + assumeUniqueness = u + ) + + result + } + + /** Returns the position of `this`'s first element', or `endPosition` if `this` is empty. + * + * @complexity + * O(1). + */ + def startPosition: BitArray.Position = + BitArray.Position(0) + + /** Returns the "past the end" position in `this`, that is, the position immediately after the + * last element in `this`. + * + * @complexity + * O(1). 
+ */ + def endPosition: BitArray.Position = + BitArray.Position(count) + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: BitArray.Position): BitArray.Position = + if (p.offsetInBucket == 63) { + BitArray.Position(p.bucket + 1, 0) + } else { + BitArray.Position(p.bucket, p.offsetInBucket + 1) + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: BitArray.Position): Boolean = + val m = 1 << p.offsetInBucket + val b: Int = _bits.at(p.bucket) + (b & m) == m + + /** Accesses the `i`-th element of `this`. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def atIndex(i: Int): Boolean = + at(BitArray.Position(i)) + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: BitArray.Position, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + val result = if assumeUniqueness then this else copy() + result.setValue(transform(result.at(p)), p) + result + + /** Calls `transform` on `i`-th element of `this` to update its value. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def modifyAtIndex( + i: Int, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + modifyAt(BitArray.Position(i), transform, assumeUniqueness) + + /** Returns an independent copy of `this`. */ + def copy(minimumCapacity: Int = 0): BitArray = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. 
+ reserveCapacity(minimumCapacity) + } else { + val k = 1 + ((minimumCapacity - 1) >> 5) + val newBits = _bits.copy(k) + new BitArray(newBits, _count) + } + + /** Returns a textual description of `this`. */ + override def toString: String = + _bits.toString + + /** Sets the value `b` for the bit at position `p`. + * + * @requires + * `this` is uniquely referenced and `p` is a valid position in `this`. + */ + private def setValue(b: Boolean, p: BitArray.Position): Unit = + val m = 1 << p.offsetInBucket + _bits = _bits.modifyAt( + p.bucket, + (e) => if b then e | m else e & ~m, + assumeUniqueness = true + ) + +} + +object BitArray { + + /** A position in a `BitArray`. + * + * @param bucket + * The bucket containing `this`. + * @param offsetInBucket + * The offset of `this` in its containing bucket. + */ + final class Position( + private[BitArray] val bucket: Int, + private[BitArray] val offsetInBucket: Int + ) { + + /** Creates a position from an index. */ + private[BitArray] def this(index: Int) = + this(index >> 5, index & 31) + + /** Returns the index corresponding to this position. */ + private def index: Int = + (bucket >> 5) + offsetInBucket + + /** Returns a copy of `this`. */ + def copy(): Position = + new Position(bucket, offsetInBucket) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: Position): Boolean = + (this.bucket == other.bucket) && (this.offsetInBucket == other.offsetInBucket) + + /** Hashes the salient parts of `self` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + hasher.combine(bucket) + hasher.combine(offsetInBucket) + + } + + /** Creates an array with the given `bits`. 
*/ + def apply[T](bits: Boolean*): BitArray = + var result = new BitArray(HyArray[Int](), 0) + for (b <- bits) result = result.append(b, assumeUniqueness = true) + result + +} + +given bitArrayPositionIsValue: Value[BitArray.Position] with { + + extension (self: BitArray.Position) { + + def copy(): BitArray.Position = + self.copy() + + def eq(other: BitArray.Position): Boolean = + self.eq(other) + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + + } + +} + +given bitArrayIsCollection: Collection[BitArray] with { + + type Element = Boolean + //given elementIsValue: Value[Boolean] = booleanIsValue + + type Position = BitArray.Position + given positionIsValue: Value[BitArray.Position] = bitArrayPositionIsValue + + extension (self: BitArray) { + + override def count: Int = + self.count + + def startPosition: BitArray.Position = + self.startPosition + + def endPosition: BitArray.Position = + self.endPosition + + def positionAfter(p: BitArray.Position): BitArray.Position = + self.positionAfter(p) + + def at(p: BitArray.Position): Boolean = + self.at(p) + + } + +} + +given bitArrayIsStringConvertible: StringConvertible[BitArray] with { + + extension (self: BitArray) + override def description: String = + var contents = mutable.StringBuilder() + self.forEach((e) => { contents += (if e then '1' else '0'); true }) + contents.mkString + +} diff --git a/tests/pos/hylolib-deferred-given/Collection.scala b/tests/pos/hylolib-deferred-given/Collection.scala new file mode 100644 index 000000000000..6b5e7a762dc8 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Collection.scala @@ -0,0 +1,281 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A collection of elements accessible by their position. */ +trait Collection[Self] { + + /** The type of the elements in the collection. */ + type Element + given elementIsValue: Value[Element] = compiletime.deferred + + /** The type of a position in the collection. 
*/ + type Position + given positionIsValue: Value[Position] + + extension (self: Self) { + + /** Returns `true` iff `self` is empty. */ + def isEmpty: Boolean = + startPosition `eq` endPosition + + /** Returns the number of elements in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def count: Int = + val e = endPosition + def _count(p: Position, n: Int): Int = + if p `eq` e then n else _count(self.positionAfter(p), n + 1) + _count(startPosition, 0) + + /** Returns the position of `self`'s first element', or `endPosition` if `self` is empty. + * + * @complexity + * O(1) + */ + def startPosition: Position + + /** Returns the "past the end" position in `self`, that is, the position immediately after the + * last element in `self`. + * + * @complexity + * O(1). + */ + def endPosition: Position + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: Position): Position + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Position): Element + + /** Returns `true` iff `i` precedes `j`. + * + * @requires + * `i` and j` are valid positions in `self`. + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def isBefore(i: Position, j: Position): Boolean = + val e = self.endPosition + if (i.eq(e)) { + false + } else if (j.eq(e)) { + true + } else { + def _isBefore(n: Position): Boolean = + if (n.eq(j)) { + true + } else if (n.eq(e)) { + false + } else { + _isBefore(self.positionAfter(n)) + } + _isBefore(self.positionAfter(i)) + } + + } + +} + +extension [Self](self: Self)(using s: Collection[Self]) { + + /** Returns the first element of `self` along with a slice containing the suffix after this + * element, or `None` if `self` is empty. 
+ * + * @complexity + * O(1) + */ + def headAndTail: Option[(s.Element, Slice[Self])] = + if (self.isEmpty) { + None + } else { + val p = self.startPosition + val q = self.positionAfter(p) + val t = Slice(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + } + + /** Applies `combine` on `partialResult` and each element of `self`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def reduce[T](partialResult: T, combine: (T, s.Element) => T): T = + val e = self.endPosition + def loop(p: s.Position, r: T): T = + if (p.eq(e)) { + r + } else { + loop(self.positionAfter(p), combine(r, self.at(p))) + } + loop(self.startPosition, partialResult) + + /** Applies `action` on each element of `self`, in order, until `action` returns `false`, and + * returns `false` iff `action` did. + * + * You can return `false` from `action` to emulate a `continue` statement as found in traditional + * imperative languages (e.g., C). + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def forEach(action: (s.Element) => Boolean): Boolean = + val e = self.endPosition + def loop(p: s.Position): Boolean = + if (p.eq(e)) { + true + } else if (!action(self.at(p))) { + false + } else { + loop(self.positionAfter(p)) + } + loop(self.startPosition) + + /** Returns a collection with the elements of `self` transformed by `transform`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def map[T](using Value[T])(transform: (s.Element) => T): HyArray[T] = + self.reduce( + HyArray[T](), + (r, e) => r.append(transform(e), assumeUniqueness = true) + ) + + /** Returns a collection with the elements of `self` satisfying `isInclude`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def filter(isIncluded: (s.Element) => Boolean): HyArray[s.Element] = + self.reduce( + HyArray[s.Element](), + (r, e) => if (isIncluded(e)) then r.append(e, assumeUniqueness = true) else r + ) + + /** Returns `true` if `self` contains an element satisfying `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def containsWhere(predicate: (s.Element) => Boolean): Boolean = + self.firstPositionWhere(predicate) != None + + /** Returns `true` if all elements in `self` satisfy `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def allSatisfy(predicate: (s.Element) => Boolean): Boolean = + self.firstPositionWhere(predicate) == None + + /** Returns the position of the first element of `self` satisfying `predicate`, or `None` if no + * such element exists. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def firstPositionWhere(predicate: (s.Element) => Boolean): Option[s.Position] = + val e = self.endPosition + def loop(p: s.Position): Option[s.Position] = + if (p.eq(e)) { + None + } else if (predicate(self.at(p))) { + Some(p) + } else { + loop(self.positionAfter(p)) + } + loop(self.startPosition) + + /** Returns the minimum element in `self`, using `isLessThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement(isLessThan: (s.Element, s.Element) => Boolean): Option[s.Element] = + self.leastElement(isLessThan) + + // NOTE: I can't find a reasonable way to call this method. + /** Returns the minimum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement()(using Comparable[s.Element]): Option[s.Element] = + self.minElement(isLessThan = _ `lt` _) + + /** Returns the maximum element in `self`, using `isGreaterThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def maxElement(isGreaterThan: (s.Element, s.Element) => Boolean): Option[s.Element] = + self.leastElement(isGreaterThan) + + /** Returns the maximum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement()(using Comparable[s.Element]): Option[s.Element] = + self.maxElement(isGreaterThan = _ `gt` _) + + /** Returns the maximum element in `self`, using `isOrderedBefore` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def leastElement(isOrderedBefore: (s.Element, s.Element) => Boolean): Option[s.Element] = + if (self.isEmpty) { + None + } else { + val e = self.endPosition + def _least(p: s.Position, least: s.Element): s.Element = + if (p.eq(e)) { + least + } else { + val x = self.at(p) + val y = if isOrderedBefore(x, least) then x else least + _least(self.positionAfter(p), y) + } + + val b = self.startPosition + Some(_least(self.positionAfter(b), self.at(b))) + } + +} + +extension [Self](self: Self)(using + s: Collection[Self], + e: Value[s.Element] +) { + + /** Returns `true` if `self` contains the same elements as `other`, in the same order. */ + def elementsEqual[T](using o: Collection[T] { type Element = s.Element })(other: T): Boolean = + def loop(i: s.Position, j: o.Position): Boolean = + if (i `eq` self.endPosition) { + j `eq` other.endPosition + } else if (j `eq` other.endPosition) { + false + } else if (self.at(i) `neq` other.at(j)) { + false + } else { + loop(self.positionAfter(i), other.positionAfter(j)) + } + loop(self.startPosition, other.startPosition) + +} diff --git a/tests/pos/hylolib-deferred-given/CoreTraits.scala b/tests/pos/hylolib-deferred-given/CoreTraits.scala new file mode 100644 index 000000000000..01b2c5242af9 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/CoreTraits.scala @@ -0,0 +1,57 @@ +package hylo + +/** A type whose instance can be treated as independent values. 
+ * + * The data structure of and algorithms of Hylo's standard library operate "notional values" rather + * than arbitrary references. This trait defines the basis operations of all values. + */ +trait Value[Self] { + + extension (self: Self) { + + /** Returns a copy of `self`. */ + def copy(): Self + + /** Returns `true` iff `self` and `other` have an equivalent value. */ + def eq(other: Self): Boolean + + /** Hashes the salient parts of `self` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher + + } + +} + +extension [Self: Value](self: Self) def neq(other: Self): Boolean = !self.eq(other) + +// ---------------------------------------------------------------------------- +// Comparable +// ---------------------------------------------------------------------------- + +trait Comparable[Self] extends Value[Self] { + + extension (self: Self) { + + /** Returns `true` iff `self` is ordered before `other`. */ + def lt(other: Self): Boolean + + /** Returns `true` iff `self` is ordered after `other`. */ + def gt(other: Self): Boolean = other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered before `other`. */ + def le(other: Self): Boolean = !other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered after `other`. */ + def ge(other: Self): Boolean = !self.lt(other) + + } + +} + +/** Returns the lesser of `x` and `y`. */ +def min[T: Comparable](x: T, y: T): T = + if y.lt(x) then y else x + +/** Returns the greater of `x` and `y`. */ +def max[T: Comparable](x: T, y: T): T = + if x.lt(y) then y else x diff --git a/tests/pos/hylolib-deferred-given/Hasher.scala b/tests/pos/hylolib-deferred-given/Hasher.scala new file mode 100644 index 000000000000..ca45550ed002 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Hasher.scala @@ -0,0 +1,39 @@ +//> using options -language:experimental.modularity -source future +package hylo + +import scala.util.Random + +/** A universal hash function. 
*/ +final class Hasher private (private val hash: Int = Hasher.offsetBasis) { + + /** Returns the computed hash value. */ + def finalizeHash(): Int = + hash + + /** Adds `n` to the computed hash value. */ + def combine(n: Int): Hasher = + var h = hash + h = h ^ n + h = h * Hasher.prime + new Hasher(h) +} + +object Hasher { + + private val offsetBasis = 0x811c9dc5 + private val prime = 0x01000193 + + /** A random seed ensuring different hashes across multiple runs. */ + private lazy val seed = scala.util.Random.nextInt() + + /** Creates an instance with the given `seed`. */ + def apply(): Hasher = + val h = new Hasher() + h.combine(seed) + h + + /** Returns the hash of `v`. */ + def hash[T: Value](v: T): Int = + v.hashInto(Hasher()).finalizeHash() + +} diff --git a/tests/pos/hylolib-deferred-given/HyArray.scala b/tests/pos/hylolib-deferred-given/HyArray.scala new file mode 100644 index 000000000000..98632dcb65bc --- /dev/null +++ b/tests/pos/hylolib-deferred-given/HyArray.scala @@ -0,0 +1,224 @@ +package hylo + +import java.util.Arrays +import scala.collection.mutable + +/** An ordered, random-access collection. */ +final class HyArray[Element] private (using + elementIsValue: Value[Element] +)( + private var _storage: scala.Array[AnyRef | Null] | Null, + private var _count: Int // NOTE: where do I document private fields +) { + + // NOTE: The fact that we need Array[AnyRef] is diappointing and difficult to discover + // The compiler error sent me on a wild goose chase with ClassTag. + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** Returns the number of elements that `this` can contain before allocating new storage. */ + def capacity: Int = + if _storage == null then 0 else _storage.length + + /** Reserves enough storage to store `n` elements in `this`. 
*/ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): HyArray[Element] = + if (n <= capacity) { + this + } else { + var newCapacity = max(1, capacity) + while (newCapacity < n) { newCapacity = newCapacity << 1 } + + val newStorage = new scala.Array[AnyRef | Null](newCapacity) + val s = _storage.asInstanceOf[scala.Array[AnyRef | Null]] + var i = 0 + while (i < count) { + newStorage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + + if (assumeUniqueness) { + _storage = newStorage + this + } else { + new HyArray(newStorage, count) + } + } + + /** Adds a new element at the end of the array. */ + def append(source: Element, assumeUniqueness: Boolean = false): HyArray[Element] = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + result._storage(count) = source.asInstanceOf[AnyRef] + result._count += 1 + result + + // NOTE: Can't refine `C.Element` without renaming the generic parameter of `HyArray`. + // /** Adds the contents of `source` at the end of the array. */ + // def appendContents[C](using + // s: Collection[C] + // )( + // source: C { type Element = Element }, + // assumeUniqueness: Boolean = false + // ): HyArray[Element] = + // val result = if (assumeUniqueness) { this } else { copy(count + source.count) } + // source.reduce(result, (r, e) => r.append(e, assumeUniqueness = true)) + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (HyArray[Element], Option[Element]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + result._count -= 1 + (result, Some(result._storage(result._count).asInstanceOf[Element])) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. 
*/ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + Arrays.fill(result._storage, null) + result._count = 0 + result + } else { + HyArray() + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Int): Element = + _storage(p).asInstanceOf[Element] + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: Int, + transform: (Element) => Element, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if assumeUniqueness then this else copy() + result._storage(p) = transform(at(p)).asInstanceOf[AnyRef] + result + + /** Returns a textual description of `this`. */ + override def toString: String = + var s = "[" + var i = 0 + while (i < count) { + if (i > 0) { s += ", " } + s += s"${at(i)}" + i += 1 + } + s + "]" + + /** Returns an independent copy of `this`, capable of storing `minimumCapacity` elements before + * allocating new storage. + */ + def copy(minimumCapacity: Int = 0): HyArray[Element] = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val clone = HyArray[Element]().reserveCapacity(max(minimumCapacity, count)) + var i = 0 + while (i < count) { + clone._storage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + clone._count = count + clone + } + +} + +object HyArray { + + /** Creates an array with the given `elements`. 
*/ + def apply[T](using t: Value[T])(elements: T*): HyArray[T] = + var a = new HyArray[T](null, 0) + for (e <- elements) a = a.append(e, assumeUniqueness = true) + a + +} + +given hyArrayIsValue[T](using tIsValue: Value[T]): Value[HyArray[T]] with { + + extension (self: HyArray[T]) { + + def copy(): HyArray[T] = + self.copy() + + def eq(other: HyArray[T]): Boolean = + self.elementsEqual(other) + + def hashInto(hasher: Hasher): Hasher = + self.reduce(hasher, (h, e) => e.hashInto(h)) + + } + +} + +given hyArrayIsCollection[T](using tIsValue: Value[T]): Collection[HyArray[T]] with { + + type Element = T + //given elementIsValue: Value[T] = tIsValue + + type Position = Int + given positionIsValue: Value[Int] = intIsValue + + extension (self: HyArray[T]) { + + // NOTE: Having to explicitly override means that primary declaration can't automatically + // specialize trait requirements. + override def isEmpty: Boolean = self.isEmpty + + override def count: Int = self.count + + def startPosition = 0 + + def endPosition = self.count + + def positionAfter(p: Int) = p + 1 + + def at(p: Int) = self.at(p) + + } + +} + +// NOTE: This should work. 
+// given hyArrayIsStringConvertible[T](using +// tIsValue: Value[T], +// tIsStringConvertible: StringConvertible[T] +// ): StringConvertible[HyArray[T]] with { +// +// given Collection[HyArray[T]] = hyArrayIsCollection[T] +// +// extension (self: HyArray[T]) +// override def description: String = +// var contents = mutable.StringBuilder() +// self.forEach((e) => { contents ++= e.description; true }) +// s"[${contents.mkString(", ")}]" +// +// } diff --git a/tests/pos/hylolib-deferred-given/Integers.scala b/tests/pos/hylolib-deferred-given/Integers.scala new file mode 100644 index 000000000000..b9bc203a88ea --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Integers.scala @@ -0,0 +1,58 @@ +package hylo + +given booleanIsValue: Value[Boolean] with { + + extension (self: Boolean) { + + def copy(): Boolean = + // Note: Scala's `Boolean` has value semantics already. + self + + def eq(other: Boolean): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(if self then 1 else 0) + + } + +} + +given intIsValue: Value[Int] with { + + extension (self: Int) { + + def copy(): Int = + // Note: Scala's `Int` has value semantics already. + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + } + +} + +given intIsComparable: Comparable[Int] with { + + extension (self: Int) { + + def copy(): Int = + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + def lt(other: Int): Boolean = self < other + + } + +} + +given intIsStringConvertible: StringConvertible[Int] with {} diff --git a/tests/pos/hylolib-deferred-given/Range.scala b/tests/pos/hylolib-deferred-given/Range.scala new file mode 100644 index 000000000000..b0f50dd55c8c --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Range.scala @@ -0,0 +1,37 @@ +package hylo + +/** A half-open interval from a lower bound up to, but not including, an uppor bound. 
*/ +final class Range[Bound] private (val lowerBound: Bound, val upperBound: Bound) { + + /** Returns a textual description of `this`. */ + override def toString: String = + s"[${lowerBound}, ${upperBound})" + +} + +object Range { + + /** Creates a half-open interval [`lowerBound`, `upperBound`), using `isLessThanOrEqual` to ensure + * that the bounds are well-formed. + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound]( + lowerBound: Bound, + upperBound: Bound, + isLessThanOrEqual: (Bound, Bound) => Boolean + ) = + require(isLessThanOrEqual(lowerBound, upperBound)) + new Range(lowerBound, upperBound) + + /** Creates a half-open interval [`lowerBound`, `upperBound`). + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound: Comparable](lowerBound: Bound, upperBound: Bound) = + require(lowerBound `le` upperBound) + new Range(lowerBound, upperBound) + +} diff --git a/tests/pos/hylolib-deferred-given/Slice.scala b/tests/pos/hylolib-deferred-given/Slice.scala new file mode 100644 index 000000000000..57cdb38f6e53 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/Slice.scala @@ -0,0 +1,49 @@ +package hylo + +/** A view into a collection. */ +final class Slice[Base](using + val b: Collection[Base] +)( + val base: Base, + val bounds: Range[b.Position] +) { + + /** Returns `true` iff `this` is empty. 
*/ + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: b.Position = + bounds.lowerBound + + def endPosition: b.Position = + bounds.upperBound + + def positionAfter(p: b.Position): b.Position = + base.positionAfter(p) + + def at(p: b.Position): b.Element = + base.at(p) + +} + +given sliceIsCollection[T](using c: Collection[T]): Collection[Slice[T]] with { + + type Element = c.Element + //given elementIsValue: Value[Element] = c.elementIsValue + + type Position = c.Position + given positionIsValue: Value[Position] = c.positionIsValue + + extension (self: Slice[T]) { + + def startPosition = self.bounds.lowerBound.asInstanceOf[Position] // NOTE: Ugly hack + + def endPosition = self.bounds.upperBound.asInstanceOf[Position] + + def positionAfter(p: Position) = self.base.positionAfter(p) + + def at(p: Position) = self.base.at(p) + + } + +} diff --git a/tests/pos/hylolib-deferred-given/StringConvertible.scala b/tests/pos/hylolib-deferred-given/StringConvertible.scala new file mode 100644 index 000000000000..0702f79f2794 --- /dev/null +++ b/tests/pos/hylolib-deferred-given/StringConvertible.scala @@ -0,0 +1,14 @@ +package hylo + +/** A type whose instances can be described by a character string. */ +trait StringConvertible[Self] { + + extension (self: Self) { + + /** Returns a textual description of `self`. */ + def description: String = + self.toString + + } + +} diff --git a/tests/pos/hylolib-extract.scala b/tests/pos/hylolib-extract.scala new file mode 100644 index 000000000000..846e52f30df6 --- /dev/null +++ b/tests/pos/hylolib-extract.scala @@ -0,0 +1,29 @@ +//> using options -language:experimental.modularity -source future +package hylotest + +trait Value: + type Self + extension (self: Self) def eq(other: Self): Boolean + +/** A collection of elements accessible by their position. */ +trait Collection: + type Self + + /** The type of the elements in the collection. 
*/ + type Element: Value + +class BitArray + +given Boolean is Value: + extension (self: Self) def eq(other: Self): Boolean = + self == other + +given BitArray is Collection: + type Element = Boolean + +extension [Self: Value](self: Self) + def neq(other: Self): Boolean = !self.eq(other) + +extension [Self: Collection](self: Self) + def elementsEqual[T: Collection { type Element = Self.Element } ](other: T): Boolean = + ??? diff --git a/tests/pos/hylolib/AnyCollection.scala b/tests/pos/hylolib/AnyCollection.scala new file mode 100644 index 000000000000..6c2b835852e6 --- /dev/null +++ b/tests/pos/hylolib/AnyCollection.scala @@ -0,0 +1,51 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A type-erased collection. + * + * A `AnyCollection` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyCollection[Element] private ( + val _start: () => AnyValue, + val _end: () => AnyValue, + val _after: (AnyValue) => AnyValue, + val _at: (AnyValue) => Element +) + +object AnyCollection { + + /** Creates an instance forwarding its operations to `base`. 
*/ + def apply[Base: Collection](base: Base): AnyCollection[Base.Element] = + + def start(): AnyValue = + AnyValue(base.startPosition) + + def end(): AnyValue = + AnyValue(base.endPosition) + + def after(p: AnyValue): AnyValue = + AnyValue(base.positionAfter(p.unsafelyUnwrappedAs[Base.Position])) + + def at(p: AnyValue): Base.Element = + base.at(p.unsafelyUnwrappedAs[Base.Position]) + + new AnyCollection[Base.Element]( + _start = start, + _end = end, + _after = after, + _at = at + ) + +} + +given [T: Value] => AnyCollection[T] is Collection: + + type Element = T + type Position = AnyValue + + extension (self: AnyCollection[T]) + def startPosition = self._start() + def endPosition = self._end() + def positionAfter(p: Position) = self._after(p) + def at(p: Position) = self._at(p) + diff --git a/tests/pos/hylolib/AnyValue.scala b/tests/pos/hylolib/AnyValue.scala new file mode 100644 index 000000000000..6844135b646b --- /dev/null +++ b/tests/pos/hylolib/AnyValue.scala @@ -0,0 +1,67 @@ +package hylo + +/** A wrapper around an object providing a reference API. */ +private final class Ref[T](val value: T) { + + override def toString: String = + s"Ref($value)" + +} + +/** A type-erased value. + * + * An `AnyValue` forwards its operations to a wrapped value, hiding its implementation. + */ +final class AnyValue private ( + private val wrapped: AnyRef, + private val _copy: (AnyRef) => AnyValue, + private val _eq: (AnyRef, AnyRef) => Boolean, + private val _hashInto: (AnyRef, Hasher) => Hasher +) { + + /** Returns a copy of `this`. */ + def copy(): AnyValue = + _copy(this.wrapped) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: AnyValue): Boolean = + _eq(this.wrapped, other.wrapped) + + /** Hashes the salient parts of `this` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + _hashInto(this.wrapped, hasher) + + /** Returns the value wrapped in `this` as an instance of `T`. 
*/ + def unsafelyUnwrappedAs[T]: T = + wrapped.asInstanceOf[Ref[T]].value + + /** Returns a textual description of `this`. */ + override def toString: String = + wrapped.toString + +} + +object AnyValue { + + /** Creates an instance wrapping `wrapped`. */ + def apply[T: Value](wrapped: T): AnyValue = + def copy(a: AnyRef): AnyValue = + AnyValue(a.asInstanceOf[Ref[T]].value.copy()) + + def eq(a: AnyRef, b: AnyRef): Boolean = + a.asInstanceOf[Ref[T]].value `eq` b.asInstanceOf[Ref[T]].value + + def hashInto(a: AnyRef, hasher: Hasher): Hasher = + a.asInstanceOf[Ref[T]].value.hashInto(hasher) + + new AnyValue(Ref(wrapped), copy, eq, hashInto) + +} + +given AnyValue is Value: + + extension (self: AnyValue) + def copy(): AnyValue = self.copy() + def eq(other: AnyValue): Boolean = self `eq` other + def hashInto(hasher: Hasher): Hasher = self.hashInto(hasher) + diff --git a/tests/pos/hylolib/AnyValueTests.scala b/tests/pos/hylolib/AnyValueTests.scala new file mode 100644 index 000000000000..96d3563f4f53 --- /dev/null +++ b/tests/pos/hylolib/AnyValueTests.scala @@ -0,0 +1,15 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +class AnyValueTests extends munit.FunSuite: + + test("eq"): + val a = AnyValue(1) + assert(a `eq` a) + assert(!(a `neq` a)) + + val b = AnyValue(2) + assert(!(a `eq` b)) + assert(a `neq` b) + diff --git a/tests/pos/hylolib/BitArray.scala b/tests/pos/hylolib/BitArray.scala new file mode 100644 index 000000000000..6ef406e5ad83 --- /dev/null +++ b/tests/pos/hylolib/BitArray.scala @@ -0,0 +1,362 @@ +package hylo + +import scala.collection.mutable + +/** An array of bit values represented as Booleans, where `true` indicates that the bit is on. */ +final class BitArray private ( + private var _bits: HyArray[Int], + private var _count: Int +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. 
*/ + def count: Int = + _count + + /** The number of bits that the array can contain before allocating new storage. */ + def capacity: Int = + _bits.capacity << 5 + + /** Reserves enough storage to store `n` elements in `this`. */ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): BitArray = + if (n == 0) { + this + } else { + val k = 1 + ((n - 1) >> 5) + if (assumeUniqueness) { + _bits = _bits.reserveCapacity(k, assumeUniqueness) + this + } else { + new BitArray(_bits.reserveCapacity(k), _count) + } + } + + /** Adds a new element at the end of the array. */ + def append(bit: Boolean, assumeUniqueness: Boolean = false): BitArray = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + val p = BitArray.Position(count) + if (p.bucket >= _bits.count) { + result._bits = _bits.append(if bit then 1 else 0) + } else { + result.setValue(bit, p) + } + result._count += 1 + result + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (BitArray, Option[Boolean]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + val bit = result.at(BitArray.Position(count)) + result._count -= 1 + (result, Some(bit)) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): BitArray = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + result._bits.removeAll(keepStorage, assumeUniqueness = true) + result._count = 0 + result + } else { + BitArray() + } + + /** Returns `true` iff all elements in `this` are `false`. 
*/ + def allFalse: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == 0 + } else if (_bits.at(i) != 0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns `true` iff all elements in `this` are `true`. */ + def allTrue: Boolean = + if (isEmpty) { + true + } else { + val k = (count - 1) >> 5 + def loop(i: Int): Boolean = + if (i == k) { + val m = (1 << (count & 31)) - 1 + (_bits.at(k) & m) == m + } else if (_bits.at(i) != ~0) { + false + } else { + loop(i + 1) + } + loop(0) + } + + /** Returns the bitwise OR of `this` and `other`. */ + def | (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ | _, assumeUniqueness = true) + + /** Returns the bitwise AND of `this` and `other`. */ + def & (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ & _, assumeUniqueness = true) + + /** Returns the bitwise XOR of `this` and `other`. */ + def ^ (other: BitArray): BitArray = + val result = copy() + result.applyBitwise(other, _ ^ _, assumeUniqueness = true) + + /** Assigns each bits in `this` to the result of `operation` applied on those bits and their + * corresponding bits in `other`. + * + * @requires + * `self.count == other.count`. 
+ */ + private def applyBitwise( + other: BitArray, + operation: (Int, Int) => Int, + assumeUniqueness: Boolean = false + ): BitArray = + require(this.count == other.count) + if (isEmpty) { + this + } else { + val result = if assumeUniqueness then this else copy() + var u = assumeUniqueness + val k = (count - 1) >> 5 + + for (i <- 0 until k) { + result._bits = result._bits.modifyAt( + i, (n) => operation(n, other._bits.at(n)), + assumeUniqueness = u + ) + u = true + } + val m = (1 << (count & 31)) - 1 + result._bits = result._bits.modifyAt( + k, (n) => operation(n & m, other._bits.at(k) & m), + assumeUniqueness = u + ) + + result + } + + /** Returns the position of `this`'s first element', or `endPosition` if `this` is empty. + * + * @complexity + * O(1). + */ + def startPosition: BitArray.Position = + BitArray.Position(0) + + /** Returns the "past the end" position in `this`, that is, the position immediately after the + * last element in `this`. + * + * @complexity + * O(1). + */ + def endPosition: BitArray.Position = + BitArray.Position(count) + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: BitArray.Position): BitArray.Position = + if (p.offsetInBucket == 63) { + BitArray.Position(p.bucket + 1, 0) + } else { + BitArray.Position(p.bucket, p.offsetInBucket + 1) + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: BitArray.Position): Boolean = + val m = 1 << p.offsetInBucket + val b: Int = _bits.at(p.bucket) + (b & m) == m + + /** Accesses the `i`-th element of `this`. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def atIndex(i: Int): Boolean = + at(BitArray.Position(i)) + + /** Calls `transform` on the element at `p` to update its value. 
+ * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def modifyAt( + p: BitArray.Position, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + val result = if assumeUniqueness then this else copy() + result.setValue(transform(result.at(p)), p) + result + + /** Calls `transform` on `i`-th element of `this` to update its value. + * + * @requires + * `i` is greater than or equal to 0, and less than `count`. + * @complexity + * O(1). + */ + def modifyAtIndex( + i: Int, + transform: (Boolean) => Boolean, + assumeUniqueness: Boolean = false + ): BitArray = + modifyAt(BitArray.Position(i), transform, assumeUniqueness) + + /** Returns an independent copy of `this`. */ + def copy(minimumCapacity: Int = 0): BitArray = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val k = 1 + ((minimumCapacity - 1) >> 5) + val newBits = _bits.copy(k) + new BitArray(newBits, _count) + } + + /** Returns a textual description of `this`. */ + override def toString: String = + _bits.toString + + /** Sets the value `b` for the bit at position `p`. + * + * @requires + * `this` is uniquely referenced and `p` is a valid position in `this`. + */ + private def setValue(b: Boolean, p: BitArray.Position): Unit = + val m = 1 << p.offsetInBucket + _bits = _bits.modifyAt( + p.bucket, + (e) => if b then e | m else e & ~m, + assumeUniqueness = true + ) + +} + +object BitArray { + + /** A position in a `BitArray`. + * + * @param bucket + * The bucket containing `this`. + * @param offsetInBucket + * The offset of `this` in its containing bucket. + */ + final class Position( + private[BitArray] val bucket: Int, + private[BitArray] val offsetInBucket: Int + ) { + + /** Creates a position from an index. 
*/ + private[BitArray] def this(index: Int) = + this(index >> 5, index & 31) + + /** Returns the index corresponding to this position. */ + private def index: Int = + (bucket >> 5) + offsetInBucket + + /** Returns a copy of `this`. */ + def copy(): Position = + new Position(bucket, offsetInBucket) + + /** Returns `true` iff `this` and `other` have an equivalent value. */ + def eq(other: Position): Boolean = + (this.bucket == other.bucket) && (this.offsetInBucket == other.offsetInBucket) + + /** Hashes the salient parts of `self` into `hasher`. */ + def hashInto(hasher: Hasher): Hasher = + hasher.combine(bucket) + hasher.combine(offsetInBucket) + + } + + /** Creates an array with the given `bits`. */ + def apply[T](bits: Boolean*): BitArray = + var result = new BitArray(HyArray[Int](), 0) + for (b <- bits) result = result.append(b, assumeUniqueness = true) + result + +} + +given BitArray.Position is Value: + + extension (self: BitArray.Position) + + def copy(): BitArray.Position = + self.copy() + + def eq(other: BitArray.Position): Boolean = + self.eq(other) + + def hashInto(hasher: Hasher): Hasher = + self.hashInto(hasher) + +given BitArray is Collection: + + type Element = Boolean + type Position = BitArray.Position + + extension (self: BitArray) + + override def count: Int = + self.count + + def startPosition: BitArray.Position = + self.startPosition + + def endPosition: BitArray.Position = + self.endPosition + + def positionAfter(p: BitArray.Position): BitArray.Position = + self.positionAfter(p) + + def at(p: BitArray.Position): Boolean = + self.at(p) + +given BitArray is StringConvertible: + extension (self: BitArray) + override def description: String = + var contents = mutable.StringBuilder() + self.forEach((e) => { contents += (if e then '1' else '0'); true }) + contents.mkString + diff --git a/tests/pos/hylolib/Collection.scala b/tests/pos/hylolib/Collection.scala new file mode 100644 index 000000000000..bef86a967e6e --- /dev/null +++ 
b/tests/pos/hylolib/Collection.scala @@ -0,0 +1,267 @@ +//> using options -language:experimental.modularity -source future +package hylo + +/** A collection of elements accessible by their position. */ +trait Collection: + type Self + + /** The type of the elements in the collection. */ + type Element: Value + + /** The type of a position in the collection. */ + type Position: Value + + extension (self: Self) + + /** Returns `true` iff `self` is empty. */ + def isEmpty: Boolean = + startPosition `eq` endPosition + + /** Returns the number of elements in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def count: Int = + val e = endPosition + def loop(p: Position, n: Int): Int = + if p `eq` e then n else loop(self.positionAfter(p), n + 1) + loop(startPosition, 0) + + /** Returns the position of `self`'s first element', or `endPosition` if `self` is empty. + * + * @complexity + * O(1) + */ + def startPosition: Position + + /** Returns the "past the end" position in `self`, that is, the position immediately after the + * last element in `self`. + * + * @complexity + * O(1). + */ + def endPosition: Position + + /** Returns the position immediately after `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def positionAfter(p: Position): Position + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Position): Element + + /** Returns `true` iff `i` precedes `j`. + * + * @requires + * `i` and j` are valid positions in `self`. + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def isBefore(i: Position, j: Position): Boolean = + val e = self.endPosition + if i `eq` e then false + else if j `eq` e then true + else + def recur(n: Position): Boolean = + if n `eq` j then true + else if n `eq` e then false + else recur(self.positionAfter(n)) + recur(self.positionAfter(i)) + + class Slice2(val base: Self, val bounds: Range[Position]): + + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: Position = + bounds.lowerBound + + def endPosition: Position = + bounds.upperBound + + def at(p: Position): Element = + base.at(p) + end Slice2 + +end Collection + +extension [Self: Collection](self: Self) + + /** Returns the first element of `self` along with a slice containing the suffix after this + * element, or `None` if `self` is empty. + * + * @complexity + * O(1) + */ + def headAndTail: Option[(Self.Element, Slice[Self])] = + if self.isEmpty then + None + else + val p = self.startPosition + val q = self.positionAfter(p) + val t = Slice(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + + def headAndTail2: Option[(Self.Element, Self.Slice2)] = + if self.isEmpty then + None + else + val p = self.startPosition + val q = self.positionAfter(p) + val t = Self.Slice2(self, Range(q, self.endPosition, (a, b) => (a `eq` b) || self.isBefore(a, b))) + Some((self.at(p), t)) + + /** Applies `combine` on `partialResult` and each element of `self`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def reduce[T](partialResult: T)(combine: (T, Self.Element) => T): T = + val e = self.endPosition + def loop(p: Self.Position, r: T): T = + if p `eq` e then r + else loop(self.positionAfter(p), combine(r, self.at(p))) + loop(self.startPosition, partialResult) + + /** Applies `action` on each element of `self`, in order, until `action` returns `false`, and + * returns `false` iff `action` did. 
+ * + * You can return `false` from `action` to emulate a `continue` statement as found in traditional + * imperative languages (e.g., C). + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def forEach(action: Self.Element => Boolean): Boolean = + val e = self.endPosition + def loop(p: Self.Position): Boolean = + if p `eq` e then true + else if !action(self.at(p)) then false + else loop(self.positionAfter(p)) + loop(self.startPosition) + + /** Returns a collection with the elements of `self` transformed by `transform`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def map[T: Value](transform: Self.Element => T): HyArray[T] = + self.reduce(HyArray[T]()): (r, e) => + r.append(transform(e), assumeUniqueness = true) + + /** Returns a collection with the elements of `self` satisfying `isInclude`, in order. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def filter(isIncluded: Self.Element => Boolean): HyArray[Self.Element] = + self.reduce(HyArray[Self.Element]()): (r, e) => + if isIncluded(e) then r.append(e, assumeUniqueness = true) else r + + /** Returns `true` if `self` contains an element satisfying `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def containsWhere(predicate: Self.Element => Boolean): Boolean = + self.firstPositionWhere(predicate) != None + + /** Returns `true` if all elements in `self` satisfy `predicate`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def allSatisfy(predicate: Self.Element => Boolean): Boolean = + self.firstPositionWhere(predicate) == None + + /** Returns the position of the first element of `self` satisfying `predicate`, or `None` if no + * such element exists. + * + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def firstPositionWhere(predicate: Self.Element => Boolean): Option[Self.Position] = + val e = self.endPosition + def loop(p: Self.Position): Option[Self.Position] = + if p `eq` e then None + else if predicate(self.at(p)) then Some(p) + else loop(self.positionAfter(p)) + loop(self.startPosition) + + /** Returns the minimum element in `self`, using `isLessThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement(isLessThan: (Self.Element, Self.Element) => Boolean): Option[Self.Element] = + self.leastElement(isLessThan) + + // NOTE: I can't find a reasonable way to call this method. + /** Returns the minimum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def minElement()(using Self.Element is Comparable): Option[Self.Element] = + self.minElement(isLessThan = _ `lt` _) + + /** Returns the maximum element in `self`, using `isGreaterThan` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement(isGreaterThan: (Self.Element, Self.Element) => Boolean): Option[Self.Element] = + self.leastElement(isGreaterThan) + + /** Returns the maximum element in `self`. + * + * @complexity + * O(n) where n is the number of elements in `self`. + */ + def maxElement()(using Self.Element is Comparable): Option[Self.Element] = + self.maxElement(isGreaterThan = _ `gt` _) + + /** Returns the maximum element in `self`, using `isOrderedBefore` to compare elements. + * + * @complexity + * O(n) where n is the number of elements in `self`. 
+ */ + def leastElement(isOrderedBefore: (Self.Element, Self.Element) => Boolean): Option[Self.Element] = + if self.isEmpty then + None + else + val e = self.endPosition + def loop(p: Self.Position, least: Self.Element): Self.Element = + if p `eq` e then + least + else + val x = self.at(p) + val y = if isOrderedBefore(x, least) then x else least + loop(self.positionAfter(p), y) + val b = self.startPosition + Some(loop(self.positionAfter(b), self.at(b))) + + /** Returns `true` if `self` contains the same elements as `other`, in the same order. */ + def elementsEqual[T: Collection { type Element = Self.Element } ](other: T): Boolean = + def loop(i: Self.Position, j: T.Position): Boolean = + if i `eq` self.endPosition then + j `eq` other.endPosition + else if j `eq` other.endPosition then + false + else if self.at(i) `neq` other.at(j)then + false + else + loop(self.positionAfter(i), other.positionAfter(j)) + loop(self.startPosition, other.startPosition) +end extension diff --git a/tests/pos/hylolib/CollectionTests.scala b/tests/pos/hylolib/CollectionTests.scala new file mode 100644 index 000000000000..d884790f64d7 --- /dev/null +++ b/tests/pos/hylolib/CollectionTests.scala @@ -0,0 +1,67 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +class CollectionTests extends munit.FunSuite: + + test("isEmpty"): + val empty = AnyCollection(HyArray[Int]()) + assert(empty.isEmpty) + + val nonEmpty = AnyCollection(HyArray[Int](1, 2)) + assert(!nonEmpty.isEmpty) + + test("count"): + val a = AnyCollection(HyArray[Int](1, 2)) + assertEquals(a.count, 2) + + test("isBefore"): + val empty = AnyCollection(HyArray[Int]()) + assert(!empty.isBefore(empty.startPosition, empty.endPosition)) + + val nonEmpty = AnyCollection(HyArray[Int](1, 2)) + val p0 = nonEmpty.startPosition + val p1 = nonEmpty.positionAfter(p0) + val p2 = nonEmpty.positionAfter(p1) + assert(nonEmpty.isBefore(p0, nonEmpty.endPosition)) + assert(nonEmpty.isBefore(p1, 
nonEmpty.endPosition)) + assert(!nonEmpty.isBefore(p2, nonEmpty.endPosition)) + + test("headAndTail"): + val empty = AnyCollection(HyArray[Int]()) + assertEquals(empty.headAndTail, None) + + val one = AnyCollection(HyArray[Int](1)) + val Some((h0, t0)) = one.headAndTail: @unchecked + assert(h0 eq 1) + assert(t0.isEmpty) + + val two = AnyCollection(HyArray[Int](1, 2)) + val Some((h1, t1)) = two.headAndTail: @unchecked + assertEquals(h1, 1) + assertEquals(t1.count, 1) + + test("reduce"): + val empty = AnyCollection(HyArray[Int]()) + assertEquals(empty.reduce(0)((s, x) => s + x), 0) + + val nonEmpty = AnyCollection(HyArray[Int](1, 2, 3)) + assertEquals(nonEmpty.reduce(0)((s, x) => s + x), 6) + + test("forEach"): + val empty = AnyCollection(HyArray[Int]()) + assert(empty.forEach((e) => false)) + + val nonEmpty = AnyCollection(HyArray[Int](1, 2, 3)) + var s = 0 + assert(nonEmpty.forEach((e) => { s += e; true })) + assertEquals(s, 6) + + s = 0 + assert(!nonEmpty.forEach((e) => { s += e; false })) + assertEquals(s, 1) + + test("elementsEqual"): + val a = HyArray(1, 2) + assert(a.elementsEqual(a)) +end CollectionTests diff --git a/tests/pos/hylolib/CoreTraits.scala b/tests/pos/hylolib/CoreTraits.scala new file mode 100644 index 000000000000..f4b3699b430e --- /dev/null +++ b/tests/pos/hylolib/CoreTraits.scala @@ -0,0 +1,56 @@ +package hylo + +/** A type whose instance can be treated as independent values. + * + * The data structure of and algorithms of Hylo's standard library operate "notional values" rather + * than arbitrary references. This trait defines the basis operations of all values. + */ +trait Value: + type Self + + extension (self: Self) { + + /** Returns a copy of `self`. */ + def copy(): Self + + /** Returns `true` iff `self` and `other` have an equivalent value. */ + def eq(other: Self): Boolean + + def neq(other: Self): Boolean = !self.eq(other) + + /** Hashes the salient parts of `self` into `hasher`. 
*/ + def hashInto(hasher: Hasher): Hasher + + } + +// ---------------------------------------------------------------------------- +// Comparable +// ---------------------------------------------------------------------------- + +trait Comparable extends Value { + + extension (self: Self) { + + /** Returns `true` iff `self` is ordered before `other`. */ + def lt(other: Self): Boolean + + /** Returns `true` iff `self` is ordered after `other`. */ + def gt(other: Self): Boolean = other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered before `other`. */ + def le(other: Self): Boolean = !other.lt(self) + + /** Returns `true` iff `self` is equal to or ordered after `other`. */ + def ge(other: Self): Boolean = !self.lt(other) + + } + +} + +/** Returns the lesser of `x` and `y`. */ +def min[T: Comparable](x: T, y: T): T = + if y.lt(x) then y else x + +/** Returns the greater of `x` and `y`. */ +def max[T: Comparable](x: T, y: T): T = + if x.lt(y) then y else x diff --git a/tests/pos/hylolib/Hasher.scala b/tests/pos/hylolib/Hasher.scala new file mode 100644 index 000000000000..ca45550ed002 --- /dev/null +++ b/tests/pos/hylolib/Hasher.scala @@ -0,0 +1,39 @@ +//> using options -language:experimental.modularity -source future +package hylo + +import scala.util.Random + +/** A universal hash function. */ +final class Hasher private (private val hash: Int = Hasher.offsetBasis) { + + /** Returns the computed hash value. */ + def finalizeHash(): Int = + hash + + /** Adds `n` to the computed hash value. */ + def combine(n: Int): Hasher = + var h = hash + h = h ^ n + h = h * Hasher.prime + new Hasher(h) +} + +object Hasher { + + private val offsetBasis = 0x811c9dc5 + private val prime = 0x01000193 + + /** A random seed ensuring different hashes across multiple runs. */ + private lazy val seed = scala.util.Random.nextInt() + + /** Creates an instance with the given `seed`. 
*/ + def apply(): Hasher = + val h = new Hasher() + h.combine(seed) + h + + /** Returns the hash of `v`. */ + def hash[T: Value](v: T): Int = + v.hashInto(Hasher()).finalizeHash() + +} diff --git a/tests/pos/hylolib/HyArray.scala b/tests/pos/hylolib/HyArray.scala new file mode 100644 index 000000000000..de5e83d3b1a3 --- /dev/null +++ b/tests/pos/hylolib/HyArray.scala @@ -0,0 +1,202 @@ +//> using options -language:experimental.modularity -source future +package hylo + +import java.util.Arrays +import scala.collection.mutable + +/** An ordered, random-access collection. */ +final class HyArray[Element: Value as elementIsCValue]( + private var _storage: scala.Array[AnyRef | Null] | Null, + private var _count: Int // NOTE: where do I document private fields +) { + + // NOTE: The fact that we need Array[AnyRef] is diappointing and difficult to discover + // The compiler error sent me on a wild goose chase with ClassTag. + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + _count == 0 + + /** Returns the number of elements in `this`. */ + def count: Int = + _count + + /** Returns the number of elements that `this` can contain before allocating new storage. */ + def capacity: Int = + if _storage == null then 0 else _storage.length + + /** Reserves enough storage to store `n` elements in `this`. */ + def reserveCapacity(n: Int, assumeUniqueness: Boolean = false): HyArray[Element] = + if (n <= capacity) { + this + } else { + var newCapacity = max(1, capacity) + while (newCapacity < n) { newCapacity = newCapacity << 1 } + + val newStorage = new scala.Array[AnyRef | Null](newCapacity) + val s = _storage.asInstanceOf[scala.Array[AnyRef | Null]] + var i = 0 + while (i < count) { + newStorage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + + if (assumeUniqueness) { + _storage = newStorage + this + } else { + new HyArray(newStorage, count) + } + } + + /** Adds a new element at the end of the array. 
*/ + def append(source: Element, assumeUniqueness: Boolean = false): HyArray[Element] = + val result = if assumeUniqueness && (count < capacity) then this else copy(count + 1) + result._storage(count) = source.asInstanceOf[AnyRef] + result._count += 1 + result + + /** Adds the contents of `source` at the end of the array. */ + def appendContents[C: Collection { type Element = HyArray.this.Element }]( + source: C, assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if (assumeUniqueness) { this } else { copy(count + source.count) } + source.reduce(result): (r, e) => + r.append(e, assumeUniqueness = true) + + /** Removes and returns the last element, or returns `None` if the array is empty. */ + def popLast(assumeUniqueness: Boolean = false): (HyArray[Element], Option[Element]) = + if (isEmpty) { + (this, None) + } else { + val result = if assumeUniqueness then this else copy() + result._count -= 1 + (result, Some(result._storage(result._count).asInstanceOf[Element])) + } + + /** Removes all elements in the array, keeping allocated storage iff `keepStorage` is true. */ + def removeAll( + keepStorage: Boolean = false, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + if (isEmpty) { + this + } else if (keepStorage) { + val result = if assumeUniqueness then this else copy() + Arrays.fill(result._storage, null) + result._count = 0 + result + } else { + HyArray() + } + + /** Accesses the element at `p`. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). + */ + def at(p: Int): Element = + _storage(p).asInstanceOf[Element] + + /** Calls `transform` on the element at `p` to update its value. + * + * @requires + * `p` is a valid position in `self` different from `endPosition`. + * @complexity + * O(1). 
+ */ + def modifyAt( + p: Int, + transform: (Element) => Element, + assumeUniqueness: Boolean = false + ): HyArray[Element] = + val result = if assumeUniqueness then this else copy() + result._storage(p) = transform(at(p)).asInstanceOf[AnyRef] + result + + /** Returns a textual description of `this`. */ + override def toString: String = + var s = "[" + var i = 0 + while (i < count) { + if (i > 0) { s += ", " } + s += s"${at(i)}" + i += 1 + } + s + "]" + + /** Returns an independent copy of `this`, capable of storing `minimumCapacity` elements before + * allocating new storage. + */ + def copy(minimumCapacity: Int = 0): HyArray[Element] = + if (minimumCapacity > capacity) { + // If the requested capacity on the copy is greater than what we have, `reserveCapacity` will + // create an independent value. + reserveCapacity(minimumCapacity) + } else { + val clone = HyArray[Element]().reserveCapacity(max(minimumCapacity, count)) + var i = 0 + while (i < count) { + clone._storage(i) = _storage(i).asInstanceOf[Element].copy().asInstanceOf[AnyRef] + i += 1 + } + clone._count = count + clone + } + +} + +object HyArray { + + /** Creates an array with the given `elements`. */ + def apply[T: Value](elements: T*): HyArray[T] = + var a = new HyArray[T](null, 0) + for (e <- elements) a = a.append(e, assumeUniqueness = true) + a + +} + +given [T: Value] => HyArray[T] is Value: + + extension (self: HyArray[T]) + + def copy(): HyArray[T] = + self.copy() + + def eq(other: HyArray[T]): Boolean = + self.elementsEqual(other) + + def hashInto(hasher: Hasher): Hasher = + self.reduce(hasher)((h, e) => e.hashInto(h)) + +given [T: Value] => HyArray[T] is Collection: + + type Element = T + type Position = Int + + extension (self: HyArray[T]) + + // NOTE: Having to explicitly override means that primary declaration can't automatically + // specialize trait requirements. 
+ override def isEmpty: Boolean = self.isEmpty + + override def count: Int = self.count + + def startPosition = 0 + + def endPosition = self.count + + def positionAfter(p: Int) = p + 1 + + def at(p: Int) = self.at(p) + +given [T: {Value, StringConvertible}] => HyArray[T] is StringConvertible: + extension (self: HyArray[T]) + override def description: String = + val contents = mutable.StringBuilder() + self.forEach: e => + contents ++= e.description + true + s"[${contents.mkString(", ")}]" diff --git a/tests/pos/hylolib/HyArrayTests.scala b/tests/pos/hylolib/HyArrayTests.scala new file mode 100644 index 000000000000..0de65603d0c7 --- /dev/null +++ b/tests/pos/hylolib/HyArrayTests.scala @@ -0,0 +1,17 @@ +import hylo.* +import hylo.given + +class HyArrayTests extends munit.FunSuite: + + test("reserveCapacity"): + var a = HyArray[Int]() + a = a.append(1) + a = a.append(2) + + a = a.reserveCapacity(10) + assert(a.capacity >= 10) + assertEquals(a.count, 2) + assertEquals(a.at(0), 1) + assertEquals(a.at(1), 2) + +end HyArrayTests diff --git a/tests/pos/hylolib/Integers.scala b/tests/pos/hylolib/Integers.scala new file mode 100644 index 000000000000..f7334ae40786 --- /dev/null +++ b/tests/pos/hylolib/Integers.scala @@ -0,0 +1,46 @@ +package hylo + +given Boolean is Value: + + extension (self: Boolean) + + def copy(): Boolean = + // Note: Scala's `Boolean` has value semantics already. + self + + def eq(other: Boolean): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(if self then 1 else 0) + +given Int is Value: + + extension (self: Int) + + def copy(): Int = + // Note: Scala's `Int` has value semantics already. 
+ self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + +given Int is Comparable: + + extension (self: Int) + + def copy(): Int = + self + + def eq(other: Int): Boolean = + self == other + + def hashInto(hasher: Hasher): Hasher = + hasher.combine(self) + + def lt(other: Int): Boolean = self < other + +given Int is StringConvertible diff --git a/tests/pos/hylolib/IntegersTests.scala b/tests/pos/hylolib/IntegersTests.scala new file mode 100644 index 000000000000..74dedf30d83e --- /dev/null +++ b/tests/pos/hylolib/IntegersTests.scala @@ -0,0 +1,14 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +class IntegersTests extends munit.FunSuite: + + test("Int.hashInto"): + val x = Hasher.hash(42) + val y = Hasher.hash(42) + assertEquals(x, y) + + val z = Hasher.hash(1337) + assertNotEquals(x, z) + diff --git a/tests/pos/hylolib/Range.scala b/tests/pos/hylolib/Range.scala new file mode 100644 index 000000000000..b0f50dd55c8c --- /dev/null +++ b/tests/pos/hylolib/Range.scala @@ -0,0 +1,37 @@ +package hylo + +/** A half-open interval from a lower bound up to, but not including, an uppor bound. */ +final class Range[Bound] private (val lowerBound: Bound, val upperBound: Bound) { + + /** Returns a textual description of `this`. */ + override def toString: String = + s"[${lowerBound}, ${upperBound})" + +} + +object Range { + + /** Creates a half-open interval [`lowerBound`, `upperBound`), using `isLessThanOrEqual` to ensure + * that the bounds are well-formed. + * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound]( + lowerBound: Bound, + upperBound: Bound, + isLessThanOrEqual: (Bound, Bound) => Boolean + ) = + require(isLessThanOrEqual(lowerBound, upperBound)) + new Range(lowerBound, upperBound) + + /** Creates a half-open interval [`lowerBound`, `upperBound`). 
+ * + * @requires + * `lowerBound` is lesser than or equal to `upperBound`. + */ + def apply[Bound: Comparable](lowerBound: Bound, upperBound: Bound) = + require(lowerBound `le` upperBound) + new Range(lowerBound, upperBound) + +} diff --git a/tests/pos/hylolib/Slice.scala b/tests/pos/hylolib/Slice.scala new file mode 100644 index 000000000000..d54f855b1041 --- /dev/null +++ b/tests/pos/hylolib/Slice.scala @@ -0,0 +1,63 @@ +package hylo + +/** A view into a collection. */ +final class Slice[Base: Collection]( + val base: Base, + val bounds: Range[Base.Position] +) { + + /** Returns `true` iff `this` is empty. */ + def isEmpty: Boolean = + bounds.lowerBound.eq(bounds.upperBound) + + def startPosition: Base.Position = + bounds.lowerBound + + def endPosition: Base.Position = + bounds.upperBound + + def positionAfter(p: Base.Position): Base.Position = + base.positionAfter(p) + + def at(p: Base.Position): Base.Element = + base.at(p) + +} + +given [C: Collection] => Slice[C] is Collection: + + type Element = C.Element + type Position = C.Position + + extension (self: Slice[C]) + + def startPosition = self.bounds.lowerBound.asInstanceOf[Position] + // This is actually unsafe. We have: + // self.bounds: Range(Slice[C].Base.Position) + // But the _value_ of Slice[C].Base is not necssarily this given, even + // though it is true that `type Slice[C].Base = C`. There might be multiple + // implementations of `Slice[C] is Collection` that define different `Position` + // types. So we cannot conclude that `Slice[C].Base.Position = this.Position`. + // To make this safe, we'd need some form of coherence, where we ensure that + // there is only one way to implement `Slice is Collection`. + // + // As an alternativem we can make Slice dependent on the original Collection + // _instance_ instead of the original Collection _type_. This design is + // realized by the Slice2 definitions. It works without casts. 
+ + def endPosition = self.bounds.upperBound.asInstanceOf[Position] + + def positionAfter(p: Position) = self.base.positionAfter(p) + + def at(p: Position) = self.base.at(p) + +given [C: Collection] => C.Slice2 is Collection: + type Element = C.Element + type Position = C.Position + + extension (self: C.Slice2) + + def startPosition = self.bounds.lowerBound + def endPosition = self.bounds.upperBound + def positionAfter(p: Position) = self.base.positionAfter(p) + def at(p: Position) = self.base.at(p) diff --git a/tests/pos/hylolib/StringConvertible.scala b/tests/pos/hylolib/StringConvertible.scala new file mode 100644 index 000000000000..cf901d9a3313 --- /dev/null +++ b/tests/pos/hylolib/StringConvertible.scala @@ -0,0 +1,9 @@ +package hylo + +/** A type whose instances can be described by a character string. */ +trait StringConvertible: + type Self + + /** Returns a textual description of `self`. */ + extension (self: Self) + def description: String = self.toString diff --git a/tests/pos/hylolib/Test.scala b/tests/pos/hylolib/Test.scala new file mode 100644 index 000000000000..9e8d6181affd --- /dev/null +++ b/tests/pos/hylolib/Test.scala @@ -0,0 +1,16 @@ +//> using options -language:experimental.modularity -source future +import hylo.* +import hylo.given + +object munit: + open class FunSuite: + def test(name: String)(op: => Unit): Unit = op + def assertEquals[T](x: T, y: T) = assert(x == y) + def assertNotEquals[T](x: T, y: T) = assert(x != y) + +@main def Test = + CollectionTests() + AnyValueTests() + HyArrayTests() + IntegersTests() + println("done") diff --git a/tests/pos/i10693.scala b/tests/pos/i10693.scala new file mode 100644 index 000000000000..122984484658 --- /dev/null +++ b/tests/pos/i10693.scala @@ -0,0 +1,8 @@ +def test[A, B](a: A, b: B): A | B = a +val v0 = test("string", 1) +val v1 = test(1, "string") +val v2 = test(v0, v1) +val v3 = test(v1, v0) +val v4 = test(v2, v3) +val v5 = test(v3, v2) +val v6 = test(v4, v5) diff --git 
a/tests/pos/i10929-new-syntax.scala b/tests/pos/i10929-new-syntax.scala new file mode 100644 index 000000000000..11c5e9313d4c --- /dev/null +++ b/tests/pos/i10929-new-syntax.scala @@ -0,0 +1,22 @@ +//> using options -language:experimental.modularity -source future +trait TupleOf[+A]: + type Self + type Mapped[+A] <: Tuple + def map[B](x: Self)(f: A => B): Mapped[B] + +object TupleOf: + + given EmptyTuple is TupleOf[Nothing]: + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple : TupleOf[A]] => A *: Rest is TupleOf[A]: + type Mapped[+A] = A *: Rest.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + (f(x.head) *: Rest.map(x.tail)(f)) + +def foo[T: TupleOf[Int]](xs: T): T.Mapped[Int] = T.map(xs)(_ + 1) + +@main def test = + foo(EmptyTuple): EmptyTuple // ok + foo(1 *: EmptyTuple): Int *: EmptyTuple // now also ok diff --git a/tests/pos/i10929.scala b/tests/pos/i10929.scala new file mode 100644 index 000000000000..e916e4547e59 --- /dev/null +++ b/tests/pos/i10929.scala @@ -0,0 +1,21 @@ +//> using options -language:experimental.modularity -source future +infix abstract class TupleOf[T, +A]: + type Mapped[+A] <: Tuple + def map[B](x: T)(f: A => B): Mapped[B] + +object TupleOf: + + given TupleOf[EmptyTuple, Nothing] with + type Mapped[+A] = EmptyTuple + def map[B](x: EmptyTuple)(f: Nothing => B): Mapped[B] = x + + given [A, Rest <: Tuple](using tracked val tup: Rest TupleOf A): TupleOf[A *: Rest, A] with + type Mapped[+A] = A *: tup.Mapped[A] + def map[B](x: A *: Rest)(f: A => B): Mapped[B] = + (f(x.head) *: tup.map(x.tail)(f)) + +def foo[T](xs: T)(using tup: T TupleOf Int): tup.Mapped[Int] = tup.map(xs)(_ + 1) + +@main def test = + foo(EmptyTuple): EmptyTuple // ok + foo(1 *: EmptyTuple): Int *: EmptyTuple // now also ok \ No newline at end of file diff --git a/tests/pos/i11022.scala b/tests/pos/i11022.scala index aa211426387d..6cccea0ac77c 100644 --- a/tests/pos/i11022.scala +++ 
b/tests/pos/i11022.scala @@ -1,3 +1,5 @@ //> using options -Werror -deprecation @deprecated("no CaseClass") case class CaseClass(rgb: Int) + +case class K(@deprecated("don't use k, ok?","0.1") k: Int) diff --git a/tests/neg/i12591/Inner.scala b/tests/pos/i12591/Inner.scala similarity index 64% rename from tests/neg/i12591/Inner.scala rename to tests/pos/i12591/Inner.scala index aae9bd5b9234..2f8018c4d824 100644 --- a/tests/neg/i12591/Inner.scala +++ b/tests/pos/i12591/Inner.scala @@ -9,5 +9,6 @@ object Foo: import Foo.TC //Adding import Foo.Bar resolves the issue -val badSummon = summon[TC[Bar]] // error here +val badSummon = summon[TC[Bar]] + // was an ambiguous error, now OK, since the two references are the same diff --git a/tests/neg/i12591/Outer.scala b/tests/pos/i12591/Outer.scala similarity index 100% rename from tests/neg/i12591/Outer.scala rename to tests/pos/i12591/Outer.scala diff --git a/tests/pos/i13044.scala b/tests/pos/i13044.scala index 4c9b8b914062..36299d9e8366 100644 --- a/tests/pos/i13044.scala +++ b/tests/pos/i13044.scala @@ -1,4 +1,4 @@ -//> using options -Xmax-inlines:33 +//> using options -Xmax-inlines:35 import scala.deriving.Mirror import scala.compiletime._ diff --git a/tests/pos/i13091.scala b/tests/pos/i13091.scala deleted file mode 100644 index fa255cd6c08f..000000000000 --- a/tests/pos/i13091.scala +++ /dev/null @@ -1,3 +0,0 @@ -import annotation.experimental -@experimental class Foo -val foo = new Foo diff --git a/tests/pos/i13580.scala b/tests/pos/i13580.scala new file mode 100644 index 000000000000..c3c491a19dbe --- /dev/null +++ b/tests/pos/i13580.scala @@ -0,0 +1,13 @@ +//> using options -language:experimental.modularity -source future +trait IntWidth: + type Out +given IntWidth: + type Out = 155 + +trait IntCandidate: + type Out +given (using tracked val w: IntWidth) => IntCandidate: + type Out = w.Out + +val x = summon[IntCandidate] +val xx = summon[x.Out =:= 155] diff --git a/tests/pos/i13848.scala b/tests/pos/i13848.scala 
index 266f3edcf7ae..36fac091023a 100644 --- a/tests/pos/i13848.scala +++ b/tests/pos/i13848.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import annotation.experimental diff --git a/tests/pos/i15133a.scala b/tests/pos/i15133a.scala index 1aff3a5c1cfc..002eec13c05e 100644 --- a/tests/pos/i15133a.scala +++ b/tests/pos/i15133a.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/pos/i15133b.scala b/tests/pos/i15133b.scala index 4c235d37c698..5b1fd62981ba 100644 --- a/tests/pos/i15133b.scala +++ b/tests/pos/i15133b.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/pos/i15183/test_2.scala b/tests/pos/i15183/test_2.scala index 2069d5637734..eeb3848449be 100644 --- a/tests/pos/i15183/test_2.scala +++ b/tests/pos/i15183/test_2.scala @@ -1,4 +1,8 @@ // Fails in each cases below +import Decoder.{derived as _, given} +// NOTE Decoder.derived is already in the implicit scope +// but the others require an import as they depend on match type reduction + enum Env derives Decoder: case Local,Sit,Prod diff --git a/tests/pos/i15264.scala b/tests/pos/i15264.scala index 05992df61b94..18ca92df6cb1 100644 --- a/tests/pos/i15264.scala +++ b/tests/pos/i15264.scala @@ -1,3 +1,4 @@ +import language.`3.7` object priority: // lower number = higher priority class Prio0 extends Prio1 @@ -30,6 +31,7 @@ object repro: // if you import these don't import from 'context' above object qcontext: // base defs, like what you would get from cats + given ga: A[Int] = new B[Int] // added so that we don't get an ambiguity in test2 given gb: B[Int] = new B[Int] given gc: C[Int] = new C[Int] @@ -45,9 +47,9 @@ object test1: // these will work val a = summon[A[Int]] + object test2: import repro.* import repro.qcontext.given - // this one will fail as ambiguous - prios aren't having an effect - val a = summon[A[Q[Int]]] \ No newline at end of file + val a = 
summon[A[Q[Int]]] diff --git a/tests/pos/i16091.scala b/tests/pos/i16091.scala index 349e16e6d7e6..37afe19ab065 100644 --- a/tests/pos/i16091.scala +++ b/tests/pos/i16091.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + import scala.annotation.experimental diff --git a/tests/pos/i16208.scala b/tests/pos/i16208.scala new file mode 100644 index 000000000000..18455f2e698c --- /dev/null +++ b/tests/pos/i16208.scala @@ -0,0 +1,12 @@ + +class Ann(x: Any) extends annotation.Annotation +object Message: + implicit def toNoExplanation(str: String): Message @Ann(str) = ??? +class Message + +object report: + def error(x: Message): Unit = ??? + +def test = + report.error("a") // works + report.error("a".stripMargin) // was an error diff --git a/tests/pos/i16777.scala b/tests/pos/i16777.scala index 302ace3ea9aa..4b7399ac053c 100644 --- a/tests/pos/i16777.scala +++ b/tests/pos/i16777.scala @@ -1,4 +1,4 @@ -//> using options -Ykind-projector:underscores +//> using options -Xkind-projector:underscores sealed abstract class Free[+S[_, _], +E, +A] { @inline final def flatMap[S1[e, a] >: S[e, a], B, E1 >: E](fun: A => Free[S1, E1, B]): Free[S1, E1, B] = Free.FlatMapped[S1, E, E1, A, B](this, fun) diff --git a/tests/pos/i17245.scala b/tests/pos/i17245.scala index 3b5b3a74108d..8609a8293670 100644 --- a/tests/pos/i17245.scala +++ b/tests/pos/i17245.scala @@ -14,7 +14,7 @@ type OnChannel = Channel => Any val case1: OnChannel = Mockito.mock[OnChannel] val case2: OnChannel = Mockito.mock val case3 = Mockito.mock[OnChannel] - val case4: OnChannel = Mockito.mock[OnChannel](summon[ClassTag[OnChannel]]) + val case4: OnChannel = Mockito.mock[OnChannel](using summon[ClassTag[OnChannel]]) // not a regressive case, but an added improvement with the fix for the above val case5: Channel => Any = Mockito.mock[Channel => Any] diff --git a/tests/pos/i17314.scala b/tests/pos/i17314.scala index 2d0c409ced10..8ece4a3bd7ac 100644 --- a/tests/pos/i17314.scala +++ b/tests/pos/i17314.scala @@ 
-13,8 +13,9 @@ object circelike { inline final def derived[A](using conf: Configuration)(using inline mirror: Mirror.Of[A] ): ConfiguredCodec[A] = - new ConfiguredCodec[A]: + class InlinedConfiguredCodec extends ConfiguredCodec[A]: val codec = summonInline[Codec[URI]] // simplification + new InlinedConfiguredCodec } object foo { diff --git a/tests/pos-special/fatal-warnings/i17735.scala b/tests/pos/i17735.scala similarity index 90% rename from tests/pos-special/fatal-warnings/i17735.scala rename to tests/pos/i17735.scala index f171d4a028f7..17fb31010a8a 100644 --- a/tests/pos-special/fatal-warnings/i17735.scala +++ b/tests/pos/i17735.scala @@ -1,4 +1,4 @@ -//> using options -Wvalue-discard +//> using options -Xfatal-warnings -Wvalue-discard import scala.collection.mutable import scala.annotation.nowarn @@ -21,4 +21,4 @@ object Foo: // here @nowarn is effective without -Wfatal-warnings (i.e. no warning) // But with -Wfatal-warnings we get an error messageBuilder.append("\n").append(s): @nowarn("msg=discarded non-Unit value*") - messageBuilder.result() \ No newline at end of file + messageBuilder.result() diff --git a/tests/pos-special/fatal-warnings/i17735a.scala b/tests/pos/i17735a.scala similarity index 90% rename from tests/pos-special/fatal-warnings/i17735a.scala rename to tests/pos/i17735a.scala index fe0ea7e6bc45..b4d91f8d25fc 100644 --- a/tests/pos-special/fatal-warnings/i17735a.scala +++ b/tests/pos/i17735a.scala @@ -1,4 +1,4 @@ -//> using options -Wvalue-discard -Wconf:msg=non-Unit:s +//> using options -Xfatal-warnings -Wvalue-discard -Wconf:msg=non-Unit:s import scala.collection.mutable import scala.annotation.nowarn diff --git a/tests/pos-special/fatal-warnings/i17741.scala b/tests/pos/i17741.scala similarity index 90% rename from tests/pos-special/fatal-warnings/i17741.scala rename to tests/pos/i17741.scala index 7171aab83e4b..aa32e5a573d4 100644 --- a/tests/pos-special/fatal-warnings/i17741.scala +++ b/tests/pos/i17741.scala @@ -1,4 +1,4 @@ -//> using 
options -Wnonunit-statement +//> using options -Xfatal-warnings -Wnonunit-statement class Node() class Elem( @@ -29,4 +29,4 @@ object Main { ) } }: @annotation.nowarn() -} \ No newline at end of file +} diff --git a/tests/pos/i18151a.scala b/tests/pos/i18151a.scala new file mode 100644 index 000000000000..6be2c5c23a30 --- /dev/null +++ b/tests/pos/i18151a.scala @@ -0,0 +1,10 @@ +case class El[A](attr: String, child: String) + +transparent inline def inlineTest(): String = + inline { + val el: El[Any] = El("1", "2") + El[Any](el.attr, el.child) + } match + case El(attr, child) => attr + child + +def test: Unit = inlineTest() diff --git a/tests/pos/i18151b.scala b/tests/pos/i18151b.scala new file mode 100644 index 000000000000..01d2aaee972a --- /dev/null +++ b/tests/pos/i18151b.scala @@ -0,0 +1,10 @@ +case class El[A](val attr: String, val child: String) + +transparent inline def tmplStr(inline t: El[Any]): String = + inline t match + case El(attr, child) => attr + child + +def test: Unit = tmplStr { + val el = El("1", "2") + El[Any](el.attr, null) +} diff --git a/tests/pos/i18151c.scala b/tests/pos/i18151c.scala new file mode 100644 index 000000000000..a46ec9dd927c --- /dev/null +++ b/tests/pos/i18151c.scala @@ -0,0 +1,39 @@ +import scala.compiletime.* +import scala.compiletime.ops.any.ToString + +trait Attr +case object EmptyAttr extends Attr +transparent inline def attrStr(inline a: Attr): String = inline a match + case EmptyAttr => "" +transparent inline def attrStrHelper(inline a: Attr): String = inline a match + case EmptyAttr => "" +trait TmplNode +case class El[T <: String & Singleton, A <: Attr, C <: Tmpl](val tag: T, val attr: A, val child: C) + extends TmplNode +case class Sib[L <: Tmpl, R <: Tmpl](left: L, right: R) extends TmplNode +type TmplSingleton = String | Char | Int | Long | Float | Double | Boolean +type Tmpl = TmplNode | Unit | (TmplSingleton & Singleton) +transparent inline def tmplStr(inline t: Tmpl): String = inline t match + case El(tag, 
attr, child) => inline attrStr(attr) match + case "" => "<" + tag + ">" + tmplStr(child) + case x => "<" + tag + " " + x + ">" + tmplStr(child) + case Sib(left, right) => inline tmplStr(right) match + case "" => tmplStr(left) + case right => tmplStrHelper(left) + right + case () => "" + case s: (t & TmplSingleton) => constValue[ToString[t]] +transparent inline def tmplStrHelper(inline t: Tmpl): String = inline t match + case El(tag, attr, child) => inline (tmplStr(child), attrStr(attr)) match + case ("", "") => "<" + tag + "/>" + case (child, "") => "<" + tag + ">" + child + "" + case ("", attr) => "<" + tag + " " + attr + "/>" + case (child, attr) => "<" + tag + " " + attr + ">" + child + "" + case Sib(left, right) => tmplStrHelper(left) + tmplStrHelper(right) + case () => "" + case s: (t & TmplSingleton) => constValue[ToString[t]] +transparent inline def el(tag: String & Singleton): El[tag.type, EmptyAttr.type, Unit] = + El(tag, EmptyAttr, ()) +extension [T <: String & Singleton, A <: Attr, C <: Tmpl](el: El[T, A, C]) + transparent inline def >>[C2 <: Tmpl](child: C2) = El(el.tag, el.attr, el.child ++ child) + +extension [L <: Tmpl](left: L) transparent inline def ++[R <: Tmpl](right: R) = Sib(left, right) diff --git a/tests/pos/i18361.scala b/tests/pos/i18351.scala similarity index 100% rename from tests/pos/i18361.scala rename to tests/pos/i18351.scala diff --git a/tests/pos/i18488.scala b/tests/pos/i18488.scala deleted file mode 100644 index c225a2c20711..000000000000 --- a/tests/pos/i18488.scala +++ /dev/null @@ -1,15 +0,0 @@ -trait AbstractTable[T] - -trait Query[E, U] - -class TableQuery[E <: AbstractTable[?]] extends Query[E, Extract[E]] - -type Extract[E] = E match - case AbstractTable[t] => t - -trait BaseCrudRepository[E[T[_]]]: - - type EntityTable <: AbstractTable[E[Option]] - - def filterById: Query[EntityTable, Extract[EntityTable]] = - new TableQuery[EntityTable] diff --git a/tests/pos/i18555.scala b/tests/pos/i18555.scala new file mode 100644 
index 000000000000..84198409370e --- /dev/null +++ b/tests/pos/i18555.scala @@ -0,0 +1,14 @@ +trait GenericCollectionWithCommands { + self: PackSupport => + + def bar(foo: Int = 1): Any = ??? + def bar(writer: GenericCollectionWithCommands.this.pack.Writer[Any]): Any = ??? +} + +trait PackSupport { + val pack: SerializationPack +} + +trait SerializationPack { + type Writer[A] +} \ No newline at end of file diff --git a/tests/pos/i19434.scala b/tests/pos/i19434.scala new file mode 100644 index 000000000000..e8595fa252d0 --- /dev/null +++ b/tests/pos/i19434.scala @@ -0,0 +1,11 @@ + +object Test: + + object Named: + opaque type Named[name <: String & Singleton, A] >: A = A + + type DropNames[T <: Tuple] = T match + case Named.Named[_, x] *: xs => x *: DropNames[xs] + case _ => T + + def f[T <: Tuple]: DropNames[T] = ??? \ No newline at end of file diff --git a/tests/pos/i19706.scala b/tests/pos/i19706.scala new file mode 100644 index 000000000000..ba66b3baf5c4 --- /dev/null +++ b/tests/pos/i19706.scala @@ -0,0 +1,29 @@ + +import scala.compiletime.ops.string.{Length, Matches, Substring} + +def emptyContext(): Unit = + summon[Decoded["Tuple(0, EmptyTuple)"] =:= 0 *: EmptyTuple] + +type Decoded[S <: String] = Matches[S, "Tuple(.+, .+)"] match + case true => Parsed[Substring[S, 6, 19], 0, ""] match + case (h, t) => Decoded["0"] *: EmptyTuple + case false => 0 + +type Parsed[S <: String, I <: Int, A <: String] <: (String, String) = Matches[S, "other"] match + case true => I match + case 1 => ("", "") + case _ => Parsed[Substring[S, 1, Length[S]], I, ""] + case false => ("0", "EmptyTuple") + + +object Minimization: + + type Cond[B <: Boolean] <: Tuple2[String, String] = B match + case true => ("", "") + case false => ("a", "b") + + type Decoded[B <: Boolean] = Cond[B] match + case (h1, _) => Int + + val _: Decoded[false] = 1 + diff --git a/tests/pos/i19710.scala b/tests/pos/i19710.scala deleted file mode 100644 index 03fd1e2d80b3..000000000000 --- a/tests/pos/i19710.scala 
+++ /dev/null @@ -1,11 +0,0 @@ -import scala.util.NotGiven - -type HasName1 = [n] =>> [x] =>> x match { - case n => true - case _ => false - } -@main def Test = { - summon[HasName1["foo"]["foo"] =:= true] - summon[NotGiven[HasName1["foo"]["bar"] =:= true]] - summon[Tuple.Filter[(1, "foo", 2, "bar"), HasName1["foo"]] =:= Tuple1["foo"]] // error -} diff --git a/tests/pos/i19715.scala b/tests/pos/i19715.scala index 91aeda5c1698..be5471ffa9b3 100644 --- a/tests/pos/i19715.scala +++ b/tests/pos/i19715.scala @@ -6,7 +6,8 @@ class NT(t: Tup): object NT: extension (x: NT) def app(n: Int): Boolean = true - given Conversion[NT, Tup] = _.toTup + given c1: Conversion[NT, Tup] = _.toTup + implicit def c2(t: NT): Tup = c1(t) def test = val nt = new NT(Tup()) diff --git a/tests/pos/i19749.scala b/tests/pos/i19749.scala new file mode 100644 index 000000000000..367c5f61bdce --- /dev/null +++ b/tests/pos/i19749.scala @@ -0,0 +1,15 @@ +import scala.deriving.Mirror + +case class A(x: Int, y: String) + +trait SomeTrait[T] + +object SomeTrait: + given [T]: SomeTrait[T] with {} + +def f1[T](using p: Mirror.ProductOf[T]): Tuple.Elem[p.MirroredElemTypes, 0] = ??? + +def f2[T, R](f: T => R)(using SomeTrait[R]) = ??? + +// Scala3.3 is fine, 3.4 has compilation errors, p MirroredElemTypes type is missing and has been changed to Nothing +val x = f2(_ => f1[A]) diff --git a/tests/pos/i19789.scala b/tests/pos/i19789.scala new file mode 100644 index 000000000000..24c3bdb1df8f --- /dev/null +++ b/tests/pos/i19789.scala @@ -0,0 +1,5 @@ +type Kinded[F[_]] = F[Any] | F[Nothing] + +def values[F[_]]: Vector[Kinded[F]] = ??? 
+ +def mapValues[F[_], T](f: Kinded[F] => T): Vector[T] = values[F].map { case x => f(x) } diff --git a/tests/pos/i19806/Module.scala b/tests/pos/i19806/Module.scala index d0142fc24682..328dcd213d2e 100644 --- a/tests/pos/i19806/Module.scala +++ b/tests/pos/i19806/Module.scala @@ -1,4 +1,4 @@ -//> using options -Yjava-tasty -Ytest-pickler-check +//> using options -Xjava-tasty -Ytest-pickler-check package p diff --git a/tests/pos/i19809.scala b/tests/pos/i19809.scala new file mode 100644 index 000000000000..4c1b55d01eeb --- /dev/null +++ b/tests/pos/i19809.scala @@ -0,0 +1,7 @@ +type A = Any { var x: Int } + +val f: Any { var i: Int } = new AnyRef { var i: Int = 0 } + +def Test = + summon[Any { def x: Int; def x_=(x: Int): Unit } <:< Any { var x: Int }] + summon[Any { var x: Int } <:< Any { def x: Int; def x_=(x: Int): Unit }] diff --git a/tests/pos/i19821.scala b/tests/pos/i19821.scala new file mode 100644 index 000000000000..0dcad965a38b --- /dev/null +++ b/tests/pos/i19821.scala @@ -0,0 +1,26 @@ + +object Test: + + trait T: + type S + type F = T.F[S] + + def foo: F + def bar: T.F[S] + + object T: + type F[X] = X match + case String => Option[Int] + + type G[X] = X match + case Option[x] => Int + + val t: T {type S = String} = ??? + + val b = t.bar + val m1: T.G[b.type] = ??? + val _: Int = m1 // Ok + + val f = t.foo + val m: T.G[f.type] = ??? 
+ val _: Int = m // Error before changes diff --git a/tests/pos/i19892.scala b/tests/pos/i19892.scala new file mode 100644 index 000000000000..6f3e0bd6d06c --- /dev/null +++ b/tests/pos/i19892.scala @@ -0,0 +1,26 @@ +abstract class ZPartialServerEndpoint[R, A, B, I, E, O, -C] + extends EndpointOps[A, I, E, O, C]{ + override type ThisType[-_R] = ZPartialServerEndpoint[R, A, B, I, E, O, _R] + override type EndpointType[_A, _I, _E, _O, -_R] =ZPartialServerEndpoint[R, _A, B, _I, _E, _O, _R] +} + +trait EndpointOps[A, I, E, O, -R] { + type EndpointType[_A, _I, _E, _O, -_R] + type ThisType[-_R] + def out[T]: EndpointType[A, I, E, T, R] + def description(d: String): ThisType[R] +} + +object Test { + def basicEndpoint[R](): ZPartialServerEndpoint[R, Any, Any, Unit, Any, Unit, Any] = ??? + + // commonts next to `.out[Any]` contain information about compilation time when chaining up to N `out` functions + val case1 = + basicEndpoint() // 1.5s + .out[Any] // 1.6s + .out[Any] // 1.7s + .out[Any] // 2s + .out[Any] // 4s + .out[Any] // 33s + .out[Any] // aborted after 5 min +} \ No newline at end of file diff --git a/tests/pos/i19929.scala b/tests/pos/i19929.scala new file mode 100644 index 000000000000..2e1c691af8f5 --- /dev/null +++ b/tests/pos/i19929.scala @@ -0,0 +1,5 @@ +trait A: + private type M + +def foo(a: A{type M = Int}) = + val _: a.M = ??? // was crash diff --git a/tests/pos/i19950.scala b/tests/pos/i19950.scala new file mode 100644 index 000000000000..349140f43ff5 --- /dev/null +++ b/tests/pos/i19950.scala @@ -0,0 +1,10 @@ + +trait Apply[F[_]]: + extension [T <: NonEmptyTuple](tuple: T)(using toMap: Tuple.IsMappedBy[F][T]) + def mapN[B](f: Tuple.InverseMap[T, F] => B): F[B] = ??? + +given Apply[Option] = ??? +given Apply[List] = ??? +given Apply[util.Try] = ??? 
+ +@main def Repro = (Option(1), Option(2), Option(3)).mapN(_ + _ + _) \ No newline at end of file diff --git a/tests/pos/i19955a.scala b/tests/pos/i19955a.scala new file mode 100644 index 000000000000..b8ea95d41d24 --- /dev/null +++ b/tests/pos/i19955a.scala @@ -0,0 +1,27 @@ + +trait Summon[R, T <: R]: + type Out +object Summon: + given [R, T <: R]: Summon[R, T] with + type Out = R + +trait DFTypeAny +trait DFBits[W <: Int] extends DFTypeAny +class DFVal[+T <: DFTypeAny] +type DFValAny = DFVal[DFTypeAny] +type DFValOf[+T <: DFTypeAny] = DFVal[T] +trait Candidate[R]: + type OutW <: Int +object Candidate: + type Aux[R, O <: Int] = Candidate[R] { type OutW = O } + given [W <: Int, R <: DFValOf[DFBits[W]]]: Candidate[R] with + type OutW = W + +extension [L](lhs: L) def foo(using es: Summon[L, lhs.type]): Unit = ??? +extension [L <: DFValAny](lhs: L)(using icL: Candidate[L]) def baz: DFValOf[DFBits[icL.OutW]] = ??? +extension [L <: DFValAny, W <: Int](lhs: L)(using icL: Candidate.Aux[L, W]) + def bazAux: DFValOf[DFBits[W]] = ??? + +val x = new DFVal[DFBits[4]] +val works = x.bazAux.foo +val fails = x.baz.foo \ No newline at end of file diff --git a/tests/pos/i19955b.scala b/tests/pos/i19955b.scala new file mode 100644 index 000000000000..99e101b312b1 --- /dev/null +++ b/tests/pos/i19955b.scala @@ -0,0 +1,17 @@ + +trait Wrap[W] + +trait IsWrapOfInt[R]: + type Out <: Int +given [W <: Int, R <: Wrap[W]]: IsWrapOfInt[R] with + type Out = Int + +trait IsInt[U <: Int] +given [U <: Int]: IsInt[U] = ??? + +extension [L](lhs: L) def get(using ev: IsWrapOfInt[L]): ev.Out = ??? +extension (lhs: Int) def isInt(using IsInt[lhs.type]): Unit = ??? + +val x: Wrap[Int] = ??? 
+val works = (x.get: Int).isInt +val fails = x.get.isInt diff --git a/tests/pos/i20053b.scala b/tests/pos/i20053b.scala new file mode 100644 index 000000000000..25180d56bbae --- /dev/null +++ b/tests/pos/i20053b.scala @@ -0,0 +1,22 @@ + +trait Sub[R, T >: R] +given [R, T >: R]: Sub[R, T] with {} + +trait Candidate[-R]: + type OutP +given [P]: Candidate[Option[P]] with + type OutP = P + +extension [L](lhs: L) + def ^^^[P](rhs: Option[P]) + (using es: Sub[lhs.type, Any]) + (using c: Candidate[L]) + (using check: c.type <:< Any): Option[c.OutP] = ??? + +val x: Option[Boolean] = ??? + +val z1 = x ^^^ x // Ok +val z2 = z1 ^^^ x // Ok +val zz = ^^^[Option[Boolean]](x ^^^ x)(x) // Ok + +val zzz = x ^^^ x ^^^ x // Error before changes diff --git a/tests/pos/i20078/AbstractShapeBuilder.java b/tests/pos/i20078/AbstractShapeBuilder.java new file mode 100644 index 000000000000..5ca57c4f70a6 --- /dev/null +++ b/tests/pos/i20078/AbstractShapeBuilder.java @@ -0,0 +1,3 @@ +public abstract class AbstractShapeBuilder, S extends Shape> { + abstract public B addTrait(Trait trait); +} \ No newline at end of file diff --git a/tests/pos/i20078/Shape.java b/tests/pos/i20078/Shape.java new file mode 100644 index 000000000000..e4b4540362bc --- /dev/null +++ b/tests/pos/i20078/Shape.java @@ -0,0 +1 @@ +public interface Shape {} diff --git a/tests/pos/i20078/Test.scala b/tests/pos/i20078/Test.scala new file mode 100644 index 000000000000..b529af7e40c3 --- /dev/null +++ b/tests/pos/i20078/Test.scala @@ -0,0 +1,3 @@ +@main def Test = + val builder: AbstractShapeBuilder[? <: AbstractShapeBuilder[?, ?], ? <: Shape] = ??? 
+ List.empty[Trait].foreach(builder.addTrait(_)) \ No newline at end of file diff --git a/tests/pos/i20078/Trait.java b/tests/pos/i20078/Trait.java new file mode 100644 index 000000000000..8082056c1e26 --- /dev/null +++ b/tests/pos/i20078/Trait.java @@ -0,0 +1 @@ +public interface Trait {} diff --git a/tests/pos/i20080.scala b/tests/pos/i20080.scala new file mode 100644 index 000000000000..dbf6843fcbc4 --- /dev/null +++ b/tests/pos/i20080.scala @@ -0,0 +1,32 @@ + +trait Zippable[-A, -B]: + type Out + def zip(left: A, right: B): Out + +object Zippable extends ZippableLowPrio: + given append[A <: Tuple, B]: (Zippable[A, B] { type Out = Tuple.Append[A, B] }) = + (left, right) => left :* right + +trait ZippableLowPrio: + given pair[A, B]: (Zippable[A, B] { type Out = (A, B) }) = + (left, right) => (left, right) + + +object Minimization: + + trait Fun1: + type Out + def apply(x: Any): Out + + type M[X] = X match + case String => X + + def test[A] = + + val _: Fun1 { type Out = M[A] } = new Fun1: + type Out = M[A] + def apply(x: Any): Out = ??? + + val _: Fun1 { type Out = M[A] } = x => ??? + + val _: Fun1 { type Out = A match {case String => A} } = x => ??? 
diff --git a/tests/pos/i20107.scala b/tests/pos/i20107.scala new file mode 100644 index 000000000000..80ce350cf29d --- /dev/null +++ b/tests/pos/i20107.scala @@ -0,0 +1,6 @@ +object foo: + transparent inline def unapply[F](e: F): Option[F] = Some(e.asInstanceOf[F]) + +class A: + def test(x: Int) = x match + case foo(e) => e diff --git a/tests/pos/i20135.scala b/tests/pos/i20135.scala new file mode 100644 index 000000000000..6143d642fbbb --- /dev/null +++ b/tests/pos/i20135.scala @@ -0,0 +1,11 @@ +import language.experimental.captureChecking + +class Network + +class Page(using nw: Network^): + def render(client: Page^{nw} ?-> Unit) = client(using this) + +def main(net: Network^) = + var page = Page(using net) + page.render(()) + diff --git a/tests/pos/i20136a.scala b/tests/pos/i20136a.scala new file mode 100644 index 000000000000..5378119f14d2 --- /dev/null +++ b/tests/pos/i20136a.scala @@ -0,0 +1,14 @@ + +trait Expr: + type Value +object Expr: + type Of[V] = Expr { type Value = V } + type ExtractValue[E <: Expr] = E match + case Expr.Of[v] => v + +trait TC[E <: Expr]: + type Elem = Expr.ExtractValue[E] +class BIExpr extends Expr: + type Value = BigInt +class Foo extends TC[BIExpr]: + val v: Elem = 0 diff --git a/tests/pos/i20136b/A_1.scala b/tests/pos/i20136b/A_1.scala new file mode 100644 index 000000000000..7c8dc3ebbf52 --- /dev/null +++ b/tests/pos/i20136b/A_1.scala @@ -0,0 +1,8 @@ +package a + +trait Expr: + type Value +object Expr: + type Of[V] = Expr { type Value = V } + type ExtractValue[E <: Expr] = E match + case Expr.Of[v] => v diff --git a/tests/pos/i20136b/B_2.scala b/tests/pos/i20136b/B_2.scala new file mode 100644 index 000000000000..54a3da158f89 --- /dev/null +++ b/tests/pos/i20136b/B_2.scala @@ -0,0 +1,8 @@ +package a + +trait TC[E <: Expr]: + type Elem = Expr.ExtractValue[E] +class BIExpr extends Expr: + type Value = BigInt +class Foo extends TC[BIExpr]: + val v: Elem = 0 diff --git a/tests/pos/i20154.scala b/tests/pos/i20154.scala new file mode 
100644 index 000000000000..17dc41be7011 --- /dev/null +++ b/tests/pos/i20154.scala @@ -0,0 +1,15 @@ +sealed abstract class Kyo[+T, -S] +opaque type <[+T, -S] >: T = T | Kyo[T, S] + +abstract class Effect[+E]: + type Command[_] + +case class Recurse[Command[_], Result[_], E <: Effect[E], T, S, S2]( + h: ResultHandler[Command, Result, E, S], + v: T < (E & S & S2) +) + +abstract class ResultHandler[Command[_], Result[_], E <: Effect[E], S]: + opaque type Handle[T, S2] >: (Result[T] < (S & S2)) = Result[T] < (S & S2) | Recurse[Command, Result, E, T, S, S2] + + def handle[T, S2](h: ResultHandler[Command, Result, E, S], v: T < (E & S & S2)): Handle[T, S2] = Recurse(h, v) diff --git a/tests/pos/i20176.scala b/tests/pos/i20176.scala new file mode 100644 index 000000000000..df0c6cc1e8a7 --- /dev/null +++ b/tests/pos/i20176.scala @@ -0,0 +1,12 @@ +type Accumulator[A] + +object Accumulator { + + val usage = + use[Int]: + "asd" + + inline def use[A](using DummyImplicit): [B] => Any => Any = ??? + + inline def use[A]: [B] => Any => Any = ??? 
+} diff --git a/tests/pos/i20184.scala b/tests/pos/i20184.scala new file mode 100644 index 000000000000..2c9f6ed62be0 --- /dev/null +++ b/tests/pos/i20184.scala @@ -0,0 +1,12 @@ +object Outer: + def Test = + object Inner: + var x: Int = 2 + class Rgb(): + def f = x + + type Id[X] = X + type TRgb = Id[Inner.Rgb] + + val ok = new Inner.Rgb() + val crash = new Id[Inner.Rgb] \ No newline at end of file diff --git a/tests/pos/i20187/A_1.scala b/tests/pos/i20187/A_1.scala new file mode 100644 index 000000000000..32dbae995219 --- /dev/null +++ b/tests/pos/i20187/A_1.scala @@ -0,0 +1,19 @@ +import scala.deriving.Mirror + +enum E: + case Foo1() + case Foo2() + +class Outer: + case class Inner() +val o = new Outer + +type F = E.Foo1 +type G = Tuple.Head[E.Foo1 *: E.Foo2 *: EmptyTuple] +type H = Tuple.Head[o.Inner *: EmptyTuple] +type I = Tuple.Last[E *: EmptyTuple] + +def local = + case class Bar() + type B = Tuple.Head[Bar *: EmptyTuple] + summon[Mirror.Of[B]] diff --git a/tests/pos/i20187/B_2.scala b/tests/pos/i20187/B_2.scala new file mode 100644 index 000000000000..99cfc7ba9b91 --- /dev/null +++ b/tests/pos/i20187/B_2.scala @@ -0,0 +1,7 @@ +import scala.deriving.Mirror + +def Test = + summon[Mirror.Of[F]] // ok + summon[Mirror.Of[G]] // was crash + summon[Mirror.Of[H]] // was crash + summon[Mirror.Of[I]] // was crash diff --git a/tests/pos/i20206.scala b/tests/pos/i20206.scala new file mode 100644 index 000000000000..07ef3dc0ba73 --- /dev/null +++ b/tests/pos/i20206.scala @@ -0,0 +1,15 @@ +//> using options -experimental + +import language.experimental.erasedDefinitions + +erased trait A +trait B + +def foo1: A ?=> B ?=> Nothing = ??? +def foo2: (A, B) ?=> Nothing = ??? +def foo3: (B, A) ?=> Nothing = ??? 
+ +def bar: (A, B) ?=> Nothing = + foo1 + foo2 + foo3 diff --git a/tests/pos/i20237.scala b/tests/pos/i20237.scala new file mode 100644 index 000000000000..da3e902b78b4 --- /dev/null +++ b/tests/pos/i20237.scala @@ -0,0 +1,15 @@ +import language.experimental.captureChecking +import scala.annotation.capability + +@capability class Cap: + def use[T](body: Cap ?=> T) = body(using this) + +class Box[T](body: Cap ?=> T): + inline def open(using cap: Cap) = cap.use(body) + +object Box: + def make[T](body: Cap ?=> T)(using Cap): Box[T]^{body} = Box(body) + +def main = + given Cap = new Cap + val box = Box.make(1).open \ No newline at end of file diff --git a/tests/pos/i20300.scala b/tests/pos/i20300.scala new file mode 100644 index 000000000000..721b79940ba1 --- /dev/null +++ b/tests/pos/i20300.scala @@ -0,0 +1,8 @@ +trait T: + + def v() = () + + trait F: + def f(): Unit = + inline def op(): Unit = v() + op() \ No newline at end of file diff --git a/tests/pos/i20344.scala b/tests/pos/i20344.scala new file mode 100644 index 000000000000..d3b2a060d6e2 --- /dev/null +++ b/tests/pos/i20344.scala @@ -0,0 +1,28 @@ +trait Monad[F[_]] extends Invariant[F] + +trait Invariant[F[_]] +object Invariant: + implicit def catsInstancesForList: Monad[List] = ??? + implicit def catsInstancesForVector: Monad[Vector] = ??? + +trait Shrink[T] +object Shrink extends ShrinkLowPriorityImplicits: + trait Buildable[T,C] + implicit def shrinkContainer[C[_],T](implicit v: C[T] => Traversable[T], s: Shrink[T], b: Buildable[T,C[T]]): Shrink[C[T]] = ??? +trait ShrinkLowPriorityImplicits: + implicit def shrinkAny[T]: Shrink[T] = ??? + +trait Distribution[F[_], -P, X] extends (P => F[X]) +type GenBeta[A, B, X] = [F[_]] =>> Distribution[F, Beta.Params[A, B], X] +type Beta[R] = [F[_]] =>> GenBeta[R, R, R][F] + +object Beta: + trait Params[+A, +B] +trait BetaInstances: + given schrodingerRandomBetaForDouble[F[_]: Monad]: Beta[Double][F] = ??? 
+ +object all extends BetaInstances + +@main def Test = + import all.given + summon[Shrink[Beta.Params[Double, Double]]] \ No newline at end of file diff --git a/tests/pos/i20483.scala b/tests/pos/i20483.scala new file mode 100644 index 000000000000..a01a77327181 --- /dev/null +++ b/tests/pos/i20483.scala @@ -0,0 +1,13 @@ + +class Foo + (x: Option[String]) + (using Boolean) + (using Int) + (using Double): + + def this + (x: String) + (using Boolean) + (using Int) + (using Double) = + this(Some(x)) \ No newline at end of file diff --git a/tests/pos/i20484.scala b/tests/pos/i20484.scala new file mode 100644 index 000000000000..2f02e6206101 --- /dev/null +++ b/tests/pos/i20484.scala @@ -0,0 +1,3 @@ +given Int = ??? +given Char = ??? +val a = summon[Int] \ No newline at end of file diff --git a/tests/pos/i20572.scala b/tests/pos/i20572.scala new file mode 100644 index 000000000000..4ee4490c839c --- /dev/null +++ b/tests/pos/i20572.scala @@ -0,0 +1,7 @@ +//> using options -Werror +trait Writes[T] +trait Format[T] extends Writes[T] +given [T: List]: Writes[T] = null +given [T]: Format[T] = null + +val _ = summon[Writes[Int]] diff --git a/tests/pos/i20860.scala b/tests/pos/i20860.scala new file mode 100644 index 000000000000..1e1ddea11b75 --- /dev/null +++ b/tests/pos/i20860.scala @@ -0,0 +1,3 @@ +def `i20860 use result to check selector bound`: Unit = + import Ordering.Implicits.given Ordering[?] + summon[Ordering[Seq[Int]]] diff --git a/tests/pos/i20901/Foo.scala b/tests/pos/i20901/Foo.scala new file mode 100644 index 000000000000..c1277781db38 --- /dev/null +++ b/tests/pos/i20901/Foo.scala @@ -0,0 +1,6 @@ +//> using options -Ytest-pickler-check + +import reflect.ClassTag + +class Foo: + def mkArray[T: ClassTag] = ??? 
diff --git a/tests/pos/i20901/Foo.tastycheck b/tests/pos/i20901/Foo.tastycheck new file mode 100644 index 000000000000..0201bfec2056 --- /dev/null +++ b/tests/pos/i20901/Foo.tastycheck @@ -0,0 +1,124 @@ +Header: + version: + tooling: + UUID: + +Names (276 bytes, starting from ): + 0: ASTs + 1: + 2: scala + 3: reflect + 4: scala[Qualified . reflect] + 5: ClassTag + 6: Foo + 7: + 8: java + 9: lang + 10: java[Qualified . lang] + 11: Object + 12: java[Qualified . lang][Qualified . Object] + 13: [Signed Signature(List(),java.lang.Object) @] + 14: Unit + 15: mkArray + 16: T + 17: Nothing + 18: Any + 19: evidence$ + 20: [Unique evidence$ 1] + 21: ??? + 22: Predef + 23: SourceFile + 24: annotation + 25: scala[Qualified . annotation] + 26: internal + 27: scala[Qualified . annotation][Qualified . internal] + 28: scala[Qualified . annotation][Qualified . internal][Qualified . SourceFile] + 29: String + 30: java[Qualified . lang][Qualified . String] + 31: [Signed Signature(List(java.lang.String),scala.annotation.internal.SourceFile) @] + 32: + 33: Positions + 34: Comments + 35: Attributes + +Trees (94 bytes, starting from ): + 0: PACKAGE(92) + 2: TERMREFpkg 1 [] + 4: IMPORT(4) + 6: TERMREFpkg 4 [scala[Qualified . reflect]] + 8: IMPORTED 5 [ClassTag] + 10: TYPEDEF(82) 6 [Foo] + 13: TEMPLATE(61) + 15: APPLY(10) + 17: SELECTin(8) 13 [[Signed Signature(List(),java.lang.Object) @]] + 20: NEW + 21: TYPEREF 11 [Object] + 23: TERMREFpkg 10 [java[Qualified . 
lang]] + 25: SHAREDtype 21 + 27: DEFDEF(7) 7 [] + 30: EMPTYCLAUSE + 31: TYPEREF 14 [Unit] + 33: TERMREFpkg 2 [scala] + 35: STABLE + 36: DEFDEF(38) 15 [mkArray] + 39: TYPEPARAM(11) 16 [T] + 42: TYPEBOUNDStpt(8) + 44: TYPEREF 17 [Nothing] + 46: SHAREDtype 33 + 48: TYPEREF 18 [Any] + 50: SHAREDtype 33 + 52: PARAM(14) 20 [[Unique evidence$ 1]] + 55: APPLIEDtpt(10) + 57: IDENTtpt 5 [ClassTag] + 59: TYPEREF 5 [ClassTag] + 61: SHAREDtype 6 + 63: IDENTtpt 16 [T] + 65: TYPEREFdirect 39 + 67: IMPLICIT + 68: SHAREDtype 44 + 70: TERMREF 21 [???] + 72: TERMREF 22 [Predef] + 74: SHAREDtype 33 + 76: ANNOTATION(16) + 78: TYPEREF 23 [SourceFile] + 80: TERMREFpkg 27 [scala[Qualified . annotation][Qualified . internal]] + 82: APPLY(10) + 84: SELECTin(6) 31 [[Signed Signature(List(java.lang.String),scala.annotation.internal.SourceFile) @]] + 87: NEW + 88: SHAREDtype 78 + 90: SHAREDtype 78 + 92: STRINGconst 32 [] + 94: + +Positions (72 bytes, starting from ): + lines: 7 + line sizes: + 38, 0, 23, 0, 10, 32, 0 + positions: + 0: 40 .. 108 + 4: 40 .. 63 + 6: 47 .. 54 + 8: 55 .. 63 + 10: 65 .. 108 + 13: 78 .. 108 + 21: 71 .. 71 + 27: 78 .. 78 + 31: 78 .. 78 + 36: 78 .. 108 + 39: 90 .. 101 + 44: 93 .. 93 + 48: 93 .. 93 + 52: 93 .. 101 + 57: 93 .. 101 + 63: 93 .. 101 + 68: 102 .. 102 + 70: 105 .. 108 + 82: 65 .. 108 + 88: 65 .. 65 + 92: 65 .. 65 + + source paths: + 0: 32 [] + +Attributes (2 bytes, starting from ): + SOURCEFILEattr 32 [] diff --git a/tests/pos/i21036.scala b/tests/pos/i21036.scala new file mode 100644 index 000000000000..1c98346e4ef3 --- /dev/null +++ b/tests/pos/i21036.scala @@ -0,0 +1,16 @@ +//> using options -source 3.5 -Werror +trait SameRuntime[A, B] +trait BSONWriter[T] +trait BSONHandler[T] extends BSONWriter[T] + +opaque type Id = String +object Id: + given SameRuntime[Id, String] = ??? + +given BSONHandler[String] = ??? +given [T: BSONHandler]: BSONHandler[List[T]] = ??? 
+ +given opaqueWriter[T, A](using rs: SameRuntime[T, A], writer: BSONWriter[A]): BSONWriter[T] = ??? + +val x = summon[BSONHandler[List[Id]]] // this doesn't emit warning +val y = summon[BSONWriter[List[Id]]] // this did emit warning diff --git a/tests/pos/i21212.scala b/tests/pos/i21212.scala new file mode 100644 index 000000000000..1a1f2e35819a --- /dev/null +++ b/tests/pos/i21212.scala @@ -0,0 +1,22 @@ + +trait Functor[F[_]]: + def map[A, B](fa: F[A])(f: A => B): F[B] = ??? +trait Monad[F[_]] extends Functor[F] +trait MonadError[F[_], E] extends Monad[F]: + def raiseError[A](e: E): F[A] +trait Temporal[F[_]] extends MonadError[F, Throwable] + +trait FunctorOps[F[_], A]: + def map[B](f: A => B): F[B] = ??? +implicit def toFunctorOps[F[_], A](target: F[A])(implicit tc: Functor[F]): FunctorOps[F, A] = ??? + +class ContextBounds[F[_]: Temporal](using err: MonadError[F, Throwable]): + def useCase = err.raiseError(new RuntimeException()) + val bool: F[Boolean] = ??? + def fails = toFunctorOps(bool).map(_ => ()) // warns under -source:3.5, // error under -source:3.6 + +class UsingArguments[F[_]](using Temporal[F])(using err: MonadError[F, Throwable]): + def useCase = err.raiseError(new RuntimeException()) + val bool: F[Boolean] = ??? 
+ def works = toFunctorOps(bool).map(_ => ()) // warns under -source:3.5 + diff --git a/tests/pos/i21303/JavaEnum.java b/tests/pos/i21303/JavaEnum.java new file mode 100644 index 000000000000..e626d5070626 --- /dev/null +++ b/tests/pos/i21303/JavaEnum.java @@ -0,0 +1 @@ +public enum JavaEnum { ABC, DEF, GHI } diff --git a/tests/pos/i21303/Test.scala b/tests/pos/i21303/Test.scala new file mode 100644 index 000000000000..fe3efa6e38f3 --- /dev/null +++ b/tests/pos/i21303/Test.scala @@ -0,0 +1,32 @@ +import scala.deriving.Mirror +import scala.compiletime.* +import scala.reflect.ClassTag +import scala.annotation.implicitNotFound + + +trait TSType[T] +object TSType extends DefaultTSTypes with TSTypeMacros + +trait TSNamedType[T] extends TSType[T] + +trait DefaultTSTypes extends JavaTSTypes +trait JavaTSTypes { + given javaEnumTSType[E <: java.lang.Enum[E]: ClassTag]: TSType[E] = ??? +} +object DefaultTSTypes extends DefaultTSTypes +trait TSTypeMacros { + inline given [T: Mirror.Of]: TSType[T] = derived[T] + inline def derived[T](using m: Mirror.Of[T]): TSType[T] = { + val elemInstances = summonAll[m.MirroredElemTypes] + ??? 
+ } + + private inline def summonAll[T <: Tuple]: List[TSType[_]] = { + inline erasedValue[T] match { + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[TSType[t]] :: summonAll[ts] + } + } +} + +@main def Test = summon[TSType[JavaEnum]] \ No newline at end of file diff --git a/tests/pos/i21303a/JavaEnum.java b/tests/pos/i21303a/JavaEnum.java new file mode 100644 index 000000000000..e626d5070626 --- /dev/null +++ b/tests/pos/i21303a/JavaEnum.java @@ -0,0 +1 @@ +public enum JavaEnum { ABC, DEF, GHI } diff --git a/tests/pos/i21303a/Test.scala b/tests/pos/i21303a/Test.scala new file mode 100644 index 000000000000..83a598b5f17f --- /dev/null +++ b/tests/pos/i21303a/Test.scala @@ -0,0 +1,35 @@ +import scala.deriving.Mirror +import scala.compiletime.* +import scala.reflect.ClassTag +import scala.annotation.implicitNotFound + + +trait TSType[T] +object TSType extends DefaultTSTypes with TSTypeMacros + +trait TSNamedType[T] extends TSType[T] + +trait DefaultTSTypes extends JavaTSTypes +trait JavaTSTypes { + given javaEnumTSType[E <: java.lang.Enum[E]: ClassTag]: TSType[E] = ??? + given javaEnumTSNamedType[E <: java.lang.Enum[E]: ClassTag]: TSNamedType[E] = ??? +} +object DefaultTSTypes extends DefaultTSTypes +trait TSTypeMacros { + inline given [T: Mirror.Of]: TSType[T] = derived[T] + inline def derived[T](using m: Mirror.Of[T]): TSType[T] = { + val elemInstances = summonAll[m.MirroredElemTypes] + ??? 
+ } + + private inline def summonAll[T <: Tuple]: List[TSType[_]] = { + inline erasedValue[T] match { + case _: EmptyTuple => Nil + case _: (t *: ts) => summonInline[TSType[t]] :: summonAll[ts] + } + } +} + +@main def Test = + summon[TSType[JavaEnum]] + summon[TSNamedType[JavaEnum]] diff --git a/tests/pos/i21320.scala b/tests/pos/i21320.scala new file mode 100644 index 000000000000..0a7e0d1941d1 --- /dev/null +++ b/tests/pos/i21320.scala @@ -0,0 +1,73 @@ +import scala.deriving.* +import scala.compiletime.* + +trait ConfigMonoid[T]: + def zero: T + def orElse(main: T, defaults: T): T + +object ConfigMonoid: + given option[T]: ConfigMonoid[Option[T]] = ??? + + inline def zeroTuple[C <: Tuple]: Tuple = + inline erasedValue[C] match + case _: EmptyTuple => EmptyTuple + case _: (t *: ts) => + summonInline[ConfigMonoid[t]].zero *: zeroTuple[ts] + + inline def valueTuple[C <: Tuple, T](index: Int, main: T, defaults: T): Tuple = + inline erasedValue[C] match + case _: EmptyTuple => EmptyTuple + case _: (t *: ts) => + def get(v: T) = v.asInstanceOf[Product].productElement(index).asInstanceOf[t] + summonInline[ConfigMonoid[t]].orElse(get(main), get(defaults)) *: valueTuple[ts, T]( + index + 1, + main, + defaults + ) + + inline given derive[T](using m: Mirror.ProductOf[T]): ConfigMonoid[T] = + new ConfigMonoid[T]: + def zero: T = m.fromProduct(zeroTuple[m.MirroredElemTypes]) + def orElse(main: T, defaults: T): T = m.fromProduct(valueTuple[m.MirroredElemTypes, T](0, main, defaults)) + + + +final case class PublishOptions( + v1: Option[String] = None, + v2: Option[String] = None, + v3: Option[String] = None, + v4: Option[String] = None, + v5: Option[String] = None, + v6: Option[String] = None, + v7: Option[String] = None, + v8: Option[String] = None, + v9: Option[String] = None, + ci: PublishContextualOptions = PublishContextualOptions(), +) +object PublishOptions: + implicit val monoid: ConfigMonoid[PublishOptions] = ConfigMonoid.derive + +final case class 
PublishContextualOptions( + v1: Option[String] = None, + v2: Option[String] = None, + v3: Option[String] = None, + v4: Option[String] = None, + v5: Option[String] = None, + v6: Option[String] = None, + v7: Option[String] = None, + v8: Option[String] = None, + v9: Option[String] = None, + v10: Option[String] = None, + v11: Option[String] = None, + v12: Option[String] = None, + v13: Option[String] = None, + v14: Option[String] = None, + v15: Option[String] = None, + v16: Option[String] = None, + v17: Option[String] = None, + v18: Option[String] = None, + v19: Option[String] = None, + v20: Option[String] = None +) +object PublishContextualOptions: + given monoid: ConfigMonoid[PublishContextualOptions] = ConfigMonoid.derive \ No newline at end of file diff --git a/tests/pos/i2974.scala b/tests/pos/i2974.scala index 75c6a24a41bb..8f1c2e2d6d2f 100644 --- a/tests/pos/i2974.scala +++ b/tests/pos/i2974.scala @@ -7,6 +7,7 @@ object Test { implicit val ba: Bar[Int] = ??? def test: Unit = { - implicitly[Foo[Int]] + val x = summon[Foo[Int]] + val _: Bar[Int] = x } } diff --git a/tests/pos/i3920.scala b/tests/pos/i3920.scala new file mode 100644 index 000000000000..6cd74187098f --- /dev/null +++ b/tests/pos/i3920.scala @@ -0,0 +1,32 @@ +//> using options -source future -language:experimental.modularity +trait Ordering { + type T + def compare(t1:T, t2: T): Int +} + +class SetFunctor(tracked val ord: Ordering) { + type Set = List[ord.T] + def empty: Set = Nil + + implicit class helper(s: Set) { + def add(x: ord.T): Set = x :: remove(x) + def remove(x: ord.T): Set = s.filter(e => ord.compare(x, e) != 0) + def member(x: ord.T): Boolean = s.exists(e => ord.compare(x, e) == 0) + } +} + +object Test { + val orderInt = new Ordering { + type T = Int + def compare(t1: T, t2: T): Int = t1 - t2 + } + + val IntSet = new SetFunctor(orderInt) + import IntSet.* + + def main(args: Array[String]) = { + val set = IntSet.empty.add(6).add(8).add(23) + assert(!set.member(7)) + assert(set.member(8)) 
+ } +} \ No newline at end of file diff --git a/tests/pos/i3964.scala b/tests/pos/i3964.scala new file mode 100644 index 000000000000..42412b910899 --- /dev/null +++ b/tests/pos/i3964.scala @@ -0,0 +1,32 @@ +//> using options -source future -language:experimental.modularity +trait Animal +class Dog extends Animal +class Cat extends Animal + +object Test2: + class Bar(tracked val x: Animal) + val b = new Bar(new Cat) + val bar: Bar { val x: Cat } = new Bar(new Cat) // ok + + trait Foo(tracked val x: Animal) + val foo: Foo { val x: Cat } = new Foo(new Cat) {} // ok + +object Test3: + trait Vec(tracked val size: Int) + class Vec8 extends Vec(8) + + abstract class Lst(tracked val size: Int) + class Lst8 extends Lst(8) + + val v8a: Vec { val size: 8 } = new Vec8 + val v8b: Vec { val size: 8 } = new Vec(8) {} + + val l8a: Lst { val size: 8 } = new Lst8 + val l8b: Lst { val size: 8 } = new Lst(8) {} + + class VecN(tracked val n: Int) extends Vec(n) + class Vec9 extends VecN(9) + val v9a = VecN(9) + val _: Vec { val size: 9 } = v9a + val v9b = Vec9() + val _: Vec { val size: 9 } = v9b diff --git a/tests/pos/i3964a/Defs_1.scala b/tests/pos/i3964a/Defs_1.scala new file mode 100644 index 000000000000..7dcc89f7003e --- /dev/null +++ b/tests/pos/i3964a/Defs_1.scala @@ -0,0 +1,18 @@ +//> using options -source future -language:experimental.modularity +trait Animal +class Dog extends Animal +class Cat extends Animal + +object Test2: + class Bar(tracked val x: Animal) + val b = new Bar(new Cat) + val bar: Bar { val x: Cat } = new Bar(new Cat) // ok + + trait Foo(tracked val x: Animal) + val foo: Foo { val x: Cat } = new Foo(new Cat) {} // ok + +package coll: + trait Vec(tracked val size: Int) + class Vec8 extends Vec(8) + + abstract class Lst(tracked val size: Int) \ No newline at end of file diff --git a/tests/pos/i3964a/Uses_2.scala b/tests/pos/i3964a/Uses_2.scala new file mode 100644 index 000000000000..9d1b6ebaa58b --- /dev/null +++ b/tests/pos/i3964a/Uses_2.scala @@ -0,0 +1,16 
@@ +//> using options -source future -language:experimental.modularity +import coll.* +class Lst8 extends Lst(8) + +val v8a: Vec { val size: 8 } = new Vec8 +val v8b: Vec { val size: 8 } = new Vec(8) {} + +val l8a: Lst { val size: 8 } = new Lst8 +val l8b: Lst { val size: 8 } = new Lst(8) {} + +class VecN(tracked val n: Int) extends Vec(n) +class Vec9 extends VecN(9) +val v9a = VecN(9) +val _: Vec { val size: 9 } = v9a +val v9b = Vec9() +val _: Vec { val size: 9 } = v9b diff --git a/tests/pos/i7045.scala b/tests/pos/i7045.scala deleted file mode 100644 index e683654dd5c3..000000000000 --- a/tests/pos/i7045.scala +++ /dev/null @@ -1,9 +0,0 @@ -trait Bar { type Y } -trait Foo { type X } - -class Test: - given a1(using b: Bar): Foo = new Foo { type X = b.Y } - - given a2(using b: Bar): Foo { type X = b.Y } = new Foo { type X = b.Y } - - given a3(using b: Bar): (Foo { type X = b.Y }) = new Foo { type X = b.Y } diff --git a/tests/pos/i7741.scala b/tests/pos/i7741.scala index 237616d04d2a..af9912915cc0 100644 --- a/tests/pos/i7741.scala +++ b/tests/pos/i7741.scala @@ -4,7 +4,7 @@ class A1 { @native private def a: Unit } trait A2 { - erased def i(a: Int): Int + erased def i(erased a: Int): Int } trait A3 { erased val a: Int diff --git a/tests/pos/i7851.scala b/tests/pos/i7851.scala index 5a6408cbe12a..16d28ad353f9 100644 --- a/tests/pos/i7851.scala +++ b/tests/pos/i7851.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental trait Wrappable[T] { } given Wrappable[Float] with { } diff --git a/tests/pos/i8945.scala b/tests/pos/i8945.scala index 2ae8fc268cbf..d0a25b474f28 100644 --- a/tests/pos/i8945.scala +++ b/tests/pos/i8945.scala @@ -1,4 +1,4 @@ -//> using options -Yno-experimental + // src-2/MacroImpl.scala trait Context { diff --git a/tests/pos/i9967.scala b/tests/pos/i9967.scala index 4e915a27bfbf..d8cbf99b9d6e 100644 --- a/tests/pos/i9967.scala +++ b/tests/pos/i9967.scala @@ -1,6 +1,6 @@ import collection.mutable class 
MaxSizeMap[K, V](maxSize: Int)(using o: Ordering[K]): - val sortedMap: mutable.TreeMap[K, V] = mutable.TreeMap.empty[K, V](o) + val sortedMap: mutable.TreeMap[K, V] = mutable.TreeMap.empty[K, V](using o) export sortedMap._ diff --git a/tests/pos/implicit-prefix-disambiguation.scala b/tests/pos/implicit-prefix-disambiguation.scala new file mode 100644 index 000000000000..f7843e7f5831 --- /dev/null +++ b/tests/pos/implicit-prefix-disambiguation.scala @@ -0,0 +1,17 @@ + +class I[X] +class J[X] + +trait A: + given I[B] = ??? + given (using I[B]): J[B] = ??? +object A extends A + +trait B extends A +object B extends B + +//import B.given, A.given + +def Test = + summon[I[B]] + summon[J[B]] diff --git a/tests/pos/interleaving-chainedParams.scala b/tests/pos/interleaving-chainedParams.scala index e502888d97c8..a54885d28002 100644 --- a/tests/pos/interleaving-chainedParams.scala +++ b/tests/pos/interleaving-chainedParams.scala @@ -5,7 +5,7 @@ object chainedParams{ trait Chain{ type Tail <: Chain } - + def f[C1 <: Chain](c1: C1)[C2 <: c1.Tail](c2: C2)[C3 <: c2.Tail](c3: C3): c3.Tail = ??? val self = new Chain{ type Tail = this.type } diff --git a/tests/pos/into-bigint.scala b/tests/pos/into-bigint.scala new file mode 100644 index 000000000000..d7ecee40b3ba --- /dev/null +++ b/tests/pos/into-bigint.scala @@ -0,0 +1,21 @@ +import language.experimental.into + +class BigInt(x: Int): + def + (other: into BigInt): BigInt = ??? + def * (other: into BigInt): BigInt = ??? + +object BigInt: + given Conversion[Int, BigInt] = BigInt(_) + + extension (x: into BigInt) + def + (other: BigInt): BigInt = ??? + def * (other: BigInt): BigInt = ??? 
+ +@main def Test = + val x = BigInt(2) + val y = 3 + val a1 = x + y + val a2 = y * x + val a3 = x * x + val a4 = y + y + diff --git a/tests/pos/kind-projector-underscores.scala b/tests/pos/kind-projector-underscores.scala index f72a300a64eb..6f4349a8ec7c 100644 --- a/tests/pos/kind-projector-underscores.scala +++ b/tests/pos/kind-projector-underscores.scala @@ -1,4 +1,4 @@ -//> using options -Ykind-projector:underscores +//> using options -Xkind-projector:underscores package kind_projector diff --git a/tests/pos/kind-projector.scala b/tests/pos/kind-projector.scala index 4d6ec8c932a9..7e4a2c7f5c1b 100644 --- a/tests/pos/kind-projector.scala +++ b/tests/pos/kind-projector.scala @@ -1,4 +1,4 @@ -//> using options -Ykind-projector +//> using options -Xkind-projector package kind_projector diff --git a/tests/pos/match-type-extract-matchalias.scala b/tests/pos/match-type-extract-matchalias.scala new file mode 100644 index 000000000000..b15f87fb271c --- /dev/null +++ b/tests/pos/match-type-extract-matchalias.scala @@ -0,0 +1,11 @@ +trait Base: + type Value +trait Sub[T <: Tuple] extends Base: + type Value = Tuple.Head[T] +object Base: + type BaseOf[V] = Base { type Value = V } + type ExtractValue[B <: Base] = B match + case BaseOf[v] => v + +class Test: + val test: Base.ExtractValue[Sub[Int *: EmptyTuple]] = 1 diff --git a/tests/pos/match-type-printf.scala b/tests/pos/match-type-printf.scala new file mode 100644 index 000000000000..cf1ff043e310 --- /dev/null +++ b/tests/pos/match-type-printf.scala @@ -0,0 +1,19 @@ +import scala.compiletime.ops.int.+ +import scala.compiletime.ops.string.{CharAt, Length, Substring} +import scala.Tuple.* + +type ArgTypes[S <: String] <: Tuple = S match + case "" => EmptyTuple + case _ => + CharAt[S, 0] match + case '%' => + CharAt[S, 1] match + case 'd' => Int *: ArgTypes[Substring[S, 2, Length[S]]] + case 's' => String *: ArgTypes[Substring[S, 2, Length[S]]] + case _ => ArgTypes[Substring[S, 1, Length[S]]] + +def printf(s: String)(t: 
ArgTypes[s.type]): Unit = () + +def test() = + printf("%s is %d")(("Ada", 36)) // works in Scala 3.2.0, 3.3.0 and 3.4.0 + printf("%s is lorem %d")(("Ada", 36)) // works in Scala 3.4.0 but fails in Scala 3.2.0 and 3.3.0 diff --git a/tests/pos/mt-deskolemize.scala b/tests/pos/mt-deskolemize.scala new file mode 100644 index 000000000000..abd61d9d55e6 --- /dev/null +++ b/tests/pos/mt-deskolemize.scala @@ -0,0 +1,57 @@ +//> using options -language:experimental.betterMatchTypeExtractors + +trait Expr: + type Value + +object Expr: + type Of[V] = Expr { type Value = V } + type ExtractValue[F <: Expr] = F match + case Expr.Of[v] => v +import Expr.ExtractValue + +class Prim extends Expr: + type Value = Alias + type Alias = BigInt + +class VecExpr[E <: Expr] extends Expr: + type Value = Vector[ExtractValue[E]] + +trait Description: + type Elem <: Tuple + +trait ProdExpr extends Expr: + val description: Description + type Value = Tuple.Map[description.Elem, [X] =>> ExtractValue[X & Expr]] + +class MyExpr1 extends ProdExpr: + final val description = new Description: + type Elem = (VecExpr[Prim], Prim) + +class MyExpr2 extends ProdExpr: + final val description = new Description: + type Elem = (VecExpr[VecExpr[MyExpr1]], Prim) + +trait ProdExprAlt[T <: Tuple] extends Expr: + type Value = Tuple.Map[T, [X] =>> ExtractValue[X & Expr]] + +class MyExpr3 extends ProdExprAlt[(Prim, VecExpr[Prim], Prim)] + +trait Constable[E <: Expr]: + def lit(v: ExtractValue[E]): E +object Constable: + given [E <: Expr]: Constable[E] = ??? 
+ +object Test: + def fromLiteral[E <: Expr : Constable](v: ExtractValue[E]): E = + summon[Constable[E]].lit(v) + val a: Prim = fromLiteral(1) + val b: VecExpr[Prim] = fromLiteral(Vector(1)) + val c: MyExpr1 = fromLiteral((Vector(1), 1)) + val d: MyExpr2 = fromLiteral(Vector(Vector((Vector(1), 1))), 2) + val e: MyExpr3 = fromLiteral((1, Vector(1), 1)) + val f: ProdExprAlt[(MyExpr1, VecExpr[MyExpr3])] = fromLiteral: + ( + (Vector(1), 1), + Vector((1, Vector(1), 1), (2, Vector(1), 2)) + ) + val g: Expr { type Alias = Int; type Value = Alias } = fromLiteral(1) diff --git a/tests/pos/named-tuple-widen.scala b/tests/pos/named-tuple-widen.scala new file mode 100644 index 000000000000..410832e04c17 --- /dev/null +++ b/tests/pos/named-tuple-widen.scala @@ -0,0 +1,9 @@ +import language.experimental.namedTuples + +class A +class B +val y1: (a1: A, b1: B) = ??? +val y2: (a2: A, b2: B) = ??? +var z1 = if ??? then y1 else y2 // -- what is the type of z2 +var z2: NamedTuple.AnyNamedTuple = z1 +val _ = z1 = z2 \ No newline at end of file diff --git a/tests/pos/named-tuples-strawman.scala b/tests/pos/named-tuples-strawman.scala new file mode 100644 index 000000000000..35675d1bfc76 --- /dev/null +++ b/tests/pos/named-tuples-strawman.scala @@ -0,0 +1,49 @@ +// Currently does not compile because of #19434 +object Test: + + object Named: + opaque type Named[name <: String & Singleton, A] >: A = A + def apply[S <: String & Singleton, A](name: S, x: A): Named[name.type, A] = x + extension [name <: String & Singleton, A](named: Named[name, A]) def value: A = named + import Named.* + + type DropNames[T <: Tuple] = T match + case Named[_, x] *: xs => x *: DropNames[xs] + case _ => T + + extension [T <: Tuple](x: T) def toTuple: DropNames[T] = + x.asInstanceOf // named and unnamed tuples have the same runtime representation + + val name = "hi" + val named = Named(name, 33) // ok, but should be rejectd + + inline val name2 = "hi" + val named2 = Named(name2, 33) // ok, but should be rejectd + 
val _: Named["hi", Int] = named2 + + var x = (Named("name", "Bob"), Named("age", 33)) + + val y: (String, Int) = x.toTuple + + x = y + + val z = y.toTuple + + type PersonInfo = (Named["name", String], Named["age", Int]) + type AddressInfo = (Named["city", String], Named["zip", Int]) + + val ok1: (Named["name", String], Named["age", Int]) = x + val ok2: PersonInfo = y + //val err1: (Named["bad", String], Named["age", Int]) = x // error + val err2: (Named["bad", String], Named["age", Int]) = x.toTuple // ok + val ok3: (Named["bad", String], Named["age", Int]) = y // ok + + val addr = (Named("city", "Lausanne"), Named("zip", 1003)) + val _: AddressInfo = addr + + type CombinedInfo = Tuple.Concat[PersonInfo, AddressInfo] + + val combined: CombinedInfo = x ++ addr + +// val person = (name = "Bob", age = 33): (name: String, age: Int) +// person.age diff --git a/tests/pos/named-tuples1.scala b/tests/pos/named-tuples1.scala new file mode 100644 index 000000000000..58e3fc065e61 --- /dev/null +++ b/tests/pos/named-tuples1.scala @@ -0,0 +1,13 @@ +import annotation.experimental +import language.experimental.namedTuples + +@main def Test = + val bob = (name = "Bob", age = 33): (name: String, age: Int) + val persons = List( + bob, + (name = "Bill", age = 40), + (name = "Lucy", age = 45) + ) + val ages = persons.map(_.age) + // pickling failure: matchtype is reduced after pickling, unreduced before. + assert(ages.sum == 118) diff --git a/tests/pos/namedtuple-src-incompat.scala b/tests/pos/namedtuple-src-incompat.scala new file mode 100644 index 000000000000..57451a4321b7 --- /dev/null +++ b/tests/pos/namedtuple-src-incompat.scala @@ -0,0 +1,17 @@ +import language.experimental.namedTuples +var age = 22 +val x = (age = 1) +val _: (age: Int) = x +val x2 = {age = 1} +val _: Unit = x2 + +class C: + infix def id[T](age: T): T = age + +def test = + val c: C = ??? 
+ val y = c id (age = 1) + val _: (age: Int) = y + val y2 = c.id(age = 1) + val _: Int = y2 + diff --git a/tests/pos/not-looping-implicit.scala b/tests/pos/not-looping-implicit.scala index 90fba9f807a7..ebaf25e760f2 100644 --- a/tests/pos/not-looping-implicit.scala +++ b/tests/pos/not-looping-implicit.scala @@ -24,10 +24,10 @@ object Schema { inline summonInline[Mirror.Of[A]] match { case m: Mirror.SumOf[A] => lazy val members = recurse[m.MirroredElemLabels, m.MirroredElemTypes]() - new Schema[A] {} + ??? case m: Mirror.ProductOf[A] => lazy val fields = recurse[m.MirroredElemLabels, m.MirroredElemTypes]() - new Schema[A] {} + ??? } inline given gen[A]: Schema[A] = derived[A] diff --git a/tests/pos-special/fatal-warnings/nowarnannot.scala b/tests/pos/nowarnannot.scala similarity index 66% rename from tests/pos-special/fatal-warnings/nowarnannot.scala rename to tests/pos/nowarnannot.scala index 26e9713d0543..1710ae34b56f 100644 --- a/tests/pos-special/fatal-warnings/nowarnannot.scala +++ b/tests/pos/nowarnannot.scala @@ -1,3 +1,5 @@ +//> using options -Xfatal-warnings -Wvalue-discard + case class F(i: Int) object Main { diff --git a/tests/pos/ord-over-tracked.scala b/tests/pos/ord-over-tracked.scala new file mode 100644 index 000000000000..a9b4aba556e1 --- /dev/null +++ b/tests/pos/ord-over-tracked.scala @@ -0,0 +1,15 @@ +import language.experimental.modularity + +trait Ord[T]: + def lt(x: T, y: T): Boolean + +given Ord[Int] = ??? + +case class D(tracked val x: Int) +given [T <: D]: Ord[T] = (a, b) => a.x < b.x + +def mySort[T: Ord](x: Array[T]): Array[T] = ??? 
+ +def test = + val arr = Array(D(1)) + val arr1 = mySort(arr) // error: no given instance of type Ord[D{val x: (1 : Int)}] \ No newline at end of file diff --git a/tests/pos/overload-disambiguation.scala b/tests/pos/overload-disambiguation.scala new file mode 100644 index 000000000000..58b085758d92 --- /dev/null +++ b/tests/pos/overload-disambiguation.scala @@ -0,0 +1,13 @@ +class A +class B +class C[-T] + +def foo(using A): C[Any] = ??? +def foo(using B): C[Int] = ??? + + +@main def Test = + given A = A() + given B = B() + val x = foo + val _: C[Any] = x diff --git a/tests/pos/overrides.scala b/tests/pos/overrides.scala index 146dc06c76a9..c3b6235d7c1f 100644 --- a/tests/pos/overrides.scala +++ b/tests/pos/overrides.scala @@ -1,7 +1,9 @@ +//> using options -experimental + class A[T] { def f(x: T)(y: T = x) = y - + import scala.language.experimental.clauseInterleaving def b[U <: T](x: Int)[V >: T](y: String) = false diff --git a/tests/pos/parent-refinement.scala b/tests/pos/parent-refinement.scala new file mode 100644 index 000000000000..eaa74228c5d6 --- /dev/null +++ b/tests/pos/parent-refinement.scala @@ -0,0 +1,48 @@ +//> using options -source future -language:experimental.modularity + +class A +class B extends A +class C extends B + +trait Id { type Value } +type IdOf[T] = Id { type Value = T } +trait X { type Value } + +case class Year(value: Int) extends IdOf[Int]: + val x: Value = 2 + +type Between[Lo, Hi] = X { type Value >: Lo <: Hi } + +class Foo() extends IdOf[B], Between[C, A]: + val x: Value = B() + +trait Bar extends IdOf[Int], (X { type Value = String }) + +class Baz extends IdOf[Int]: + type Value = String + val x: Value = "" + +trait Gen: + type T + val x: T + +type IntInst = Gen: + type T = Int + val x: 0 + +trait IntInstTrait extends IntInst + +abstract class IntInstClass extends IntInstTrait, IntInst + +object obj1 extends IntInstTrait: + val x = 0 + +object obj2 extends IntInstClass: + val x = 0 + +def main = + val x: obj1.T = 2 - obj2.x + val 
y: obj2.T = 2 - obj1.x + + + diff --git a/tests/pos/parsercombinators-arrow.scala b/tests/pos/parsercombinators-arrow.scala new file mode 100644 index 000000000000..f8bec02067e5 --- /dev/null +++ b/tests/pos/parsercombinators-arrow.scala @@ -0,0 +1,48 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator: + + type Self + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: Self) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given [C, E] => Apply[C, E] is Combinator: + type Context = C + type Element = E + extension(self: Apply[C, E]) + def parse(context: C): Option[E] = self.action(context) + +given [A: Combinator, B: Combinator { type Context = A.Context }] + => Combine[A, B] is Combinator: + type Context = A.Context + type Element = (A.Element, B.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? 
+ +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked diff --git a/tests/pos/parsercombinators-ctx-bounds.scala b/tests/pos/parsercombinators-ctx-bounds.scala new file mode 100644 index 000000000000..d77abea5e539 --- /dev/null +++ b/tests/pos/parsercombinators-ctx-bounds.scala @@ -0,0 +1,49 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator[Apply[C, E]] with { + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A: Combinator, B: [X] =>> Combinator[X] { type Context = A.Context }] + : Combinator[Combine[A, B]] with + type Context = A.Context + type Element = (A.Element, B.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? 
+ +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked +} diff --git a/tests/pos/parsercombinators-expanded.scala b/tests/pos/parsercombinators-expanded.scala new file mode 100644 index 000000000000..cf8137bfe8eb --- /dev/null +++ b/tests/pos/parsercombinators-expanded.scala @@ -0,0 +1,64 @@ +//> using options -source future -language:experimental.modularity + +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. + def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +object test: + + class apply[C, E] extends Combinator[Apply[C, E]]: + type Context = C + type Element = E + extension(self: Apply[C, E]) + def parse(context: C): Option[E] = self.action(context) + + def apply[C, E]: apply[C, E] = new apply[C, E] + + class combine[A, B]( + tracked val f: Combinator[A], + tracked val s: Combinator[B] { type Context = f.Context} + ) extends Combinator[Combine[A, B]]: + type Context = f.Context + type Element = (f.Element, s.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? 
+ + def combine[A, B]( + _f: Combinator[A], + _s: Combinator[B] { type Context = _f.Context} + ) = new combine[A, B](_f, _s) + // cast is needed since the type of new combine[A, B](_f, _s) + // drops the required refinement. + + extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + + @main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val c = combine( + apply[mutable.ListBuffer[Int], Int], + apply[mutable.ListBuffer[Int], Int] + ) + val r = c.parse(m)(stream) // was type mismatch, now OK + val rc: Option[(Int, Int)] = r + } diff --git a/tests/pos/parsercombinators-givens-2.scala b/tests/pos/parsercombinators-givens-2.scala new file mode 100644 index 000000000000..8349d69a30af --- /dev/null +++ b/tests/pos/parsercombinators-givens-2.scala @@ -0,0 +1,52 @@ +//> using options -source future -language:experimental.modularity + +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. 
+ def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator[Apply[C, E]] with { + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A, B, C](using + f: Combinator[A] { type Context = C }, + s: Combinator[B] { type Context = C } +): Combinator[Combine[A, B]] with { + type Context = f.Context + type Element = (f.Element, s.Element) + extension(self: Combine[A, B]) { + def parse(context: Context): Option[Element] = ??? + } +} + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // works, but Element type is not resolved correctly +} diff --git a/tests/pos/parsercombinators-givens.scala b/tests/pos/parsercombinators-givens.scala new file mode 100644 index 000000000000..5b5588c93840 --- /dev/null +++ b/tests/pos/parsercombinators-givens.scala @@ -0,0 +1,54 @@ +//> using options -source future -language:experimental.modularity + +import collection.mutable + +/// A parser combinator. +trait Combinator[T]: + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: T) + /// Parses and returns an element from `context`. 
+ def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator[Apply[C, E]] with { + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A, B](using + tracked val f: Combinator[A], + tracked val s: Combinator[B] { type Context = f.Context } +): Combinator[Combine[A, B]] with { + type Context = f.Context + type Element = (f.Element, s.Element) + extension(self: Combine[A, B]) { + def parse(context: Context): Option[Element] = ??? + } +} + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked +} diff --git a/tests/pos/parsercombinators-new-syntax.scala b/tests/pos/parsercombinators-new-syntax.scala new file mode 100644 index 000000000000..f984972b915d --- /dev/null +++ b/tests/pos/parsercombinators-new-syntax.scala @@ -0,0 +1,45 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator: + type Self + type Input + type Result + + extension (self: Self) + /// Parses and returns an element from input `in`. 
+ def parse(in: Input): Option[Result] +end Combinator + +case class Apply[I, R](action: I => Option[R]) +case class Combine[A, B](first: A, second: B) + +given [I, R] => Apply[I, R] is Combinator: + type Input = I + type Result = R + extension (self: Apply[I, R]) + def parse(in: I): Option[R] = self.action(in) + +given [A: Combinator, B: Combinator { type Input = A.Input }] + => Combine[A, B] is Combinator: + type Input = A.Input + type Result = (A.Result, B.Result) + extension (self: Combine[A, B]) + def parse(in: Input): Option[Result] = + for x <- self.first.parse(in); y <- self.second.parse(in) yield (x, y) + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // was error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Input` + val rc: Option[(Int, Int)] = r + diff --git a/tests/pos/parsercombinators-this.scala b/tests/pos/parsercombinators-this.scala new file mode 100644 index 000000000000..70b423985400 --- /dev/null +++ b/tests/pos/parsercombinators-this.scala @@ -0,0 +1,53 @@ +//> using options -language:experimental.modularity -source future +import collection.mutable + +/// A parser combinator. +trait Combinator: + + type Self + + /// The context from which elements are being parsed, typically a stream of tokens. + type Context + /// The element being parsed. + type Element + + extension (self: Self) + /// Parses and returns an element from `context`. 
+ def parse(context: Context): Option[Element] +end Combinator + +final case class Apply[C, E](action: C => Option[E]) +final case class Combine[A, B](first: A, second: B) + +given apply[C, E]: Combinator with { + type Self = Apply[C, E] + type Context = C + type Element = E + extension(self: Apply[C, E]) { + def parse(context: C): Option[E] = self.action(context) + } +} + +given combine[A: Combinator, B: Combinator { type Context = A.Context }] + : Combinator with + type Self = Combine[A, B] + type Context = A.Context + type Element = (A.Element, B.Element) + extension(self: Combine[A, B]) + def parse(context: Context): Option[Element] = ??? + +extension [A] (buf: mutable.ListBuffer[A]) def popFirst() = + if buf.isEmpty then None + else try Some(buf.head) finally buf.remove(0) + +@main def hello: Unit = { + val source = (0 to 10).toList + val stream = source.to(mutable.ListBuffer) + + val n = Apply[mutable.ListBuffer[Int], Int](s => s.popFirst()) + val m = Combine(n, n) + + val r = m.parse(stream) // error: type mismatch, found `mutable.ListBuffer[Int]`, required `?1.Context` + val rc: Option[(Int, Int)] = r + // it would be great if this worked +} diff --git a/tests/pos/phantom-Eq2/Phantom-Eq_2.scala b/tests/pos/phantom-Eq2/Phantom-Eq_2.scala index 87c6cc2275f1..f1535049a514 100644 --- a/tests/pos/phantom-Eq2/Phantom-Eq_2.scala +++ b/tests/pos/phantom-Eq2/Phantom-Eq_2.scala @@ -1,3 +1,4 @@ +//> using options -experimental /* This is a version of ../pos/phantom.scala that tests phantom clases with separate compilation */ object PhantomEq { diff --git a/tests/pos/precise-ctx-bound.scala b/tests/pos/precise-ctx-bound.scala new file mode 100644 index 000000000000..3f17a5b4a54e --- /dev/null +++ b/tests/pos/precise-ctx-bound.scala @@ -0,0 +1,51 @@ +//> using options -language:experimental.modularity -source future +object Test: + + class Wrap[T](x: T) + + def f0[T](x: T): Wrap[T] = Wrap(x) + val x0 = f0(1) + val _: Wrap[Int] = x0 + + def f1[T: Precise](x: T): Wrap[T] 
= Wrap(x) + def l = "hello".length + val x1 = Wrap(l) + val _: Wrap[Int] = x1 + + def f2[T](x: T)(using Precise { type Self = T}): Wrap[T] = Wrap(x) + val x2 = f2(1) + val _: Wrap[1] = x2 + + def f3[T: Precise](x: T): Wrap[T] = Wrap(x) + val x3 = f3(identity(1)) + val _: Wrap[1] = x3 + val x3a = f3(1 + 2) + val _: Wrap[3] = x3a + + def f4[T](x: T)(using T is Precise): Wrap[T] = Wrap(x) + val x4 = f4(1) + val _: Wrap[1] = x4 + val x4a = f4(1 + 2) + val _: Wrap[3] = x4a + val y4 = f4(if ??? then 1 else 2) + val _: Wrap[1 | 2] = y4 + val z4 = f4(if ??? then B() else C()) + val _: Wrap[B | C] = z4 + trait A + class B extends A + class C extends A + + class C0[T](x: T): + def fld: T = x + val y0 = C0("hi") + val _: String = y0.fld + + class C2[T](x: T)(using T is Precise): + def fld: T = x + val y2 = C2(identity("hi")) + val _: "hi" = y2.fld + + class C3[T: Precise](x: T): + def fld: T = x + val y3 = C3("hi") + val _: "hi" = y3.fld diff --git a/tests/pos/precise-indexof.scala b/tests/pos/precise-indexof.scala new file mode 100644 index 000000000000..af1e6c5b504b --- /dev/null +++ b/tests/pos/precise-indexof.scala @@ -0,0 +1,46 @@ +//> using options -language:experimental.modularity -source future +import compiletime.* +import compiletime.ops.int.* + +/** The index of `Y` in tuple `X` as a literal constant Int, + * or `Size[X]` if `Y` does not occur in `X` + */ +type IndexOf[X <: Tuple, Y] <: Int = X match + case Y *: _ => 0 + case x *: xs => S[IndexOf[xs, Y]] + case EmptyTuple => 0 + +extension [X <: Tuple](inline x: X) + + /** The index (starting at 0) of the first element in the type `X` of `x` + * that matches type `Y`. + */ + inline def indexOfType[Y] = constValue[IndexOf[X, Y]] + + inline def indexOf[Y: Precise](y: Y) = constValue[IndexOf[X, Y]] + +// Note: without the Precise, the index calculation would go wrong. 
For instance, +// (1, 2, "hello", true).indexOf(2) would be 0, the same as (1, 2, "hello", true).indexOfType[Int] +// (1, 2, "hello", true).indexOf("foo") would be 2, the same as (1, 2, "hello", true).indexOfType[String] +// But we could alternatively pick Singleton + +@main def Test = + val t: (1, 2, "hello", true) = (1, 2, "hello", true) + val x1: 0 = t.indexOfType[1] + val x2: 1 = t.indexOfType[2] + val x3: 2 = t.indexOfType["hello"] + val x4: 3 = t.indexOfType[true] + val x5: 4 = t.indexOfType[77] + val x6: 0 = t.indexOfType[Int] + val x7: 2 = t.indexOfType[String] + val x8: 4 = t.indexOfType[Double] + + val y1: 0 = t.indexOf(1) + val y2: 1 = t.indexOf(2) + val y3: 2 = t.indexOf("hello") + val y4: 3 = t.indexOf(true) + val y5: 4 = t.indexOf(identity(77)) + val y6: 0 = t.indexOf(identity(1)) + val y7: 4 = t.indexOf("foo") + + diff --git a/tests/pos/scala-uri.scala b/tests/pos/scala-uri.scala new file mode 100644 index 000000000000..75ea2fc70d8a --- /dev/null +++ b/tests/pos/scala-uri.scala @@ -0,0 +1,22 @@ +// This works for implicit/implicit pairs but not for givens, see neg version. +import scala.language.implicitConversions + +trait QueryKey[A] +object QueryKey extends QueryKeyInstances +sealed trait QueryKeyInstances: + implicit val stringQueryKey: QueryKey[String] = ??? + +trait QueryValue[-A] +object QueryValue extends QueryValueInstances +sealed trait QueryValueInstances1: + implicit final val stringQueryValue: QueryValue[String] = ??? + implicit final val noneQueryValue: QueryValue[None.type] = ??? + +sealed trait QueryValueInstances extends QueryValueInstances1: + implicit final def optionQueryValue[A: QueryValue]: QueryValue[Option[A]] = ??? + +trait QueryKeyValue[A] +object QueryKeyValue: + implicit def tuple2QueryKeyValue[K: QueryKey, V: QueryValue]: QueryKeyValue[(K, V)] = ??? 
+ +@main def Test = summon[QueryKeyValue[(String, None.type)]] diff --git a/tests/pos/sets-tc.scala b/tests/pos/sets-tc.scala new file mode 100644 index 000000000000..86349bf6a405 --- /dev/null +++ b/tests/pos/sets-tc.scala @@ -0,0 +1,46 @@ +import language.experimental.modularity + +// First version: higher-kinded self type +object v1: + trait Set: + type Self[A] + def empty[A]: Self[A] + def union[A](self: Self[A], other: Self[A]): Self[A] + + case class ListSet[A](elems: List[A]) + + given ListSet is Set: + def empty[A]: ListSet[A] = ListSet(Nil) + + def union[A](self: ListSet[A], other: ListSet[A]): ListSet[A] = + ListSet(self.elems ++ other.elems) + + def listUnion[A, S[_]: Set](xs: List[S[A]]): S[A] = + xs.foldLeft(S.empty)(S.union) + + val xs = ListSet(List(1, 2, 3)) + val ys = ListSet(List(4, 5)) + val zs = listUnion(List(xs, ys)) + + // Second version: parameterized type class +object v2: + trait Set[A]: + type Self + def empty: Self + extension (s: Self) def union (other: Self): Self + + case class ListSet[A](elems: List[A]) + + given [A] => ListSet[A] is Set[A]: + def empty: ListSet[A] = ListSet(Nil) + + extension (self: ListSet[A]) def union(other: ListSet[A]): ListSet[A] = + ListSet(self.elems ++ other.elems) + + def listUnion[A, S: Set[A]](xs: List[S]): S = + xs.foldLeft(S.empty)(_ `union` _) + + val xs = ListSet(List(1, 2, 3)) + val ys = ListSet(List(4, 5)) + val zs = listUnion(List(xs, ys)) + diff --git a/tests/pos/singleton-ctx-bound.scala b/tests/pos/singleton-ctx-bound.scala new file mode 100644 index 000000000000..c6b0d2fb823c --- /dev/null +++ b/tests/pos/singleton-ctx-bound.scala @@ -0,0 +1,47 @@ +//> using options -language:experimental.modularity -source future +object Test: + + class Wrap[T](x: T) + + def f0[T](x: T): Wrap[T] = Wrap(x) + val x0 = f0(1) + val _: Wrap[Int] = x0 + + def f1[T <: Singleton](x: T): Wrap[T] = Wrap(x) + val x1 = f1(1) + val _: Wrap[1] = x1 + + def f2[T](x: T)(using Singleton { type Self = T}): Wrap[T] = Wrap(x) + 
val x2 = f2(1) + val _: Wrap[1] = x2 + + def f3[T: Singleton](x: T): Wrap[T] = Wrap(x) + val x3 = f3(1) + val _: Wrap[1] = x3 + + def f4[T](x: T)(using T is Singleton): Wrap[T] = Wrap(x) + val x4 = f4(1) + val _: Wrap[1] = x4 + + class C0[T](x: T): + def fld: T = x + val y0 = C0("hi") + val _: String = y0.fld + + class C1[T <: Singleton](x: T): + def fld: T = x + val y1 = C1("hi") + val _: "hi" = y1.fld + + class C2[T](x: T)(using T is Singleton): + def fld: T = x + val y2 = C2("hi") + val _: "hi" = y2.fld + + class C3[T: Singleton](x: T): + def fld: T = x + val y3 = C3("hi") + val _: "hi" = y3.fld + + + diff --git a/tests/pos/slick-migration-api-example.scala b/tests/pos/slick-migration-api-example.scala new file mode 100644 index 000000000000..3b6f1b4a82f4 --- /dev/null +++ b/tests/pos/slick-migration-api-example.scala @@ -0,0 +1,23 @@ +trait Migration +object Migration: + implicit class MigrationConcat[M <: Migration](m: M): + def &[N <: Migration, O](n: N)(implicit ccm: CanConcatMigrations[M, N, O]): O = ??? + +trait ReversibleMigration extends Migration +trait MigrationSeq extends Migration +trait ReversibleMigrationSeq extends MigrationSeq with ReversibleMigration + +trait ToReversible[-A <: Migration] +object ToReversible: + implicit val reversible: ToReversible[ReversibleMigration] = ??? +class CanConcatMigrations[-A, -B, +C] +trait CanConcatMigrationsLow: + implicit def default[A <: Migration, B <: Migration]: CanConcatMigrations[A, B, MigrationSeq] = ??? +object CanConcatMigrations extends CanConcatMigrationsLow: + implicit def reversible[A <: Migration, B <: Migration](implicit reverseA: ToReversible[A], + reverseB: ToReversible[B]): CanConcatMigrations[A, B, ReversibleMigrationSeq] = ??? + +@main def Test = + val rm: ReversibleMigration = ??? + val rms = rm & rm & rm + summon[rms.type <:< ReversibleMigrationSeq] // error Cannot prove that (rms : slick.migration.api.MigrationSeq) <:< slick.migration.api.ReversibleMigrationSeq. 
\ No newline at end of file diff --git a/tests/pos/source-import-3-7-migration.scala b/tests/pos/source-import-3-7-migration.scala new file mode 100644 index 000000000000..2e80fcb0bab2 --- /dev/null +++ b/tests/pos/source-import-3-7-migration.scala @@ -0,0 +1 @@ +import language.`3.7-migration` \ No newline at end of file diff --git a/tests/pos/source-import-3-7.scala b/tests/pos/source-import-3-7.scala new file mode 100644 index 000000000000..7fa68fd496f6 --- /dev/null +++ b/tests/pos/source-import-3-7.scala @@ -0,0 +1 @@ +import language.`3.7` \ No newline at end of file diff --git a/tests/pos/t5643.scala b/tests/pos/t5643.scala index 1ce34ba36226..9866f8d399c2 100644 --- a/tests/pos/t5643.scala +++ b/tests/pos/t5643.scala @@ -13,7 +13,7 @@ object TupledEvidenceTest { def f[T : GetResult] = "" - f[(String,String)](getTuple[(String, String)]) + f[(String,String)](using getTuple[(String, String)]) f[(String,String)] } diff --git a/tests/pos/tuple-ops.scala b/tests/pos/tuple-ops.scala new file mode 100644 index 000000000000..739b1ebeeb02 --- /dev/null +++ b/tests/pos/tuple-ops.scala @@ -0,0 +1,18 @@ +import language.experimental.namedTuples +import Tuple.* + +def test = + summon[Disjoint[(1, 2, 3), (4, 5)] =:= true] + summon[Disjoint[(1, 2, 6), (4, 5)] =:= true] + summon[Disjoint[(1, 2, 6), EmptyTuple] =:= true] + summon[Disjoint[EmptyTuple, EmptyTuple] =:= true] + + summon[Contains[(1, 2, 3), Int] =:= true] + summon[Contains[(1, 2, 3), 2] =:= true] + summon[Contains[(1, 2, 3), 4] =:= false] + + summon[Disjoint[(1, 2, 3), (4, 2)] =:= false] + summon[Disjoint[("a", "b"), ("b", "c")] =:= false] + summon[Disjoint[(1, 2, 6), Tuple1[2]] =:= false] + summon[Disjoint[Tuple1[3], (4, 3, 6)] =:= false] + diff --git a/tests/pos/tupled-function-instances.scala b/tests/pos/tupled-function-instances.scala index 3a3bc81b7426..b0af67aabfe4 100644 --- a/tests/pos/tupled-function-instances.scala +++ b/tests/pos/tupled-function-instances.scala @@ -1,4 +1,4 @@ -//> using options 
-experimental -Yno-experimental +//> using options -experimental import scala.util.TupledFunction object Test { diff --git a/tests/pos/typeclass-aggregates.scala b/tests/pos/typeclass-aggregates.scala new file mode 100644 index 000000000000..5e4551b226b7 --- /dev/null +++ b/tests/pos/typeclass-aggregates.scala @@ -0,0 +1,47 @@ +//> using options -source future -language:experimental.modularity +trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + + trait OrdProxy extends Ord: + export Ord.this.* + +trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait SemiGroupProxy extends SemiGroup: + export SemiGroup.this.* + +trait Monoid extends SemiGroup: + def unit: Self + + trait MonoidProxy extends Monoid: + export Monoid.this.* + +def ordWithMonoid(ord: Ord, monoid: Monoid{ type Self = ord.Self }): Ord & Monoid = + new ord.OrdProxy with monoid.MonoidProxy {} + +trait OrdWithMonoid extends Ord, Monoid + +def ordWithMonoid2(ord: Ord, monoid: Monoid{ type Self = ord.Self }) = //: OrdWithMonoid { type Self = ord.Self} = + new OrdWithMonoid with ord.OrdProxy with monoid.MonoidProxy {} + +given intOrd: (Ord { type Self = Int }) = ??? +given intMonoid: (Monoid { type Self = Int }) = ??? + +//given (using ord: Ord, monoid: Monoid{ type Self = ord.Self }): (Ord & Monoid { type Self = ord.Self}) = +// ordWithMonoid2(ord, monoid) + +val x = summon[Ord & Monoid { type Self = Int}] +val y: Int = ??? 
: x.Self + +// given [A, B](using ord: A is Ord, monoid: A is Monoid) => A is Ord & Monoid = +// new ord.OrdProxy with monoid.MonoidProxy {} + +given [A](using ord: Ord { type Self = A }, monoid: Monoid { type Self = A}): ((Ord & Monoid) { type Self = A}) = + new ord.OrdProxy with monoid.MonoidProxy {} + diff --git a/tests/pos/typeclasses-arrow.scala b/tests/pos/typeclasses-arrow.scala new file mode 100644 index 000000000000..379365ffa1c5 --- /dev/null +++ b/tests/pos/typeclasses-arrow.scala @@ -0,0 +1,140 @@ +//> using options -language:experimental.modularity -source future + +class Common: + + trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + def <= (y: Self): Boolean = compareTo(y) <= 0 + def >= (y: Self): Boolean = compareTo(y) >= 0 + def max(y: Self): Self = if x < y then y else x + + trait Show: + type Self + extension (x: Self) def show: String + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) +end Common + +object Instances extends Common: + + given Int is Ord as intOrd: + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + + given [T: Ord] => List[T] is Ord: + extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given List is Monad as listMonad: + extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def 
pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given [Ctx] => Reader[Ctx] is Monad as readerMonad: + extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [T](xs: List[T]) + def second = xs.tail.head + def third = xs.tail.tail.head + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given [T: Ord] => T is Ord as descending: + extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) + + def test(): Unit = + val xs = List(1, 2, 3) + println(maximum(xs)) + println(maximum(xs)(using descending)) + println(maximum(xs)(using descending(using intOrd))) + println(minimum(xs)) + +// Adapted from the Rust by Example book: https://doc.rust-lang.org/rust-by-example/trait.html +// +// lines words chars +// wc Scala: 28 105 793 +// wc Rust : 57 193 1466 + +trait Animal: + type Self + // Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self + + // Method signatures; these will return a string. + extension (self: Self) + def name: String + def noise: String + def talk(): Unit = println(s"$name, $noise") +end Animal + +class Sheep(val name: String): + var isNaked = false + def shear() = + if isNaked then + println(s"$name is already naked...") + else + println(s"$name gets a haircut!") + isNaked = true + +given Sheep is Animal: + def apply(name: String) = Sheep(name) + extension (self: Self) + def name: String = self.name + def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" + override def talk(): Unit = + println(s"$name pauses briefly... 
$noise") + +/* + + - In a type pattern, A <: T, A >: T, A: T, A: _ are all allowed and mean + T is a fresh type variable (T can start with a capital letter). + - instance definitions + - `as m` syntax in context bounds and instance definitions + +*/ diff --git a/tests/pos/typeclasses-arrow0.scala b/tests/pos/typeclasses-arrow0.scala new file mode 100644 index 000000000000..22d84fe6478d --- /dev/null +++ b/tests/pos/typeclasses-arrow0.scala @@ -0,0 +1,136 @@ +//> using options -language:experimental.modularity -source future + +class Common: + + trait Ord[A]: + extension (x: A) + def compareTo(y: A): Int + def < (y: A): Boolean = compareTo(y) < 0 + def > (y: A): Boolean = compareTo(y) > 0 + def <= (y: A): Boolean = compareTo(y) <= 0 + def >= (y: A): Boolean = compareTo(y) >= 0 + def max(y: A): A = if x < y then y else x + + trait Show[A]: + extension (x: A) def show: String + + trait SemiGroup[A]: + extension (x: A) def combine(y: A): A + + trait Monoid[A] extends SemiGroup[A]: + def unit: A + + trait Functor[F[_]]: + extension [A](x: F[A]) def map[B](f: A => B): F[B] + + trait Monad[F[_]] extends Functor[F]: + def pure[A](x: A): F[A] + extension [A](x: F[A]) + def flatMap[B](f: A => F[B]): F[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) +end Common + +object Instances extends Common: + + given Ord[Int] as intOrd: + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + + given [T: Ord] => Ord[List[T]]: + extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given Monad[List] as listMonad: + extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given [Ctx] => Monad[Reader[Ctx]] as readerMonad: + extension 
[A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [T](xs: List[T]) + def second = xs.tail.head + def third = xs.tail.tail.head + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given [T: Ord] => Ord[T] as descending: + extension (x: T) def compareTo(y: T) = summon[Ord[T]].compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) + + def test(): Unit = + val xs = List(1, 2, 3) + println(maximum(xs)) + println(maximum(xs)(using descending)) + println(maximum(xs)(using descending(using intOrd))) + println(minimum(xs)) + +// Adapted from the Rust by Example book: https://doc.rust-lang.org/rust-by-example/trait.html +// +// lines words chars +// wc Scala: 28 105 793 +// wc Rust : 57 193 1466 + +trait Animal[Self]: + + // Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self + + // Method signatures; these will return a string. + extension (self: Self) + def name: String + def noise: String + def talk(): Unit = println(s"$name, $noise") +end Animal + +class Sheep(val name: String): + var isNaked = false + def shear() = + if isNaked then + println(s"$name is already naked...") + else + println(s"$name gets a haircut!") + isNaked = true + +given Animal[Sheep]: + def apply(name: String) = Sheep(name) + extension (self: Sheep) + def name: String = self.name + def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" + override def talk(): Unit = + println(s"$name pauses briefly... $noise") + +/* + + - In a type pattern, A <: T, A >: T, A: T, A: _ are all allowed and mean + T is a fresh type variable (T can start with a capital letter). 
+ - instance definitions + - `as m` syntax in context bounds and instance definitions + +*/ diff --git a/tests/pos/typeclasses-this.scala b/tests/pos/typeclasses-this.scala new file mode 100644 index 000000000000..33ccb8d9d653 --- /dev/null +++ b/tests/pos/typeclasses-this.scala @@ -0,0 +1,141 @@ +//> using options -language:experimental.modularity -source future + +class Common: + + trait Ord: + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 + def <= (y: Self): Boolean = compareTo(y) <= 0 + def >= (y: Self): Boolean = compareTo(y) >= 0 + def max(y: Self): Self = if x < y then y else x + + trait Show: + type Self + extension (x: Self) def show: String + + trait SemiGroup: + type Self + extension (x: Self) def combine(y: Self): Self + + trait Monoid extends SemiGroup: + def unit: Self + + trait Functor: + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] + + trait Monad extends Functor: + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] + def map[B](f: A => B) = x.flatMap(f `andThen` pure) +end Common + +object Instances extends Common: + + given intOrd: (Int is Ord) with + extension (x: Int) + def compareTo(y: Int) = + if x < y then -1 + else if x > y then +1 + else 0 + +// given [T](using tracked val ev: Ord { type Self = T}): Ord { type Self = List[T] } with + given [T: Ord]: (List[T] is Ord) with + extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match + case (Nil, Nil) => 0 + case (Nil, _) => -1 + case (_, Nil) => +1 + case (x :: xs1, y :: ys1) => + val fst = x.compareTo(y) + if (fst != 0) fst else xs1.compareTo(ys1) + + given listMonad: (List is Monad) with + extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = + xs.flatMap(f) + def pure[A](x: A): List[A] = + List(x) + + type Reader[Ctx] = [X] =>> Ctx => X + + given readerMonad[Ctx]: (Reader[Ctx] is 
Monad) with + extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = + ctx => f(r(ctx))(ctx) + def pure[A](x: A): Ctx => A = + ctx => x + + extension (xs: Seq[String]) + def longestStrings: Seq[String] = + val maxLength = xs.map(_.length).max + xs.filter(_.length == maxLength) + + extension [T](xs: List[T]) + def second = xs.tail.head + def third = xs.tail.tail.head + + extension [M[_]: Monad, A](xss: M[M[A]]) + def flatten: M[A] = + xss.flatMap(identity) + + def maximum[T: Ord](xs: List[T]): T = + xs.reduce(_ `max` _) + + given descending[T: Ord]: (T is Ord) with + extension (x: T) def compareTo(y: T) = T.compareTo(y)(x) + + def minimum[T: Ord](xs: List[T]) = + maximum(xs)(using descending) + + def test(): Unit = + val xs = List(1, 2, 3) + println(maximum(xs)) + println(maximum(xs)(using descending)) + println(maximum(xs)(using descending(using intOrd))) + println(minimum(xs)) + +// Adapted from the Rust by Example book: https://doc.rust-lang.org/rust-by-example/trait.html +// +// lines words chars +// wc Scala: 28 105 793 +// wc Rust : 57 193 1466 + +trait Animal: + type Self + // Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self + + // Method signatures; these will return a string. + extension (self: Self) + def name: String + def noise: String + def talk(): Unit = println(s"$name, $noise") +end Animal + +class Sheep(val name: String): + var isNaked = false + def shear() = + if isNaked then + println(s"$name is already naked...") + else + println(s"$name gets a haircut!") + isNaked = true + +given Sheep is Animal with + def apply(name: String) = Sheep(name) + extension (self: Self) + def name: String = self.name + def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" + override def talk(): Unit = + println(s"$name pauses briefly... 
$noise") + +/* + + - In a type pattern, A <: T, A >: T, A: T, A: _ are all allowed and mean + T is a fresh type variable (T can start with a capital letter). + - instance definitions + - `as m` syntax in context bounds and instance definitions + +*/ diff --git a/tests/pos/typeclasses.scala b/tests/pos/typeclasses.scala index 07fe5a31ce5d..d0315a318310 100644 --- a/tests/pos/typeclasses.scala +++ b/tests/pos/typeclasses.scala @@ -1,66 +1,45 @@ -class Common: +//> using options -source future -language:experimental.modularity - // this should go in Predef - infix type at [A <: { type This}, B] = A { type This = B } +class Common: trait Ord: - type This - extension (x: This) - def compareTo(y: This): Int - def < (y: This): Boolean = compareTo(y) < 0 - def > (y: This): Boolean = compareTo(y) > 0 + type Self + extension (x: Self) + def compareTo(y: Self): Int + def < (y: Self): Boolean = compareTo(y) < 0 + def > (y: Self): Boolean = compareTo(y) > 0 trait SemiGroup: - type This - extension (x: This) def combine(y: This): This + type Self + extension (x: Self) def combine(y: Self): Self trait Monoid extends SemiGroup: - def unit: This + def unit: Self trait Functor: - type This[A] - extension [A](x: This[A]) def map[B](f: A => B): This[B] + type Self[A] + extension [A](x: Self[A]) def map[B](f: A => B): Self[B] trait Monad extends Functor: - def pure[A](x: A): This[A] - extension [A](x: This[A]) - def flatMap[B](f: A => This[B]): This[B] + def pure[A](x: A): Self[A] + extension [A](x: Self[A]) + def flatMap[B](f: A => Self[B]): Self[B] def map[B](f: A => B) = x.flatMap(f `andThen` pure) + end Common object Instances extends Common: -/* - instance Int: Ord as intOrd with + given intOrd: (Int is Ord) with + type Self = Int extension (x: Int) def compareTo(y: Int) = if x < y then -1 else if x > y then +1 else 0 -*/ - given intOrd: Ord with - type This = Int - extension (x: Int) - def compareTo(y: Int) = - if x < y then -1 - else if x > y then +1 - else 0 -/* - instance 
List[T: Ord]: Ord as listOrd with - extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match - case (Nil, Nil) => 0 - case (Nil, _) => -1 - case (_, Nil) => +1 - case (x :: xs1, y :: ys1) => - val fst = x.compareTo(y) - if (fst != 0) fst else xs1.compareTo(ys1) -*/ - // Proposed short syntax: - // given listOrd[T: Ord as ord]: Ord at T with - given listOrd[T](using ord: Ord { type This = T}): Ord with - type This = List[T] + given listOrd[T](using ord: T is Ord): (List[T] is Ord) with extension (xs: List[T]) def compareTo(ys: List[T]): Int = (xs, ys) match case (Nil, Nil) => 0 case (Nil, _) => -1 @@ -70,32 +49,18 @@ object Instances extends Common: if (fst != 0) fst else xs1.compareTo(ys1) end listOrd -/* - instance List: Monad as listMonad with + given listMonad: (List is Monad) with extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = xs.flatMap(f) def pure[A](x: A): List[A] = List(x) -*/ - given listMonad: Monad with - type This[A] = List[A] - extension [A](xs: List[A]) def flatMap[B](f: A => List[B]): List[B] = - xs.flatMap(f) - def pure[A](x: A): List[A] = - List(x) -/* - type Reader[Ctx] = X =>> Ctx => X - instance Reader[Ctx: _]: Monad as readerMonad with - extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = - ctx => f(r(ctx))(ctx) - def pure[A](x: A): Ctx => A = - ctx => x -*/ + type Reader[Ctx] = [X] =>> Ctx => X - given readerMonad[Ctx]: Monad with - type This[X] = Ctx => X + //given [Ctx] => Reader[Ctx] is Monad as readerMonad: + + given readerMonad[Ctx]: (Reader[Ctx] is Monad) with extension [A](r: Ctx => A) def flatMap[B](f: A => Ctx => B): Ctx => B = ctx => f(r(ctx))(ctx) def pure[A](x: A): Ctx => A = @@ -110,29 +75,17 @@ object Instances extends Common: def second = xs.tail.head def third = xs.tail.tail.head - //Proposed short syntax: - //extension [M: Monad as m, A](xss: M[M[A]]) - // def flatten: M[A] = - // xs.flatMap(identity) - - extension [M, A](using m: Monad)(xss: m.This[m.This[A]]) - def 
flatten: m.This[A] = + extension [M, A](using m: Monad)(xss: m.Self[m.Self[A]]) + def flatten: m.Self[A] = xss.flatMap(identity) - // Proposed short syntax: - //def maximum[T: Ord](xs: List[T]: T = - def maximum[T](xs: List[T])(using Ord at T): T = + def maximum[T](xs: List[T])(using T is Ord): T = xs.reduceLeft((x, y) => if (x < y) y else x) - // Proposed short syntax: - // def descending[T: Ord as asc]: Ord at T = new Ord: - def descending[T](using asc: Ord at T): Ord at T = new Ord: - type This = T + def descending[T](using asc: T is Ord): T is Ord = new: extension (x: T) def compareTo(y: T) = asc.compareTo(y)(x) - // Proposed short syntax: - // def minimum[T: Ord](xs: List[T]) = - def minimum[T](xs: List[T])(using Ord at T) = + def minimum[T](xs: List[T])(using T is Ord) = maximum(xs)(using descending) def test(): Unit = @@ -148,12 +101,12 @@ object Instances extends Common: // wc Scala: 30 115 853 // wc Rust : 57 193 1466 trait Animal: - type This - // Associated function signature; `This` refers to the implementor type. - def apply(name: String): This + type Self + // Associated function signature; `Self` refers to the implementor type. + def apply(name: String): Self // Method signatures; these will return a string. - extension (self: This) + extension (self: Self) def name: String def noise: String def talk(): Unit = println(s"$name, $noise") @@ -171,7 +124,7 @@ class Sheep(val name: String): /* instance Sheep: Animal with def apply(name: String) = Sheep(name) - extension (self: This) + extension (self: Self) def name: String = self.name def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" override def talk(): Unit = @@ -179,10 +132,9 @@ instance Sheep: Animal with */ // Implement the `Animal` trait for `Sheep`. 
-given Animal with - type This = Sheep +given (Sheep is Animal) with def apply(name: String) = Sheep(name) - extension (self: This) + extension (self: Self) def name: String = self.name def noise: String = if self.isNaked then "baaaaah?" else "baaaaah!" override def talk(): Unit = diff --git a/tests/pos/unapplied-types.scala b/tests/pos/unapplied-types.scala new file mode 100644 index 000000000000..604e63deb8ad --- /dev/null +++ b/tests/pos/unapplied-types.scala @@ -0,0 +1,7 @@ +trait T { + type L[X] = List[X] + type T1 <: L // was error: takes type parameters + type T2 = L // was error: takes type parameters + type T3 = List // was error: takes type parameters + type T4 <: List // was error: takes type parameters +} diff --git a/tests/printing/export-param-flags.check b/tests/printing/export-param-flags.check new file mode 100644 index 000000000000..ffab6f77c93d --- /dev/null +++ b/tests/printing/export-param-flags.check @@ -0,0 +1,13 @@ +[[syntax trees at end of typer]] // tests/printing/export-param-flags.scala +package { + final lazy module val A: A = new A() + final module class A() extends Object() { this: A.type => + inline def inlinedParam(inline x: Int): Int = x.+(x):Int + } + final lazy module val Exported: Exported = new Exported() + final module class Exported() extends Object() { this: Exported.type => + export A.* + final inline def inlinedParam(inline x: Int): Int = A.inlinedParam(x) + } +} + diff --git a/tests/printing/export-param-flags.scala b/tests/printing/export-param-flags.scala new file mode 100644 index 000000000000..cad2d3c8fee8 --- /dev/null +++ b/tests/printing/export-param-flags.scala @@ -0,0 +1,5 @@ +object A: + inline def inlinedParam(inline x: Int): Int = x + x + +object Exported: + export A.* diff --git a/tests/rewrites/i20002.check b/tests/rewrites/i20002.check new file mode 100644 index 000000000000..70c9411eb4b2 --- /dev/null +++ b/tests/rewrites/i20002.check @@ -0,0 +1,51 @@ +object Reactions: + def main: Unit = + Reactions += { 
+ case 0 => + case 1 => + } + + Reactions run: + case 0 => + case 1 => + + Reactions run_+ : + case 0 => + case 1 => + + Reactions `+=`: + case 0 => + case 1 => + + def bar: Int = ??? + + bar match + case 0 => + case 1 => + + def partPartial(i: Int): PartialFunction[Int, Unit] = + case `i` => + + Reactions += { + val pp1 = partPartial(1) + val pp2 = partPartial(2) + def codeBlock = + ??? + ??? + pp1 orElse pp2 + } + + val partialFunction = partPartial(1) orElse partPartial(2) + Reactions += { + partialFunction + } + + def +=(f: PartialFunction[Int, Unit]) = + ??? + + def run (f: PartialFunction[Int, Unit]) = + ??? + + def run_+ (f: PartialFunction[Int, Unit]) = + ??? + diff --git a/tests/rewrites/i20002.scala b/tests/rewrites/i20002.scala new file mode 100644 index 000000000000..56ea023b63b0 --- /dev/null +++ b/tests/rewrites/i20002.scala @@ -0,0 +1,62 @@ +object Reactions { + def main: Unit = { + Reactions += { + case 0 => + case 1 => + } + + Reactions run { + case 0 => + case 1 => + } + + Reactions run_+ { + case 0 => + case 1 => + } + + Reactions `+=` { + case 0 => + case 1 => + } + + def bar: Int = ??? + + bar match { + case 0 => + case 1 => + } + + def partPartial(i: Int): PartialFunction[Int, Unit] = { + case `i` => + } + + Reactions += { + val pp1 = partPartial(1) + val pp2 = partPartial(2) + def codeBlock = { + ??? + ??? + } + pp1 orElse pp2 + } + + val partialFunction = partPartial(1) orElse partPartial(2) + Reactions += { + partialFunction + } + } + + def +=(f: PartialFunction[Int, Unit]) = { + ??? + } + + def run (f: PartialFunction[Int, Unit]) = { + ??? + } + + def run_+ (f: PartialFunction[Int, Unit]) = { + ??? 
+ } + +} diff --git a/tests/run-with-compiler/scala2-library-from-tasty-jar.scala b/tests/run-custom-args/scala2-library-from-tasty-jar.scala similarity index 98% rename from tests/run-with-compiler/scala2-library-from-tasty-jar.scala rename to tests/run-custom-args/scala2-library-from-tasty-jar.scala index 913cf7dc24fc..f6d7b7b87d5b 100644 --- a/tests/run-with-compiler/scala2-library-from-tasty-jar.scala +++ b/tests/run-custom-args/scala2-library-from-tasty-jar.scala @@ -32,7 +32,7 @@ def scalaLibClassesPath = lazy val scalaLibTastyPaths = new Directory(scalaLibClassesPath).deepFiles - .filter(_.`extension` == "tasty") + .filter(_.ext.isTasty) .map(_.normalize.path.stripPrefix(scalaLibClassesPath.toString + separator)) .toList diff --git a/tests/run-with-compiler/scala2-library-from-tasty.scala b/tests/run-custom-args/scala2-library-from-tasty.scala similarity index 98% rename from tests/run-with-compiler/scala2-library-from-tasty.scala rename to tests/run-custom-args/scala2-library-from-tasty.scala index ee2ec8951701..c3a52ea95ae1 100644 --- a/tests/run-with-compiler/scala2-library-from-tasty.scala +++ b/tests/run-custom-args/scala2-library-from-tasty.scala @@ -29,7 +29,7 @@ def scalaLibClassesPath = lazy val scalaLibTastyPaths = new Directory(scalaLibClassesPath).deepFiles - .filter(_.`extension` == "tasty") + .filter(_.ext.isTasty) .map(_.normalize.path.stripPrefix(scalaLibClassesPath.toString + separator)) .toList diff --git a/tests/run-deep-subtype/Tuple-reverse.scala b/tests/run-deep-subtype/Tuple-reverse.scala index 230800062bb5..c611ba504d82 100644 --- a/tests/run-deep-subtype/Tuple-reverse.scala +++ b/tests/run-deep-subtype/Tuple-reverse.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.reflect.ClassTag diff --git a/tests/run-macros/Xmacro-settings-compileTimeEnv/Test.scala b/tests/run-macros/Xmacro-settings-compileTimeEnv/Test.scala index aee87a46ce81..d34cadf91765 100644 --- 
a/tests/run-macros/Xmacro-settings-compileTimeEnv/Test.scala +++ b/tests/run-macros/Xmacro-settings-compileTimeEnv/Test.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental -Xmacro-settings:a,b=1,c.b.a=x.y.z=1,myLogger.level=INFO +//> using options -experimental -Xmacro-settings:a,b=1,c.b.a=x.y.z=1,myLogger.level=INFO import scala.compiletime.* diff --git a/tests/run-macros/Xmacro-settings-simple/M1.scala b/tests/run-macros/Xmacro-settings-simple/M1.scala index 57a8be886ba1..487ad5baec46 100644 --- a/tests/run-macros/Xmacro-settings-simple/M1.scala +++ b/tests/run-macros/Xmacro-settings-simple/M1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental package x diff --git a/tests/run-macros/annot-add-global-class/Macro_1.scala b/tests/run-macros/annot-add-global-class/Macro_1.scala index 6ac77913e3ab..247829954218 100644 --- a/tests/run-macros/annot-add-global-class/Macro_1.scala +++ b/tests/run-macros/annot-add-global-class/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental package mymacro @@ -8,9 +8,9 @@ import scala.collection.mutable @experimental class addClass extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, List(TermParamClause(Nil)), tpt, Some(rhs)) => val parents = List(TypeTree.of[Object]) def decls(cls: Symbol): List[Symbol] = @@ -25,8 +25,8 @@ class addClass extends MacroAnnotation: val newCls = Apply(Select(New(TypeIdent(cls)), cls.primaryConstructor), Nil) - val newDef = DefDef.copy(tree)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(newCls, runSym), Nil))) + val newDef = DefDef.copy(definition)(name, 
List(TermParamClause(Nil)), tpt, Some(Apply(Select(newCls, runSym), Nil))) List(clsDef, newDef) case _ => report.error("Annotation only supports `def` with one argument") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-add-global-class/Test_2.scala b/tests/run-macros/annot-add-global-class/Test_2.scala index 824dd2381760..6fa058060906 100644 --- a/tests/run-macros/annot-add-global-class/Test_2.scala +++ b/tests/run-macros/annot-add-global-class/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import mymacro.addClass diff --git a/tests/run-macros/annot-add-global-object/Macro_1.scala b/tests/run-macros/annot-add-global-object/Macro_1.scala index f7c901a49aa5..031d6e33fefe 100644 --- a/tests/run-macros/annot-add-global-object/Macro_1.scala +++ b/tests/run-macros/annot-add-global-object/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,9 +6,9 @@ import scala.collection.mutable @experimental class addClass extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, List(TermParamClause(Nil)), tpt, Some(rhs)) => val parents = List(TypeTree.of[Object]) def decls(cls: Symbol): List[Symbol] = @@ -23,8 +23,8 @@ class addClass extends MacroAnnotation: val modDef = ClassDef.module(mod, parents, body = List(runDef)) - val newDef = DefDef.copy(tree)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(Ref(mod), runSym), Nil))) + val newDef = DefDef.copy(definition)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(Ref(mod), runSym), Nil))) modDef.toList ::: 
newDef :: Nil case _ => report.error("Annotation only supports `def` with one argument") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-add-global-object/Test_2.scala b/tests/run-macros/annot-add-global-object/Test_2.scala index 181bc4e935ea..b3c7dbff1558 100644 --- a/tests/run-macros/annot-add-global-object/Test_2.scala +++ b/tests/run-macros/annot-add-global-object/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @addClass def foo(): Unit = println("macro generated main") diff --git a/tests/run-macros/annot-add-local-class/Macro_1.scala b/tests/run-macros/annot-add-local-class/Macro_1.scala index 57a2d543ffbc..e9c8eaa9318d 100644 --- a/tests/run-macros/annot-add-local-class/Macro_1.scala +++ b/tests/run-macros/annot-add-local-class/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,9 +6,9 @@ import scala.collection.mutable @experimental class addClass extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, List(TermParamClause(Nil)), tpt, Some(rhs)) => val parents = List(TypeTree.of[Object]) def decls(cls: Symbol): List[Symbol] = @@ -22,8 +22,8 @@ class addClass extends MacroAnnotation: val newCls = Apply(Select(New(TypeIdent(cls)), cls.primaryConstructor), Nil) - val newDef = DefDef.copy(tree)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(newCls, runSym), Nil))) + val newDef = DefDef.copy(definition)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(newCls, runSym), Nil))) List(clsDef, newDef) case _ => report.error("Annotation 
only supports `def` with one argument") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-add-local-class/Test_2.scala b/tests/run-macros/annot-add-local-class/Test_2.scala index 8fe4f9db5656..ece281dcb56c 100644 --- a/tests/run-macros/annot-add-local-class/Test_2.scala +++ b/tests/run-macros/annot-add-local-class/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @main def Test(): Unit = @addClass def foo(): Unit = diff --git a/tests/run-macros/annot-add-local-object/Macro_1.scala b/tests/run-macros/annot-add-local-object/Macro_1.scala index 6f6e11e7361c..3d47fafd599a 100644 --- a/tests/run-macros/annot-add-local-object/Macro_1.scala +++ b/tests/run-macros/annot-add-local-object/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,9 +6,9 @@ import scala.collection.mutable @experimental class addClass extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, List(TermParamClause(Nil)), tpt, Some(rhs)) => val parents = List(TypeTree.of[Object]) def decls(cls: Symbol): List[Symbol] = @@ -23,8 +23,8 @@ class addClass extends MacroAnnotation: val (modVal, clsDef) = ClassDef.module(mod, parents, body = List(runDef)) - val newDef = DefDef.copy(tree)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(Ref(mod), runSym), Nil))) + val newDef = DefDef.copy(definition)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(Ref(mod), runSym), Nil))) List(modVal, clsDef, newDef) case _ => report.error("Annotation only supports `def` with one argument") - List(tree) + 
List(definition) diff --git a/tests/run-macros/annot-add-local-object/Test_2.scala b/tests/run-macros/annot-add-local-object/Test_2.scala index 0ff7862fb338..2dfcc801d61a 100644 --- a/tests/run-macros/annot-add-local-object/Test_2.scala +++ b/tests/run-macros/annot-add-local-object/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @main def Test(): Unit = @addClass def foo(): Unit = diff --git a/tests/run-macros/annot-add-nested-class/Macro_1.scala b/tests/run-macros/annot-add-nested-class/Macro_1.scala index e13e3841501a..ecdd6ae35cb0 100644 --- a/tests/run-macros/annot-add-nested-class/Macro_1.scala +++ b/tests/run-macros/annot-add-nested-class/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,9 +6,9 @@ import scala.collection.mutable @experimental class addClass extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, List(TermParamClause(Nil)), tpt, Some(rhs)) => val parents = List(TypeTree.of[Object]) def decls(cls: Symbol): List[Symbol] = @@ -23,8 +23,8 @@ class addClass extends MacroAnnotation: val newCls = Apply(Select(New(TypeIdent(cls)), cls.primaryConstructor), Nil) - val newDef = DefDef.copy(tree)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(newCls, runSym), Nil))) + val newDef = DefDef.copy(definition)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(newCls, runSym), Nil))) List(clsDef, newDef) case _ => report.error("Annotation only supports `def` with one argument") - List(tree) + List(definition) diff --git 
a/tests/run-macros/annot-add-nested-class/Test_2.scala b/tests/run-macros/annot-add-nested-class/Test_2.scala index b92225b7f107..e328f97218d3 100644 --- a/tests/run-macros/annot-add-nested-class/Test_2.scala +++ b/tests/run-macros/annot-add-nested-class/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental class Foo(): @addClass def foo(): Unit = diff --git a/tests/run-macros/annot-add-nested-object/Macro_1.scala b/tests/run-macros/annot-add-nested-object/Macro_1.scala index f8cde8de5bf0..ce6cbaa67a57 100644 --- a/tests/run-macros/annot-add-nested-object/Macro_1.scala +++ b/tests/run-macros/annot-add-nested-object/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,9 +6,9 @@ import scala.collection.mutable @experimental class addClass extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, List(TermParamClause(Nil)), tpt, Some(rhs)) => val parents = List(TypeTree.of[Object]) def decls(cls: Symbol): List[Symbol] = @@ -23,8 +23,8 @@ class addClass extends MacroAnnotation: val (modVal, clsDef) = ClassDef.module(mod, parents, body = List(runDef)) - val newDef = DefDef.copy(tree)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(Ref(mod), runSym), Nil))) + val newDef = DefDef.copy(definition)(name, List(TermParamClause(Nil)), tpt, Some(Apply(Select(Ref(mod), runSym), Nil))) List(modVal, clsDef, newDef) case _ => report.error("Annotation only supports `def` with one argument") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-add-nested-object/Test_2.scala 
b/tests/run-macros/annot-add-nested-object/Test_2.scala index cf6b3a8400fe..f466cf59980b 100644 --- a/tests/run-macros/annot-add-nested-object/Test_2.scala +++ b/tests/run-macros/annot-add-nested-object/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental class Foo(): @addClass def foo(): Unit = diff --git a/tests/run-macros/annot-annot-order/Macro_1.scala b/tests/run-macros/annot-annot-order/Macro_1.scala index 9d3e8e40c01a..ab48d6622d6e 100644 --- a/tests/run-macros/annot-annot-order/Macro_1.scala +++ b/tests/run-macros/annot-annot-order/Macro_1.scala @@ -1,19 +1,19 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @experimental class print(msg: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, params, tpt, Some(rhsTree)) => - given Quotes = tree.symbol.asQuotes + given Quotes = definition.symbol.asQuotes rhsTree.asExpr match case '{ $rhsExpr: t } => val newRhs = '{ println(${Expr(msg)}); $rhsExpr }.asTerm - List(DefDef.copy(tree)(name, params, tpt, Some(newRhs))) + List(DefDef.copy(definition)(name, params, tpt, Some(newRhs))) case _ => report.error("Annotation only supported on `def`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-annot-order/Test_2.scala b/tests/run-macros/annot-annot-order/Test_2.scala index 2c073bdcbb7b..9f312c47c6af 100644 --- a/tests/run-macros/annot-annot-order/Test_2.scala +++ b/tests/run-macros/annot-annot-order/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @print("foo") def 
foo(): Unit = () diff --git a/tests/run-macros/annot-bind/Macro_1.scala b/tests/run-macros/annot-bind/Macro_1.scala index 0997f35ccf4a..ce79560833d9 100644 --- a/tests/run-macros/annot-bind/Macro_1.scala +++ b/tests/run-macros/annot-bind/Macro_1.scala @@ -1,19 +1,19 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @experimental class bind(str: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ValDef(name, tpt, Some(rhsTree)) => val valSym = Symbol.newVal(Symbol.spliceOwner, Symbol.freshName(str), tpt.tpe, Flags.Private, Symbol.noSymbol) val valDef = ValDef(valSym, Some(rhsTree)) val newRhs = Ref(valSym) - val newTree = ValDef.copy(tree)(name, tpt, Some(newRhs)) + val newTree = ValDef.copy(definition)(name, tpt, Some(newRhs)) List(valDef, newTree) case _ => report.error("Annotation only supported on `val` with a single argument are supported") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-bind/Test_2.scala b/tests/run-macros/annot-bind/Test_2.scala index 60895adabc24..44726e69adc7 100644 --- a/tests/run-macros/annot-bind/Test_2.scala +++ b/tests/run-macros/annot-bind/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @bind("a") val foo: String = "foo" diff --git a/tests/run-macros/annot-changeVal/Macro_1.scala b/tests/run-macros/annot-changeVal/Macro_1.scala index d55282f8c390..35d1edf684a4 100644 --- a/tests/run-macros/annot-changeVal/Macro_1.scala +++ b/tests/run-macros/annot-changeVal/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using 
options -experimental import scala.annotation.experimental import scala.quoted.* @@ -7,8 +7,8 @@ import scala.annotation.MacroAnnotation object ChangeVal: @experimental class change(i: Int) extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect.* - tree match - case ValDef(n, t, _) => List(ValDef.copy(tree)(n, t, Some(Literal(IntConstant(i))))) + definition match + case ValDef(n, t, _) => List(ValDef.copy(definition)(n, t, Some(Literal(IntConstant(i))))) } diff --git a/tests/run-macros/annot-changeVal/Test_2.scala b/tests/run-macros/annot-changeVal/Test_2.scala index 8e048e885651..6816712a43a0 100644 --- a/tests/run-macros/annot-changeVal/Test_2.scala +++ b/tests/run-macros/annot-changeVal/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import ChangeVal._ diff --git a/tests/run-macros/annot-concrete-class/Macro_1.scala b/tests/run-macros/annot-concrete-class/Macro_1.scala index e91f9c1ccafe..bda268891b44 100644 --- a/tests/run-macros/annot-concrete-class/Macro_1.scala +++ b/tests/run-macros/annot-concrete-class/Macro_1.scala @@ -1,11 +1,11 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.MacroAnnotation import scala.quoted.* class implementAFoo extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(tree: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect.* tree match case ClassDef(name, cstr, parents, self, body) => diff --git a/tests/run-macros/annot-concrete-class/Test_2.scala b/tests/run-macros/annot-concrete-class/Test_2.scala 
index 7b8fc5cd0f94..6f30a68b1561 100644 --- a/tests/run-macros/annot-concrete-class/Test_2.scala +++ b/tests/run-macros/annot-concrete-class/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental trait AFoo: def foo: String diff --git a/tests/run-macros/annot-export/Macro_1.scala b/tests/run-macros/annot-export/Macro_1.scala index fbe97684079b..68ecb6c5e451 100644 --- a/tests/run-macros/annot-export/Macro_1.scala +++ b/tests/run-macros/annot-export/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,10 +6,10 @@ import scala.collection.mutable.Map @experimental class returnClassName extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, params, tpt, _) => val rhs = Literal(StringConstant(Symbol.spliceOwner.name.stripSuffix("$"))) - List(DefDef.copy(tree)(name, params, tpt, Some(rhs))) + List(DefDef.copy(definition)(name, params, tpt, Some(rhs))) } diff --git a/tests/run-macros/annot-export/Test_2.scala b/tests/run-macros/annot-export/Test_2.scala index 86bbee9c93fd..25954967b953 100644 --- a/tests/run-macros/annot-export/Test_2.scala +++ b/tests/run-macros/annot-export/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental object Bar: @returnClassName diff --git a/tests/run-macros/annot-gen2/Macro_1.scala b/tests/run-macros/annot-gen2/Macro_1.scala index 05428aac7375..edc709ca8172 100644 --- a/tests/run-macros/annot-gen2/Macro_1.scala +++ b/tests/run-macros/annot-gen2/Macro_1.scala @@ -1,18 +1,18 @@ -//> using options 
-experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @experimental class hello extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, params, tpt, Some(t)) => - given Quotes = tree.symbol.asQuotes + given Quotes = definition.symbol.asQuotes val rhs = '{ ${t.asExprOf[String]} + "hello" }.asTerm - val newDef = DefDef.copy(tree)(name, params, tpt, Some(rhs)) + val newDef = DefDef.copy(definition)(name, params, tpt, Some(rhs)) List(newDef) } diff --git a/tests/run-macros/annot-gen2/Macro_2.scala b/tests/run-macros/annot-gen2/Macro_2.scala index 3e2e228abb3e..6260dbd785a7 100644 --- a/tests/run-macros/annot-gen2/Macro_2.scala +++ b/tests/run-macros/annot-gen2/Macro_2.scala @@ -1,15 +1,15 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @experimental class foo extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, params, tpt, Some(t)) => - given Quotes = tree.symbol.asQuotes + given Quotes = definition.symbol.asQuotes val s = Ref(params.head.params.head.symbol).asExprOf[String] val rhs = '{ @hello def foo1(s: String): String = ${ @@ -18,6 +18,6 @@ class foo extends MacroAnnotation { } foo1($s) }.asTerm - val newDef = DefDef.copy(tree)(name, params, tpt, Some(rhs)) + val newDef = 
DefDef.copy(definition)(name, params, tpt, Some(rhs)) List(newDef) } diff --git a/tests/run-macros/annot-gen2/Test_3.scala b/tests/run-macros/annot-gen2/Test_3.scala index 08abafdb2741..2b2a86aabfac 100644 --- a/tests/run-macros/annot-gen2/Test_3.scala +++ b/tests/run-macros/annot-gen2/Test_3.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental class Bar: @foo def bar(s: String) = s diff --git a/tests/run-macros/annot-generate/Macro_1.scala b/tests/run-macros/annot-generate/Macro_1.scala index b88cc62afb06..e1c66b5c2127 100644 --- a/tests/run-macros/annot-generate/Macro_1.scala +++ b/tests/run-macros/annot-generate/Macro_1.scala @@ -1,13 +1,13 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @experimental class hello extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ val helloSymbol = Symbol.newVal(Symbol.spliceOwner, Symbol.freshName("hello"), TypeRepr.of[String], Flags.EmptyFlags, Symbol.noSymbol) val helloVal = ValDef(helloSymbol, Some(Literal(StringConstant("Hello, World!")))) - List(helloVal, tree) + List(helloVal, definition) } diff --git a/tests/run-macros/annot-generate/Macro_2.scala b/tests/run-macros/annot-generate/Macro_2.scala index 911625eac645..0a2c2e34b5f7 100644 --- a/tests/run-macros/annot-generate/Macro_2.scala +++ b/tests/run-macros/annot-generate/Macro_2.scala @@ -1,19 +1,19 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @experimental class foo extends MacroAnnotation { - def transform(using Quotes)(tree: 
quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, params, tpt, Some(t)) => - given Quotes = tree.symbol.asQuotes + given Quotes = definition.symbol.asQuotes val rhs = '{ @hello def foo(x: Int): Int = x + 1 ${t.asExprOf[Int]} }.asTerm - val newDef = DefDef.copy(tree)(name, params, tpt, Some(rhs)) + val newDef = DefDef.copy(definition)(name, params, tpt, Some(rhs)) List(newDef) } diff --git a/tests/run-macros/annot-generate/Test_3.scala b/tests/run-macros/annot-generate/Test_3.scala index 591b3e864f31..8c8509c81c46 100644 --- a/tests/run-macros/annot-generate/Test_3.scala +++ b/tests/run-macros/annot-generate/Test_3.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental class Bar: @foo def bar(x: Int) = x + 1 diff --git a/tests/run-macros/annot-macro-main/Macro_1.scala b/tests/run-macros/annot-macro-main/Macro_1.scala index 2a585bee2bc1..93e312459810 100644 --- a/tests/run-macros/annot-macro-main/Macro_1.scala +++ b/tests/run-macros/annot-macro-main/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,9 +6,9 @@ import scala.collection.mutable @experimental class mainMacro extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, List(TermParamClause(Nil)), _, _) => val parents = List(TypeTree.of[Object]) def decls(cls: Symbol): List[Symbol] = @@ -17,10 +17,10 
@@ class mainMacro extends MacroAnnotation: val cls = Symbol.newClass(Symbol.spliceOwner.owner, name, parents = parents.map(_.tpe), decls, selfType = None) val mainSym = cls.declaredMethod("main").head - val mainDef = DefDef(mainSym, _ => Some(Apply(Ref(tree.symbol), Nil))) + val mainDef = DefDef(mainSym, _ => Some(Apply(Ref(definition.symbol), Nil))) val clsDef = ClassDef(cls, parents, body = List(mainDef)) - List(clsDef, tree) + List(clsDef, definition) case _ => report.error("Annotation only supports `def` without arguments") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-macro-main/Test_2.scala b/tests/run-macros/annot-macro-main/Test_2.scala index a6733ec1c220..45c1b7b0ebfc 100644 --- a/tests/run-macros/annot-macro-main/Test_2.scala +++ b/tests/run-macros/annot-macro-main/Test_2.scala @@ -1,3 +1,3 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @mainMacro def Test(): Unit = println("macro generated main") diff --git a/tests/run-macros/annot-memo/Macro_1.scala b/tests/run-macros/annot-memo/Macro_1.scala index cd990e1d6cce..c0a59b57864a 100644 --- a/tests/run-macros/annot-memo/Macro_1.scala +++ b/tests/run-macros/annot-memo/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,9 +6,9 @@ import scala.collection.concurrent @experimental class memoize extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(rhsTree)) => (param.tpt.tpe.asType, tpt.tpe.asType) match case ('[t], '[u]) => @@ -19,13 +19,13 @@ class memoize extends 
MacroAnnotation: '{ concurrent.TrieMap.empty[t, u] }.asTerm val cacheVal = ValDef(cacheSymbol, Some(cacheRhs)) val newRhs = - given Quotes = tree.symbol.asQuotes + given Quotes = definition.symbol.asQuotes val cacheRefExpr = Ref(cacheSymbol).asExprOf[concurrent.Map[t, u]] val paramRefExpr = Ref(param.symbol).asExprOf[t] val rhsExpr = rhsTree.asExprOf[u] '{ $cacheRefExpr.getOrElseUpdate($paramRefExpr, $rhsExpr) }.asTerm - val newTree = DefDef.copy(tree)(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(newRhs)) + val newTree = DefDef.copy(definition)(name, TermParamClause(param :: Nil) :: Nil, tpt, Some(newRhs)) List(cacheVal, newTree) case _ => report.error("Annotation only supported on `def` with a single argument are supported") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-memo/Test_2.scala b/tests/run-macros/annot-memo/Test_2.scala index da58e550adda..ad39ec43a6c3 100644 --- a/tests/run-macros/annot-memo/Test_2.scala +++ b/tests/run-macros/annot-memo/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental class Bar: @memoize diff --git a/tests/run-macros/annot-mod-class-add-def/Macro_1.scala b/tests/run-macros/annot-mod-class-add-def/Macro_1.scala index 855dce06f279..2003ebd837df 100644 --- a/tests/run-macros/annot-mod-class-add-def/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-add-def/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,11 +6,11 @@ import scala.collection.mutable @experimental class addIndirectToString(msg: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree 
match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val stringMethType = ByNameType.apply(TypeRepr.of[String]) val stringSym = Symbol.newMethod(cls, Symbol.freshName("string"), stringMethType, Flags.Private, Symbol.noSymbol) val stringDef = DefDef(stringSym, _ => Some(Literal(StringConstant(msg)))) @@ -20,9 +20,9 @@ class addIndirectToString(msg: String) extends MacroAnnotation: val toStringDef = DefDef(toStringOverrideSym, _ => Some(Ref(stringSym))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, stringDef :: toStringDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, stringDef :: toStringDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-add-def/Test_2.scala b/tests/run-macros/annot-mod-class-add-def/Test_2.scala index d6aae4e90cf3..61eabc28878d 100644 --- a/tests/run-macros/annot-mod-class-add-def/Test_2.scala +++ b/tests/run-macros/annot-mod-class-add-def/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @addIndirectToString("This is Foo") class Foo diff --git a/tests/run-macros/annot-mod-class-add-inner-class/Macro_1.scala b/tests/run-macros/annot-mod-class-add-inner-class/Macro_1.scala index 395bfd7a28db..40697d1027fe 100644 --- a/tests/run-macros/annot-mod-class-add-inner-class/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-add-inner-class/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,11 +6,11 @@ import scala.collection.mutable @experimental class addInnerClass extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def 
transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol def showClassDecls(showCls: Symbol): List[Symbol] = List(Symbol.newMethod(showCls, "showMe", MethodType(List("x"))(_ => List(cls.typeRef), _ => TypeRepr.of[String]))) @@ -27,9 +27,9 @@ class addInnerClass extends MacroAnnotation: val toStringOverrideSym = Symbol.newMethod(cls, "toString", toStringMethType, Flags.Override, Symbol.noSymbol) val toStringDef = DefDef(toStringOverrideSym, _ => Some(newShowCallShowMe)) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, showClass :: toStringDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, showClass :: toStringDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-add-inner-class/Test_2.scala b/tests/run-macros/annot-mod-class-add-inner-class/Test_2.scala index b90d237b60d4..bba522b5f2c4 100644 --- a/tests/run-macros/annot-mod-class-add-inner-class/Test_2.scala +++ b/tests/run-macros/annot-mod-class-add-inner-class/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @addInnerClass class Foo diff --git a/tests/run-macros/annot-mod-class-add-lazy-val/Macro_1.scala b/tests/run-macros/annot-mod-class-add-lazy-val/Macro_1.scala index f72f28b610d6..642f84f4cbce 100644 --- a/tests/run-macros/annot-mod-class-add-lazy-val/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-add-lazy-val/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,11 +6,11 @@ import 
scala.collection.mutable @experimental class addMemoToString(msg: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val stringLazyValSym = Symbol.newVal(cls, Symbol.freshName("string"), TypeRepr.of[String], Flags.Lazy | Flags.Private, Symbol.noSymbol) val toStringMethType = Symbol.requiredMethod("java.lang.Object.toString").info @@ -19,9 +19,9 @@ class addMemoToString(msg: String) extends MacroAnnotation: val stringLazyValDef = ValDef(stringLazyValSym, Some(Literal(StringConstant(msg)))) val toStringDef = DefDef(toStringOverrideSym, _ => Some(Ref(stringLazyValSym))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, stringLazyValDef :: toStringDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, stringLazyValDef :: toStringDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-add-lazy-val/Test_2.scala b/tests/run-macros/annot-mod-class-add-lazy-val/Test_2.scala index 5aa0601e037e..3dcab1f0d535 100644 --- a/tests/run-macros/annot-mod-class-add-lazy-val/Test_2.scala +++ b/tests/run-macros/annot-mod-class-add-lazy-val/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @addMemoToString("This is Foo") class Foo diff --git a/tests/run-macros/annot-mod-class-add-local-class/Macro_1.scala b/tests/run-macros/annot-mod-class-add-local-class/Macro_1.scala index 0156812adeb1..116a60d4a40c 100644 --- a/tests/run-macros/annot-mod-class-add-local-class/Macro_1.scala +++ 
b/tests/run-macros/annot-mod-class-add-local-class/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,11 +6,11 @@ import scala.collection.mutable @experimental class addInnerClass extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val toStringMethType = Symbol.requiredMethod("java.lang.Object.toString").info val toStringOverrideSym = Symbol.newMethod(cls, "toString", toStringMethType, Flags.Override, Symbol.noSymbol) @@ -28,9 +28,9 @@ class addInnerClass extends MacroAnnotation: val showClass = ClassDef(showClassSym, parents, body = List(showMeDef)) val toStringDef = DefDef(toStringOverrideSym, _ => Some(Block(List(showClass), newShowCallShowMe))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, toStringDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, toStringDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-add-local-class/Test_2.scala b/tests/run-macros/annot-mod-class-add-local-class/Test_2.scala index db4c975992d0..f313900635c9 100644 --- a/tests/run-macros/annot-mod-class-add-local-class/Test_2.scala +++ b/tests/run-macros/annot-mod-class-add-local-class/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @addInnerClass class Foo diff --git a/tests/run-macros/annot-mod-class-add-val/Macro_1.scala 
b/tests/run-macros/annot-mod-class-add-val/Macro_1.scala index fc0294dcb051..7a9e824edc2a 100644 --- a/tests/run-macros/annot-mod-class-add-val/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-add-val/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,11 +6,11 @@ import scala.collection.mutable @experimental class addMemoToString(msg: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val stringValSym = Symbol.newVal(cls, Symbol.freshName("string"), TypeRepr.of[String], Flags.Private, Symbol.noSymbol) val toStringMethType = Symbol.requiredMethod("java.lang.Object.toString").info @@ -19,9 +19,9 @@ class addMemoToString(msg: String) extends MacroAnnotation: val stringValDef = ValDef(stringValSym, Some(Literal(StringConstant(msg)))) val toStringDef = DefDef(toStringOverrideSym, _ => Some(Ref(stringValSym))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, stringValDef :: toStringDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, stringValDef :: toStringDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-add-val/Test_2.scala b/tests/run-macros/annot-mod-class-add-val/Test_2.scala index f6ea732f3084..8c4af8f79ce9 100644 --- a/tests/run-macros/annot-mod-class-add-val/Test_2.scala +++ b/tests/run-macros/annot-mod-class-add-val/Test_2.scala @@ -1,4 +1,4 @@ -//> using options 
-experimental -Yno-experimental +//> using options -experimental @addMemoToString("This is Foo") class Foo diff --git a/tests/run-macros/annot-mod-class-add-var/Macro_1.scala b/tests/run-macros/annot-mod-class-add-var/Macro_1.scala index be38689613e7..bb9ff164bfaa 100644 --- a/tests/run-macros/annot-mod-class-add-var/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-add-var/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,11 +6,11 @@ import scala.collection.mutable @experimental class addCountToString(msg: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val countVarSym = Symbol.newVal(cls, Symbol.freshName("count"), TypeRepr.of[Int], Flags.Mutable | Flags.Private, Symbol.noSymbol) val toStringMethType = Symbol.requiredMethod("java.lang.Object.toString").info @@ -26,9 +26,9 @@ class addCountToString(msg: String) extends MacroAnnotation: ) )) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, countVarDef :: toStringDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, countVarDef :: toStringDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-add-var/Test_2.scala b/tests/run-macros/annot-mod-class-add-var/Test_2.scala index c2ee86fbced5..cb8ea1db8313 100644 --- a/tests/run-macros/annot-mod-class-add-var/Test_2.scala +++ b/tests/run-macros/annot-mod-class-add-var/Test_2.scala 
@@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @addCountToString("This is Foo: ") class Foo: diff --git a/tests/run-macros/annot-mod-class-data/Macro_1.scala b/tests/run-macros/annot-mod-class-data/Macro_1.scala index a175eb274268..f2275546e27a 100644 --- a/tests/run-macros/annot-mod-class-data/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-data/Macro_1.scala @@ -1,15 +1,15 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted.* @experimental class data extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect.* - tree match + definition match case ClassDef(className, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val constructorParameters = ctr.paramss.collect { case clause: TermParamClause => clause } if constructorParameters.size != 1 || constructorParameters.head.params.isEmpty then @@ -51,10 +51,10 @@ class data extends MacroAnnotation: val equalsOverrideDef = DefDef(equalsOverrideSym, equalsOverrideDefBody) val newBody = toStringDef :: hashCodeOverrideDef :: equalsOverrideDef :: body - List(ClassDef.copy(tree)(className, ctr, parents, self, newBody)) + List(ClassDef.copy(definition)(className, ctr, parents, self, newBody)) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) private def toStringExpr(className: String, thisFields: List[Expr[Any]])(using Quotes): Expr[String] = val fieldsSeq = Expr.ofSeq(thisFields) diff --git a/tests/run-macros/annot-mod-class-data/Test_2.scala b/tests/run-macros/annot-mod-class-data/Test_2.scala index 5def49c2daf8..b6007562f820 100644 --- 
a/tests/run-macros/annot-mod-class-data/Test_2.scala +++ b/tests/run-macros/annot-mod-class-data/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @data class Foo(val a: String, val b: Int) //> override def toString(): String = Seq(this.a, this.b).mkString("Foo(", ", ", ")") diff --git a/tests/run-macros/annot-mod-class-equals/Macro_1.scala b/tests/run-macros/annot-mod-class-equals/Macro_1.scala index 10184eada1e2..c500d7a909ab 100644 --- a/tests/run-macros/annot-mod-class-equals/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-equals/Macro_1.scala @@ -1,15 +1,15 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted.* @experimental class equals extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect.* - tree match + definition match case ClassDef(className, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val constructorParameters = ctr.paramss.collect { case clause: TermParamClause => clause } if constructorParameters.size != 1 || constructorParameters.head.params.isEmpty then @@ -42,10 +42,10 @@ class equals extends MacroAnnotation: val hashCodeOverrideDef = DefDef(hashCodeOverrideSym, _ => Some(Ref(hashSym))) val newBody = equalsOverrideDef :: hashVal :: hashCodeOverrideDef :: body - List(ClassDef.copy(tree)(className, ctr, parents, self, newBody)) + List(ClassDef.copy(definition)(className, ctr, parents, self, newBody)) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) private def equalsExpr[T: Type](that: Expr[Any], thisFields: List[Expr[Any]])(using Quotes): Expr[Boolean] = '{ 
diff --git a/tests/run-macros/annot-mod-class-equals/Test_2.scala b/tests/run-macros/annot-mod-class-equals/Test_2.scala index 1e5287743c8b..82981f303568 100644 --- a/tests/run-macros/annot-mod-class-equals/Test_2.scala +++ b/tests/run-macros/annot-mod-class-equals/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @equals class Foo(val a: String, val b: Int) //> override def equals(that: Any): Boolean = diff --git a/tests/run-macros/annot-mod-class-mod-def/Macro_1.scala b/tests/run-macros/annot-mod-class-mod-def/Macro_1.scala index 7a73b0a773e9..657f75d3213f 100644 --- a/tests/run-macros/annot-mod-class-mod-def/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-mod-def/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,11 +6,11 @@ import scala.collection.mutable @experimental class modToString(msg: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val toStringSym = cls.methodMember("toString").head val newBody = body.span(_.symbol != toStringSym) match @@ -21,7 +21,7 @@ class modToString(msg: String) extends MacroAnnotation: report.error("toString was not defined") body - List(ClassDef.copy(tree)(name, ctr, parents, self, newBody)) + List(ClassDef.copy(definition)(name, ctr, parents, self, newBody)) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-mod-def/Test_2.scala 
b/tests/run-macros/annot-mod-class-mod-def/Test_2.scala index b0f2b4531986..d7d03fcb661c 100644 --- a/tests/run-macros/annot-mod-class-mod-def/Test_2.scala +++ b/tests/run-macros/annot-mod-class-mod-def/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @modToString("This is Foo") class Foo: diff --git a/tests/run-macros/annot-mod-class-mod-val/Macro_1.scala b/tests/run-macros/annot-mod-class-mod-val/Macro_1.scala index fda7b5f037d8..03b9ffce7035 100644 --- a/tests/run-macros/annot-mod-class-mod-val/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-mod-val/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,11 +6,11 @@ import scala.collection.mutable @experimental class setValue(field: String, value: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val valSym = cls.fieldMember(field) val newBody = body.span(_.symbol != valSym) match @@ -21,7 +21,7 @@ class setValue(field: String, value: String) extends MacroAnnotation: report.error(s"`val $field` was not defined") body - List(ClassDef.copy(tree)(name, ctr, parents, self, newBody)) + List(ClassDef.copy(definition)(name, ctr, parents, self, newBody)) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-mod-val/Test_2.scala b/tests/run-macros/annot-mod-class-mod-val/Test_2.scala index 862977e2aa31..17e5cae85155 100644 --- 
a/tests/run-macros/annot-mod-class-mod-val/Test_2.scala +++ b/tests/run-macros/annot-mod-class-mod-val/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @setValue("valDef", "a") @setValue("varDef", "b") diff --git a/tests/run-macros/annot-mod-class-override-def/Macro_1.scala b/tests/run-macros/annot-mod-class-override-def/Macro_1.scala index e6d7bba79d54..5a013c8ed8b3 100644 --- a/tests/run-macros/annot-mod-class-override-def/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-override-def/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,19 +6,19 @@ import scala.collection.mutable @experimental class genToString(msg: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val toStringSym = Symbol.requiredMethod("java.lang.Object.toString") val toStringOverrideSym = Symbol.newMethod(cls, "toString", toStringSym.info, Flags.Override, Symbol.noSymbol) val toStringDef = DefDef(toStringOverrideSym, _ => Some(Literal(StringConstant(msg)))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, toStringDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, toStringDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-override-def/Test_2.scala b/tests/run-macros/annot-mod-class-override-def/Test_2.scala index 8c6121664208..8aa04610d039 
100644 --- a/tests/run-macros/annot-mod-class-override-def/Test_2.scala +++ b/tests/run-macros/annot-mod-class-override-def/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @genToString("This is Foo") class Foo diff --git a/tests/run-macros/annot-mod-class-override-val/Macro_1.scala b/tests/run-macros/annot-mod-class-override-val/Macro_1.scala index d7409a649427..bfbbbf690072 100644 --- a/tests/run-macros/annot-mod-class-override-val/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-override-val/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,18 +6,18 @@ import scala.collection.mutable @experimental class overrideField(field: String, value: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val overrideSym = Symbol.newVal(cls, field, TypeRepr.of[String], Flags.Override, Symbol.noSymbol) val valDef = ValDef(overrideSym, Some(Literal(StringConstant(value)))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, valDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, valDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-override-val/Test_2.scala b/tests/run-macros/annot-mod-class-override-val/Test_2.scala index f067ba678af8..b633031b9f6d 100644 --- a/tests/run-macros/annot-mod-class-override-val/Test_2.scala +++ 
b/tests/run-macros/annot-mod-class-override-val/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental class Foo: val val1: String = "?" diff --git a/tests/run-macros/annot-mod-class-unused-new-sym/Macro_1.scala b/tests/run-macros/annot-mod-class-unused-new-sym/Macro_1.scala index fbcb9049d947..7f0c5ee51f3a 100644 --- a/tests/run-macros/annot-mod-class-unused-new-sym/Macro_1.scala +++ b/tests/run-macros/annot-mod-class-unused-new-sym/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,15 +6,15 @@ import scala.collection.mutable @experimental class newUnusedSymbol extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol val toStringMethType = Symbol.requiredMethod("java.lang.Object.toString").info val toStringOverrideSym = Symbol.newMethod(cls, "toString", toStringMethType, Flags.Override, Symbol.noSymbol) // Test that toStringOverrideSym is not accidentally entered in the class - List(tree) + List(definition) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/annot-mod-class-unused-new-sym/Test_2.scala b/tests/run-macros/annot-mod-class-unused-new-sym/Test_2.scala index 340b7503ff71..598f8fa1fc09 100644 --- a/tests/run-macros/annot-mod-class-unused-new-sym/Test_2.scala +++ b/tests/run-macros/annot-mod-class-unused-new-sym/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options 
-experimental @newUnusedSymbol class Foo diff --git a/tests/run-macros/annot-result-order/Macro_1.scala b/tests/run-macros/annot-result-order/Macro_1.scala index c81641037b67..1e4d21cc4fcb 100644 --- a/tests/run-macros/annot-result-order/Macro_1.scala +++ b/tests/run-macros/annot-result-order/Macro_1.scala @@ -1,11 +1,11 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @experimental class print(msg: String) extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ def printMsg(msg: String) = val valSym = Symbol.newVal(Symbol.spliceOwner, Symbol.freshName("print"), TypeRepr.of[Unit], Flags.Private, Symbol.noSymbol) @@ -13,4 +13,4 @@ class print(msg: String) extends MacroAnnotation: given Quotes = valSym.asQuotes '{ println(${Expr(msg)}) }.asTerm ValDef(valSym, Some(valRhs)) - List(printMsg(s"before: $msg"), tree, printMsg(s"after: $msg")) + List(printMsg(s"before: $msg"), definition, printMsg(s"after: $msg")) diff --git a/tests/run-macros/annot-result-order/Test_2.scala b/tests/run-macros/annot-result-order/Test_2.scala index 06b050d11c29..0f356a447772 100644 --- a/tests/run-macros/annot-result-order/Test_2.scala +++ b/tests/run-macros/annot-result-order/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @print("foo") def foo(): Unit = () diff --git a/tests/run-macros/annot-simple-fib/Macro_1.scala b/tests/run-macros/annot-simple-fib/Macro_1.scala index e5852d5ce73c..0e95ddb80ff7 100644 --- a/tests/run-macros/annot-simple-fib/Macro_1.scala +++ b/tests/run-macros/annot-simple-fib/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental 
-Yno-experimental +//> using options -experimental import scala.annotation.{experimental, MacroAnnotation} import scala.quoted._ @@ -6,9 +6,9 @@ import scala.collection.mutable.Map @experimental class memoize extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case DefDef(name, params, tpt, Some(fibTree)) => val cacheName = Symbol.freshName(name + "Cache") val cacheSymbol = Symbol.newVal(Symbol.spliceOwner, cacheName, TypeRepr.of[Map[Int, Int]], Flags.EmptyFlags, Symbol.noSymbol) @@ -17,7 +17,7 @@ class memoize extends MacroAnnotation { '{Map.empty[Int, Int]}.asTerm val cacheVal = ValDef(cacheSymbol, Some(cacheRhs)) val rhs = - given Quotes = tree.symbol.asQuotes + given Quotes = definition.symbol.asQuotes val fibCache = Ref(cacheSymbol).asExprOf[Map[Int, Int]] val n = Ref(params.head.params.head.symbol).asExprOf[Int] '{ @@ -28,6 +28,6 @@ class memoize extends MacroAnnotation { $fibCache($n) = res res }.asTerm - val newFib = DefDef.copy(tree)(name, params, tpt, Some(rhs)) + val newFib = DefDef.copy(definition)(name, params, tpt, Some(rhs)) List(cacheVal, newFib) } diff --git a/tests/run-macros/annot-simple-fib/Test_2.scala b/tests/run-macros/annot-simple-fib/Test_2.scala index 534f4569b619..ace473d848a0 100644 --- a/tests/run-macros/annot-simple-fib/Test_2.scala +++ b/tests/run-macros/annot-simple-fib/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental class Bar: @memoize diff --git a/tests/run-macros/annot-unrollLast/Macro_1.scala b/tests/run-macros/annot-unrollLast/Macro_1.scala index e220811433e3..974eab1ea037 100644 --- a/tests/run-macros/annot-unrollLast/Macro_1.scala +++ b/tests/run-macros/annot-unrollLast/Macro_1.scala @@ 
-1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental package example @@ -12,7 +12,7 @@ class unrollLast extends StaticAnnotation @experimental class unrollHelper extends MacroAnnotation { - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(tree: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ tree match case tree: DefDef => transformDefDef(tree) diff --git a/tests/run-macros/annot-unrollLast/Test_2.scala b/tests/run-macros/annot-unrollLast/Test_2.scala index b45b6aecd751..c05bd8a121ed 100644 --- a/tests/run-macros/annot-unrollLast/Test_2.scala +++ b/tests/run-macros/annot-unrollLast/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import example.{unrollHelper, unrollLast} diff --git a/tests/run-macros/i11685/Macro_1.scala b/tests/run-macros/i11685/Macro_1.scala index 72965266dddd..2723d5898050 100644 --- a/tests/run-macros/i11685/Macro_1.scala +++ b/tests/run-macros/i11685/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental package test diff --git a/tests/run-macros/i11685/Test_2.scala b/tests/run-macros/i11685/Test_2.scala index eaab3af7acb5..32278480a2df 100644 --- a/tests/run-macros/i11685/Test_2.scala +++ b/tests/run-macros/i11685/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import test.MyMacro diff --git a/tests/run-macros/i12021/Macro_1.scala b/tests/run-macros/i12021/Macro_1.scala index 4c36c1fc4cf0..5592da075e5b 100644 --- a/tests/run-macros/i12021/Macro_1.scala +++ b/tests/run-macros/i12021/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git 
a/tests/run-macros/i12351/Test_2.scala b/tests/run-macros/i12351/Test_2.scala index e480b3c7e86e..a48d30772d5c 100644 --- a/tests/run-macros/i12351/Test_2.scala +++ b/tests/run-macros/i12351/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -Yread-docs +//> using options -Xread-docs @main def Test(): Unit = { println(getDocString[Data]) diff --git a/tests/run-macros/i12352/Main.scala b/tests/run-macros/i12352/Main.scala index b62bd80eaf2c..19cdf2a82d3b 100644 --- a/tests/run-macros/i12352/Main.scala +++ b/tests/run-macros/i12352/Main.scala @@ -1,4 +1,4 @@ -//> using options -Yread-docs +//> using options -Xread-docs @main def Test(): Unit = { val res = getDocString[scala.quoted.Quotes] diff --git a/tests/run-macros/i16734b/Macro_1.scala b/tests/run-macros/i16734b/Macro_1.scala index e080193c398a..cbfe82425c01 100644 --- a/tests/run-macros/i16734b/Macro_1.scala +++ b/tests/run-macros/i16734b/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git a/tests/run-macros/i16734b/Test_2.scala b/tests/run-macros/i16734b/Test_2.scala index 81316875d561..bc2a7ae4ef44 100644 --- a/tests/run-macros/i16734b/Test_2.scala +++ b/tests/run-macros/i16734b/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental type F1Inv[A] type F1Cov[+A] diff --git a/tests/run-macros/i17105/Test_3.scala b/tests/run-macros/i17105/Test_3.scala index c19ac507e1a4..a3503c5ed8f2 100644 --- a/tests/run-macros/i17105/Test_3.scala +++ b/tests/run-macros/i17105/Test_3.scala @@ -1,3 +1,5 @@ +//> using options -experimental + import reflect.Selectable.reflectiveSelectable class Hoe { def f(x: Int): String = s"Hoe got ${x}" } diff --git a/tests/run-macros/i18806/Macro_1.scala b/tests/run-macros/i18806/Macro_1.scala index 461080b67b95..06ab612ce416 100644 --- a/tests/run-macros/i18806/Macro_1.scala +++ b/tests/run-macros/i18806/Macro_1.scala @@ -3,11 +3,11 @@ import 
scala.quoted._ @experimental class gen1 extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(definition: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ - tree match + definition match case ClassDef(name, ctr, parents, self, body) => - val cls = tree.symbol + val cls = definition.symbol // val meth = cls.methodMember("foo").head // val fooTpe = cls.typeRef.memberType(meth) @@ -17,8 +17,8 @@ class gen1 extends MacroAnnotation: val fooDef = DefDef(fooOverrideSym, _ => Some(Literal(StringConstant("hi")))) - val newClassDef = ClassDef.copy(tree)(name, ctr, parents, self, fooDef :: body) + val newClassDef = ClassDef.copy(definition)(name, ctr, parents, self, fooDef :: body) List(newClassDef) case _ => report.error("Annotation only supports `class`") - List(tree) + List(definition) diff --git a/tests/run-macros/i19676/Macro_1.scala b/tests/run-macros/i19676/Macro_1.scala new file mode 100644 index 000000000000..82a5ca718477 --- /dev/null +++ b/tests/run-macros/i19676/Macro_1.scala @@ -0,0 +1,28 @@ +//> using options -experimental + +import scala.annotation.MacroAnnotation +import scala.quoted.* + +class companionToString(str: String) extends MacroAnnotation: + + def transform(using Quotes)( + definition: quotes.reflect.Definition, + companion: Option[quotes.reflect.Definition] + ): List[quotes.reflect.Definition] = + + import quotes.reflect.* + companion match + case Some(cls@ClassDef(name, ctr, parents, self, body)) => + val symbol = cls.symbol + val toStringSym = Symbol.requiredMethod("java.lang.Object.toString") + val toStringOverrideSym = Symbol.newMethod(symbol, "toString", toStringSym.info, Flags.Override, Symbol.noSymbol) + val toStringDef = DefDef(toStringOverrideSym, _ => Some(Literal(StringConstant(s"$name: $str")))) + val newCompanion = ClassDef.copy(cls)(name, ctr, parents, 
self, toStringDef :: body) + List(definition, newCompanion) + case Some(unexpected) => + report.error(s"Unexpected companion: ${unexpected.show}") + List(definition) + case None => + report.error("Companion is not available to transform") + List(definition) + end transform \ No newline at end of file diff --git a/tests/run-macros/i19676/Test_2.scala b/tests/run-macros/i19676/Test_2.scala new file mode 100644 index 000000000000..2baa650f138a --- /dev/null +++ b/tests/run-macros/i19676/Test_2.scala @@ -0,0 +1,36 @@ +//> using options -experimental + +@companionToString("transformed by class") +class InPackage + +@companionToString("transformed by object") +object InPackage + +val (cls: Any, obj: Any) = { + + @companionToString("transformed by class") + class InBlock + + @companionToString("transformed by object") + object InBlock + + (new InBlock, InBlock) +} + +object Wrapper { + + @companionToString("transformed by class") + class InInnerClass + + @companionToString("transformed by object") + object InInnerClass + +} + +@main def Test = + assert((new InPackage).toString() == "InPackage: transformed by object") + assert(InPackage.toString() == "InPackage$: transformed by class") + assert(cls.toString() == "InBlock: transformed by object") + assert(obj.toString() == "InBlock$: transformed by class") + assert((new Wrapper.InInnerClass).toString() == "InInnerClass: transformed by object") + assert(Wrapper.InInnerClass.toString() == "InInnerClass$: transformed by class") diff --git a/tests/run-macros/macro-erased/Test_2.scala b/tests/run-macros/macro-erased/Test_2.scala index 1f7f8be436c7..880099021609 100644 --- a/tests/run-macros/macro-erased/Test_2.scala +++ b/tests/run-macros/macro-erased/Test_2.scala @@ -1,3 +1,5 @@ +//> using options -experimental + object Test { def main(args: Array[String]): Unit = { assert(Macro.foo1(1) == 0) diff --git a/tests/run-macros/newClass/Macro_1.scala b/tests/run-macros/newClass/Macro_1.scala index 75f757c038dc..0ad619c3d9c4 100644 
--- a/tests/run-macros/newClass/Macro_1.scala +++ b/tests/run-macros/newClass/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git a/tests/run-macros/newClass/Test_2.scala b/tests/run-macros/newClass/Test_2.scala index b126e90a5e41..8cf726d89d0b 100644 --- a/tests/run-macros/newClass/Test_2.scala +++ b/tests/run-macros/newClass/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @main def Test: Unit = { val foo = makeClass("foo") diff --git a/tests/run-macros/newClassExtends/Macro_1.scala b/tests/run-macros/newClassExtends/Macro_1.scala index d23b8fba88b6..8b817f2674b2 100644 --- a/tests/run-macros/newClassExtends/Macro_1.scala +++ b/tests/run-macros/newClassExtends/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git a/tests/run-macros/newClassExtends/Test_2.scala b/tests/run-macros/newClassExtends/Test_2.scala index 8d782e346a68..6e902825fdc6 100644 --- a/tests/run-macros/newClassExtends/Test_2.scala +++ b/tests/run-macros/newClassExtends/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @main def Test: Unit = { val foo: Foo = makeClass("foo") diff --git a/tests/run-macros/newClassExtendsClassParams/Macro_1.scala b/tests/run-macros/newClassExtendsClassParams/Macro_1.scala index e5d28c0ceb9b..99e639e0aa4f 100644 --- a/tests/run-macros/newClassExtendsClassParams/Macro_1.scala +++ b/tests/run-macros/newClassExtendsClassParams/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git a/tests/run-macros/newClassExtendsClassParams/Test_2.scala b/tests/run-macros/newClassExtendsClassParams/Test_2.scala index 8d782e346a68..6e902825fdc6 100644 --- 
a/tests/run-macros/newClassExtendsClassParams/Test_2.scala +++ b/tests/run-macros/newClassExtendsClassParams/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @main def Test: Unit = { val foo: Foo = makeClass("foo") diff --git a/tests/run-macros/newClassSelf/Macro_1.scala b/tests/run-macros/newClassSelf/Macro_1.scala index 46871d4d6b4c..8562b814677d 100644 --- a/tests/run-macros/newClassSelf/Macro_1.scala +++ b/tests/run-macros/newClassSelf/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git a/tests/run-macros/newClassSelf/Test_2.scala b/tests/run-macros/newClassSelf/Test_2.scala index 437be3ca519d..1c1f64fd20e3 100644 --- a/tests/run-macros/newClassSelf/Test_2.scala +++ b/tests/run-macros/newClassSelf/Test_2.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental @main def Test: Unit = { val a: Bar = makeClass("A") diff --git a/tests/run-macros/reflect-method-type-kind/macro_1.scala b/tests/run-macros/reflect-method-type-kind/macro_1.scala new file mode 100644 index 000000000000..6e996a85603d --- /dev/null +++ b/tests/run-macros/reflect-method-type-kind/macro_1.scala @@ -0,0 +1,64 @@ +trait Foo +trait Bar + +object Methods: + def implicitMethod(implicit foo: Foo, int: Int): Bar = ??? + def contextualMethod(using foo: Foo, int: Int): Bar = ??? + def plainMethod(foo: Foo, int: Int): Bar = ??? 
+ +object Macro: + import scala.quoted._ + inline def macroCall(): Unit = ${ macroCallImpl } + def macroCallImpl(using Quotes): Expr[Unit] = + testReadingMethodTypeKind + testCreatingMethodTypeKind + '{()} + + def testReadingMethodTypeKind(using Quotes) = + import quotes.reflect._ + def getFromMethods(name: String): TypeRepr = + val typeRepr = TypeRepr.of[Methods.type] + val symbol = + typeRepr.typeSymbol.methodMember(name).headOption.getOrElse( + typeRepr.typeSymbol.fieldMember(name) + ) + typeRepr.memberType(symbol) + + assert(getFromMethods("implicitMethod").asInstanceOf[MethodType].isImplicit) + assert(!getFromMethods("implicitMethod").asInstanceOf[MethodType].isContextual) + assert(getFromMethods("implicitMethod").asInstanceOf[MethodType].methodTypeKind == MethodTypeKind.Implicit) + + assert(getFromMethods("contextualMethod").asInstanceOf[MethodType].isImplicit) + assert(getFromMethods("contextualMethod").asInstanceOf[MethodType].isContextual) + assert(getFromMethods("contextualMethod").asInstanceOf[MethodType].methodTypeKind == MethodTypeKind.Contextual) + + assert(!getFromMethods("plainMethod").asInstanceOf[MethodType].isImplicit) + assert(!getFromMethods("plainMethod").asInstanceOf[MethodType].isContextual) + assert(getFromMethods("plainMethod").asInstanceOf[MethodType].methodTypeKind == MethodTypeKind.Plain) + + + def testCreatingMethodTypeKind(using Quotes) = + import quotes.reflect._ + val paramTypes = List(TypeRepr.of[Foo], TypeRepr.of[Int]) + val resType = TypeRepr.of[Bar] + val implicitMethodType = MethodType.apply(MethodTypeKind.Implicit)(List("foo", "int"))(mt => paramTypes, mt => resType) + assert(implicitMethodType.isImplicit) + assert(!implicitMethodType.isContextual) + assert(implicitMethodType.methodTypeKind == MethodTypeKind.Implicit) + assert(implicitMethodType.methodTypeKind != MethodTypeKind.Contextual) + assert(implicitMethodType.methodTypeKind != MethodTypeKind.Plain) + + + val contextualMethodType = 
MethodType.apply(MethodTypeKind.Contextual)(List("foo", "int"))(mt => paramTypes, mt => resType) + assert(contextualMethodType.isImplicit) + assert(contextualMethodType.isContextual) + assert(contextualMethodType.methodTypeKind != MethodTypeKind.Implicit) + assert(contextualMethodType.methodTypeKind == MethodTypeKind.Contextual) + assert(contextualMethodType.methodTypeKind != MethodTypeKind.Plain) + + val plainMethodType = MethodType.apply(MethodTypeKind.Plain)(List("foo", "int"))(mt => paramTypes, mt => resType) + assert(!plainMethodType.isContextual) + assert(!plainMethodType.isImplicit) + assert(plainMethodType.methodTypeKind != MethodTypeKind.Implicit) + assert(plainMethodType.methodTypeKind != MethodTypeKind.Contextual) + assert(plainMethodType.methodTypeKind == MethodTypeKind.Plain) diff --git a/tests/run-macros/reflect-method-type-kind/test_2.scala b/tests/run-macros/reflect-method-type-kind/test_2.scala new file mode 100644 index 000000000000..a020dc4e2d93 --- /dev/null +++ b/tests/run-macros/reflect-method-type-kind/test_2.scala @@ -0,0 +1,3 @@ +object Test: + def main(args: Array[String]): Unit = + Macro.macroCall() diff --git a/tests/run-macros/term-show.check b/tests/run-macros/term-show.check index 91ba0308e3db..9733d2ad211b 100644 --- a/tests/run-macros/term-show.check +++ b/tests/run-macros/term-show.check @@ -10,7 +10,7 @@ } () } -@scala.annotation.internal.SourceFile("tests/run-macros/term-show/Test_2.scala") trait A() extends java.lang.Object { +@scala.annotation.internal.SourceFile("tests/run-macros/term-show/Test_2.scala") @scala.annotation.experimental("Added by top level import scala.language.experimental.erasedDefinitions") trait A() extends java.lang.Object { def imp(x: scala.Int)(implicit str: scala.Predef.String): scala.Int def use(`x₂`: scala.Int)(using `str₂`: scala.Predef.String): scala.Int def era(`x₃`: scala.Int)(erased `str₃`: scala.Predef.String): scala.Int diff --git a/tests/run-macros/type-show/Test_2.scala 
b/tests/run-macros/type-show/Test_2.scala index de845f3e84dd..3bc9da043885 100644 --- a/tests/run-macros/type-show/Test_2.scala +++ b/tests/run-macros/type-show/Test_2.scala @@ -23,7 +23,7 @@ object Test { """TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Nothing"), """+ """TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Any"))), """+ """MatchType("""+ - """TypeRef(TermRef(ThisType(TypeRef(NoPrefix(), "")), "scala"), "Int"), """+ // match type bound + """TypeRef(ThisType(TypeRef(NoPrefix(), "scala")), "Any"), """+ // match type bound """ParamRef(binder, 0), """+ """List("""+ """MatchCase("""+ diff --git a/tests/run-staging/i19170c.check b/tests/run-staging/i19170c.check new file mode 100644 index 000000000000..581ccb11e364 --- /dev/null +++ b/tests/run-staging/i19170c.check @@ -0,0 +1 @@ +exception thrown, no additional printlns diff --git a/tests/run-staging/i19170c.scala b/tests/run-staging/i19170c.scala new file mode 100644 index 000000000000..24b7faa8a323 --- /dev/null +++ b/tests/run-staging/i19170c.scala @@ -0,0 +1,16 @@ +import scala.quoted.* + +given staging.Compiler = + staging.Compiler.make(getClass.getClassLoader.getParent) // different classloader that 19170b.scala +class A(i: Int) + +def f(i: Expr[Int])(using Quotes): Expr[A] = { '{ new A($i) } } + +@main def Test = { + try + val g: Int => A = staging.run { '{ (i: Int) => ${ f('{i}) } } } + println(g(3)) + catch case ex: Exception => + assert(ex.getMessage().startsWith("An unhandled exception was thrown in the staging compiler."), ex.getMessage()) + println("exception thrown, no additional printlns") +} diff --git a/tests/run-staging/i19176b.check b/tests/run-staging/i19176b.check new file mode 100644 index 000000000000..581ccb11e364 --- /dev/null +++ b/tests/run-staging/i19176b.check @@ -0,0 +1 @@ +exception thrown, no additional printlns diff --git a/tests/run-staging/i19176b.scala b/tests/run-staging/i19176b.scala new file mode 100644 index 000000000000..d3f1657d03da --- /dev/null +++ 
b/tests/run-staging/i19176b.scala @@ -0,0 +1,14 @@ +import scala.quoted.* + +given staging.Compiler = + staging.Compiler.make(getClass.getClassLoader.getParent) // we want to make sure the classloader is incorrect + +class A + +@main def Test = + try + val f: (A, Int) => Int = staging.run { '{ (q: A, x: Int) => x } } + f(new A, 3) + catch case ex: Exception => + assert(ex.getMessage().startsWith("An unhandled exception was thrown in the staging compiler."), ex.getMessage()) + println("exception thrown, no additional printlns") diff --git a/tests/run-tasty-inspector/isSuperAccessor.check b/tests/run-tasty-inspector/isSuperAccessor.check new file mode 100644 index 000000000000..4c90083e2b6b --- /dev/null +++ b/tests/run-tasty-inspector/isSuperAccessor.check @@ -0,0 +1 @@ +method SyncIterator$$super$next diff --git a/tests/run-tasty-inspector/isSuperAccessor.scala b/tests/run-tasty-inspector/isSuperAccessor.scala new file mode 100644 index 000000000000..3b950f8c79cf --- /dev/null +++ b/tests/run-tasty-inspector/isSuperAccessor.scala @@ -0,0 +1,39 @@ +import scala.quoted.* +import scala.tasty.inspector.* + +@main def Test = { + // Artefact of the current test infrastructure + // TODO improve infrastructure to avoid needing this code on each test + val classpath = dotty.tools.dotc.util.ClasspathFromClassloader(this.getClass.getClassLoader).split(java.io.File.pathSeparator).find(_.contains("runWithCompiler")).get + val allTastyFiles = dotty.tools.io.Path(classpath).walkFilter(_.extension == "tasty").map(_.toString).toList + val tastyFiles = allTastyFiles.filter(_.contains("SyncIterator")) + + TastyInspector.inspectTastyFiles(tastyFiles)(new MyInspector) +} + +class MyInspector extends Inspector: + + override def inspect(using Quotes)(tastys: List[Tasty[quotes.type]]): Unit = + import quotes.reflect.* + class Traverser extends TreeTraverser: + override def traverseTree(tree: Tree)(owner: Symbol) = + tree match + case tree: DefDef if tree.symbol.isSuperAccessor => + 
println(tree.symbol) + case _ => + super.traverseTree(tree)(owner) + end Traverser + + val traverser = new Traverser + tastys.foreach { tasty => + traverser.traverseTree(tasty.ast)(tasty.ast.symbol) + } + + +trait IntIterator { + def next: Int + def drop(n: Int): Unit +} +trait SyncIterator extends IntIterator { + abstract override def next: Int = super.next +} diff --git a/tests/run-tasty-inspector/scala2-library-test.scala b/tests/run-tasty-inspector/scala2-library-test.scala index 15a251427d70..37dc55e20d1f 100644 --- a/tests/run-tasty-inspector/scala2-library-test.scala +++ b/tests/run-tasty-inspector/scala2-library-test.scala @@ -32,7 +32,7 @@ def scalaLibClassesPath = lazy val scalaLibTastyPaths = new Directory(scalaLibClassesPath).deepFiles - .filter(_.`extension` == "tasty") + .filter(_.ext.isTasty) .map(_.normalize.path.stripPrefix(scalaLibClassesPath.toString + separator)) .toList diff --git a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala index ca48dd2d8a5f..7079c7320ba0 100644 --- a/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala +++ b/tests/run-tasty-inspector/stdlibExperimentalDefinitions.scala @@ -25,33 +25,12 @@ val experimentalDefinitionInLibrary = Set( "scala.util.TupledFunction", "scala.util.TupledFunction$", - //// New feature: main annotation generalization - // Can be stabilized when language feature is stabilized. - // Needs user feedback. - // Should argGetter/varargGetter be simplified? - // Should we have better support for main annotation macros? - "scala.annotation.MainAnnotation", - "scala.annotation.MainAnnotation$", - "scala.annotation.MainAnnotation$.Info", - "scala.annotation.MainAnnotation$.Parameter", - "scala.annotation.MainAnnotation$.ParameterAnnotation", - - - //// New feature: prototype of new version of @main - // This will never be stabilized. 
When it is ready it should replace the old @main annotation (requires scala.annotation.MainAnnotation). - // Needs user feedback. - "scala.annotation.newMain", - "scala.annotation.newMain$", - "scala.annotation.newMain$.alias", - "scala.annotation.newMain.Help", - "scala.annotation.newMain.Help$", - "scala.annotation.newMain.Names", - //// New feature: capture checking "scala.annotation.capability", "scala.annotation.retains", "scala.annotation.retainsByName", "scala.annotation.retainsCap", + "scala.annotation.retainsArg", "scala.Pure", "scala.caps", "scala.caps$", @@ -72,8 +51,8 @@ val experimentalDefinitionInLibrary = Set( "scala.annotation.init$.widen", "scala.annotation.init$.region", - //// New APIs: Quotes - // Can be stabilized in 3.4.0 (unsure) or later + //// New APIs: Quotes + // Can be stabilized in 3.5.0 (unsure) or later "scala.quoted.Quotes.reflectModule.CompilationInfoModule.XmacroSettings", // Cant be stabilized yet. // Need newClass variant that can add constructor parameters. 
@@ -92,11 +71,20 @@ val experimentalDefinitionInLibrary = Set( "scala.quoted.Quotes.reflectModule.TermParamClauseMethods.erasedArgs", "scala.quoted.Quotes.reflectModule.TermParamClauseMethods.hasErasedArgs", - // New feature: reverse method on Tuple - "scala.Tuple.reverse", // can be stabilized in 3.5 - "scala.Tuple$.Reverse", // can be stabilized in 3.5 - "scala.Tuple$.ReverseOnto", // can be stabilized in 3.5 - "scala.runtime.Tuples$.reverse", // can be stabilized in 3.5 + // New feature: fromNullable for explicit nulls + "scala.Predef$.fromNullable", + + // New feature: named tuples + "scala.NamedTuple", + "scala.NamedTuple$", + "scala.NamedTupleDecomposition", + "scala.NamedTupleDecomposition$", + + // New feature: modularity + "scala.Precise", + "scala.annotation.internal.WitnessNames", + "scala.compiletime.package$package$.deferred", + "scala.runtime.stdLibPatches.Predef$.is", ) diff --git a/tests/run-with-compiler/i14541.scala b/tests/run-with-compiler/i14541.scala index 0fdfb89674d5..2b942007c5b6 100644 --- a/tests/run-with-compiler/i14541.scala +++ b/tests/run-with-compiler/i14541.scala @@ -6,6 +6,7 @@ object Test: def main(args: Array[String]): Unit = getClass.getClassLoader.run("echo", List("hello", "raw", "world")) // caution: uses "SCALA_OPTS" + sys.props("scala.use_legacy_launcher") = "true" dotty.tools.MainGenericRunner.main(Array("--class-path", classpath, "echo", "hello", "run", "world")) @main def echo(args: String*): Unit = println { diff --git a/tests/run/binaryLiterals.scala b/tests/run/binaryLiterals.scala new file mode 100644 index 000000000000..5ac8c7b6f8bc --- /dev/null +++ b/tests/run/binaryLiterals.scala @@ -0,0 +1,72 @@ +@main +def Test = + val kenobi = 0b1 + + assert(kenobi == 1) + + assert(0B0000 == 0) + assert(0B0001 == 1) + assert(0B0010 == 2) + assert(0B0100 == 4) + assert(0B1000 == 8) + + assert(0b0000 == 0) + assert(0b0001 == 1) + assert(0b0010 == 2) + assert(0b0100 == 4) + assert(0b1000 == 8) + + assert(0b0001_0000 == 16) + 
assert(0b0010_0000 == 32) + assert(0b0100_0000 == 64) + assert(0b1000_0000 == 128) + + assert(0b0001_0000_0000 == 256) + assert(0b0010_0000_0000 == 512) + assert(0b0100_0000_0000 == 1024) + assert(0b1000_0000_0000 == 2048) + + assert(0b0001_0000_0000_0000 == 4096) + assert(0b0010_0000_0000_0000 == 8192) + assert(0b0100_0000_0000_0000 == 16384) + assert(0b1000_0000_0000_0000 == 32768) + + assert(0b0001__0000_0000_0000_0000 == 65536) + assert(0b0010__0000_0000_0000_0000 == 131072) + assert(0b0100__0000_0000_0000_0000 == 262144) + assert(0b1000__0000_0000_0000_0000 == 524288) + + assert(0b0001_0000__0000_0000_0000_0000 == 1048576) + assert(0b0010_0000__0000_0000_0000_0000 == 2097152) + assert(0b0100_0000__0000_0000_0000_0000 == 4194304) + assert(0b1000_0000__0000_0000_0000_0000 == 8388608) + + assert(0b0001_0000_0000__0000_0000_0000_0000 == 16777216) + assert(0b0010_0000_0000__0000_0000_0000_0000 == 33554432) + assert(0b0100_0000_0000__0000_0000_0000_0000 == 67108864) + assert(0b1000_0000_0000__0000_0000_0000_0000 == 134217728) + + assert(0b0001_0000_0000_0000__0000_0000_0000_0000 == 268435456) + assert(0b0010_0000_0000_0000__0000_0000_0000_0000 == 536870912) + assert(0b0100_0000_0000_0000__0000_0000_0000_0000 == 1073741824) + assert(0b1000_0000_0000_0000__0000_0000_0000_0000L == 2147483648L) + + assert(0b1000_0000_0000_0000__0000_0000_0000_0000 == -2147483648) // Signed ! 
+ assert(0b1111_1111_1111_1111__1111_1111_1111_1111 == -1) + + // Randomly generated using https://numbergenerator.org/random-32-bit-binary-number#!numbers=10&length=32&addfilters= + // Converted to signed decimal using https://onlinetoolz.net/unsigned-signed#base=2&bits=32 + assert(0b0110_1000_1100_0101_0010_1100_0100_0011 == 1757752387) + assert(0b1111_0101_0100_1011_0101_1000_0011_0110 == -179611594) + assert(0b0000_0011_0000_1010_1010_0011_0000_0000 == 51028736) + assert(0b0101_0010_1111_1001_0100_0101_1101_1011 == 1392068059) + assert(0b1001_0000_1111_1001_1011_1101_1100_1111 == -1862681137) + + assert(0B0000_0111_1110_1100_0111_1100_1000_0010 == 132938882) + assert(0B0000_1011_0111_1011_0001_1010_1010_1000 == 192617128) + assert(0B1100_1100_1000_1010_1111_0111_0100_1101 == -863307955) + assert(0B1000_0000_0001_0010_0001_1001_0101_1110 == -2146297506) + assert(0B1110_0000_0110_1100_0111_0110_1100_1111 == -529762609) + + assert(0b0010_1001_0101_1001__1010_0100_1000_1010__1001_1000_0011_0111__1100_1011_0111_0101L == 2979593543648529269L) + assert(0b1101_1110_0100_1000__0010_1101_1010_0010__0111_1000_1111_1001__1010_1001_0101_1000L == -2429641823128802984L) diff --git a/tests/run/colltest6/CollectionStrawMan6_1.scala b/tests/run/colltest6/CollectionStrawMan6_1.scala index bed5c476b96d..0bf0cbddffc9 100644 --- a/tests/run/colltest6/CollectionStrawMan6_1.scala +++ b/tests/run/colltest6/CollectionStrawMan6_1.scala @@ -755,11 +755,11 @@ object CollectionStrawMan6 extends LowPriority { def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType) - protected def fromIterableWithSameElemType(coll: Iterable[A]): Array[A] = coll.toArray[A](elemTag) + protected def fromIterableWithSameElemType(coll: Iterable[A]): Array[A] = coll.toArray[A](using elemTag) def fromIterable[B: ClassTag](coll: Iterable[B]): Array[B] = coll.toArray[B] - protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(elemTag)) + protected[this] def newBuilder = new 
ArrayBuffer[A].mapResult(_.toArray(using elemTag)) override def knownSize = xs.length diff --git a/tests/run/for-desugar-strawman.scala b/tests/run/for-desugar-strawman.scala new file mode 100644 index 000000000000..a92b19b9150a --- /dev/null +++ b/tests/run/for-desugar-strawman.scala @@ -0,0 +1,96 @@ + +@main def Test = + println: + for + x <- List(1, 2, 3) + y = x + x + if x >= 2 + i <- List.range(0, y) + z = i * i + if z % 2 == 0 + yield + i * x + + println: + val xs = List(1, 2, 3) + xs.flatMapDefined: x => + val y = x + x + xs.applyFilter(x >= 2): + val is = List.range(0, y) + is.mapDefined: i => + val z = i * i + is.applyFilter(z % 2 == 0): + i * x + +extension [A](as: List[A]) + + def applyFilter[B](p: => Boolean)(b: => B) = + if p then Some(b) else None + + def flatMapDefined[B](f: A => Option[IterableOnce[B]]): List[B] = + as.flatMap: x => + f(x).getOrElse(Nil) + + def mapDefined[B](f: A => Option[B]): List[B] = + as.flatMap(f) + +object UNDEFINED + +extension [A](as: Vector[A]) + + def applyFilter[B](p: => Boolean)(b: => B) = + if p then b else UNDEFINED + + def flatMapDefined[B](f: A => IterableOnce[B] | UNDEFINED.type): Vector[B] = + as.flatMap: x => + f(x) match + case UNDEFINED => Nil + case y: IterableOnce[B] => y + + def mapDefined[B](f: A => B | UNDEFINED.type): Vector[B] = + as.flatMap: x => + f(x) match + case UNDEFINED => Nil + case y: B => y :: Nil + +/* +F ::= val x = E; F + x <- E; G +G ::= [] + val x = E; G + if E; G + x <- E; G + +Translation scheme: + +{ for F yield E }c where c = undefined +{ for G yield E }c where c is a reference to the generator preceding the G sequence + +{ for [] yield E }c = E +{ for p = Ep; G yield E }c = val p = Ep; { for G yield E }c +{ for if Ep; G yield E}c = c.applyFilter(Ep)({ for G yield E }c) +{ for p <- Ep; G yield E }c = val c1 = Ep; c1.BIND{ case p => { for G yield E }c1 } (c1 fresh) + + where BIND = flatMapDefined if isGen(G), isFilter(G) + = mapDefined if !isGen(G), isFilter(G) + = flatMap if isGen(G), 
!isFilter(G) + = map if !isGen(G), !isFilter(G) + +{ for case p <- Ep; G yield E }c = { for $x <- Ep; if $x match case p => true case _ => false; p = $x@RuntimeChecked; G yield E }c +{ for case p = Ep; G yield E }c = { for $x = Ep; if $x match case p => true case _ => false; p = $x@RuntimeChecked; G yield E}c + +isFilter(if E; S) +isFilter(val x = E; S) if isFilter(S) + +isGen(x <- E; S) +isGen(val x = E; S) if isGen(S) +isGen(if E; S) if isGen(S) + +*/ + +val foo = 1 + +def main2 = + foo + ??? + ??? match { case _ => 0 } \ No newline at end of file diff --git a/tests/run/given-disambiguation.scala b/tests/run/given-disambiguation.scala new file mode 100644 index 000000000000..637c02a5621f --- /dev/null +++ b/tests/run/given-disambiguation.scala @@ -0,0 +1,58 @@ +import language.experimental.modularity +import language.future + +trait M: + type Self + extension (x: Self) def combine (y: Self): String + def unit: Self + +trait Num: + type Self + def zero: Self + +trait A extends M +trait B extends M + +def f[X: {M, A, B}](x: X) = + summon[X is M] + x.combine(x) + +trait AA: + type XX: {M, A, B} + val x = XX.unit + val A: String = "hello" + +trait AAA: + type X: M +trait BBB: + type X: Num +class CCC[X1: {M, Num}] extends AAA, BBB: + type X = X1 + X.zero + X.unit + +@main def Test = + class C + + given C is M: + extension (x: Self) def combine (y: Self) = "M" + def unit = C() + + given C is A: + extension (x: Self) def combine (y: Self) = "A" + def unit = C() + + given C is B: + extension (x: Self) def combine (y: Self) = "B" + def unit = C() + + assert(f(C()) == "M") + + class CC extends AA: + type XX = C + assert(A.length == 5) + assert(A.toString == "hello") + + CC() + + diff --git a/tests/run/given-triangle.check b/tests/run/given-triangle.check new file mode 100644 index 000000000000..5ba9e6a1e8b9 --- /dev/null +++ b/tests/run/given-triangle.check @@ -0,0 +1,3 @@ +class A +class B +class C diff --git a/tests/run/given-triangle.scala 
b/tests/run/given-triangle.scala new file mode 100644 index 000000000000..66339f44e43c --- /dev/null +++ b/tests/run/given-triangle.scala @@ -0,0 +1,16 @@ +import language.`3.7` + +class A +class B extends A +class C extends A + +given A = A() +given B = B() +given C = C() + +def f(using a: A, b: B, c: C) = + println(a.getClass) + println(b.getClass) + println(c.getClass) + +@main def Test = f diff --git a/tests/run/i11050.scala b/tests/run/i11050.scala index 90a6ec84df85..027812c013c4 100644 --- a/tests/run/i11050.scala +++ b/tests/run/i11050.scala @@ -113,12 +113,14 @@ object Show: inline def show[T](x: T): String = summonInline[Show[T]].show(x) - transparent inline def derived[T](implicit ev: Mirror.Of[T]): Show[T] = new { - def show(x: T): String = inline ev match { - case m: Mirror.ProductOf[T] => showProduct(x.asInstanceOf[Product], m) - case m: Mirror.SumOf[T] => showCases[m.MirroredElemTypes](0)(x, m.ordinal(x)) + transparent inline def derived[T](implicit ev: Mirror.Of[T]): Show[T] = + class InlinedShow extends Show[T] { // provide name to anonymous class + def show(x: T): String = inline ev match { + case m: Mirror.ProductOf[T] => showProduct(x.asInstanceOf[Product], m) + case m: Mirror.SumOf[T] => showCases[m.MirroredElemTypes](0)(x, m.ordinal(x)) + } } - } + new InlinedShow transparent inline def showProduct[T](x: Product, m: Mirror.ProductOf[T]): String = constValue[m.MirroredLabel] + showElems[m.MirroredElemTypes, m.MirroredElemLabels](0, Nil)(x) diff --git a/tests/run/i13215.scala b/tests/run/i13215.scala index 56cd3517d6ac..f43e9aa1e38a 100644 --- a/tests/run/i13215.scala +++ b/tests/run/i13215.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental -Werror -WunstableInlineAccessors +//> using options -experimental -Werror -WunstableInlineAccessors import scala.annotation.publicInBinary diff --git a/tests/run/i15840.scala b/tests/run/i15840.scala new file mode 100644 index 000000000000..0f238e2e7148 --- /dev/null +++ 
b/tests/run/i15840.scala @@ -0,0 +1,27 @@ +//> using options -language:experimental.modularity -source future + +trait Nat: + type N <: Nat + +class _0 extends Nat: + type N = _0 + +class NatOps[N <: Nat](tracked val n: N): + def toInt(using toIntN: ToInt[n.N]): Int = toIntN() + +// works +def toInt[N <: Nat](n: N)(using toIntN: ToInt[n.N]) = toIntN() + +sealed abstract class ToInt[N <: Nat]: + def apply(): Int + +object ToInt: + given ToInt[_0] { + def apply() = 0 + } + +@main def Test() = + assert(toInt(new _0) == 0) + assert(NatOps[_0](new _0).toInt == 0) + assert: + NatOps(new _0).toInt == 0 // did not work diff --git a/tests/run/i17930/Foo_1.scala b/tests/run/i17930/Foo_1.scala new file mode 100644 index 000000000000..0ee4fa711f58 --- /dev/null +++ b/tests/run/i17930/Foo_1.scala @@ -0,0 +1,13 @@ +package eu.joaocosta.defaultstest + +object Foo { + def foo(x: Int, y: Int = 5): Int = x + y +} + +object Bar { + export Foo.* +} + +object App { + println(Bar.foo(2)) // Works +} diff --git a/tests/run/i17930/app_2.scala b/tests/run/i17930/app_2.scala new file mode 100644 index 000000000000..64ba6bff18c5 --- /dev/null +++ b/tests/run/i17930/app_2.scala @@ -0,0 +1,5 @@ +import eu.joaocosta.defaultstest._ + +@main def Test = + println(Foo.foo(2)) // Works + println(Bar.foo(2)) // Fails with "missing argument for parameter y of method foo in object Bar: (x: Int, y: Int): Int" diff --git a/tests/run/i18315.scala b/tests/run/i18315.scala index 85824920efbd..51a80420632d 100644 --- a/tests/run/i18315.scala +++ b/tests/run/i18315.scala @@ -7,9 +7,16 @@ trait Sam2: type T def apply(x: T): T +trait Sam3: + type T + type U + def apply(x: T): U + object Test: def main(args: Array[String]): Unit = val s1: Sam1 { type T = String } = x => x.trim s1.apply("foo") val s2: Sam2 { type T = Int } = x => x + 1 s2.apply(1) + val s3: Sam3 { type T = Int; type U = String } = x => x.toString + s3.apply(2) diff --git a/tests/run/i20095.check b/tests/run/i20095.check new file mode 100644 index 
000000000000..0d55bed3a35c --- /dev/null +++ b/tests/run/i20095.check @@ -0,0 +1,2 @@ +foo +foo diff --git a/tests/run/i20095.scala b/tests/run/i20095.scala new file mode 100644 index 000000000000..e5761f49b539 --- /dev/null +++ b/tests/run/i20095.scala @@ -0,0 +1,9 @@ +inline def twice(inline thunk: =>Unit): Unit = + thunk + thunk + +inline def pipe(inline t: =>Unit, inline f: (=>Unit) => Unit): Unit = f(t) + +@main def Test = + pipe((), twice) + pipe(println("foo"), twice) diff --git a/tests/run/i3920.scala b/tests/run/i3920.scala new file mode 100644 index 000000000000..c66fd8908976 --- /dev/null +++ b/tests/run/i3920.scala @@ -0,0 +1,26 @@ +//> using options -source future -language:experimental.modularity +trait Ordering: + type T + def compare(t1:T, t2: T): Int + +class SetFunctor(tracked val ord: Ordering): + type Set = List[ord.T] + + def empty: Set = Nil + + extension (s: Set) + def add(x: ord.T): Set = x :: remove(x) + def remove(x: ord.T): Set = s.filter(e => ord.compare(x, e) != 0) + def contains(x: ord.T): Boolean = s.exists(e => ord.compare(x, e) == 0) + +object intOrdering extends Ordering: + type T = Int + def compare(t1: T, t2: T): Int = t1 - t2 + +val IntSet = new SetFunctor(intOrdering) + +@main def Test = + import IntSet.* + val set = IntSet.empty.add(6).add(8).add(23) + assert(!set.contains(7)) + assert(set.contains(8)) \ No newline at end of file diff --git a/tests/run/i502.scala b/tests/run/i502.scala index 71176d9660cd..20ed1f43b840 100644 --- a/tests/run/i502.scala +++ b/tests/run/i502.scala @@ -6,13 +6,13 @@ object Test extends App { Array[Int](1, 2) try { - Array[Int](1, 2)(null) + Array[Int](1, 2)(using null) ??? 
} catch { case _: NullPointerException => println("Ok") } - Array[Int](1, 2)({println("foo"); summon[ClassTag[Int]]}) + Array[Int](1, 2)(using {println("foo"); summon[ClassTag[Int]]}) - Array[Int](1, 2)(ClassTag.apply({ println("bar"); classOf[Int]})) + Array[Int](1, 2)(using ClassTag.apply({ println("bar"); classOf[Int]})) } diff --git a/tests/run/implicit-specifity.scala b/tests/run/implicit-specifity.scala index 51fa02d91cfd..9e59cf5f1869 100644 --- a/tests/run/implicit-specifity.scala +++ b/tests/run/implicit-specifity.scala @@ -1,3 +1,5 @@ +import language.`3.7` + case class Show[T](val i: Int) object Show { def apply[T](implicit st: Show[T]): Int = st.i @@ -38,5 +40,5 @@ object Test extends App { assert(Show[Int] == 0) assert(Show[String] == 1) assert(Show[Generic] == 1) // showGen loses against fallback due to longer argument list - assert(Show[Generic2] == 2) // ... but the opaque type intersection trick works. + assert(Show[Generic2] == 1) // ... and the opaque type intersection trick no longer works with new resolution rules. } diff --git a/tests/run/implied-for.scala b/tests/run/implied-for.scala index c7789ce570e4..a55d59e89505 100644 --- a/tests/run/implied-for.scala +++ b/tests/run/implied-for.scala @@ -20,7 +20,7 @@ object Test extends App { val x2: T = t val x3: D[Int] = d - assert(summon[T].isInstanceOf[B]) + assert(summon[T].isInstanceOf[T]) assert(summon[D[Int]].isInstanceOf[D[_]]) } diff --git a/tests/run/implied-priority.scala b/tests/run/implied-priority.scala index 0822fae6778f..a9380e117875 100644 --- a/tests/run/implied-priority.scala +++ b/tests/run/implied-priority.scala @@ -1,5 +1,6 @@ /* These tests show various mechanisms available for implicit prioritization. */ +import language.`3.7` class E[T](val str: String) // The type for which we infer terms below @@ -72,16 +73,16 @@ def test2a = { } /* If that solution is not applicable, we can define an override by refining the - * result type of the given instance, e.g. 
like this: + * result type of all lower-priority instances, e.g. like this: */ object Impl3 { - given t1[T]: E[T]("low") + trait LowPriority // A marker trait to indicate a lower priority + given t1[T]: E[T]("low") with LowPriority } object Override { - trait HighestPriority // A marker trait to indicate a higher priority - given over[T]: E[T]("hi") with HighestPriority() + given over[T]: E[T]("hi") with {} } def test3 = { @@ -90,7 +91,7 @@ def test3 = { { import Override.given import Impl3.given - assert(summon[E[String]].str == "hi") // `over` takes priority since its result type is a subtype of t1's. + assert(summon[E[String]].str == "hi", summon[E[String]].str) // `Impl3` takes priority since its result type is a subtype of t1's. } } diff --git a/tests/run/interleaving.scala b/tests/run/interleaving.scala index 557741032e8a..6749e59168bc 100644 --- a/tests/run/interleaving.scala +++ b/tests/run/interleaving.scala @@ -1,5 +1,6 @@ +import scala.language.experimental.clauseInterleaving + object Test extends App { - import scala.language.experimental.clauseInterleaving trait Key { type Value } trait DB { def getOrElse(k: Key)[V >: k.Value](default: V): V // dependent type parameter diff --git a/tests/run/main-annotation-birthday.scala b/tests/run/main-annotation-birthday.scala deleted file mode 100644 index 32cf28784ced..000000000000 --- a/tests/run/main-annotation-birthday.scala +++ /dev/null @@ -1,35 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -/** - * Wishes a happy birthday to lucky people! - * - * @param age the age of the people whose birthday it is - * @param name the name of the luckiest person! 
- * @param others all the other lucky people - */ -@newMain def happyBirthday(age: Int, name: String, others: String*) = - val suffix = - age % 100 match - case 11 | 12 | 13 => "th" - case _ => - age % 10 match - case 1 => "st" - case 2 => "nd" - case 3 => "rd" - case _ => "th" - val bldr = new StringBuilder(s"Happy $age$suffix birthday, $name") - for other <- others do bldr.append(" and ").append(other) - println(bldr) - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("happyBirthday") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("23", "Lisa", "Peter")) -end Test diff --git a/tests/run/main-annotation-dash-dash.scala b/tests/run/main-annotation-dash-dash.scala deleted file mode 100644 index 3fe0f47983d5..000000000000 --- a/tests/run/main-annotation-dash-dash.scala +++ /dev/null @@ -1,28 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -object myProgram: - - @newMain def foo(str: String, rest: String*): Unit = - println(s"str = $str") - println(s"rest = ${rest.mkString(",")}") - println() - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("foo") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("x", "y", "z")) - callMain(Array("--", "x", "y", "z")) - callMain(Array("--", "-a", "x", "y", "z")) - callMain(Array("x", "--", "y", "z")) - callMain(Array("--str", "y", "--", "z")) - callMain(Array("--str", "--", "y", "z")) // missing argument for `--str` -end Test diff --git a/tests/run/main-annotation-default-value-1.scala b/tests/run/main-annotation-default-value-1.scala deleted file mode 100644 index cf4ba79e1aff..000000000000 --- a/tests/run/main-annotation-default-value-1.scala +++ /dev/null @@ -1,25 +0,0 @@ 
-//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds two numbers */ - @newMain def add(num: Int = 0, inc: Int = 1): Unit = - println(s"$num + $inc = ${num + inc}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "3")) - callMain(Array("2")) - callMain(Array()) -end Test diff --git a/tests/run/main-annotation-default-value-2.scala b/tests/run/main-annotation-default-value-2.scala deleted file mode 100644 index 8b60e6197405..000000000000 --- a/tests/run/main-annotation-default-value-2.scala +++ /dev/null @@ -1,36 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - @newMain def alwaysPassParam(forbiddenParam: Int = throw new IllegalStateException("This should not be evaluated!")): Unit = - println(forbiddenParam) - -end myProgram - -object Test: - def hasCauseIllegalStateException(e: Throwable): Boolean = - e.getCause match { - case null => false - case _: IllegalStateException => true - case e: Throwable => hasCauseIllegalStateException(e) - } - - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("alwaysPassParam") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("42")) - try { - callMain(Array()) - println("This should not be printed") - } - catch { - case e: Exception if hasCauseIllegalStateException(e) => println("OK") - } -end Test diff --git a/tests/run/main-annotation-example.scala b/tests/run/main-annotation-example.scala deleted file mode 100644 index 926496e595e7..000000000000 --- 
a/tests/run/main-annotation-example.scala +++ /dev/null @@ -1,65 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.* -import collection.mutable -import scala.util.CommandLineParser.FromString - -/** Sum all the numbers - * - * @param first Fist number to sum - * @param rest The rest of the numbers to sum - */ -@myMain def sum(first: Int, rest: Int*): Int = first + rest.sum - - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("sum") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("23", "2", "3")) -end Test - -@experimental -class myMain extends MainAnnotation[FromString, Int]: - import MainAnnotation.{ Info, Parameter } - - def command(info: Info, args: Seq[String]): Option[Seq[String]] = - if args.contains("--help") then - println(info.documentation) - None // do not parse or run the program - else if info.parameters.exists(_.hasDefault) then - println("Default arguments are not supported") - None - else if info.hasVarargs then - val numPlainArgs = info.parameters.length - 1 - if numPlainArgs > args.length then - println("Not enough arguments") - None - else - Some(args) - else - if info.parameters.length > args.length then - println("Not enough arguments") - None - else if info.parameters.length < args.length then - println("Too many arguments") - None - else - Some(args) - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using parser: FromString[T]): () => T = - () => parser.fromString(arg) - - def varargGetter[T](param: Parameter, args: Seq[String])(using parser: FromString[T]): () => Seq[T] = - () => args.map(arg => parser.fromString(arg)) - - def run(program: () => Int): Unit = - println("executing program") - val result = program() - println("result: " + result) - println("executed program") -end myMain diff --git 
a/tests/run/main-annotation-flags.scala b/tests/run/main-annotation-flags.scala deleted file mode 100644 index 8a579e6e2d00..000000000000 --- a/tests/run/main-annotation-flags.scala +++ /dev/null @@ -1,44 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - @newMain def shortFlags(a: Boolean, b: Boolean): Unit = - println(s"shortFlags: a = $a, b = $b") - - @newMain def longFlags(flag1: Boolean, flag2: Boolean): Unit = - println(s"longFlags: flag1 = $flag1, flag2 = $flag2") - - @newMain def mixedFlags(a: Boolean, flag: Boolean): Unit = - println(s"mixedFlags: a = $a, flag = $flag") - -end myProgram - -object Test: - def callMain(name: String, args: String*): Unit = - val clazz = Class.forName(name) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args.toArray) - - def main(args: Array[String]): Unit = - callMain("shortFlags") - callMain("shortFlags", "-a") - callMain("shortFlags", "-a", "-b") - callMain("shortFlags", "true", "false") - callMain("shortFlags", "-a", "true") - callMain("shortFlags", "-b", "true") - - - callMain("longFlags") - callMain("longFlags", "--flag1") - callMain("longFlags", "--flag1", "--flag2") - - callMain("mixedFlags") - callMain("mixedFlags", "-a") - callMain("mixedFlags", "-a", "--flag") - - -end Test diff --git a/tests/run/main-annotation-help-override.scala b/tests/run/main-annotation-help-override.scala deleted file mode 100644 index bfff85c5a353..000000000000 --- a/tests/run/main-annotation-help-override.scala +++ /dev/null @@ -1,56 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.annotation.newMain.alias -import scala.util.Try - -object myProgram: - - /** A method that should let --help and -h display help. */ - @newMain def helpOverride1(notHelp: Int) = ??? 
- - /** A method that should let -h display help, but not --help. */ - @newMain def helpOverride2(help: Int) = ??? - - /** A method that should let --help display help, but not -h. */ - @newMain def helpOverride3(h: Int) = ??? - - /** A method that should not let --help and -h display help. */ - @newMain def helpOverride4(help: Int, h: Int) = ??? - - - /** A method that should let -h display help, but not --help. */ - @newMain def helpOverride5(@alias("help") notHelp: Int) = ??? - - /** A method that should let --help display help, but not -h. */ - @newMain def helpOverride6(@alias("h") notHelp: Int) = ??? - - /** A method that should not let --help and -h display help. */ - @newMain def helpOverride7(@alias("help") notHelp: Int, @alias("h") notH: Int) = ??? - - /** A method that should not let --help and -h display help. */ - @newMain def helpOverride8(@alias("help") @alias("h") notHelp: Int) = ??? - - /** A method that should not let --help and -h display help. */ - // Probably the correct way to override help flags. 
- @newMain def helpOverride9(@alias("h") help: Boolean) = println(s"helpOverride9: $help") - -end myProgram - -object Test: - val allClazzes: Seq[Class[?]] = - LazyList.from(1).map(i => Try(Class.forName("helpOverride" + i.toString))).takeWhile(_.isSuccess).map(_.get) - - def callAllMains(args: Array[String]): Unit = - for (clazz <- allClazzes) { - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - } - - def main(args: Array[String]): Unit = - println("##### --help") - callAllMains(Array("--help")) - println("##### -h") - callAllMains(Array("-h")) -end Test diff --git a/tests/run/main-annotation-help.scala b/tests/run/main-annotation-help.scala deleted file mode 100644 index d68bb0d7e874..000000000000 --- a/tests/run/main-annotation-help.scala +++ /dev/null @@ -1,178 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -import scala.util.CommandLineParser.FromString -import scala.util.Try - -class MyNumber(val value: Int): - def +(other: MyNumber): MyNumber = MyNumber(value + other.value) - -class MyGeneric[T](val value: T) - -given FromString[MyNumber] with - override def fromString(s: String): MyNumber = MyNumber(summon[FromString[Int]].fromString(s)) - -given FromString[MyGeneric[Int]] with - override def fromString(s: String): MyGeneric[Int] = MyGeneric(summon[FromString[Int]].fromString(s)) - -object myProgram: - - /** - * Adds two numbers. - */ - @newMain def doc1(num: Int, inc: Int): Unit = () - - /** Adds two numbers. */ - @newMain def doc2(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. - */ - @newMain def doc3(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. - * - * @param num the first number - * @param inc the second number - */ - @newMain def doc4(num: Int, inc: Int = 1): Unit = () - - /** - * Adds two numbers. 
- * - * @param num the first number - */ - @newMain def doc5(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. - * - * @param num - * @param inc - */ - @newMain def doc6(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. - * - * @param num the first number - * @param inc the second number - * @return the sum of the two numbers (not really) - */ - @newMain def doc7(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. - * - * @param num the first number - * @param inc the second number - * @return the sum of the two numbers (not really) - */ - @newMain def doc8(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. Same as [[doc1]]. - * - * @param num the first number - * @param inc the second number - * @return the sum of the two numbers (not really) - * @see [[doc1]] - */ - @newMain def doc9(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. - * - * This should be on another line. - * - * - * - * - * And this also. - * - * - * @param num I might have to write this - * on two lines - * @param inc I might even - * have to write this one - * on three lines - */ - @newMain def doc10(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. - * - * @param num the first number - * - * Oh, a new line! - * - * @param inc the second number - * - * And another one! - */ - @newMain def doc11(num: Int, inc: Int): Unit = () - - /** - * Adds two numbers. It seems that I have a very long line of documentation and therefore might need to be cut at some point to fit a small terminal screen. - */ - @newMain def doc12(num: Int, inc: Int): Unit = () - - /** - * Addstwonumbers.ItseemsthatIhaveaverylonglineofdocumentationandthereforemightneedtobecutatsomepointtofitasmallterminalscreen. - */ - @newMain def doc13(num: Int, inc: Int): Unit = () - - /** - * Loudly judges the number of argument you gave to this poor function. 
- */ - @newMain def doc14( - arg1: String, arg2: Int, arg3: String, arg4: Int, - arg5: String, arg6: Int, arg7: String, arg8: Int, - arg9: String = "I", arg10: Int = 42, arg11: String = "used", arg12: Int = 0, - arg13: String = "to", arg14: Int = 34, arg15: String = "wonder", arg16: Int* - ): Unit = () - - /** - * Adds two instances of [[MyNumber]]. - * @param myNum my first number to add - * @param myInc my second number to add - */ - @newMain def doc15(myNum: MyNumber, myInc: MyNumber): Unit = () - - /** - * Compares two instances of [[MyGeneric]]. - * @param first my first element - * @param second my second element - */ - @newMain def doc16(first: MyGeneric[Int], second: MyGeneric[Int]): Unit = () - - // This should not be printed in explain! - @newMain def doc17(a: Int, b: Int, c: String): Unit = () - -end myProgram - -object Test: - def callMain1(args: Array[String]): Unit = - val clazz = Class.forName("doc1") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - val allClazzes: Seq[Class[?]] = - LazyList.from(1).map(i => Try(Class.forName("doc" + i.toString))).takeWhile(_.isSuccess).map(_.get) - - def callAllMains(args: Array[String]): Unit = - for (clazz <- allClazzes) { - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - } - - def main(args: Array[String]): Unit = - callMain1(Array("--help")) - callMain1(Array("Some", "garbage", "before", "--help")) - callMain1(Array("--help", "and", "some", "stuff", "after")) - - callAllMains(Array("--help")) -end Test diff --git a/tests/run/main-annotation-homemade-annot-1.scala b/tests/run/main-annotation-homemade-annot-1.scala deleted file mode 100644 index 3106dae4006f..000000000000 --- a/tests/run/main-annotation-homemade-annot-1.scala +++ /dev/null @@ -1,49 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.concurrent._ -import scala.annotation.* -import scala.collection.mutable -import 
ExecutionContext.Implicits.global -import duration._ -import util.CommandLineParser.FromString - -@mainAwait def get(wait: Int): Future[Int] = Future{ - Thread.sleep(1000 * wait) - 42 -} - -@mainAwait def getMany(wait: Int*): Future[Int] = Future{ - Thread.sleep(1000 * wait.sum) - wait.length -} - -object Test: - def callMain(cls: String, args: Array[String]): Unit = - val clazz = Class.forName(cls) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - println(Await.result(get(1), Duration(2, SECONDS))) - callMain("get", Array("1")) - callMain("getMany", Array("1")) - callMain("getMany", Array("0", "1")) -end Test - -@experimental -class mainAwait(timeout: Int = 2) extends MainAnnotation[FromString, Future[Any]]: - import MainAnnotation.* - - // This is a toy example, it only works with positional args - def command(info: Info, args: Seq[String]): Option[Seq[String]] = Some(args) - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: FromString[T]): () => T = - () => p.fromString(arg) - - def varargGetter[T](param: Parameter, args: Seq[String])(using p: FromString[T]): () => Seq[T] = - () => for arg <- args yield p.fromString(arg) - - def run(f: () => Future[Any]): Unit = println(Await.result(f(), Duration(timeout, SECONDS))) - -end mainAwait diff --git a/tests/run/main-annotation-homemade-annot-2.scala b/tests/run/main-annotation-homemade-annot-2.scala deleted file mode 100644 index 980241ff93d3..000000000000 --- a/tests/run/main-annotation-homemade-annot-2.scala +++ /dev/null @@ -1,52 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.collection.mutable -import scala.annotation.* -import util.CommandLineParser.FromString - -@myMain()("A") -def foo1(): Unit = println("I was run!") - -@myMain(0)("This should not be printed") -def foo2() = throw new Exception("This should not be run") - 
-@myMain(1)("Purple smart", "Blue fast", "White fashion", "Yellow quiet", "Orange honest", "Pink loud") -def foo3() = println("Here are some colors:") - -@myMain()() -def foo4() = println("This will be printed, but nothing more.") - -object Test: - val allClazzes: Seq[Class[?]] = - LazyList.from(1).map(i => scala.util.Try(Class.forName("foo" + i.toString))).takeWhile(_.isSuccess).map(_.get) - - def callMains(): Unit = - for (clazz <- allClazzes) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, Array[String]()) - - def main(args: Array[String]) = - callMains() -end Test - -// This is a toy example, it only works with positional args -@experimental -class myMain(runs: Int = 3)(after: String*) extends MainAnnotation[FromString, Any]: - import MainAnnotation.* - - def command(info: Info, args: Seq[String]): Option[Seq[String]] = Some(args) - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: FromString[T]): () => T = - () => p.fromString(arg) - - def varargGetter[T](param: Parameter, args: Seq[String])(using p: FromString[T]): () => Seq[T] = - () => for arg <- args yield p.fromString(arg) - - def run(f: () => Any): Unit = - for (_ <- 1 to runs) - f() - if after.length > 0 then println(after.mkString(", ")) - end run - -end myMain diff --git a/tests/run/main-annotation-homemade-annot-3.scala b/tests/run/main-annotation-homemade-annot-3.scala deleted file mode 100644 index 4a894777c562..000000000000 --- a/tests/run/main-annotation-homemade-annot-3.scala +++ /dev/null @@ -1,26 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.* -import scala.util.CommandLineParser.FromString - -@mainNoArgs def foo() = println("Hello world!") - -object Test: - def main(args: Array[String]) = - val clazz = Class.forName("foo") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, Array[String]()) -end Test - -@experimental 
-class mainNoArgs extends MainAnnotation[FromString, Any]: - import MainAnnotation.* - - def command(info: Info, args: Seq[String]): Option[Seq[String]] = Some(args) - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: FromString[T]): () => T = ??? - - def varargGetter[T](param: Parameter, args: Seq[String])(using p: FromString[T]): () => Seq[T] = ??? - - def run(program: () => Any): Unit = program() diff --git a/tests/run/main-annotation-homemade-annot-4.scala b/tests/run/main-annotation-homemade-annot-4.scala deleted file mode 100644 index b50e89523475..000000000000 --- a/tests/run/main-annotation-homemade-annot-4.scala +++ /dev/null @@ -1,27 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.* -import scala.util.CommandLineParser.FromString - -@mainManyArgs(1, "B", 3) def foo() = println("Hello world!") - -object Test: - def main(args: Array[String]) = - val clazz = Class.forName("foo") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, Array[String]()) -end Test - -@experimental -class mainManyArgs(i1: Int, s2: String, i3: Int) extends MainAnnotation[FromString, Any]: - import MainAnnotation.* - - def command(info: Info, args: Seq[String]): Option[Seq[String]] = Some(args) - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: FromString[T]): () => T = ??? - - def varargGetter[T](param: Parameter, args: Seq[String])(using p: FromString[T]): () => Seq[T] = ??? 
- - - def run(program: () => Any): Unit = program() diff --git a/tests/run/main-annotation-homemade-annot-5.scala b/tests/run/main-annotation-homemade-annot-5.scala deleted file mode 100644 index a129a51da7eb..000000000000 --- a/tests/run/main-annotation-homemade-annot-5.scala +++ /dev/null @@ -1,28 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.* -import scala.util.CommandLineParser.FromString - -@mainManyArgs(Some(1)) def foo() = println("Hello world!") -@mainManyArgs(None) def bar() = println("Hello world!") - -object Test: - def main(args: Array[String]) = - for (methodName <- List("foo", "bar")) - val clazz = Class.forName(methodName) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, Array[String]()) -end Test - -@experimental -class mainManyArgs(o: Option[Int]) extends MainAnnotation[FromString, Any]: - import MainAnnotation.* - - def command(info: Info, args: Seq[String]): Option[Seq[String]] = Some(args) - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: FromString[T]): () => T = ??? - - def varargGetter[T](param: Parameter, args: Seq[String])(using p: FromString[T]): () => Seq[T] = ??? 
- - def run(program: () => Any): Unit = program() diff --git a/tests/run/main-annotation-homemade-annot-6.scala b/tests/run/main-annotation-homemade-annot-6.scala deleted file mode 100644 index 5a92e6382d3d..000000000000 --- a/tests/run/main-annotation-homemade-annot-6.scala +++ /dev/null @@ -1,65 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.* - -/** Foo docs */ -@myMain def foo(i: Int, j: String = "2") = println(s"foo($i, $j)") -/** Bar docs - * - * @param i the first parameter - */ -@myMain def bar(@MyParamAnnot(3) i: List[Int], rest: Int*) = println(s"bar($i, ${rest.mkString(", ")})") - -object Test: - def main(args: Array[String]) = - for (methodName <- List("foo", "bar")) - val clazz = Class.forName(methodName) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, Array[String]("1", "2")) -end Test - -@experimental -class myMain extends MainAnnotation[Make, Any]: - import MainAnnotation.* - - def command(info: Info, args: Seq[String]): Option[Seq[String]] = - def paramInfoString(paramInfo: Parameter) = - import paramInfo.* - s" Parameter(name=\"$name\", typeName=\"$typeName\", hasDefault=$hasDefault, isVarargs=$isVarargs, documentation=\"$documentation\", annotations=$annotations)" - println( - s"""command( - | ${args.mkString("Array(", ", ", ")")}, - | ${info.name}, - | "${info.documentation}", - | ${info.parameters.map(paramInfoString).mkString("Seq(\n", ",\n", "\n )*")} - |)""".stripMargin) - Some(args) - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: Make[T]): () => T = - () => p.make - - def varargGetter[T](param: Parameter, args: Seq[String])(using p: Make[T]): () => Seq[T] = - println("varargGetter()") - () => Seq(p.make, p.make) - - def run(f: () => Any): Unit = - println("run()") - f() - println() - -@experimental -case class MyParamAnnot(n: Int) extends MainAnnotation.ParameterAnnotation - -trait Make[T]: - def 
make: T - -given Make[Int] with - def make: Int = 42 - - -given Make[String] with - def make: String = "abc" - -given [T: Make]: Make[List[T]] with - def make: List[T] = List(summon[Make[T]].make) diff --git a/tests/run/main-annotation-homemade-parser-1.scala b/tests/run/main-annotation-homemade-parser-1.scala deleted file mode 100644 index 94d43bf19cc5..000000000000 --- a/tests/run/main-annotation-homemade-parser-1.scala +++ /dev/null @@ -1,30 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.util.CommandLineParser.FromString - -class MyNumber(val value: Int) { - def +(other: MyNumber): MyNumber = MyNumber(value + other.value) -} - -given FromString[MyNumber] with - override def fromString(s: String): MyNumber = MyNumber(summon[FromString[Int]].fromString(s)) - -object myProgram: - - @newMain def add(num: MyNumber, inc: MyNumber): Unit = - println(s"${num.value} + ${inc.value} = ${num.value + inc.value}") - -end myProgram - - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "3")) -end Test diff --git a/tests/run/main-annotation-homemade-parser-2.scala b/tests/run/main-annotation-homemade-parser-2.scala deleted file mode 100644 index 4f40f9b42b27..000000000000 --- a/tests/run/main-annotation-homemade-parser-2.scala +++ /dev/null @@ -1,33 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.util.CommandLineParser.FromString - -given FromString[Test.MyNumber] with - override def fromString(s: String) = Test.create(summon[FromString[Int]].fromString(s)) - -object myProgram: - - @newMain def add(num: Test.MyNumber, inc: Test.MyNumber): Unit = - val numV = Test.value(num) - val incV = Test.value(inc) - println(s"$numV + 
$incV = ${numV + incV}") - -end myProgram - - -object Test: - opaque type MyNumber = Int - - def create(n: Int): MyNumber = n - def value(n: MyNumber): Int = n - - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "3")) -end Test diff --git a/tests/run/main-annotation-homemade-parser-3.scala b/tests/run/main-annotation-homemade-parser-3.scala deleted file mode 100644 index 066e40f1b3a0..000000000000 --- a/tests/run/main-annotation-homemade-parser-3.scala +++ /dev/null @@ -1,29 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.util.CommandLineParser.FromString - -given FromString[Int] with - override def fromString(s: String) = s.toInt + 42 - -object myProgram: - - given FromString[Int] with - override def fromString(s: String) = -1 * s.toInt // Should be ignored, because not top-level - - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - -end myProgram - - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "3")) -end Test diff --git a/tests/run/main-annotation-homemade-parser-4.scala b/tests/run/main-annotation-homemade-parser-4.scala deleted file mode 100644 index 668aa040380c..000000000000 --- a/tests/run/main-annotation-homemade-parser-4.scala +++ /dev/null @@ -1,50 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.util.CommandLineParser.FromString - -given [T : FromString]: FromString[Option[T]] with - override def fromString(s: String) = Some(summon[FromString[T]].fromString(s)) - override def 
fromStringOption(s: String) = - try { - Some(fromString(s)) - } - catch { - case _: IllegalArgumentException => Some(None) - } - -given [T : FromString]: FromString[Either[T, String]] with - override def fromString(s: String) = Left(summon[FromString[T]].fromString(s)) - override def fromStringOption(s: String) = - try { - Some(fromString(s)) - } - catch { - case _: IllegalArgumentException => Some(Right(s"Unable to parse argument $s")) - } - -object myProgram: - - @newMain def getOption(o: Option[Int] = Some(42)) = println(o) - - @newMain def getEither(e: Either[Int, String] = Right("No argument given")) = println(e) - -end myProgram - - -object Test: - def call(className: String, args: Array[String]): Unit = - val clazz = Class.forName(className) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - call("getOption", Array("7")) - call("getOption", Array()) - call("getOption", Array("abc")) - println - call("getEither", Array("7")) - call("getEither", Array()) - call("getEither", Array("abc")) -end Test diff --git a/tests/run/main-annotation-homemade-parser-5.scala b/tests/run/main-annotation-homemade-parser-5.scala deleted file mode 100644 index 123631312ef7..000000000000 --- a/tests/run/main-annotation-homemade-parser-5.scala +++ /dev/null @@ -1,29 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.util.CommandLineParser.FromString - -given intParser: FromString[Int => Int] with - override def fromString(s: String) = n => summon[FromString[Int]].fromString(s) + n - -given stringParser: FromString[String => String] with - override def fromString(s: String) = s1 => summon[FromString[String]].fromString(s) + s1 - -object myProgram: - - @newMain def show(getI: Int => Int, getS: String => String) = - println(getI(3)) - println(getS(" world!")) - -end myProgram - -object Test: - def callMain(args: 
Array[String]): Unit = - val clazz = Class.forName("show") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("39", "Hello")) -end Test diff --git a/tests/run/main-annotation-multiple.scala b/tests/run/main-annotation-multiple.scala deleted file mode 100644 index dbc66d0df9ca..000000000000 --- a/tests/run/main-annotation-multiple.scala +++ /dev/null @@ -1,28 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds two numbers */ - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - - /** Subtracts two numbers */ - @newMain def sub(num: Int, inc: Int): Unit = - println(s"$num - $inc = ${num - inc}") - -end myProgram - -object Test: - def callMain(mainMeth: String, args: Array[String]): Unit = - val clazz = Class.forName(mainMeth) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain("add", Array("2", "3")) - callMain("sub", Array("2", "3")) -end Test diff --git a/tests/run/main-annotation-named-params.scala b/tests/run/main-annotation-named-params.scala deleted file mode 100644 index 4cfa2c8049b4..000000000000 --- a/tests/run/main-annotation-named-params.scala +++ /dev/null @@ -1,30 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds two numbers */ - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("--num", "2", "--inc", 
"3")) - callMain(Array("--inc", "3", "--num", "2")) - - callMain(Array("2", "--inc", "3")) - callMain(Array("--num", "2", "3")) - - callMain(Array("--num", "2", "--num", "1", "--inc", "3")) - callMain(Array("--inc", "1", "--num", "2", "--num", "1", "--inc", "3")) -end Test diff --git a/tests/run/main-annotation-newMain.scala b/tests/run/main-annotation-newMain.scala deleted file mode 100644 index 5b00a46ce7e9..000000000000 --- a/tests/run/main-annotation-newMain.scala +++ /dev/null @@ -1,323 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.* -import collection.mutable -import scala.util.CommandLineParser.FromString - -@newMain def happyBirthday(age: Int, name: String, others: String*) = - val suffix = - age % 100 match - case 11 | 12 | 13 => "th" - case _ => - age % 10 match - case 1 => "st" - case 2 => "nd" - case 3 => "rd" - case _ => "th" - val bldr = new StringBuilder(s"Happy $age$suffix birthday, $name") - for other <- others do bldr.append(" and ").append(other) - println(bldr) - - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("happyBirthday") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("23", "Lisa", "Peter")) -end Test - - - -@experimental -final class newMain extends MainAnnotation[FromString, Any]: - import newMain._ - import MainAnnotation._ - - private inline val argMarker = "--" - private inline val shortArgMarker = "-" - - /** The name of the special argument to display the method's help. - * If one of the method's parameters is called the same, will be ignored. - */ - private inline val helpArg = "help" - - /** The short name of the special argument to display the method's help. - * If one of the method's parameters uses the same short name, will be ignored. 
- */ - private inline val shortHelpArg = 'h' - - private inline val maxUsageLineLength = 120 - - private var info: Info = _ // TODO remove this var - - - /** A buffer for all errors */ - private val errors = new mutable.ArrayBuffer[String] - - /** Issue an error, and return an uncallable getter */ - private def error(msg: String): () => Nothing = - errors += msg - () => throw new AssertionError("trying to get invalid argument") - - private def getAliases(param: Parameter): Seq[String] = - param.annotations.collect{ case a: Alias => a }.flatMap(_.aliases) - - private def getAlternativeNames(param: Parameter): Seq[String] = - getAliases(param).filter(nameIsValid(_)) - - private def getShortNames(param: Parameter): Seq[Char] = - getAliases(param).filter(shortNameIsValid(_)).map(_(0)) - - private inline def nameIsValid(name: String): Boolean = - name.length > 1 // TODO add more checks for illegal characters - - private inline def shortNameIsValid(name: String): Boolean = - name.length == 1 && shortNameIsValidChar(name(0)) - - private inline def shortNameIsValidChar(shortName: Char): Boolean = - ('A' <= shortName && shortName <= 'Z') || ('a' <= shortName && shortName <= 'z') - - private def getNameWithMarker(name: String | Char): String = name match { - case c: Char => shortArgMarker + c - case s: String if shortNameIsValid(s) => shortArgMarker + s - case s => argMarker + s - } - - private def getInvalidNames(param: Parameter): Seq[String | Char] = - getAliases(param).filter(name => !nameIsValid(name) && !shortNameIsValid(name)) - - def command(info: Info, args: Seq[String]): Option[Seq[String]] = - this.info = info - - val namesToCanonicalName: Map[String, String] = info.parameters.flatMap( - infos => - val names = getAlternativeNames(infos) - val canonicalName = infos.name - if nameIsValid(canonicalName) then (canonicalName +: names).map(_ -> canonicalName) - else names.map(_ -> canonicalName) - ).toMap - val shortNamesToCanonicalName: Map[Char, String] = 
info.parameters.flatMap( - infos => - val names = getShortNames(infos) - val canonicalName = infos.name - if shortNameIsValid(canonicalName) then (canonicalName(0) +: names).map(_ -> canonicalName) - else names.map(_ -> canonicalName) - ).toMap - - val helpIsOverridden = namesToCanonicalName.exists((name, _) => name == helpArg) - val shortHelpIsOverridden = shortNamesToCanonicalName.exists((name, _) => name == shortHelpArg) - - val (positionalArgs, byNameArgs, invalidByNameArgs) = { - def getCanonicalArgName(arg: String): Option[String] = - if arg.startsWith(argMarker) && arg.length > argMarker.length then - namesToCanonicalName.get(arg.drop(argMarker.length)) - else if arg.startsWith(shortArgMarker) && arg.length == shortArgMarker.length + 1 then - shortNamesToCanonicalName.get(arg(shortArgMarker.length)) - else - None - - def isArgName(arg: String): Boolean = - val isFullName = arg.startsWith(argMarker) - val isShortName = arg.startsWith(shortArgMarker) && arg.length == shortArgMarker.length + 1 && shortNameIsValidChar(arg(shortArgMarker.length)) - isFullName || isShortName - - def recurse(remainingArgs: Seq[String], pa: mutable.Queue[String], bna: Seq[(String, String)], ia: Seq[String]): (mutable.Queue[String], Seq[(String, String)], Seq[String]) = - remainingArgs match { - case Seq() => - (pa, bna, ia) - case argName +: argValue +: rest if isArgName(argName) => - getCanonicalArgName(argName) match { - case Some(canonicalName) => recurse(rest, pa, bna :+ (canonicalName -> argValue), ia) - case None => recurse(rest, pa, bna, ia :+ argName) - } - case arg +: rest => - recurse(rest, pa :+ arg, bna, ia) - } - - val (pa, bna, ia) = recurse(args.toSeq, mutable.Queue.empty, Vector(), Vector()) - val nameToArgValues: Map[String, Seq[String]] = if bna.isEmpty then Map.empty else bna.groupMapReduce(_._1)(p => List(p._2))(_ ++ _) - (pa, nameToArgValues, ia) - } - - val argStrings: Seq[Seq[String]] = - for paramInfo <- info.parameters yield { - if (paramInfo.isVarargs) { - 
val byNameGetters = byNameArgs.getOrElse(paramInfo.name, Seq()) - val positionalGetters = positionalArgs.removeAll() - // First take arguments passed by name, then those passed by position - byNameGetters ++ positionalGetters - } else { - byNameArgs.get(paramInfo.name) match - case Some(Nil) => - throw AssertionError(s"${paramInfo.name} present in byNameArgs, but it has no argument value") - case Some(argValues) => - if argValues.length > 1 then - // Do not accept multiple values - // Remove this test to take last given argument - error(s"more than one value for ${paramInfo.name}: ${argValues.mkString(", ")}") - Nil - else - List(argValues.last) - case None => - if positionalArgs.length > 0 then - List(positionalArgs.dequeue()) - else if paramInfo.hasDefault then - Nil - else - error(s"missing argument for ${paramInfo.name}") - Nil - } - } - - // Check aliases unicity - val nameAndCanonicalName = info.parameters.flatMap { - case paramInfo => (paramInfo.name +: getAlternativeNames(paramInfo) ++: getShortNames(paramInfo)).map(_ -> paramInfo.name) - } - val nameToCanonicalNames = nameAndCanonicalName.groupMap(_._1)(_._2) - - for (name, canonicalNames) <- nameToCanonicalNames if canonicalNames.length > 1 do - throw IllegalArgumentException(s"$name is used for multiple parameters: ${canonicalNames.mkString(", ")}") - - // Check aliases validity - val problematicNames = info.parameters.flatMap(getInvalidNames) - if problematicNames.length > 0 then - throw IllegalArgumentException(s"The following aliases are invalid: ${problematicNames.mkString(", ")}") - - // Handle unused and invalid args - for (remainingArg <- positionalArgs) error(s"unused argument: $remainingArg") - for (invalidArg <- invalidByNameArgs) error(s"unknown argument name: $invalidArg") - - val displayHelp = - (!helpIsOverridden && args.contains(getNameWithMarker(helpArg))) || - (!shortHelpIsOverridden && args.contains(getNameWithMarker(shortHelpArg))) - - if displayHelp then - usage() - println() - 
explain() - None - else if errors.nonEmpty then - for msg <- errors do println(s"Error: $msg") - usage() - None - else - Some(argStrings.flatten) - end command - - private def usage(): Unit = - def argsUsage: Seq[String] = - for (infos <- info.parameters) - yield { - val canonicalName = getNameWithMarker(infos.name) - val shortNames = getShortNames(infos).map(getNameWithMarker) - val alternativeNames = getAlternativeNames(infos).map(getNameWithMarker) - val namesPrint = (canonicalName +: alternativeNames ++: shortNames).mkString("[", " | ", "]") - val shortTypeName = infos.typeName.split('.').last - if infos.isVarargs then s"[<$shortTypeName> [<$shortTypeName> [...]]]" - else if infos.hasDefault then s"[$namesPrint <$shortTypeName>]" - else s"$namesPrint <$shortTypeName>" - } - - def wrapArgumentUsages(argsUsage: Seq[String], maxLength: Int): Seq[String] = { - def recurse(args: Seq[String], currentLine: String, acc: Vector[String]): Seq[String] = - (args, currentLine) match { - case (Nil, "") => acc - case (Nil, l) => (acc :+ l) - case (arg +: t, "") => recurse(t, arg, acc) - case (arg +: t, l) if l.length + 1 + arg.length <= maxLength => recurse(t, s"$l $arg", acc) - case (arg +: t, l) => recurse(t, arg, acc :+ l) - } - - recurse(argsUsage, "", Vector()).toList - } - - val usageBeginning = s"Usage: ${info.name} " - val argsOffset = usageBeginning.length - val usages = wrapArgumentUsages(argsUsage, maxUsageLineLength - argsOffset) - - println(usageBeginning + usages.mkString("\n" + " " * argsOffset)) - end usage - - private def explain(): Unit = - inline def shiftLines(s: Seq[String], shift: Int): String = s.map(" " * shift + _).mkString("\n") - - def wrapLongLine(line: String, maxLength: Int): List[String] = { - def recurse(s: String, acc: Vector[String]): Seq[String] = - val lastSpace = s.trim.nn.lastIndexOf(' ', maxLength) - if ((s.length <= maxLength) || (lastSpace < 0)) - acc :+ s - else { - val (shortLine, rest) = s.splitAt(lastSpace) - recurse(rest.trim.nn, 
acc :+ shortLine) - } - - recurse(line, Vector()).toList - } - - if (info.documentation.nonEmpty) - println(wrapLongLine(info.documentation, maxUsageLineLength).mkString("\n")) - if (info.parameters.nonEmpty) { - val argNameShift = 2 - val argDocShift = argNameShift + 2 - - println("Arguments:") - for infos <- info.parameters do - val canonicalName = getNameWithMarker(infos.name) - val shortNames = getShortNames(infos).map(getNameWithMarker) - val alternativeNames = getAlternativeNames(infos).map(getNameWithMarker) - val otherNames = (alternativeNames ++: shortNames) match { - case Seq() => "" - case names => names.mkString("(", ", ", ") ") - } - val argDoc = StringBuilder(" " * argNameShift) - argDoc.append(s"$canonicalName $otherNames- ${infos.typeName.split('.').last}") - if infos.isVarargs then argDoc.append(" (vararg)") - else if infos.hasDefault then argDoc.append(" (optional)") - - if (infos.documentation.nonEmpty) { - val shiftedDoc = - infos.documentation.split("\n").nn - .map(line => shiftLines(wrapLongLine(line.nn, maxUsageLineLength - argDocShift), argDocShift)) - .mkString("\n") - argDoc.append("\n").append(shiftedDoc) - } - - println(argDoc) - } - end explain - - private def convert[T](argName: String, arg: String, p: FromString[T]): () => T = - p.fromStringOption(arg) match - case Some(t) => () => t - case None => error(s"invalid argument for $argName: $arg") - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: FromString[T]): () => T = { - if arg.nonEmpty then convert(param.name, arg, p) - else defaultArgument match - case Some(defaultGetter) => defaultGetter - case None => error(s"missing argument for ${param.name}") - } - - def varargGetter[T](param: Parameter, args: Seq[String])(using p: FromString[T]): () => Seq[T] = { - val getters = args.map(arg => convert(param.name, arg, p)) - () => getters.map(_()) - } - - def run(execProgram: () => Any): Unit = { - if errors.nonEmpty then - for msg <- errors do 
println(s"Error: $msg") - usage() - else - execProgram() - } - -end newMain - -object newMain: - @experimental - final class Alias(val aliases: String*) extends MainAnnotation.ParameterAnnotation -end newMain diff --git a/tests/run/main-annotation-no-parameters-no-parens.scala b/tests/run/main-annotation-no-parameters-no-parens.scala deleted file mode 100644 index b62fd55538de..000000000000 --- a/tests/run/main-annotation-no-parameters-no-parens.scala +++ /dev/null @@ -1,23 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Does nothing, except confirming that it runs */ - @newMain def run: Unit = - println("I run properly!") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("run") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array()) -end Test diff --git a/tests/run/main-annotation-no-parameters.scala b/tests/run/main-annotation-no-parameters.scala deleted file mode 100644 index fc92a5680e07..000000000000 --- a/tests/run/main-annotation-no-parameters.scala +++ /dev/null @@ -1,23 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Does nothing, except confirming that it runs */ - @newMain def run(): Unit = - println("I run properly!") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("run") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array()) -end Test diff --git a/tests/run/main-annotation-overload.scala b/tests/run/main-annotation-overload.scala deleted file mode 100644 index 60f9b68a58a2..000000000000 --- 
a/tests/run/main-annotation-overload.scala +++ /dev/null @@ -1,35 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds three numbers (malformed, doesn't work) */ - def add(num1: Int, num2: Int, num3: Int): Unit = - ??? - - /** Adds two numbers */ - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - - /** Adds one number (malformed, doesn't work) */ - def add(num: Int): Unit = - ??? - - /** Adds zero numbers (malformed, doesn't work) */ - def add(): Int = - ??? - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "3")) -end Test diff --git a/tests/run/main-annotation-param-annot-1.scala b/tests/run/main-annotation-param-annot-1.scala deleted file mode 100644 index 5cf29b9f4efb..000000000000 --- a/tests/run/main-annotation-param-annot-1.scala +++ /dev/null @@ -1,111 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.annotation.newMain.alias - -object myProgram: - @newMain def altName1( - @alias("myNum") num: Int, - inc: Int - ): Unit = - println(s"$num + $inc = ${num + inc}") - - @newMain def altName2( - @alias("myNum") num: Int, - @alias("myInc") inc: Int - ): Unit = - println(s"$num + $inc = ${num + inc}") - - @newMain def shortName1( - @alias("n") num: Int, - inc: Int - ): Unit = - println(s"$num + $inc = ${num + inc}") - - @newMain def shortName2( - @alias("n") num: Int, - @alias("i") inc: Int - ): Unit = - println(s"$num + $inc = ${num + inc}") - - @newMain def mix1( - @alias("myNum") @alias("n") num: Int, - @alias("i") @alias("myInc") inc: Int - ): Unit = - println(s"$num + $inc = ${num + inc}") - - def myNum: String = 
"myNum" - def myShortNum = { - var short = 'a' - for i <- 0 until 'n' - 'a' - do - short = (short.toInt + 1).toChar - short.toString - } - def myInc = {new Exception("myInc")}.getMessage - def myShortInc = () => "i" - - @newMain def mix2( - @alias(myNum) @alias(myShortNum) num: Int, - @alias(myShortInc()) @alias(myInc) inc: Int - ): Unit = - println(s"$num + $inc = ${num + inc}") - - @newMain def multiple( - @alias("myNum") @alias("n") num: Int, - @alias("i") @alias("myInc") inc: Int - ): Unit = - println(s"$num + $inc = ${num + inc}") -end myProgram - - -object Test: - def callMain(className: String, args: Array[String]) = - val clazz = Class.forName(className) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain("altName1", Array("--num", "2", "--inc", "3")) - callMain("altName1", Array("--myNum", "2", "--inc", "3")) - - callMain("altName2", Array("--num", "2", "--inc", "3")) - callMain("altName2", Array("--myNum", "2", "--inc", "3")) - callMain("altName2", Array("--num", "2", "--myInc", "3")) - callMain("altName2", Array("--myNum", "2", "--myInc", "3")) - - callMain("shortName1", Array("--num", "2", "--inc", "3")) - callMain("shortName1", Array("-n", "2", "--inc", "3")) - - callMain("shortName2", Array("--num", "2", "--inc", "3")) - callMain("shortName2", Array("-n", "2", "--inc", "3")) - callMain("shortName2", Array("--num", "2", "-i", "3")) - callMain("shortName2", Array("-n", "2", "-i", "3")) - - callMain("mix1", Array("--num", "2", "--inc", "3")) - callMain("mix1", Array("-n", "2", "--inc", "3")) - callMain("mix1", Array("--num", "2", "-i", "3")) - callMain("mix1", Array("-n", "2", "-i", "3")) - callMain("mix1", Array("--myNum", "2", "--myInc", "3")) - callMain("mix1", Array("-n", "2", "--myInc", "3")) - callMain("mix1", Array("--myNum", "2", "-i", "3")) - callMain("mix1", Array("-n", "2", "-i", "3")) - callMain("mix2", Array("--num", "2", "--inc", "3")) - 
callMain("mix2", Array("-n", "2", "--inc", "3")) - callMain("mix2", Array("--num", "2", "-i", "3")) - callMain("mix2", Array("-n", "2", "-i", "3")) - callMain("mix2", Array("--myNum", "2", "--myInc", "3")) - callMain("mix2", Array("-n", "2", "--myInc", "3")) - callMain("mix2", Array("--myNum", "2", "-i", "3")) - callMain("mix2", Array("-n", "2", "-i", "3")) - - callMain("multiple", Array("--num", "2", "--inc", "3")) - callMain("multiple", Array("-n", "2", "--inc", "3")) - callMain("multiple", Array("--num", "2", "-i", "3")) - callMain("multiple", Array("-n", "2", "-i", "3")) - callMain("multiple", Array("--myNum", "2", "--myInc", "3")) - callMain("multiple", Array("-n", "2", "--myInc", "3")) - callMain("multiple", Array("--myNum", "2", "-i", "3")) - callMain("multiple", Array("-n", "2", "-i", "3")) -end Test diff --git a/tests/run/main-annotation-param-annot-2.scala b/tests/run/main-annotation-param-annot-2.scala deleted file mode 100644 index 76033f24e614..000000000000 --- a/tests/run/main-annotation-param-annot-2.scala +++ /dev/null @@ -1,63 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.annotation.newMain.alias - -object myProgram: - @newMain def multipleSameShortNames1( - @alias("n") num: Int, - @alias("n") inc: Int - ): Unit = () - - @newMain def multipleSameShortNames2( - @alias("n") @alias("n") num: Int, - inc: Int - ): Unit = () - - @newMain def multipleSameNames1( - @alias("arg") num: Int, - @alias("arg") inc: Int - ): Unit = () - - @newMain def multipleSameNames2( - @alias("arg") @alias("arg") num: Int, - inc: Int - ): Unit = () - - @newMain def multipleSameNames3( - num: Int, - @alias("num") inc: Int - ): Unit = () -end myProgram - - -object Test: - def hasCauseIllegalArgumentException(e: Throwable): Boolean = - e.getCause match { - case null => false - case _: IllegalArgumentException => true - case e: Throwable => hasCauseIllegalArgumentException(e) - } - - def 
callMain(className: String, args: Array[String]) = - val clazz = Class.forName(className) - val method = clazz.getMethod("main", classOf[Array[String]]) - - try { method.invoke(null, args) } - catch { - case e: Exception if hasCauseIllegalArgumentException(e) => println("OK") - } - - def main(args: Array[String]): Unit = - callMain("multipleSameShortNames1", Array("--num", "2", "--inc", "3")) - callMain("multipleSameShortNames1", Array("-n", "2", "--inc", "3")) - callMain("multipleSameShortNames2", Array("--num", "2", "--inc", "3")) - callMain("multipleSameShortNames2", Array("-n", "2", "--inc", "3")) - - callMain("multipleSameNames1", Array("--num", "2", "--inc", "3")) - callMain("multipleSameNames1", Array("--arg", "2", "--inc", "3")) - callMain("multipleSameNames2", Array("--num", "2", "--inc", "3")) - callMain("multipleSameNames2", Array("--arg", "2", "--inc", "3")) - callMain("multipleSameNames3", Array("--num", "2", "--inc", "3")) -end Test diff --git a/tests/run/main-annotation-param-annot-invalid-params.scala b/tests/run/main-annotation-param-annot-invalid-params.scala deleted file mode 100644 index 46bc812863b1..000000000000 --- a/tests/run/main-annotation-param-annot-invalid-params.scala +++ /dev/null @@ -1,48 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain -import scala.annotation.newMain.alias - -import java.lang.reflect.InvocationTargetException - -object myProgram: - - @newMain def empty( - @alias("") i: Int, - ): Unit = () - - @newMain def space( - @alias(" ") i: Int, - ): Unit = () - - @newMain def nonLetter( - @alias("1") i: Int, - ): Unit = () - -end myProgram - -object Test: - def hasCauseIllegalArgumentException(e: Throwable): Boolean = - e.getCause match { - case null => false - case _: IllegalArgumentException => true - case e: Throwable => hasCauseIllegalArgumentException(e) - } - - def callMain(className: String, args: Array[String]) = - val clazz = Class.forName(className) - 
val method = clazz.getMethod("main", classOf[Array[String]]) - try { - method.invoke(null, args) - println(s"Calling $className should result in an IllegalArgumentException being thrown") - } - catch { - case e: InvocationTargetException if hasCauseIllegalArgumentException(e) => println("OK") - } - - def main(args: Array[String]): Unit = - callMain("empty", Array("3")) - callMain("space", Array("3")) - callMain("nonLetter", Array("3")) -end Test diff --git a/tests/run/main-annotation-return-type-1.scala b/tests/run/main-annotation-return-type-1.scala deleted file mode 100644 index 1366cceeba8a..000000000000 --- a/tests/run/main-annotation-return-type-1.scala +++ /dev/null @@ -1,26 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds two numbers and returns them */ - @newMain def add(num: Int, inc: Int) = - println(num + inc) - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - println("Direct call") - myProgram.add(2, 3) - println("Main call") - callMain(Array("2", "3")) -end Test diff --git a/tests/run/main-annotation-return-type-2.scala b/tests/run/main-annotation-return-type-2.scala deleted file mode 100644 index e2dc6b8ae4e6..000000000000 --- a/tests/run/main-annotation-return-type-2.scala +++ /dev/null @@ -1,29 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -class MyResult(val result: Int): - override def toString: String = result.toString - -// Sample main method -object myProgram: - - /** Adds two numbers and returns them */ - @newMain def add(num: Int, inc: Int) = - println(MyResult(num + inc)) - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = 
Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - println("Direct call") - myProgram.add(2, 3) - println("Main call") - callMain(Array("2", "3")) -end Test diff --git a/tests/run/main-annotation-short-name.scala b/tests/run/main-annotation-short-name.scala deleted file mode 100644 index 4a179fb793e1..000000000000 --- a/tests/run/main-annotation-short-name.scala +++ /dev/null @@ -1,25 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -object myProgram: - - /** Adds two numbers */ - @newMain def add(n: Int, i: Int): Unit = - println(s"$n + $i = ${n + i}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("-n", "2", "-i", "3")) - callMain(Array("-i", "3", "-n", "2")) - - callMain(Array("--n", "2", "--i", "3")) -end Test diff --git a/tests/run/main-annotation-simple.scala b/tests/run/main-annotation-simple.scala deleted file mode 100644 index 7d2fd501849b..000000000000 --- a/tests/run/main-annotation-simple.scala +++ /dev/null @@ -1,22 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -object myProgram: - - /** Adds two numbers */ - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "3")) -end Test diff --git a/tests/run/main-annotation-top-level.scala b/tests/run/main-annotation-top-level.scala deleted file mode 100644 index 
3e2bb7bb2fb4..000000000000 --- a/tests/run/main-annotation-top-level.scala +++ /dev/null @@ -1,37 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -/** Adds two numbers */ -@newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - -/** Adds any amount of numbers */ -@newMain def addAll(num: Int = 0, incs: Int*): Unit = - print(num) - if (incs.length > 0) { - print(" + ") - print(incs.mkString(" + ")) - } - println(s" = ${num + incs.sum}") - -object Test: - def callMainAdd(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def callMainAddAll(args: Array[String]): Unit = - val clazz = Class.forName("addAll") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMainAdd(Array("2", "3")) - - callMainAddAll(Array("2", "3")) - callMainAddAll(Array("2")) - callMainAddAll(Array()) - callMainAddAll(Array("1", "2", "3", "4")) -end Test diff --git a/tests/run/main-annotation-types.scala b/tests/run/main-annotation-types.scala deleted file mode 100644 index 0ee6220a1196..000000000000 --- a/tests/run/main-annotation-types.scala +++ /dev/null @@ -1,35 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Displays some parameters */ - @newMain def show( - int: Int, - double: Double, - string: String, - byte: Byte - ): Unit = - println("Here's what I got:") - println(s"int - $int") - println(s"double - $double") - println(s"string - $string") - println(s"byte - $byte") - println() - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("show") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - 
def main(args: Array[String]): Unit = - callMain(Array("2", "3", "4", "1")) - callMain(Array("-1", "3456789098765445678", "false", "127")) - callMain(Array("2147483647", "3.1415926535", "Hello world!", "0")) -end Test diff --git a/tests/run/main-annotation-vararg-1.scala b/tests/run/main-annotation-vararg-1.scala deleted file mode 100644 index 0227054e0189..000000000000 --- a/tests/run/main-annotation-vararg-1.scala +++ /dev/null @@ -1,30 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds any amount of numbers */ - @newMain def add(nums: Int*): Unit = - if (nums.isEmpty) - println("No number input") - else - println(s"${nums.mkString(" + ")} = ${nums.sum}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "3")) - callMain(Array("2", "3", "-4")) - callMain((1 to 10).toArray.map(_.toString)) - callMain(Array("0")) - callMain(Array()) -end Test diff --git a/tests/run/main-annotation-vararg-2.scala b/tests/run/main-annotation-vararg-2.scala deleted file mode 100644 index 8521795388b2..000000000000 --- a/tests/run/main-annotation-vararg-2.scala +++ /dev/null @@ -1,33 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Checks that the correct amount of parameters were passed */ - @newMain def count(count: Int, elems: String*): Unit = - if (elems.length == count) - println("Correct") - else - println(s"Expected $count argument${if (count != 1) "s" else ""}, but got ${elems.length}") - println(s" ${elems.mkString(", ")}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = 
Class.forName("count") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("1", "Hello")) - callMain(Array("2", "Hello", "world!")) - callMain(Array("3", "No 3 elements")) - callMain(Array("0")) - callMain(Array("0", "I", "shouldn't", "be", "here")) - callMain(Array("-2", "How does that make sense?")) - callMain(Array("26") ++ ('a' to 'z').toArray.map(_.toString)) -end Test diff --git a/tests/run/main-annotation-wrong-param-1.scala b/tests/run/main-annotation-wrong-param-1.scala deleted file mode 100644 index 6c9e9e991136..000000000000 --- a/tests/run/main-annotation-wrong-param-1.scala +++ /dev/null @@ -1,26 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds two numbers */ - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("2", "true", "SPAAAAACE")) - callMain(Array("add", "2", "3")) - callMain(Array("true", "false", "10")) - callMain(Array("binary", "10", "01")) -end Test diff --git a/tests/run/main-annotation-wrong-param-names.scala b/tests/run/main-annotation-wrong-param-names.scala deleted file mode 100644 index 90622d543bf1..000000000000 --- a/tests/run/main-annotation-wrong-param-names.scala +++ /dev/null @@ -1,26 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds two numbers */ - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - -end myProgram - -object Test: - def callMain(args: Array[String]): 
Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array("--n", "1", "--i", "10")) - callMain(Array("num", "1", "inc", "10")) - callMain(Array("--something", "1", "10")) - callMain(Array("1", "--else", "10")) -end Test diff --git a/tests/run/main-annotation-wrong-param-number.scala b/tests/run/main-annotation-wrong-param-number.scala deleted file mode 100644 index b8ef8c0ea9e7..000000000000 --- a/tests/run/main-annotation-wrong-param-number.scala +++ /dev/null @@ -1,26 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds two numbers */ - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - - def main(args: Array[String]): Unit = - callMain(Array()) - callMain(Array("1")) - callMain(Array("1", "2", "3")) - callMain(Array((1 to 10).toArray.map(_.toString): _*)) -end Test diff --git a/tests/run/main-annotation-wrong-param-type.scala b/tests/run/main-annotation-wrong-param-type.scala deleted file mode 100644 index 0fbae70a48a5..000000000000 --- a/tests/run/main-annotation-wrong-param-type.scala +++ /dev/null @@ -1,28 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -import scala.annotation.newMain - -// Sample main method -object myProgram: - - /** Adds two numbers */ - @newMain def add(num: Int, inc: Int): Unit = - println(s"$num + $inc = ${num + inc}") - -end myProgram - -object Test: - def callMain(args: Array[String]): Unit = - val clazz = Class.forName("add") - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args) - 
- def main(args: Array[String]): Unit = - callMain(Array("2", "true")) - callMain(Array("2.1", "3")) - callMain(Array("2", "3.1415921535")) - callMain(Array("192.168.1.1", "3")) - callMain(Array("false", "true")) - callMain(Array("Hello", "world!")) -end Test diff --git a/tests/run/main-calculator-example.scala b/tests/run/main-calculator-example.scala deleted file mode 100644 index fc2e1397009b..000000000000 --- a/tests/run/main-calculator-example.scala +++ /dev/null @@ -1,67 +0,0 @@ -//> using options -experimental -Yno-experimental -// scalajs: --skip - -sealed trait Expression: - def eval(): Int -case class Number(n: Int) extends Expression: - def eval(): Int = n -case class Plus(e1: Expression, e2: Expression) extends Expression: - def eval(): Int = e1.eval() + e2.eval() - -//// - -@main def sum(n1: Int, n2: Int) = - val x1 = Number(n1) - val x2 = Number(n2) - val expr = Plus(x1, x2) - println(s"Expression: $expr") - val result = expr.eval() - println(s"Calculated: $result") - -//// - -import scala.annotation.{ MainAnnotation, experimental } -import scala.annotation.MainAnnotation.{ Info, Parameter } -import scala.util.CommandLineParser.FromString - -class showAndEval extends MainAnnotation[FromString, Expression]: - def command(info: Info, args: Seq[String]): Option[Seq[String]] = - assert(info.parameters.forall(param => param.typeName == "Number"), "Only Number parameters allowed") - println(s"executing ${info.name} with inputs: ${args.mkString(" ")}") - Some(args) - - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using parser: FromString[T]): () => T = - () => parser.fromString(arg) - - def varargGetter[T](param: Parameter, args: Seq[String])(using parser: FromString[T]): () => Seq[T] = - () => args.map(arg => parser.fromString(arg)) - - def run(program: () => Expression): Unit = - val expr = program() - println(s"Expression: $expr") - val result = expr.eval() - println(s"Calculated: $result") -end showAndEval - -given 
FromString[Number] = (x: String) => Number(x.toInt) - -//// - -@showAndEval def sum2(x1: Number, x2: Number): Expression = - sumAll(x1, x2) - -@showAndEval def sumAll(xs: Number*): Expression = - if xs.isEmpty then Number(0) - else xs.tail.fold[Expression](xs.head)(Plus) - -//// - -@main def Test: Unit = - def callMain(name: String, args: String*): Unit = - val clazz = Class.forName(name) - val method = clazz.getMethod("main", classOf[Array[String]]) - method.invoke(null, args.toArray) - callMain("sum", "1", "2") - callMain("sum2", "2", "3") - callMain("sumAll", "1", "2", "3") -end Test diff --git a/tests/run/named-patmatch.scala b/tests/run/named-patmatch.scala new file mode 100644 index 000000000000..e62497e4aa8f --- /dev/null +++ b/tests/run/named-patmatch.scala @@ -0,0 +1,36 @@ +import annotation.experimental +import language.experimental.namedTuples + +@main def Test = + locally: + val (x = x, y = y) = (x = 11, y = 22) + assert(x == 11 && y == 22) + + locally: + val (x = a, y = b) = (x = 1, y = 2) + assert(a == 1 && b == 2) + + locally: + val (x = a, y = b) = (x = 1, y = 2) + assert(a == 1 && b == 2) + + locally: + val (x, y) = (x = 1, y = 2) + assert(x == 1 && y == 2) + + locally: + val (a, b) = (x = 1, y = 2) + assert(a == 1 && b == 2) + + (x = 1, y = 2) match + case (x = x, y = y) => assert(x == 1 && y == 2) + + (x = 1, y = 2) match + case (x, y) => assert(x == 1 && y == 2) + + (x = 1, y = 2) match + case (a, b) => assert(a == 1 && b == 2) + + + + diff --git a/tests/run/named-patterns.check b/tests/run/named-patterns.check new file mode 100644 index 000000000000..9ccc08d67069 --- /dev/null +++ b/tests/run/named-patterns.check @@ -0,0 +1,20 @@ +name Bob, age 22 +name Bob +age 22 +age 22, name Bob +Bob, 22 +name Bob, age 22 +name Bob +age 22 +age 22, name Bob +Bob, 22 +1003 Lausanne, Rue de la Gare 44 +1003 Lausanne +Rue de la Gare in Lausanne +1003 Lausanne, Rue de la Gare 44 +1003 Lausanne, Rue de la Gare 44 +Bob, aged 22, in 1003 Lausanne, Rue de la Gare 
44 +Bob in 1003 Lausanne +aged 22 in Rue de la Gare in Lausanne +Bob, aged 22 in 1003 Lausanne, Rue de la Gare 44 +Bob, aged 22 in 1003 Lausanne, Rue de la Gare 44 diff --git a/tests/run/named-patterns.scala b/tests/run/named-patterns.scala new file mode 100644 index 000000000000..7c24dc8d683a --- /dev/null +++ b/tests/run/named-patterns.scala @@ -0,0 +1,74 @@ +import language.experimental.namedTuples + +object Test1: + class Person(val name: String, val age: Int) + + object Person: + def unapply(p: Person): (name: String, age: Int) = (p.name, p.age) + + class Person2(val name: String, val age: Int) + object Person2: + def unapply(p: Person2): Option[(name: String, age: Int)] = Some((p.name, p.age)) + + case class Address(city: String, zip: Int, street: String, number: Int) + + @main def Test = + val bob = Person("Bob", 22) + bob match + case Person(name = n, age = a) => println(s"name $n, age $a") + bob match + case Person(name = n) => println(s"name $n") + bob match + case Person(age = a) => println(s"age $a") + bob match + case Person(age = a, name = n) => println(s"age $a, name $n") + bob match + case Person(age, name) => println(s"$age, $name") + + val bob2 = Person2("Bob", 22) + bob2 match + case Person2(name = n, age = a) => println(s"name $n, age $a") + bob2 match + case Person2(name = n) => println(s"name $n") + bob2 match + case Person2(age = a) => println(s"age $a") + bob2 match + case Person2(age = a, name = n) => println(s"age $a, name $n") + bob2 match + case Person2(age, name) => println(s"$age, $name") + + val addr = Address("Lausanne", 1003, "Rue de la Gare", 44) + addr match + case Address(city = c, zip = z, street = s, number = n) => + println(s"$z $c, $s $n") + addr match + case Address(zip = z, city = c) => + println(s"$z $c") + addr match + case Address(city = c, street = s) => + println(s"$s in $c") + addr match + case Address(number = n, street = s, zip = z, city = c) => + println(s"$z $c, $s $n") + addr match + case Address(c, z, s, number) 
=> + println(s"$z $c, $s $number") + + type Person3 = (p: Person2, addr: Address) + + val p3 = (p = bob2, addr = addr) + p3 match + case (addr = Address(city = c, zip = z, street = s, number = n), p = Person2(name = nn, age = a)) => + println(s"$nn, aged $a, in $z $c, $s $n") + p3 match + case (p = Person2(name = nn), addr = Address(zip = z, city = c)) => + println(s"$nn in $z $c") + p3 match + case (p = Person2(age = a), addr = Address(city = c, street = s)) => + println(s"aged $a in $s in $c") + p3 match + case (Person2(age = a, name = nn), Address(number = n, street = s, zip = z, city = c)) => + println(s"$nn, aged $a in $z $c, $s $n") + p3 match + case (Person2(nn, a), Address(c, z, s, number)) => + println(s"$nn, aged $a in $z $c, $s $number") diff --git a/tests/run/named-tuple-ops.scala b/tests/run/named-tuple-ops.scala new file mode 100644 index 000000000000..076ab5028c6c --- /dev/null +++ b/tests/run/named-tuple-ops.scala @@ -0,0 +1,89 @@ +//> using options -source future +import language.experimental.namedTuples +import scala.compiletime.asMatchable + +type City = (name: String, zip: Int, pop: Int) +type Raw = (String, Int, Int) + +type Coord = (x: Double, y: Double) +type Labels = (x: String, y: String) + +@main def Test = + val city: City = (name = "Lausanne", zip = 1000, pop = 140000) + val coord: Coord = (x = 1.0, y = 0.0) + val labels: Labels = (x = "west", y = "north") + + val size: 3 = city.size + assert(city.size == 3) + + val zip: Int = city(1) + assert(zip == 1000) + + val name: String = city.head + assert(name == "Lausanne") + + val zip_pop: (zip: Int, pop: Int) = city.tail + val (_: Int, _: Int) = zip_pop + assert(zip_pop == (zip = 1000, pop = 140000)) + + val cinit = city.init + val _: (name: String, zip: Int) = cinit + assert(cinit == (name = "Lausanne", zip = 1000)) + + val ctake1: (name: String) = city.take(1) + assert(ctake1 == (name = "Lausanne")) + + val cdrop1 = city.drop(1) + val _: (zip: Int, pop: Int) = cdrop1 + assert(cdrop1 == 
zip_pop) + + val cdrop3 = city.drop(3) + val _: NamedTuple.Empty = cdrop3 + assert(cdrop3 == NamedTuple.Empty) + + val cdrop4 = city.drop(4) + val _: NamedTuple.Empty = cdrop4 + assert(cdrop4 == NamedTuple.Empty) + + val csplit = city.splitAt(1) + val _: ((name: String), (zip: Int, pop: Int)) = csplit + assert(csplit == ((name = "Lausanne"), zip_pop)) + + val city_coord = city ++ coord + val _: NamedTuple.Concat[City, Coord] = city_coord + val _: (name: String, zip: Int, pop: Int, x: Double, y: Double) = city_coord + assert(city_coord == (name = "Lausanne", zip = 1000, pop = 140000, x = 1.0, y = 0.0)) + + type IntToString[X] = X match + case Int => String + case _ => X + + val intToString = [X] => (x: X) => x.asMatchable match + case x: Int => x.toString + case x => x + + val citymap = city.map[IntToString](intToString.asInstanceOf) + val _: (name: String, zip: String, pop: String) = citymap + assert(citymap == (name = "Lausanne", zip = "1000", pop = "140000")) + + val cityreverse = city.reverse + val _: (pop: Int, zip: Int, name: String) = cityreverse + assert(cityreverse == (pop = 140000, zip = 1000, name = "Lausanne")) + + val zipped = coord.zip(labels) + val _: (x: (Double, String), y: (Double, String)) = zipped + val (x3, y3) = zipped + val _: (Double, String) = x3 + assert(zipped == (x = (1.0, "west"), y = (0.0, "north"))) + + val zippedRaw = ((1.0, "west"), (0.0, "north")) + val (x1: (Double, String), x2: (Double, String)) = zippedRaw + + val cityFields = city.toList + val _: List[String | Int] = cityFields + assert(cityFields == List("Lausanne", 1000, 140000)) + + val citArr = city.toArray + val _: List[String | Int] = cityFields + assert(cityFields == List("Lausanne", 1000, 140000)) + diff --git a/tests/run/named-tuples-strawman-2.scala b/tests/run/named-tuples-strawman-2.scala new file mode 100644 index 000000000000..95f37ad23a93 --- /dev/null +++ b/tests/run/named-tuples-strawman-2.scala @@ -0,0 +1,248 @@ +import compiletime.* +import 
compiletime.ops.int.* +import compiletime.ops.boolean.! +import Tuple.* + +object TupleOps: + + private object helpers: + + /** Used to implement IndicesWhere */ + type IndicesWhereHelper[X <: Tuple, P[_ <: Union[X]] <: Boolean, N <: Int] <: Tuple = X match + case EmptyTuple => EmptyTuple + case h *: t => P[h] match + case true => N *: IndicesWhereHelper[t, P, S[N]] + case false => IndicesWhereHelper[t, P, S[N]] + + end helpers + + /** A type level Boolean indicating whether the tuple `X` has an element + * that matches `Y`. + * @pre The elements of `X` are assumed to be singleton types + */ + type Contains[X <: Tuple, Y] <: Boolean = X match + case Y *: _ => true + case _ *: xs => Contains[xs, Y] + case EmptyTuple => false + + /** The index of `Y` in tuple `X` as a literal constant Int, + * or `Size[X]` if `Y` is disjoint from all element types in `X`. + */ + type IndexOf[X <: Tuple, Y] <: Int = X match + case Y *: _ => 0 + case _ *: xs => S[IndexOf[xs, Y]] + case EmptyTuple => 0 + + /** A tuple consisting of those indices `N` of tuple `X` where the predicate `P` + * is true for `Elem[X, N]`. Indices are type level values <: Int. + */ + type IndicesWhere[X <: Tuple, P[_ <: Union[X]] <: Boolean] = + helpers.IndicesWhereHelper[X, P, 0] + + extension [X <: Tuple](inline x: X) + + /** The index (starting at 0) of the first occurrence of `y.type` in the type `X` of `x` + * or `Size[X]` if no such element exists. + */ + inline def indexOf(y: Any): IndexOf[X, y.type] = constValue[IndexOf[X, y.type]] + + /** A boolean indicating whether there is an element `y.type` in the type `X` of `x` */ + inline def contains(y: Any): Contains[X, y.type] = constValue[Contains[X, y.type]] + + end extension + + + /** The `X` tuple, with its element at index `N` replaced by `Y`. 
+ * If `N` is equal to `Size[X]`, the element `Y` is appended instead + */ + type UpdateOrAppend[X <: Tuple, N <: Int, Y] <: Tuple = X match + case x *: xs => + N match + case 0 => Y *: xs + case S[n1] => x *: UpdateOrAppend[xs, n1, Y] + case EmptyTuple => + N match + case 0 => Y *: EmptyTuple + + inline def updateOrAppend[X <: Tuple, N <: Int, Y](xs: X, y: Y): UpdateOrAppend[X, N, Y] = + locally: + val n = constValue[N] + val size = xs.size + require(0 <= n && n <= xs.size, s"Index $n out of range 0..$size") + if n == size then xs :* y + else + val elems = xs.toArray + elems(n) = y.asInstanceOf[Object] + fromArray(elems) + .asInstanceOf[UpdateOrAppend[X, N, Y]] + + extension [X <: Tuple](inline xs: X) + // Note: Y must be inferred precisely, or given explicitly. This means even though `updateOrAppend` + // is clearly useful, we cannot yet move it to tuple since it is still too awkward to use. + // Once we have precise inference, we could replace `Y <: Singleton` with `Y: Precise` + // and then it should work beautifully. + inline def updateOrAppend[N <: Int & Singleton, Y <: Singleton](inline n: N, inline y: Y): UpdateOrAppend[X, N, Y] = + locally: + val size = xs.size + require(0 <= n && n <= size, s"Index $n out of range 0..$size") + if n == size then xs :* y + else + val elems = xs.toArray + elems(n) = y.asInstanceOf[Object] + fromArray(elems) + .asInstanceOf[UpdateOrAppend[X, N, Y]] + + /** If `Y` does not occur in tuple `X`, `X` with `Y` appended. Otherwise `X`. 
*/ + type AppendIfDistinct[X <: Tuple, Y] <: Tuple = X match + case Y *: xs => X + case x *: xs => x *: AppendIfDistinct[xs, Y] + case EmptyTuple => Y *: EmptyTuple + + inline def appendIfDistinct[X <: Tuple, Y](xs: X, y: Y): AppendIfDistinct[X, Y] = + (if xs.contains(y) then xs else xs :* y).asInstanceOf[AppendIfDistinct[X, Y]] + + /** `X` with all elements from `Y` that do not occur in `X` appended */ + type ConcatDistinct[X <: Tuple, Y <: Tuple] <: Tuple = Y match + case y *: ys => ConcatDistinct[AppendIfDistinct[X, y], ys] + case EmptyTuple => X + + inline def concatDistinct[X <: Tuple, Y <: Tuple](xs: X, ys: Y): ConcatDistinct[X, Y] = + (xs ++ filter[Y, [Elem] =>> ![Contains[X, Elem]]](ys)).asInstanceOf[ConcatDistinct[X, Y]] + + /** A tuple consisting of all elements of this tuple that have types + * for which the given type level predicate `P` reduces to the literal + * constant `true`. + */ + inline def filter[X <: Tuple, P[_] <: Boolean](xs: X): Filter[X, P] = + val toInclude = constValueTuple[IndicesWhere[X, P]].toArray + val arr = new Array[Object](toInclude.length) + for i <- toInclude.indices do + arr(i) = xs.productElement(toInclude(i).asInstanceOf[Int]).asInstanceOf[Object] + Tuple.fromArray(arr).asInstanceOf[Filter[X, P]] + +object NamedTupleDecomposition: + import NamedTupleOps.* + + /** The names of the named tuple type `NT` */ + type Names[NT <: AnyNamedTuple] <: Tuple = NT match + case NamedTuple[n, _] => n + + /** The value types of the named tuple type `NT` */ + type DropNames[NT <: AnyNamedTuple] <: Tuple = NT match + case NamedTuple[_, x] => x + +object NamedTupleOps: + import TupleOps.* + + opaque type AnyNamedTuple = Any + + opaque type NamedTuple[N <: Tuple, +X <: Tuple] >: X <: AnyNamedTuple = X + + export NamedTupleDecomposition.* + + object NamedTuple: + def apply[N <: Tuple, X <: Tuple](x: X): NamedTuple[N, X] = x + + extension [NT <: AnyNamedTuple](x: NT) + inline def toTuple: DropNames[NT] = x.asInstanceOf + inline def names: 
Names[NT] = constValueTuple[Names[NT]] + + /** Internal use only: Merge names and value components of two named tuple to + * impement `UpdateWith`. + * @param N the names of the combined tuple + * @param X the value types of the first named tuple + * @param N2 the names of the second named tuple + * @param Y the value types of the second named tuple + */ + type Merge[N <: Tuple, X <: Tuple, N2 <: Tuple, Y <: Tuple] = (N2, Y) match + case (n *: ns, y *: ys) => + Merge[N, UpdateOrAppend[X, IndexOf[N, n], y], ns, ys] + case (EmptyTuple, EmptyTuple) => + NamedTuple[N, X] + + /** A joint named tuple where + * - The names are the names of named tuple `NT1` followed by those names of `NT2` which + * do not appear in `NT1` + * - The values are the values of `NT1` and `NT2` corresponding to these names. + * If a name is present in both `NT1` and `NT2` the value in `NT2` is used. + */ + type UpdateWith[NT1 <: AnyNamedTuple, NT2 <: AnyNamedTuple] = + Merge[ConcatDistinct[Names[NT1], Names[NT2]], DropNames[NT1], Names[NT2], DropNames[NT2]] + + extension [NT1 <: AnyNamedTuple](nt1: NT1) + inline def updateWith[NT2 <: AnyNamedTuple](nt2: NT2): UpdateWith[NT1, NT2] = + val names = constValueTuple[ConcatDistinct[Names[NT1], Names[NT2]]].toArray + val names2 = constValueTuple[Names[NT2]].toArray + val values1 = nt1.toTuple + val values2 = nt2.toTuple + val values = new Array[Object](names.length) + values1.toArray.copyToArray(values) + for i <- 0 until values2.size do + val idx = names.indexOf(names2(i)) + values(idx) = values2.productElement(i).asInstanceOf[Object] + Tuple.fromArray(values).asInstanceOf[UpdateWith[NT1, NT2]] + +@main def Test = + import TupleOps.* + import NamedTupleOps.* + + type Names = "first" *: "last" *: "age" *: EmptyTuple + type Values = "Bob" *: "Miller" *: 33 *: EmptyTuple + + val names: Names = ("first", "last", "age") + val values: Values = ("Bob", "Miller", 33) + + val x1: IndexOf[Names, "first"] = constValue + val _: 0 = x1 + + val x2: IndexOf[Names, 
"age"] = names.indexOf("age") + val _: 2 = x2 + + val x3: IndexOf[Names, "what?"] = names.indexOf("what?") + val _: 3 = x3 + + type Releases = "first" *: "middle" *: EmptyTuple + type ReleaseValues = 1.0 *: true *: EmptyTuple + + val releases: Releases = ("first", "middle") + val releaseValues: ReleaseValues = (1.0, true) + + val x4 = values.updateOrAppend(names.indexOf("age"), 11) + //updateOrAppend[Values](values)[IndexOf[Names, "age"], 11](indexOf[Names](names)["age"]("age"), 11) + val _: ("Bob", "Miller", 11) = x4 + assert(("Bob", "Miller", 11) == x4) + + val x5 = updateOrAppend[Values, IndexOf[Names, "what"], true](values, true) + val _: ("Bob", "Miller", 33, true) = x5 + assert(("Bob", "Miller", 33, true) == x5) + + val x6 = updateOrAppend[Values, IndexOf[Names, "first"], "Peter"](values, "Peter") + val _: ("Peter", "Miller", 33) = x6 + assert(("Peter", "Miller", 33) == x6) + + val x7 = concatDistinct[Names, Releases](names, releases) + val _: ("first", "last", "age", "middle") = x7 + assert(("first", "last", "age", "middle") == x7, x7) + + val x8 = concatDistinct[Releases, Names](releases, names) + val _: ("first", "middle", "last", "age") = x8 + assert(("first", "middle", "last", "age") == x8) + + def x9: Merge[ConcatDistinct[Names, Releases], Values, Releases, ReleaseValues] = ??? 
+ def x9c: NamedTuple[("first", "last", "age", "middle"), (1.0, "Miller", 33, true)] = x9 + + val person = NamedTuple[Names, Values](values) + val release = NamedTuple[Releases, ReleaseValues](releaseValues) + + val x10 = person.updateWith(release) + val _: UpdateWith[NamedTuple[Names, Values], NamedTuple[Releases, ReleaseValues]] = x10 + val _: ("first", "last", "age", "middle") = x10.names + val _: (1.0, "Miller", 33, true) = x10.toTuple + assert((("first", "last", "age", "middle") == x10.names)) + assert((1.0, "Miller", 33, true) == x10.toTuple) + + val x11 = release.updateWith(person) + val _: UpdateWith[NamedTuple[Releases, ReleaseValues], NamedTuple[Names, Values]] = x11 + val _: NamedTuple[("first", "middle", "last", "age"), ("Bob", true, "Miller", 33)] = x11 + assert(("first", "middle", "last", "age") == x11.names) + assert(("Bob", true, "Miller", 33) == x11.toTuple) diff --git a/tests/run/named-tuples-xxl.check b/tests/run/named-tuples-xxl.check new file mode 100644 index 000000000000..ee5f60bec756 --- /dev/null +++ b/tests/run/named-tuples-xxl.check @@ -0,0 +1,6 @@ +(0,0,0,0,0,0,0,0,0,0,Bob,0,33,0,0,0,0,0,0,0,0,0,0,0) +(0,0,0,0,0,0,0,0,0,0,Bob,0,33,0,0,0,0,0,0,0,0,0,0,0) +(0,0,0,0,0,0,0,0,0,0,Bob,0,33,0,0,0,0,0,0,0,0,0,0,0) +Bob is younger than Bill +Bob is younger than Lucy +Bill is younger than Lucy diff --git a/tests/run/named-tuples-xxl.scala b/tests/run/named-tuples-xxl.scala new file mode 100644 index 000000000000..3a0a1e5e1294 --- /dev/null +++ b/tests/run/named-tuples-xxl.scala @@ -0,0 +1,91 @@ +import language.experimental.namedTuples +import NamedTuple.toTuple + +type Person = ( + x0: Int, x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, + name: String, y1: Int, age: Int, y2: Int, + z0: Int, z1: Int, z2: Int, z3: Int, z4: Int, z5: Int, z6: Int, z7: Int, z8: Int, z9: Int) + +val bob = ( + x0 = 0, x1 = 0, x2 = 0, x3 = 0, x4 = 0, x5 = 0, x6 = 0, x7 = 0, x8 = 0, x9 = 0, + name = "Bob", y1 = 0, age = 33, y2 = 0, + z0 = 
0, z1 = 0, z2 = 0, z3 = 0, z4 = 0, z5 = 0, z6 = 0, z7 = 0, z8 = 0, z9 = 0) + +val person2: Person = bob + + +type AddressInfo = (city: String, zip: Int) +val addr = (city = "Lausanne", zip = 1003) + +type CombinedInfo = NamedTuple.Concat[Person, AddressInfo] +val bobWithAddr = bob ++ addr +val _: CombinedInfo = bobWithAddr +val _: CombinedInfo = bob ++ addr + +@main def Test = + assert(bob.name == "Bob") + assert(bob.age == 33) + bob match + case p @ (name = "Bob", age = a) => + val x = p + println(x) + assert(p.age == 33) + assert(a == 33) + case _ => + assert(false) + + bob match + case p @ (name = "Peter", age = _) => assert(false) + case p @ (name = "Bob", age = 0) => assert(false) + case _ => + bob match + case b @ (x0 = 0, x1 = 0, x2 = 0, x3 = 0, x4 = 0, x5 = 0, x6 = 0, x7 = 0, x8 = 0, x9 = 0, + name = "Bob", y1 = 0, age = 33, y2 = 0, + z0 = 0, z1 = 0, z2 = 0, z3 = 0, z4 = 0, z5 = 0, z6 = 0, z7 = 0, z8 = 0, z9 = 0) + => // !!! spurious unreachable case warning + println(bob) + println(b) + case _ => assert(false) + + val x = bob.age + assert(x == 33) + + val y: ( + Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, + String, Int, Int, Int, + Int, Int, Int, Int, Int, Int, Int, Int, Int, Int) + = bob.toTuple + + def ageOf(person: Person) = person.age + + assert(ageOf(bob) == 33) + + val persons = List( + bob, + (x0 = 0, x1 = 0, x2 = 0, x3 = 0, x4 = 0, x5 = 0, x6 = 0, x7 = 0, x8 = 0, x9 = 0, + name = "Bill", y1 = 0, age = 40, y2 = 0, + z0 = 0, z1 = 0, z2 = 0, z3 = 0, z4 = 0, z5 = 0, z6 = 0, z7 = 0, z8 = 0, z9 = 0), + (x0 = 0, x1 = 0, x2 = 0, x3 = 0, x4 = 0, x5 = 0, x6 = 0, x7 = 0, x8 = 0, x9 = 0, + name = "Lucy", y1 = 0, age = 45, y2 = 0, + z0 = 0, z1 = 0, z2 = 0, z3 = 0, z4 = 0, z5 = 0, z6 = 0, z7 = 0, z8 = 0, z9 = 0), + ) + for + p <- persons + q <- persons + if p.age < q.age + do + println(s"${p.name} is younger than ${q.name}") + + val name1 = bob(10) + val age1 = bob(12) + + val minors = persons.filter: + case (age = a) => a < 18 + case _ => false + + 
assert(minors.isEmpty) + + bob match + case bob1 @ (age = 33, name = "Bob") => + val x: Person = bob1 // bob1 still has type Person with the unswapped elements + case _ => assert(false) diff --git a/tests/run/named-tuples.check b/tests/run/named-tuples.check new file mode 100644 index 000000000000..6485aefafa9a --- /dev/null +++ b/tests/run/named-tuples.check @@ -0,0 +1,10 @@ +(Bob,33) +33 +Bob +(Bob,33,Lausanne,1003) +33 +no match +Bob is younger than Bill +Bob is younger than Lucy +Bill is younger than Lucy +(((Lausanne,Pully),Preverenges),((1003,1009),1028)) diff --git a/tests/run/named-tuples.scala b/tests/run/named-tuples.scala new file mode 100644 index 000000000000..676c21a0e434 --- /dev/null +++ b/tests/run/named-tuples.scala @@ -0,0 +1,114 @@ +import language.experimental.namedTuples +import NamedTuple.* + +type Person = (name: String, age: Int) +val bob = (name = "Bob", age = 33): (name: String, age: Int) +val person2: (name: String, age: Int) = bob + +type Uni = (uni: Double) +val uni = (uni = 1.0) +val _: Uni = uni + +type AddressInfo = (city: String, zipCode: Int) +val addr = (city = "Lausanne", zipCode = 1003) +val _: AddressInfo = addr + +type CombinedInfo = NamedTuple.Concat[Person, AddressInfo] +val bobWithAddr = bob ++ addr +val _: CombinedInfo = bobWithAddr +val _: CombinedInfo = bob ++ addr + +@main def Test = + println(bob) + println(bob.age) + println(person2.name) + println(bobWithAddr) + bob match + case p @ (name = "Bob", age = _) => println(p.age) + bob match + case p @ (name = "Bob", age = age) => assert(age == 33) + bob match + case p @ (name = "Peter", age = _) => println(p.age) + case p @ (name = "Bob", age = 0) => println(p.age) + case _ => println("no match") + + val x = bob.age + assert(x == 33) + + val y: (String, Int) = bob.toTuple + + def ageOf(person: Person) = person.age + + assert(ageOf(bob) == 33) + assert(ageOf((name = "anon", age = 22)) == 22) + assert(ageOf(("anon", 11)) == 11) + + val persons = List( + bob, + (name = 
"Bill", age = 40), + (name = "Lucy", age = 45) + ) + for + p <- persons + q <- persons + if p.age < q.age + do + println(s"${p.name} is younger than ${q.name}") + + //persons.select(_.age, _.name) + //persons.join(addresses).withCommon(_.name) + + def minMax(elems: Int*): (min: Int, max: Int) = + var min = elems(0) + var max = elems(0) + for elem <- elems do + if elem < min then min = elem + if elem > max then max = elem + (min = min, max = max) + + val mm = minMax(1, 3, 400, -3, 10) + assert(mm.min == -3) + assert(mm.max == 400) + + val name1 = bob(0) + val age1 = bob(1) + val _: String = name1 + val _: Int = age1 + + val bobS = bob.reverse + val _: (age: Int, name: String) = bobS + val _: NamedTuple.Reverse[Person] = bobS + + val silly = bob match + case (name, age) => name.length + age + + assert(silly == 36) + + val minors = persons.filter: + case (age = a) => a < 18 + case _ => false + + assert(minors.isEmpty) + + bob match + case bob1 @ (age = 33, name = "Bob") => + val x: Person = bob1 // bob1 still has type Person with the unswapped elements + case _ => assert(false) + + val addr2 = (city = "Pully", zipCode = 1009) + val addr3 = addr.zip(addr2) + val addr4 = addr3.zip("Preverenges", 1028) + println(addr4) + + // testing conversions +object Conv: + + val p: (String, Int) = bob + def f22(x: (String, Int)) = x._1 + def f22(x: String) = x + f22(bob) + + + + + diff --git a/tests/run/noProtectedSuper.scala b/tests/run/noProtectedSuper.scala index 999a8a06c4fa..d05c13d90c9f 100644 --- a/tests/run/noProtectedSuper.scala +++ b/tests/run/noProtectedSuper.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.publicInBinary diff --git a/tests/run/publicInBinary/Lib_1.scala b/tests/run/publicInBinary/Lib_1.scala index 86895ba40706..d9936670a458 100644 --- a/tests/run/publicInBinary/Lib_1.scala +++ b/tests/run/publicInBinary/Lib_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental 
-Yno-experimental -Werror -WunstableInlineAccessors +//> using options -experimental -Werror -WunstableInlineAccessors package foo diff --git a/tests/run/publicInBinary/Test_2.scala b/tests/run/publicInBinary/Test_2.scala index 3c3e89419057..26829d32653a 100644 --- a/tests/run/publicInBinary/Test_2.scala +++ b/tests/run/publicInBinary/Test_2.scala @@ -1,3 +1,4 @@ +//> using options -experimental import foo.* @main def Test: Unit = diff --git a/tests/run/quotes-add-erased/Macro_1.scala b/tests/run/quotes-add-erased/Macro_1.scala index 5b95051a3744..66619237e346 100644 --- a/tests/run/quotes-add-erased/Macro_1.scala +++ b/tests/run/quotes-add-erased/Macro_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.annotation.MacroAnnotation import scala.annotation.internal.ErasedParam @@ -7,7 +7,7 @@ import scala.quoted._ class NewAnnotation extends scala.annotation.Annotation class erasedParamsMethod extends MacroAnnotation: - def transform(using Quotes)(tree: quotes.reflect.Definition): List[quotes.reflect.Definition] = + def transform(using Quotes)(tree: quotes.reflect.Definition, companion: Option[quotes.reflect.Definition]): List[quotes.reflect.Definition] = import quotes.reflect._ tree match case ClassDef(name, ctr, parents, self, body) => diff --git a/tests/run/quotes-reflection/Macros_1.scala b/tests/run/quotes-reflection/Macros_1.scala index c9fe6eb38c99..92ac4e53e262 100644 --- a/tests/run/quotes-reflection/Macros_1.scala +++ b/tests/run/quotes-reflection/Macros_1.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.quoted.* diff --git a/tests/run/t2029.scala b/tests/run/t2029.scala index d4ab0f02b67f..d5bc478fa0b3 100644 --- a/tests/run/t2029.scala +++ b/tests/run/t2029.scala @@ -5,7 +5,7 @@ object Test{ val mainSet = TreeSet(1 to 5 :_*) var compareCalled = false; - val smallerSet = TreeSet(2 to 4 :_*)(Ordering[Int].reverse) + val 
smallerSet = TreeSet(2 to 4 :_*)(using Ordering[Int].reverse) println(mainSet.mkString(",")) println(smallerSet.mkString(",")) diff --git a/tests/run/t3326.scala b/tests/run/t3326.scala index 3d7d83068f92..1f8c04394682 100644 --- a/tests/run/t3326.scala +++ b/tests/run/t3326.scala @@ -28,8 +28,8 @@ object Test { def testCollectionSorted(): Unit = { import collection.* val order = implicitly[Ordering[Int]].reverse - var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](order) - var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](order) + var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](using order) + var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](using order) m1 ++= List(1 -> "World") m1 ++= List(2 -> "Hello") @@ -49,8 +49,8 @@ object Test { def testImmutableSorted(): Unit = { import collection.immutable.* val order = implicitly[Ordering[Int]].reverse - var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](order) - var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](order) + var m1: SortedMap[Int, String] = SortedMap.empty[Int, String](using order) + var m2: SortedMap[Int, String] = SortedMap.empty[Int, String](using order) m1 += (1 -> "World") m1 += (2 -> "Hello") diff --git a/tests/run/tupled-function-andThen.scala b/tests/run/tupled-function-andThen.scala index 0068143f9d3f..5dd0a75d41d3 100644 --- a/tests/run/tupled-function-andThen.scala +++ b/tests/run/tupled-function-andThen.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.util.TupledFunction diff --git a/tests/run/tupled-function-apply.scala b/tests/run/tupled-function-apply.scala index 69cfeef91dd1..8fb68308deb5 100644 --- a/tests/run/tupled-function-apply.scala +++ b/tests/run/tupled-function-apply.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.util.TupledFunction diff --git 
a/tests/run/tupled-function-compose.scala b/tests/run/tupled-function-compose.scala index d984b8a9184a..84ca06103537 100644 --- a/tests/run/tupled-function-compose.scala +++ b/tests/run/tupled-function-compose.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.util.TupledFunction object Test { diff --git a/tests/run/tupled-function-extension-method.scala b/tests/run/tupled-function-extension-method.scala index fc3319aa4c15..216d91ada605 100644 --- a/tests/run/tupled-function-extension-method.scala +++ b/tests/run/tupled-function-extension-method.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.util.TupledFunction object Test { diff --git a/tests/run/tupled-function-tupled.scala b/tests/run/tupled-function-tupled.scala index 5a799be167c3..77ee8f44a0e1 100644 --- a/tests/run/tupled-function-tupled.scala +++ b/tests/run/tupled-function-tupled.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.util.TupledFunction diff --git a/tests/run/tupled-function-untupled.scala b/tests/run/tupled-function-untupled.scala index 34b81c74c4f6..3ef86b4cac2e 100644 --- a/tests/run/tupled-function-untupled.scala +++ b/tests/run/tupled-function-untupled.scala @@ -1,4 +1,4 @@ -//> using options -experimental -Yno-experimental +//> using options -experimental import scala.util.TupledFunction object Test { diff --git a/tests/run/tyql.scala b/tests/run/tyql.scala new file mode 100644 index 000000000000..35777e9a4c13 --- /dev/null +++ b/tests/run/tyql.scala @@ -0,0 +1,205 @@ +import language.experimental.namedTuples +import NamedTuple.{NamedTuple, AnyNamedTuple} + +/* This is a demonstrator that shows how to map regular for expressions to + * internal data that can be optimized by a query engine. It needs NamedTuples + * and type classes but no macros. 
It's so far very provisional and experimental, + * intended as a basis for further exploration. + */ + +/** The type of expressions in the query language */ +trait Expr[Result] extends Selectable: + + /** This type is used to support selection with any of the field names + * defined by Fields. + */ + type Fields = NamedTuple.Map[NamedTuple.From[Result], Expr] + + /** A selection of a field name defined by Fields is implemented by `selectDynamic`. + * The implementation will add a cast to the right Expr type corresponding + * to the field type. + */ + def selectDynamic(fieldName: String) = Expr.Select(this, fieldName) + + /** Member methods to implement universal equality on Expr level. */ + def == (other: Expr[?]): Expr[Boolean] = Expr.Eq(this, other) + def != (other: Expr[?]): Expr[Boolean] = Expr.Ne(this, other) + +object Expr: + + /** Sample extension methods for individual types */ + extension (x: Expr[Int]) + def > (y: Expr[Int]): Expr[Boolean] = Gt(x, y) + def > (y: Int): Expr[Boolean] = Gt(x, IntLit(y)) + extension (x: Expr[Boolean]) + def &&(y: Expr[Boolean]): Expr[Boolean] = And(x, y) + def || (y: Expr[Boolean]): Expr[Boolean] = Or(x, y) + + // Note: All field names of constructors in the query language are prefixed with `$` + // so that we don't accidentally pick a field name of a constructor class where we want + // a name in the domain model instead. + + // Some sample constructors for Exprs + case class Gt($x: Expr[Int], $y: Expr[Int]) extends Expr[Boolean] + case class Plus(x: Expr[Int], y: Expr[Int]) extends Expr[Int] + case class And($x: Expr[Boolean], $y: Expr[Boolean]) extends Expr[Boolean] + case class Or($x: Expr[Boolean], $y: Expr[Boolean]) extends Expr[Boolean] + + // So far Select is weakly typed, so `selectDynamic` is easy to implement. 
+ // Todo: Make it strongly typed like the other cases + case class Select[A]($x: Expr[A], $name: String) extends Expr + + case class Single[S <: String, A]($x: Expr[A]) + extends Expr[NamedTuple[S *: EmptyTuple, A *: EmptyTuple]] + + case class Concat[A <: AnyNamedTuple, B <: AnyNamedTuple]($x: Expr[A], $y: Expr[B]) + extends Expr[NamedTuple.Concat[A, B]] + + case class Join[A <: AnyNamedTuple](a: A) + extends Expr[NamedTuple.Map[A, StripExpr]] + + type StripExpr[E] = E match + case Expr[b] => b + + // Also weakly typed in the arguments since these two classes model universal equality */ + case class Eq($x: Expr[?], $y: Expr[?]) extends Expr[Boolean] + case class Ne($x: Expr[?], $y: Expr[?]) extends Expr[Boolean] + + /** References are placeholders for parameters */ + private var refCount = 0 + + case class Ref[A]($name: String = "") extends Expr[A]: + val id = refCount + refCount += 1 + override def toString = s"ref$id(${$name})" + + /** Literals are type-specific, tailored to the types that the DB supports */ + case class IntLit($value: Int) extends Expr[Int] + + /** Scala values can be lifted into literals by conversions */ + given Conversion[Int, IntLit] = IntLit(_) + + /** The internal representation of a function `A => B` + * Query languages are usually first-order, so Fun is not an Expr + */ + case class Fun[A, B](param: Ref[A], f: B) + + type Pred[A] = Fun[A, Expr[Boolean]] + + /** Explicit conversion from + * (name_1: Expr[T_1], ..., name_n: Expr[T_n]) + * to + * Expr[(name_1: T_1, ..., name_n: T_n)] + */ + extension [A <: AnyNamedTuple](x: A) def toRow: Join[A] = Join(x) + + /** Same as _.toRow, as an implicit conversion */ + given [A <: AnyNamedTuple]: Conversion[A, Expr.Join[A]] = Expr.Join(_) + +end Expr + +/** The type of database queries. So far, we have queries + * that represent whole DB tables and queries that reify + * for-expressions as data. 
+ */ +trait Query[A] + +object Query: + import Expr.{Pred, Fun, Ref} + + case class Filter[A]($q: Query[A], $p: Pred[A]) extends Query[A] + case class Map[A, B]($q: Query[A], $f: Fun[A, Expr[B]]) extends Query[B] + case class FlatMap[A, B]($q: Query[A], $f: Fun[A, Query[B]]) extends Query[B] + + // Extension methods to support for-expression syntax for queries + extension [R](x: Query[R]) + + def withFilter(p: Ref[R] => Expr[Boolean]): Query[R] = + val ref = Ref[R]() + Filter(x, Fun(ref, p(ref))) + + def map[B](f: Ref[R] => Expr[B]): Query[B] = + val ref = Ref[R]() + Map(x, Fun(ref, f(ref))) + + def flatMap[B](f: Ref[R] => Query[B]): Query[B] = + val ref = Ref[R]() + FlatMap(x, Fun(ref, f(ref))) +end Query + +/** The type of query references to database tables */ +case class Table[R]($name: String) extends Query[R] + +// Everything below is code using the model ----------------------------- + +// Some sample types +case class City(zipCode: Int, name: String, population: Int) +type Address = (city: City, street: String, number: Int) +type Person = (name: String, age: Int, addr: Address) + +@main def Test = + + val cities = Table[City]("cities") + + val q1 = cities.map: c => + c.zipCode + val q2 = cities.withFilter: city => + city.population > 10_000 + .map: city => + city.name + + val q3 = + for + city <- cities + if city.population > 10_000 + yield city.name + + val q4 = + for + city <- cities + alt <- cities + if city.name == alt.name && city.zipCode != alt.zipCode + yield + city + + val addresses = Table[Address]("addresses") + val q5 = + for + city <- cities + addr <- addresses + if addr.street == city.name + yield + (name = city.name, num = addr.number) + + val q6 = + cities.map: city => + (name = city.name, zipCode = city.zipCode) + + def run[T](q: Query[T]): Iterator[T] = ??? 
+ + def x1: Iterator[Int] = run(q1) + def x2: Iterator[String] = run(q2) + def x3: Iterator[String] = run(q3) + def x4: Iterator[City] = run(q4) + def x5: Iterator[(name: String, num: Int)] = run(q5) + def x6: Iterator[(name: String, zipCode: Int)] = run(q6) + + println(q1) + println(q2) + println(q3) + println(q4) + println(q5) + println(q6) + +/* The following is not needed currently + +/** A type class for types that can map to a database table */ +trait Row: + type Self + type Fields = NamedTuple.From[Self] + type FieldExprs = NamedTuple.Map[Fields, Expr] + + //def toFields(x: Self): Fields = ??? + //def fromFields(x: Fields): Self = ??? + +*/ \ No newline at end of file diff --git a/tests/semanticdb/expect/Methods.expect.scala b/tests/semanticdb/expect/Methods.expect.scala index f34c657b2f6d..e1fcfa6880e1 100644 --- a/tests/semanticdb/expect/Methods.expect.scala +++ b/tests/semanticdb/expect/Methods.expect.scala @@ -15,7 +15,7 @@ class Methods/*<-example::Methods#*/[T/*<-example::Methods#[T]*/] { def m6/*<-example::Methods#m6().*/(x/*<-example::Methods#m6().(x)*/: Int/*->scala::Int#*/) = ???/*->scala::Predef.`???`().*/ def m6/*<-example::Methods#m6(+1).*/(x/*<-example::Methods#m6(+1).(x)*/: List/*->example::Methods#List#*/[T/*->example::Methods#[T]*/]) = ???/*->scala::Predef.`???`().*/ def m6/*<-example::Methods#m6(+2).*/(x/*<-example::Methods#m6(+2).(x)*/: scala.List/*->scala::package.List#*/[T/*->example::Methods#[T]*/]) = ???/*->scala::Predef.`???`().*/ - def m7/*<-example::Methods#m7().*/[U/*<-example::Methods#m7().[U]*//*<-example::Methods#m7().(evidence$1)*/: Ordering/*->scala::math::Ordering#*/](c/*<-example::Methods#m7().(c)*/: Methods/*->example::Methods#*/[T/*->example::Methods#[T]*/], l/*<-example::Methods#m7().(l)*/: List/*->example::Methods#List#*/[U/*->example::Methods#m7().[U]*/]) = ???/*->scala::Predef.`???`().*/ + def m7/*<-example::Methods#m7().*/[U/*<-example::Methods#m7().[U]*/: 
Ordering/*->scala::math::Ordering#*//*->example::Methods#m7().[U]*//*<-example::Methods#m7().(evidence$1)*/](c/*<-example::Methods#m7().(c)*/: Methods/*->example::Methods#*/[T/*->example::Methods#[T]*/], l/*<-example::Methods#m7().(l)*/: List/*->example::Methods#List#*/[U/*->example::Methods#m7().[U]*/]) = ???/*->scala::Predef.`???`().*/ def `m8()./*<-example::Methods#`m8().`().*/`() = ???/*->scala::Predef.`???`().*/ class `m9()./*<-example::Methods#`m9().`#*/` def m9/*<-example::Methods#m9().*/(x/*<-example::Methods#m9().(x)*/: `m9().`/*->example::Methods#`m9().`#*/) = ???/*->scala::Predef.`???`().*/ diff --git a/tests/semanticdb/expect/Synthetic.expect.scala b/tests/semanticdb/expect/Synthetic.expect.scala index a4419aa8bd82..4d797ce2b856 100644 --- a/tests/semanticdb/expect/Synthetic.expect.scala +++ b/tests/semanticdb/expect/Synthetic.expect.scala @@ -30,7 +30,7 @@ class Synthetic/*<-example::Synthetic#*/ { null.asInstanceOf/*->scala::Any#asInstanceOf().*/[Int/*->scala::Int#*/ => Int/*->scala::Int#*/](2) } - class J/*<-example::Synthetic#J#*/[T/*<-example::Synthetic#J#[T]*//*<-example::Synthetic#J#evidence$1.*/: Manifest/*->scala::Predef.Manifest#*/] { val arr/*<-example::Synthetic#J#arr.*/ = Array/*->scala::Array.*/.empty/*->scala::Array.empty().*/[T/*->example::Synthetic#J#[T]*/] } + class J/*<-example::Synthetic#J#*/[T/*<-example::Synthetic#J#[T]*/: /*<-example::Synthetic#J#evidence$1.*/Manifest/*->scala::Predef.Manifest#*//*->example::Synthetic#J#[T]*/] { val arr/*<-example::Synthetic#J#arr.*/ = Array/*->scala::Array.*/.empty/*->scala::Array.empty().*/[T/*->example::Synthetic#J#[T]*/] } class F/*<-example::Synthetic#F#*/ implicit val ordering/*<-example::Synthetic#ordering.*/: Ordering/*->scala::package.Ordering#*/[F/*->example::Synthetic#F#*/] = ???/*->scala::Predef.`???`().*/ diff --git a/tests/semanticdb/metac.expect b/tests/semanticdb/metac.expect index d1eabaa95bf7..221422de6505 100644 --- a/tests/semanticdb/metac.expect +++ 
b/tests/semanticdb/metac.expect @@ -2020,7 +2020,7 @@ Symbols: example/InstrumentTyper# => class InstrumentTyper extends Object { self: AnyRef & InstrumentTyper => +5 decls } example/InstrumentTyper#AnnotatedType# => type AnnotatedType = Int @param example/InstrumentTyper#``(). => primary ctor (): InstrumentTyper -example/InstrumentTyper#all(). => method all => List[Float | Double | List[Nothing] | Boolean | Unit | Char | String | LinkOption | Int | Long | Class[Option[Int]]] +example/InstrumentTyper#all(). => method all => List[Char | String | LinkOption | Int | Long | Class[Option[Int]] | Float | Double | Boolean | Unit | List[Nothing]] example/InstrumentTyper#clazzOf. => final val method clazzOf Option[Int] example/InstrumentTyper#singletonType(). => method singletonType (param x: Predef.type): Nothing example/InstrumentTyper#singletonType().(x) => param x: Predef.type @@ -2082,7 +2082,7 @@ Occurrences: [24:37..24:40): Int -> scala/Int# Synthetics: -[8:12..8:16):List => *.apply[Float | Double | List[Nothing] | Boolean | Unit | Char | String | LinkOption | Int | Long | Class[Option[Int]]] +[8:12..8:16):List => *.apply[Char | String | LinkOption | Int | Long | Class[Option[Int]] | Float | Double | Boolean | Unit | List[Nothing]] [20:4..20:8):List => *.apply[Nothing] expect/InventedNames.scala @@ -2588,7 +2588,7 @@ Uri => Methods.scala Text => empty Language => Scala Symbols => 82 entries -Occurrences => 156 entries +Occurrences => 157 entries Symbols: example/Methods# => class Methods [typeparam T ] extends Object { self: Methods[T] => +44 decls } @@ -2732,8 +2732,9 @@ Occurrences: [16:29..16:32): ??? -> scala/Predef.`???`(). [17:6..17:8): m7 <- example/Methods#m7(). 
[17:9..17:10): U <- example/Methods#m7().[U] -[17:10..17:10): <- example/Methods#m7().(evidence$1) [17:12..17:20): Ordering -> scala/math/Ordering# +[17:12..17:20): Ordering -> example/Methods#m7().[U] +[17:12..17:12): <- example/Methods#m7().(evidence$1) [17:22..17:23): c <- example/Methods#m7().(c) [17:25..17:32): Methods -> example/Methods# [17:33..17:34): T -> example/Methods#[T] @@ -3533,7 +3534,7 @@ Uri => Synthetic.scala Text => empty Language => Scala Symbols => 52 entries -Occurrences => 136 entries +Occurrences => 137 entries Synthetics => 39 entries Symbols: @@ -3659,8 +3660,9 @@ Occurrences: [32:8..32:9): J <- example/Synthetic#J# [32:9..32:9): <- example/Synthetic#J#``(). [32:10..32:11): T <- example/Synthetic#J#[T] -[32:11..32:11): <- example/Synthetic#J#evidence$1. +[32:13..32:13): <- example/Synthetic#J#evidence$1. [32:13..32:21): Manifest -> scala/Predef.Manifest# +[32:13..32:21): Manifest -> example/Synthetic#J#[T] [32:29..32:32): arr <- example/Synthetic#J#arr. [32:35..32:40): Array -> scala/Array. [32:41..32:46): empty -> scala/Array.empty(). 
diff --git a/tests/untried/neg/choices.check b/tests/untried/neg/choices.check index b114394e9609..2e45461ca178 100644 --- a/tests/untried/neg/choices.check +++ b/tests/untried/neg/choices.check @@ -1,2 +1,2 @@ -error: bad options: -Yresolve-term-conflict +error: bad options: -Xresolve-term-conflict one error found diff --git a/tests/untried/neg/choices.flags b/tests/untried/neg/choices.flags index 9718467d4ca2..7a04890a6dee 100644 --- a/tests/untried/neg/choices.flags +++ b/tests/untried/neg/choices.flags @@ -1 +1 @@ --Yresolve-term-conflict +-Xresolve-term-conflict diff --git a/tests/neg/12974.scala b/tests/warn/12974.scala similarity index 94% rename from tests/neg/12974.scala rename to tests/warn/12974.scala index 90edcc916471..45029602296f 100644 --- a/tests/neg/12974.scala +++ b/tests/warn/12974.scala @@ -23,7 +23,7 @@ object RecMap { def main(args: Array[String]) = import Record._ - val foo: Any = Rec.empty.fetch("foo") // error + val foo: Any = Rec.empty.fetch("foo") // TODO // ^ // Match type reduction failed since selector EmptyTuple.type // matches none of the cases diff --git a/tests/warn/i11022.check b/tests/warn/i11022.check index 4257bb64652c..79b8b990ed59 100644 --- a/tests/warn/i11022.check +++ b/tests/warn/i11022.check @@ -1,3 +1,7 @@ +-- Deprecation Warning: tests/warn/i11022.scala:19:22 ------------------------------------------------------------------ +19 | def usage(k: K) = k.k // warn + | ^^^ + | value k in class K is deprecated since 0.1: don't use k, ok? 
-- Deprecation Warning: tests/warn/i11022.scala:10:7 ------------------------------------------------------------------- 10 |val a: CaseClass = CaseClass(42) // warn: deprecated type // warn: deprecated apply method | ^^^^^^^^^ @@ -18,3 +22,7 @@ 12 |val c: Unit = CaseClass(42).magic() // warn: deprecated apply method | ^^^^^^^^^ | class CaseClass is deprecated: no CaseClass +-- Deprecation Warning: tests/warn/i11022.scala:14:4 ------------------------------------------------------------------- +14 |val CaseClass(rgb) = b // warn + | ^^^^^^^^^ + | class CaseClass is deprecated: no CaseClass diff --git a/tests/warn/i11022.scala b/tests/warn/i11022.scala index 8e2de9d8f519..60a7299a49b1 100644 --- a/tests/warn/i11022.scala +++ b/tests/warn/i11022.scala @@ -10,4 +10,12 @@ object CaseClass: val a: CaseClass = CaseClass(42) // warn: deprecated type // warn: deprecated apply method val b: CaseClass = new CaseClass(42) // warn: deprecated type // warn: deprecated class val c: Unit = CaseClass(42).magic() // warn: deprecated apply method -val d: Unit = CaseClass.notDeprecated() // compiles \ No newline at end of file +val d: Unit = CaseClass.notDeprecated() // compiles +val CaseClass(rgb) = b // warn + +case class K(@deprecated("don't use k, ok?","0.1") k: Int) + +object K: + def usage(k: K) = k.k // warn + +val s: String = CaseClass.toString diff --git a/tests/warn/i13433c/A_1.scala b/tests/warn/i13433c/A_1.scala new file mode 100644 index 000000000000..d810b8e34909 --- /dev/null +++ b/tests/warn/i13433c/A_1.scala @@ -0,0 +1,27 @@ +import scala.reflect.TypeTest + +type Matcher[A] = A match { case String => A } + +def patternMatch[A](a: Any)(using tt: TypeTest[Any, Matcher[A]]): Option[Matcher[A]] = { + // type T = RDF.Triple[Rdf] + a match { + case res: Matcher[A] => Some(res) + case _ => None + } +} + +def patternMatchWithAlias[A](a: Any)(using tt: TypeTest[Any, Matcher[A]]): Option[Matcher[A]] = { + type T = Matcher[A] + a match { + case res: T => Some(res) + case _ => 
None + } +} + +type S = String +type MS = Matcher[S] + +type S2 = MS +type MS2 = Matcher[S2] + +type Mstuck = Matcher[Nothing] diff --git a/tests/warn/i13433c/B_2.scala b/tests/warn/i13433c/B_2.scala new file mode 100644 index 000000000000..a0654d8cb96d --- /dev/null +++ b/tests/warn/i13433c/B_2.scala @@ -0,0 +1,23 @@ + +@main def main = { + println(patternMatch[String]("abc")) + println(patternMatchWithAlias[String]("abc")) + println(patternMatch[String]("abc")(using (s: Any) => { + if s.isInstanceOf[Matcher[String]] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None })) + println(patternMatchWithAlias[String]("abc")(using (s: Any) => { + if s.isInstanceOf[Matcher[String]] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None })) + + println(patternMatch[String](1)) + println(patternMatchWithAlias[String](1)) + + println(patternMatch[String]("abc")(using (s: Any) => { + if s.isInstanceOf[S] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None})) + println(patternMatch[String]("abc")(using (s: Any) => { + if s.isInstanceOf[MS] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None})) + println(patternMatch[String]("abc")(using (s: Any) => { + if s.isInstanceOf[S2] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None})) + println(patternMatch[String]("abc")(using (s: Any) => { + if s.isInstanceOf[MS2] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None})) + println(patternMatch[String]("abc")(using (s: Any) => { + if s.isInstanceOf[Mstuck] then Some[s.type & Matcher[String]](s.asInstanceOf[s.type & Matcher[String]]) else None})) // warn +} diff --git a/tests/warn/i15264.scala b/tests/warn/i15264.scala new file mode 100644 index 000000000000..9435c6364c08 --- /dev/null +++ b/tests/warn/i15264.scala @@ -0,0 +1,56 @@ +// Note: No check file for this test since 
the precise warning messages are non-deterministic +import language.`3.7-migration` +object priority: + // lower number = higher priority + class Prio0 extends Prio1 + object Prio0 { given Prio0() } + + class Prio1 extends Prio2 + object Prio1 { given Prio1() } + + class Prio2 + object Prio2 { given Prio2() } + +object repro: + // analogous to cats Eq, Hash, Order: + class A[V] + class B[V] extends A[V] + class C[V] extends A[V] + + class Q[V] + + object context: + // prios work here, which is cool + given[V](using priority.Prio0): C[V] = new C[V] + given[V](using priority.Prio1): B[V] = new B[V] + given[V](using priority.Prio2): A[V] = new A[V] + + object exports: + // so will these exports + export context.given + + // if you import these don't import from 'context' above + object qcontext: + // base defs, like what you would get from cats + given ga: A[Int] = new B[Int] // added so that we don't get an ambiguity in test2 + given gb: B[Int] = new B[Int] + given gc: C[Int] = new C[Int] + + // these seem like they should work but don't + given gcq[V](using p0: priority.Prio0)(using c: C[V]): C[Q[V]] = new C[Q[V]] + given gbq[V](using p1: priority.Prio1)(using b: B[V]): B[Q[V]] = new B[Q[V]] + given gaq[V](using p2: priority.Prio2)(using a: A[V]): A[Q[V]] = new A[Q[V]] + +object test1: + import repro.* + import repro.exports.given + + // these will work + val a = summon[A[Int]] // warn + + +object test2: + import repro.* + import repro.qcontext.given + + val a = summon[A[Q[Int]]] // warn diff --git a/tests/warn/i15474.scala b/tests/warn/i15474.scala index d7c41130a1bb..0d8fc111ac6a 100644 --- a/tests/warn/i15474.scala +++ b/tests/warn/i15474.scala @@ -1,4 +1,4 @@ - +//> using options -source 3.4 import scala.language.implicitConversions diff --git a/tests/warn/i15503i.scala b/tests/warn/i15503i.scala index 329b81327288..b7981e0e4206 100644 --- a/tests/warn/i15503i.scala +++ b/tests/warn/i15503i.scala @@ -247,7 +247,7 @@ package foo.test.i16679a: import 
scala.deriving.Mirror object CaseClassByStringName: inline final def derived[A](using inline A: Mirror.Of[A]): CaseClassByStringName[A] = - new CaseClassByStringName[A]: + new CaseClassByStringName[A]: // warn def name: String = A.toString object secondPackage: @@ -263,14 +263,14 @@ package foo.test.i16679b: object CaseClassName: import scala.deriving.Mirror inline final def derived[A](using inline A: Mirror.Of[A]): CaseClassName[A] = - new CaseClassName[A]: + new CaseClassName[A]: // warn def name: String = A.toString object Foo: given x: myPackage.CaseClassName[secondPackage.CoolClass] = null object secondPackage: - import myPackage.CaseClassName // OK + import myPackage.CaseClassName // warn import Foo.x case class CoolClass(i: Int) println(summon[myPackage.CaseClassName[CoolClass]]) @@ -279,7 +279,7 @@ package foo.test.i17156: package a: trait Foo[A] object Foo: - inline def derived[T]: Foo[T] = new Foo{} + inline def derived[T]: Foo[T] = new Foo{} // warn package b: import a.Foo @@ -312,4 +312,4 @@ package foo.test.i17117: val test = t1.test } - } \ No newline at end of file + } diff --git a/tests/warn/i15503j.scala b/tests/warn/i15503j.scala index f5e15bb79f79..fa30601d8960 100644 --- a/tests/warn/i15503j.scala +++ b/tests/warn/i15503j.scala @@ -49,11 +49,11 @@ package foo.unused.summon.inlines: transparent inline given conflictInside: C = summonInline[A] - new {} + ??? transparent inline given potentialConflict: C = summonInline[B] - new {} + ??? 
val b: B = summon[B] val c: C = summon[C] \ No newline at end of file diff --git a/tests/warn/i16723.check b/tests/warn/i16723.check new file mode 100644 index 000000000000..6d55fa0a89d2 --- /dev/null +++ b/tests/warn/i16723.check @@ -0,0 +1,6 @@ +-- [E197] Potential Issue Warning: tests/warn/i16723.scala:3:2 --------------------------------------------------------- +3 | new Object {} // warn + | ^ + | New anonymous class definition will be duplicated at each inline site + | + | longer explanation available when compiling with `-explain` diff --git a/tests/warn/i16723.scala b/tests/warn/i16723.scala new file mode 100644 index 000000000000..32875f4edf36 --- /dev/null +++ b/tests/warn/i16723.scala @@ -0,0 +1,3 @@ +inline def foo = + class NotAnon + new Object {} // warn \ No newline at end of file diff --git a/tests/warn/i16723a.check b/tests/warn/i16723a.check new file mode 100644 index 000000000000..ace11c5af1f9 --- /dev/null +++ b/tests/warn/i16723a.check @@ -0,0 +1,6 @@ +-- [E197] Potential Issue Warning: tests/warn/i16723a.scala:5:38 ------------------------------------------------------- +5 |inline given Converter[Int, String] = new Converter { // warn + | ^ + | New anonymous class definition will be duplicated at each inline site + | + | longer explanation available when compiling with `-explain` diff --git a/tests/warn/i16723a.scala b/tests/warn/i16723a.scala new file mode 100644 index 000000000000..90aaeb868d4e --- /dev/null +++ b/tests/warn/i16723a.scala @@ -0,0 +1,17 @@ +trait Converter[A, B] { + def convert: A => B +} + +inline given Converter[Int, String] = new Converter { // warn + def convert = _.toString() +} + +def foo(using bar: Converter[Int, String]) = + "foo" + +@main +def main = + foo + foo + foo + foo \ No newline at end of file diff --git a/tests/warn/i16743.check b/tests/warn/i16743.check new file mode 100644 index 000000000000..3010338cfb45 --- /dev/null +++ b/tests/warn/i16743.check @@ -0,0 +1,84 @@ +-- [E194] Potential Issue Warning: 
tests/warn/i16743.scala:30:6 -------------------------------------------------------- +30 | def t = 27 // warn + | ^ + | Extension method t will never be selected + | because T already has a member with the same name and compatible parameter types. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:32:6 -------------------------------------------------------- +32 | def g(x: String)(i: Int): String = x*i // warn + | ^ + | Extension method g will never be selected + | because T already has a member with the same name and compatible parameter types. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:33:6 -------------------------------------------------------- +33 | def h(x: String): String = x // warn + | ^ + | Extension method h will never be selected + | because T already has a member with the same name and compatible parameter types. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:35:6 -------------------------------------------------------- +35 | def j(x: Any, y: Int): String = (x.toString)*y // warn + | ^ + | Extension method j will never be selected + | because T already has a member with the same name and compatible parameter types. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:36:6 -------------------------------------------------------- +36 | def k(x: String): String = x // warn + | ^ + | Extension method k will never be selected + | because T already has a member with the same name and compatible parameter types. 
+ | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:38:6 -------------------------------------------------------- +38 | def m(using String): String = "m" + summon[String] // warn + | ^ + | Extension method m will never be selected + | because T already has a member with the same name and compatible parameter types. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:39:6 -------------------------------------------------------- +39 | def n(using String): String = "n" + summon[String] // warn + | ^ + | Extension method n will never be selected + | because T already has a member with the same name and compatible parameter types. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:40:6 -------------------------------------------------------- +40 | def o: String = "42" // warn + | ^ + | Extension method o will never be selected + | because T already has a member with the same name and compatible parameter types. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:41:6 -------------------------------------------------------- +41 | def u: Int = 27 // warn + | ^ + | Extension method u will never be selected + | because T already has a member with the same name and compatible parameter types. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:44:6 -------------------------------------------------------- +44 | def at: Int = 42 // warn + | ^ + | Extension method at will never be selected + | because T already has a member with the same name and compatible parameter types. 
+ | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:46:6 -------------------------------------------------------- +46 | def x(using String)(n: Int): Int = summon[String].toInt + n // warn + | ^ + | Extension method x will never be selected + | because T already has a member with the same name and compatible parameter types. + | + | longer explanation available when compiling with `-explain` +-- [E194] Potential Issue Warning: tests/warn/i16743.scala:47:6 -------------------------------------------------------- +47 | def y(using String)(s: String): String = s + summon[String] // warn + | ^ + | Extension method y will never be selected + | because T already has a member with the same name and compatible parameter types. + | + | longer explanation available when compiling with `-explain` diff --git a/tests/warn/i16743.scala b/tests/warn/i16743.scala new file mode 100644 index 000000000000..4c9c99cf30d0 --- /dev/null +++ b/tests/warn/i16743.scala @@ -0,0 +1,119 @@ + +trait G +given G = new G { override def toString = "mygiven" } +given String = "aGivenString" + +trait T: + def t = 42 + def f(x: String): String = x*2 + def g(x: String)(y: String): String = (x+y)*2 + def h(x: Any): String = x.toString*2 + def i(x: Any, y: String): String = (x.toString+y)*2 + def j(x: Any, y: Any): String = (x.toString+y.toString) + def k(using G): String = summon[G].toString + def l(using G): String = summon[G].toString + def m: String = "mystring" + def n: Result = Result() + def o: Int = 42 + def u: Int = 42 + def u(n: Int): Int = u + n + def v(n: Int): Int = u + n + def v(s: String): String = s + u + def end: Int = 42 + def at(n: Int) = n + def w(n: Int): Int = 42 + n + def x(n: Int): Int = 42 + n + def y(n: Int): Int = u + n + def y(s: String): String = s + u + +extension (_t: T) + def t = 27 // warn + def f(i: Int): String = String.valueOf(i) + def g(x: String)(i: Int): String = x*i // warn + def h(x: 
String): String = x // warn + def i(x: Any, y: Int): String = (x.toString)*y + def j(x: Any, y: Int): String = (x.toString)*y // warn + def k(x: String): String = x // warn + def l(using String): String = summon[String] + def m(using String): String = "m" + summon[String] // warn + def n(using String): String = "n" + summon[String] // warn + def o: String = "42" // warn + def u: Int = 27 // warn + def v(d: Double) = 3.14 + def end(n: Int): Int = 42 + n + def at: Int = 42 // warn + def w(using String)(n: String): Int = (summon[String] + n).toInt + def x(using String)(n: Int): Int = summon[String].toInt + n // warn + def y(using String)(s: String): String = s + summon[String] // warn + +// deferred extension is defined in subclass +trait Foo: + type X + extension (x: X) def t: Int + +trait Bar extends Foo: + type X = T + extension (x: X) def t = x.t // nowarn see Quote below + +// extension on opaque type matches member of underlying type +object Dungeon: + opaque type IArray[+T] = Array[? <: T] + object IArray: + extension (arr: IArray[Byte]) def length: Int = arr.asInstanceOf[Array[Byte]].length +trait DungeonDweller: + extension (arr: Dungeon.IArray[Byte]) def length: Int = 42 // nowarn + def f[A <: Byte](x: Dungeon.IArray[A]) = x.length +trait SadDungeonDweller: + def f[A](x: Dungeon.IArray[A]) = 27 // x.length // just to confirm, length is not a member + +trait Quote: + type Tree <: AnyRef + given TreeMethods: TreeMethods + trait TreeMethods: + extension (self: Tree) + def length(): Int +class QuotesImpl extends Quote: + type Tree = String + given TreeMethods: TreeMethods with + extension (self: Tree) + def length(): Int = self.length() // nowarn Tree already has a member with the same name. 
+ +class Result: + def apply(using String): String = s"result ${summon[String]}" + +class Depends: + type Thing = String + def thing: Thing = "" +object Depending: + extension (using depends: Depends)(x: depends.Thing) + def y = 42 + def length() = 42 // nowarn see Quote above + def f(using d: Depends) = d.thing.y + def g(using d: Depends) = d.thing.length() + +@main def test() = + val x = new T {} + println(x.f(42)) // OK! + //println(x.g("x")(42)) // NOT OK! + println(x.h("hi")) // member! + println(x.i("hi", 5)) // OK! + println(x.j("hi", 5)) // member! + println(x.k) + //println(x.k("hi")) // no, implicit is either omitted (supplied implicitly) or explicitly (using foo) + println(x.l) // usual, invokes member + println("L"+x.l(using "x")) // explicit, member doesn't check, try extension + println(x.m(using "x")) // same idea as previous, except member takes no implicits or any params + println(x.m(2)) // member checks by adapting result + println(x.n) // Result + println(x.n.apply) // apply Result with given + println(x.n(using "x")) // apply Result explicitly, not extension + println(x.end(2)) + println(x.at(2)) + println { + val p = x.at + p(2) + } + println { + given String = "42" + x.w("27") + } diff --git a/tests/warn/i17266.check b/tests/warn/i17266.check index 716cd531dd0a..ce8626b14225 100644 --- a/tests/warn/i17266.check +++ b/tests/warn/i17266.check @@ -96,3 +96,14 @@ | resolved to calls on Predef or on imported methods. This might not be what | you intended. 
------------------------------------------------------------------------------------------------------------------- +-- [E181] Potential Issue Warning: tests/warn/i17266.scala:148:2 ------------------------------------------------------- +148 | synchronized { // warn + | ^^^^^^^^^^^^ + | Suspicious top-level unqualified call to synchronized + |------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Top-level unqualified calls to AnyRef or Any methods such as synchronized are + | resolved to calls on Predef or on imported methods. This might not be what + | you intended. + ------------------------------------------------------------------------------------------------------------------- diff --git a/tests/warn/i17266.scala b/tests/warn/i17266.scala index 7e0c9f1b653b..f5d3d02b3661 100644 --- a/tests/warn/i17266.scala +++ b/tests/warn/i17266.scala @@ -43,13 +43,13 @@ object Test6: object Test7: import MyLib.* def test7 = - synchronized { // not an error + synchronized { // not an error; resolves to `Test7.synchronized` println("hello") } /* object Test7b: - def test8 = + def test7b = import MyLib.* synchronized { // already an error: Reference to synchronized is ambiguous. 
println("hello") @@ -62,21 +62,21 @@ class Test8: } class Test9: - def test5 = + def test9 = synchronized { // not an error println("hello") } class Test10: import MyLib.* - synchronized { // not an error + synchronized { // not an error; resolves to `this.synchronized` println("hello") } class Test11: import MyLib.* - def test7 = - synchronized { // not an error + def test11 = + synchronized { // not an error; resolves to `this.synchronized` println("hello") } @@ -86,14 +86,14 @@ trait Test12: } trait Test13: - def test5 = + def test13 = synchronized { // not an error println("hello") } trait Test14: import MyLib.* - synchronized { // not an error + synchronized { // not an error; resolves to `this.synchronized` println("hello") } @@ -141,4 +141,10 @@ def test26 = hashCode() // warn def test27 = - 1.hashCode()// not an error (should be? probably not) \ No newline at end of file + 1.hashCode()// not an error (should be? probably not) + +def test28 = + import MyLib.* + synchronized { // warn + println("hello") + } diff --git a/tests/warn/i17493.check b/tests/warn/i17493.check new file mode 100644 index 000000000000..8a4c102980fe --- /dev/null +++ b/tests/warn/i17493.check @@ -0,0 +1,11 @@ +-- [E181] Potential Issue Warning: tests/warn/i17493.scala:4:10 -------------------------------------------------------- +4 | def g = synchronized { println("hello, world") } // warn + | ^^^^^^^^^^^^ + | Suspicious top-level unqualified call to synchronized + |--------------------------------------------------------------------------------------------------------------------- + | Explanation (enabled by `-explain`) + |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + | Top-level unqualified calls to AnyRef or Any methods such as synchronized are + | resolved to calls on Predef or on imported methods. This might not be what + | you intended. 
+ --------------------------------------------------------------------------------------------------------------------- diff --git a/tests/warn/i17493.scala b/tests/warn/i17493.scala new file mode 100644 index 000000000000..f76f3aeb02af --- /dev/null +++ b/tests/warn/i17493.scala @@ -0,0 +1,5 @@ +//> using options -explain +class A(val s: String) extends AnyVal { + // def f = eq("hello, world") // no warning for now because `eq` is inlined + def g = synchronized { println("hello, world") } // warn +} diff --git a/tests/warn/i20146.scala b/tests/warn/i20146.scala new file mode 100644 index 000000000000..bc952104df5d --- /dev/null +++ b/tests/warn/i20146.scala @@ -0,0 +1,7 @@ +//> using options -Wunused:imports + +def test(list: List[Int]): Int = + import list.{head => first} + import list.{length => len} // warn + import list.{addString => add} // warn + first + list.length \ No newline at end of file diff --git a/tests/warn/i20420.scala b/tests/warn/i20420.scala new file mode 100644 index 000000000000..4c7585e32f48 --- /dev/null +++ b/tests/warn/i20420.scala @@ -0,0 +1,27 @@ +//> using options -source 3.6-migration + +final class StrictEqual[V] +final class Less[V] +type LessEqual[V] = Less[V] | StrictEqual[V] + +object TapirCodecIron: + trait ValidatorForPredicate[Value, Predicate] + trait PrimitiveValidatorForPredicate[Value, Predicate] + extends ValidatorForPredicate[Value, Predicate] + + given validatorForLessEqual[N: Numeric, NM <: N](using + ValueOf[NM] + ): PrimitiveValidatorForPredicate[N, LessEqual[NM]] = ??? + given validatorForDescribedOr[N, P](using + IsDescription[P] + ): ValidatorForPredicate[N, P] = ??? + + trait IsDescription[A] + object IsDescription: + given derived[A]: IsDescription[A] = ??? 
+ +@main def Test = { + import TapirCodecIron.{*, given} + type IntConstraint = LessEqual[3] + summon[ValidatorForPredicate[Int, IntConstraint]] // warn +} \ No newline at end of file diff --git a/tests/warn/i21036a.check b/tests/warn/i21036a.check new file mode 100644 index 000000000000..63d611a6e246 --- /dev/null +++ b/tests/warn/i21036a.check @@ -0,0 +1,10 @@ +-- Warning: tests/warn/i21036a.scala:7:17 ------------------------------------------------------------------------------ +7 |val y = summon[A] // warn + | ^ + | Given search preference for A between alternatives + | (b : B) + | and + | (a : A) + | will change. + | Current choice : the first alternative + | New choice from Scala 3.7: the second alternative diff --git a/tests/warn/i21036a.scala b/tests/warn/i21036a.scala new file mode 100644 index 000000000000..b7aba27ca95e --- /dev/null +++ b/tests/warn/i21036a.scala @@ -0,0 +1,7 @@ +//> using options -source 3.6 +trait A +trait B extends A +given b: B = ??? +given a: A = ??? + +val y = summon[A] // warn \ No newline at end of file diff --git a/tests/warn/i21036b.check b/tests/warn/i21036b.check new file mode 100644 index 000000000000..dfa19a0e9bb1 --- /dev/null +++ b/tests/warn/i21036b.check @@ -0,0 +1,10 @@ +-- Warning: tests/warn/i21036b.scala:7:17 ------------------------------------------------------------------------------ +7 |val y = summon[A] // warn + | ^ + | Given search preference for A between alternatives + | (b : B) + | and + | (a : A) + | has changed. + | Previous choice : the first alternative + | New choice from Scala 3.7: the second alternative diff --git a/tests/warn/i21036b.scala b/tests/warn/i21036b.scala new file mode 100644 index 000000000000..c440f5d3c06d --- /dev/null +++ b/tests/warn/i21036b.scala @@ -0,0 +1,7 @@ +//> using options -source 3.7-migration +trait A +trait B extends A +given b: B = ??? +given a: A = ??? 
+ +val y = summon[A] // warn \ No newline at end of file diff --git a/tests/warn/i9241.scala b/tests/warn/i9241.scala index ed1db2df0c8e..5b52bd8cd64d 100644 --- a/tests/warn/i9241.scala +++ b/tests/warn/i9241.scala @@ -22,22 +22,31 @@ final class Baz private (val x: Int) extends AnyVal { } extension (x: Int) + @annotation.nowarn def unary_- : Int = ??? + @annotation.nowarn def unary_+[T] : Int = ??? def unary_!() : Int = ??? // warn + @annotation.nowarn def unary_~(using Int) : Int = ??? end extension extension [T](x: Short) + @annotation.nowarn def unary_- : Int = ??? + @annotation.nowarn def unary_+[U] : Int = ??? def unary_!() : Int = ??? // warn + @annotation.nowarn def unary_~(using Int) : Int = ??? end extension extension (using Int)(x: Byte) + @annotation.nowarn def unary_- : Int = ??? + @annotation.nowarn def unary_+[U] : Int = ??? def unary_!() : Int = ??? // warn + @annotation.nowarn def unary_~(using Int) : Int = ??? -end extension \ No newline at end of file +end extension diff --git a/tests/warn/looping-givens.check b/tests/warn/looping-givens.check new file mode 100644 index 000000000000..eec348c19d11 --- /dev/null +++ b/tests/warn/looping-givens.check @@ -0,0 +1,45 @@ +-- Warning: tests/warn/looping-givens.scala:9:22 ----------------------------------------------------------------------- +9 | given aa: A = summon // warn + | ^ + | Result of implicit search for A & B will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: a. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. + | This will be an error in Scala 3.5 and later. 
+-- Warning: tests/warn/looping-givens.scala:10:22 ---------------------------------------------------------------------- +10 | given bb: B = summon // warn + | ^ + | Result of implicit search for A & B will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: b. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. + | This will be an error in Scala 3.5 and later. +-- Warning: tests/warn/looping-givens.scala:11:28 ---------------------------------------------------------------------- +11 | given ab: (A & B) = summon // warn + | ^ + | Result of implicit search for A & B will change. + | Current result ab will be no longer eligible + | because it is not defined before the search position. + | Result with new rules: joint. + | To opt into the new rules, compile with `-source future` or use + | the `scala.language.future` language import. + | + | To fix the problem without the language import, you could try one of the following: + | - use a `given ... with` clause as the enclosing given, + | - rearrange definitions so that ab comes earlier, + | - use an explicit argument. + | This will be an error in Scala 3.5 and later. 
diff --git a/tests/warn/looping-givens.scala b/tests/warn/looping-givens.scala index 6b6a32002331..2f737206f64e 100644 --- a/tests/warn/looping-givens.scala +++ b/tests/warn/looping-givens.scala @@ -1,3 +1,5 @@ +//> using options -source 3.4 + class A class B diff --git a/tests/warn/suppressed-type-test-warnings.scala b/tests/warn/suppressed-type-test-warnings.scala index 63849cb2a1ba..c78e8e263153 100644 --- a/tests/warn/suppressed-type-test-warnings.scala +++ b/tests/warn/suppressed-type-test-warnings.scala @@ -18,12 +18,10 @@ object Test { def err2[A, B](value: Foo[A, B], a: A => Int): B = value match { case b: Bar[B] => // spurious // warn b.x - case _ => ??? // avoid fatal inexhaustivity warnings suppressing the uncheckable warning } def fail[A, B](value: Foo[A, B], a: A => Int): B = value match { case b: Bar[Int] => // warn b.x - case _ => ??? // avoid fatal inexhaustivity warnings suppressing the uncheckable warning } }