diff --git a/.github/Dockerfile b/.github/Dockerfile index 0115cb7a9dd2..12fc8d2fc013 100644 --- a/.github/Dockerfile +++ b/.github/Dockerfile @@ -15,7 +15,8 @@ RUN apt-get update && \ openjdk-17-jdk-headless \ openjdk-21-jdk-headless && \ (curl -fsSL https://deb.nodesource.com/setup_18.x | bash -) && \ - apt-get install -y nodejs + apt-get install -y nodejs && \ + apt-get install -y zip unzip # Install sbt diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 95a6ed24df13..ffa7e515b926 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -48,7 +48,7 @@ jobs: test_non_bootstrapped: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -100,7 +100,7 @@ jobs: test: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -160,7 +160,7 @@ jobs: test_scala2_library_tasty: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -287,7 +287,7 @@ jobs: name: MiMa runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -338,7 +338,7 @@ jobs: community_build_a: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -395,7 +395,7 @@ jobs: community_build_b: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -452,7 +452,7 @@ jobs: community_build_c: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -509,7 +509,7 @@ jobs: test_sbt: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -554,7 +554,7 @@ jobs: test_java8: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -614,7 +614,7 @@ jobs: publish_nightly: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -663,6 +663,14 @@ jobs: echo "This build version: $ver" echo "THISBUILD_VERSION=$ver" >> $GITHUB_ENV + - name: Check is version matching pattern + shell: bash + run: | + if ! 
grep -Eo "3\.[0-9]+\.[0-9]+-RC[0-9]+-bin-[0-9]{8}-[a-zA-Z0-9]{7}-NIGHTLY" <<< "${{ env.THISBUILD_VERSION }}"; then + echo "Version used by compiler to publish nightly release does not match expected pattern" + exit 1 + fi + - name: Check whether not yet published id: not_yet_published continue-on-error: true @@ -677,7 +685,7 @@ jobs: nightly_documentation: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt @@ -723,13 +731,13 @@ jobs: contents: write # for actions/create-release to create a release runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 options: --cpu-shares 4096 volumes: - ${{ github.workspace }}/../../cache/sbt:/root/.sbt - ${{ github.workspace }}/../../cache/ivy:/root/.ivy2/cache - ${{ github.workspace }}/../../cache/general:/root/.cache - needs: [test_non_bootstrapped, test, mima, community_build_a, community_build_b, community_build_c, test_sbt, test_java8] + needs: [test_non_bootstrapped, test, mima, community_build_a, community_build_b, community_build_c, test_sbt, test_java8, build-sdk-package] if: "github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/')" @@ -765,31 +773,49 @@ jobs: # Extract the release tag - name: Extract the release tag run : echo "RELEASE_TAG=${GITHUB_REF#*refs/tags/}" >> $GITHUB_ENV - # BUILD THE SDKs - - name: Build and pack the SDK (universal) - run : | - ./project/scripts/sbt dist/packArchive - sha256sum dist/target/scala3-* > dist/target/sha256sum.txt - - name: Build and pack the SDK (linux x86-64) - run : | - ./project/scripts/sbt dist-linux-x86_64/packArchive - sha256sum dist/linux-x86_64/target/scala3-* > dist/linux-x86_64/target/sha256sum.txt - - name: Build and pack the SDK (linux aarch64) - run : | - ./project/scripts/sbt dist-linux-aarch64/packArchive - sha256sum dist/linux-aarch64/target/scala3-* > dist/linux-aarch64/target/sha256sum.txt - - name: Build and pack the SDK (mac x86-64) - run : | - ./project/scripts/sbt dist-mac-x86_64/packArchive - sha256sum dist/mac-x86_64/target/scala3-* > dist/mac-x86_64/target/sha256sum.txt - - name: Build and pack the SDK (mac aarch64) + + - name: Check compiler version + shell: bash run : | - ./project/scripts/sbt dist-mac-aarch64/packArchive - sha256sum dist/mac-aarch64/target/scala3-* > dist/mac-aarch64/target/sha256sum.txt - - name: Build and pack the SDK (win x86-64) + version=$(./project/scripts/sbt "print scala3-compiler-bootstrapped/version" | tail -n1) + echo "This build version: ${version}" + if [ "${version}" != "${{ env.RELEASE_TAG }}" ]; then + echo "Compiler version for this build '${version}', does not match tag: ${{ env.RELEASE_TAG }}" + exit 1 + fi + + - name: Prepare the SDKs + shell: bash run : | - ./project/scripts/sbt dist-win-x86_64/packArchive - sha256sum dist/win-x86_64/target/scala3-* > dist/win-x86_64/target/sha256sum.txt + prepareSDK() { + distroSuffix="$1" + sbtProject="$2" + distDir="$3" + + # Build binaries + ./project/scripts/sbt "${sbtProject}/Universal/stage" + + outputPath="${distDir}/target/universal/stage" + artifactName="scala3-${{ env.RELEASE_TAG }}${distroSuffix}" + zipArchive="${artifactName}.zip" + tarGzArchive="${artifactName}.tar.gz" + + cwd=$(pwd) + (cd $outputPath && zip -r ${zipArchive} . && mv ${zipArchive} "${cwd}/") + tar -czf ${tarGzArchive} -C "$outputPath" . 
+ + # Caluclate SHA for each of archive files + for file in "${zipArchive}" "${tarGzArchive}"; do + sha256sum "${file}" > "${file}.sha256" + done + } + prepareSDK "" "dist" "./dist/" + prepareSDK "-aarch64-pc-linux" "dist-linux-aarch64" "./dist/linux-aarch64/" + prepareSDK "-x86_64-pc-linux" "dist-linux-x86_64" "./dist/linux-x86_64/" + prepareSDK "-aarch64-apple-darwin" "dist-mac-aarch64" "./dist/mac-aarch64/" + prepareSDK "-x86_64-apple-darwin" "dist-mac-x86_64" "./dist/mac-x86_64/" + prepareSDK "-x86_64-pc-win32" "dist-win-x86_64" "./dist/win-x86_64/" + # Create the GitHub release - name: Create GitHub Release id: create_gh_release @@ -803,179 +829,267 @@ jobs: draft: true prerelease: ${{ contains(env.RELEASE_TAG, '-') }} - - name: Upload zip archive to GitHub Release (universal) + # The following steps are generated using template: + # def template(distribution: String, suffix: String) = + # def upload(kind: String, path: String, contentType: String) = + # s"""- name: Upload $kind to GitHub Release ($distribution) + # uses: actions/upload-release-asset@v1 + # env: + # GITHUB_TOKEN: $${{ secrets.GITHUB_TOKEN }} + # with: + # upload_url: $${{ steps.create_gh_release.outputs.upload_url }} + # asset_path: ./${path} + # asset_name: ${path} + # asset_content_type: ${contentType}""" + # val filename = s"scala3-$${{ env.RELEASE_TAG }}${suffix}" + # s""" + # # $distribution + # ${upload("zip archive", s"$filename.zip", "application/zip")} + # ${upload("zip archive SHA", s"$filename.zip.sha256", "text/plain")} + # ${upload("tar.gz archive", s"$filename.tar.gz", "application/gzip")} + # ${upload("tar.gz archive SHA", s"$filename.tar.gz.sha256", "text/plain")} + # """ + + # @main def gen = + # Seq( + # template("Universal", ""), + # template("Linux x86-64", "-x86_64-pc-linux"), + # template("Linux aarch64", "-aarch64-pc-linux"), + # template("Mac x86-64", "-x86_64-apple-darwin"), + # template("Mac aarch64", "-aarch64-apple-darwin"), + # template("Windows x86_64", "-x86_64-pc-win32") + # ).foreach(println) + # Universal + - name: Upload zip archive to GitHub Release (Universal) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/target/scala3-${{ env.RELEASE_TAG }}.zip + asset_path: ./scala3-${{ env.RELEASE_TAG }}.zip asset_name: scala3-${{ env.RELEASE_TAG }}.zip asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (universal) + - name: Upload zip archive SHA to GitHub Release (Universal) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/target/scala3-${{ env.RELEASE_TAG }}.tar.gz + asset_path: ./scala3-${{ env.RELEASE_TAG }}.zip.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}.zip.sha256 + asset_content_type: text/plain + - name: Upload tar.gz archive to GitHub Release (Universal) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./scala3-${{ env.RELEASE_TAG }}.tar.gz asset_name: scala3-${{ env.RELEASE_TAG }}.tar.gz asset_content_type: application/gzip + - name: Upload tar.gz archive SHA to GitHub Release (Universal) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: 
./scala3-${{ env.RELEASE_TAG }}.tar.gz.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}.tar.gz.sha256 + asset_content_type: text/plain - - name: Upload zip archive to GitHub Release (linux x86-64) + + # Linux x86-64 + - name: Upload zip archive to GitHub Release (Linux x86-64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (linux x86-64) + - name: Upload zip archive SHA to GitHub Release (Linux x86-64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.zip.sha256 + asset_content_type: text/plain + - name: Upload tar.gz archive to GitHub Release (Linux x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz asset_content_type: application/gzip + - name: Upload tar.gz archive SHA to GitHub Release (Linux x86-64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-linux.tar.gz.sha256 + asset_content_type: text/plain + - - name: Upload zip archive to GitHub Release (linux aarch64) + # Linux aarch64 + - name: Upload zip archive to GitHub Release (Linux aarch64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (linux aarch64) + - name: Upload zip archive SHA to GitHub Release (Linux aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.zip.sha256 + asset_content_type: text/plain + - name: Upload tar.gz archive to GitHub Release (Linux aarch64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz 
asset_content_type: application/gzip + - name: Upload tar.gz archive SHA to GitHub Release (Linux aarch64) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_gh_release.outputs.upload_url }} + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-pc-linux.tar.gz.sha256 + asset_content_type: text/plain + - - name: Upload zip archive to GitHub Release (mac x86-64) + # Mac x86-64 + - name: Upload zip archive to GitHub Release (Mac x86-64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.zip + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.zip asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.zip asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (mac x86-64) + - name: Upload zip archive SHA to GitHub Release (Mac x86-64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz - asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz - asset_content_type: application/gzip - - - name: Upload zip archive to GitHub Release (mac aarch64) + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.zip.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.zip.sha256 + asset_content_type: text/plain + - name: Upload tar.gz archive to GitHub Release (Mac x86-64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip - asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip - asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (mac aarch64) + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz + asset_content_type: application/gzip + - name: Upload tar.gz archive SHA to GitHub Release (Mac x86-64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-aarch64/target/scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz - asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz - asset_content_type: application/gzip + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-apple-darwin.tar.gz.sha256 + asset_content_type: text/plain + - - name: Upload zip archive to GitHub Release (win x86-64) + # Mac aarch64 + - name: Upload zip archive to GitHub Release (Mac aarch64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/win-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip - asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip + 
asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip asset_content_type: application/zip - - name: Upload tar.gz archive to GitHub Release (win x86-64) + - name: Upload zip archive SHA to GitHub Release (Mac aarch64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/win-x86_64/target/scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz - asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz - asset_content_type: application/gzip - - - - name: Upload SHA256 sum of the release artefacts to GitHub Release (universal) + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.zip.sha256 + asset_content_type: text/plain + - name: Upload tar.gz archive to GitHub Release (Mac aarch64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/target/sha256sum.txt - asset_name: sha256sum.txt - asset_content_type: text/plain - - - name: Upload SHA256 sum of the release artefacts to GitHub Release (linux x86-64) + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz + asset_content_type: application/gzip + - name: Upload tar.gz archive SHA to GitHub Release (Mac aarch64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-x86_64/target/sha256sum.txt - asset_name: sha256sum-x86_64-pc-linux.txt + asset_path: ./scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-aarch64-apple-darwin.tar.gz.sha256 asset_content_type: text/plain - - name: Upload SHA256 sum of the release artefacts to GitHub Release (linux aarch64) + + # Windows x86_64 + - name: Upload zip archive to GitHub Release (Windows x86_64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/linux-aarch64/target/sha256sum.txt - asset_name: sha256sum-aarch64-pc-linux.txt - asset_content_type: text/plain - - - name: Upload SHA256 sum of the release artefacts to GitHub Release (mac x86-64) + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip + asset_content_type: application/zip + - name: Upload zip archive SHA to GitHub Release (Windows x86_64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-x86_64/target/sha256sum.txt - asset_name: sha256sum-x86_64-apple-darwin.txt + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.zip.sha256 asset_content_type: text/plain - - - name: Upload SHA256 sum of the release artefacts to GitHub Release (mac aarch64) + - name: Upload tar.gz archive to GitHub Release (Windows x86_64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/mac-aarch64/target/sha256sum.txt - asset_name: 
sha256sum-aarch64-apple-darwin.txt - asset_content_type: text/plain - - - name: Upload SHA256 sum of the release artefacts to GitHub Release (win x86-64) + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz + asset_content_type: application/gzip + - name: Upload tar.gz archive SHA to GitHub Release (Windows x86_64) uses: actions/upload-release-asset@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: upload_url: ${{ steps.create_gh_release.outputs.upload_url }} - asset_path: ./dist/win-x86_64/target/sha256sum.txt - asset_name: sha256sum-x86_64-pc-win32.txt + asset_path: ./scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz.sha256 + asset_name: scala3-${{ env.RELEASE_TAG }}-x86_64-pc-win32.tar.gz.sha256 asset_content_type: text/plain - name: Publish Release @@ -985,7 +1099,7 @@ jobs: open_issue_on_failure: runs-on: [self-hosted, Linux] container: - image: lampepfl/dotty:2023-11-07 + image: lampepfl/dotty:2024-10-18 needs: [nightly_documentation, test_windows_full] # The `failure()` expression is true iff at least one of the dependencies # of this job (including transitive dependencies) has failed. @@ -1005,7 +1119,15 @@ jobs: build-msi-package: uses: ./.github/workflows/build-msi.yml if : github.event_name == 'pull_request' && contains(github.event.pull_request.body, '[test_msi]') - # TODO: ADD A JOB THAT DEPENDS ON THIS TO TEST THE MSI + + test-msi-package: + uses: ./.github/workflows/test-msi.yml + needs: [build-msi-package] + with: + # Ensure that version starts with prefix 3. + # In the future it can be adapted to compare with with git tag or version set in the build.s + version: "3." + java-version: 8 build-sdk-package: uses: ./.github/workflows/build-sdk.yml diff --git a/.github/workflows/test-msi.yml b/.github/workflows/test-msi.yml new file mode 100644 index 000000000000..1299c3d55061 --- /dev/null +++ b/.github/workflows/test-msi.yml @@ -0,0 +1,77 @@ +################################################################################################### +### THIS IS A REUSABLE WORKFLOW TO TEST SCALA WITH MSI RUNNER ### +### HOW TO USE: ### +### Provide optional `version` to test if installed binaries are installed with ### +### correct Scala version. ### +### NOTE: Requires `scala.msi` artifact uploaded within the same run ### +### ### +################################################################################################### + +name: Test 'scala' MSI Package +run-name: Test 'scala' (${{ inputs.version }}) MSI Package + +on: + workflow_call: + inputs: + version: + required: true + type: string + java-version: + required: true + type : string + +jobs: + test: + runs-on: windows-latest + steps: + - uses: actions/setup-java@v4 + with: + distribution: temurin + java-version: ${{ inputs.java-version }} + - name: Download MSI artifact + uses: actions/download-artifact@v4 + with: + name: scala.msi + path: . + + # Run the MSI installer + # During normal installation msiexec would modify the PATH automatically. + # However, it seems not to work in GH Actions. Append the PATH manually instead. 
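+      # The path added below ("C:\Program Files (x86)\scala\bin") is assumed to be the
+      # default install location of the MSI; adjust it if the installer layout changes.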
+ - name: Install Scala Runner + shell: pwsh + run: | + Start-Process 'msiexec.exe' -ArgumentList '/I "scala.msi" /L*V "install.log" /qb' -Wait + Get-Content 'install.log' + Add-Content $env:GITHUB_PATH "C:\Program Files (x86)\scala\bin" + + # Run tests to ensure the Scala Runner was installed and works + - name: Test Scala Runner + shell: pwsh + run: | + scala --version + if (-not (scala --version | Select-String "Scala version \(default\): ${{ inputs.version }}")) { + Write-Host "Invalid Scala version of MSI installed runner, expected ${{ inputs.version }}" + Exit 1 + } + - name : Test the `scalac` command + shell: pwsh + run: | + scalac --version + if (-not (scalac --version | Select-String "Scala compiler version ${{ inputs.version }}")) { + Write-Host "Invalid scalac version of MSI installed runner, expected ${{ inputs.version }}" + Exit 1 + } + - name : Test the `scaladoc` command + shell: pwsh + run: | + scaladoc --version + if (-not (scaladoc --version | Select-String "Scaladoc version ${{ inputs.version }}")) { + Write-Host "Invalid scaladoc version of MSI installed runner, expected ${{ inputs.version }}" + Exit 1 + } + - name : Uninstall the `scala` package + shell: pwsh + run: | + Start-Process 'msiexec.exe' -ArgumentList '/X "scala.msi" /L*V "uninstall.log" /qb' -Wait + Get-Content 'uninstall.log' + \ No newline at end of file diff --git a/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala b/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala index 6a0c54c4b00b..b3065fefe87f 100644 --- a/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala +++ b/community-build/src/scala/dotty/communitybuild/CommunityBuildRunner.scala @@ -16,13 +16,12 @@ object CommunityBuildRunner: * and avoid network overhead. See https://github.com/lampepfl/dotty-drone * for more infrastructural details. */ - extension (self: CommunityProject) def run()(using suite: CommunityBuildRunner): Unit = - if self.requiresExperimental && !compilerSupportExperimental then - log(s"Skipping ${self.project} - it needs experimental features unsupported in this build.") - return - self.dependencies.foreach(_.publish()) - self.testOnlyDependencies().foreach(_.publish()) - suite.runProject(self) + extension (self: CommunityProject) + def run()(using suite: CommunityBuildRunner): Unit = + self.dependencies.foreach(_.publish()) + self.testOnlyDependencies().foreach(_.publish()) + suite.runProject(self) + end extension trait CommunityBuildRunner: diff --git a/community-build/src/scala/dotty/communitybuild/Main.scala b/community-build/src/scala/dotty/communitybuild/Main.scala index 852cee46af22..c813f5ff684b 100644 --- a/community-build/src/scala/dotty/communitybuild/Main.scala +++ b/community-build/src/scala/dotty/communitybuild/Main.scala @@ -55,10 +55,7 @@ object Main: Seq("rm", "-rf", destStr).! 
Files.createDirectory(dest) val (toRun, ignored) = - allProjects.partition( p => - p.docCommand != null - && (!p.requiresExperimental || compilerSupportExperimental) - ) + allProjects.partition(_.docCommand != null) val paths = toRun.map { project => val name = project.project diff --git a/community-build/src/scala/dotty/communitybuild/projects.scala b/community-build/src/scala/dotty/communitybuild/projects.scala index a6c1c34aadda..7d5a4cb35a9d 100644 --- a/community-build/src/scala/dotty/communitybuild/projects.scala +++ b/community-build/src/scala/dotty/communitybuild/projects.scala @@ -10,9 +10,6 @@ lazy val compilerVersion: String = val file = communitybuildDir.resolve("scala3-bootstrapped.version") new String(Files.readAllBytes(file), UTF_8) -lazy val compilerSupportExperimental: Boolean = - compilerVersion.contains("SNAPSHOT") || compilerVersion.contains("NIGHTLY") - lazy val sbtPluginFilePath: String = // Workaround for https://github.com/sbt/sbt/issues/4395 new File(sys.props("user.home") + "/.sbt/1.0/plugins").mkdirs() @@ -43,7 +40,6 @@ sealed trait CommunityProject: val testOnlyDependencies: () => List[CommunityProject] val binaryName: String val runCommandsArgs: List[String] = Nil - val requiresExperimental: Boolean val environment: Map[String, String] = Map.empty final val projectDir = communitybuildDir.resolve("community-projects").resolve(project) @@ -53,7 +49,6 @@ sealed trait CommunityProject: /** Publish this project to the local Maven repository */ final def publish(): Unit = - // TODO what should this do with .requiresExperimental? if !published then publishDependencies() log(s"Publishing $project") @@ -65,11 +60,6 @@ sealed trait CommunityProject: published = true final def doc(): Unit = - if this.requiresExperimental && !compilerSupportExperimental then - log( - s"Skipping ${this.project} - it needs experimental features unsupported in this build." 
- ) - return publishDependencies() log(s"Documenting $project") if docCommand eq null then @@ -89,8 +79,7 @@ final case class MillCommunityProject( baseCommand: String, dependencies: List[CommunityProject] = Nil, testOnlyDependencies: () => List[CommunityProject] = () => Nil, - ignoreDocs: Boolean = false, - requiresExperimental: Boolean = false, + ignoreDocs: Boolean = false ) extends CommunityProject: override val binaryName: String = "./mill" override val testCommand = s"$baseCommand.test" @@ -109,8 +98,7 @@ final case class SbtCommunityProject( testOnlyDependencies: () => List[CommunityProject] = () => Nil, sbtPublishCommand: String = null, sbtDocCommand: String = null, - scalacOptions: List[String] = SbtCommunityProject.scalacOptions, - requiresExperimental: Boolean = false, + scalacOptions: List[String] = SbtCommunityProject.scalacOptions ) extends CommunityProject: override val binaryName: String = "sbt" @@ -260,7 +248,6 @@ object projects: project = "intent", sbtTestCommand = "test", sbtDocCommand = "doc", - requiresExperimental = true, ) lazy val scalacheck = SbtCommunityProject( @@ -489,8 +476,8 @@ object projects: lazy val scalaCollectionCompat = SbtCommunityProject( project = "scala-collection-compat", - sbtTestCommand = "compat30/test", - sbtPublishCommand = "compat30/publishLocal", + sbtTestCommand = "compat3/test", + sbtPublishCommand = "compat3/publishLocal", ) lazy val scalaJava8Compat = SbtCommunityProject( diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala index 394700c2898e..4f4caf36d92a 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala @@ -623,7 +623,13 @@ trait BCodeSkelBuilder extends BCodeHelpers { } if (emitLines && tree.span.exists && !tree.hasAttachment(SyntheticUnit)) { - val nr = ctx.source.offsetToLine(tree.span.point) + 1 + val nr = + val sourcePos = tree.sourcePos + ( + if sourcePos.exists then sourcePos.source.positionInUltimateSource(sourcePos).line + else ctx.source.offsetToLine(tree.span.point) // fallback + ) + 1 + if (nr != lastEmittedLineNr) { lastEmittedLineNr = nr getNonLabelNode(lastInsn) match { diff --git a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala index 6e2449b5c299..7ba39768871b 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala @@ -25,13 +25,13 @@ import dotty.tools.dotc.transform.{Erasure, ValueClasses} import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.report -import org.scalajs.ir -import org.scalajs.ir.{ClassKind, Position, Names => jsNames, Trees => js, Types => jstpe} -import org.scalajs.ir.Names.{ClassName, MethodName, SimpleMethodName} -import org.scalajs.ir.OriginalName -import org.scalajs.ir.OriginalName.NoOriginalName -import org.scalajs.ir.Trees.OptimizerHints -import org.scalajs.ir.Version.Unversioned +import dotty.tools.sjs.ir +import dotty.tools.sjs.ir.{ClassKind, Position, Names => jsNames, Trees => js, Types => jstpe} +import dotty.tools.sjs.ir.Names.{ClassName, MethodName, SimpleMethodName} +import dotty.tools.sjs.ir.OriginalName +import dotty.tools.sjs.ir.OriginalName.NoOriginalName +import dotty.tools.sjs.ir.Trees.OptimizerHints +import dotty.tools.sjs.ir.Version.Unversioned import dotty.tools.dotc.transform.sjs.JSSymUtils.* diff --git a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala 
b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala index 098f592daa30..9a7753680bc3 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala @@ -15,12 +15,12 @@ import StdNames.* import dotty.tools.dotc.transform.sjs.JSSymUtils.* -import org.scalajs.ir -import org.scalajs.ir.{Trees => js, Types => jstpe} -import org.scalajs.ir.Names.{LocalName, LabelName, SimpleFieldName, FieldName, SimpleMethodName, MethodName, ClassName} -import org.scalajs.ir.OriginalName -import org.scalajs.ir.OriginalName.NoOriginalName -import org.scalajs.ir.UTF8String +import dotty.tools.sjs.ir +import dotty.tools.sjs.ir.{Trees => js, Types => jstpe} +import dotty.tools.sjs.ir.Names.{LocalName, LabelName, SimpleFieldName, FieldName, SimpleMethodName, MethodName, ClassName} +import dotty.tools.sjs.ir.OriginalName +import dotty.tools.sjs.ir.OriginalName.NoOriginalName +import dotty.tools.sjs.ir.UTF8String import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions diff --git a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala index b5f9446758a9..e6c73357aa4c 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala @@ -22,12 +22,12 @@ import TypeErasure.ErasedValueType import dotty.tools.dotc.util.{SourcePosition, SrcPos} import dotty.tools.dotc.report -import org.scalajs.ir.{Position, Names => jsNames, Trees => js, Types => jstpe} -import org.scalajs.ir.Names.DefaultModuleID -import org.scalajs.ir.OriginalName.NoOriginalName -import org.scalajs.ir.Position.NoPosition -import org.scalajs.ir.Trees.OptimizerHints -import org.scalajs.ir.Version.Unversioned +import dotty.tools.sjs.ir.{Position, Names => jsNames, Trees => js, Types => jstpe} +import dotty.tools.sjs.ir.Names.DefaultModuleID +import dotty.tools.sjs.ir.OriginalName.NoOriginalName +import dotty.tools.sjs.ir.Position.NoPosition +import dotty.tools.sjs.ir.Trees.OptimizerHints +import dotty.tools.sjs.ir.Version.Unversioned import dotty.tools.dotc.transform.sjs.JSExportUtils.* import dotty.tools.dotc.transform.sjs.JSSymUtils.* @@ -932,7 +932,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { InstanceOfTypeTest(tpe.tycon.typeSymbol.typeRef) case _ => - import org.scalajs.ir.Names + import dotty.tools.sjs.ir.Names (toIRType(tpe): @unchecked) match { case jstpe.AnyType => NoTypeTest diff --git a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala index 3b25187b0acd..a229c9ea0e58 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala @@ -13,7 +13,7 @@ import dotty.tools.dotc.report import dotty.tools.dotc.util.{SourceFile, SourcePosition} import dotty.tools.dotc.util.Spans.Span -import org.scalajs.ir +import dotty.tools.sjs.ir /** Conversion utilities from dotty Positions to IR Positions. 
*/ class JSPositions()(using Context) { diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index 9b130e7d7804..d8ba1ab5dc2e 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -132,6 +132,7 @@ class Compiler { new ElimStaticThis, // Replace `this` references to static objects by global identifiers new CountOuterAccesses) :: // Identify outer accessors that can be dropped List(new DropOuterAccessors, // Drop unused outer accessors + new DropParentRefinements, // Drop parent refinements from a template new CheckNoSuperThis, // Check that supercalls don't contain references to `this` new Flatten, // Lift all inner classes to package scope new TransformWildcards, // Replace wildcards with default values @@ -151,7 +152,10 @@ class Compiler { List(new GenBCode) :: // Generate JVM bytecode Nil - var runId: Int = 1 + // TODO: Initially 0, so that the first nextRunId call would return InitialRunId == 1 + // Changing the initial runId from 1 to 0 makes the scala2-library-bootstrap fail to compile, + // when the underlying issue is fixed, please update dotc.profiler.RealProfiler.chromeTrace logic + private var runId: Int = 1 def nextRunId: Int = { runId += 1; runId } diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 11a0430480d9..50fd668c7696 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -339,10 +339,9 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint if phaseWillRun then Stats.trackTime(s"phase time ms/$phase") { val start = System.currentTimeMillis - val profileBefore = profiler.beforePhase(phase) - try units = phase.runOn(units) - catch case _: InterruptedException => cancelInterrupted() - profiler.afterPhase(phase, profileBefore) + profiler.onPhase(phase): + try units = phase.runOn(units) + catch case _: InterruptedException => cancelInterrupted() if (ctx.settings.Xprint.value.containsPhase(phase)) for (unit <- units) def printCtx(unit: CompilationUnit) = phase.printingContext( diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index e1a6b97fc7d3..e66c71731b4f 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1605,9 +1605,10 @@ object desugar { /** Translate tuple expressions * - * () ==> () - * (t) ==> t - * (t1, ..., tN) ==> TupleN(t1, ..., tN) + * () ==> () + * (t) ==> t + * (t1, ..., tN) ==> TupleN(t1, ..., tN) + * (n1 = t1, ..., nN = tN) ==> NamedTuple.build[(n1, ..., nN)]()(TupleN(t1, ..., tN)) */ def tuple(tree: Tuple, pt: Type)(using Context): Tree = var elems = checkWellFormedTupleElems(tree.trees) diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 72a051ea8154..6ef33d24f8be 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -441,12 +441,10 @@ private sealed trait YSettings: val YlegacyLazyVals: Setting[Boolean] = BooleanSetting(ForkSetting, "Ylegacy-lazy-vals", "Use legacy (pre 3.3.0) implementation of lazy vals.") val YcompileScala2Library: Setting[Boolean] = BooleanSetting(ForkSetting, "Ycompile-scala2-library", "Used when compiling the Scala 2 standard library.") val YprofileEnabled: Setting[Boolean] = BooleanSetting(ForkSetting, 
"Yprofile-enabled", "Enable profiling.") - val YprofileDestination: Setting[String] = StringSetting(ForkSetting, "Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "") - //.withPostSetHook( _ => YprofileEnabled.value = true ) - val YprofileExternalTool: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. Generally only useful for a single phase.", "typer") - //.withPostSetHook( _ => YprofileEnabled.value = true ) - val YprofileRunGcBetweenPhases: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_") - //.withPostSetHook( _ => YprofileEnabled.value = true ) + val YprofileDestination: Setting[String] = StringSetting(ForkSetting, "Yprofile-destination", "file", "Where to send profiling output - specify a file, default is to the console.", "", depends = List(YprofileEnabled -> true)) + val YprofileExternalTool: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. Generally only useful for a single phase.", "typer", depends = List(YprofileEnabled -> true)) + val YprofileRunGcBetweenPhases: Setting[List[String]] = PhasesSetting(ForkSetting, "Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_", depends = List(YprofileEnabled -> true)) + val YprofileTrace: Setting[String] = StringSetting(ForkSetting, "Yprofile-trace", "file", s"Capture trace of compilation in JSON Chrome Trace format to the specified file. This option requires ${YprofileEnabled.name}. The output file can be visualized using https://ui.perfetto.dev/.", "", depends = List(YprofileEnabled -> true)) val YbestEffort: Setting[Boolean] = BooleanSetting(ForkSetting, "Ybest-effort", "Enable best-effort compilation attempting to produce betasty to the META-INF/best-effort directory, regardless of errors, as part of the pickler phase.") val YwithBestEffortTasty: Setting[Boolean] = BooleanSetting(ForkSetting, "Ywith-best-effort-tasty", "Allow to compile using best-effort tasty files. If such file is used, the compiler will stop after the pickler phase.") diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index a5fc6a64aa45..f85f2cc57de4 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -73,6 +73,11 @@ object Settings: def validateSettingString(name: String): Unit = assert(settingCharacters.matches(name), s"Setting string $name contains invalid characters") + /** List of setting-value pairs that are required for another setting to be valid. + * For example, `s = Setting(..., depends = List(YprofileEnabled -> true))` + * means that `s` requires `YprofileEnabled` to be set to `true`. 
+ */ + type SettingDependencies = List[(Setting[?], Any)] case class Setting[T: ClassTag] private[Settings] ( category: SettingCategory, @@ -83,7 +88,7 @@ object Settings: choices: Option[Seq[?]] = None, prefix: Option[String] = None, aliases: List[String] = Nil, - depends: List[(Setting[?], Any)] = Nil, + depends: SettingDependencies = Nil, ignoreInvalidArgs: Boolean = false, preferPrevious: Boolean = false, propertyClass: Option[Class[?]] = None, @@ -385,8 +390,8 @@ object Settings: def BooleanSetting(category: SettingCategory, name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil, preferPrevious: Boolean = false, deprecation: Option[Deprecation] = None, ignoreInvalidArgs: Boolean = false): Setting[Boolean] = publish(Setting(category, prependName(name), descr, initialValue, aliases = aliases, preferPrevious = preferPrevious, deprecation = deprecation, ignoreInvalidArgs = ignoreInvalidArgs)) - def StringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[String] = - publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases, deprecation = deprecation)) + def StringSetting(category: SettingCategory, name: String, helpArg: String, descr: String, default: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None, depends: SettingDependencies = Nil): Setting[String] = + publish(Setting(category, prependName(name), descr, default, helpArg, aliases = aliases, deprecation = deprecation, depends = depends)) def ChoiceSetting(category: SettingCategory, name: String, helpArg: String, descr: String, choices: List[String], default: String, aliases: List[String] = Nil, legacyArgs: Boolean = false, deprecation: Option[Deprecation] = None): Setting[String] = publish(Setting(category, prependName(name), descr, default, helpArg, Some(choices), aliases = aliases, legacyArgs = legacyArgs, deprecation = deprecation)) @@ -412,8 +417,8 @@ object Settings: def PathSetting(category: SettingCategory, name: String, descr: String, default: String, aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[String] = publish(Setting(category, prependName(name), descr, default, aliases = aliases, deprecation = deprecation)) - def PhasesSetting(category: SettingCategory, name: String, descr: String, default: String = "", aliases: List[String] = Nil, deprecation: Option[Deprecation] = None): Setting[List[String]] = - publish(Setting(category, prependName(name), descr, if (default.isEmpty) Nil else List(default), aliases = aliases, deprecation = deprecation)) + def PhasesSetting(category: SettingCategory, name: String, descr: String, default: String = "", aliases: List[String] = Nil, deprecation: Option[Deprecation] = None, depends: SettingDependencies = Nil): Setting[List[String]] = + publish(Setting(category, prependName(name), descr, if (default.isEmpty) Nil else List(default), aliases = aliases, deprecation = deprecation, depends = depends)) def PrefixSetting(category: SettingCategory, name0: String, descr: String, deprecation: Option[Deprecation] = None): Setting[List[String]] = val name = prependName(name0) diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index 388720e7f3f4..d69c7408d0b1 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -769,6 +769,7 @@ 
object Contexts { .updated(settingsStateLoc, settingsGroup.defaultState) .updated(notNullInfosLoc, Nil) .updated(compilationUnitLoc, NoCompilationUnit) + .updated(profilerLoc, Profiler.NoOp) c._searchHistory = new SearchRoot c._gadtState = GadtState(GadtConstraint.empty) c diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index 60a7555456bf..816b28177333 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -961,7 +961,7 @@ object Denotations { } def staleSymbolError(using Context): Nothing = - if symbol.isPackageObject && ctx.run != null && ctx.run.nn.isCompilingSuspended + if symbol.lastKnownDenotation.isPackageObject && ctx.run != null && ctx.run.nn.isCompilingSuspended then throw StaleSymbolTypeError(symbol) else throw StaleSymbolException(staleSymbolMsg) diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index 85df3f9f2c18..015cf6fc0f2c 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -370,7 +370,7 @@ object Phases { // Test that we are in a state where we need to check if the phase should be skipped for a java file, // this prevents checking the expensive `unit.typedAsJava` unnecessarily. val doCheckJava = skipIfJava && !isAfterLastJavaPhase - for unit <- units do + for unit <- units do ctx.profiler.onUnit(this, unit): given unitCtx: Context = runCtx.fresh.setPhase(this.start).setCompilationUnit(unit).withRootImports if ctx.run.enterUnit(unit) then try diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 906e74735097..f54b8a62fa25 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -1162,10 +1162,10 @@ object SymDenotations { final def enclosingClass(using Context): Symbol = { def enclClass(sym: Symbol, skip: Boolean): Symbol = { def newSkip = sym.is(JavaStaticTerm) - if (!sym.exists) + if !sym.exists then NoSymbol - else if (sym.isClass) - if (skip) enclClass(sym.owner, newSkip) else sym + else if sym.isClass then + if skip || sym.isRefinementClass then enclClass(sym.owner, newSkip) else sym else enclClass(sym.owner, skip || newSkip) } diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 51e6a5e6138a..5690720a1b3f 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -8,7 +8,7 @@ import java.nio.channels.ClosedByInterruptException import scala.util.control.NonFatal import dotty.tools.dotc.classpath.FileUtils.{hasTastyExtension, hasBetastyExtension} -import dotty.tools.io.{ ClassPath, ClassRepresentation, AbstractFile } +import dotty.tools.io.{ ClassPath, ClassRepresentation, AbstractFile, NoAbstractFile } import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions import Contexts.*, Symbols.*, Flags.*, SymDenotations.*, Types.*, Scopes.*, Names.* @@ -333,7 +333,15 @@ abstract class SymbolLoader extends LazyType { self => def description(using Context): String = s"proxy to ${self.description}" } - override def complete(root: SymDenotation)(using Context): Unit = { + private inline def profileCompletion[T](root: SymDenotation)(inline body: T)(using Context): T = { + val sym = root.symbol + def 
associatedFile = root.symbol.associatedFile match + case file: AbstractFile => file + case _ => NoAbstractFile + ctx.profiler.onCompletion(sym, associatedFile)(body) + } + + override def complete(root: SymDenotation)(using Context): Unit = profileCompletion(root) { def signalError(ex: Exception): Unit = { if (ctx.debug) ex.printStackTrace() val msg = ex.getMessage() diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index ce4956e6e847..9491bdab9de8 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -24,11 +24,16 @@ enum SourceLanguage: object SourceLanguage: /** The language in which `sym` was defined. */ def apply(sym: Symbol)(using Context): SourceLanguage = - if sym.is(JavaDefined) then + // We might be using this method while recalculating the denotation, + // so let's use `lastKnownDenotation`. + // This is ok as the source of the symbol and whether it is inline should + // not change between runs/phases. + val denot = sym.lastKnownDenotation + if denot.is(JavaDefined) then SourceLanguage.Java // Scala 2 methods don't have Inline set, except for the ones injected with `patchStdlibClass` // which are really Scala 3 methods. - else if sym.isClass && sym.is(Scala2x) || (sym.maybeOwner.is(Scala2x) && !sym.is(Inline)) then + else if denot.isClass && denot.is(Scala2x) || (denot.maybeOwner.lastKnownDenotation.is(Scala2x) && !denot.is(Inline)) then SourceLanguage.Scala2 else SourceLanguage.Scala3 diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index cf73bda0d131..8a9d44cb8d25 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -2494,7 +2494,10 @@ object Types extends TypeUtils { } private def disambiguate(d: Denotation)(using Context): Denotation = - disambiguate(d, currentSignature, currentSymbol.targetName) + // this method might be triggered while the denotation is already being recomputed + // in NamedType, so it's better to use lastKnownDenotation instead, as targetName + // should not change between phases/runs + disambiguate(d, currentSignature, currentSymbol.lastKnownDenotation.targetName) private def disambiguate(d: Denotation, sig: Signature | Null, target: Name)(using Context): Denotation = if (sig != null) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 5a3be6505715..6390d8d32d3f 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -4684,7 +4684,7 @@ object Parsers { * | Expr1 * | */ - def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders { + def blockStatSeq(outermost: Boolean = false): List[Tree] = checkNoEscapingPlaceholders { val stats = new ListBuffer[Tree] while var empty = false @@ -4696,7 +4696,11 @@ object Parsers { stats += closure(in.offset, Location.InBlock, modifiers(BitSet(IMPLICIT))) else if isIdent(nme.extension) && followingIsExtension() then stats += extension() - else if isDefIntro(localModifierTokens, excludedSoftModifiers = Set(nme.`opaque`)) then + else if isDefIntro(localModifierTokens, + excludedSoftModifiers = + // Allow opaque definitions at outermost level in REPL. 
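+              // For example, the REPL input `opaque type Logarithm = Double` is now
+              // parsed as a top-level definition instead of being excluded here.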
+ if outermost && ctx.mode.is(Mode.Interactive) + then Set.empty else Set(nme.`opaque`)) then stats +++= localDef(in.offset) else empty = true diff --git a/compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala b/compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala new file mode 100644 index 000000000000..4950f439640f --- /dev/null +++ b/compiler/src/dotty/tools/dotc/profile/ChromeTrace.scala @@ -0,0 +1,191 @@ +// Scala 2 compiler backport of https://github.com/scala/scala/pull/7364 +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package dotty.tools.dotc.profile + +import scala.language.unsafeNulls + +import java.io.Closeable +import java.lang.management.ManagementFactory +import java.nio.file.{Files, Path} +import java.util +import java.util.concurrent.TimeUnit + +import scala.collection.mutable + +object ChromeTrace { + private object EventType { + final val Start = "B" + final val Instant = "I" + final val End = "E" + final val Complete = "X" + + final val Counter = "C" + + final val AsyncStart = "b" + final val AsyncInstant = "n" + final val AsyncEnd = "e" + } +} + +/** Allows writing a subset of captrue traces based on https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview# + * Can be visualized using https://ui.perfetto.dev/, Chrome's about://tracing (outdated) or the tooling in https://www.google.com.au/search?q=catapult+tracing&oq=catapult+tracing+&aqs=chrome..69i57.3974j0j4&sourceid=chrome&ie=UTF-8 */ +final class ChromeTrace(f: Path) extends Closeable { + import ChromeTrace.EventType + private val traceWriter = FileUtils.newAsyncBufferedWriter(f) + private val context = mutable.Stack[JsonContext](TopContext) + private val tidCache = new ThreadLocal[String]() { + @annotation.nowarn("cat=deprecation") + override def initialValue(): String = "%05d".format(Thread.currentThread().getId()) + } + objStart() + fld("traceEvents") + context.push(ValueContext) + arrStart() + traceWriter.newLine() + + private val pid = ManagementFactory.getRuntimeMXBean().getName().replaceAll("@.*", "") + + override def close(): Unit = { + arrEnd() + objEnd() + context.pop() + tidCache.remove() + traceWriter.close() + } + + def traceDurationEvent(name: String, startNanos: Long, durationNanos: Long, tid: String = this.tid(), pidSuffix: String = ""): Unit = { + val durationMicros = nanosToMicros(durationNanos) + val startMicros = nanosToMicros(startNanos) + objStart() + str("cat", "scalac") + str("name", name) + str("ph", EventType.Complete) + str("tid", tid) + writePid(pidSuffix) + lng("ts", startMicros) + lng("dur", durationMicros) + objEnd() + traceWriter.newLine() + } + + private def writePid(pidSuffix: String) = { + if (pidSuffix == "") + str("pid", pid) + else + str2("pid", pid, "-", pidSuffix) + } + + def traceCounterEvent(name: String, counterName: String, count: Long, processWide: Boolean): Unit = { + objStart() + str("cat", "scalac") + str("name", name) + str("ph", EventType.Counter) + str("tid", tid()) + writePid(pidSuffix = if (processWide) "" else tid()) + lng("ts", microTime()) + fld("args") + objStart() + lng(counterName, count) + objEnd() + objEnd() + traceWriter.newLine() + } + + def traceDurationEventStart(cat: String, name: String, colour: String = "", pidSuffix: String = tid()): Unit = 
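+    // Emits a Chrome-trace "B" (begin) event; the matching traceDurationEventEnd call emits
+    // the "E" (end) event that closes the duration. An emitted event looks roughly like
+    // {"cat":"phase","name":"typer","ph":"B","pid":"12345","tid":"00001","ts":123456}
+    // (field names follow the writes below; the values here are purely illustrative).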
traceDurationEventStartEnd(EventType.Start, cat, name, colour, pidSuffix) + def traceDurationEventEnd(cat: String, name: String, colour: String = "", pidSuffix: String = tid()): Unit = traceDurationEventStartEnd(EventType.End, cat, name, colour, pidSuffix) + + private def traceDurationEventStartEnd(eventType: String, cat: String, name: String, colour: String, pidSuffix: String = ""): Unit = { + objStart() + str("cat", cat) + str("name", name) + str("ph", eventType) + writePid(pidSuffix) + str("tid", tid()) + lng("ts", microTime()) + if (colour != "") { + str("cname", colour) + } + objEnd() + traceWriter.newLine() + } + + private def tid(): String = tidCache.get() + + private def nanosToMicros(t: Long): Long = TimeUnit.NANOSECONDS.toMicros(t) + + private def microTime(): Long = nanosToMicros(System.nanoTime()) + + private sealed abstract class JsonContext + private case class ArrayContext(var first: Boolean) extends JsonContext + private case class ObjectContext(var first: Boolean) extends JsonContext + private case object ValueContext extends JsonContext + private case object TopContext extends JsonContext + + private def str(name: String, value: String): Unit = { + fld(name) + traceWriter.write("\"") + traceWriter.write(value) // This assumes no escaping is needed + traceWriter.write("\"") + } + private def str2(name: String, value: String, valueContinued1: String, valueContinued2: String): Unit = { + fld(name) + traceWriter.write("\"") + traceWriter.write(value) // This assumes no escaping is needed + traceWriter.write(valueContinued1) // This assumes no escaping is needed + traceWriter.write(valueContinued2) // This assumes no escaping is needed + traceWriter.write("\"") + } + private def lng(name: String, value: Long): Unit = { + fld(name) + traceWriter.write(String.valueOf(value)) + traceWriter.write("") + } + private def objStart(): Unit = { + context.top match { + case ac @ ArrayContext(first) => + if (first) ac.first = false + else traceWriter.write(",") + case _ => + } + context.push(ObjectContext(true)) + traceWriter.write("{") + } + private def objEnd(): Unit = { + traceWriter.write("}") + context.pop() + } + private def arrStart(): Unit = { + traceWriter.write("[") + context.push(ArrayContext(true)) + } + private def arrEnd(): Unit = { + traceWriter.write("]") + context.pop() + } + + private def fld(name: String) = { + val topContext = context.top + topContext match { + case oc @ ObjectContext(first) => + if (first) oc.first = false + else traceWriter.write(",") + case context => + throw new IllegalStateException("Wrong context: " + context) + } + traceWriter.write("\"") + traceWriter.write(name) + traceWriter.write("\"") + traceWriter.write(":") + } +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/profile/FileUtils.scala b/compiler/src/dotty/tools/dotc/profile/FileUtils.scala new file mode 100644 index 000000000000..4aec428c05bf --- /dev/null +++ b/compiler/src/dotty/tools/dotc/profile/FileUtils.scala @@ -0,0 +1,204 @@ +// Scala 2 compiler backport of https://github.com/scala/scala/pull/7364 + +/* +* Scala (https://www.scala-lang.org) +* +* Copyright EPFL and Lightbend, Inc. +* +* Licensed under Apache License 2.0 +* (http://www.apache.org/licenses/LICENSE-2.0). +* +* See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package dotty.tools.dotc.profile + +import scala.language.unsafeNulls + +import java.io.{BufferedWriter, IOException, OutputStreamWriter, Writer} +import java.nio.CharBuffer +import java.nio.charset.{Charset, CharsetEncoder, StandardCharsets} +import java.nio.file.{Files, OpenOption, Path} +import java.util.concurrent.LinkedBlockingQueue +import java.util.concurrent.atomic.AtomicBoolean + + +import scala.concurrent.duration.Duration +import scala.concurrent.{Await, Promise} +import scala.util.{Failure, Success} +import scala.annotation.internal.sharable + +object FileUtils { + def newAsyncBufferedWriter(path: Path, charset: Charset = StandardCharsets.UTF_8.nn, options: Array[OpenOption] = NO_OPTIONS, threadsafe: Boolean = false): LineWriter = { + val encoder: CharsetEncoder = charset.newEncoder + val writer = new OutputStreamWriter(Files.newOutputStream(path, options: _*), encoder) + newAsyncBufferedWriter(new BufferedWriter(writer), threadsafe) + } + def newAsyncBufferedWriter(underlying: Writer, threadsafe: Boolean): LineWriter = { + val async = new AsyncBufferedWriter(underlying) + if (threadsafe) new ThreadsafeWriter(async) else async + } + private val NO_OPTIONS = new Array[OpenOption](0) + + sealed abstract class LineWriter extends Writer { + def newLine(): Unit + } + private class ThreadsafeWriter(val underlying: AsyncBufferedWriter) extends LineWriter { + lock = underlying + override def write(c: Int): Unit = + lock.synchronized (underlying.write(c)) + + override def write(cbuf: Array[Char]): Unit = + lock.synchronized (underlying.write(cbuf)) + + override def write(cbuf: Array[Char], off: Int, len: Int): Unit = + lock.synchronized (underlying.write(cbuf, off, len)) + + override def write(str: String): Unit = + lock.synchronized (underlying.write(str)) + + override def write(str: String, off: Int, len: Int): Unit = + lock.synchronized (underlying.write(str, off, len)) + + override def flush(): Unit = + lock.synchronized (underlying.flush()) + + override def close(): Unit = + lock.synchronized (underlying.close()) + + override def newLine(): Unit = + lock.synchronized (underlying.newLine()) + + } + + private object AsyncBufferedWriter { + @sharable private val Close = CharBuffer.allocate(0) + @sharable private val Flush = CharBuffer.allocate(0) + } + private class AsyncBufferedWriter(val underlying: Writer, bufferSize : Int = 4096) extends LineWriter { + private var current: CharBuffer = allocate + override def write(c: Int): Unit = super.write(c) + private def flushAsync(): Unit = { + background.ensureProcessed(current) + current = allocate + } +// allocate or reuse a CharArray which is guaranteed to have a backing array + private def allocate: CharBuffer = { + val reused = background.reuseBuffer + if (reused eq null) CharBuffer.allocate(bufferSize) + else { + //we don't care about race conditions + background.reuseBuffer = null + reused.clear() + reused + } + } + + override def write(cbuf: Array[Char], initialOffset: Int, initialLength: Int): Unit = { + var offset = initialOffset + var length = initialLength + while (length > 0) { + val capacity = current.remaining() + if (length <= capacity) { + current.put(cbuf, offset, length) + length = 0 + } else { + current.put(cbuf, offset, capacity) + flushAsync() + length -= capacity + offset += capacity + } + } + } + + override def write(s: String, initialOffset: Int, initialLength: Int): Unit = { + var offset = initialOffset + var length = initialLength + while (length > 0) { + val capacity = current.remaining() + if (length <= 
capacity) { + current.put(s, offset, offset + length) + length = 0 + } else { + current.put(s, offset, offset + capacity) + flushAsync() + length -= capacity + offset += capacity + } + } + } + + def newLine(): Unit = write(scala.util.Properties.lineSeparator) + + /** slightly breaks the flush contract in that the flush is not complete when the method returns */ + override def flush(): Unit = { + flushAsync() + } + + override def close(): Unit = { + background.ensureProcessed(current) + background.ensureProcessed(AsyncBufferedWriter.Close) + current = null + Await.result(background.asyncStatus.future, Duration.Inf) + underlying.close() + } + private object background extends Runnable{ + + import scala.concurrent.ExecutionContext.Implicits.global + + private val pending = new LinkedBlockingQueue[CharBuffer] + //a failure detected will case an Failure, Success indicates a close + val asyncStatus = Promise[Unit]() + private val scheduled = new AtomicBoolean + @volatile var reuseBuffer: CharBuffer = _ + + def ensureProcessed(buffer: CharBuffer): Unit = { + if (asyncStatus.isCompleted) { + asyncStatus.future.value.get match { + case Success(()) => throw new IllegalStateException("closed") + case Failure(t) => throw new IOException("async failure", t) + } + } + + //order is essential - add to the queue before the CAS + pending.add(buffer) + if (scheduled.compareAndSet(false, true)) { + global.execute(background) + } + } + + def run(): Unit = { + try { + while (!pending.isEmpty) { + val next = pending.poll() + if (next eq AsyncBufferedWriter.Flush) { + underlying.flush() + } else if (next eq AsyncBufferedWriter.Close) { + underlying.flush() + underlying.close() + asyncStatus.trySuccess(()) + } else { + val array = next.array() + next.flip() + underlying.write(array, next.arrayOffset() + next.position(), next.limit()) + reuseBuffer = next + } + } + } catch { + case t: Throwable => + asyncStatus.tryFailure(t) + throw t + } + finally scheduled.set(false) + + //we are not scheduled any more + //as a last check ensure that we didnt race with an addition to the queue + //order is essential - queue is checked before CAS + if ((!pending.isEmpty) && scheduled.compareAndSet(false, true)) { + global.execute(background) + } + } + } + } +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/profile/Profiler.scala b/compiler/src/dotty/tools/dotc/profile/Profiler.scala index a13c9d41b529..69a806215ddd 100644 --- a/compiler/src/dotty/tools/dotc/profile/Profiler.scala +++ b/compiler/src/dotty/tools/dotc/profile/Profiler.scala @@ -4,6 +4,7 @@ import scala.annotation.* import scala.language.unsafeNulls import java.io.{FileWriter, PrintWriter} +import java.nio.file.Paths import java.lang.management.{ManagementFactory, GarbageCollectorMXBean, RuntimeMXBean, MemoryMXBean, ClassLoadingMXBean, CompilationMXBean} import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicInteger @@ -12,8 +13,15 @@ import javax.management.{Notification, NotificationEmitter, NotificationListener import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.core.Contexts.* +import dotty.tools.dotc.CompilationUnit +import dotty.tools.dotc.core.Types.Type +import dotty.tools.dotc.core.Symbols.{Symbol, NoSymbol} +import dotty.tools.dotc.core.Flags +import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions import dotty.tools.io.AbstractFile import annotation.internal.sharable +import dotty.tools.dotc.core.Periods.InitialRunId +import scala.collection.mutable.UnrolledBuffer object Profiler { def 
apply()(using Context): Profiler = @@ -25,14 +33,19 @@ object Profiler { new RealProfiler(reporter) } - private[profile] val emptySnap: ProfileSnap = ProfileSnap(0, "", 0, 0, 0, 0, 0, 0) + final def NoOp: Profiler = NoOpProfiler + + private[profile] val emptySnap: ProfileSnap = ProfileSnap(0, "", 0, 0, 0, 0, 0, 0, 0, 0) } -case class GcEventData(pool:String, reportTimeNs: Long, gcStartMillis:Long, gcEndMillis:Long, name:String, action:String, cause:String, threads:Long) +case class GcEventData(pool:String, reportTimeNs: Long, gcStartMillis:Long, gcEndMillis:Long, durationMillis: Long, name:String, action:String, cause:String, threads:Long){ + val endNanos = System.nanoTime() +} case class ProfileSnap(threadId: Long, threadName: String, snapTimeNanos : Long, idleTimeNanos:Long, cpuTimeNanos: Long, userTimeNanos: Long, - allocatedBytes:Long, heapBytes:Long) { + allocatedBytes:Long, heapBytes:Long, + totalClassesLoaded: Long, totalJITCompilationTime: Long) { def updateHeap(heapBytes:Long): ProfileSnap = copy(heapBytes = heapBytes) } @@ -66,22 +79,63 @@ case class ProfileRange(start: ProfileSnap, end:ProfileSnap, phase:Phase, purpos def retainedHeapMB: Double = toMegaBytes(end.heapBytes - start.heapBytes) } +private opaque type TracedEventId <: String = String +private object TracedEventId: + def apply(stringValue: String): TracedEventId = stringValue + final val Empty: TracedEventId = "" + sealed trait Profiler { def finished(): Unit - def beforePhase(phase: Phase): ProfileSnap - - def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit + inline def onPhase[T](phase: Phase)(inline body: T): T = + val (event, snapshot) = beforePhase(phase) + try body + finally afterPhase(event, phase, snapshot) + protected final val EmptyPhaseEvent = (TracedEventId.Empty, Profiler.emptySnap) + protected def beforePhase(phase: Phase): (TracedEventId, ProfileSnap) = EmptyPhaseEvent + protected def afterPhase(event: TracedEventId, phase: Phase, profileBefore: ProfileSnap): Unit = () + + inline def onUnit[T](phase: Phase, unit: CompilationUnit)(inline body: T): T = + val event = beforeUnit(phase, unit) + try body + finally afterUnit(event) + protected def beforeUnit(phase: Phase, unit: CompilationUnit): TracedEventId = TracedEventId.Empty + protected def afterUnit(event: TracedEventId): Unit = () + + inline def onTypedDef[T](sym: Symbol)(inline body: T): T = + val event = beforeTypedDef(sym) + try body + finally afterTypedDef(event) + protected def beforeTypedDef(sym: Symbol): TracedEventId = TracedEventId.Empty + protected def afterTypedDef(token: TracedEventId): Unit = () + + inline def onImplicitSearch[T](pt: Type)(inline body: T): T = + val event = beforeImplicitSearch(pt) + try body + finally afterImplicitSearch(event) + protected def beforeImplicitSearch(pt: Type): TracedEventId = TracedEventId.Empty + protected def afterImplicitSearch(event: TracedEventId): Unit = () + + inline def onMacroSplice[T](macroSym: Symbol)(inline body: T): T = + val event = beforeMacroSplice(macroSym) + try body + finally afterMacroSplice(event) + protected def beforeMacroSplice(macroSym: Symbol): TracedEventId = TracedEventId.Empty + protected def afterMacroSplice(event: TracedEventId): Unit = () + + inline def onCompletion[T](root: Symbol, associatedFile: => AbstractFile)(inline body: T): T = + val (event, completionName) = beforeCompletion(root, associatedFile) + try body + finally afterCompletion(event, completionName) + protected final val EmptyCompletionEvent = (TracedEventId.Empty, "") + protected def 
beforeCompletion(root: Symbol, associatedFile: => AbstractFile): (TracedEventId, String) = EmptyCompletionEvent + protected def afterCompletion(event: TracedEventId, completionName: String): Unit = () } private [profile] object NoOpProfiler extends Profiler { - - override def beforePhase(phase: Phase): ProfileSnap = Profiler.emptySnap - - override def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit = () - override def finished(): Unit = () } + private [profile] object RealProfiler { import scala.jdk.CollectionConverters.* val runtimeMx: RuntimeMXBean = ManagementFactory.getRuntimeMXBean @@ -92,17 +146,6 @@ private [profile] object RealProfiler { val threadMx: ExtendedThreadMxBean = ExtendedThreadMxBean.proxy if (threadMx.isThreadCpuTimeSupported) threadMx.setThreadCpuTimeEnabled(true) private val idGen = new AtomicInteger() -} - -private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) extends Profiler with NotificationListener { - def completeBackground(threadRange: ProfileRange): Unit = - reporter.reportBackground(this, threadRange) - - def outDir: AbstractFile = ctx.settings.outputDir.value - - val id: Int = RealProfiler.idGen.incrementAndGet() - - private val mainThread = Thread.currentThread() @nowarn("cat=deprecation") private[profile] def snapThread(idleTimeNanos: Long): ProfileSnap = { @@ -117,13 +160,47 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) cpuTimeNanos = threadMx.getCurrentThreadCpuTime, userTimeNanos = threadMx.getCurrentThreadUserTime, allocatedBytes = threadMx.getThreadAllocatedBytes(Thread.currentThread().getId), - heapBytes = readHeapUsage() + heapBytes = readHeapUsage(), + totalClassesLoaded = classLoaderMx.getTotalLoadedClassCount, + totalJITCompilationTime = compileMx.getTotalCompilationTime ) } private def readHeapUsage() = RealProfiler.memoryMx.getHeapMemoryUsage.getUsed +} + +private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) extends Profiler with NotificationListener { + val id: Int = RealProfiler.idGen.incrementAndGet() + private val mainThread = Thread.currentThread() + private val gcEvents = UnrolledBuffer[GcEventData]() + private var nextAfterUnitSnap: Long = System.nanoTime() + + private final val GcThreadId = "GC" + + enum Category: + def name: String = this.toString().toLowerCase() + case Run, Phase, File, TypeCheck, Implicit, Macro, Completion + private [profile] val chromeTrace = + if ctx.settings.YprofileTrace.isDefault + then null + else + val filename = ctx.settings.YprofileTrace.value + // Compilation units requiring multi-stage compilation (macros) would create a new profiler instance. + // We need to store the traces in a separate file to prevent overwriting its content. + // Alternatives: sharing the ChromeTrace instance between all runs / manual concatenation after all runs are done + // FIXME: The first assigned runId is equal to 2 instead of 1 (InitialRunId).
+ // Fix me when bug described in Compiler.runId is resolved by removing +/- 1 adjustments + val suffix = if ctx.runId > InitialRunId + 1 then s".${ctx.runId - 1}" else "" + ChromeTrace(Paths.get(s"$filename$suffix")) + + private val compilerRunEvent: TracedEventId = traceDurationStart(Category.Run, s"scalac-$id") + + def completeBackground(threadRange: ProfileRange): Unit = + reporter.reportBackground(this, threadRange) + + def outDir: AbstractFile = ctx.settings.outputDir.value @nowarn - private def doGC: Unit = { + private def doGC(): Unit = { System.gc() System.runFinalization() } @@ -142,6 +219,15 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) case gc => } reporter.close(this) + if chromeTrace != null then + traceDurationEnd(Category.Run, compilerRunEvent) + for gcEvent <- gcEvents + do { + val durationNanos = TimeUnit.MILLISECONDS.toNanos(gcEvent.durationMillis) + val startNanos = gcEvent.endNanos - durationNanos + chromeTrace.traceDurationEvent(gcEvent.name, startNanos, durationNanos, tid = GcThreadId) + } + chromeTrace.close() } @@ -150,10 +236,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) import java.lang.{Integer => jInt} val reportNs = System.nanoTime() val data = notification.getUserData - val seq = notification.getSequenceNumber - val message = notification.getMessage val tpe = notification.getType - val time= notification.getTimeStamp data match { case cd: CompositeData if tpe == "com.sun.management.gc.notification" => val name = cd.get("gcName").toString @@ -164,49 +247,127 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) val startTime = info.get("startTime").asInstanceOf[jLong].longValue() val endTime = info.get("endTime").asInstanceOf[jLong].longValue() val threads = info.get("GcThreadCount").asInstanceOf[jInt].longValue() - reporter.reportGc(GcEventData("", reportNs, startTime, endTime, name, action, cause, threads)) + val gcEvent = GcEventData("", reportNs, startTime, endTime, duration, name, action, cause, threads) + synchronized { gcEvents += gcEvent } + reporter.reportGc(gcEvent) } } - override def afterPhase(phase: Phase, snapBefore: ProfileSnap): Unit = { + override def afterPhase(event: TracedEventId, phase: Phase, snapBefore: ProfileSnap): Unit = { assert(mainThread eq Thread.currentThread()) - val initialSnap = snapThread(0) + val initialSnap = RealProfiler.snapThread(0) if (ctx.settings.YprofileExternalTool.value.contains(phase.toString)) { println("Profile hook stop") ExternalToolHook.after() } val finalSnap = if (ctx.settings.YprofileRunGcBetweenPhases.value.contains(phase.toString)) { - doGC - initialSnap.updateHeap(readHeapUsage()) + doGC() + initialSnap.updateHeap(RealProfiler.readHeapUsage()) } else initialSnap - + traceDurationEnd(Category.Phase, event) + traceThreadSnapshotCounters() reporter.reportForeground(this, ProfileRange(snapBefore, finalSnap, phase, "", 0, Thread.currentThread)) } - override def beforePhase(phase: Phase): ProfileSnap = { + override def beforePhase(phase: Phase): (TracedEventId, ProfileSnap) = { assert(mainThread eq Thread.currentThread()) + traceThreadSnapshotCounters() + val eventId = traceDurationStart(Category.Phase, phase.phaseName) if (ctx.settings.YprofileRunGcBetweenPhases.value.contains(phase.toString)) - doGC + doGC() if (ctx.settings.YprofileExternalTool.value.contains(phase.toString)) { println("Profile hook start") ExternalToolHook.before() } - snapThread(0) + (eventId, RealProfiler.snapThread(0)) + } + + 
override def beforeUnit(phase: Phase, unit: CompilationUnit): TracedEventId = { + assert(mainThread eq Thread.currentThread()) + if chromeTrace != null then + traceThreadSnapshotCounters() + traceDurationStart(Category.File, unit.source.name) + else TracedEventId.Empty + } + + override def afterUnit(event: TracedEventId): Unit = { + assert(mainThread eq Thread.currentThread()) + if chromeTrace != null then + traceDurationEnd(Category.File, event) + traceThreadSnapshotCounters() } -} + private def traceThreadSnapshotCounters(initialSnap: => ProfileSnap = RealProfiler.snapThread(0)) = + if chromeTrace != null && System.nanoTime() > nextAfterUnitSnap then { + val snap = initialSnap + chromeTrace.traceCounterEvent("allocBytes", "allocBytes", snap.allocatedBytes, processWide = false) + chromeTrace.traceCounterEvent("heapBytes", "heapBytes", snap.heapBytes, processWide = true) + chromeTrace.traceCounterEvent("classesLoaded", "classesLoaded", snap.totalClassesLoaded, processWide = true) + chromeTrace.traceCounterEvent("jitCompilationTime", "jitCompilationTime", snap.totalJITCompilationTime, processWide = true) + chromeTrace.traceCounterEvent("userTime", "userTime", snap.userTimeNanos, processWide = false) + chromeTrace.traceCounterEvent("cpuTime", "cpuTime", snap.cpuTimeNanos, processWide = false) + chromeTrace.traceCounterEvent("idleTime", "idleTime", snap.idleTimeNanos, processWide = false) + nextAfterUnitSnap = System.nanoTime() + 10 * 1000 * 1000 + } -case class EventType(name: String) -object EventType { - //main thread with other tasks - val MAIN: EventType = EventType("main") - //other task ( background thread) - val BACKGROUND: EventType = EventType("background") - //total for compile - val GC: EventType = EventType("GC") + override def beforeTypedDef(sym: Symbol): TracedEventId = traceDurationStart(Category.TypeCheck, symbolName(sym)) + override def afterTypedDef(event: TracedEventId): Unit = traceDurationEnd(Category.TypeCheck, event) + + override def beforeImplicitSearch(pt: Type): TracedEventId = traceDurationStart(Category.Implicit, s"?[${symbolName(pt.typeSymbol)}]", colour = "yellow") + override def afterImplicitSearch(event: TracedEventId): Unit = traceDurationEnd(Category.Implicit, event, colour = "yellow") + + override def beforeMacroSplice(macroSym: Symbol): TracedEventId = traceDurationStart(Category.Macro, s"«${symbolName(macroSym)}»", colour = "olive") + override def afterMacroSplice(event: TracedEventId): Unit = traceDurationEnd(Category.Macro, event, colour = "olive") + + override def beforeCompletion(root: Symbol, associatedFile: => AbstractFile): (TracedEventId, String) = + if chromeTrace == null + then EmptyCompletionEvent + else + val completionName = this.completionName(root, associatedFile) + val event = TracedEventId(associatedFile.name) + chromeTrace.traceDurationEventStart(Category.Completion.name, "↯", colour = "thread_state_sleeping") + chromeTrace.traceDurationEventStart(Category.File.name, event) + chromeTrace.traceDurationEventStart(Category.Completion.name, completionName) + (event, completionName) + + override def afterCompletion(event: TracedEventId, completionName: String): Unit = + if chromeTrace != null + then + chromeTrace.traceDurationEventEnd(Category.Completion.name, completionName) + chromeTrace.traceDurationEventEnd(Category.File.name, event) + chromeTrace.traceDurationEventEnd(Category.Completion.name, "↯", colour = "thread_state_sleeping") + + private inline def traceDurationStart(category: Category, inline eventName: String, colour: String = 
""): TracedEventId = + if chromeTrace == null + then TracedEventId.Empty + else + val event = TracedEventId(eventName) + chromeTrace.traceDurationEventStart(category.name, event, colour) + event + + private inline def traceDurationEnd(category: Category, event: TracedEventId, colour: String = ""): Unit = + if chromeTrace != null then + chromeTrace.traceDurationEventEnd(category.name, event, colour) + + private def symbolName(sym: Symbol): String = s"${sym.showKind} ${sym.showName}" + private def completionName(root: Symbol, associatedFile: AbstractFile): String = + def isTopLevel = root.owner != NoSymbol && root.owner.is(Flags.Package) + if root.is(Flags.Package) || isTopLevel + then root.javaBinaryName + else + val enclosing = root.enclosingClass + s"${enclosing.javaBinaryName}::${root.name}" } +enum EventType(name: String): + // main thread with other tasks + case MAIN extends EventType("main") + // other task ( background thread) + case BACKGROUND extends EventType("background") + // total for compile + case GC extends EventType("GC") + sealed trait ProfileReporter { def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit @@ -259,9 +420,8 @@ class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { out.println(s"${EventType.GC},$start,${data.reportTimeNs},${data.gcStartMillis}, ${data.gcEndMillis},${data.name},${data.action},${data.cause},${data.threads}") } - override def close(profiler: RealProfiler): Unit = { - out.flush - out.close + out.flush() + out.close() } } diff --git a/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala b/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala index e3ea69d9be06..1a81153b9b08 100644 --- a/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala +++ b/compiler/src/dotty/tools/dotc/profile/ThreadPoolFactory.scala @@ -94,9 +94,9 @@ object ThreadPoolFactory { val data = new ThreadProfileData localData.set(data) - val profileStart = profiler.snapThread(0) + val profileStart = RealProfiler.snapThread(0) try worker.run finally { - val snap = profiler.snapThread(data.idleNs) + val snap = RealProfiler.snapThread(data.idleNs) val threadRange = ProfileRange(profileStart, snap, phase, shortId, data.taskCount, Thread.currentThread()) profiler.completeBackground(threadRange) } diff --git a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala index db523c879ea2..6d0a85b3ef0f 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ErrorMessageID.scala @@ -216,6 +216,7 @@ enum ErrorMessageID(val isActive: Boolean = true) extends java.lang.Enum[ErrorMe case FinalLocalDefID // errorNumber: 200 case NonNamedArgumentInJavaAnnotationID // errorNumber: 201 case QuotedTypeMissingID // errorNumber: 202 + case AmbiguousNamedTupleAssignmentID // errorNumber: 203 def errorNumber = ordinal - 1 diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index 97cd70113c2e..3b7fba1cb52d 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -3343,3 +3343,12 @@ final class QuotedTypeMissing(tpe: Type)(using Context) extends StagingMessage(Q |""" end QuotedTypeMissing + +final class AmbiguousNamedTupleAssignment(key: Name, value: untpd.Tree)(using Context) extends 
SyntaxMsg(AmbiguousNamedTupleAssignmentID): + override protected def msg(using Context): String = + i"""Ambiguous syntax: this is interpreted as a named tuple with one element, + |not as an assignment. + | + |To assign a value, use curly braces: `{${key} = ${value}}`.""" + + override protected def explain(using Context): String = "" diff --git a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala index 073086ac5e2c..e8a402068bfc 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala @@ -43,7 +43,7 @@ class CheckReentrant extends MiniPhase { requiredClass("scala.annotation.internal.unshared")) private val scalaJSIRPackageClass = new CtxLazy( - getPackageClassIfDefined("org.scalajs.ir")) + getPackageClassIfDefined("dotty.tools.sjs.ir")) def isIgnored(sym: Symbol)(using Context): Boolean = sym.hasAnnotation(sharableAnnot()) || diff --git a/compiler/src/dotty/tools/dotc/transform/DropParentRefinements.scala b/compiler/src/dotty/tools/dotc/transform/DropParentRefinements.scala new file mode 100644 index 000000000000..1960568dc505 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/DropParentRefinements.scala @@ -0,0 +1,35 @@ +package dotty.tools.dotc.transform + +import dotty.tools.dotc.transform.MegaPhase.MiniPhase +import dotty.tools.dotc.ast.tpd +import dotty.tools.dotc.core.Contexts.Context +import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer +import dotty.tools.dotc.typer.Typer + +object DropParentRefinements: + val name: String = "dropParentRefinements" + val description: String = "drop parent refinements from a template" + +/** Drop parent refinements from a template, as they are generated without + * an implementation. These refinements are usually required for tracked + * members with more specific types.
+ */ +class DropParentRefinements extends MiniPhase with IdentityDenotTransformer: + thisPhase => + import tpd.* + + override def phaseName: String = DropParentRefinements.name + + override def description: String = DropParentRefinements.description + + override def runsAfterGroupsOf: Set[String] = Set(CountOuterAccesses.name) + + override def changesMembers: Boolean = true // the phase drops parent refinements + + override def transformTemplate(tree: tpd.Template)(using Context): tpd.Tree = + val newBody = tree.body.filter(!_.hasAttachment(Typer.RefinementFromParent)) + tree.body.foreach { member => + if member.hasAttachment(Typer.RefinementFromParent) then + member.symbol.dropAfter(thisPhase) + } + cpy.Template(tree)(body = newBody) diff --git a/compiler/src/dotty/tools/dotc/transform/Getters.scala b/compiler/src/dotty/tools/dotc/transform/Getters.scala index 43289209d146..a58dffa04223 100644 --- a/compiler/src/dotty/tools/dotc/transform/Getters.scala +++ b/compiler/src/dotty/tools/dotc/transform/Getters.scala @@ -103,7 +103,7 @@ class Getters extends MiniPhase with SymTransformer { thisPhase => override def transformValDef(tree: ValDef)(using Context): Tree = val sym = tree.symbol if !sym.is(Method) then return tree - val getterDef = DefDef(sym.asTerm, tree.rhs).withSpan(tree.span) + val getterDef = DefDef(sym.asTerm, tree.rhs).withSpan(tree.span).withAttachmentsFrom(tree) if !sym.is(Mutable) then return getterDef ensureSetter(sym.asTerm) if !newSetters.contains(sym.setter) then return getterDef diff --git a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala index 482da0edb82b..359ec701d164 100644 --- a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala @@ -44,7 +44,10 @@ object ProtectedAccessors { /** Do we need a protected accessor for accessing sym from the current context's owner? 
*/ def needsAccessor(sym: Symbol)(using Context): Boolean = needsAccessorIfNotInSubclass(sym) && - !ctx.owner.enclosingClass.derivesFrom(sym.owner) + !needsAccessorIsSubclass(sym) + + def needsAccessorIsSubclass(sym: Symbol)(using Context): Boolean = + ctx.owner.enclosingClass.derivesFrom(sym.owner) } class ProtectedAccessors extends MiniPhase { diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala b/compiler/src/dotty/tools/dotc/transform/Splicer.scala index e42f997e7265..b5386d5bd1df 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala @@ -47,13 +47,14 @@ object Splicer { def splice(tree: Tree, splicePos: SrcPos, spliceExpansionPos: SrcPos, classLoader: ClassLoader)(using Context): Tree = tree match { case Quote(quotedTree, Nil) => quotedTree case _ => - val macroOwner = newSymbol(ctx.owner, nme.MACROkw, Macro | Synthetic, defn.AnyType, coord = tree.span) + val owner = ctx.owner + val macroOwner = newSymbol(owner, nme.MACROkw, Macro | Synthetic, defn.AnyType, coord = tree.span) try val sliceContext = SpliceScope.contextWithNewSpliceScope(splicePos.sourcePos).withOwner(macroOwner) inContext(sliceContext) { val oldContextClassLoader = Thread.currentThread().getContextClassLoader Thread.currentThread().setContextClassLoader(classLoader) - try { + try ctx.profiler.onMacroSplice(owner){ val interpreter = new SpliceInterpreter(splicePos, classLoader) // Some parts of the macro are evaluated during the unpickling performed in quotedExprToTree diff --git a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala index ce2b8fa591d8..5a63235fc3c0 100644 --- a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala @@ -195,7 +195,7 @@ class SuperAccessors(thisPhase: DenotTransformer) { * Otherwise, we need to go through an accessor, * which the implementing class will provide an implementation for. */ - if ctx.owner.enclosingClass.derivesFrom(sym.owner) then + if ProtectedAccessors.needsAccessorIsSubclass(sym) then if sym.is(JavaDefined) then report.error(em"${ctx.owner} accesses protected $sym inside a concrete trait method: use super.${sel.name} instead", sel.srcPos) sel diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 20b0099d82e2..9fb3c00c67c4 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -648,21 +648,38 @@ object SpaceEngine { // we get // <== refineUsingParent(NatT, class Succ, []) = Succ[NatT] // <== isSub(Succ[NatT] <:< Succ[Succ[]]) = false - def getAppliedClass(tp: Type): Type = tp match - case tp @ AppliedType(_: HKTypeLambda, _) => tp - case tp @ AppliedType(tycon: TypeRef, _) if tycon.symbol.isClass => tp + def getAppliedClass(tp: Type): (Type, List[Type]) = tp match + case tp @ AppliedType(_: HKTypeLambda, _) => (tp, Nil) + case tp @ AppliedType(tycon: TypeRef, _) if tycon.symbol.isClass => (tp, tp.args) case tp @ AppliedType(tycon: TypeProxy, _) => getAppliedClass(tycon.superType.applyIfParameterized(tp.args)) - case tp => tp - val tp = getAppliedClass(tpOriginal) - def getChildren(sym: Symbol): List[Symbol] = + case tp => (tp, Nil) + val (tp, typeArgs) = getAppliedClass(tpOriginal) + // This function is needed to get the arguments of the types that will be applied to the class. 
+ // This is necessary because if the arguments of the types contain Nothing, + // then this can affect whether the class will be taken into account during the exhaustiveness check + def getTypeArgs(parent: Symbol, child: Symbol, typeArgs: List[Type]): List[Type] = + val superType = child.typeRef.superType + if typeArgs.exists(_.isBottomType) && superType.isInstanceOf[ClassInfo] then + val parentClass = superType.asInstanceOf[ClassInfo].declaredParents.find(_.classSymbol == parent).get + val paramTypeMap = Map.from(parentClass.argTypes.map(_.typeSymbol).zip(typeArgs)) + val substArgs = child.typeRef.typeParamSymbols.map(param => paramTypeMap.getOrElse(param, WildcardType)) + substArgs + else Nil + def getChildren(sym: Symbol, typeArgs: List[Type]): List[Symbol] = sym.children.flatMap { child => if child eq sym then List(sym) // i3145: sealed trait Baz, val x = new Baz {}, Baz.children returns Baz... else if tp.classSymbol == defn.TupleClass || tp.classSymbol == defn.NonEmptyTupleClass then List(child) // TupleN and TupleXXL classes are used for Tuple, but they aren't Tuple's children - else if (child.is(Private) || child.is(Sealed)) && child.isOneOf(AbstractOrTrait) then getChildren(child) - else List(child) + else if (child.is(Private) || child.is(Sealed)) && child.isOneOf(AbstractOrTrait) then + getChildren(child, getTypeArgs(sym, child, typeArgs)) + else + val childSubstTypes = child.typeRef.applyIfParameterized(getTypeArgs(sym, child, typeArgs)) + // if a class contains a field of type Nothing, + // then it can be ignored in pattern matching, because it is impossible to obtain an instance of it + val existFieldWithBottomType = childSubstTypes.fields.exists(_.info.isBottomType) + if existFieldWithBottomType then Nil else List(child) } - val children = trace(i"getChildren($tp)")(getChildren(tp.classSymbol)) + val children = trace(i"getChildren($tp)")(getChildren(tp.classSymbol, typeArgs)) val parts = children.map { sym => val sym1 = if (sym.is(ModuleClass)) sym.sourceModule else sym diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala index 936b6958fb33..87ee2be91465 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala @@ -17,7 +17,7 @@ import Types.* import dotty.tools.backend.sjs.JSDefinitions.jsdefn -import org.scalajs.ir.{Trees => js} +import dotty.tools.sjs.ir.{Trees => js} /** Additional extensions for `Symbol`s that are only relevant for Scala.js. 
*/ object JSSymUtils { diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala index f66141bff8ad..5aa35a277cb5 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala @@ -21,8 +21,8 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn import JSExportUtils.* import JSSymUtils.* -import org.scalajs.ir.Names.DefaultModuleID -import org.scalajs.ir.Trees.TopLevelExportDef.isValidTopLevelExportName +import dotty.tools.sjs.ir.Names.DefaultModuleID +import dotty.tools.sjs.ir.Trees.TopLevelExportDef.isValidTopLevelExportName object PrepJSExports { import tpd.* diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala index 1b8fdd268ece..c7316482c193 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala @@ -23,7 +23,7 @@ import Types.* import JSSymUtils.* -import org.scalajs.ir.Trees.JSGlobalRef +import dotty.tools.sjs.ir.Trees.JSGlobalRef import dotty.tools.backend.sjs.JSDefinitions.jsdefn diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 3c2f025dc095..0727c83d8469 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -1082,7 +1082,7 @@ trait Implicits: * it should be applied, EmptyTree otherwise. * @param span The position where errors should be reported. */ - def inferImplicit(pt: Type, argument: Tree, span: Span)(using Context): SearchResult = + def inferImplicit(pt: Type, argument: Tree, span: Span)(using Context): SearchResult = ctx.profiler.onImplicitSearch(pt): trace(s"search implicit ${pt.show}, arg = ${argument.show}: ${argument.tpe.show}", implicits, show = true) { record("inferImplicit") assert(ctx.phase.allowsImplicitSearch, @@ -1176,7 +1176,7 @@ trait Implicits: case _ => info.derivesFrom(defn.ConversionClass) def tryConversion(using Context) = { val untpdConv = - if ref.symbol.is(Given) && producesConversion(ref.symbol.info) then + if ref.symbol.isOneOf(GivenOrImplicit) && producesConversion(ref.symbol.info) then untpd.Select( untpd.TypedSplice( adapt(generated, diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 6167db62fbe0..0849e57b8c7d 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -877,16 +877,16 @@ class Namer { typer: Typer => protected def addAnnotations(sym: Symbol): Unit = original match { case original: untpd.MemberDef => lazy val annotCtx = annotContext(original, sym) - original.setMods: + original.setMods: original.mods.withAnnotations : - original.mods.annotations.mapConserve: annotTree => + original.mods.annotations.mapConserve: annotTree => val cls = typedAheadAnnotationClass(annotTree)(using annotCtx) if (cls eq sym) report.error(em"An annotation class cannot be annotated with iself", annotTree.srcPos) annotTree else - val ann = - if cls.is(JavaDefined) then Checking.checkNamedArgumentForJavaAnnotation(annotTree, cls.asClass) + val ann = + if cls.is(JavaDefined) then Checking.checkNamedArgumentForJavaAnnotation(annotTree, cls.asClass) else annotTree val ann1 = Annotation.deferred(cls)(typedAheadExpr(ann)(using annotCtx)) 
sym.addAnnotation(ann1) diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index ce72aac596c0..3810bc66841e 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -78,6 +78,9 @@ object Typer { /** An attachment for GADT constraints that were inferred for a pattern. */ val InferredGadtConstraints = new Property.StickyKey[core.GadtConstraint] + /** Indicates that a definition was copied over from the parent refinements */ + val RefinementFromParent = new Property.StickyKey[Unit] + /** An attachment on a Select node with an `apply` field indicating that the `apply` * was inserted by the Typer. */ @@ -2820,7 +2823,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => typed(rhs) - def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = { + def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = ctx.profiler.onTypedDef(sym) { val ValDef(name, tpt, _) = vdef checkNonRootName(vdef.name, vdef.nameSpan) completeAnnotations(vdef, sym) @@ -2848,7 +2851,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer sym.owner.info.decls.openForMutations.unlink(sym) EmptyTree - def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using Context): Tree = if !sym.info.exists then retractDefDef(sym) else { + def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using Context): Tree = if !sym.info.exists then retractDefDef(sym) else ctx.profiler.onTypedDef(sym) { // TODO: - Remove this when `scala.language.experimental.erasedDefinitions` is no longer experimental. // - Modify signature to `erased def erasedValue[T]: T` @@ -2960,7 +2963,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if !sym.is(Module) && !sym.isConstructor && sym.info.finalResultType.isErasedClass then sym.setFlag(Erased) - def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree = { + def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree = ctx.profiler.onTypedDef(sym) { val TypeDef(name, rhs) = tdef completeAnnotations(tdef, sym) val rhs1 = tdef.rhs match @@ -2974,7 +2977,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer assignType(cpy.TypeDef(tdef)(name, rhs1), sym) } - def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(using Context): Tree = { + def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(using Context): Tree = ctx.profiler.onTypedDef(cls) { if (!cls.info.isInstanceOf[ClassInfo]) return EmptyTree.assertingErrorsReported val TypeDef(name, impl @ Template(constr, _, self, _)) = cdef: @unchecked @@ -3071,7 +3074,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer ( if sym.isType then TypeDef(sym.asType) else if sym.is(Method) then DefDef(sym.asTerm) else ValDef(sym.asTerm) - ).withSpan(impl.span.startPos) + ).withSpan(impl.span.startPos).withAttachment(RefinementFromParent, ()) body ++ refinements case None => body @@ -3395,6 +3398,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Translate tuples of all arities */ def typedTuple(tree: untpd.Tuple, pt: Type)(using Context): Tree = val tree1 = desugar.tuple(tree, pt) + checkAmbiguousNamedTupleAssignment(tree) if tree1 ne tree then typed(tree1, pt) else val arity = tree.trees.length @@ -3420,6 +3424,20 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val resTpe = TypeOps.nestedPairs(elemTpes) app1.cast(resTpe) + /** Checks if `tree` is a named tuple with one 
element that could be + * interpreted as an assignment, such as `(x = 1)`. If so, issues a warning. + */ + def checkAmbiguousNamedTupleAssignment(tree: untpd.Tuple)(using Context): Unit = + tree.trees match + case List(NamedArg(name, value)) => + val tmpCtx = ctx.fresh.setNewTyperState() + typedAssign(untpd.Assign(untpd.Ident(name), value), WildcardType)(using tmpCtx) + if !tmpCtx.reporter.hasErrors then + // If there are no errors typing the above, then the named tuple is + // ambiguous and we issue a warning. + report.migrationWarning(AmbiguousNamedTupleAssignment(name, value), tree.srcPos) + case _ => () + /** Retrieve symbol attached to given tree */ protected def retrieveSym(tree: untpd.Tree)(using Context): Symbol = tree.removeAttachment(SymOfTree) match { case Some(sym) => diff --git a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala index 0c63f5b4ecb1..264d0f170769 100644 --- a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala +++ b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala @@ -43,7 +43,7 @@ class TyperPhase(addRootImports: Boolean = true) extends Phase { def typeCheck(using Context)(using subphase: SubPhase): Boolean = monitor(subphase.name) { val unit = ctx.compilationUnit try - if !unit.suspended then + if !unit.suspended then ctx.profiler.onUnit(ctx.phase, unit): unit.tpdTree = ctx.typer.typedExpr(unit.untpdTree) typr.println("typed: " + unit.source) record("retained untyped trees", unit.untpdTree.treeSize) diff --git a/compiler/src/dotty/tools/dotc/util/SourceFile.scala b/compiler/src/dotty/tools/dotc/util/SourceFile.scala index 9da4f58f2deb..3ea43d16a7c8 100644 --- a/compiler/src/dotty/tools/dotc/util/SourceFile.scala +++ b/compiler/src/dotty/tools/dotc/util/SourceFile.scala @@ -119,7 +119,8 @@ class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends * For regular source files, simply return the argument. 
*/ def positionInUltimateSource(position: SourcePosition): SourcePosition = - SourcePosition(underlying, position.span shift start) + if isSelfContained then position // return the argument + else SourcePosition(underlying, position.span shift start) private def calculateLineIndicesFromContents() = { val cs = content() diff --git a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala index 904704b2349c..a7358755043c 100644 --- a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala +++ b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala @@ -79,7 +79,6 @@ extends SrcPos, interfaces.SourcePosition, Showable { rec(this) } - override def toString: String = s"${if (source.exists) source.file.toString else "(no source)"}:$span" diff --git a/compiler/src/dotty/tools/repl/ParseResult.scala b/compiler/src/dotty/tools/repl/ParseResult.scala index b9139343bca1..24a624173050 100644 --- a/compiler/src/dotty/tools/repl/ParseResult.scala +++ b/compiler/src/dotty/tools/repl/ParseResult.scala @@ -122,7 +122,7 @@ object ParseResult { private def parseStats(using Context): List[untpd.Tree] = { val parser = new Parser(ctx.source) - val stats = parser.blockStatSeq() + val stats = parser.blockStatSeq(outermost = true) parser.accept(Tokens.EOF) stats } diff --git a/compiler/test/dotc/run-macros-scala2-library-tasty.blacklist b/compiler/test/dotc/run-macros-scala2-library-tasty.blacklist index 63a6e2cee345..6fdfccf7646c 100644 --- a/compiler/test/dotc/run-macros-scala2-library-tasty.blacklist +++ b/compiler/test/dotc/run-macros-scala2-library-tasty.blacklist @@ -2,3 +2,4 @@ tasty-extractors-1 tasty-extractors-2 tasty-extractors-types +type-print diff --git a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala index a40c1ec1e5b2..5cd4f837b823 100644 --- a/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/BootstrappedOnlyCompilationTests.scala @@ -193,7 +193,7 @@ class BootstrappedOnlyCompilationTests { // 1. hack with absolute path for -Xplugin // 2. 
copy `pluginFile` to destination - def compileFilesInDir(dir: String): CompilationTest = { + def compileFilesInDir(dir: String, run: Boolean = false): CompilationTest = { val outDir = defaultOutputDir + "testPlugins/" val sourceDir = new java.io.File(dir) @@ -201,7 +201,10 @@ class BootstrappedOnlyCompilationTests { val targets = dirs.map { dir => val compileDir = createOutputDirsForDir(dir, sourceDir, outDir) Files.copy(dir.toPath.resolve(pluginFile), compileDir.toPath.resolve(pluginFile), StandardCopyOption.REPLACE_EXISTING) - val flags = TestFlags(withCompilerClasspath, noCheckOptions).and("-Xplugin:" + compileDir.getAbsolutePath) + val flags = { + val base = TestFlags(withCompilerClasspath, noCheckOptions).and("-Xplugin:" + compileDir.getAbsolutePath) + if run then base.withRunClasspath(withCompilerClasspath) else base + } SeparateCompilationSource("testPlugins", dir, flags, compileDir) } @@ -210,6 +213,7 @@ class BootstrappedOnlyCompilationTests { compileFilesInDir("tests/plugins/neg").checkExpectedErrors() compileDir("tests/plugins/custom/analyzer", withCompilerOptions.and("-Yretain-trees")).checkCompile() + compileFilesInDir("tests/plugins/run", run = true).checkRuns() } } diff --git a/compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala b/compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala new file mode 100644 index 000000000000..07dc53da1f83 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/profile/ChromeTraceTest.scala @@ -0,0 +1,93 @@ +package dotty.tools.dotc.profile + +import java.io.* + +import org.junit.Assert.* +import org.junit.* +import java.nio.file.Files +import java.nio.charset.StandardCharsets +import java.util.concurrent.locks.LockSupport +import scala.concurrent.duration.* + +class ChromeTraceTest: + private def testTraceOutputs(generator: ChromeTrace => Unit)(checkContent: PartialFunction[List[String], Unit]): Unit = { + val outfile = Files.createTempFile("trace-", ".json").nn + val tracer = new ChromeTrace(outfile) + try generator(tracer) + finally tracer.close() + val contentLines = scala.io.Source.fromFile(outfile.toFile().nn).getLines().toList + checkContent.applyOrElse( + contentLines, + content => fail(s"Invalid output lines: ${content.mkString(System.lineSeparator().nn)}") + ) + } + + @Test def traceCounterEvent(): Unit = testTraceOutputs{ tracer => + tracer.traceCounterEvent("foo", "counter1", 42, processWide = true) + tracer.traceCounterEvent("bar", "counter2", 21, processWide = false) + }{ + case """{"traceEvents":[""" :: + s"""{"cat":"scalac","name":"foo","ph":"C","tid":"${tid1}","pid":"${pid1}","ts":${ts1},"args":{"counter1":42}}""" :: + s""",{"cat":"scalac","name":"bar","ph":"C","tid":"${tid2}","pid":"${pid2}","ts":${ts2},"args":{"counter2":21}}""" :: + "]}" :: Nil => + assertEquals(tid1, tid2) + assertTrue(tid1.toIntOption.isDefined) + assertNotEquals(pid1, pid2) + assertTrue(pid1.toIntOption.isDefined) + assertEquals(s"$pid1-$tid1", pid2) + assertTrue(ts1.toLong < ts2.toLong) + } + + @Test def traceDurationEvent(): Unit = testTraceOutputs{ tracer => + tracer.traceDurationEvent(name = "name1", startNanos = 1000L, durationNanos = 2500L, tid = "this-thread") + tracer.traceDurationEvent(name = "name2", startNanos = 1000L, durationNanos = 5000L, tid = "this-thread", pidSuffix = "pidSuffix") + }{ + case """{"traceEvents":[""" :: + s"""{"cat":"scalac","name":"name1","ph":"X","tid":"this-thread","pid":"${pid1}","ts":1,"dur":2}""" :: + s""",{"cat":"scalac","name":"name2","ph":"X","tid":"this-thread","pid":"${pid2}","ts":1,"dur":5}""" :: + 
"]}" :: Nil => + assertTrue(pid1.toIntOption.isDefined) + assertEquals(s"$pid1-pidSuffix", pid2) + } + + @Test def traceDurationEvents(): Unit = { + val testStart = System.nanoTime() + testTraceOutputs{ tracer => + tracer.traceDurationEventStart(cat = "test1", name = "event1") + LockSupport.parkNanos(2.millis.toNanos) + tracer.traceDurationEventStart(cat = "test2", name = "event2", colour = "RED", pidSuffix = "pid-suffix") + LockSupport.parkNanos(4.millis.toNanos) + tracer.traceDurationEventEnd(cat = "test2", name = "event2") + LockSupport.parkNanos(8.millis.toNanos) + tracer.traceDurationEventEnd(cat = "test1", name = "event1", colour = "RED", pidSuffix = "pid-suffix") + }{ + case """{"traceEvents":[""" :: + s"""{"cat":"test1","name":"event1","ph":"B","pid":"${pid1}","tid":"${tid1}","ts":${ts1}}""" :: + s""",{"cat":"test2","name":"event2","ph":"B","pid":"${pid2}","tid":"${tid2}","ts":${ts2},"cname":"RED"}""" :: + s""",{"cat":"test2","name":"event2","ph":"E","pid":"${pid3}","tid":"${tid3}","ts":${ts3}}""" :: + s""",{"cat":"test1","name":"event1","ph":"E","pid":"${pid4}","tid":"${tid4}","ts":${ts4},"cname":"RED"}""" :: + "]}" :: Nil => + val traceEnd = System.nanoTime() + assertTrue(tid1.toIntOption.isDefined) + assertEquals(pid1, pid3) + assertTrue(pid1.endsWith(s"-$tid1")) + assertEquals(pid2, pid4) + assertTrue(pid2.endsWith("-pid-suffix")) + List(tid1, tid2, tid3).foreach: tid => + assertEquals(tid4, tid) + List(pid1, pid2, pid3, pid4).foreach: pid => + assertTrue(pid.takeWhile(_ != '-').toIntOption.isDefined) + + List(ts1, ts2, ts3, ts4).map(_.toLong) match { + case all @ List(ts1, ts2, ts3, ts4) => + all.foreach: ts => + // Timestamps are presented using Epoch microsecondos + assertTrue(ts >= testStart / 1000) + assertTrue(ts <= traceEnd / 1000) + assertTrue(ts2 >= ts1 + 2.millis.toMicros) + assertTrue(ts3 >= ts2 + 4.millis.toMicros) + assertTrue(ts4 >= ts3 + 8.millis.toMicros) + case _ => fail("unreachable") + } + } +} diff --git a/compiler/test/dotty/tools/dotc/profile/FileUtilsTest.scala b/compiler/test/dotty/tools/dotc/profile/FileUtilsTest.scala new file mode 100644 index 000000000000..3253cff52057 --- /dev/null +++ b/compiler/test/dotty/tools/dotc/profile/FileUtilsTest.scala @@ -0,0 +1,91 @@ +package dotty.tools.dotc.profile + +import java.io.* + +import org.junit.Assert.* +import org.junit.* + +class FileUtilsTest { + + @Test def writeIsSame(): Unit = { + val fileTest = File.createTempFile("FileUtilsTest", "t1").nn + val fileExpected = File.createTempFile("FileUtilsTest", "t2").nn + + val sTest = FileUtils.newAsyncBufferedWriter(new FileWriter(fileTest), threadsafe = false) + val sExpected = new BufferedWriter(new FileWriter(fileExpected)) + + def writeBoth(s:String, asChars: Boolean) = { + if (asChars) { + sTest.write(s.toCharArray) + sExpected.write(s.toCharArray) + } else { + sTest.write(s) + sExpected.write(s) + } + } + + for (i <- 1 to 2000) { + writeBoth(s"line $i text;", asChars = true) + writeBoth(s"line $i chars", asChars = false) + sTest.newLine() + sExpected.newLine() + } + sTest.close() + sExpected.close() + + assertEquals(fileExpected.length(),fileTest.length()) + + val expIn = new BufferedReader(new FileReader(fileExpected)) + val testIn = new BufferedReader(new FileReader(fileTest)) + + var exp = expIn.readLine() + while (exp ne null) { + val actual = testIn.readLine() + assertEquals(exp, actual) + exp = expIn.readLine() + } + expIn.close() + testIn.close() + fileTest.delete() + fileExpected.delete() + } + + @Ignore + @Test def showPerformance(): Unit = { + 
//warmup + for (i <- 1 to 1000) { + writeIsSame() + } + + val fileTest = File.createTempFile("FileUtilsTest", "t1").nn + val fileExpected = File.createTempFile("FileUtilsTest", "t2").nn + + for (i <- 1 to 10) { + val sTest = FileUtils.newAsyncBufferedWriter(fileTest.toPath.nn) + val sExpected = new BufferedWriter(new FileWriter(fileExpected)) + + val t1 = System.nanoTime() + List.tabulate(10000) {i => + sTest.write(s"line $i text;") + sTest.newLine() + } + val t2 = System.nanoTime() + sTest.close() + val t3 = System.nanoTime() + List.tabulate(10000) {i => + sExpected.write(s"line $i text;") + sExpected.newLine() + } + val t4 = System.nanoTime() + sExpected.close() + + println(s"async took ${t2 - t1} ns") + println(s"buffered took ${t4 - t3} ns") + + fileTest.delete() + fileExpected.delete() + } + } + +} + diff --git a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala index 374f53dbd011..221eb8acb9de 100644 --- a/compiler/test/dotty/tools/repl/ReplCompilerTests.scala +++ b/compiler/test/dotty/tools/repl/ReplCompilerTests.scala @@ -462,10 +462,36 @@ class ReplCompilerTests extends ReplTest: .andThen: run("0") // check for crash val last = lines() - println(last) assertTrue(last(0), last(0) == ("Options incompatible with repl will be ignored: -Ybest-effort, -Ywith-best-effort-tasty")) assertTrue(last(1), last(1) == ("val res0: Int = 0")) + @Test def `i9879`: Unit = initially: + run { + """|opaque type A = Int; def getA: A = 0 + |object Wrapper { opaque type A = Int; def getA: A = 1 } + |val x = getA + |val y = Wrapper.getA""".stripMargin + } + val expected = List( + "def getA: A", + "// defined object Wrapper", + "val x: A = 0", + "val y: Wrapper.A = 1" + ) + assertEquals(expected, lines()) + + @Test def `i9879b`: Unit = initially: + run { + """|def test = + | type A = Int + | opaque type B = String + | object Wrapper { opaque type C = Int } + | ()""".stripMargin + } + val all = lines() + assertEquals(6, all.length) + assertTrue(all.head.startsWith("-- [E103] Syntax Error")) + assertTrue(all.exists(_.trim().startsWith("| Illegal start of statement: this modifier is not allowed here"))) object ReplCompilerTests: diff --git a/docs/_docs/reference/contextual/givens.md b/docs/_docs/reference/contextual/givens.md index 088ded2e8db4..2b360dfc7af0 100644 --- a/docs/_docs/reference/contextual/givens.md +++ b/docs/_docs/reference/contextual/givens.md @@ -87,11 +87,9 @@ time it is accessed. If the given is a mere alias to some immutable value, the g ## Syntax -Here is the full syntax for given instances. Some of these forms of givens are explained in a separate page: [Other Forms of Givens](../more-givens.md). +Here is the full syntax for given instances. Some of these forms of givens are explained in a separate page: [Other Forms of Givens](./more-givens.md). ```ebnf -Here is the complete context-free syntax for all proposed features. -``` TmplDef ::= ... | 'given' GivenDef GivenDef ::= [id ':'] GivenSig GivenSig ::= GivenImpl @@ -115,4 +113,4 @@ A given instance starts with the reserved keyword `given`, which is followed by - A _structural given_ implements one or more class constructors with a list of member definitions in a template body. -**Note** Parts of the given syntax have changed in Scala 3.6. The original syntax from Scala 3.0 on is described in a separate page [Previous Given Syntax](../previous-givens.md). The original syntax is still supported for now but will be deprecated and phased out over time. 
+**Note** Parts of the given syntax have changed in Scala 3.6. The original syntax from Scala 3.0 on is described in a separate page [Previous Given Syntax](./previous-givens.md). The original syntax is still supported for now but will be deprecated and phased out over time. diff --git a/docs/_docs/reference/experimental/better-fors.md b/docs/_docs/reference/experimental/better-fors.md new file mode 100644 index 000000000000..7add425caf51 --- /dev/null +++ b/docs/_docs/reference/experimental/better-fors.md @@ -0,0 +1,79 @@ +--- +layout: doc-page +title: "Better fors" +nightlyOf: https://docs.scala-lang.org/scala3/reference/experimental/better-fors.html +--- + +The `betterFors` language extension improves the usability of `for`-comprehensions. + +The extension is enabled by the language import `import scala.language.experimental.betterFors` or by setting the command line option `-language:experimental.betterFors`. + +The biggest user-facing change is the new ability to start `for`-comprehensions with aliases. This means that the following previously invalid code is now valid: + +```scala +for + as = List(1, 2, 3) + bs = List(4, 5, 6) + a <- as + b <- bs +yield a + b +``` + +The desugaring of this code is the same as if the aliases were introduced with `val`: + +```scala +val as = List(1, 2, 3) +val bs = List(4, 5, 6) +for + a <- as + b <- bs +yield a + b +``` + +Additionally, this extension changes the way `for`-comprehensions are desugared. The desugaring is now done in a more intuitive way, and the desugared code can be more efficient because it avoids some unnecessary method calls. There are two main changes in the desugaring: + +1. **Simpler Desugaring for Pure Aliases**: + When an alias is not followed by a guard, the desugaring is simplified. The last generator and the aliases don't have to be wrapped in a tuple, and instead the aliases are simply introduced as local variables in a block with the next generator. + **Current Desugaring**: + ```scala + for { + a <- doSth(arg) + b = a + } yield a + b + ``` + Desugars to: + ```scala + doSth(arg).map { a => + val b = a + (a, b) + }.map { case (a, b) => + a + b + } + ``` + **New Desugaring**: + ```scala + doSth(arg).map { a => + val b = a + a + b + } + ``` + This change makes the desugaring more intuitive and avoids unnecessary `map` calls when an alias is not followed by a guard (for the case where a guard does follow the alias, see the sketch at the end of this page). + +2. **Avoiding Redundant `map` Calls**: + When the result of the `for`-comprehension is the same expression as the last generator pattern, the desugaring avoids an unnecessary `map` call, but the equality of the last pattern and the result has to be checkable syntactically, so it is either a variable or a tuple of variables. + **Current Desugaring**: + ```scala + for { + a <- List(1, 2, 3) + } yield a + ``` + Desugars to: + ```scala + List(1, 2, 3).map(a => a) + ``` + **New Desugaring**: + ```scala + List(1, 2, 3) + ``` + +For more details on the desugaring scheme, see the comment in [`Desugar.scala#makeFor`](https://github.com/scala/scala3/blob/main/compiler/src/dotty/tools/dotc/ast/Desugar.scala#L1928).
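+
+As an illustration of the first point above, here is a sketch (not output copied from the compiler) of the case where the alias *is* followed by a guard: the tuple-wrapping scheme is still expected, because the guard and the rest of the comprehension need access to the alias. The guard `if b > 0` is hypothetical and added only for illustration:
+
+```scala
+for {
+  a <- doSth(arg)
+  b = a
+  if b > 0
+} yield a + b
+```
+
+Desugars to (roughly):
+
+```scala
+doSth(arg).map { a =>
+  val b = a   // the alias is still paired with the generator value
+  (a, b)
+}.withFilter { case (a, b) =>
+  b > 0       // the guard operates on the tuple
+}.map { case (a, b) =>
+  a + b
+}
+```
+
+The exact trees produced by the compiler may differ; see `Desugar.scala#makeFor` linked above for the authoritative scheme.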
\ No newline at end of file diff --git a/docs/sidebar.yml b/docs/sidebar.yml index f12e732f1c6f..74aee3dfc668 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -162,6 +162,7 @@ subsection: - page: reference/experimental/modularity.md - page: reference/experimental/typeclasses.md - page: reference/experimental/runtimeChecked.md + - page: reference/experimental/better-fors.md - page: reference/syntax.md - title: Language Versions index: reference/language-versions/language-versions.md diff --git a/library/src-bootstrapped/scala/NamedTuple.scala b/library/src/scala/NamedTuple.scala similarity index 100% rename from library/src-bootstrapped/scala/NamedTuple.scala rename to library/src/scala/NamedTuple.scala diff --git a/library/src/scala/quoted/Expr.scala b/library/src/scala/quoted/Expr.scala index f1045e5bdaca..d1385a0193d6 100644 --- a/library/src/scala/quoted/Expr.scala +++ b/library/src/scala/quoted/Expr.scala @@ -256,7 +256,7 @@ object Expr { private def tupleTypeFromSeq(seq: Seq[Expr[Any]])(using Quotes): quotes.reflect.TypeRepr = import quotes.reflect.* val consRef = Symbol.classSymbol("scala.*:").typeRef - seq.foldLeft(TypeRepr.of[EmptyTuple]) { (ts, expr) => + seq.foldRight(TypeRepr.of[EmptyTuple]) { (expr, ts) => AppliedType(consRef, expr.asTerm.tpe :: ts :: Nil) } diff --git a/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala b/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala index 035c1062a3e3..9fb84ee1f513 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/CompilerSearchVisitor.scala @@ -28,7 +28,7 @@ class CompilerSearchVisitor( owner.isStatic && owner.isPublic private def isAccessible(sym: Symbol): Boolean = try - sym != NoSymbol && sym.isPublic && sym.isStatic || isAccessibleImplicitClass(sym) + (sym != NoSymbol && sym.isAccessibleFrom(ctx.owner.info) && sym.isStatic) || isAccessibleImplicitClass(sym) catch case err: AssertionError => logger.log(Level.WARNING, err.getMessage()) diff --git a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala index fc97dd1f1176..3b2284bef1d0 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/PcDefinitionProvider.scala @@ -131,13 +131,13 @@ class PcDefinitionProvider( otherDefs.headOption.orElse(exportedDefs.headOption) match case Some(srcTree) => val pos = srcTree.namePos - pos.toLocation match - case None => DefinitionResultImpl.empty - case Some(loc) => + if pos.exists then + val loc = new Location(params.uri().toString(), pos.toLsp) DefinitionResultImpl( SemanticdbSymbols.symbolName(sym), - List(loc).asJava + List(loc).asJava, ) + else DefinitionResultImpl.empty case None => DefinitionResultImpl.empty else diff --git a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala index 5578fab412d1..adaeadb12978 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/completions/CompletionProvider.scala @@ -21,6 +21,7 @@ import dotty.tools.dotc.interactive.Interactive import dotty.tools.dotc.interactive.Completion import dotty.tools.dotc.interactive.InteractiveDriver import dotty.tools.dotc.parsing.Tokens +import 
dotty.tools.dotc.profile.Profiler import dotty.tools.dotc.util.SourceFile import dotty.tools.pc.AutoImports.AutoImportEdits import dotty.tools.pc.AutoImports.AutoImportsGenerator @@ -75,7 +76,10 @@ class CompletionProvider( val pos = driver.sourcePosition(params) val (items, isIncomplete) = driver.compilationUnits.get(uri) match case Some(unit) => - val newctx = ctx.fresh.setCompilationUnit(unit).withPhase(Phases.typerPhase(using ctx)) + val newctx = ctx.fresh + .setCompilationUnit(unit) + .setProfiler(Profiler()(using ctx)) + .withPhase(Phases.typerPhase(using ctx)) val tpdPath0 = Interactive.pathTo(unit.tpdTree, pos.span)(using newctx) val adjustedPath = Interactive.resolveTypedOrUntypedPath(tpdPath0, pos)(using newctx) diff --git a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala index a738440c585d..d9c11a5ada8c 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/printer/ShortenedTypePrinter.scala @@ -527,7 +527,8 @@ class ShortenedTypePrinter( else if includeDefaultParam == ShortenedTypePrinter.IncludeDefaultParam.ResolveLater && isDefaultParam then " = ..." else "" // includeDefaultParam == Never or !isDefaultParam - s"$keywordName: ${paramTypeString}$default" + val inline = if(param.is(Flags.Inline)) "inline " else "" + s"$inline$keywordName: ${paramTypeString}$default" end if end paramLabel diff --git a/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala b/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala index 8ff11694ff1c..66080a363d51 100644 --- a/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala +++ b/presentation-compiler/src/main/dotty/tools/pc/utils/InteractiveEnrichments.scala @@ -99,12 +99,6 @@ object InteractiveEnrichments extends CommonMtagsEnrichments: def focusAt(point: Int): SourcePosition = pos.withSpan(pos.span.withPoint(point).focus) - def toLocation: Option[l.Location] = - for - uri <- InteractiveDriver.toUriOption(pos.source) - range <- if pos.exists then Some(pos.toLsp) else None - yield new l.Location(uri.toString(), range) - def encloses(other: SourcePosition): Boolean = pos.start <= other.start && pos.end >= other.end diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala index 17f21b16d6e8..dc81d2596c6f 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionArgSuite.scala @@ -796,7 +796,7 @@ class CompletionArgSuite extends BaseCompletionSuite: | def k: Int = m(1, a@@) |""".stripMargin, """|aaa = : Int - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(2), ) @@ -810,7 +810,7 @@ class CompletionArgSuite extends BaseCompletionSuite: | def k: Int = m(inn = 1, a@@) |""".stripMargin, """|aaa = : Int - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(2), ) @@ -912,7 +912,7 @@ class CompletionArgSuite extends BaseCompletionSuite: |""".stripMargin, """|aaa = : Int |abb = : Option[Int] - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(3), ) @@ -927,7 
+927,7 @@ class CompletionArgSuite extends BaseCompletionSuite: |""".stripMargin, """|aaa = : Int |abb = : Option[Int] - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(3), ) @@ -945,7 +945,7 @@ class CompletionArgSuite extends BaseCompletionSuite: |""".stripMargin, """|aaa = : Int |abb = : Option[Int] - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(3), ) @@ -963,7 +963,7 @@ class CompletionArgSuite extends BaseCompletionSuite: |""".stripMargin, """|abb = : Option[Int] |acc = : List[Int] - |assert(assertion: Boolean): Unit + |assert(inline assertion: Boolean): Unit |""".stripMargin, topLines = Some(3), ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCancelSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCancelSuite.scala index 4746eb93f25d..c1d0e017def7 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCancelSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionCancelSuite.scala @@ -90,8 +90,8 @@ class CompletionCancelSuite extends BaseCompletionSuite: | val x = asser@@ |} """.stripMargin, - """|assert(assertion: Boolean): Unit - |assert(assertion: Boolean, message: => Any): Unit + """|assert(inline assertion: Boolean): Unit + |assert(inline assertion: Boolean, inline message: => Any): Unit |""".stripMargin ) diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala index 381375c65131..a002e722f1f0 100644 --- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala +++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSnippetSuite.scala @@ -385,6 +385,29 @@ class CompletionSnippetSuite extends BaseCompletionSuite: ) @Test def `no-apply` = + checkSnippet( + s"""|package example + | + |object Widget{} + |object Main { + | Wi@@ + |} + |""".stripMargin, + """|Widget - example + |Window - java.awt + |WindowPeer - java.awt.peer + |WithFilter - [A](p: A => Boolean, xs: Array[A]): WithFilter[A] + |WithFilter - [A, CC[_$$2]](self: IterableOps[A, CC, ?], p: A => Boolean): WithFilter[A, CC] + |WithFilter - [K, V, IterableCC[_$$3], CC[_$$4,_$$5] <: IterableOps[?, AnyConstr, ?]](self: MapOps[K, V, CC, ?] & IterableOps[(K, V), IterableCC, ?], p: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, CC] + |WithFilter - [K, V, IterableCC[_$$1], MapCC[X,Y] <: scala.collection.Map[X, Y], CC[X,Y] <: scala.collection.Map[X, Y] & SortedMapOps[X, Y, CC, ?]](self: SortedMapOps[K, V, CC, ?] & MapOps[K, V, MapCC, ?] & IterableOps[(K, V), IterableCC, ?], p: ((K, V)) => Boolean): WithFilter[K, V, IterableCC, MapCC, CC] + |WithFilter - [A, IterableCC[_$$1], CC[X] <: SortedSet[X]](self: SortedSetOps[A, CC, ?] 
& IterableOps[A, IterableCC, ?], p: A => Boolean): WithFilter[A, IterableCC, CC]
+         |WithFilter - (p: Char => Boolean, s: String): WithFilter
+         |WithFilter - [A](l: Stream[A] @uncheckedVariance, p: A => Boolean): WithFilter[A]
+         |""".stripMargin,
+      includeDetail = true,
+    )
+
+  @Test def `no-apply2` =
     checkSnippet(
       s"""|package example
           |
diff --git a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala
index 57975d2c8e98..1d525c17bdf1 100644
--- a/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala
+++ b/presentation-compiler/test/dotty/tools/pc/tests/completion/CompletionSuite.scala
@@ -2107,6 +2107,19 @@ class CompletionSuite extends BaseCompletionSuite:
       |""".stripMargin
   )
 
+  @Test def `shadowing` =
+    check(
+      """|package pkg
+         |object Main {
+         |  val x = ListBuff@@
+         |}
+         |""".stripMargin,
+      """|ListBuffer[A](elems: A*): ListBuffer[A] - scala.collection.mutable
+         |new ListBuffer[A]: ListBuffer[A] - scala.collection.mutable
+         |ListBuffer - scala.collection.mutable
+         |""".stripMargin
+    )
+
   @Test def `conflict-edit-2` =
     checkEdit(
       """|package a
diff --git a/project/Build.scala b/project/Build.scala
index 84ce00d11577..464ba4a86411 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -91,11 +91,18 @@ object DottyJSPlugin extends AutoPlugin {
 object Build {
   import ScaladocConfigs._
 
-  val referenceVersion = "3.5.2-RC1"
+  /** Version of the Scala compiler used to build the artifacts.
+   *  The reference version should track the latest version pushed to Maven:
+   *  - In the main branch it should be the last RC version (its experimental TASTy version is required for non-bootstrapped tests)
+   *  - In the release branch it should be the last stable release
+   *  3.6.0-RC1 was released as 3.6.0 - it has an experimental TASTy version
+   */
+  val referenceVersion = "3.6.0"
 
-  val baseVersion = "3.6.1"
+  val baseVersion = "3.6.2" // Will be required by some automation later
 
-  val prereleaseVersion = s"$baseVersion-RC1"
+  // TODO: Introduce automation and handling for RC versions before 3.6.2-RC1
+  // val prereleaseVersion = s"$baseVersion-RC1"
 
   // LTS or Next
   val versionLine = "Next"
@@ -113,8 +120,9 @@ object Build {
    *  For a baseVersion `3.M.P` the mimaPreviousDottyVersion should be set to:
    *  - `3.M.0` if `P > 0`
    *  - `3.(M-1).0` if `P = 0`
+   *  3.6.1 is an exception to this rule - 3.6.0 was a broken release
    */
-  val mimaPreviousDottyVersion = "3.5.0"
+  val mimaPreviousDottyVersion = "3.6.1"
 
   /** LTS version against which we check binary compatibility.
    *
@@ -149,8 +157,8 @@ object Build {
   *  scala-library.
   */
  def stdlibVersion(implicit mode: Mode): String = mode match {
-    case NonBootstrapped => "2.13.14"
-    case Bootstrapped => "2.13.14"
+    case NonBootstrapped => "2.13.15"
+    case Bootstrapped => "2.13.15"
  }
 
  /** Version of the scala-library for which we will generate TASTy.
@@ -160,7 +168,7 @@ object Build {
   *  We can use nightly versions to tests the future compatibility in development.
   *  Nightly versions: https://scala-ci.typesafe.com/ui/native/scala-integration/org/scala-lang
   */
-  val stdlibBootstrappedVersion = "2.13.14"
+  val stdlibBootstrappedVersion = "2.13.15"
 
  val dottyOrganization = "org.scala-lang"
  val dottyGithubUrl = "https://github.com/scala/scala3"
@@ -616,18 +624,36 @@ object Build {
  def findArtifactPath(classpath: Def.Classpath, name: String): String =
    findArtifact(classpath, name).getAbsolutePath
 
+  /** Replace package names in package definitions, for shading.
+ * It assumes the full package def is written on a single line. + * It does not adapt the imports accordingly. + */ + def replacePackage(lines: List[String])(replace: PartialFunction[String, String]): List[String] = { + def recur(lines: List[String]): List[String] = + lines match { + case head :: tail => + if (head.startsWith("package ")) { + val packageName = head.stripPrefix("package ").trim + val newPackageName = replace.applyOrElse(packageName, (_: String) => packageName) + s"package $newPackageName" :: tail + } else head :: recur(tail) + case _ => lines + } + recur(lines) + } + /** Insert UnsafeNulls Import after package */ - def insertUnsafeNullsImport(lines: Seq[String]): Seq[String] = { - def recur(ls: Seq[String], foundPackage: Boolean): Seq[String] = ls match { - case Seq(l, rest @ _*) => + def insertUnsafeNullsImport(lines: List[String]): List[String] = { + def recur(ls: List[String], foundPackage: Boolean): List[String] = ls match { + case l :: rest => val lt = l.trim() if (foundPackage) { if (!(lt.isEmpty || lt.startsWith("package "))) - "import scala.language.unsafeNulls" +: ls - else l +: recur(rest, foundPackage) + "import scala.language.unsafeNulls" :: ls + else l :: recur(rest, foundPackage) } else { if (lt.startsWith("package ")) l +: recur(rest, true) - else l +: recur(rest, foundPackage) + else l :: recur(rest, foundPackage) } case _ => ls } @@ -694,9 +720,9 @@ object Build { libraryDependencies ++= Seq( "org.scala-lang.modules" % "scala-asm" % "9.7.0-scala-2", // used by the backend Dependencies.compilerInterface, - "org.jline" % "jline-reader" % "3.25.1", // used by the REPL - "org.jline" % "jline-terminal" % "3.25.1", - "org.jline" % "jline-terminal-jna" % "3.25.1", // needed for Windows + "org.jline" % "jline-reader" % "3.27.0", // used by the REPL + "org.jline" % "jline-terminal" % "3.27.0", + "org.jline" % "jline-terminal-jna" % "3.27.0", // needed for Windows ("io.get-coursier" %% "coursier" % "2.0.16" % Test).cross(CrossVersion.for3Use2_13), ), @@ -928,7 +954,10 @@ object Build { val sjsSources = (trgDir ** "*.scala").get.toSet sjsSources.foreach(f => { val lines = IO.readLines(f) - IO.writeLines(f, insertUnsafeNullsImport(lines)) + val linesWithPackage = replacePackage(lines) { + case "org.scalajs.ir" => "dotty.tools.sjs.ir" + } + IO.writeLines(f, insertUnsafeNullsImport(linesWithPackage)) }) sjsSources } (Set(scalaJSIRSourcesJar)).toSeq @@ -1405,7 +1434,7 @@ object Build { BuildInfoPlugin.buildInfoDefaultSettings lazy val presentationCompilerSettings = { - val mtagsVersion = "1.3.4" + val mtagsVersion = "1.3.5" Seq( libraryDependencies ++= Seq( "org.lz4" % "lz4-java" % "1.8.0", @@ -1415,7 +1444,7 @@ object Build { .exclude("org.eclipse.lsp4j","org.eclipse.lsp4j.jsonrpc"), "org.eclipse.lsp4j" % "org.eclipse.lsp4j" % "0.20.1", ), - libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.14" % mtagsVersion % SourceDeps), + libraryDependencies += ("org.scalameta" % "mtags-shared_2.13.15" % mtagsVersion % SourceDeps), ivyConfigurations += SourceDeps.hide, transitiveClassifiers := Seq("sources"), scalacOptions ++= Seq("-source", "3.3"), // To avoid fatal migration warnings diff --git a/scaladoc/resources/dotty_res/scripts/ux.js b/scaladoc/resources/dotty_res/scripts/ux.js index 7b875fbcef8e..97f9bf14939d 100644 --- a/scaladoc/resources/dotty_res/scripts/ux.js +++ b/scaladoc/resources/dotty_res/scripts/ux.js @@ -10,7 +10,7 @@ const attrsToCopy = [ /** * @typedef {Object} SavedPageState - * @property {Strign} mainDiv + * @property {String} mainDiv * @property 
{String} leftColumn * @property {String} title * @property {Record} attrs @@ -322,7 +322,7 @@ function attachAllListeners() { if (location.hash) { var target = location.hash.substring(1); - // setting the 'expand' class on the top-level container causes undesireable styles + // setting the 'expand' class on the top-level container causes undesirable styles // to apply to the top-level docs, so we avoid this logic for that element. if (target != "container") { var selected = document.getElementById(location.hash.substring(1)); @@ -568,7 +568,7 @@ function showGraph() { .attr("offset", "30%"); radialGradient .append("stop") - .attr("stop-color", "var(--background-default)") + .attr("stop-color", "var(--background-main)") .attr("offset", "100%"); var inner = svg.append("g"); diff --git a/tests/init-global/pos/i18629.scala b/tests/init-global/pos/i18629.scala index f97c21ee918d..03f1f5d5cda4 100644 --- a/tests/init-global/pos/i18629.scala +++ b/tests/init-global/pos/i18629.scala @@ -1,6 +1,6 @@ object Foo { val bar = List() match { case List() => ??? - case _ => ??? + case null => ??? } } diff --git a/tests/patmat/i13931.scala b/tests/patmat/i13931.scala index 0d8d9eb9dcd3..562f059771c1 100644 --- a/tests/patmat/i13931.scala +++ b/tests/patmat/i13931.scala @@ -3,5 +3,5 @@ class Test: case Seq() => println("empty") case _ => println("non-empty") - def test2 = IndexedSeq() match { case IndexedSeq() => case _ => } + def test2 = IndexedSeq() match { case IndexedSeq() => case null => } def test3 = IndexedSeq() match { case IndexedSeq(1) => case _ => } diff --git a/tests/plugins/run/scriptWrapper/Framework_1.scala b/tests/plugins/run/scriptWrapper/Framework_1.scala new file mode 100644 index 000000000000..c8a15de8342b --- /dev/null +++ b/tests/plugins/run/scriptWrapper/Framework_1.scala @@ -0,0 +1,3 @@ +package framework + +class entrypoint extends scala.annotation.Annotation diff --git a/tests/plugins/run/scriptWrapper/LineNumberPlugin_1.scala b/tests/plugins/run/scriptWrapper/LineNumberPlugin_1.scala new file mode 100644 index 000000000000..888d5f95838d --- /dev/null +++ b/tests/plugins/run/scriptWrapper/LineNumberPlugin_1.scala @@ -0,0 +1,68 @@ +package scriptWrapper + +import dotty.tools.dotc.* +import core.* +import Contexts.Context +import Contexts.ctx +import plugins.* +import ast.tpd +import util.SourceFile + +class LineNumberPlugin extends StandardPlugin { + val name: String = "linenumbers" + val description: String = "adjusts line numbers of script files" + + override def initialize(options: List[String])(using Context): List[PluginPhase] = FixLineNumbers() :: Nil +} + +// Loosely follows Mill linenumbers plugin (scan for marker with "original" source, adjust line numbers to match) +class FixLineNumbers extends PluginPhase { + + val codeMarker = "//USER_CODE_HERE" + + def phaseName: String = "fixLineNumbers" + override def runsAfter: Set[String] = Set("posttyper") + override def runsBefore: Set[String] = Set("pickler") + + override def transformUnit(tree: tpd.Tree)(using Context): tpd.Tree = { + val sourceContent = ctx.source.content() + val lines = new String(sourceContent).linesWithSeparators.toVector + val codeMarkerLine = lines.indexWhere(_.startsWith(codeMarker)) + + if codeMarkerLine < 0 then + tree + else + val adjustedFile = lines.collectFirst { + case s"//USER_SRC_FILE:./$file" => file.trim + }.getOrElse("") + + val adjustedSrc = ctx.source.file.container.lookupName(adjustedFile, directory = false) match + case null => + report.error(s"could not find file $adjustedFile", 
tree.sourcePos) + return tree + case file => + SourceFile(file, scala.io.Codec.UTF8) + + val userCodeOffset = ctx.source.lineToOffset(codeMarkerLine + 1) // lines.take(codeMarkerLine).map(_.length).sum + val lineMapper = LineMapper(codeMarkerLine, userCodeOffset, adjustedSrc) + lineMapper.transform(tree) + } + +} + +class LineMapper(markerLine: Int, userCodeOffset: Int, adjustedSrc: SourceFile) extends tpd.TreeMapWithPreciseStatContexts() { + + override def transform(tree: tpd.Tree)(using Context): tpd.Tree = { + val tree0 = super.transform(tree) + val pos = tree0.sourcePos + if pos.exists && pos.start >= userCodeOffset then + val tree1 = tree0.cloneIn(adjustedSrc).withSpan(pos.span.shift(-userCodeOffset)) + // if tree1.show.toString == "???" then + // val pos1 = tree1.sourcePos + // sys.error(s"rewrote ??? at ${pos1.source}:${pos1.line + 1}:${pos1.column + 1} (sourced from ${markerLine + 2})") + tree1 + else + tree0 + } + +} diff --git a/tests/plugins/run/scriptWrapper/Test_3.scala b/tests/plugins/run/scriptWrapper/Test_3.scala new file mode 100644 index 000000000000..341af27ee433 --- /dev/null +++ b/tests/plugins/run/scriptWrapper/Test_3.scala @@ -0,0 +1,25 @@ +@main def Test: Unit = { + val mainCls = Class.forName("foo_sc") + val mainMethod = mainCls.getMethod("main", classOf[Array[String]]) + val stackTrace: Array[String] = { + try + mainMethod.invoke(null, Array.empty[String]) + sys.error("Expected an exception") + catch + case e: java.lang.reflect.InvocationTargetException => + val cause = e.getCause + if cause != null then + cause.getStackTrace.map(_.toString) + else + throw e + } + + val expected = Set( + "foo_sc$.getRandom(foo_2.scala:3)", // adjusted line number (11 -> 3) + "foo_sc$.brokenRandom(foo_2.scala:5)", // adjusted line number (13 -> 5) + "foo_sc$.run(foo_2.scala:8)", // adjusted line number (16 -> 8) + ) + + val missing = expected -- stackTrace + assert(missing.isEmpty, s"Missing: $missing") +} diff --git a/tests/plugins/run/scriptWrapper/foo_2.scala b/tests/plugins/run/scriptWrapper/foo_2.scala new file mode 100644 index 000000000000..02e3f034e757 --- /dev/null +++ b/tests/plugins/run/scriptWrapper/foo_2.scala @@ -0,0 +1,18 @@ +// generated code +// script: foo.sc +object foo_sc { +def main(args: Array[String]): Unit = { + run // assume some macro generates this by scanning for @entrypoint +} +//USER_SRC_FILE:./foo_original_2.scala +//USER_CODE_HERE +import framework.* + +def getRandom: Int = brokenRandom // LINE 3; + +def brokenRandom: Int = ??? // LINE 5; + +@entrypoint +def run = println("Hello, here is a random number: " + getRandom) // LINE 8; +//END_USER_CODE_HERE +} diff --git a/tests/plugins/run/scriptWrapper/foo_original_2.scala b/tests/plugins/run/scriptWrapper/foo_original_2.scala new file mode 100644 index 000000000000..162ddd1724a1 --- /dev/null +++ b/tests/plugins/run/scriptWrapper/foo_original_2.scala @@ -0,0 +1,8 @@ +import framework.* + +def getRandom: Int = brokenRandom // LINE 3; + +def brokenRandom: Int = ??? 
// LINE 5; + +@entrypoint +def run = println("Hello, here is a random number: " + getRandom) // LINE 8; diff --git a/tests/plugins/run/scriptWrapper/plugin.properties b/tests/plugins/run/scriptWrapper/plugin.properties new file mode 100644 index 000000000000..f1fc6067e611 --- /dev/null +++ b/tests/plugins/run/scriptWrapper/plugin.properties @@ -0,0 +1 @@ +pluginClass=scriptWrapper.LineNumberPlugin diff --git a/tests/pos-macros/i20574/Exports.scala b/tests/pos-macros/i20574/Exports.scala new file mode 100644 index 000000000000..328d832fad88 --- /dev/null +++ b/tests/pos-macros/i20574/Exports.scala @@ -0,0 +1,3 @@ +object Exports{ + export OverloadedInline.* +} diff --git a/tests/pos-macros/i20574/Macros.scala b/tests/pos-macros/i20574/Macros.scala new file mode 100644 index 000000000000..a40c1f361ce1 --- /dev/null +++ b/tests/pos-macros/i20574/Macros.scala @@ -0,0 +1,20 @@ +import scala.quoted.* + +object Macros{ + + inline def A() : String = { + ${ A_impl } + } + + def A_impl(using Quotes): Expr[String] = { + Expr("Whatever") + } + + inline def B[T]: Int = { + ${ B_Impl[T] } + } + + def B_Impl[T](using Quotes): Expr[Int] = { + Expr(0) + } +} diff --git a/tests/pos-macros/i20574/OverloadedInline.scala b/tests/pos-macros/i20574/OverloadedInline.scala new file mode 100644 index 000000000000..5bf2347c45c0 --- /dev/null +++ b/tests/pos-macros/i20574/OverloadedInline.scala @@ -0,0 +1,13 @@ +import Macros.* + +object OverloadedInline{ + + A() + inline def overloaded_inline[T]: Unit = { + overloaded_inline[T](0) + } + + inline def overloaded_inline[T](dummy: Int): Unit = { + val crash = B[T] + } +} diff --git a/tests/pos-macros/i20574/Test.scala b/tests/pos-macros/i20574/Test.scala new file mode 100644 index 000000000000..abc2b4eb0bc9 --- /dev/null +++ b/tests/pos-macros/i20574/Test.scala @@ -0,0 +1,5 @@ +import Exports.* + +object Test { + overloaded_inline[Unit] +} diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala b/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala index c670b2de97b1..81f09b082850 100644 --- a/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala +++ b/tests/pos-with-compiler-cc/backend/sjs/JSCodeGen.scala @@ -25,12 +25,12 @@ import dotty.tools.dotc.transform.SymUtils._ import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.report -import org.scalajs.ir -import org.scalajs.ir.{ClassKind, Position, Names => jsNames, Trees => js, Types => jstpe} -import org.scalajs.ir.Names.{ClassName, MethodName, SimpleMethodName} -import org.scalajs.ir.OriginalName -import org.scalajs.ir.OriginalName.NoOriginalName -import org.scalajs.ir.Trees.OptimizerHints +import dotty.tools.sjs.ir +import dotty.tools.sjs.ir.{ClassKind, Position, Names => jsNames, Trees => js, Types => jstpe} +import dotty.tools.sjs.ir.Names.{ClassName, MethodName, SimpleMethodName} +import dotty.tools.sjs.ir.OriginalName +import dotty.tools.sjs.ir.OriginalName.NoOriginalName +import dotty.tools.sjs.ir.Trees.OptimizerHints import dotty.tools.dotc.transform.sjs.JSSymUtils._ diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala b/tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala index 73a150c60290..518295543610 100644 --- a/tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala +++ b/tests/pos-with-compiler-cc/backend/sjs/JSEncoding.scala @@ -15,12 +15,12 @@ import StdNames._ import dotty.tools.dotc.transform.sjs.JSSymUtils._ -import org.scalajs.ir -import org.scalajs.ir.{Trees => js, Types => jstpe} -import org.scalajs.ir.Names.{LocalName, LabelName, FieldName, 
SimpleMethodName, MethodName, ClassName} -import org.scalajs.ir.OriginalName -import org.scalajs.ir.OriginalName.NoOriginalName -import org.scalajs.ir.UTF8String +import dotty.tools.sjs.ir +import dotty.tools.sjs.ir.{Trees => js, Types => jstpe} +import dotty.tools.sjs.ir.Names.{LocalName, LabelName, FieldName, SimpleMethodName, MethodName, ClassName} +import dotty.tools.sjs.ir.OriginalName +import dotty.tools.sjs.ir.OriginalName.NoOriginalName +import dotty.tools.sjs.ir.UTF8String import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala b/tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala index 78412999bb34..82b69e6a16a7 100644 --- a/tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala +++ b/tests/pos-with-compiler-cc/backend/sjs/JSExportsGen.scala @@ -22,11 +22,11 @@ import TypeErasure.ErasedValueType import dotty.tools.dotc.util.{SourcePosition, SrcPos} import dotty.tools.dotc.report -import org.scalajs.ir.{Position, Names => jsNames, Trees => js, Types => jstpe} -import org.scalajs.ir.Names.DefaultModuleID -import org.scalajs.ir.OriginalName.NoOriginalName -import org.scalajs.ir.Position.NoPosition -import org.scalajs.ir.Trees.OptimizerHints +import dotty.tools.sjs.ir.{Position, Names => jsNames, Trees => js, Types => jstpe} +import dotty.tools.sjs.ir.Names.DefaultModuleID +import dotty.tools.sjs.ir.OriginalName.NoOriginalName +import dotty.tools.sjs.ir.Position.NoPosition +import dotty.tools.sjs.ir.Trees.OptimizerHints import dotty.tools.dotc.transform.sjs.JSExportUtils._ import dotty.tools.dotc.transform.sjs.JSSymUtils._ @@ -924,7 +924,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { InstanceOfTypeTest(tpe.tycon.typeSymbol.typeRef) case _ => - import org.scalajs.ir.Names + import dotty.tools.sjs.ir.Names (toIRType(tpe): @unchecked) match { case jstpe.AnyType => NoTypeTest diff --git a/tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala b/tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala index 2fd007165952..620e76ab4bab 100644 --- a/tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala +++ b/tests/pos-with-compiler-cc/backend/sjs/JSPositions.scala @@ -13,7 +13,7 @@ import dotty.tools.dotc.report import dotty.tools.dotc.util.{SourceFile, SourcePosition} import dotty.tools.dotc.util.Spans.Span -import org.scalajs.ir +import dotty.tools.sjs.ir /** Conversion utilities from dotty Positions to IR Positions. */ class JSPositions()(using Context) { diff --git a/tests/pos/21681d.scala b/tests/pos/21681d.scala new file mode 100644 index 000000000000..97a01dec74aa --- /dev/null +++ b/tests/pos/21681d.scala @@ -0,0 +1,16 @@ +def test1() = + class Person: + def age: Int = ??? + def age_=(x: Int): Unit = ??? + + val person = Person() + + (person.age = 29) // no warn (interpreted as `person.age_=(29)`) + +def test2() = + class Person: + var age: Int = 28 + + val person = Person() + + (person.age = 29) // no warn (interpreted as `person.age_=(29)`) diff --git a/tests/pos/i20952.scala b/tests/pos/i20952.scala new file mode 100644 index 000000000000..68344fdab672 --- /dev/null +++ b/tests/pos/i20952.scala @@ -0,0 +1,20 @@ +package object packer: // the super class needs to be in a different package + class SuperClass(): + protected val problem: Any = ??? // needs to be protected + +class SuperClass(): + protected val problem: Any = ??? 
// needs to be protected + +// type Target = SuperClass // passes +type Target = packer.SuperClass // error + +trait Child extends Target: + + val aliased: problem.type = problem + type Alias = problem.type + + val newProblem: Any {val prog: problem.type} = ??? // error + val newProblem2: Any {val prog: Alias} = ??? // passes + val newProblem3: Any {val prog: aliased.type} = ??? // passes + +class ChildImpl extends Target with Child // concrete implementation is needed diff --git a/tests/pos/i21757.scala b/tests/pos/i21757.scala new file mode 100644 index 000000000000..7595540c4b58 --- /dev/null +++ b/tests/pos/i21757.scala @@ -0,0 +1,33 @@ +object ConversionChain { + + class X(val value: Int) + + class Y(val x: X) + + class Z(val y: Y) + + trait Conv[A, B] extends Conversion[A, B] + + given xy: Conv[X, Y] = { (x: X) => new Y(x) } + + given yz: Conv[Y, Z] = { (y: Y) => new Z(y) } + + object ConvUtils { + implicit def hypotheticalSyllogism[A, B, C]( // implicit def instead of given + using + ab: Conv[A, B], + bc: Conv[B, C] + ): Conv[A, C] = { + + new Conv[A, C] { + def apply(a: A): C = bc(ab(a)) + } + } + } + import ConvUtils.hypotheticalSyllogism + + def test(): Unit = { + val x = new X(42) + val z: Z = x + } +} diff --git a/tests/pos/i21779/Macro_1.scala b/tests/pos/i21779/Macro_1.scala new file mode 100644 index 000000000000..d40f5e28de5e --- /dev/null +++ b/tests/pos/i21779/Macro_1.scala @@ -0,0 +1,36 @@ +import scala.quoted.* + +object Macro: + transparent inline def tupleXxl: Tuple = + ${tupleXxlExpr} + + def tupleXxlExpr(using Quotes) = + import quotes.reflect.* + Expr.ofTupleFromSeq( + Seq( + Expr("a"), + Expr(2), + Expr(3), + Expr(4), + Expr(5), + Expr(6), + Expr(7), + Expr(8), + Expr(9), + Expr(10), + Expr(11), + Expr(12), + Expr(13), + Expr(14), + Expr(15), + Expr(16), + Expr(17), + Expr(18), + Expr(19), + Expr(20), + Expr(21), + Expr(22), + Expr(23), + ) + ) + diff --git a/tests/pos/i21779/Test_2.scala b/tests/pos/i21779/Test_2.scala new file mode 100644 index 000000000000..e614e3ca564d --- /dev/null +++ b/tests/pos/i21779/Test_2.scala @@ -0,0 +1,3 @@ +object Test: + val result: ("a", 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23) = + Macro.tupleXxl diff --git a/tests/pos/so68877939.scala b/tests/pos/so68877939.scala new file mode 100644 index 000000000000..41c5ee8ac659 --- /dev/null +++ b/tests/pos/so68877939.scala @@ -0,0 +1,15 @@ +abstract class Quantity[A <: Quantity[A]] +sealed trait UnitOfMeasure[A <: Quantity[A]] + +class Time extends Quantity[Time] +object Minutes extends UnitOfMeasure[Time] + +class PowerRamp extends Quantity[PowerRamp] +object KilowattsPerHour extends UnitOfMeasure[PowerRamp] + +type Test[X <: UnitOfMeasure[?]] = X match + case UnitOfMeasure[t] => t + +@main def main = + summon[Test[Minutes.type] =:= Time] + summon[Test[KilowattsPerHour.type] =:= PowerRamp] diff --git a/tests/printing/transformed/lazy-vals-legacy.check b/tests/printing/transformed/lazy-vals-legacy.check index 2768a89b9c9e..d8969619d2b2 100644 --- a/tests/printing/transformed/lazy-vals-legacy.check +++ b/tests/printing/transformed/lazy-vals-legacy.check @@ -1,4 +1,4 @@ -[[syntax trees at end of MegaPhase{dropOuterAccessors, checkNoSuperThis, flatten, transformWildcards, moveStatic, expandPrivate, restoreScopes, selectStatic, Collect entry points, collectSuperCalls, repeatableAnnotations}]] // tests/printing/transformed/lazy-vals-legacy.scala +[[syntax trees at end of MegaPhase{dropOuterAccessors, dropParentRefinements, checkNoSuperThis, flatten, 
transformWildcards, moveStatic, expandPrivate, restoreScopes, selectStatic, Collect entry points, collectSuperCalls, repeatableAnnotations}]] // tests/printing/transformed/lazy-vals-legacy.scala package { @SourceFile("tests/printing/transformed/lazy-vals-legacy.scala") final module class A extends Object { diff --git a/tests/printing/transformed/lazy-vals-new.check b/tests/printing/transformed/lazy-vals-new.check index 05471e5677dc..8997f0109e79 100644 --- a/tests/printing/transformed/lazy-vals-new.check +++ b/tests/printing/transformed/lazy-vals-new.check @@ -1,4 +1,4 @@ -[[syntax trees at end of MegaPhase{dropOuterAccessors, checkNoSuperThis, flatten, transformWildcards, moveStatic, expandPrivate, restoreScopes, selectStatic, Collect entry points, collectSuperCalls, repeatableAnnotations}]] // tests/printing/transformed/lazy-vals-new.scala +[[syntax trees at end of MegaPhase{dropOuterAccessors, dropParentRefinements, checkNoSuperThis, flatten, transformWildcards, moveStatic, expandPrivate, restoreScopes, selectStatic, Collect entry points, collectSuperCalls, repeatableAnnotations}]] // tests/printing/transformed/lazy-vals-new.scala package { @SourceFile("tests/printing/transformed/lazy-vals-new.scala") final module class A extends Object { diff --git a/tests/run/i21213-min.check b/tests/run/i21213-min.check new file mode 100644 index 000000000000..5716ca5987cb --- /dev/null +++ b/tests/run/i21213-min.check @@ -0,0 +1 @@ +bar diff --git a/tests/run/i21213-min.scala b/tests/run/i21213-min.scala new file mode 100644 index 000000000000..0f6aa6f8ddd5 --- /dev/null +++ b/tests/run/i21213-min.scala @@ -0,0 +1,9 @@ +import scala.language.experimental.modularity +import scala.language.future + +sealed abstract class Foo(tracked val discriminator: String) +class Bar extends Foo("bar") + +val bar: Foo = Bar() +object Test extends App: + println(bar.discriminator) diff --git a/tests/run/i21213.check b/tests/run/i21213.check new file mode 100644 index 000000000000..5716ca5987cb --- /dev/null +++ b/tests/run/i21213.check @@ -0,0 +1 @@ +bar diff --git a/tests/run/i21213.scala b/tests/run/i21213.scala new file mode 100644 index 000000000000..ec609afd7da3 --- /dev/null +++ b/tests/run/i21213.scala @@ -0,0 +1,10 @@ +import scala.language.experimental.modularity +import scala.language.future + +enum Foo(tracked val discriminator: String): + case Bar() extends Foo("bar") + case Baz() extends Foo("baz") + +val bar: Foo = Foo.Bar() +object Test extends App: + println(bar.discriminator) diff --git a/tests/warn/21681.check b/tests/warn/21681.check new file mode 100644 index 000000000000..e86ce4e36134 --- /dev/null +++ b/tests/warn/21681.check @@ -0,0 +1,7 @@ +-- [E203] Syntax Migration Warning: tests/warn/21681.scala:3:2 --------------------------------------------------------- +3 | (age = 29) // warn + | ^^^^^^^^^^ + | Ambiguous syntax: this is interpreted as a named tuple with one element, + | not as an assignment. + | + | To assign a value, use curly braces: `{age = 29}`. 
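For readers scanning the new warning tests: E203 fires because a parenthesized `name = value` now parses as a one-element named tuple instead of an assignment, and the message suggests braces as the unambiguous form. A minimal sketch of the distinction (illustrative only, mirroring the `21681`/`21770` test cases; it assumes a compiler recent enough to emit E203):

```scala
def e203Demo(): Unit =
  var age: Int = 28
  (age = 29) // warns (E203): read as a one-element named tuple, not as an assignment
  age = 29   // a plain assignment needs no parentheses and does not warn
  // Where an expression is syntactically required, the warning's suggestion applies,
  // e.g. `f(i => {cache = Some(i)})` instead of `f(i => (cache = Some(i)))` as in 21770.
  println(age)
```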
diff --git a/tests/warn/21681.scala b/tests/warn/21681.scala new file mode 100644 index 000000000000..76a19c96e1cb --- /dev/null +++ b/tests/warn/21681.scala @@ -0,0 +1,3 @@ +def main() = + var age: Int = 28 + (age = 29) // warn diff --git a/tests/warn/21681b.check b/tests/warn/21681b.check new file mode 100644 index 000000000000..32760e00ebb6 --- /dev/null +++ b/tests/warn/21681b.check @@ -0,0 +1,7 @@ +-- [E203] Syntax Migration Warning: tests/warn/21681b.scala:3:2 -------------------------------------------------------- +3 | (age = 29) // warn + | ^^^^^^^^^^ + | Ambiguous syntax: this is interpreted as a named tuple with one element, + | not as an assignment. + | + | To assign a value, use curly braces: `{age = 29}`. diff --git a/tests/warn/21681b.scala b/tests/warn/21681b.scala new file mode 100644 index 000000000000..710d69b0dd23 --- /dev/null +++ b/tests/warn/21681b.scala @@ -0,0 +1,3 @@ +object Test: + var age: Int = 28 + (age = 29) // warn diff --git a/tests/warn/21681c.check b/tests/warn/21681c.check new file mode 100644 index 000000000000..11c427f87cfe --- /dev/null +++ b/tests/warn/21681c.check @@ -0,0 +1,7 @@ +-- [E203] Syntax Migration Warning: tests/warn/21681c.scala:5:2 -------------------------------------------------------- +5 | (age = 29) // warn + | ^^^^^^^^^^ + | Ambiguous syntax: this is interpreted as a named tuple with one element, + | not as an assignment. + | + | To assign a value, use curly braces: `{age = 29}`. diff --git a/tests/warn/21681c.scala b/tests/warn/21681c.scala new file mode 100644 index 000000000000..5e2eae11708c --- /dev/null +++ b/tests/warn/21681c.scala @@ -0,0 +1,5 @@ +object Test: + def age: Int = ??? + def age_=(x: Int): Unit = () + age = 29 + (age = 29) // warn diff --git a/tests/warn/21770.check b/tests/warn/21770.check new file mode 100644 index 000000000000..0899f11d6ca5 --- /dev/null +++ b/tests/warn/21770.check @@ -0,0 +1,7 @@ +-- [E203] Syntax Migration Warning: tests/warn/21770.scala:5:9 --------------------------------------------------------- +5 | f(i => (cache = Some(i))) // warn + | ^^^^^^^^^^^^^^^^^ + | Ambiguous syntax: this is interpreted as a named tuple with one element, + | not as an assignment. + | + | To assign a value, use curly braces: `{cache = Some(i)}`. diff --git a/tests/warn/21770.scala b/tests/warn/21770.scala new file mode 100644 index 000000000000..9696a31d6ba8 --- /dev/null +++ b/tests/warn/21770.scala @@ -0,0 +1,5 @@ +def f(g: Int => Unit) = g(0) + +def test = + var cache: Option[Int] = None + f(i => (cache = Some(i))) // warn diff --git a/tests/warn/patmat-nothing-exhaustive.scala b/tests/warn/patmat-nothing-exhaustive.scala new file mode 100644 index 000000000000..4e9181256fda --- /dev/null +++ b/tests/warn/patmat-nothing-exhaustive.scala @@ -0,0 +1,10 @@ +enum TestAdt: + case Inhabited + case Uninhabited(no: Nothing) + +def test1(t: TestAdt): Int = t match + case TestAdt.Inhabited => 1 + +def test2(o: Option[Option[Nothing]]): Int = o match + case Some(None) => 1 + case None => 2
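The final test above relies on the exhaustivity checker treating case classes with a field of type `Nothing` (and values such as `Some[Nothing]`) as uninhabited, so the omitted cases do not trigger a warning. A hedged sketch of the same idea with illustrative names; on a compiler with this behaviour it compiles without an exhaustivity warning:

```scala
sealed trait Result
case class Ok(value: Int) extends Result
case class Never(evidence: Nothing) extends Result // uninhabited: no value of type Nothing exists

def describe(r: Result): String = r match
  case Ok(v) => s"ok: $v"
  // No `Never` case is needed: it can never be constructed,
  // so the match is still considered exhaustive.
```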