diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 0c20b85acbd565..dc2f4858be6e8c 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -22,25 +22,25 @@ env: FORCE_COLOR: 1 jobs: - check_source: + build-context: name: Change detection # To use boolean outputs from this job, parse them as JSON. # Here's some examples: # - # if: fromJSON(needs.check_source.outputs.run-docs) + # if: fromJSON(needs.build-context.outputs.run-docs) # # ${{ - # fromJSON(needs.check_source.outputs.run_tests) + # fromJSON(needs.build-context.outputs.run-tests) # && 'truthy-branch' # || 'falsy-branch' # }} # - uses: ./.github/workflows/reusable-change-detection.yml + uses: ./.github/workflows/reusable-context.yml check-docs: name: Docs - needs: check_source - if: fromJSON(needs.check_source.outputs.run-docs) + needs: build-context + if: fromJSON(needs.build-context.outputs.run-docs) uses: ./.github/workflows/reusable-docs.yml check_autoconf_regen: @@ -51,8 +51,8 @@ jobs: container: image: ghcr.io/python/autoconf:2025.01.02.12581854023 timeout-minutes: 60 - needs: check_source - if: needs.check_source.outputs.run_tests == 'true' + needs: build-context + if: needs.build-context.outputs.run-tests == 'true' steps: - name: Install Git run: | @@ -94,8 +94,8 @@ jobs: # reproducible: to get the same tools versions (autoconf, aclocal, ...) runs-on: ubuntu-24.04 timeout-minutes: 60 - needs: check_source - if: needs.check_source.outputs.run_tests == 'true' + needs: build-context + if: needs.build-context.outputs.run-tests == 'true' steps: - uses: actions/checkout@v4 with: @@ -110,7 +110,7 @@ jobs: with: path: config.cache # Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python - key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.check_source.outputs.config_hash }}-${{ env.pythonLocation }} + key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.build-context.outputs.config-hash }}-${{ env.pythonLocation }} - name: Install Dependencies run: sudo ./.github/workflows/posix-deps-apt.sh - name: Add ccache to PATH @@ -153,8 +153,8 @@ jobs: name: >- Windows ${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }} - needs: check_source - if: fromJSON(needs.check_source.outputs.run_tests) + needs: build-context + if: fromJSON(needs.build-context.outputs.run-tests) strategy: fail-fast: false matrix: @@ -184,8 +184,8 @@ jobs: build_windows_msi: name: >- # ${{ '' } is a hack to nest jobs under the same sidebar category Windows MSI${{ '' }} - needs: check_source - if: fromJSON(needs.check_source.outputs.run-win-msi) + needs: build-context + if: fromJSON(needs.build-context.outputs.run-windows-msi) strategy: matrix: arch: @@ -200,8 +200,8 @@ jobs: name: >- macOS ${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }} - needs: check_source - if: needs.check_source.outputs.run_tests == 'true' + needs: build-context + if: needs.build-context.outputs.run-tests == 'true' strategy: fail-fast: false matrix: @@ -226,7 +226,7 @@ jobs: free-threading: true uses: ./.github/workflows/reusable-macos.yml with: - config_hash: ${{ needs.check_source.outputs.config_hash }} + config_hash: ${{ needs.build-context.outputs.config-hash }} free-threading: ${{ matrix.free-threading }} os: ${{ matrix.os }} @@ -235,8 +235,8 @@ jobs: Ubuntu ${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }} ${{ fromJSON(matrix.bolt) && '(bolt)' || '' }} - needs: check_source - if: 
needs.check_source.outputs.run_tests == 'true' + needs: build-context + if: needs.build-context.outputs.run-tests == 'true' strategy: matrix: bolt: @@ -257,7 +257,7 @@ jobs: bolt: true uses: ./.github/workflows/reusable-ubuntu.yml with: - config_hash: ${{ needs.check_source.outputs.config_hash }} + config_hash: ${{ needs.build-context.outputs.config-hash }} bolt-optimizations: ${{ matrix.bolt }} free-threading: ${{ matrix.free-threading }} os: ${{ matrix.os }} @@ -266,8 +266,8 @@ jobs: name: 'Ubuntu SSL tests with OpenSSL' runs-on: ${{ matrix.os }} timeout-minutes: 60 - needs: check_source - if: needs.check_source.outputs.run_tests == 'true' + needs: build-context + if: needs.build-context.outputs.run-tests == 'true' strategy: fail-fast: false matrix: @@ -289,7 +289,7 @@ jobs: uses: actions/cache@v4 with: path: config.cache - key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.check_source.outputs.config_hash }} + key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.build-context.outputs.config-hash }} - name: Register gcc problem matcher run: echo "::add-matcher::.github/problem-matchers/gcc.json" - name: Install Dependencies @@ -326,18 +326,18 @@ jobs: build_wasi: name: 'WASI' - needs: check_source - if: needs.check_source.outputs.run_tests == 'true' + needs: build-context + if: needs.build-context.outputs.run-tests == 'true' uses: ./.github/workflows/reusable-wasi.yml with: - config_hash: ${{ needs.check_source.outputs.config_hash }} + config_hash: ${{ needs.build-context.outputs.config-hash }} test_hypothesis: name: "Hypothesis tests on Ubuntu" runs-on: ubuntu-24.04 timeout-minutes: 60 - needs: check_source - if: needs.check_source.outputs.run_tests == 'true' && needs.check_source.outputs.run_hypothesis == 'true' + needs: build-context + if: needs.build-context.outputs.run-tests == 'true' env: OPENSSL_VER: 3.0.15 PYTHONSTRICTEXTENSIONBUILD: 1 @@ -384,7 +384,7 @@ jobs: uses: actions/cache@v4 with: path: ${{ env.CPYTHON_BUILDDIR }}/config.cache - key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.check_source.outputs.config_hash }} + key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.build-context.outputs.config-hash }} - name: Configure CPython out-of-tree working-directory: ${{ env.CPYTHON_BUILDDIR }} run: | @@ -452,8 +452,8 @@ jobs: name: 'Address sanitizer' runs-on: ${{ matrix.os }} timeout-minutes: 60 - needs: check_source - if: needs.check_source.outputs.run_tests == 'true' + needs: build-context + if: needs.build-context.outputs.run-tests == 'true' strategy: matrix: os: [ubuntu-24.04] @@ -471,7 +471,7 @@ jobs: uses: actions/cache@v4 with: path: config.cache - key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.check_source.outputs.config_hash }} + key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.build-context.outputs.config-hash }} - name: Register gcc problem matcher run: echo "::add-matcher::.github/problem-matchers/gcc.json" - name: Install Dependencies @@ -512,34 +512,67 @@ jobs: run: xvfb-run make ci build_tsan: - name: 'Thread sanitizer' - needs: check_source - if: needs.check_source.outputs.run_tests == 'true' + name: >- + Thread sanitizer + ${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }} + needs: build-context + if: needs.build-context.outputs.run-tests == 'true' + strategy: + matrix: + free-threading: + - false + - true uses: ./.github/workflows/reusable-tsan.yml with: - config_hash: ${{ 
needs.check_source.outputs.config_hash }} - options: ./configure --config-cache --with-thread-sanitizer --with-pydebug - suppressions_path: Tools/tsan/supressions.txt - tsan_logs_artifact_name: tsan-logs-default + config_hash: ${{ needs.build-context.outputs.config-hash }} + free-threading: ${{ matrix.free-threading }} - build_tsan_free_threading: - name: 'Thread sanitizer (free-threading)' - needs: check_source - if: needs.check_source.outputs.run_tests == 'true' - uses: ./.github/workflows/reusable-tsan.yml - with: - config_hash: ${{ needs.check_source.outputs.config_hash }} - options: ./configure --config-cache --disable-gil --with-thread-sanitizer --with-pydebug - suppressions_path: Tools/tsan/suppressions_free_threading.txt - tsan_logs_artifact_name: tsan-logs-free-threading + cross-build-linux: + name: Cross build Linux + runs-on: ubuntu-latest + needs: build-context + if: needs.build-context.outputs.run-tests == 'true' + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + - name: Runner image version + run: echo "IMAGE_VERSION=${ImageVersion}" >> "$GITHUB_ENV" + - name: Restore config.cache + uses: actions/cache@v4 + with: + path: config.cache + key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.build-context.outputs.config-hash }} + - name: Register gcc problem matcher + run: echo "::add-matcher::.github/problem-matchers/gcc.json" + - name: Set build dir + run: + # an absolute path outside of the working directoy + echo "BUILD_DIR=$(realpath ${{ github.workspace }}/../build)" >> "$GITHUB_ENV" + - name: Install Dependencies + run: sudo ./.github/workflows/posix-deps-apt.sh + - name: Configure host build + run: ./configure --prefix="$BUILD_DIR/host-python" + - name: Install host Python + run: make -j8 install + - name: Run test subset with host build + run: | + "$BUILD_DIR/host-python/bin/python3" -m test test_sysconfig test_site test_embed + - name: Configure cross build + run: ./configure --prefix="$BUILD_DIR/cross-python" --with-build-python="$BUILD_DIR/host-python/bin/python3" + - name: Install cross Python + run: make -j8 install + - name: Run test subset with host build + run: | + "$BUILD_DIR/cross-python/bin/python3" -m test test_sysconfig test_site test_embed # CIFuzz job based on https://google.github.io/oss-fuzz/getting-started/continuous-integration/ cifuzz: name: CIFuzz runs-on: ubuntu-latest timeout-minutes: 60 - needs: check_source - if: needs.check_source.outputs.run_cifuzz == 'true' + needs: build-context + if: needs.build-context.outputs.run-ci-fuzz == 'true' permissions: security-events: write strategy: @@ -578,7 +611,7 @@ jobs: if: always() needs: - - check_source # Transitive dependency, needed to access `run_tests` value + - build-context # Transitive dependency, needed to access `run-tests` value - check-docs - check_autoconf_regen - check_generated_files @@ -591,7 +624,6 @@ jobs: - test_hypothesis - build_asan - build_tsan - - build_tsan_free_threading - cifuzz runs-on: ubuntu-latest @@ -607,14 +639,14 @@ jobs: test_hypothesis, allowed-skips: >- ${{ - !fromJSON(needs.check_source.outputs.run-docs) + !fromJSON(needs.build-context.outputs.run-docs) && ' check-docs, ' || '' }} ${{ - needs.check_source.outputs.run_tests != 'true' + needs.build-context.outputs.run-tests != 'true' && ' check_autoconf_regen, check_generated_files, @@ -625,22 +657,15 @@ jobs: build_windows, build_asan, build_tsan, - build_tsan_free_threading, + test_hypothesis, ' || '' }} ${{ - !fromJSON(needs.check_source.outputs.run_cifuzz) + 
!fromJSON(needs.build-context.outputs.run-ci-fuzz) && ' cifuzz, ' || '' }} - ${{ - !fromJSON(needs.check_source.outputs.run_hypothesis) - && ' - test_hypothesis, - ' - || '' - }} jobs: ${{ toJSON(needs) }} diff --git a/.github/workflows/reusable-change-detection.yml b/.github/workflows/reusable-change-detection.yml deleted file mode 100644 index c08c0cb8873f12..00000000000000 --- a/.github/workflows/reusable-change-detection.yml +++ /dev/null @@ -1,173 +0,0 @@ -name: Reusable change detection - -on: # yamllint disable-line rule:truthy - workflow_call: - outputs: - # Some of the referenced steps set outputs conditionally and there may be - # cases when referencing them evaluates to empty strings. It is nice to - # work with proper booleans so they have to be evaluated through JSON - # conversion in the expressions. However, empty strings used like that - # may trigger all sorts of undefined and hard-to-debug behaviors in - # GitHub Actions CI/CD. To help with this, all of the outputs set here - # that are meant to be used as boolean flags (and not arbitrary strings), - # MUST have fallbacks with default values set. A common pattern would be - # to add ` || false` to all such expressions here, in the output - # definitions. They can then later be safely used through the following - # idiom in job conditionals and other expressions. Here's some examples: - # - # if: fromJSON(needs.change-detection.outputs.run-docs) - # - # ${{ - # fromJSON(needs.change-detection.outputs.run-tests) - # && 'truthy-branch' - # || 'falsy-branch' - # }} - # - config_hash: - description: Config hash value for use in cache keys - value: ${{ jobs.compute-changes.outputs.config-hash }} # str - run-docs: - description: Whether to build the docs - value: ${{ jobs.compute-changes.outputs.run-docs || false }} # bool - run_tests: - description: Whether to run the regular tests - value: ${{ jobs.compute-changes.outputs.run-tests || false }} # bool - run-win-msi: - description: Whether to run the MSI installer smoke tests - value: >- # bool - ${{ jobs.compute-changes.outputs.run-win-msi || false }} - run_hypothesis: - description: Whether to run the Hypothesis tests - value: >- # bool - ${{ jobs.compute-changes.outputs.run-hypothesis || false }} - run_cifuzz: - description: Whether to run the CIFuzz job - value: >- # bool - ${{ jobs.compute-changes.outputs.run-cifuzz || false }} - -jobs: - compute-changes: - name: Compute changed files - runs-on: ubuntu-latest - timeout-minutes: 10 - outputs: - config-hash: ${{ steps.config-hash.outputs.hash }} - run-cifuzz: ${{ steps.check.outputs.run-cifuzz }} - run-docs: ${{ steps.docs-changes.outputs.run-docs }} - run-hypothesis: ${{ steps.check.outputs.run-hypothesis }} - run-tests: ${{ steps.check.outputs.run-tests }} - run-win-msi: ${{ steps.win-msi-changes.outputs.run-win-msi }} - steps: - - run: >- - echo '${{ github.event_name }}' - - uses: actions/checkout@v4 - with: - persist-credentials: false - - name: Check for source changes - id: check - run: | - if [ -z "$GITHUB_BASE_REF" ]; then - echo "run-tests=true" >> "$GITHUB_OUTPUT" - else - git fetch origin "$GITHUB_BASE_REF" --depth=1 - # git diff "origin/$GITHUB_BASE_REF..." (3 dots) may be more - # reliable than git diff "origin/$GITHUB_BASE_REF.." (2 dots), - # but it requires to download more commits (this job uses - # "git fetch --depth=1"). - # - # git diff "origin/$GITHUB_BASE_REF..." (3 dots) works with Git - # 2.26, but Git 2.28 is stricter and fails with "no merge base". - # - # git diff "origin/$GITHUB_BASE_REF.." 
(2 dots) should be enough on - # GitHub, since GitHub starts by merging origin/$GITHUB_BASE_REF - # into the PR branch anyway. - # - # https://github.com/python/core-workflow/issues/373 - grep_ignore_args=( - # file extensions - -e '\.md$' - -e '\.rst$' - # top-level folders - -e '^Doc/' - -e '^Misc/' - # configuration files - -e '^\.github/CODEOWNERS$' - -e '^\.pre-commit-config\.yaml$' - -e '\.ruff\.toml$' - -e 'mypy\.ini$' - ) - git diff --name-only "origin/$GITHUB_BASE_REF.." \ - | grep -qvE "${grep_ignore_args[@]}" \ - && echo "run-tests=true" >> "$GITHUB_OUTPUT" || true - fi - - # Check if we should run hypothesis tests - GIT_BRANCH=${GITHUB_BASE_REF:-${GITHUB_REF#refs/heads/}} - echo "$GIT_BRANCH" - if $(echo "$GIT_BRANCH" | grep -q -w '3\.\(8\|9\|10\|11\)'); then - echo "Branch too old for hypothesis tests" - echo "run-hypothesis=false" >> "$GITHUB_OUTPUT" - else - echo "Run hypothesis tests" - echo "run-hypothesis=true" >> "$GITHUB_OUTPUT" - fi - - # oss-fuzz maintains a configuration for fuzzing the main branch of - # CPython, so CIFuzz should be run only for code that is likely to be - # merged into the main branch; compatibility with older branches may - # be broken. - FUZZ_RELEVANT_FILES='(\.c$|\.h$|\.cpp$|^configure$|^\.github/workflows/build\.yml$|^Modules/_xxtestfuzz)' - if [ "$GITHUB_BASE_REF" = "main" ] && [ "$(git diff --name-only "origin/$GITHUB_BASE_REF.." | grep -qE $FUZZ_RELEVANT_FILES; echo $?)" -eq 0 ]; then - # The tests are pretty slow so they are executed only for PRs - # changing relevant files. - echo "Run CIFuzz tests" - echo "run-cifuzz=true" >> "$GITHUB_OUTPUT" - else - echo "Branch too old for CIFuzz tests; or no C files were changed" - echo "run-cifuzz=false" >> "$GITHUB_OUTPUT" - fi - - name: Compute hash for config cache key - id: config-hash - run: | - echo "hash=${{ hashFiles('configure', 'configure.ac', '.github/workflows/build.yml') }}" >> "$GITHUB_OUTPUT" - - name: Get a list of the changed documentation-related files - if: github.event_name == 'pull_request' - id: changed-docs-files - uses: Ana06/get-changed-files@v2.3.0 - with: - filter: | - Doc/** - Misc/** - .github/workflows/reusable-docs.yml - format: csv # works for paths with spaces - - name: Check for docs changes - # We only want to run this on PRs when related files are changed, - # or when user triggers manual workflow run. - if: >- - ( - github.event_name == 'pull_request' - && steps.changed-docs-files.outputs.added_modified_renamed != '' - ) || github.event_name == 'workflow_dispatch' - id: docs-changes - run: | - echo "run-docs=true" >> "${GITHUB_OUTPUT}" - - name: Get a list of the MSI installer-related files - if: github.event_name == 'pull_request' - id: changed-win-msi-files - uses: Ana06/get-changed-files@v2.3.0 - with: - filter: | - Tools/msi/** - .github/workflows/reusable-windows-msi.yml - format: csv # works for paths with spaces - - name: Check for changes in MSI installer-related files - # We only want to run this on PRs when related files are changed, - # or when user triggers manual workflow run. 
- if: >- - ( - github.event_name == 'pull_request' - && steps.changed-win-msi-files.outputs.added_modified_renamed != '' - ) || github.event_name == 'workflow_dispatch' - id: win-msi-changes - run: | - echo "run-win-msi=true" >> "${GITHUB_OUTPUT}" diff --git a/.github/workflows/reusable-context.yml b/.github/workflows/reusable-context.yml new file mode 100644 index 00000000000000..fa4df6f29711db --- /dev/null +++ b/.github/workflows/reusable-context.yml @@ -0,0 +1,100 @@ +name: Reusable build context + +on: # yamllint disable-line rule:truthy + workflow_call: + outputs: + # Every referenced step MUST always set its output variable, + # either via ``Tools/build/compute-changes.py`` or in this workflow file. + # Boolean outputs (generally prefixed ``run-``) can then later be used + # safely through the following idiom in job conditionals and other + # expressions. Here's some examples: + # + # if: fromJSON(needs.build-context.outputs.run-tests) + # + # ${{ + # fromJSON(needs.build-context.outputs.run-tests) + # && 'truthy-branch' + # || 'falsy-branch' + # }} + # + config-hash: + description: Config hash value for use in cache keys + value: ${{ jobs.compute-changes.outputs.config-hash }} # str + run-docs: + description: Whether to build the docs + value: ${{ jobs.compute-changes.outputs.run-docs }} # bool + run-tests: + description: Whether to run the regular tests + value: ${{ jobs.compute-changes.outputs.run-tests }} # bool + run-windows-msi: + description: Whether to run the MSI installer smoke tests + value: ${{ jobs.compute-changes.outputs.run-windows-msi }} # bool + run-ci-fuzz: + description: Whether to run the CIFuzz job + value: ${{ jobs.compute-changes.outputs.run-ci-fuzz }} # bool + +jobs: + compute-changes: + name: Create context from changed files + runs-on: ubuntu-latest + timeout-minutes: 10 + outputs: + config-hash: ${{ steps.config-hash.outputs.hash }} + run-ci-fuzz: ${{ steps.changes.outputs.run-ci-fuzz }} + run-docs: ${{ steps.changes.outputs.run-docs }} + run-tests: ${{ steps.changes.outputs.run-tests }} + run-windows-msi: ${{ steps.changes.outputs.run-windows-msi }} + steps: + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3" + + - run: >- + echo '${{ github.event_name }}' + + - uses: actions/checkout@v4 + with: + persist-credentials: false + ref: >- + ${{ + github.event_name == 'pull_request' + && github.event.pull_request.head.sha + || '' + }} + + # Adapted from https://github.com/actions/checkout/issues/520#issuecomment-1167205721 + - name: Fetch commits to get branch diff + if: github.event_name == 'pull_request' + run: | + set -eux + + # Fetch enough history to find a common ancestor commit (aka merge-base): + git fetch origin "${refspec_pr}" --depth=$(( commits + 1 )) \ + --no-tags --prune --no-recurse-submodules + + # This should get the oldest commit in the local fetched history (which may not be the commit the PR branched from): + COMMON_ANCESTOR=$( git rev-list --first-parent --max-parents=0 --max-count=1 "${branch_pr}" ) + DATE=$( git log --date=iso8601 --format=%cd "${COMMON_ANCESTOR}" ) + + # Get all commits since that commit date from the base branch (eg: main): + git fetch origin "${refspec_base}" --shallow-since="${DATE}" \ + --no-tags --prune --no-recurse-submodules + env: + branch_pr: 'origin/${{ github.event.pull_request.head.ref }}' + commits: ${{ github.event.pull_request.commits }} + refspec_base: '+${{ github.event.pull_request.base.sha }}:remotes/origin/${{ github.event.pull_request.base.ref }}' + refspec_pr: '+${{ 
github.event.pull_request.head.sha }}:remotes/origin/${{ github.event.pull_request.head.ref }}' + + # We only want to run tests on PRs when related files are changed, + # or when someone triggers a manual workflow run. + - name: Compute changed files + id: changes + run: python Tools/build/compute-changes.py + env: + GITHUB_DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} + + - name: Compute hash for config cache key + id: config-hash + run: | + echo "hash=${{ hashFiles('configure', 'configure.ac', '.github/workflows/build.yml') }}" >> "$GITHUB_OUTPUT" diff --git a/.github/workflows/reusable-tsan.yml b/.github/workflows/reusable-tsan.yml index 269f479849f21e..1d2548565d50ef 100644 --- a/.github/workflows/reusable-tsan.yml +++ b/.github/workflows/reusable-tsan.yml @@ -6,17 +6,11 @@ on: config_hash: required: true type: string - options: - required: true - type: string - suppressions_path: - description: 'A repo relative path to the suppressions file' - required: true - type: string - tsan_logs_artifact_name: - description: 'Name of the TSAN logs artifact. Must be unique for each job.' - required: true - type: string + free-threading: + description: Whether to use free-threaded mode + required: false + type: boolean + default: false env: FORCE_COLOR: 1 @@ -26,9 +20,6 @@ jobs: name: 'Thread sanitizer' runs-on: ubuntu-24.04 timeout-minutes: 60 - env: - OPTIONS: ${{ inputs.options }} - SUPPRESSIONS_PATH: ${{ inputs.suppressions_path }} steps: - uses: actions/checkout@v4 with: @@ -55,7 +46,11 @@ jobs: sudo sysctl -w vm.mmap_rnd_bits=28 - name: TSAN Option Setup run: | - echo "TSAN_OPTIONS=log_path=${GITHUB_WORKSPACE}/tsan_log suppressions=${GITHUB_WORKSPACE}/${SUPPRESSIONS_PATH} handle_segv=0" >> "$GITHUB_ENV" + echo "TSAN_OPTIONS=log_path=${GITHUB_WORKSPACE}/tsan_log suppressions=${GITHUB_WORKSPACE}/Tools/tsan/suppressions${{ + fromJSON(inputs.free-threading) + && '_free_threading' + || '' + }}.txt handle_segv=0" >> "$GITHUB_ENV" echo "CC=clang" >> "$GITHUB_ENV" echo "CXX=clang++" >> "$GITHUB_ENV" - name: Add ccache to PATH @@ -67,7 +62,12 @@ jobs: save: ${{ github.event_name == 'push' }} max-size: "200M" - name: Configure CPython - run: "${OPTIONS}" + run: >- + ./configure + --config-cache + --with-thread-sanitizer + --with-pydebug + ${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }} - name: Build CPython run: make -j4 - name: Display build info @@ -81,6 +81,11 @@ jobs: if: always() uses: actions/upload-artifact@v4 with: - name: ${{ inputs.tsan_logs_artifact_name }} + name: >- + tsan-logs-${{ + fromJSON(inputs.free-threading) + && 'free-threading' + || 'default' + }} path: tsan_log.* if-no-files-found: ignore diff --git a/.github/workflows/reusable-windows.yml b/.github/workflows/reusable-windows.yml index bfee3d2722cb44..5485a0169130b0 100644 --- a/.github/workflows/reusable-windows.yml +++ b/.github/workflows/reusable-windows.yml @@ -24,7 +24,7 @@ env: jobs: build: - name: 'build and test (${{ inputs.arch }})' + name: ${{ inputs.arch == 'arm64' && 'build' || 'build and test' }} (${{ inputs.arch }}) runs-on: ${{ inputs.os }} timeout-minutes: 60 env: diff --git a/Doc/c-api/bytearray.rst b/Doc/c-api/bytearray.rst index 9045689a6be567..15295096a710c8 100644 --- a/Doc/c-api/bytearray.rst +++ b/Doc/c-api/bytearray.rst @@ -74,6 +74,11 @@ Direct API functions .. c:function:: int PyByteArray_Resize(PyObject *bytearray, Py_ssize_t len) Resize the internal buffer of *bytearray* to *len*. + Failure is a ``-1`` return with an exception set. + + .. 
versionchanged:: next + A negative *len* will now result in an exception being set and -1 returned. + Macros ^^^^^^ diff --git a/Doc/c-api/import.rst b/Doc/c-api/import.rst index 6e48644c8fef8b..1cab3ce3061ec9 100644 --- a/Doc/c-api/import.rst +++ b/Doc/c-api/import.rst @@ -325,3 +325,24 @@ Importing Modules If Python is initialized multiple times, :c:func:`PyImport_AppendInittab` or :c:func:`PyImport_ExtendInittab` must be called before each Python initialization. + + +.. c:function:: PyObject* PyImport_ImportModuleAttr(PyObject *mod_name, PyObject *attr_name) + + Import the module *mod_name* and get its attribute *attr_name*. + + Names must be Python :class:`str` objects. + + Helper function combining :c:func:`PyImport_Import` and + :c:func:`PyObject_GetAttr`. For example, it can raise :exc:`ImportError` if + the module is not found, and :exc:`AttributeError` if the attribute doesn't + exist. + + .. versionadded:: 3.14 + +.. c:function:: PyObject* PyImport_ImportModuleAttrString(const char *mod_name, const char *attr_name) + + Similar to :c:func:`PyImport_ImportModuleAttr`, but names are UTF-8 encoded + strings instead of Python :class:`str` objects. + + .. versionadded:: 3.14 diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst index dc44f3eaf87765..f90af6a9ce7c26 100644 --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -622,7 +622,8 @@ Process-wide parameters It now returns ``NULL`` if called before :c:func:`Py_Initialize`. .. deprecated-removed:: 3.13 3.15 - Get :data:`sys.executable` instead. + Use :c:func:`PyConfig_Get("executable") ` + (:data:`sys.executable`) instead. .. c:function:: wchar_t* Py_GetPrefix() @@ -644,8 +645,10 @@ Process-wide parameters It now returns ``NULL`` if called before :c:func:`Py_Initialize`. .. deprecated-removed:: 3.13 3.15 - Get :data:`sys.base_prefix` instead, or :data:`sys.prefix` if - :ref:`virtual environments ` need to be handled. + Use :c:func:`PyConfig_Get("base_prefix") ` + (:data:`sys.base_prefix`) instead. Use :c:func:`PyConfig_Get("prefix") + ` (:data:`sys.prefix`) if :ref:`virtual environments + ` need to be handled. .. c:function:: wchar_t* Py_GetExecPrefix() @@ -690,9 +693,11 @@ Process-wide parameters It now returns ``NULL`` if called before :c:func:`Py_Initialize`. .. deprecated-removed:: 3.13 3.15 - Get :data:`sys.base_exec_prefix` instead, or :data:`sys.exec_prefix` if - :ref:`virtual environments ` need to be handled. - + Use :c:func:`PyConfig_Get("base_exec_prefix") ` + (:data:`sys.base_exec_prefix`) instead. Use + :c:func:`PyConfig_Get("exec_prefix") ` + (:data:`sys.exec_prefix`) if :ref:`virtual environments ` need + to be handled. .. c:function:: wchar_t* Py_GetProgramFullPath() @@ -712,7 +717,8 @@ Process-wide parameters It now returns ``NULL`` if called before :c:func:`Py_Initialize`. .. deprecated-removed:: 3.13 3.15 - Get :data:`sys.executable` instead. + Use :c:func:`PyConfig_Get("executable") ` + (:data:`sys.executable`) instead. .. c:function:: wchar_t* Py_GetPath() @@ -740,8 +746,8 @@ Process-wide parameters It now returns ``NULL`` if called before :c:func:`Py_Initialize`. .. deprecated-removed:: 3.13 3.15 - Get :data:`sys.path` instead. - + Use :c:func:`PyConfig_Get("module_search_paths") ` + (:data:`sys.path`) instead. .. c:function:: const char* Py_GetVersion() @@ -926,8 +932,8 @@ Process-wide parameters It now returns ``NULL`` if called before :c:func:`Py_Initialize`. .. deprecated-removed:: 3.13 3.15 - Get :c:member:`PyConfig.home` or :envvar:`PYTHONHOME` environment - variable instead. 
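
A note for readers following the ``Py_Get*`` deprecations in this hunk: the prefix/base-prefix distinction the new wording points to is also visible from pure Python, where comparing ``sys.prefix`` with ``sys.base_prefix`` is the usual way to handle virtual environments. A minimal sketch (illustrative only, not part of the patch), using only documented ``sys`` attributes:

    import sys

    def in_virtual_environment() -> bool:
        """Return True when running inside a venv.

        In a virtual environment sys.prefix points at the venv directory,
        while sys.base_prefix keeps pointing at the base installation;
        outside a venv the two are equal.
        """
        return sys.prefix != sys.base_prefix

    def interpreter_paths() -> dict[str, str]:
        # Python-level counterparts of the values the deprecated
        # Py_Get* functions used to expose.
        return {
            "executable": sys.executable,
            "prefix": sys.prefix,
            "base_prefix": sys.base_prefix,
            "exec_prefix": sys.exec_prefix,
            "base_exec_prefix": sys.base_exec_prefix,
        }

    if __name__ == "__main__":
        print("venv:", in_virtual_environment())
        for name, value in interpreter_paths().items():
            print(f"{name}: {value}")
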
+ Use :c:func:`PyConfig_Get("home") ` or the + :envvar:`PYTHONHOME` environment variable instead. .. _threads: @@ -1495,7 +1501,7 @@ All of the following functions must be called after :c:func:`Py_Initialize`. .. c:function:: PyObject* PyUnstable_InterpreterState_GetMainModule(PyInterpreterState *interp) - Return a :term:`strong reference` to the ``__main__`` `module object `_ + Return a :term:`strong reference` to the ``__main__`` :ref:`module object ` for the given interpreter. The caller must hold the GIL. diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst index f82a050ab75de0..f71089370152ce 100644 --- a/Doc/c-api/module.rst +++ b/Doc/c-api/module.rst @@ -523,9 +523,6 @@ state: On success, return ``0``. On error, raise an exception and return ``-1``. - Return ``-1`` if *value* is ``NULL``. It must be called with an exception - raised in this case. - Example usage:: static int @@ -540,6 +537,10 @@ state: return res; } + To be convenient, the function accepts ``NULL`` *value* with an exception + set. In this case, return ``-1`` and just leave the raised exception + unchanged. + The example can also be written without checking explicitly if *obj* is ``NULL``:: diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst index 934b2ef06d3108..1ba5942c63601d 100644 --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -624,3 +624,84 @@ Object Protocol be immortal in another. .. versionadded:: next + +.. c:function:: int PyUnstable_TryIncRef(PyObject *obj) + + Increments the reference count of *obj* if it is not zero. Returns ``1`` + if the object's reference count was successfully incremented. Otherwise, + this function returns ``0``. + + :c:func:`PyUnstable_EnableTryIncRef` must have been called + earlier on *obj* or this function may spuriously return ``0`` in the + :term:`free threading` build. + + This function is logically equivalent to the following C code, except that + it behaves atomically in the :term:`free threading` build:: + + if (Py_REFCNT(op) > 0) { + Py_INCREF(op); + return 1; + } + return 0; + + This is intended as a building block for managing weak references + without the overhead of a Python :ref:`weak reference object `. + + Typically, correct use of this function requires support from *obj*'s + deallocator (:c:member:`~PyTypeObject.tp_dealloc`). + For example, the following sketch could be adapted to implement a + "weakmap" that works like a :py:class:`~weakref.WeakValueDictionary` + for a specific type: + + .. code-block:: c + + PyMutex mutex; + + PyObject * + add_entry(weakmap_key_type *key, PyObject *value) + { + PyUnstable_EnableTryIncRef(value); + weakmap_type weakmap = ...; + PyMutex_Lock(&mutex); + weakmap_add_entry(weakmap, key, value); + PyMutex_Unlock(&mutex); + Py_RETURN_NONE; + } + + PyObject * + get_value(weakmap_key_type *key) + { + weakmap_type weakmap = ...; + PyMutex_Lock(&mutex); + PyObject *result = weakmap_find(weakmap, key); + if (PyUnstable_TryIncRef(result)) { + // `result` is safe to use + PyMutex_Unlock(&mutex); + return result; + } + // if we get here, `result` is starting to be garbage-collected, + // but has not been removed from the weakmap yet + PyMutex_Unlock(&mutex); + return NULL; + } + + // tp_dealloc function for weakmap values + void + value_dealloc(PyObject *value) + { + weakmap_type weakmap = ...; + PyMutex_Lock(&mutex); + weakmap_remove_value(weakmap, value); + + ... + PyMutex_Unlock(&mutex); + } + + .. versionadded:: 3.14 + +.. 
c:function:: void PyUnstable_EnableTryIncRef(PyObject *obj) + + Enables subsequent uses of :c:func:`PyUnstable_TryIncRef` on *obj*. The + caller must hold a :term:`strong reference` to *obj* when calling this. + + .. versionadded:: 3.14 diff --git a/Doc/conf.py b/Doc/conf.py index 1aeecaeb3073f5..a4e0c628649018 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -27,7 +27,9 @@ 'c_annotations', 'changes', 'glossary_search', + 'grammar_snippet', 'lexers', + 'misc_news', 'pydoc_topics', 'pyspecific', 'sphinx.ext.coverage', diff --git a/Doc/data/refcounts.dat b/Doc/data/refcounts.dat index e78754e24e23d8..d709d2d91b0eb0 100644 --- a/Doc/data/refcounts.dat +++ b/Doc/data/refcounts.dat @@ -3052,3 +3052,11 @@ _Py_c_quot:Py_complex:divisor:: _Py_c_sum:Py_complex::: _Py_c_sum:Py_complex:left:: _Py_c_sum:Py_complex:right:: + +PyImport_ImportModuleAttr:PyObject*::+1: +PyImport_ImportModuleAttr:PyObject*:mod_name:0: +PyImport_ImportModuleAttr:PyObject*:attr_name:0: + +PyImport_ImportModuleAttrString:PyObject*::+1: +PyImport_ImportModuleAttrString:const char *:mod_name:: +PyImport_ImportModuleAttrString:const char *:attr_name:: diff --git a/Doc/data/stable_abi.dat b/Doc/data/stable_abi.dat index 0de0e9a2b67dfb..035f0dcc5e2678 100644 --- a/Doc/data/stable_abi.dat +++ b/Doc/data/stable_abi.dat @@ -582,7 +582,6 @@ func,PySequence_Contains,3.2,, func,PySequence_Count,3.2,, func,PySequence_DelItem,3.2,, func,PySequence_DelSlice,3.2,, -func,PySequence_Fast,3.2,, func,PySequence_GetItem,3.2,, func,PySequence_GetSlice,3.2,, func,PySequence_In,3.2,, diff --git a/Doc/deprecations/c-api-pending-removal-in-3.15.rst b/Doc/deprecations/c-api-pending-removal-in-3.15.rst index ac31b3cc8cd451..666a1622dd0b29 100644 --- a/Doc/deprecations/c-api-pending-removal-in-3.15.rst +++ b/Doc/deprecations/c-api-pending-removal-in-3.15.rst @@ -10,25 +10,35 @@ Pending removal in Python 3.15 :c:func:`PyWeakref_GetRef` on Python 3.12 and older. * :c:type:`Py_UNICODE` type and the :c:macro:`!Py_UNICODE_WIDE` macro: Use :c:type:`wchar_t` instead. -* Python initialization functions: +* Python initialization functions, deprecated in Python 3.13: - * :c:func:`PySys_ResetWarnOptions`: - Clear :data:`sys.warnoptions` and :data:`!warnings.filters` instead. - * :c:func:`Py_GetExecPrefix`: - Get :data:`sys.base_exec_prefix` and :data:`sys.exec_prefix` instead. * :c:func:`Py_GetPath`: - Get :data:`sys.path` instead. + Use :c:func:`PyConfig_Get("module_search_paths") ` + (:data:`sys.path`) instead. * :c:func:`Py_GetPrefix`: - Get :data:`sys.base_prefix` and :data:`sys.prefix` instead. + Use :c:func:`PyConfig_Get("base_prefix") ` + (:data:`sys.base_prefix`) instead. Use :c:func:`PyConfig_Get("prefix") + ` (:data:`sys.prefix`) if :ref:`virtual environments + ` need to be handled. + * :c:func:`Py_GetExecPrefix`: + Use :c:func:`PyConfig_Get("base_exec_prefix") ` + (:data:`sys.base_exec_prefix`) instead. Use + :c:func:`PyConfig_Get("exec_prefix") ` + (:data:`sys.exec_prefix`) if :ref:`virtual environments ` need to + be handled. * :c:func:`Py_GetProgramFullPath`: - Get :data:`sys.executable` instead. + Use :c:func:`PyConfig_Get("executable") ` + (:data:`sys.executable`) instead. * :c:func:`Py_GetProgramName`: - Get :data:`sys.executable` instead. + Use :c:func:`PyConfig_Get("executable") ` + (:data:`sys.executable`) instead. * :c:func:`Py_GetPythonHome`: - Get :c:func:`PyConfig_Get("home") ` - or the :envvar:`PYTHONHOME` environment variable instead. + Use :c:func:`PyConfig_Get("home") ` or the + :envvar:`PYTHONHOME` environment variable instead. 
- See also the :c:func:`PyConfig_Get` function. + The `pythoncapi-compat project + `__ can be used to get + :c:func:`PyConfig_Get` on Python 3.13 and older. * Functions to configure Python's initialization, deprecated in Python 3.11: @@ -40,6 +50,8 @@ Pending removal in Python 3.15 Set :c:member:`PyConfig.program_name` instead. * :c:func:`!Py_SetPythonHome()`: Set :c:member:`PyConfig.home` instead. + * :c:func:`PySys_ResetWarnOptions`: + Clear :data:`sys.warnoptions` and :data:`!warnings.filters` instead. The :c:func:`Py_InitializeFromConfig` API should be used with :c:type:`PyConfig` instead. diff --git a/Doc/glossary.rst b/Doc/glossary.rst index e3a14601398e89..d933ca6b467cf3 100644 --- a/Doc/glossary.rst +++ b/Doc/glossary.rst @@ -658,6 +658,9 @@ Glossary and therefore it is never deallocated while the interpreter is running. For example, :const:`True` and :const:`None` are immortal in CPython. + Immortal objects can be identified via :func:`sys._is_immortal`, or + via :c:func:`PyUnstable_IsImmortal` in the C API. + immutable An object with a fixed value. Immutable objects include numbers, strings and tuples. Such an object cannot be altered. A new object has to diff --git a/Doc/howto/free-threading-python.rst b/Doc/howto/free-threading-python.rst index b21e3287ecaa3f..cd920553a3a461 100644 --- a/Doc/howto/free-threading-python.rst +++ b/Doc/howto/free-threading-python.rst @@ -43,7 +43,7 @@ Identifying free-threaded Python ================================ To check if the current interpreter supports free-threading, :option:`python -VV <-V>` -and :attr:`sys.version` contain "experimental free-threading build". +and :data:`sys.version` contain "experimental free-threading build". The new :func:`sys._is_gil_enabled` function can be used to check whether the GIL is actually disabled in the running process. diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst index 2f81080d525f86..1af7d6be750102 100644 --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -93,7 +93,7 @@ The :mod:`!datetime` module exports the following constants: The largest year number allowed in a :class:`date` or :class:`.datetime` object. :const:`MAXYEAR` is 9999. -.. attribute:: UTC +.. data:: UTC Alias for the UTC time zone singleton :attr:`datetime.timezone.utc`. @@ -970,7 +970,7 @@ Other constructors, all class methods: .. deprecated:: 3.12 - Use :meth:`datetime.now` with :attr:`UTC` instead. + Use :meth:`datetime.now` with :const:`UTC` instead. .. classmethod:: datetime.fromtimestamp(timestamp, tz=None) @@ -1042,7 +1042,7 @@ Other constructors, all class methods: .. deprecated:: 3.12 - Use :meth:`datetime.fromtimestamp` with :attr:`UTC` instead. + Use :meth:`datetime.fromtimestamp` with :const:`UTC` instead. .. 
classmethod:: datetime.fromordinal(ordinal) diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst index 185eaf3f721c72..9318af60b60f95 100644 --- a/Doc/library/decimal.rst +++ b/Doc/library/decimal.rst @@ -2262,7 +2262,7 @@ value for :attr:`~Context.prec` as well [#]_:: Decimal('904625697166532776746648320380374280103671755200316906558262375061821325312') -For inexact results, :attr:`MAX_PREC` is far too large on 64-bit platforms and +For inexact results, :const:`MAX_PREC` is far too large on 64-bit platforms and the available memory will be insufficient:: >>> Decimal(1) / 3 diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst index f72b11e34c5c3d..319d261ef3fb4d 100644 --- a/Doc/library/exceptions.rst +++ b/Doc/library/exceptions.rst @@ -562,9 +562,13 @@ The following exceptions are the exceptions that are usually raised. Raised when the interpreter finds an internal error, but the situation does not look so serious to cause it to abandon all hope. The associated value is a - string indicating what went wrong (in low-level terms). + string indicating what went wrong (in low-level terms). In :term:`CPython`, + this could be raised by incorrectly using Python's C API, such as returning + a ``NULL`` value without an exception set. - You should report this to the author or maintainer of your Python interpreter. + If you're confident that this exception wasn't your fault, or the fault of + a package you're using, you should report this to the author or maintainer + of your Python interpreter. Be sure to report the version of the Python interpreter (``sys.version``; it is also printed at the start of an interactive Python session), the exact error message (the exception's associated value) and if possible the source of the diff --git a/Doc/library/filecmp.rst b/Doc/library/filecmp.rst index 282d0e0d8db5cf..abd1b8c826d170 100644 --- a/Doc/library/filecmp.rst +++ b/Doc/library/filecmp.rst @@ -189,7 +189,7 @@ The :class:`dircmp` class are the same type as *self*, if *self* is a subclass of :class:`dircmp`. -.. attribute:: DEFAULT_IGNORES +.. data:: DEFAULT_IGNORES .. versionadded:: 3.4 diff --git a/Doc/library/importlib.metadata.rst b/Doc/library/importlib.metadata.rst index d80255f5313061..45bf19ec29286d 100644 --- a/Doc/library/importlib.metadata.rst +++ b/Doc/library/importlib.metadata.rst @@ -375,7 +375,7 @@ Mapping import to distribution packages .. function:: packages_distributions() Return a mapping from the top level module and import package - names found via :attr:`sys.meta_path` to the names of the distribution + names found via :data:`sys.meta_path` to the names of the distribution packages (if any) that provide the corresponding files. To allow for namespace packages (which may have members provided by diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst index b935fc0e42a4bd..245807e55ec936 100644 --- a/Doc/library/importlib.rst +++ b/Doc/library/importlib.rst @@ -746,7 +746,7 @@ ABC hierarchy:: suitable for reading (same as :attr:`pathlib.Path.open`). When opening as text, accepts encoding parameters such as those - accepted by :attr:`io.TextIOWrapper`. + accepted by :class:`io.TextIOWrapper`. .. method:: read_bytes() @@ -794,14 +794,14 @@ ABC hierarchy:: This module contains the various objects that help :keyword:`import` find and load modules. -.. attribute:: SOURCE_SUFFIXES +.. data:: SOURCE_SUFFIXES A list of strings representing the recognized file suffixes for source modules. .. versionadded:: 3.3 -.. 
attribute:: DEBUG_BYTECODE_SUFFIXES +.. data:: DEBUG_BYTECODE_SUFFIXES A list of strings representing the file suffixes for non-optimized bytecode modules. @@ -809,9 +809,9 @@ find and load modules. .. versionadded:: 3.3 .. deprecated:: 3.5 - Use :attr:`BYTECODE_SUFFIXES` instead. + Use :const:`BYTECODE_SUFFIXES` instead. -.. attribute:: OPTIMIZED_BYTECODE_SUFFIXES +.. data:: OPTIMIZED_BYTECODE_SUFFIXES A list of strings representing the file suffixes for optimized bytecode modules. @@ -819,9 +819,9 @@ find and load modules. .. versionadded:: 3.3 .. deprecated:: 3.5 - Use :attr:`BYTECODE_SUFFIXES` instead. + Use :const:`BYTECODE_SUFFIXES` instead. -.. attribute:: BYTECODE_SUFFIXES +.. data:: BYTECODE_SUFFIXES A list of strings representing the recognized file suffixes for bytecode modules (including the leading dot). @@ -831,7 +831,7 @@ find and load modules. .. versionchanged:: 3.5 The value is no longer dependent on ``__debug__``. -.. attribute:: EXTENSION_SUFFIXES +.. data:: EXTENSION_SUFFIXES A list of strings representing the recognized file suffixes for extension modules. @@ -1109,7 +1109,7 @@ find and load modules. .. method:: is_package(fullname) Returns ``True`` if the file path points to a package's ``__init__`` - module based on :attr:`EXTENSION_SUFFIXES`. + module based on :const:`EXTENSION_SUFFIXES`. .. method:: get_code(fullname) @@ -1294,7 +1294,7 @@ find and load modules. This module contains the various objects that help in the construction of an :term:`importer`. -.. attribute:: MAGIC_NUMBER +.. data:: MAGIC_NUMBER The bytes which represent the bytecode version number. If you need help with loading/writing bytecode then consider :class:`importlib.abc.SourceLoader`. diff --git a/Doc/library/logging.handlers.rst b/Doc/library/logging.handlers.rst index 5a081f9e7add99..ffb54591b3563b 100644 --- a/Doc/library/logging.handlers.rst +++ b/Doc/library/logging.handlers.rst @@ -613,7 +613,7 @@ The :class:`SysLogHandler` class, located in the :mod:`logging.handlers` module, supports sending logging messages to a remote or local Unix syslog. -.. class:: SysLogHandler(address=('localhost', SYSLOG_UDP_PORT), facility=LOG_USER, socktype=socket.SOCK_DGRAM) +.. class:: SysLogHandler(address=('localhost', SYSLOG_UDP_PORT), facility=LOG_USER, socktype=socket.SOCK_DGRAM, timeout=None) Returns a new instance of the :class:`SysLogHandler` class intended to communicate with a remote Unix machine whose address is given by *address* in @@ -626,6 +626,11 @@ supports sending logging messages to a remote or local Unix syslog. *socktype* argument, which defaults to :const:`socket.SOCK_DGRAM` and thus opens a UDP socket. To open a TCP socket (for use with the newer syslog daemons such as rsyslog), specify a value of :const:`socket.SOCK_STREAM`. + If *timeout* is specified, it sets a timeout (in seconds) for the socket operations. + This can help prevent the program from hanging indefinitely if the syslog server is + unreachable. By default, *timeout* is ``None``, meaning no timeout is applied. + + Note that if your server is not listening on UDP port 514, :class:`SysLogHandler` may appear not to work. In that case, check what @@ -645,6 +650,8 @@ supports sending logging messages to a remote or local Unix syslog. .. versionchanged:: 3.2 *socktype* was added. + .. versionchanged:: 3.14 + *timeout* was added. .. 
method:: close() diff --git a/Doc/library/logging.rst b/Doc/library/logging.rst index 235bcc281ac8f8..19ae024f9eeffa 100644 --- a/Doc/library/logging.rst +++ b/Doc/library/logging.rst @@ -342,7 +342,7 @@ in a module, ``__name__`` is the module's name in the Python package namespace. If no handler is attached to this logger (or any of its ancestors, taking into account the relevant :attr:`Logger.propagate` attributes), - the message will be sent to the handler set on :attr:`lastResort`. + the message will be sent to the handler set on :data:`lastResort`. .. versionchanged:: 3.2 The *stack_info* parameter was added. @@ -1495,7 +1495,7 @@ functions. Module-Level Attributes ----------------------- -.. attribute:: lastResort +.. data:: lastResort A "handler of last resort" is available through this attribute. This is a :class:`StreamHandler` writing to ``sys.stderr`` with a level of @@ -1507,7 +1507,7 @@ Module-Level Attributes .. versionadded:: 3.2 -.. attribute:: raiseExceptions +.. data:: raiseExceptions Used to see if exceptions during handling should be propagated. diff --git a/Doc/library/mimetypes.rst b/Doc/library/mimetypes.rst index 8ad4850584a7e1..514e773359a9aa 100644 --- a/Doc/library/mimetypes.rst +++ b/Doc/library/mimetypes.rst @@ -47,9 +47,11 @@ the information :func:`init` sets up. The optional *strict* argument is a flag specifying whether the list of known MIME types is limited to only the official types `registered with IANA `_. - When *strict* is ``True`` (the default), only the IANA types are supported; when - *strict* is ``False``, some additional non-standard but commonly used MIME types - are also recognized. + However, the behavior of this module also depends on the underlying operating + system. Only file types recognized by the OS or explicitly registered with + Python's internal database can be identified. When *strict* is ``True`` (the + default), only the IANA types are supported; when *strict* is ``False``, some + additional non-standard but commonly used MIME types are also recognized. .. versionchanged:: 3.8 Added support for *url* being a :term:`path-like object`. diff --git a/Doc/library/plistlib.rst b/Doc/library/plistlib.rst index 2906ebe7822f52..075b974501e3da 100644 --- a/Doc/library/plistlib.rst +++ b/Doc/library/plistlib.rst @@ -71,7 +71,7 @@ This module defines the following functions: When *aware_datetime* is true, fields with type ``datetime.datetime`` will be created as :ref:`aware object `, with - :attr:`!tzinfo` as :attr:`datetime.UTC`. + :attr:`!tzinfo` as :const:`datetime.UTC`. XML data for the :data:`FMT_XML` format is parsed using the Expat parser from :mod:`xml.parsers.expat` -- see its documentation for possible diff --git a/Doc/library/select.rst b/Doc/library/select.rst index 457970aed2dc73..d2094283d54736 100644 --- a/Doc/library/select.rst +++ b/Doc/library/select.rst @@ -165,7 +165,7 @@ The module defines the following: :exc:`InterruptedError`. -.. attribute:: PIPE_BUF +.. data:: PIPE_BUF The minimum number of bytes which can be written without blocking to a pipe when the pipe has been reported as ready for writing by :func:`~select.select`, diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst index 2a8592f8bd69c1..06800c4588b663 100644 --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -512,7 +512,9 @@ the use of userspace buffers in Python as in "``outfd.write(infd.read())``". On macOS `fcopyfile`_ is used to copy the file content (not metadata). -On Linux and Solaris :func:`os.sendfile` is used. 
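
The platform-specific fast-copy paths described in this hunk (``fcopyfile``, ``os.copy_file_range``, ``os.sendfile``) are selected inside ``shutil`` itself; callers keep using the ordinary high-level API. A small sketch of that API (illustrative only, not part of the patch):

    import shutil
    import tempfile
    from pathlib import Path

    # The fast-copy syscalls are picked automatically inside
    # shutil.copyfile(); the caller does nothing special.
    with tempfile.TemporaryDirectory() as tmp:
        src = Path(tmp, "src.bin")
        src.write_bytes(b"x" * (1 << 20))          # 1 MiB payload

        dst = Path(tmp, "dst.bin")
        shutil.copyfile(src, dst)                  # content only
        shutil.copy2(src, Path(tmp, "dst2.bin"))   # content + metadata

        assert dst.read_bytes() == src.read_bytes()
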
+On Linux :func:`os.copy_file_range` or :func:`os.sendfile` is used. + +On Solaris :func:`os.sendfile` is used. On Windows :func:`shutil.copyfile` uses a bigger default buffer size (1 MiB instead of 64 KiB) and a :func:`memoryview`-based variant of @@ -527,6 +529,10 @@ file then shutil will silently fallback on using less efficient .. versionchanged:: 3.14 Solaris now uses :func:`os.sendfile`. +.. versionchanged:: next + Copy-on-write or server-side copy may be used internally via + :func:`os.copy_file_range` on supported Linux filesystems. + .. _shutil-copytree-example: copytree example diff --git a/Doc/library/site.rst b/Doc/library/site.rst index 5f2a0f610e1aa5..e98dd83b60eb60 100644 --- a/Doc/library/site.rst +++ b/Doc/library/site.rst @@ -35,7 +35,7 @@ are skipped. For the tail part, it uses the empty string and then :file:`lib/site-packages` (on Windows) or :file:`lib/python{X.Y[t]}/site-packages` (on Unix and macOS). (The optional suffix "t" indicates the :term:`free threading` build, and is -appended if ``"t"`` is present in the :attr:`sys.abiflags` constant.) +appended if ``"t"`` is present in the :data:`sys.abiflags` constant.) For each of the distinct head-tail combinations, it sees if it refers to an existing directory, and if so, adds it to ``sys.path`` and also inspects the newly diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst index 37ea32dc8a56e5..c0dcecf737ef76 100644 --- a/Doc/library/ssl.rst +++ b/Doc/library/ssl.rst @@ -1937,8 +1937,8 @@ to speed up repeated connections from the same clients. A :class:`TLSVersion` enum member representing the highest supported TLS version. The value defaults to :attr:`TLSVersion.MAXIMUM_SUPPORTED`. - The attribute is read-only for protocols other than :attr:`PROTOCOL_TLS`, - :attr:`PROTOCOL_TLS_CLIENT`, and :attr:`PROTOCOL_TLS_SERVER`. + The attribute is read-only for protocols other than :const:`PROTOCOL_TLS`, + :const:`PROTOCOL_TLS_CLIENT`, and :const:`PROTOCOL_TLS_SERVER`. The attributes :attr:`~SSLContext.maximum_version`, :attr:`~SSLContext.minimum_version` and @@ -1961,7 +1961,7 @@ to speed up repeated connections from the same clients. .. attribute:: SSLContext.num_tickets Control the number of TLS 1.3 session tickets of a - :attr:`PROTOCOL_TLS_SERVER` context. The setting has no impact on TLS + :const:`PROTOCOL_TLS_SERVER` context. The setting has no impact on TLS 1.0 to 1.2 connections. .. versionadded:: 3.8 diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index 6050784264707b..4a15e27f82a160 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -2841,6 +2841,38 @@ objects. optional *sep* and *bytes_per_sep* parameters to insert separators between bytes in the hex output. + .. method:: resize(size) + + Resize the :class:`bytearray` to contain *size* bytes. *size* must be + greater than or equal to 0. + + If the :class:`bytearray` needs to shrink, bytes beyond *size* are truncated. + + If the :class:`bytearray` needs to grow, all new bytes, those beyond *size*, + will be set to null bytes. + + + This is equivalent to: + + >>> def resize(ba, size): + ... if len(ba) > size: + ... del ba[size:] + ... else: + ... ba += b'\0' * (size - len(ba)) + + Examples: + + >>> shrink = bytearray(b'abc') + >>> shrink.resize(1) + >>> (shrink, len(shrink)) + (bytearray(b'a'), 1) + >>> grow = bytearray(b'abc') + >>> grow.resize(5) + >>> (grow, len(grow)) + (bytearray(b'abc\x00\x00'), 5) + + .. 
versionadded:: next + Since bytearray objects are sequences of integers (akin to a list), for a bytearray object *b*, ``b[0]`` will be an integer, while ``b[0:1]`` will be a bytearray object of length 1. (This contrasts with text strings, where diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index 151fd60532048a..855237e0984972 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -855,6 +855,11 @@ always available. Unless explicitly noted otherwise, all variables are read-only reflect the actual number of references. Consequently, do not rely on the returned value to be accurate, other than a value of 0 or 1. + .. impl-detail:: + + :term:`Immortal ` objects with a large reference count can be + identified via :func:`_is_immortal`. + .. versionchanged:: 3.12 Immortal objects have very large refcounts that do not match the actual number of references to the object. @@ -1264,6 +1269,24 @@ always available. Unless explicitly noted otherwise, all variables are read-only .. versionadded:: 3.12 +.. function:: _is_immortal(op) + + Return :const:`True` if the given object is :term:`immortal`, :const:`False` + otherwise. + + .. note:: + + Objects that are immortal (and thus return ``True`` upon being passed + to this function) are not guaranteed to be immortal in future versions, + and vice versa for mortal objects. + + .. versionadded:: next + + .. impl-detail:: + + This function should be used for specialized purposes only. + It is not guaranteed to exist in all implementations of Python. + .. function:: _is_interned(string) Return :const:`True` if the given string is "interned", :const:`False` @@ -1422,6 +1445,7 @@ always available. Unless explicitly noted otherwise, all variables are read-only AIX ``'aix'`` Android ``'android'`` Emscripten ``'emscripten'`` + FreeBSD ``'freebsd'`` iOS ``'ios'`` Linux ``'linux'`` macOS ``'darwin'`` @@ -1432,12 +1456,12 @@ always available. Unless explicitly noted otherwise, all variables are read-only On Unix systems not listed in the table, the value is the lowercased OS name as returned by ``uname -s``, with the first part of the version as returned by - ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, *at the time - when Python was built*. Unless you want to test for a specific system - version, it is therefore recommended to use the following idiom:: + ``uname -r`` appended, e.g. ``'sunos5'``, *at the time when Python was built*. + Unless you want to test for a specific system version, it is therefore + recommended to use the following idiom:: - if sys.platform.startswith('freebsd'): - # FreeBSD-specific code here... + if sys.platform.startswith('sunos'): + # SunOS-specific code here... .. versionchanged:: 3.3 On Linux, :data:`sys.platform` doesn't contain the major version anymore. @@ -1451,6 +1475,10 @@ always available. Unless explicitly noted otherwise, all variables are read-only On Android, :data:`sys.platform` now returns ``'android'`` rather than ``'linux'``. + .. versionchanged:: 3.14 + On FreeBSD, :data:`sys.platform` doesn't contain the major version anymore. + It is always ``'freebsd'``, instead of ``'freebsd13'`` or ``'freebsd14'``. + .. seealso:: :data:`os.name` has a coarser granularity. :func:`os.uname` gives diff --git a/Doc/library/test.rst b/Doc/library/test.rst index b5b6e442e218fd..def22f8bb8ab2d 100644 --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -792,6 +792,11 @@ The :mod:`test.support` module defines the following functions: Decorator for invoking :func:`check_impl_detail` on *guards*. 
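
Tying this hunk to the ``sys._is_immortal`` addition documented earlier in the patch: immortality is exactly the kind of CPython implementation detail these ``test.support`` guards exist for. A sketch of how a test might combine them (illustrative; it assumes CPython's ``test`` package and the new private helper are available):

    import sys
    import unittest
    from test import support

    class ImmortalObjectsTest(unittest.TestCase):
        # sys._is_immortal() is CPython-specific, so guard the test with
        # the impl_detail decorator documented above.
        @support.impl_detail("immortality is a CPython detail", cpython=True)
        def test_known_immortals(self):
            # Per the glossary change in this patch, True and None are
            # immortal in CPython.
            self.assertTrue(sys._is_immortal(None))
            self.assertTrue(sys._is_immortal(True))

    if __name__ == "__main__":
        unittest.main()
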
If that returns ``False``, then uses *msg* as the reason for skipping the test. +.. decorator:: thread_unsafe(reason=None) + + Decorator for marking tests as thread-unsafe. This test always runs in one + thread even when invoked with ``--parallel-threads``. + .. decorator:: no_tracing diff --git a/Doc/license.rst b/Doc/license.rst index 61a26ab2867498..90783e3e31a69d 100644 --- a/Doc/license.rst +++ b/Doc/license.rst @@ -374,7 +374,7 @@ Project, https://www.wide.ad.jp/. :: may be used to endorse or promote products derived from this software without specific prior written permission. - THIS SOFTWARE IS PROVIDED BY THE PROJECT AND CONTRIBUTORS ``AS IS'' AND + THIS SOFTWARE IS PROVIDED BY THE PROJECT AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE PROJECT OR CONTRIBUTORS BE LIABLE @@ -583,7 +583,7 @@ interface:: notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND + THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE @@ -884,7 +884,7 @@ sources unless the build is configured ``--with-system-libffi``:: Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the - ``Software''), to deal in the Software without restriction, including + "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to @@ -893,7 +893,7 @@ sources unless the build is configured ``--with-system-libffi``:: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - THE SOFTWARE IS PROVIDED ``AS IS'', WITHOUT WARRANTY OF ANY KIND, + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT @@ -1122,7 +1122,7 @@ The file is distributed under the 2-Clause BSD License:: notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, diff --git a/Doc/make.bat b/Doc/make.bat index ede793ed3c6d70..99f0d5c44f0098 100644 --- a/Doc/make.bat +++ b/Doc/make.bat @@ -127,16 +127,14 @@ goto end :build if not exist "%BUILDDIR%" mkdir "%BUILDDIR%" -rem PY_MISC_NEWS_DIR is also used by our Sphinx extension in tools/extensions/pyspecific.py -if not defined PY_MISC_NEWS_DIR set PY_MISC_NEWS_DIR=%BUILDDIR%\%1 -if not exist "%PY_MISC_NEWS_DIR%" mkdir "%PY_MISC_NEWS_DIR%" +if not exist build mkdir build if exist ..\Misc\NEWS ( - echo.Copying Misc\NEWS to %PY_MISC_NEWS_DIR%\NEWS - copy ..\Misc\NEWS "%PY_MISC_NEWS_DIR%\NEWS" > nul + echo.Copying existing Misc\NEWS file to Doc\build\NEWS + copy ..\Misc\NEWS build\NEWS > nul ) else if exist ..\Misc\NEWS.D ( if defined BLURB ( echo.Merging Misc/NEWS with %BLURB% - %BLURB% merge -f "%PY_MISC_NEWS_DIR%\NEWS" + %BLURB% merge -f build\NEWS ) else ( echo.No Misc/NEWS file and Blurb is not available. exit /B 1 diff --git a/Doc/reference/toplevel_components.rst b/Doc/reference/toplevel_components.rst index dd3d3d6878e289..f155fafbe4d738 100644 --- a/Doc/reference/toplevel_components.rst +++ b/Doc/reference/toplevel_components.rst @@ -66,7 +66,9 @@ File input All input read from non-interactive files has the same form: -.. productionlist:: python-grammar +.. grammar-snippet:: + :group: python-grammar + file_input: (NEWLINE | `statement`)* This syntax is used in the following situations: @@ -85,7 +87,9 @@ Interactive input Input in interactive mode is parsed using the following grammar: -.. productionlist:: python-grammar +.. grammar-snippet:: + :group: python-grammar + interactive_input: [`stmt_list`] NEWLINE | `compound_stmt` NEWLINE Note that a (top-level) compound statement must be followed by a blank line in diff --git a/Doc/tools/extensions/availability.py b/Doc/tools/extensions/availability.py index 47833fdcb87590..1a2c7b02b44439 100644 --- a/Doc/tools/extensions/availability.py +++ b/Doc/tools/extensions/availability.py @@ -6,6 +6,7 @@ from docutils import nodes from sphinx import addnodes +from sphinx.locale import _ as sphinx_gettext from sphinx.util import logging from sphinx.util.docutils import SphinxDirective @@ -55,7 +56,7 @@ class Availability(SphinxDirective): final_argument_whitespace = True def run(self) -> list[nodes.container]: - title = "Availability" + title = sphinx_gettext("Availability") refnode = addnodes.pending_xref( title, nodes.inline(title, title, classes=["xref", "std", "std-ref"]), diff --git a/Doc/tools/extensions/grammar_snippet.py b/Doc/tools/extensions/grammar_snippet.py new file mode 100644 index 00000000000000..03c7e7ce2f4228 --- /dev/null +++ b/Doc/tools/extensions/grammar_snippet.py @@ -0,0 +1,219 @@ +"""Support for documenting Python's grammar.""" + +from __future__ import annotations + +import re +from typing import TYPE_CHECKING + +from docutils import nodes +from docutils.parsers.rst import directives +from sphinx import addnodes +from sphinx.domains.std import token_xrefs +from sphinx.util.docutils import SphinxDirective +from sphinx.util.nodes import make_id + +if TYPE_CHECKING: + from collections.abc import Sequence + from typing import Any + + from docutils.nodes import Node + from sphinx.application import Sphinx + from sphinx.util.typing import ExtensionMetadata + + +class snippet_string_node(nodes.inline): # noqa: N801 (snake_case is fine) + """Node for a string literal in a grammar snippet.""" + + def __init__( + self, + rawsource: str = '', + text: str = '', + *children: Node, + 
**attributes: Any, + ) -> None: + super().__init__(rawsource, text, *children, **attributes) + # Use the Pygments highlight class for `Literal.String.Other` + self['classes'].append('sx') + + +class GrammarSnippetBase(SphinxDirective): + """Common functionality for GrammarSnippetDirective & CompatProductionList.""" + + # The option/argument handling is left to the individual classes. + + def make_grammar_snippet( + self, options: dict[str, Any], content: Sequence[str] + ) -> list[nodes.paragraph]: + """Create a literal block from options & content.""" + + group_name = options['group'] + + # Docutils elements have a `rawsource` attribute that is supposed to be + # set to the original ReST source. + # Sphinx does the following with it: + # - if it's empty, set it to `self.astext()` + # - if it matches `self.astext()` when generating the output, + # apply syntax highlighting (which is based on the plain-text content + # and thus discards internal formatting, like references). + # To get around this, we set it to this non-empty string: + rawsource = 'You should not see this.' + + literal = nodes.literal_block( + rawsource, + '', + classes=['highlight'], + ) + + grammar_re = re.compile( + r""" + (?P<rule_name>^[a-zA-Z0-9_]+) # identifier at start of line + (?=:) # ... followed by a colon + | + (?P<rule_ref>`[^\s`]+`) # identifier in backquotes + | + (?P<single_quoted>'[^']*') # string in 'quotes' + | + (?P<double_quoted>"[^"]*") # string in "quotes" + """, + re.VERBOSE, + ) + + for line in content: + last_pos = 0 + for match in grammar_re.finditer(line): + # Handle text between matches + if match.start() > last_pos: + literal += nodes.Text(line[last_pos : match.start()]) + last_pos = match.end() + + # Handle matches + group_dict = { + name: content + for name, content in match.groupdict().items() + if content is not None + } + match group_dict: + case {'rule_name': name}: + literal += self.make_link_target_for_token( + group_name, name + ) + case {'rule_ref': ref_text}: + literal += token_xrefs(ref_text, group_name) + case {'single_quoted': name} | {'double_quoted': name}: + literal += snippet_string_node('', name) + case _: + raise ValueError('unhandled match') + literal += nodes.Text(line[last_pos:] + '\n') + + node = nodes.paragraph( + '', + '', + literal, + ) + + return [node] + + def make_link_target_for_token( + self, group_name: str, name: str + ) -> addnodes.literal_strong: + """Return a literal node which is a link target for the given token.""" + name_node = addnodes.literal_strong() + + # Cargo-culted magic to make `name_node` a link target + # similar to Sphinx `production`. + # This needs to be the same as what Sphinx does + # to avoid breaking existing links. + domain = self.env.domains['std'] + obj_name = f"{group_name}:{name}" + prefix = f'grammar-token-{group_name}' + node_id = make_id(self.env, self.state.document, prefix, name) + name_node['ids'].append(node_id) + self.state.document.note_implicit_target(name_node, name_node) + domain.note_object('token', obj_name, node_id, location=name_node) + + text_node = nodes.Text(name) + name_node += text_node + return name_node + + +class GrammarSnippetDirective(GrammarSnippetBase): + """Transform a grammar-snippet directive to a Sphinx literal_block + + That is, turn something like: + + .. grammar-snippet:: file + :group: python-grammar + + file: (NEWLINE | statement)* + + into something similar to Sphinx productionlist, but better suited + for our needs: + - Instead of `::=`, use a colon, as in `Grammar/python.gram` + - Show the listing almost as is, with no auto-alignment.
+ The only special character is the backtick, which marks tokens. + + Unlike Sphinx's productionlist, this directive supports options. + The "group" must be given as a named option. + The content must be preceded by a blank line (like with most ReST + directives). + """ + + has_content = True + option_spec = { + 'group': directives.unchanged_required, + } + + # We currently ignore arguments. + required_arguments = 0 + optional_arguments = 1 + final_argument_whitespace = True + + def run(self) -> list[nodes.paragraph]: + return self.make_grammar_snippet(self.options, self.content) + + +class CompatProductionList(GrammarSnippetBase): + """Create grammar snippets from reST productionlist syntax + + This is intended to be a transitional directive, used while we switch + from productionlist to grammar-snippet. + It makes existing docs that use the ReST syntax look like grammar-snippet, + as much as possible. + """ + + has_content = False + required_arguments = 1 + optional_arguments = 0 + final_argument_whitespace = True + option_spec = {} + + def run(self) -> list[nodes.paragraph]: + # The "content" of a productionlist is actually the first and only + # argument. The first line is the group; the rest is the content lines. + lines = self.arguments[0].splitlines() + group = lines[0].strip() + options = {'group': group} + # We assume there's a colon in each line; align on it. + align_column = max(line.index(':') for line in lines[1:]) + 1 + content = [] + for line in lines[1:]: + rule_name, _colon, text = line.partition(':') + rule_name = rule_name.strip() + if rule_name: + name_part = rule_name + ':' + else: + name_part = '' + content.append(f'{name_part:<{align_column}}{text}') + return self.make_grammar_snippet(options, content) + + +def setup(app: Sphinx) -> ExtensionMetadata: + app.add_directive('grammar-snippet', GrammarSnippetDirective) + app.add_directive_to_domain( + 'std', 'productionlist', CompatProductionList, override=True + ) + return { + 'version': '1.0', + 'parallel_read_safe': True, + 'parallel_write_safe': True, + } diff --git a/Doc/tools/extensions/misc_news.py b/Doc/tools/extensions/misc_news.py new file mode 100644 index 00000000000000..a24c440595ee92 --- /dev/null +++ b/Doc/tools/extensions/misc_news.py @@ -0,0 +1,75 @@ +"""Support for including Misc/NEWS.""" + +from __future__ import annotations + +import re +from pathlib import Path +from typing import TYPE_CHECKING + +from docutils import nodes +from sphinx.locale import _ as sphinx_gettext +from sphinx.util.docutils import SphinxDirective + +if TYPE_CHECKING: + from typing import Final + + from docutils.nodes import Node + from sphinx.application import Sphinx + from sphinx.util.typing import ExtensionMetadata + + +BLURB_HEADER = """\ ++++++++++++ +Python News ++++++++++++ +""" + +bpo_issue_re: Final[re.Pattern[str]] = re.compile( + "(?:issue #|bpo-)([0-9]+)", re.ASCII +) +gh_issue_re: Final[re.Pattern[str]] = re.compile( + "gh-(?:issue-)?([0-9]+)", re.ASCII | re.IGNORECASE +) +whatsnew_re: Final[re.Pattern[str]] = re.compile( + r"^what's new in (.*?)\??$", re.ASCII | re.IGNORECASE | re.MULTILINE +) + + +class MiscNews(SphinxDirective): + has_content = False + required_arguments = 1 + optional_arguments = 0 + final_argument_whitespace = False + option_spec = {} + + def run(self) -> list[Node]: + # Get content of NEWS file + source, _ = self.get_source_info() + news_file = Path(source).resolve().parent / self.arguments[0] + self.env.note_dependency(news_file) + try: + news_text = news_file.read_text(encoding="utf-8") + 
except (OSError, UnicodeError): + text = sphinx_gettext("The NEWS file is not available.") + return [nodes.strong(text, text)] + + # remove first 3 lines as they are the main heading + news_text = news_text.removeprefix(BLURB_HEADER) + + news_text = bpo_issue_re.sub(r":issue:`\1`", news_text) + # Fallback handling for GitHub issues + news_text = gh_issue_re.sub(r":gh:`\1`", news_text) + news_text = whatsnew_re.sub(r"\1", news_text) + + self.state_machine.insert_input(news_text.splitlines(), str(news_file)) + return [] + + +def setup(app: Sphinx) -> ExtensionMetadata: + app.add_directive("miscnews", MiscNews) + + return { + "version": "1.0", + "parallel_read_safe": True, + "parallel_write_safe": True, + } diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index f363dfd4216929..57cf80a7e77324 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -141,46 +141,6 @@ def run(self): return PyMethod.run(self) -# Support for including Misc/NEWS - -issue_re = re.compile('(?:[Ii]ssue #|bpo-)([0-9]+)', re.I) -gh_issue_re = re.compile('(?:gh-issue-|gh-)([0-9]+)', re.I) -whatsnew_re = re.compile(r"(?im)^what's new in (.*?)\??$") - - -class MiscNews(SphinxDirective): - has_content = False - required_arguments = 1 - optional_arguments = 0 - final_argument_whitespace = False - option_spec = {} - - def run(self): - fname = self.arguments[0] - source = self.state_machine.input_lines.source( - self.lineno - self.state_machine.input_offset - 1) - source_dir = getenv('PY_MISC_NEWS_DIR') - if not source_dir: - source_dir = path.dirname(path.abspath(source)) - fpath = path.join(source_dir, fname) - self.env.note_dependency(path.abspath(fpath)) - try: - with io.open(fpath, encoding='utf-8') as fp: - content = fp.read() - except Exception: - text = 'The NEWS file is not available.' - node = nodes.strong(text, text) - return [node] - content = issue_re.sub(r':issue:`\1`', content) - # Fallback handling for the GitHub issue - content = gh_issue_re.sub(r':gh:`\1`', content) - content = whatsnew_re.sub(r'\1', content) - # remove first 3 lines as they are the main heading - lines = ['.. 
default-role:: obj', ''] + content.splitlines()[3:] - self.state_machine.insert_input(lines, fname) - return [] - - # Support for documenting Opcodes opcode_sig_re = re.compile(r'(\w+(?:\+\d)?)(?:\s*\((.*)\))?') @@ -268,6 +228,5 @@ def setup(app): app.add_directive_to_domain('py', 'awaitablefunction', PyAwaitableFunction) app.add_directive_to_domain('py', 'awaitablemethod', PyAwaitableMethod) app.add_directive_to_domain('py', 'abstractmethod', PyAbstractMethod) - app.add_directive('miscnews', MiscNews) app.connect('env-check-consistency', patch_pairindextypes) return {'version': '1.0', 'parallel_read_safe': True} diff --git a/Doc/tools/templates/dummy.html b/Doc/tools/templates/dummy.html index 49c2a71a5e40cf..4f0f6f91436a87 100644 --- a/Doc/tools/templates/dummy.html +++ b/Doc/tools/templates/dummy.html @@ -7,6 +7,10 @@ {% trans %}Deprecated since version {deprecated}, will be removed in version {removed}{% endtrans %} {% trans %}Deprecated since version {deprecated}, removed in version {removed}{% endtrans %} +In extensions/availability.py: + +{% trans %}Availability{% endtrans %} + In extensions/c_annotations.py: {% trans %}Part of the{% endtrans %} diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst index b58532acc4b322..629859e36cb654 100644 --- a/Doc/using/configure.rst +++ b/Doc/using/configure.rst @@ -311,6 +311,10 @@ General Options By convention, ``--enable-experimental-jit`` is a shorthand for ``--enable-experimental-jit=yes``. + .. note:: + + When building CPython with JIT enabled, ensure that your system has Python 3.11 or later installed. + .. versionadded:: 3.13 .. option:: PKG_CONFIG diff --git a/Doc/whatsnew/2.3.rst b/Doc/whatsnew/2.3.rst index ac463f82cfb8ca..b7e4e73f4ce4aa 100644 --- a/Doc/whatsnew/2.3.rst +++ b/Doc/whatsnew/2.3.rst @@ -353,7 +353,7 @@ convert them to Unicode using the ``mbcs`` encoding. Other systems also allow Unicode strings as file names but convert them to byte strings before passing them to the system, which can cause a :exc:`UnicodeError` to be raised. Applications can test whether arbitrary Unicode strings are -supported as file names by checking :attr:`os.path.supports_unicode_filenames`, +supported as file names by checking :const:`os.path.supports_unicode_filenames`, a Boolean value. Under MacOS, :func:`os.listdir` may now return Unicode filenames. diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst index 0e4dee0bd24fb2..caed3192be871d 100644 --- a/Doc/whatsnew/2.7.rst +++ b/Doc/whatsnew/2.7.rst @@ -1602,7 +1602,7 @@ changes, or look through the Subversion logs for all the details. identifier instead of the previous default value of ``'python'``. (Changed by Sean Reifschneider; :issue:`8451`.) -* The :attr:`sys.version_info` value is now a named tuple, with attributes +* The :data:`sys.version_info` value is now a named tuple, with attributes named :attr:`!major`, :attr:`!minor`, :attr:`!micro`, :attr:`!releaselevel`, and :attr:`!serial`. (Contributed by Ross Light; :issue:`4285`.) 
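As an illustrative aside to the named-tuple note above (a minimal sketch, not drawn from the patch itself; any modern Python suffices to run it)::

    import sys

    # sys.version_info behaves as a named tuple: index access and
    # attribute access refer to the same fields.
    assert sys.version_info[0] == sys.version_info.major
    assert sys.version_info[1] == sys.version_info.minor

    # Plain tuple comparison still works for version gating.
    if sys.version_info >= (2, 7):
        print("running %d.%d" % sys.version_info[:2])
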
diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst index 4fffc78a237791..994ccc708e26f2 100644 --- a/Doc/whatsnew/3.12.rst +++ b/Doc/whatsnew/3.12.rst @@ -1467,8 +1467,8 @@ imp ``imp.NullImporter`` Insert ``None`` into ``sys.path_importer_cache`` ``imp.cache_from_source()`` :func:`importlib.util.cache_from_source` ``imp.find_module()`` :func:`importlib.util.find_spec` - ``imp.get_magic()`` :attr:`importlib.util.MAGIC_NUMBER` - ``imp.get_suffixes()`` :attr:`importlib.machinery.SOURCE_SUFFIXES`, :attr:`importlib.machinery.EXTENSION_SUFFIXES`, and :attr:`importlib.machinery.BYTECODE_SUFFIXES` + ``imp.get_magic()`` :const:`importlib.util.MAGIC_NUMBER` + ``imp.get_suffixes()`` :const:`importlib.machinery.SOURCE_SUFFIXES`, :const:`importlib.machinery.EXTENSION_SUFFIXES`, and :const:`importlib.machinery.BYTECODE_SUFFIXES` ``imp.get_tag()`` :attr:`sys.implementation.cache_tag ` ``imp.load_module()`` :func:`importlib.import_module` ``imp.new_module(name)`` ``types.ModuleType(name)`` diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst index f2a8c711f4df88..2090759d3c4fde 100644 --- a/Doc/whatsnew/3.13.rst +++ b/Doc/whatsnew/3.13.rst @@ -325,7 +325,7 @@ enabled at runtime using the environment variable :envvar:`PYTHON_GIL` or the command-line option :option:`-X gil=1`. To check if the current interpreter supports free-threading, :option:`python -VV <-V>` -and :attr:`sys.version` contain "experimental free-threading build". +and :data:`sys.version` contain "experimental free-threading build". The new :func:`!sys._is_gil_enabled` function can be used to check whether the GIL is actually disabled in the running process. @@ -1062,7 +1062,7 @@ os which makes the newly spawned process use the current process environment. (Contributed by Jakub Kulik in :gh:`113119`.) -* :func:`~os.posix_spawn` can now use the :attr:`~os.POSIX_SPAWN_CLOSEFROM` +* :func:`~os.posix_spawn` can now use the :const:`~os.POSIX_SPAWN_CLOSEFROM` attribute in the *file_actions* parameter on platforms that support :c:func:`!posix_spawn_file_actions_addclosefrom_np`. (Contributed by Jakub Kulik in :gh:`113117`.) diff --git a/Doc/whatsnew/3.14.rst b/Doc/whatsnew/3.14.rst index d1df869a5c813b..8e4ef244538f22 100644 --- a/Doc/whatsnew/3.14.rst +++ b/Doc/whatsnew/3.14.rst @@ -649,6 +649,13 @@ sys which only exists in specialized builds of Python, may now return objects from other interpreters than the one it's called in. +* Add :func:`sys._is_immortal` for determining if an object is :term:`immortal`. + (Contributed by Peter Bierma in :gh:`128509`.) + +* On FreeBSD, :data:`sys.platform` doesn't contain the major version anymore. + It is always ``'freebsd'``, instead of ``'freebsd13'`` or ``'freebsd14'``. + + sys.monitoring -------------- @@ -1327,6 +1334,11 @@ New features * Add :c:func:`PyUnstable_IsImmortal` for determining whether an object is :term:`immortal`, for debugging purposes. +* Add :c:func:`PyImport_ImportModuleAttr` and + :c:func:`PyImport_ImportModuleAttrString` helper functions to import a module + and get an attribute of the module. + (Contributed by Victor Stinner in :gh:`128911`.) + Limited C API changes --------------------- @@ -1336,6 +1348,11 @@ Limited C API changes implementation details. (Contributed by Victor Stinner in :gh:`120600` and :gh:`124127`.) +* Remove :c:func:`PySequence_Fast` from the limited C API, since this function + has to be used with :c:macro:`PySequence_Fast_GET_ITEM` which never worked + in the limited C API. + (Contributed by Victor Stinner in :gh:`91417`.) 
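A short illustrative sketch (not part of the patch) of the two sys-level changes noted earlier in this file; the ``getattr`` fallback is an assumption so the snippet also runs on interpreters that do not provide the CPython-only helper::

    import sys

    # sys._is_immortal() is a CPython implementation detail added in 3.14;
    # fall back to a stub where it is absent.
    is_immortal = getattr(sys, "_is_immortal", lambda obj: False)
    print(is_immortal(None))      # True on CPython 3.14+: None is an immortal singleton
    print(is_immortal(object()))  # False: a freshly created object is mortal

    # With the version suffix gone, 'freebsd13'/'freebsd14' become just
    # 'freebsd'; startswith() keeps working on both old and new values.
    if sys.platform.startswith("freebsd"):
        print("FreeBSD-specific code here...")
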
+ Porting to Python 3.14 ---------------------- diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst index 71b186aeed7359..e4f602a17ee968 100644 --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -994,7 +994,7 @@ The :func:`~importlib.reload` function has been moved from :mod:`!imp` to :mod:`importlib` as part of the :mod:`!imp` module deprecation. (Contributed by Berker Peksag in :issue:`18193`.) -:mod:`importlib.util` now has a :data:`~importlib.util.MAGIC_NUMBER` attribute +:mod:`importlib.util` now has a :const:`~importlib.util.MAGIC_NUMBER` attribute providing access to the bytecode version number. This replaces the :func:`!get_magic` function in the deprecated :mod:`!imp` module. (Contributed by Brett Cannon in :issue:`18192`.) diff --git a/Doc/whatsnew/changelog.rst b/Doc/whatsnew/changelog.rst index b4356143659031..e796d4157cec76 100644 --- a/Doc/whatsnew/changelog.rst +++ b/Doc/whatsnew/changelog.rst @@ -1,5 +1,7 @@ .. _changelog: +.. default-role:: py:obj + +++++++++ Changelog +++++++++ diff --git a/Include/abstract.h b/Include/abstract.h index 7cfee1332ccaa4..4efe4fcb014903 100644 --- a/Include/abstract.h +++ b/Include/abstract.h @@ -726,31 +726,6 @@ PyAPI_FUNC(PyObject *) PySequence_Tuple(PyObject *o); This is equivalent to the Python expression: list(o) */ PyAPI_FUNC(PyObject *) PySequence_List(PyObject *o); -/* Return the sequence 'o' as a list, unless it's already a tuple or list. - - Use PySequence_Fast_GET_ITEM to access the members of this list, and - PySequence_Fast_GET_SIZE to get its length. - - Returns NULL on failure. If the object does not support iteration, raises a - TypeError exception with 'm' as the message text. */ -PyAPI_FUNC(PyObject *) PySequence_Fast(PyObject *o, const char* m); - -/* Return the size of the sequence 'o', assuming that 'o' was returned by - PySequence_Fast and is not NULL. */ -#define PySequence_Fast_GET_SIZE(o) \ - (PyList_Check(o) ? PyList_GET_SIZE(o) : PyTuple_GET_SIZE(o)) - -/* Return the 'i'-th element of the sequence 'o', assuming that o was returned - by PySequence_Fast, and that i is within bounds. */ -#define PySequence_Fast_GET_ITEM(o, i)\ - (PyList_Check(o) ? PyList_GET_ITEM((o), (i)) : PyTuple_GET_ITEM((o), (i))) - -/* Return a pointer to the underlying item array for - an object returned by PySequence_Fast */ -#define PySequence_Fast_ITEMS(sf) \ - (PyList_Check(sf) ? ((PyListObject *)(sf))->ob_item \ - : ((PyTupleObject *)(sf))->ob_item) - /* Return the number of occurrences on value on 'o', that is, return the number of keys for which o[key] == value. diff --git a/Include/cpython/abstract.h b/Include/cpython/abstract.h index 4e7b7a46703a6d..8fed1d3110988b 100644 --- a/Include/cpython/abstract.h +++ b/Include/cpython/abstract.h @@ -85,3 +85,29 @@ PyAPI_FUNC(Py_ssize_t) PyObject_LengthHint(PyObject *o, Py_ssize_t); need to be corrected for a negative index. */ #define PySequence_ITEM(o, i)\ ( Py_TYPE(o)->tp_as_sequence->sq_item((o), (i)) ) + +/* Return the sequence 'o' as a list, unless it's already a tuple or list. + + Use PySequence_Fast_GET_ITEM to access the members of this list, and + PySequence_Fast_GET_SIZE to get its length. + + Returns NULL on failure. If the object does not support iteration, raises a + TypeError exception with 'm' as the message text. */ +PyAPI_FUNC(PyObject *) PySequence_Fast(PyObject *o, const char* m); + +/* Return the size of the sequence 'o', assuming that 'o' was returned by + PySequence_Fast and is not NULL. */ +#define PySequence_Fast_GET_SIZE(o) \ + (PyList_Check(o) ? 
PyList_GET_SIZE(o) : PyTuple_GET_SIZE(o)) + +/* Return the 'i'-th element of the sequence 'o', assuming that o was returned + by PySequence_Fast, and that i is within bounds. */ +#define PySequence_Fast_GET_ITEM(o, i)\ + (PyList_Check(o) ? PyList_GET_ITEM((o), (i)) : PyTuple_GET_ITEM((o), (i))) + +/* Return a pointer to the underlying item array for + an object returned by PySequence_Fast */ +#define PySequence_Fast_ITEMS(sf) \ + (PyList_Check(sf) ? ((PyListObject *)(sf))->ob_item \ + : ((PyTupleObject *)(sf))->ob_item) + diff --git a/Include/cpython/import.h b/Include/cpython/import.h index 0fd61c28cafa0e..0ce0b1ee6cce2a 100644 --- a/Include/cpython/import.h +++ b/Include/cpython/import.h @@ -21,3 +21,10 @@ struct _frozen { collection of frozen modules: */ PyAPI_DATA(const struct _frozen *) PyImport_FrozenModules; + +PyAPI_FUNC(PyObject*) PyImport_ImportModuleAttr( + PyObject *mod_name, + PyObject *attr_name); +PyAPI_FUNC(PyObject*) PyImport_ImportModuleAttrString( + const char *mod_name, + const char *attr_name); diff --git a/Include/cpython/longintrepr.h b/Include/cpython/longintrepr.h index 4dd82600d562ee..4b6f97a5e475d6 100644 --- a/Include/cpython/longintrepr.h +++ b/Include/cpython/longintrepr.h @@ -76,8 +76,8 @@ typedef long stwodigits; /* signed variant of twodigits */ - 1: Zero - 2: Negative - The third lowest bit of lv_tag is reserved for an immortality flag, but is - not currently used. + The third lowest bit of lv_tag is + set to 1 for the small ints. In a normalized number, ob_digit[ndigits-1] (the most significant digit) is never zero. Also, in all cases, for all valid i, diff --git a/Include/cpython/object.h b/Include/cpython/object.h index 4c9e4f6c6e0434..71bd01884426ad 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -544,3 +544,9 @@ PyAPI_FUNC(int) PyUnstable_Object_EnableDeferredRefcount(PyObject *); /* Check whether the object is immortal. This cannot fail. */ PyAPI_FUNC(int) PyUnstable_IsImmortal(PyObject *); + +// Increments the reference count of the object, if it's not zero. +// PyUnstable_EnableTryIncRef() should be called on the object +// before calling this function in order to avoid spurious failures. 
+PyAPI_FUNC(int) PyUnstable_TryIncRef(PyObject *); +PyAPI_FUNC(void) PyUnstable_EnableTryIncRef(PyObject *); diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index 6d106c1b499c69..2a0c11e7b3ad66 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -574,15 +574,15 @@ static inline void _Py_atomic_fence_release(void); #if _Py_USE_GCC_BUILTIN_ATOMICS # define Py_ATOMIC_GCC_H -# include "cpython/pyatomic_gcc.h" +# include "pyatomic_gcc.h" # undef Py_ATOMIC_GCC_H #elif __STDC_VERSION__ >= 201112L && !defined(__STDC_NO_ATOMICS__) # define Py_ATOMIC_STD_H -# include "cpython/pyatomic_std.h" +# include "pyatomic_std.h" # undef Py_ATOMIC_STD_H #elif defined(_MSC_VER) # define Py_ATOMIC_MSC_H -# include "cpython/pyatomic_msc.h" +# include "pyatomic_msc.h" # undef Py_ATOMIC_MSC_H #else # error "no available pyatomic implementation for this platform/compiler" diff --git a/Include/cpython/pystats.h b/Include/cpython/pystats.h index ee8885cda7b60d..f52348e42b1330 100644 --- a/Include/cpython/pystats.h +++ b/Include/cpython/pystats.h @@ -141,6 +141,14 @@ typedef struct _optimization_stats { uint64_t remove_globals_builtins_changed; uint64_t remove_globals_incorrect_keys; uint64_t error_in_opcode[PYSTATS_MAX_UOP_ID + 1]; + // JIT memory stats + uint64_t jit_total_memory_size; + uint64_t jit_code_size; + uint64_t jit_trampoline_size; + uint64_t jit_data_size; + uint64_t jit_padding_size; + uint64_t jit_freed_memory_size; + uint64_t trace_total_memory_hist[_Py_UOP_HIST_SIZE]; } OptimizationStats; typedef struct _rare_event_stats { diff --git a/Include/cpython/pythread.h b/Include/cpython/pythread.h index 03f710a9f7ef2e..e658b35bd90700 100644 --- a/Include/cpython/pythread.h +++ b/Include/cpython/pythread.h @@ -22,7 +22,7 @@ PyAPI_DATA(const long long) PY_TIMEOUT_MAX; */ # define NATIVE_TSS_KEY_T unsigned long #elif defined(HAVE_PTHREAD_STUBS) -# include "cpython/pthread_stubs.h" +# include "pthread_stubs.h" # define NATIVE_TSS_KEY_T pthread_key_t #else # error "Require native threads. See https://bugs.python.org/issue31370" diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index 287de52b96202c..cea69dd1280999 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -240,6 +240,8 @@ enum PyUnicode_Kind { PyUnicode_4BYTE_KIND = 4 }; +PyAPI_FUNC(int) PyUnicode_KIND(PyObject *op); + // PyUnicode_KIND(): Return one of the PyUnicode_*_KIND values defined above. // // gh-89653: Converting this macro to a static inline function would introduce @@ -264,13 +266,15 @@ static inline void* _PyUnicode_NONCOMPACT_DATA(PyObject *op) { return data; } -static inline void* PyUnicode_DATA(PyObject *op) { +PyAPI_FUNC(void*) PyUnicode_DATA(PyObject *op); + +static inline void* _PyUnicode_DATA(PyObject *op) { if (PyUnicode_IS_COMPACT(op)) { return _PyUnicode_COMPACT_DATA(op); } return _PyUnicode_NONCOMPACT_DATA(op); } -#define PyUnicode_DATA(op) PyUnicode_DATA(_PyObject_CAST(op)) +#define PyUnicode_DATA(op) _PyUnicode_DATA(_PyObject_CAST(op)) /* Return pointers to the canonical representation cast to unsigned char, Py_UCS2, or Py_UCS4 for direct character access. 
diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h index 01d41446fdb0cf..65c3d142458577 100644 --- a/Include/internal/pycore_code.h +++ b/Include/internal/pycore_code.h @@ -373,6 +373,7 @@ extern void _Py_Specialize_ContainsOp(_PyStackRef value, _Py_CODEUNIT *instr); do { if (_Py_stats && PyFunction_Check(callable)) _Py_stats->call_stats.eval_calls[name]++; } while (0) #define GC_STAT_ADD(gen, name, n) do { if (_Py_stats) _Py_stats->gc_stats[(gen)].name += (n); } while (0) #define OPT_STAT_INC(name) do { if (_Py_stats) _Py_stats->optimization_stats.name++; } while (0) +#define OPT_STAT_ADD(name, n) do { if (_Py_stats) _Py_stats->optimization_stats.name += (n); } while (0) #define UOP_STAT_INC(opname, name) do { if (_Py_stats) { assert(opname < 512); _Py_stats->optimization_stats.opcode[opname].name++; } } while (0) #define UOP_PAIR_INC(uopcode, lastuop) \ do { \ @@ -408,6 +409,7 @@ PyAPI_FUNC(PyObject*) _Py_GetSpecializationStats(void); #define EVAL_CALL_STAT_INC_IF_FUNCTION(name, callable) ((void)0) #define GC_STAT_ADD(gen, name, n) ((void)0) #define OPT_STAT_INC(name) ((void)0) +#define OPT_STAT_ADD(name, n) ((void)0) #define UOP_STAT_INC(opname, name) ((void)0) #define UOP_PAIR_INC(uopcode, lastuop) ((void)0) #define OPT_UNSUPPORTED_OPCODE(opname) ((void)0) diff --git a/Include/internal/pycore_frame.h b/Include/internal/pycore_frame.h index 155a6f3ce054a2..8cc3504723b64c 100644 --- a/Include/internal/pycore_frame.h +++ b/Include/internal/pycore_frame.h @@ -159,13 +159,6 @@ static inline void _PyFrame_Copy(_PyInterpreterFrame *src, _PyInterpreterFrame * // Don't leave a dangling pointer to the old frame when creating generators // and coroutines: dest->previous = NULL; - -#ifdef Py_GIL_DISABLED - PyCodeObject *co = _PyFrame_GetCode(dest); - for (int i = stacktop; i < co->co_nlocalsplus + co->co_stacksize; i++) { - dest->localsplus[i] = PyStackRef_NULL; - } -#endif } #ifdef Py_GIL_DISABLED @@ -215,20 +208,13 @@ _PyFrame_Initialize( frame->return_offset = 0; frame->owner = FRAME_OWNED_BY_THREAD; frame->visited = 0; +#ifdef Py_DEBUG + frame->lltrace = 0; +#endif for (int i = null_locals_from; i < code->co_nlocalsplus; i++) { frame->localsplus[i] = PyStackRef_NULL; } - -#ifdef Py_GIL_DISABLED - // On GIL disabled, we walk the entire stack in GC. Since stacktop - // is not always in sync with the real stack pointer, we have - // no choice but to traverse the entire stack. - // This just makes sure we don't pass the GC invalid stack values. 
- for (int i = code->co_nlocalsplus; i < code->co_nlocalsplus + code->co_stacksize; i++) { - frame->localsplus[i] = PyStackRef_NULL; - } -#endif } /* Gets the pointer to the locals array @@ -398,14 +384,10 @@ _PyFrame_PushTrampolineUnchecked(PyThreadState *tstate, PyCodeObject *code, int #endif frame->owner = FRAME_OWNED_BY_THREAD; frame->visited = 0; - frame->return_offset = 0; - -#ifdef Py_GIL_DISABLED - assert(code->co_nlocalsplus == 0); - for (int i = 0; i < code->co_stacksize; i++) { - frame->localsplus[i] = PyStackRef_NULL; - } +#ifdef Py_DEBUG + frame->lltrace = 0; #endif + frame->return_offset = 0; return frame; } diff --git a/Include/internal/pycore_freelist_state.h b/Include/internal/pycore_freelist_state.h index 2ccd1ac055b747..7c252f5b570c13 100644 --- a/Include/internal/pycore_freelist_state.h +++ b/Include/internal/pycore_freelist_state.h @@ -11,6 +11,8 @@ extern "C" { # define PyTuple_MAXSAVESIZE 20 // Largest tuple to save on freelist # define Py_tuple_MAXFREELIST 2000 // Maximum number of tuples of each size to save # define Py_lists_MAXFREELIST 80 +# define Py_list_iters_MAXFREELIST 10 +# define Py_tuple_iters_MAXFREELIST 10 # define Py_dicts_MAXFREELIST 80 # define Py_dictkeys_MAXFREELIST 80 # define Py_floats_MAXFREELIST 100 @@ -40,6 +42,8 @@ struct _Py_freelists { struct _Py_freelist ints; struct _Py_freelist tuples[PyTuple_MAXSAVESIZE]; struct _Py_freelist lists; + struct _Py_freelist list_iters; + struct _Py_freelist tuple_iters; struct _Py_freelist dicts; struct _Py_freelist dictkeys; struct _Py_freelist slices; diff --git a/Include/internal/pycore_import.h b/Include/internal/pycore_import.h index 318c712bdfa174..5fe60df0a92fbc 100644 --- a/Include/internal/pycore_import.h +++ b/Include/internal/pycore_import.h @@ -31,12 +31,6 @@ extern int _PyImport_FixupBuiltin( PyObject *modules ); -// Export for many shared extensions, like '_json' -PyAPI_FUNC(PyObject*) _PyImport_GetModuleAttr(PyObject *, PyObject *); - -// Export for many shared extensions, like '_datetime' -PyAPI_FUNC(PyObject*) _PyImport_GetModuleAttrString(const char *, const char *); - struct _import_runtime_state { /* The builtin modules (defined in config.c). 
*/ diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index f745b09796753b..6f00eca8de05af 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -31,7 +31,7 @@ extern "C" { #include "pycore_list.h" // struct _Py_list_state #include "pycore_mimalloc.h" // struct _mimalloc_interp_state #include "pycore_object_state.h" // struct _py_object_state -#include "pycore_optimizer.h" // _PyOptimizerObject +#include "pycore_optimizer.h" // _PyExecutorObject #include "pycore_obmalloc.h" // struct _obmalloc_state #include "pycore_qsbr.h" // struct _qsbr_state #include "pycore_stackref.h" // Py_STACKREF_DEBUG @@ -262,7 +262,7 @@ struct _is { struct ast_state ast; struct types_state types; struct callable_cache callable_cache; - _PyOptimizerObject *optimizer; + bool jit; _PyExecutorObject *executor_list_head; size_t trace_run_counter; _rare_events rare_events; diff --git a/Include/internal/pycore_long.h b/Include/internal/pycore_long.h index 8bead00e70640c..df0656a7cb8f0c 100644 --- a/Include/internal/pycore_long.h +++ b/Include/internal/pycore_long.h @@ -65,6 +65,8 @@ PyAPI_FUNC(void) _PyLong_ExactDealloc(PyObject *self); # error "_PY_NSMALLPOSINTS must be greater than or equal to 257" #endif +#define _PY_IS_SMALL_INT(val) ((val) >= 0 && (val) < 256 && (val) < _PY_NSMALLPOSINTS) + // Return a reference to the immortal zero singleton. // The function cannot return NULL. static inline PyObject* _PyLong_GetZero(void) @@ -159,13 +161,14 @@ PyAPI_FUNC(int) _PyLong_Size_t_Converter(PyObject *, void *); /* Long value tag bits: * 0-1: Sign bits value = (1-sign), ie. negative=2, positive=0, zero=1. - * 2: Reserved for immortality bit + * 2: Set to 1 for the small ints * 3+ Unsigned digit count */ #define SIGN_MASK 3 #define SIGN_ZERO 1 #define SIGN_NEGATIVE 2 #define NON_SIZE_BITS 3 +#define IMMORTALITY_BIT_MASK (1 << 2) /* The functions _PyLong_IsCompact and _PyLong_CompactValue are defined * in Include/cpython/longobject.h, since they need to be inline. @@ -196,7 +199,7 @@ PyAPI_FUNC(int) _PyLong_Size_t_Converter(PyObject *, void *); static inline int _PyLong_IsNonNegativeCompact(const PyLongObject* op) { assert(PyLong_Check(op)); - return op->long_value.lv_tag <= (1 << NON_SIZE_BITS); + return ((op->long_value.lv_tag & ~IMMORTALITY_BIT_MASK) <= (1 << NON_SIZE_BITS)); } @@ -298,7 +301,7 @@ _PyLong_FlipSign(PyLongObject *op) { .long_value = { \ .lv_tag = TAG_FROM_SIGN_AND_SIZE( \ (val) == 0 ? 0 : ((val) < 0 ? -1 : 1), \ - (val) == 0 ? 0 : 1), \ + (val) == 0 ? 0 : 1) | IMMORTALITY_BIT_MASK, \ { ((val) >= 0 ? 
(val) : -(val)) }, \ } \ } diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h index bad5e515a99565..beb0baa7bb69a6 100644 --- a/Include/internal/pycore_opcode_metadata.h +++ b/Include/internal/pycore_opcode_metadata.h @@ -226,9 +226,9 @@ int _PyOpcode_num_popped(int opcode, int oparg) { case INSTRUMENTED_CALL: return 2 + oparg; case INSTRUMENTED_CALL_FUNCTION_EX: - return 0; + return 4; case INSTRUMENTED_CALL_KW: - return 0; + return 3 + oparg; case INSTRUMENTED_END_FOR: return 2; case INSTRUMENTED_END_SEND: @@ -244,7 +244,7 @@ int _PyOpcode_num_popped(int opcode, int oparg) { case INSTRUMENTED_LINE: return 0; case INSTRUMENTED_LOAD_SUPER_ATTR: - return 0; + return 3; case INSTRUMENTED_NOT_TAKEN: return 0; case INSTRUMENTED_POP_ITER: @@ -271,8 +271,12 @@ int _PyOpcode_num_popped(int opcode, int oparg) { return 0; case JUMP_BACKWARD: return 0; + case JUMP_BACKWARD_JIT: + return 0; case JUMP_BACKWARD_NO_INTERRUPT: return 0; + case JUMP_BACKWARD_NO_JIT: + return 0; case JUMP_FORWARD: return 0; case JUMP_IF_FALSE: @@ -697,9 +701,9 @@ int _PyOpcode_num_pushed(int opcode, int oparg) { case INSTRUMENTED_CALL: return 1; case INSTRUMENTED_CALL_FUNCTION_EX: - return 0; + return 1; case INSTRUMENTED_CALL_KW: - return 0; + return 1; case INSTRUMENTED_END_FOR: return 1; case INSTRUMENTED_END_SEND: @@ -715,7 +719,7 @@ int _PyOpcode_num_pushed(int opcode, int oparg) { case INSTRUMENTED_LINE: return 0; case INSTRUMENTED_LOAD_SUPER_ATTR: - return 0; + return 1 + (oparg & 1); case INSTRUMENTED_NOT_TAKEN: return 0; case INSTRUMENTED_POP_ITER: @@ -742,8 +746,12 @@ int _PyOpcode_num_pushed(int opcode, int oparg) { return 0; case JUMP_BACKWARD: return 0; + case JUMP_BACKWARD_JIT: + return 0; case JUMP_BACKWARD_NO_INTERRUPT: return 0; + case JUMP_BACKWARD_NO_JIT: + return 0; case JUMP_FORWARD: return 0; case JUMP_IF_FALSE: @@ -1380,7 +1388,7 @@ int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect) { return 0; } case INSTRUMENTED_CALL_KW: { - *effect = 0; + *effect = Py_MAX(0, -2 - oparg); return 0; } case INSTRUMENTED_END_FOR: { @@ -1412,7 +1420,7 @@ int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect) { return 0; } case INSTRUMENTED_LOAD_SUPER_ATTR: { - *effect = 0; + *effect = Py_MAX(-2, -2 + (oparg & 1)); return 0; } case INSTRUMENTED_NOT_TAKEN: { @@ -1467,10 +1475,18 @@ int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect) { *effect = 0; return 0; } + case JUMP_BACKWARD_JIT: { + *effect = 0; + return 0; + } case JUMP_BACKWARD_NO_INTERRUPT: { *effect = 0; return 0; } + case JUMP_BACKWARD_NO_JIT: { + *effect = 0; + return 0; + } case JUMP_FORWARD: { *effect = 0; return 0; @@ -1997,7 +2013,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [BINARY_OP_ADD_FLOAT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, [BINARY_OP_ADD_INT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, [BINARY_OP_ADD_UNICODE] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_EXTEND] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, + [BINARY_OP_EXTEND] = { true, INSTR_FMT_IXC0000, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IXC0000, HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG }, [BINARY_OP_MULTIPLY_FLOAT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, [BINARY_OP_MULTIPLY_INT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, @@ -2088,8 +2104,8 @@ const struct 
opcode_metadata _PyOpcode_opcode_metadata[266] = { [IMPORT_FROM] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [IMPORT_NAME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_CALL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_CALL_FUNCTION_EX] = { true, INSTR_FMT_IX, 0 }, - [INSTRUMENTED_CALL_KW] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_CALL_FUNCTION_EX] = { true, INSTR_FMT_IX, HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_CALL_KW] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_END_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG | HAS_NO_SAVE_IP_FLAG }, [INSTRUMENTED_END_SEND] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, @@ -2097,7 +2113,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [INSTRUMENTED_JUMP_BACKWARD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_JUMP_FORWARD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, [INSTRUMENTED_LINE] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, - [INSTRUMENTED_LOAD_SUPER_ATTR] = { true, INSTR_FMT_IXC, 0 }, + [INSTRUMENTED_LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_NOT_TAKEN] = { true, INSTR_FMT_IX, 0 }, [INSTRUMENTED_POP_ITER] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, [INSTRUMENTED_POP_JUMP_IF_FALSE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, @@ -2110,7 +2126,9 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [INTERPRETER_EXIT] = { true, INSTR_FMT_IX, 0 }, [IS_OP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, [JUMP_BACKWARD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [JUMP_BACKWARD_JIT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [JUMP_BACKWARD_NO_INTERRUPT] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, + [JUMP_BACKWARD_NO_JIT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [JUMP_FORWARD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG }, [LIST_APPEND] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, [LIST_EXTEND] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -2118,7 +2136,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [LOAD_ATTR_CLASS] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_EXIT_FLAG }, [LOAD_ATTR_CLASS_WITH_METACLASS_CHECK] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_EXIT_FLAG }, [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG }, - [LOAD_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, + [LOAD_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, [LOAD_ATTR_METHOD_LAZY_DICT] = { true, 
INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, [LOAD_ATTR_METHOD_NO_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_EXIT_FLAG }, [LOAD_ATTR_METHOD_WITH_VALUES] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, @@ -2539,7 +2557,9 @@ const char *_PyOpcode_OpName[266] = { [IS_OP] = "IS_OP", [JUMP] = "JUMP", [JUMP_BACKWARD] = "JUMP_BACKWARD", + [JUMP_BACKWARD_JIT] = "JUMP_BACKWARD_JIT", [JUMP_BACKWARD_NO_INTERRUPT] = "JUMP_BACKWARD_NO_INTERRUPT", + [JUMP_BACKWARD_NO_JIT] = "JUMP_BACKWARD_NO_JIT", [JUMP_FORWARD] = "JUMP_FORWARD", [JUMP_IF_FALSE] = "JUMP_IF_FALSE", [JUMP_IF_TRUE] = "JUMP_IF_TRUE", @@ -2800,7 +2820,9 @@ const uint8_t _PyOpcode_Deopt[256] = { [INTERPRETER_EXIT] = INTERPRETER_EXIT, [IS_OP] = IS_OP, [JUMP_BACKWARD] = JUMP_BACKWARD, + [JUMP_BACKWARD_JIT] = JUMP_BACKWARD, [JUMP_BACKWARD_NO_INTERRUPT] = JUMP_BACKWARD_NO_INTERRUPT, + [JUMP_BACKWARD_NO_JIT] = JUMP_BACKWARD, [JUMP_FORWARD] = JUMP_FORWARD, [LIST_APPEND] = LIST_APPEND, [LIST_EXTEND] = LIST_EXTEND, @@ -2939,8 +2961,6 @@ const uint8_t _PyOpcode_Deopt[256] = { case 146: \ case 147: \ case 148: \ - case 230: \ - case 231: \ case 232: \ case 233: \ case 234: \ diff --git a/Include/internal/pycore_optimizer.h b/Include/internal/pycore_optimizer.h index 03ce4d4491acd7..00fc4338b0a412 100644 --- a/Include/internal/pycore_optimizer.h +++ b/Include/internal/pycore_optimizer.h @@ -83,23 +83,6 @@ typedef struct _PyExecutorObject { _PyExitData exits[1]; } _PyExecutorObject; -typedef struct _PyOptimizerObject _PyOptimizerObject; - -/* Should return > 0 if a new executor is created. O if no executor is produced and < 0 if an error occurred. */ -typedef int (*_Py_optimize_func)( - _PyOptimizerObject* self, struct _PyInterpreterFrame *frame, - _Py_CODEUNIT *instr, _PyExecutorObject **exec_ptr, - int curr_stackentries, bool progress_needed); - -struct _PyOptimizerObject { - PyObject_HEAD - _Py_optimize_func optimize; - /* Data needed by the optimizer goes here, but is opaque to the VM */ -}; - -/** Test support **/ -_PyOptimizerObject *_Py_SetOptimizer(PyInterpreterState *interp, _PyOptimizerObject* optimizer); - // Export for '_opcode' shared extension (JIT compiler). PyAPI_FUNC(_PyExecutorObject*) _Py_GetExecutor(PyCodeObject *code, int offset); @@ -110,12 +93,6 @@ void _Py_BloomFilter_Init(_PyBloomFilter *); void _Py_BloomFilter_Add(_PyBloomFilter *bloom, void *obj); PyAPI_FUNC(void) _Py_Executor_DependsOn(_PyExecutorObject *executor, void *obj); -// For testing -// Export for '_testinternalcapi' shared extension. 
-PyAPI_FUNC(_PyOptimizerObject *) _Py_GetOptimizer(void); -PyAPI_FUNC(int) _Py_SetTier2Optimizer(_PyOptimizerObject* optimizer); -PyAPI_FUNC(PyObject *) _PyOptimizer_NewUOpOptimizer(void); - #define _Py_MAX_ALLOWED_BUILTINS_MODIFICATIONS 3 #define _Py_MAX_ALLOWED_GLOBALS_MODIFICATIONS 6 @@ -144,9 +121,7 @@ int _Py_uop_analyze_and_optimize(struct _PyInterpreterFrame *frame, _PyUOpInstruction *trace, int trace_len, int curr_stackentries, _PyBloomFilter *dependencies); -extern PyTypeObject _PyDefaultOptimizer_Type; extern PyTypeObject _PyUOpExecutor_Type; -extern PyTypeObject _PyUOpOptimizer_Type; #define UOP_FORMAT_TARGET 0 @@ -307,7 +282,7 @@ extern int _Py_uop_frame_pop(JitOptContext *ctx); PyAPI_FUNC(PyObject *) _Py_uop_symbols_test(PyObject *self, PyObject *ignored); -PyAPI_FUNC(int) _PyOptimizer_Optimize(struct _PyInterpreterFrame *frame, _Py_CODEUNIT *start, _PyStackRef *stack_pointer, _PyExecutorObject **exec_ptr, int chain_depth); +PyAPI_FUNC(int) _PyOptimizer_Optimize(struct _PyInterpreterFrame *frame, _Py_CODEUNIT *start, _PyExecutorObject **exec_ptr, int chain_depth); static inline int is_terminator(const _PyUOpInstruction *uop) { diff --git a/Include/internal/pycore_uop_ids.h b/Include/internal/pycore_uop_ids.h index 7a6c0d22fe24e5..ca40af55406089 100644 --- a/Include/internal/pycore_uop_ids.h +++ b/Include/internal/pycore_uop_ids.h @@ -152,13 +152,10 @@ extern "C" { #define _INIT_CALL_PY_EXACT_ARGS_2 396 #define _INIT_CALL_PY_EXACT_ARGS_3 397 #define _INIT_CALL_PY_EXACT_ARGS_4 398 -#define _INSTRUMENTED_CALL_FUNCTION_EX INSTRUMENTED_CALL_FUNCTION_EX -#define _INSTRUMENTED_CALL_KW INSTRUMENTED_CALL_KW #define _INSTRUMENTED_FOR_ITER INSTRUMENTED_FOR_ITER #define _INSTRUMENTED_INSTRUCTION INSTRUMENTED_INSTRUCTION #define _INSTRUMENTED_JUMP_FORWARD INSTRUMENTED_JUMP_FORWARD #define _INSTRUMENTED_LINE INSTRUMENTED_LINE -#define _INSTRUMENTED_LOAD_SUPER_ATTR INSTRUMENTED_LOAD_SUPER_ATTR #define _INSTRUMENTED_NOT_TAKEN INSTRUMENTED_NOT_TAKEN #define _INSTRUMENTED_POP_JUMP_IF_FALSE INSTRUMENTED_POP_JUMP_IF_FALSE #define _INSTRUMENTED_POP_JUMP_IF_NONE INSTRUMENTED_POP_JUMP_IF_NONE @@ -242,58 +239,59 @@ extern "C" { #define _MAYBE_EXPAND_METHOD 447 #define _MAYBE_EXPAND_METHOD_KW 448 #define _MONITOR_CALL 449 -#define _MONITOR_JUMP_BACKWARD 450 -#define _MONITOR_RESUME 451 +#define _MONITOR_CALL_KW 450 +#define _MONITOR_JUMP_BACKWARD 451 +#define _MONITOR_RESUME 452 #define _NOP NOP #define _POP_EXCEPT POP_EXCEPT -#define _POP_JUMP_IF_FALSE 452 -#define _POP_JUMP_IF_TRUE 453 +#define _POP_JUMP_IF_FALSE 453 +#define _POP_JUMP_IF_TRUE 454 #define _POP_TOP POP_TOP -#define _POP_TOP_LOAD_CONST_INLINE_BORROW 454 +#define _POP_TOP_LOAD_CONST_INLINE_BORROW 455 #define _PUSH_EXC_INFO PUSH_EXC_INFO -#define _PUSH_FRAME 455 +#define _PUSH_FRAME 456 #define _PUSH_NULL PUSH_NULL -#define _PUSH_NULL_CONDITIONAL 456 -#define _PY_FRAME_GENERAL 457 -#define _PY_FRAME_KW 458 -#define _QUICKEN_RESUME 459 -#define _REPLACE_WITH_TRUE 460 +#define _PUSH_NULL_CONDITIONAL 457 +#define _PY_FRAME_GENERAL 458 +#define _PY_FRAME_KW 459 +#define _QUICKEN_RESUME 460 +#define _REPLACE_WITH_TRUE 461 #define _RESUME_CHECK RESUME_CHECK #define _RETURN_GENERATOR RETURN_GENERATOR #define _RETURN_VALUE RETURN_VALUE -#define _SAVE_RETURN_OFFSET 461 -#define _SEND 462 -#define _SEND_GEN_FRAME 463 +#define _SAVE_RETURN_OFFSET 462 +#define _SEND 463 +#define _SEND_GEN_FRAME 464 #define _SETUP_ANNOTATIONS SETUP_ANNOTATIONS #define _SET_ADD SET_ADD #define _SET_FUNCTION_ATTRIBUTE SET_FUNCTION_ATTRIBUTE #define _SET_UPDATE 
SET_UPDATE -#define _START_EXECUTOR 464 -#define _STORE_ATTR 465 -#define _STORE_ATTR_INSTANCE_VALUE 466 -#define _STORE_ATTR_SLOT 467 -#define _STORE_ATTR_WITH_HINT 468 +#define _START_EXECUTOR 465 +#define _STORE_ATTR 466 +#define _STORE_ATTR_INSTANCE_VALUE 467 +#define _STORE_ATTR_SLOT 468 +#define _STORE_ATTR_WITH_HINT 469 #define _STORE_DEREF STORE_DEREF -#define _STORE_FAST 469 -#define _STORE_FAST_0 470 -#define _STORE_FAST_1 471 -#define _STORE_FAST_2 472 -#define _STORE_FAST_3 473 -#define _STORE_FAST_4 474 -#define _STORE_FAST_5 475 -#define _STORE_FAST_6 476 -#define _STORE_FAST_7 477 +#define _STORE_FAST 470 +#define _STORE_FAST_0 471 +#define _STORE_FAST_1 472 +#define _STORE_FAST_2 473 +#define _STORE_FAST_3 474 +#define _STORE_FAST_4 475 +#define _STORE_FAST_5 476 +#define _STORE_FAST_6 477 +#define _STORE_FAST_7 478 #define _STORE_FAST_LOAD_FAST STORE_FAST_LOAD_FAST #define _STORE_FAST_STORE_FAST STORE_FAST_STORE_FAST #define _STORE_GLOBAL STORE_GLOBAL #define _STORE_NAME STORE_NAME -#define _STORE_SLICE 478 -#define _STORE_SUBSCR 479 +#define _STORE_SLICE 479 +#define _STORE_SUBSCR 480 #define _STORE_SUBSCR_DICT STORE_SUBSCR_DICT #define _STORE_SUBSCR_LIST_INT STORE_SUBSCR_LIST_INT #define _SWAP SWAP -#define _TIER2_RESUME_CHECK 480 -#define _TO_BOOL 481 +#define _TIER2_RESUME_CHECK 481 +#define _TO_BOOL 482 #define _TO_BOOL_BOOL TO_BOOL_BOOL #define _TO_BOOL_INT TO_BOOL_INT #define _TO_BOOL_LIST TO_BOOL_LIST @@ -303,13 +301,13 @@ extern "C" { #define _UNARY_NEGATIVE UNARY_NEGATIVE #define _UNARY_NOT UNARY_NOT #define _UNPACK_EX UNPACK_EX -#define _UNPACK_SEQUENCE 482 +#define _UNPACK_SEQUENCE 483 #define _UNPACK_SEQUENCE_LIST UNPACK_SEQUENCE_LIST #define _UNPACK_SEQUENCE_TUPLE UNPACK_SEQUENCE_TUPLE #define _UNPACK_SEQUENCE_TWO_TUPLE UNPACK_SEQUENCE_TWO_TUPLE #define _WITH_EXCEPT_START WITH_EXCEPT_START #define _YIELD_VALUE YIELD_VALUE -#define MAX_UOP_ID 482 +#define MAX_UOP_ID 483 #ifdef __cplusplus } diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h index 59740dbb57072e..0ed4c7c3a35436 100644 --- a/Include/internal/pycore_uop_metadata.h +++ b/Include/internal/pycore_uop_metadata.h @@ -82,7 +82,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_GUARD_BOTH_UNICODE] = HAS_EXIT_FLAG, [_BINARY_OP_ADD_UNICODE] = HAS_ERROR_FLAG | HAS_PURE_FLAG, [_BINARY_OP_INPLACE_ADD_UNICODE] = HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG, - [_GUARD_BINARY_OP_EXTEND] = HAS_EXIT_FLAG | HAS_ESCAPES_FLAG, + [_GUARD_BINARY_OP_EXTEND] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, [_BINARY_OP_EXTEND] = HAS_ESCAPES_FLAG | HAS_PURE_FLAG, [_BINARY_SUBSCR] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_BINARY_SLICE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, @@ -154,7 +154,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_GUARD_TYPE_VERSION] = HAS_EXIT_FLAG, [_GUARD_TYPE_VERSION_AND_LOCK] = HAS_EXIT_FLAG, [_CHECK_MANAGED_OBJECT_HAS_VALUES] = HAS_DEOPT_FLAG, - [_LOAD_ATTR_INSTANCE_VALUE] = HAS_DEOPT_FLAG, + [_LOAD_ATTR_INSTANCE_VALUE] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, [_CHECK_ATTR_MODULE_PUSH_KEYS] = HAS_DEOPT_FLAG, [_LOAD_ATTR_MODULE_FROM_KEYS] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, [_CHECK_ATTR_WITH_HINT] = HAS_EXIT_FLAG, @@ -964,7 +964,7 @@ int _PyUop_num_popped(int opcode, int oparg) case _CHECK_METHOD_VERSION: return 0; case _EXPAND_METHOD: - return 2 + oparg; + return 0; case _CHECK_IS_NOT_PY_CALLABLE: return 0; case _CALL_NON_PY_GENERAL: @@ -972,7 +972,7 @@ int _PyUop_num_popped(int opcode, int oparg) case _CHECK_CALL_BOUND_METHOD_EXACT_ARGS: return 0; case 
_INIT_CALL_BOUND_METHOD_EXACT_ARGS: - return 2 + oparg; + return 0; case _CHECK_PEP_523: return 0; case _CHECK_FUNCTION_EXACT_ARGS: @@ -1036,7 +1036,7 @@ int _PyUop_num_popped(int opcode, int oparg) case _CHECK_METHOD_VERSION_KW: return 0; case _EXPAND_METHOD_KW: - return 3 + oparg; + return 0; case _CHECK_IS_NOT_PY_CALLABLE_KW: return 0; case _CALL_KW_NON_PY: @@ -1062,7 +1062,7 @@ int _PyUop_num_popped(int opcode, int oparg) case _BINARY_OP: return 2; case _SWAP: - return 2 + (oparg-2); + return 0; case _GUARD_IS_TRUE_POP: return 1; case _GUARD_IS_FALSE_POP: @@ -1110,7 +1110,7 @@ int _PyUop_num_popped(int opcode, int oparg) case _DEOPT: return 0; case _ERROR_POP_N: - return oparg; + return 0; case _TIER2_RESUME_CHECK: return 0; default: diff --git a/Include/opcode_ids.h b/Include/opcode_ids.h index c3b58825bfc938..dfe7fa36cccd31 100644 --- a/Include/opcode_ids.h +++ b/Include/opcode_ids.h @@ -174,59 +174,61 @@ extern "C" { #define FOR_ITER_LIST 192 #define FOR_ITER_RANGE 193 #define FOR_ITER_TUPLE 194 -#define LOAD_ATTR_CLASS 195 -#define LOAD_ATTR_CLASS_WITH_METACLASS_CHECK 196 -#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 197 -#define LOAD_ATTR_INSTANCE_VALUE 198 -#define LOAD_ATTR_METHOD_LAZY_DICT 199 -#define LOAD_ATTR_METHOD_NO_DICT 200 -#define LOAD_ATTR_METHOD_WITH_VALUES 201 -#define LOAD_ATTR_MODULE 202 -#define LOAD_ATTR_NONDESCRIPTOR_NO_DICT 203 -#define LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 204 -#define LOAD_ATTR_PROPERTY 205 -#define LOAD_ATTR_SLOT 206 -#define LOAD_ATTR_WITH_HINT 207 -#define LOAD_CONST_IMMORTAL 208 -#define LOAD_CONST_MORTAL 209 -#define LOAD_GLOBAL_BUILTIN 210 -#define LOAD_GLOBAL_MODULE 211 -#define LOAD_SUPER_ATTR_ATTR 212 -#define LOAD_SUPER_ATTR_METHOD 213 -#define RESUME_CHECK 214 -#define SEND_GEN 215 -#define STORE_ATTR_INSTANCE_VALUE 216 -#define STORE_ATTR_SLOT 217 -#define STORE_ATTR_WITH_HINT 218 -#define STORE_SUBSCR_DICT 219 -#define STORE_SUBSCR_LIST_INT 220 -#define TO_BOOL_ALWAYS_TRUE 221 -#define TO_BOOL_BOOL 222 -#define TO_BOOL_INT 223 -#define TO_BOOL_LIST 224 -#define TO_BOOL_NONE 225 -#define TO_BOOL_STR 226 -#define UNPACK_SEQUENCE_LIST 227 -#define UNPACK_SEQUENCE_TUPLE 228 -#define UNPACK_SEQUENCE_TWO_TUPLE 229 +#define JUMP_BACKWARD_JIT 195 +#define JUMP_BACKWARD_NO_JIT 196 +#define LOAD_ATTR_CLASS 197 +#define LOAD_ATTR_CLASS_WITH_METACLASS_CHECK 198 +#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 199 +#define LOAD_ATTR_INSTANCE_VALUE 200 +#define LOAD_ATTR_METHOD_LAZY_DICT 201 +#define LOAD_ATTR_METHOD_NO_DICT 202 +#define LOAD_ATTR_METHOD_WITH_VALUES 203 +#define LOAD_ATTR_MODULE 204 +#define LOAD_ATTR_NONDESCRIPTOR_NO_DICT 205 +#define LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 206 +#define LOAD_ATTR_PROPERTY 207 +#define LOAD_ATTR_SLOT 208 +#define LOAD_ATTR_WITH_HINT 209 +#define LOAD_CONST_IMMORTAL 210 +#define LOAD_CONST_MORTAL 211 +#define LOAD_GLOBAL_BUILTIN 212 +#define LOAD_GLOBAL_MODULE 213 +#define LOAD_SUPER_ATTR_ATTR 214 +#define LOAD_SUPER_ATTR_METHOD 215 +#define RESUME_CHECK 216 +#define SEND_GEN 217 +#define STORE_ATTR_INSTANCE_VALUE 218 +#define STORE_ATTR_SLOT 219 +#define STORE_ATTR_WITH_HINT 220 +#define STORE_SUBSCR_DICT 221 +#define STORE_SUBSCR_LIST_INT 222 +#define TO_BOOL_ALWAYS_TRUE 223 +#define TO_BOOL_BOOL 224 +#define TO_BOOL_INT 225 +#define TO_BOOL_LIST 226 +#define TO_BOOL_NONE 227 +#define TO_BOOL_STR 228 +#define UNPACK_SEQUENCE_LIST 229 +#define UNPACK_SEQUENCE_TUPLE 230 +#define UNPACK_SEQUENCE_TWO_TUPLE 231 #define INSTRUMENTED_END_FOR 235 #define INSTRUMENTED_POP_ITER 236 #define INSTRUMENTED_END_SEND 237 
-#define INSTRUMENTED_LOAD_SUPER_ATTR 238 -#define INSTRUMENTED_FOR_ITER 239 -#define INSTRUMENTED_CALL_KW 240 -#define INSTRUMENTED_CALL_FUNCTION_EX 241 -#define INSTRUMENTED_INSTRUCTION 242 -#define INSTRUMENTED_JUMP_FORWARD 243 -#define INSTRUMENTED_NOT_TAKEN 244 -#define INSTRUMENTED_POP_JUMP_IF_TRUE 245 -#define INSTRUMENTED_POP_JUMP_IF_FALSE 246 -#define INSTRUMENTED_POP_JUMP_IF_NONE 247 -#define INSTRUMENTED_POP_JUMP_IF_NOT_NONE 248 -#define INSTRUMENTED_RESUME 249 -#define INSTRUMENTED_RETURN_VALUE 250 -#define INSTRUMENTED_YIELD_VALUE 251 -#define INSTRUMENTED_CALL 252 +#define INSTRUMENTED_FOR_ITER 238 +#define INSTRUMENTED_INSTRUCTION 239 +#define INSTRUMENTED_JUMP_FORWARD 240 +#define INSTRUMENTED_NOT_TAKEN 241 +#define INSTRUMENTED_POP_JUMP_IF_TRUE 242 +#define INSTRUMENTED_POP_JUMP_IF_FALSE 243 +#define INSTRUMENTED_POP_JUMP_IF_NONE 244 +#define INSTRUMENTED_POP_JUMP_IF_NOT_NONE 245 +#define INSTRUMENTED_RESUME 246 +#define INSTRUMENTED_RETURN_VALUE 247 +#define INSTRUMENTED_YIELD_VALUE 248 +#define INSTRUMENTED_LOAD_SUPER_ATTR 249 +#define INSTRUMENTED_CALL 250 +#define INSTRUMENTED_CALL_KW 251 +#define INSTRUMENTED_CALL_FUNCTION_EX 252 #define INSTRUMENTED_JUMP_BACKWARD 253 #define INSTRUMENTED_LINE 254 #define ENTER_EXECUTOR 255 diff --git a/InternalDocs/jit.md b/InternalDocs/jit.md index 1e9f385d5f87fa..2c204f39792d6a 100644 --- a/InternalDocs/jit.md +++ b/InternalDocs/jit.md @@ -38,12 +38,8 @@ executor in `co_executors`. ## The micro-op optimizer -The optimizer that `_PyOptimizer_Optimize()` runs is configurable via the -`_Py_SetTier2Optimizer()` function (this is used in test via -`_testinternalcapi.set_optimizer()`.) - The micro-op (abbreviated `uop` to approximate `μop`) optimizer is defined in -[`Python/optimizer.c`](../Python/optimizer.c) as the type `_PyUOpOptimizer_Type`. +[`Python/optimizer.c`](../Python/optimizer.c) as `_PyOptimizer_Optimize`. 
It translates an instruction trace into a sequence of micro-ops by replacing each bytecode by an equivalent sequence of micro-ops (see `_PyOpcode_macro_expansion` in diff --git a/Lib/_markupbase.py b/Lib/_markupbase.py index 3ad7e279960f7e..614f0cd16ddb44 100644 --- a/Lib/_markupbase.py +++ b/Lib/_markupbase.py @@ -13,7 +13,7 @@ _markedsectionclose = re.compile(r']\s*]\s*>') # An analysis of the MS-Word extensions is available at -# http://www.planetpublish.com/xmlarena/xap/Thursday/WordtoXML.pdf +# http://web.archive.org/web/20060321153828/http://www.planetpublish.com/xmlarena/xap/Thursday/WordtoXML.pdf _msmarkedsectionclose = re.compile(r']\s*>') diff --git a/Lib/_opcode_metadata.py b/Lib/_opcode_metadata.py index 459f7411296bcd..ae3e9bd0ab4940 100644 --- a/Lib/_opcode_metadata.py +++ b/Lib/_opcode_metadata.py @@ -85,6 +85,10 @@ "CONTAINS_OP_SET", "CONTAINS_OP_DICT", ], + "JUMP_BACKWARD": [ + "JUMP_BACKWARD_NO_JIT", + "JUMP_BACKWARD_JIT", + ], "FOR_ITER": [ "FOR_ITER_LIST", "FOR_ITER_TUPLE", @@ -167,41 +171,43 @@ 'FOR_ITER_LIST': 192, 'FOR_ITER_RANGE': 193, 'FOR_ITER_TUPLE': 194, - 'LOAD_ATTR_CLASS': 195, - 'LOAD_ATTR_CLASS_WITH_METACLASS_CHECK': 196, - 'LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN': 197, - 'LOAD_ATTR_INSTANCE_VALUE': 198, - 'LOAD_ATTR_METHOD_LAZY_DICT': 199, - 'LOAD_ATTR_METHOD_NO_DICT': 200, - 'LOAD_ATTR_METHOD_WITH_VALUES': 201, - 'LOAD_ATTR_MODULE': 202, - 'LOAD_ATTR_NONDESCRIPTOR_NO_DICT': 203, - 'LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES': 204, - 'LOAD_ATTR_PROPERTY': 205, - 'LOAD_ATTR_SLOT': 206, - 'LOAD_ATTR_WITH_HINT': 207, - 'LOAD_CONST_IMMORTAL': 208, - 'LOAD_CONST_MORTAL': 209, - 'LOAD_GLOBAL_BUILTIN': 210, - 'LOAD_GLOBAL_MODULE': 211, - 'LOAD_SUPER_ATTR_ATTR': 212, - 'LOAD_SUPER_ATTR_METHOD': 213, - 'RESUME_CHECK': 214, - 'SEND_GEN': 215, - 'STORE_ATTR_INSTANCE_VALUE': 216, - 'STORE_ATTR_SLOT': 217, - 'STORE_ATTR_WITH_HINT': 218, - 'STORE_SUBSCR_DICT': 219, - 'STORE_SUBSCR_LIST_INT': 220, - 'TO_BOOL_ALWAYS_TRUE': 221, - 'TO_BOOL_BOOL': 222, - 'TO_BOOL_INT': 223, - 'TO_BOOL_LIST': 224, - 'TO_BOOL_NONE': 225, - 'TO_BOOL_STR': 226, - 'UNPACK_SEQUENCE_LIST': 227, - 'UNPACK_SEQUENCE_TUPLE': 228, - 'UNPACK_SEQUENCE_TWO_TUPLE': 229, + 'JUMP_BACKWARD_JIT': 195, + 'JUMP_BACKWARD_NO_JIT': 196, + 'LOAD_ATTR_CLASS': 197, + 'LOAD_ATTR_CLASS_WITH_METACLASS_CHECK': 198, + 'LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN': 199, + 'LOAD_ATTR_INSTANCE_VALUE': 200, + 'LOAD_ATTR_METHOD_LAZY_DICT': 201, + 'LOAD_ATTR_METHOD_NO_DICT': 202, + 'LOAD_ATTR_METHOD_WITH_VALUES': 203, + 'LOAD_ATTR_MODULE': 204, + 'LOAD_ATTR_NONDESCRIPTOR_NO_DICT': 205, + 'LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES': 206, + 'LOAD_ATTR_PROPERTY': 207, + 'LOAD_ATTR_SLOT': 208, + 'LOAD_ATTR_WITH_HINT': 209, + 'LOAD_CONST_IMMORTAL': 210, + 'LOAD_CONST_MORTAL': 211, + 'LOAD_GLOBAL_BUILTIN': 212, + 'LOAD_GLOBAL_MODULE': 213, + 'LOAD_SUPER_ATTR_ATTR': 214, + 'LOAD_SUPER_ATTR_METHOD': 215, + 'RESUME_CHECK': 216, + 'SEND_GEN': 217, + 'STORE_ATTR_INSTANCE_VALUE': 218, + 'STORE_ATTR_SLOT': 219, + 'STORE_ATTR_WITH_HINT': 220, + 'STORE_SUBSCR_DICT': 221, + 'STORE_SUBSCR_LIST_INT': 222, + 'TO_BOOL_ALWAYS_TRUE': 223, + 'TO_BOOL_BOOL': 224, + 'TO_BOOL_INT': 225, + 'TO_BOOL_LIST': 226, + 'TO_BOOL_NONE': 227, + 'TO_BOOL_STR': 228, + 'UNPACK_SEQUENCE_LIST': 229, + 'UNPACK_SEQUENCE_TUPLE': 230, + 'UNPACK_SEQUENCE_TWO_TUPLE': 231, } opmap = { @@ -328,21 +334,21 @@ 'INSTRUMENTED_END_FOR': 235, 'INSTRUMENTED_POP_ITER': 236, 'INSTRUMENTED_END_SEND': 237, - 'INSTRUMENTED_LOAD_SUPER_ATTR': 238, - 'INSTRUMENTED_FOR_ITER': 239, - 'INSTRUMENTED_CALL_KW': 240, - 
'INSTRUMENTED_CALL_FUNCTION_EX': 241, - 'INSTRUMENTED_INSTRUCTION': 242, - 'INSTRUMENTED_JUMP_FORWARD': 243, - 'INSTRUMENTED_NOT_TAKEN': 244, - 'INSTRUMENTED_POP_JUMP_IF_TRUE': 245, - 'INSTRUMENTED_POP_JUMP_IF_FALSE': 246, - 'INSTRUMENTED_POP_JUMP_IF_NONE': 247, - 'INSTRUMENTED_POP_JUMP_IF_NOT_NONE': 248, - 'INSTRUMENTED_RESUME': 249, - 'INSTRUMENTED_RETURN_VALUE': 250, - 'INSTRUMENTED_YIELD_VALUE': 251, - 'INSTRUMENTED_CALL': 252, + 'INSTRUMENTED_FOR_ITER': 238, + 'INSTRUMENTED_INSTRUCTION': 239, + 'INSTRUMENTED_JUMP_FORWARD': 240, + 'INSTRUMENTED_NOT_TAKEN': 241, + 'INSTRUMENTED_POP_JUMP_IF_TRUE': 242, + 'INSTRUMENTED_POP_JUMP_IF_FALSE': 243, + 'INSTRUMENTED_POP_JUMP_IF_NONE': 244, + 'INSTRUMENTED_POP_JUMP_IF_NOT_NONE': 245, + 'INSTRUMENTED_RESUME': 246, + 'INSTRUMENTED_RETURN_VALUE': 247, + 'INSTRUMENTED_YIELD_VALUE': 248, + 'INSTRUMENTED_LOAD_SUPER_ATTR': 249, + 'INSTRUMENTED_CALL': 250, + 'INSTRUMENTED_CALL_KW': 251, + 'INSTRUMENTED_CALL_FUNCTION_EX': 252, 'INSTRUMENTED_JUMP_BACKWARD': 253, 'JUMP': 256, 'JUMP_IF_FALSE': 257, diff --git a/Lib/_pyio.py b/Lib/_pyio.py index 14961c39d3541d..b3a8f37d68acdb 100644 --- a/Lib/_pyio.py +++ b/Lib/_pyio.py @@ -937,10 +937,8 @@ def write(self, b): return 0 pos = self._pos if pos > len(self._buffer): - # Inserts null bytes between the current end of the file - # and the new write position. - padding = b'\x00' * (pos - len(self._buffer)) - self._buffer += padding + # Pad buffer to pos with null bytes. + self._buffer.resize(pos) self._buffer[pos:pos + n] = b self._pos += n return n @@ -1692,13 +1690,14 @@ def readall(self): return bytes(result) - def readinto(self, b): + def readinto(self, buffer): """Same as RawIOBase.readinto().""" - m = memoryview(b).cast('B') - data = self.read(len(m)) - n = len(data) - m[:n] = data - return n + self._checkClosed() + self._checkReadable() + try: + return os.readinto(self._fd, buffer) + except BlockingIOError: + return None def write(self, b): """Write bytes b to file, return number written. diff --git a/Lib/_pyrepl/reader.py b/Lib/_pyrepl/reader.py index 4b0700d069c621..1252847e02b2ea 100644 --- a/Lib/_pyrepl/reader.py +++ b/Lib/_pyrepl/reader.py @@ -587,10 +587,11 @@ def setpos_from_xy(self, x: int, y: int) -> None: def pos2xy(self) -> tuple[int, int]: """Return the x, y coordinates of position 'pos'.""" # this *is* incomprehensible, yes. 
- y = 0 + p, y = 0, 0 + l2: list[int] = [] pos = self.pos assert 0 <= pos <= len(self.buffer) - if pos == len(self.buffer): + if pos == len(self.buffer) and len(self.screeninfo) > 0: y = len(self.screeninfo) - 1 p, l2 = self.screeninfo[y] return p + sum(l2) + l2.count(0), y diff --git a/Lib/ast.py b/Lib/ast.py index 154d2c8c1f9ebb..0937c27bdf8a11 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -1196,9 +1196,14 @@ def visit_JoinedStr(self, node): fallback_to_repr = True break quote_types = new_quote_types - elif "\n" in value: - quote_types = [q for q in quote_types if q in _MULTI_QUOTES] - assert quote_types + else: + if "\n" in value: + quote_types = [q for q in quote_types if q in _MULTI_QUOTES] + assert quote_types + + new_quote_types = [q for q in quote_types if q not in value] + if new_quote_types: + quote_types = new_quote_types new_fstring_parts.append(value) if fallback_to_repr: diff --git a/Lib/asyncio/locks.py b/Lib/asyncio/locks.py index f2f8b7ec858096..fa3a94764b507a 100644 --- a/Lib/asyncio/locks.py +++ b/Lib/asyncio/locks.py @@ -485,7 +485,7 @@ class Barrier(mixins._LoopBoundMixin): def __init__(self, parties): """Create a barrier, initialised to 'parties' tasks.""" if parties < 1: - raise ValueError('parties must be > 0') + raise ValueError('parties must be >= 1') self._cond = Condition() # notify all tasks when state changes diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py index 50992a607b3a1c..22147451fa7ebd 100644 --- a/Lib/asyncio/selector_events.py +++ b/Lib/asyncio/selector_events.py @@ -1185,10 +1185,13 @@ def can_write_eof(self): return True def _call_connection_lost(self, exc): - super()._call_connection_lost(exc) - if self._empty_waiter is not None: - self._empty_waiter.set_exception( - ConnectionError("Connection is closed by peer")) + try: + super()._call_connection_lost(exc) + finally: + self._write_ready = None + if self._empty_waiter is not None: + self._empty_waiter.set_exception( + ConnectionError("Connection is closed by peer")) def _make_empty_waiter(self): if self._empty_waiter is not None: @@ -1203,7 +1206,6 @@ def _reset_empty_waiter(self): def close(self): self._read_ready_cb = None - self._write_ready = None super().close() diff --git a/Lib/http/__init__.py b/Lib/http/__init__.py index 9f278289420713..691b4a9a367bd0 100644 --- a/Lib/http/__init__.py +++ b/Lib/http/__init__.py @@ -190,7 +190,7 @@ class HTTPMethod: Methods from the following RFCs are all observed: - * RFF 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616 + * RFC 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616 * RFC 5789: PATCH Method for HTTP """ def __new__(cls, value, description): diff --git a/Lib/http/client.py b/Lib/http/client.py index fab90a0ba4eb83..33a858d34ae1ba 100644 --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -472,7 +472,7 @@ def read(self, amt=None): if self.chunked: return self._read_chunked(amt) - if amt is not None: + if amt is not None and amt >= 0: if self.length is not None and amt > self.length: # clip the read to the "end of response" amt = self.length @@ -590,6 +590,8 @@ def _get_chunk_left(self): def _read_chunked(self, amt=None): assert self.chunked != _UNKNOWN + if amt is not None and amt < 0: + amt = None value = [] try: while (chunk_left := self._get_chunk_left()) is not None: diff --git a/Lib/idlelib/CREDITS.txt b/Lib/idlelib/CREDITS.txt index 4a42af586a4a9e..bea3ba7c20de22 100644 --- a/Lib/idlelib/CREDITS.txt +++ b/Lib/idlelib/CREDITS.txt @@ -33,15 +33,15 @@ Major contributors since 2005: - 2005: Tal 
Einat - 2010: Terry Jan Reedy (current maintainer) -- 2013: Roger Serwys +- 2013: Roger Serwy - 2014: Saimadhav Heblikar - 2015: Mark Roseman - 2017: Louie Lu, Cheryl Sabella, and Serhiy Storchaka For additional details refer to NEWS.txt and Changelog. -Please contact the IDLE maintainer (kbk@shore.net) to have yourself included -here if you are one of those we missed! +If we missed you, feel free to submit a PR with a summary of +contributions (for instance, at least 5 merged PRs). diff --git a/Lib/idlelib/pyshell.py b/Lib/idlelib/pyshell.py index 66fbbd4a97b7af..295d06e4a5f017 100755 --- a/Lib/idlelib/pyshell.py +++ b/Lib/idlelib/pyshell.py @@ -1133,8 +1133,7 @@ def ispythonsource(self, filename): def short_title(self): return self.shell_title - COPYRIGHT = \ - 'Type "help", "copyright", "credits" or "license()" for more information.' + SPLASHLINE = 'Enter "help" below or click "Help" above for more information.' def begin(self): self.text.mark_set("iomark", "insert") @@ -1153,7 +1152,7 @@ def begin(self): sys.displayhook = rpc.displayhook self.write("Python %s on %s\n%s\n%s" % - (sys.version, sys.platform, self.COPYRIGHT, nosub)) + (sys.version, sys.platform, self.SPLASHLINE, nosub)) self.text.focus_force() self.showprompt() # User code should use separate default Tk root window diff --git a/Lib/locale.py b/Lib/locale.py index d8c09f1123d318..213d5e93418cfb 100644 --- a/Lib/locale.py +++ b/Lib/locale.py @@ -860,6 +860,24 @@ def getpreferredencoding(do_setlocale=True): # updated 'ca_es@valencia' -> 'ca_ES.ISO8859-15@valencia' to 'ca_ES.UTF-8@valencia' # updated 'kk_kz' -> 'kk_KZ.RK1048' to 'kk_KZ.ptcp154' # updated 'russian' -> 'ru_RU.ISO8859-5' to 'ru_RU.KOI8-R' +# +# SS 2025-02-04: +# Updated alias mapping with glibc 2.41 supported locales and the latest +# X lib alias mapping. 
+# +# These are the differences compared to the old mapping (Python 3.13.1 +# and older): +# +# updated 'c.utf8' -> 'C.UTF-8' to 'en_US.UTF-8' +# updated 'de_it' -> 'de_IT.ISO8859-1' to 'de_IT.UTF-8' +# removed 'de_li.utf8' +# updated 'en_il' -> 'en_IL.UTF-8' to 'en_IL.ISO8859-1' +# removed 'english.iso88591' +# updated 'es_cu' -> 'es_CU.UTF-8' to 'es_CU.ISO8859-1' +# updated 'russian' -> 'ru_RU.KOI8-R' to 'ru_RU.ISO8859-5' +# updated 'sr@latn' -> 'sr_CS.UTF-8@latin' to 'sr_RS.UTF-8@latin' +# removed 'univ' +# removed 'universal' locale_alias = { 'a3': 'az_AZ.KOI8-C', @@ -939,7 +957,7 @@ def getpreferredencoding(do_setlocale=True): 'c.ascii': 'C', 'c.en': 'C', 'c.iso88591': 'en_US.ISO8859-1', - 'c.utf8': 'C.UTF-8', + 'c.utf8': 'en_US.UTF-8', 'c_c': 'C', 'c_c.c': 'C', 'ca': 'ca_ES.ISO8859-1', @@ -956,6 +974,7 @@ def getpreferredencoding(do_setlocale=True): 'chr_us': 'chr_US.UTF-8', 'ckb_iq': 'ckb_IQ.UTF-8', 'cmn_tw': 'cmn_TW.UTF-8', + 'crh_ru': 'crh_RU.UTF-8', 'crh_ua': 'crh_UA.UTF-8', 'croatian': 'hr_HR.ISO8859-2', 'cs': 'cs_CZ.ISO8859-2', @@ -977,11 +996,12 @@ def getpreferredencoding(do_setlocale=True): 'de_be': 'de_BE.ISO8859-1', 'de_ch': 'de_CH.ISO8859-1', 'de_de': 'de_DE.ISO8859-1', - 'de_it': 'de_IT.ISO8859-1', - 'de_li.utf8': 'de_LI.UTF-8', + 'de_it': 'de_IT.UTF-8', + 'de_li': 'de_LI.ISO8859-1', 'de_lu': 'de_LU.ISO8859-1', 'deutsch': 'de_DE.ISO8859-1', 'doi_in': 'doi_IN.UTF-8', + 'dsb_de': 'dsb_DE.UTF-8', 'dutch': 'nl_NL.ISO8859-1', 'dutch.iso88591': 'nl_BE.ISO8859-1', 'dv_mv': 'dv_MV.UTF-8', @@ -1004,7 +1024,7 @@ def getpreferredencoding(do_setlocale=True): 'en_gb': 'en_GB.ISO8859-1', 'en_hk': 'en_HK.ISO8859-1', 'en_ie': 'en_IE.ISO8859-1', - 'en_il': 'en_IL.UTF-8', + 'en_il': 'en_IL.ISO8859-1', 'en_in': 'en_IN.ISO8859-1', 'en_ng': 'en_NG.UTF-8', 'en_nz': 'en_NZ.ISO8859-1', @@ -1020,7 +1040,6 @@ def getpreferredencoding(do_setlocale=True): 'en_zw.utf8': 'en_ZS.UTF-8', 'eng_gb': 'en_GB.ISO8859-1', 'english': 'en_EN.ISO8859-1', - 'english.iso88591': 'en_US.ISO8859-1', 'english_uk': 'en_GB.ISO8859-1', 'english_united-states': 'en_US.ISO8859-1', 'english_united-states.437': 'C', @@ -1036,7 +1055,7 @@ def getpreferredencoding(do_setlocale=True): 'es_cl': 'es_CL.ISO8859-1', 'es_co': 'es_CO.ISO8859-1', 'es_cr': 'es_CR.ISO8859-1', - 'es_cu': 'es_CU.UTF-8', + 'es_cu': 'es_CU.ISO8859-1', 'es_do': 'es_DO.ISO8859-1', 'es_ec': 'es_EC.ISO8859-1', 'es_es': 'es_ES.ISO8859-1', @@ -1086,6 +1105,7 @@ def getpreferredencoding(do_setlocale=True): 'ga_ie': 'ga_IE.ISO8859-1', 'galego': 'gl_ES.ISO8859-1', 'galician': 'gl_ES.ISO8859-1', + 'gbm_in': 'gbm_IN.UTF-8', 'gd': 'gd_GB.ISO8859-1', 'gd_gb': 'gd_GB.ISO8859-1', 'ger_de': 'de_DE.ISO8859-1', @@ -1126,6 +1146,7 @@ def getpreferredencoding(do_setlocale=True): 'icelandic': 'is_IS.ISO8859-1', 'id': 'id_ID.ISO8859-1', 'id_id': 'id_ID.ISO8859-1', + 'ie': 'ie.UTF-8', 'ig_ng': 'ig_NG.UTF-8', 'ik_ca': 'ik_CA.UTF-8', 'in': 'id_ID.ISO8859-1', @@ -1180,6 +1201,7 @@ def getpreferredencoding(do_setlocale=True): 'ks_in': 'ks_IN.UTF-8', 'ks_in@devanagari.utf8': 'ks_IN.UTF-8@devanagari', 'ku_tr': 'ku_TR.ISO8859-9', + 'kv_ru': 'kv_RU.UTF-8', 'kw': 'kw_GB.ISO8859-1', 'kw_gb': 'kw_GB.ISO8859-1', 'ky': 'ky_KG.UTF-8', @@ -1198,6 +1220,7 @@ def getpreferredencoding(do_setlocale=True): 'lo_la.mulelao1': 'lo_LA.MULELAO-1', 'lt': 'lt_LT.ISO8859-13', 'lt_lt': 'lt_LT.ISO8859-13', + 'ltg_lv.utf8': 'ltg_LV.UTF-8', 'lv': 'lv_LV.ISO8859-13', 'lv_lv': 'lv_LV.ISO8859-13', 'lzh_tw': 'lzh_TW.UTF-8', @@ -1205,6 +1228,7 @@ def getpreferredencoding(do_setlocale=True): 'mai': 'mai_IN.UTF-8', 
'mai_in': 'mai_IN.UTF-8', 'mai_np': 'mai_NP.UTF-8', + 'mdf_ru': 'mdf_RU.UTF-8', 'mfe_mu': 'mfe_MU.UTF-8', 'mg_mg': 'mg_MG.ISO8859-15', 'mhr_ru': 'mhr_RU.UTF-8', @@ -1218,6 +1242,7 @@ def getpreferredencoding(do_setlocale=True): 'ml_in': 'ml_IN.UTF-8', 'mn_mn': 'mn_MN.UTF-8', 'mni_in': 'mni_IN.UTF-8', + 'mnw_mm': 'mnw_MM.UTF-8', 'mr': 'mr_IN.UTF-8', 'mr_in': 'mr_IN.UTF-8', 'ms': 'ms_MY.ISO8859-1', @@ -1286,6 +1311,7 @@ def getpreferredencoding(do_setlocale=True): 'pt_pt': 'pt_PT.ISO8859-1', 'quz_pe': 'quz_PE.UTF-8', 'raj_in': 'raj_IN.UTF-8', + 'rif_ma': 'rif_MA.UTF-8', 'ro': 'ro_RO.ISO8859-2', 'ro_ro': 'ro_RO.ISO8859-2', 'romanian': 'ro_RO.ISO8859-2', @@ -1293,12 +1319,14 @@ def getpreferredencoding(do_setlocale=True): 'ru_ru': 'ru_RU.UTF-8', 'ru_ua': 'ru_UA.KOI8-U', 'rumanian': 'ro_RO.ISO8859-2', - 'russian': 'ru_RU.KOI8-R', + 'russian': 'ru_RU.ISO8859-5', 'rw': 'rw_RW.ISO8859-1', 'rw_rw': 'rw_RW.ISO8859-1', 'sa_in': 'sa_IN.UTF-8', + 'sah_ru': 'sah_RU.UTF-8', 'sat_in': 'sat_IN.UTF-8', 'sc_it': 'sc_IT.UTF-8', + 'scn_it': 'scn_IT.UTF-8', 'sd': 'sd_IN.UTF-8', 'sd_in': 'sd_IN.UTF-8', 'sd_in@devanagari.utf8': 'sd_IN.UTF-8@devanagari', @@ -1340,7 +1368,7 @@ def getpreferredencoding(do_setlocale=True): 'sq_mk': 'sq_MK.UTF-8', 'sr': 'sr_RS.UTF-8', 'sr@cyrillic': 'sr_RS.UTF-8', - 'sr@latn': 'sr_CS.UTF-8@latin', + 'sr@latn': 'sr_RS.UTF-8@latin', 'sr_cs': 'sr_CS.UTF-8', 'sr_cs.iso88592@latn': 'sr_CS.ISO8859-2', 'sr_cs@latn': 'sr_CS.UTF-8@latin', @@ -1359,14 +1387,17 @@ def getpreferredencoding(do_setlocale=True): 'sr_yu@cyrillic': 'sr_RS.UTF-8', 'ss': 'ss_ZA.ISO8859-1', 'ss_za': 'ss_ZA.ISO8859-1', + 'ssy_er': 'ssy_ER.UTF-8', 'st': 'st_ZA.ISO8859-1', 'st_za': 'st_ZA.ISO8859-1', + 'su_id': 'su_ID.UTF-8', 'sv': 'sv_SE.ISO8859-1', 'sv_fi': 'sv_FI.ISO8859-1', 'sv_se': 'sv_SE.ISO8859-1', 'sw_ke': 'sw_KE.UTF-8', 'sw_tz': 'sw_TZ.UTF-8', 'swedish': 'sv_SE.ISO8859-1', + 'syr': 'syr.UTF-8', 'szl_pl': 'szl_PL.UTF-8', 'ta': 'ta_IN.TSCII-0', 'ta_in': 'ta_IN.TSCII-0', @@ -1393,6 +1424,7 @@ def getpreferredencoding(do_setlocale=True): 'tn': 'tn_ZA.ISO8859-15', 'tn_za': 'tn_ZA.ISO8859-15', 'to_to': 'to_TO.UTF-8', + 'tok': 'tok.UTF-8', 'tpi_pg': 'tpi_PG.UTF-8', 'tr': 'tr_TR.ISO8859-9', 'tr_cy': 'tr_CY.ISO8859-9', @@ -1407,8 +1439,7 @@ def getpreferredencoding(do_setlocale=True): 'ug_cn': 'ug_CN.UTF-8', 'uk': 'uk_UA.KOI8-U', 'uk_ua': 'uk_UA.KOI8-U', - 'univ': 'en_US.utf', - 'universal': 'en_US.utf', + 'univ.utf8': 'en_US.UTF-8', 'universal.utf8@ucs4': 'en_US.UTF-8', 'unm_us': 'unm_US.UTF-8', 'ur': 'ur_PK.CP1256', @@ -1437,6 +1468,7 @@ def getpreferredencoding(do_setlocale=True): 'yo_ng': 'yo_NG.UTF-8', 'yue_hk': 'yue_HK.UTF-8', 'yuw_pg': 'yuw_PG.UTF-8', + 'zgh_ma': 'zgh_MA.UTF-8', 'zh': 'zh_CN.eucCN', 'zh_cn': 'zh_CN.gb2312', 'zh_cn.big5': 'zh_TW.big5', diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py index 1cba64fd554100..017c9ab409b7bc 100644 --- a/Lib/logging/handlers.py +++ b/Lib/logging/handlers.py @@ -855,7 +855,7 @@ class SysLogHandler(logging.Handler): } def __init__(self, address=('localhost', SYSLOG_UDP_PORT), - facility=LOG_USER, socktype=None): + facility=LOG_USER, socktype=None, timeout=None): """ Initialize a handler. 
@@ -872,6 +872,7 @@ def __init__(self, address=('localhost', SYSLOG_UDP_PORT), self.address = address self.facility = facility self.socktype = socktype + self.timeout = timeout self.socket = None self.createSocket() @@ -933,6 +934,8 @@ def createSocket(self): err = sock = None try: sock = socket.socket(af, socktype, proto) + if self.timeout: + sock.settimeout(self.timeout) if socktype == socket.SOCK_STREAM: sock.connect(sa) break diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py index 710aba9685efda..d429212d447380 100644 --- a/Lib/multiprocessing/connection.py +++ b/Lib/multiprocessing/connection.py @@ -853,7 +853,7 @@ def PipeClient(address): _LEGACY_LENGTHS = (_MD5ONLY_MESSAGE_LENGTH, _MD5_DIGEST_LEN) -def _get_digest_name_and_payload(message: bytes) -> (str, bytes): +def _get_digest_name_and_payload(message): # type: (bytes) -> tuple[str, bytes] """Returns a digest name and the payload for a response hash. If a legacy protocol is detected based on the message length diff --git a/Lib/multiprocessing/forkserver.py b/Lib/multiprocessing/forkserver.py index df9b9be9d1898b..681af2610e9b37 100644 --- a/Lib/multiprocessing/forkserver.py +++ b/Lib/multiprocessing/forkserver.py @@ -382,13 +382,14 @@ def _serve_one(child_r, fds, unused_fds, handlers): # def read_signed(fd): - data = b'' - length = SIGNED_STRUCT.size - while len(data) < length: - s = os.read(fd, length - len(data)) - if not s: + data = bytearray(SIGNED_STRUCT.size) + unread = memoryview(data) + while unread: + count = os.readinto(fd, unread) + if count == 0: raise EOFError('unexpected EOF') - data += s + unread = unread[count:] + return SIGNED_STRUCT.unpack(data)[0] def write_signed(fd, n): diff --git a/Lib/multiprocessing/synchronize.py b/Lib/multiprocessing/synchronize.py index 4f72373c951abc..edd6c2543a7435 100644 --- a/Lib/multiprocessing/synchronize.py +++ b/Lib/multiprocessing/synchronize.py @@ -359,7 +359,7 @@ def wait(self, timeout=None): return True return False - def __repr__(self) -> str: + def __repr__(self): set_status = 'set' if self.is_set() else 'unset' return f"<{type(self).__qualname__} at {id(self):#x} {set_status}>" # diff --git a/Lib/pathlib/_abc.py b/Lib/pathlib/_abc.py index d55cc6f243cf2b..e498dc78e83b5e 100644 --- a/Lib/pathlib/_abc.py +++ b/Lib/pathlib/_abc.py @@ -358,33 +358,6 @@ def parents(self): parent = split(path)[0] return tuple(parents) - def match(self, path_pattern, *, case_sensitive=None): - """ - Return True if this path matches the given pattern. If the pattern is - relative, matching is done from the right; otherwise, the entire path - is matched. The recursive wildcard '**' is *not* supported by this - method. - """ - if not isinstance(path_pattern, JoinablePath): - path_pattern = self.with_segments(path_pattern) - if case_sensitive is None: - case_sensitive = _is_case_sensitive(self.parser) - sep = path_pattern.parser.sep - path_parts = self.parts[::-1] - pattern_parts = path_pattern.parts[::-1] - if not pattern_parts: - raise ValueError("empty pattern") - if len(path_parts) < len(pattern_parts): - return False - if len(path_parts) > len(pattern_parts) and path_pattern.anchor: - return False - globber = PathGlobber(sep, case_sensitive) - for path_part, pattern_part in zip(path_parts, pattern_parts): - match = globber.compile(pattern_part) - if match(path_part) is None: - return False - return True - def full_match(self, pattern, *, case_sensitive=None): """ Return True if this path matches the given glob-style pattern. 
The diff --git a/Lib/pathlib/_local.py b/Lib/pathlib/_local.py index 2b42f3c22254b8..b3ec934f7510de 100644 --- a/Lib/pathlib/_local.py +++ b/Lib/pathlib/_local.py @@ -668,6 +668,32 @@ def full_match(self, pattern, *, case_sensitive=None): globber = _StringGlobber(self.parser.sep, case_sensitive, recursive=True) return globber.compile(pattern)(path) is not None + def match(self, path_pattern, *, case_sensitive=None): + """ + Return True if this path matches the given pattern. If the pattern is + relative, matching is done from the right; otherwise, the entire path + is matched. The recursive wildcard '**' is *not* supported by this + method. + """ + if not isinstance(path_pattern, PurePath): + path_pattern = self.with_segments(path_pattern) + if case_sensitive is None: + case_sensitive = self.parser is posixpath + path_parts = self.parts[::-1] + pattern_parts = path_pattern.parts[::-1] + if not pattern_parts: + raise ValueError("empty pattern") + if len(path_parts) < len(pattern_parts): + return False + if len(path_parts) > len(pattern_parts) and path_pattern.anchor: + return False + globber = _StringGlobber(self.parser.sep, case_sensitive) + for path_part, pattern_part in zip(path_parts, pattern_parts): + match = globber.compile(pattern_part) + if match(path_part) is None: + return False + return True + # Subclassing os.PathLike makes isinstance() checks slower, # which in turn makes Path construction slower. Register instead! os.PathLike.register(PurePath) diff --git a/Lib/pydoc.py b/Lib/pydoc.py index 922946e5fa7ddb..1839b88fec28b1 100644 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -245,7 +245,7 @@ def parentname(object, modname): if necessary) or module.""" if '.' in object.__qualname__: name = object.__qualname__.rpartition('.')[0] - if object.__module__ != modname: + if object.__module__ != modname and object.__module__ is not None: return object.__module__ + '.' + name else: return name diff --git a/Lib/shutil.py b/Lib/shutil.py index 171489ca41f2a7..510ae8c6f22d59 100644 --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -49,6 +49,7 @@ # https://bugs.python.org/issue43743#msg393429 _USE_CP_SENDFILE = (hasattr(os, "sendfile") and sys.platform.startswith(("linux", "android", "sunos"))) +_USE_CP_COPY_FILE_RANGE = hasattr(os, "copy_file_range") _HAS_FCOPYFILE = posix and hasattr(posix, "_fcopyfile") # macOS # CMD defaults in Windows 10 @@ -107,6 +108,66 @@ def _fastcopy_fcopyfile(fsrc, fdst, flags): else: raise err from None +def _determine_linux_fastcopy_blocksize(infd): + """Determine blocksize for fastcopying on Linux. + + Hopefully the whole file will be copied in a single call. + The copying itself should be performed in a loop 'till EOF is + reached (0 return) so a blocksize smaller or bigger than the actual + file size should not make any difference, also in case the file + content changes while being copied. + """ + try: + blocksize = max(os.fstat(infd).st_size, 2 ** 23) # min 8 MiB + except OSError: + blocksize = 2 ** 27 # 128 MiB + # On 32-bit architectures truncate to 1 GiB to avoid OverflowError, + # see gh-82500. + if sys.maxsize < 2 ** 32: + blocksize = min(blocksize, 2 ** 30) + return blocksize + +def _fastcopy_copy_file_range(fsrc, fdst): + """Copy data from one regular mmap-like fd to another by using + a high-performance copy_file_range(2) syscall that gives filesystems + an opportunity to implement the use of reflinks or server-side copy. + + This should work on Linux >= 4.5 only. 
+ """ + try: + infd = fsrc.fileno() + outfd = fdst.fileno() + except Exception as err: + raise _GiveupOnFastCopy(err) # not a regular file + + blocksize = _determine_linux_fastcopy_blocksize(infd) + offset = 0 + while True: + try: + n_copied = os.copy_file_range(infd, outfd, blocksize, offset_dst=offset) + except OSError as err: + # ...in oder to have a more informative exception. + err.filename = fsrc.name + err.filename2 = fdst.name + + if err.errno == errno.ENOSPC: # filesystem is full + raise err from None + + # Give up on first call and if no data was copied. + if offset == 0 and os.lseek(outfd, 0, os.SEEK_CUR) == 0: + raise _GiveupOnFastCopy(err) + + raise err + else: + if n_copied == 0: + # If no bytes have been copied yet, copy_file_range + # might silently fail. + # https://lore.kernel.org/linux-fsdevel/20210126233840.GG4626@dread.disaster.area/T/#m05753578c7f7882f6e9ffe01f981bc223edef2b0 + if offset == 0: + raise _GiveupOnFastCopy() + break + offset += n_copied + def _fastcopy_sendfile(fsrc, fdst): """Copy data from one regular mmap-like fd to another by using high-performance sendfile(2) syscall. @@ -128,20 +189,7 @@ def _fastcopy_sendfile(fsrc, fdst): except Exception as err: raise _GiveupOnFastCopy(err) # not a regular file - # Hopefully the whole file will be copied in a single call. - # sendfile() is called in a loop 'till EOF is reached (0 return) - # so a bufsize smaller or bigger than the actual file size - # should not make any difference, also in case the file content - # changes while being copied. - try: - blocksize = max(os.fstat(infd).st_size, 2 ** 23) # min 8MiB - except OSError: - blocksize = 2 ** 27 # 128MiB - # On 32-bit architectures truncate to 1GiB to avoid OverflowError, - # see bpo-38319. - if sys.maxsize < 2 ** 32: - blocksize = min(blocksize, 2 ** 30) - + blocksize = _determine_linux_fastcopy_blocksize(infd) offset = 0 while True: try: @@ -266,12 +314,20 @@ def copyfile(src, dst, *, follow_symlinks=True): except _GiveupOnFastCopy: pass # Linux / Android / Solaris - elif _USE_CP_SENDFILE: - try: - _fastcopy_sendfile(fsrc, fdst) - return dst - except _GiveupOnFastCopy: - pass + elif _USE_CP_SENDFILE or _USE_CP_COPY_FILE_RANGE: + # reflink may be implicit in copy_file_range. + if _USE_CP_COPY_FILE_RANGE: + try: + _fastcopy_copy_file_range(fsrc, fdst) + return dst + except _GiveupOnFastCopy: + pass + if _USE_CP_SENDFILE: + try: + _fastcopy_sendfile(fsrc, fdst) + return dst + except _GiveupOnFastCopy: + pass # Windows, see: # https://github.com/python/cpython/pull/7160#discussion_r195405230 elif _WINDOWS and file_size > 0: diff --git a/Lib/site.py b/Lib/site.py index 92bd1ccdadd924..9da8b6724e1cec 100644 --- a/Lib/site.py +++ b/Lib/site.py @@ -633,12 +633,9 @@ def venv(known_paths): # Doing this here ensures venv takes precedence over user-site addsitepackages(known_paths, [sys.prefix]) - # addsitepackages will process site_prefix again if its in PREFIXES, - # but that's ok; known_paths will prevent anything being added twice if system_site == "true": - PREFIXES.insert(0, sys.prefix) + PREFIXES += [sys.base_prefix, sys.base_exec_prefix] else: - PREFIXES = [sys.prefix] ENABLE_USER_SITE = False return known_paths diff --git a/Lib/sqlite3/__init__.py b/Lib/sqlite3/__init__.py index 34a9c047dd607c..ed727fae609d1d 100644 --- a/Lib/sqlite3/__init__.py +++ b/Lib/sqlite3/__init__.py @@ -22,7 +22,7 @@ """ The sqlite3 extension module provides a DB-API 2.0 (PEP 249) compliant -interface to the SQLite library, and requires SQLite 3.7.15 or newer. 
+interface to the SQLite library, and requires SQLite 3.15.2 or newer. To use the module, start by creating a database Connection object: diff --git a/Lib/string.py b/Lib/string.py index 2eab6d4f595c4e..c4f05c7223ce8a 100644 --- a/Lib/string.py +++ b/Lib/string.py @@ -212,19 +212,20 @@ def _vformat(self, format_string, args, kwargs, used_args, recursion_depth, # this is some markup, find the object and do # the formatting - # handle arg indexing when empty field_names are given. - if field_name == '': + # handle arg indexing when empty field first parts are given. + field_first, _ = _string.formatter_field_name_split(field_name) + if field_first == '': if auto_arg_index is False: raise ValueError('cannot switch from manual field ' 'specification to automatic field ' 'numbering') - field_name = str(auto_arg_index) + field_name = str(auto_arg_index) + field_name auto_arg_index += 1 - elif field_name.isdigit(): + elif isinstance(field_first, int): if auto_arg_index: - raise ValueError('cannot switch from manual field ' - 'specification to automatic field ' - 'numbering') + raise ValueError('cannot switch from automatic field ' + 'numbering to manual field ' + 'specification') # disable auto arg incrementing, if it gets # used later on, then an exception will be raised auto_arg_index = False diff --git a/Lib/subprocess.py b/Lib/subprocess.py index de88eedcf80ff9..2044d2a42897e9 100644 --- a/Lib/subprocess.py +++ b/Lib/subprocess.py @@ -43,10 +43,8 @@ import builtins import errno import io -import locale import os import time -import signal import sys import threading import warnings @@ -144,6 +142,8 @@ def __init__(self, returncode, cmd, output=None, stderr=None): def __str__(self): if self.returncode and self.returncode < 0: + # Lazy import to improve module import time + import signal try: return "Command '%s' died with %r." % ( self.cmd, signal.Signals(-self.returncode)) @@ -381,6 +381,8 @@ def _text_encoding(): if sys.flags.utf8_mode: return "utf-8" else: + # Lazy import to improve module import time + import locale return locale.getencoding() @@ -1664,6 +1666,9 @@ def send_signal(self, sig): # Don't signal a process that we know has already died. 
if self.returncode is not None: return + + # Lazy import to improve module import time + import signal if sig == signal.SIGTERM: self.terminate() elif sig == signal.CTRL_C_EVENT: @@ -1765,6 +1770,9 @@ def _posix_spawn(self, args, executable, env, restore_signals, close_fds, """Execute program using os.posix_spawn().""" kwargs = {} if restore_signals: + # Lazy import to improve module import time + import signal + # See _Py_RestoreSignals() in Python/pylifecycle.c sigset = [] for signame in ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ'): @@ -2214,9 +2222,13 @@ def send_signal(self, sig): def terminate(self): """Terminate the process with SIGTERM """ + # Lazy import to improve module import time + import signal self.send_signal(signal.SIGTERM) def kill(self): """Kill the process with SIGKILL """ + # Lazy import to improve module import time + import signal self.send_signal(signal.SIGKILL) diff --git a/Lib/sysconfig/__init__.py b/Lib/sysconfig/__init__.py index 3c3c9796ec3307..69f72452c4069a 100644 --- a/Lib/sysconfig/__init__.py +++ b/Lib/sysconfig/__init__.py @@ -116,8 +116,10 @@ def _getuserbase(): if env_base: return env_base - # Emscripten, iOS, tvOS, VxWorks, WASI, and watchOS have no home directories - if sys.platform in {"emscripten", "ios", "tvos", "vxworks", "wasi", "watchos"}: + # Emscripten, iOS, tvOS, VxWorks, WASI, and watchOS have no home directories. + # Use _PYTHON_HOST_PLATFORM to get the correct platform when cross-compiling. + system_name = os.environ.get('_PYTHON_HOST_PLATFORM', sys.platform).split('-')[0] + if system_name in {"emscripten", "ios", "tvos", "vxworks", "wasi", "watchos"}: return None def joinuser(*args): @@ -342,6 +344,18 @@ def get_makefile_filename(): return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile') +def _import_from_directory(path, name): + if name not in sys.modules: + import importlib.machinery + import importlib.util + + spec = importlib.machinery.PathFinder.find_spec(name, [path]) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + sys.modules[name] = module + return sys.modules[name] + + def _get_sysconfigdata_name(): multiarch = getattr(sys.implementation, '_multiarch', '') return os.environ.get( @@ -349,27 +363,34 @@ def _get_sysconfigdata_name(): f'_sysconfigdata_{sys.abiflags}_{sys.platform}_{multiarch}', ) -def _init_posix(vars): - """Initialize the module as appropriate for POSIX systems.""" - # _sysconfigdata is generated at build time, see _generate_posix_vars() + +def _get_sysconfigdata(): + import importlib + name = _get_sysconfigdata_name() + path = os.environ.get('_PYTHON_SYSCONFIGDATA_PATH') + module = _import_from_directory(path, name) if path else importlib.import_module(name) - # For cross builds, the path to the target's sysconfigdata must be specified - # so it can be imported. It cannot be in PYTHONPATH, as foreign modules in - # sys.path can cause crashes when loaded by the host interpreter. - # Rely on truthiness as a valueless env variable is still an empty string. - # See OS X note in _generate_posix_vars re _sysconfigdata. 
- if (path := os.environ.get('_PYTHON_SYSCONFIGDATA_PATH')): - from importlib.machinery import FileFinder, SourceFileLoader, SOURCE_SUFFIXES - from importlib.util import module_from_spec - spec = FileFinder(path, (SourceFileLoader, SOURCE_SUFFIXES)).find_spec(name) - _temp = module_from_spec(spec) - spec.loader.exec_module(_temp) - else: - _temp = __import__(name, globals(), locals(), ['build_time_vars'], 0) - build_time_vars = _temp.build_time_vars + return module.build_time_vars + + +def _installation_is_relocated(): + """Is the Python installation running from a different prefix than what was targeted when building?""" + if os.name != 'posix': + raise NotImplementedError('sysconfig._installation_is_relocated() is currently only supported on POSIX') + + data = _get_sysconfigdata() + return ( + data['prefix'] != getattr(sys, 'base_prefix', '') + or data['exec_prefix'] != getattr(sys, 'base_exec_prefix', '') + ) + + +def _init_posix(vars): + """Initialize the module as appropriate for POSIX systems.""" # GH-126920: Make sure we don't overwrite any of the keys already set - vars.update(build_time_vars | vars) + vars.update(_get_sysconfigdata() | vars) + def _init_non_posix(vars): """Initialize the module as appropriate for NT""" diff --git a/Lib/sysconfig/__main__.py b/Lib/sysconfig/__main__.py index 10728c709e1811..bc2197cfe79402 100644 --- a/Lib/sysconfig/__main__.py +++ b/Lib/sysconfig/__main__.py @@ -232,10 +232,14 @@ def _generate_posix_vars(): print(f'Written {destfile}') + install_vars = get_config_vars() + # Fix config vars to match the values after install (of the default environment) + install_vars['projectbase'] = install_vars['BINDIR'] + install_vars['srcdir'] = install_vars['LIBPL'] # Write a JSON file with the output of sysconfig.get_config_vars jsonfile = os.path.join(pybuilddir, _get_json_data_name()) with open(jsonfile, 'w') as f: - json.dump(get_config_vars(), f, indent=2) + json.dump(install_vars, f, indent=2) print(f'Written {jsonfile}') diff --git a/Lib/test/libregrtest/cmdline.py b/Lib/test/libregrtest/cmdline.py index bf9a71efbdbff9..1f3b2381c71d45 100644 --- a/Lib/test/libregrtest/cmdline.py +++ b/Lib/test/libregrtest/cmdline.py @@ -160,6 +160,7 @@ def __init__(self, **kwargs) -> None: self.print_slow = False self.random_seed = None self.use_mp = None + self.parallel_threads = None self.forever = False self.header = False self.failfast = False @@ -316,6 +317,10 @@ def _create_parser(): 'a single process, ignore -jN option, ' 'and failed tests are also rerun sequentially ' 'in the same process') + group.add_argument('--parallel-threads', metavar='PARALLEL_THREADS', + type=int, + help='run copies of each test in PARALLEL_THREADS at ' + 'once') group.add_argument('-T', '--coverage', action='store_true', dest='trace', help='turn on code coverage tracing using the trace ' diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py index dcbcc6790c68d8..de377f185f7ed9 100644 --- a/Lib/test/libregrtest/main.py +++ b/Lib/test/libregrtest/main.py @@ -142,6 +142,8 @@ def __init__(self, ns: Namespace, _add_python_opts: bool = False): else: self.random_seed = ns.random_seed + self.parallel_threads = ns.parallel_threads + # tests self.first_runtests: RunTests | None = None @@ -506,6 +508,7 @@ def create_run_tests(self, tests: TestTuple) -> RunTests: python_cmd=self.python_cmd, randomize=self.randomize, random_seed=self.random_seed, + parallel_threads=self.parallel_threads, ) def _run_tests(self, selected: TestTuple, tests: TestList | None) -> int: diff --git
a/Lib/test/libregrtest/parallel_case.py b/Lib/test/libregrtest/parallel_case.py new file mode 100644 index 00000000000000..09d9d2831e86b8 --- /dev/null +++ b/Lib/test/libregrtest/parallel_case.py @@ -0,0 +1,79 @@ +"""Run a test case multiple times in parallel threads.""" + +import copy +import functools +import threading +import unittest + +from unittest import TestCase + + +class ParallelTestCase(TestCase): + def __init__(self, test_case: TestCase, num_threads: int): + self.test_case = test_case + self.num_threads = num_threads + self._testMethodName = test_case._testMethodName + self._testMethodDoc = test_case._testMethodDoc + + def __str__(self): + return f"{str(self.test_case)} [threads={self.num_threads}]" + + def run_worker(self, test_case: TestCase, result: unittest.TestResult, + barrier: threading.Barrier): + barrier.wait() + test_case.run(result) + + def run(self, result=None): + if result is None: + result = self.test_case.defaultTestResult() + startTestRun = getattr(result, 'startTestRun', None) + stopTestRun = getattr(result, 'stopTestRun', None) + if startTestRun is not None: + startTestRun() + else: + stopTestRun = None + + # Called at the beginning of each test. See TestCase.run. + result.startTest(self) + + cases = [copy.copy(self.test_case) for _ in range(self.num_threads)] + results = [unittest.TestResult() for _ in range(self.num_threads)] + + barrier = threading.Barrier(self.num_threads) + threads = [] + for i, (case, r) in enumerate(zip(cases, results)): + thread = threading.Thread(target=self.run_worker, + args=(case, r, barrier), + name=f"{str(self.test_case)}-{i}", + daemon=True) + threads.append(thread) + + for thread in threads: + thread.start() + + for thread in threads: + thread.join() + + # Aggregate test results + if all(r.wasSuccessful() for r in results): + result.addSuccess(self) + + # Note: We can't call result.addError, result.addFailure, etc. because + # we no longer have the original exception, just the string format. + for r in results: + if len(r.errors) > 0 or len(r.failures) > 0: + result._mirrorOutput = True + result.errors.extend(r.errors) + result.failures.extend(r.failures) + result.skipped.extend(r.skipped) + result.expectedFailures.extend(r.expectedFailures) + result.unexpectedSuccesses.extend(r.unexpectedSuccesses) + result.collectedDurations.extend(r.collectedDurations) + + if any(r.shouldStop for r in results): + result.stop() + + # Test has finished running + result.stopTest(self) + if stopTestRun is not None: + stopTestRun() diff --git a/Lib/test/libregrtest/runtests.py b/Lib/test/libregrtest/runtests.py index 130c036a62eefb..759f24fc25e38c 100644 --- a/Lib/test/libregrtest/runtests.py +++ b/Lib/test/libregrtest/runtests.py @@ -100,6 +100,7 @@ class RunTests: python_cmd: tuple[str, ...]
| None randomize: bool random_seed: int | str + parallel_threads: int | None def copy(self, **override) -> 'RunTests': state = dataclasses.asdict(self) @@ -184,6 +185,8 @@ def bisect_cmd_args(self) -> list[str]: args.extend(("--python", cmd)) if self.randomize: args.append(f"--randomize") + if self.parallel_threads: + args.append(f"--parallel-threads={self.parallel_threads}") args.append(f"--randseed={self.random_seed}") return args diff --git a/Lib/test/libregrtest/single.py b/Lib/test/libregrtest/single.py index 54df688bbc470e..57d7b649d2ef63 100644 --- a/Lib/test/libregrtest/single.py +++ b/Lib/test/libregrtest/single.py @@ -17,6 +17,7 @@ from .save_env import saved_test_environment from .setup import setup_tests from .testresult import get_test_runner +from .parallel_case import ParallelTestCase from .utils import ( TestName, clear_caches, remove_testfn, abs_module_name, print_warning) @@ -27,14 +28,17 @@ PROGRESS_MIN_TIME = 30.0 # seconds -def run_unittest(test_mod): +def run_unittest(test_mod, runtests: RunTests): loader = unittest.TestLoader() tests = loader.loadTestsFromModule(test_mod) + for error in loader.errors: print(error, file=sys.stderr) if loader.errors: raise Exception("errors while loading tests") _filter_suite(tests, match_test) + if runtests.parallel_threads: + _parallelize_tests(tests, runtests.parallel_threads) return _run_suite(tests) def _filter_suite(suite, pred): @@ -49,6 +53,28 @@ def _filter_suite(suite, pred): newtests.append(test) suite._tests = newtests +def _parallelize_tests(suite, parallel_threads: int): + def is_thread_unsafe(test): + test_method = getattr(test, test._testMethodName) + instance = test_method.__self__ + return (getattr(test_method, "__unittest_thread_unsafe__", False) or + getattr(instance, "__unittest_thread_unsafe__", False)) + + newtests: list[object] = [] + for test in suite._tests: + if isinstance(test, unittest.TestSuite): + _parallelize_tests(test, parallel_threads) + newtests.append(test) + continue + + if is_thread_unsafe(test): + # Don't parallelize thread-unsafe tests + newtests.append(test) + continue + + newtests.append(ParallelTestCase(test, parallel_threads)) + suite._tests = newtests + def _run_suite(suite): """Run tests from a unittest.TestSuite-derived class.""" runner = get_test_runner(sys.stdout, @@ -133,7 +159,7 @@ def _load_run_test(result: TestResult, runtests: RunTests) -> None: raise Exception(f"Module {test_name} defines test_main() which " f"is no longer supported by regrtest") def test_func(): - return run_unittest(test_mod) + return run_unittest(test_mod, runtests) try: regrtest_runner(result, test_func, runtests) diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 89f2a6b916bfc2..f31d98bf731d67 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -40,7 +40,7 @@ "anticipate_failure", "load_package_tests", "detect_api_mismatch", "check__all__", "skip_if_buggy_ucrt_strfptime", "check_disallow_instantiation", "check_sanitizer", "skip_if_sanitizer", - "requires_limited_api", "requires_specialization", + "requires_limited_api", "requires_specialization", "thread_unsafe", # sys "MS_WINDOWS", "is_jython", "is_android", "is_emscripten", "is_wasi", "is_apple_mobile", "check_impl_detail", "unix_shell", "setswitchinterval", @@ -58,13 +58,15 @@ "LOOPBACK_TIMEOUT", "INTERNET_TIMEOUT", "SHORT_TIMEOUT", "LONG_TIMEOUT", "Py_DEBUG", "exceeds_recursion_limit", "get_c_recursion_limit", "skip_on_s390x", - "without_optimizer", + "requires_jit_enabled", + 
"requires_jit_disabled", "force_not_colorized", "force_not_colorized_test_class", "make_clean_env", "BrokenIter", "in_systemd_nspawn_sync_suppressed", "run_no_yield_async_fn", "run_yielding_async_fn", "async_yield", + "reset_code", ] @@ -380,6 +382,21 @@ def wrapper(*args, **kw): return decorator +def thread_unsafe(reason): + """Mark a test as not thread safe. When the test runner is run with + --parallel-threads=N, the test will be run in a single thread.""" + def decorator(test_item): + test_item.__unittest_thread_unsafe__ = True + # the reason is not currently used + test_item.__unittest_thread_unsafe__why__ = reason + return test_item + if isinstance(reason, types.FunctionType): + test_item = reason + reason = '' + return decorator(test_item) + return decorator + + def skip_if_buildbot(reason=None): """Decorator raising SkipTest if running on a buildbot.""" import getpass @@ -1285,6 +1302,12 @@ def requires_specialization_ft(test): _opcode.ENABLE_SPECIALIZATION_FT, "requires specialization")(test) +def reset_code(f: types.FunctionType) -> types.FunctionType: + """Clear all specializations, local instrumentation, and JIT code for the given function.""" + f.__code__ = f.__code__.replace() + return f + + #======================================================================= # Check for the presence of docstrings. @@ -2620,21 +2643,13 @@ def exceeds_recursion_limit(): Py_TRACE_REFS = hasattr(sys, 'getobjects') -# Decorator to disable optimizer while a function run -def without_optimizer(func): - try: - from _testinternalcapi import get_optimizer, set_optimizer - except ImportError: - return func - @functools.wraps(func) - def wrapper(*args, **kwargs): - save_opt = get_optimizer() - try: - set_optimizer(None) - return func(*args, **kwargs) - finally: - set_optimizer(save_opt) - return wrapper +try: + from _testinternalcapi import jit_enabled +except ImportError: + requires_jit_enabled = requires_jit_disabled = unittest.skip("requires _testinternalcapi") +else: + requires_jit_enabled = unittest.skipUnless(jit_enabled(), "requires JIT enabled") + requires_jit_disabled = unittest.skipIf(jit_enabled(), "requires JIT disabled") _BASE_COPY_SRC_DIR_IGNORED_NAMES = frozenset({ diff --git a/Lib/test/support/venv.py b/Lib/test/support/venv.py index 78e6a51ec1815e..7bfb9e4f3c479f 100644 --- a/Lib/test/support/venv.py +++ b/Lib/test/support/venv.py @@ -6,6 +6,7 @@ import sys import sysconfig import tempfile +import unittest import venv @@ -68,3 +69,14 @@ def run(self, *args, **subprocess_args): raise else: return result + + +class VirtualEnvironmentMixin: + def venv(self, name=None, **venv_create_args): + venv_name = self.id() + if name: + venv_name += f'-{name}' + return VirtualEnvironment.from_tmpdir( + prefix=f'{venv_name}-venv-', + **venv_create_args, + ) diff --git a/Lib/test/test_ast/test_ast.py b/Lib/test/test_ast/test_ast.py index c268a1f00f938e..a438c8e81e4fd1 100644 --- a/Lib/test/test_ast/test_ast.py +++ b/Lib/test/test_ast/test_ast.py @@ -3279,16 +3279,6 @@ def test_folding_iter(self): self.assert_ast(code % (left, right), non_optimized_target, optimized_target) - def test_folding_subscript(self): - code = "(1,)[0]" - - non_optimized_target = self.wrap_expr( - ast.Subscript(value=ast.Tuple(elts=[ast.Constant(value=1)]), slice=ast.Constant(value=0)) - ) - optimized_target = self.wrap_expr(ast.Constant(value=1)) - - self.assert_ast(code, non_optimized_target, optimized_target) - def test_folding_type_param_in_function_def(self): code = "def foo[%s = 1 + 1](): pass" diff --git 
a/Lib/test/test_asyncio/test_selector_events.py b/Lib/test/test_asyncio/test_selector_events.py index c9217d04bcd322..9d094a7b041276 100644 --- a/Lib/test/test_asyncio/test_selector_events.py +++ b/Lib/test/test_asyncio/test_selector_events.py @@ -1051,6 +1051,48 @@ def test_transport_close_remove_writer(self, m_log): transport.close() remove_writer.assert_called_with(self.sock_fd) + def test_write_buffer_after_close(self): + # gh-115514: If the transport is closed while: + # * Transport write buffer is not empty + # * Transport is paused + # * Protocol has data in its buffer, like SSLProtocol in self._outgoing + # The data is still written out. + + # Also tested with real SSL transport in + # test.test_asyncio.test_ssl.TestSSL.test_remote_shutdown_receives_trailing_data + + data = memoryview(b'data') + self.sock.send.return_value = 2 + self.sock.send.fileno.return_value = 7 + + def _resume_writing(): + transport.write(b"data") + self.protocol.resume_writing.side_effect = None + + self.protocol.resume_writing.side_effect = _resume_writing + + transport = self.socket_transport() + transport._high_water = 1 + + transport.write(data) + + self.assertTrue(transport._protocol_paused) + self.assertTrue(self.sock.send.called) + self.loop.assert_writer(7, transport._write_ready) + + transport.close() + + # not called, we still have data in write buffer + self.assertFalse(self.protocol.connection_lost.called) + + self.loop.writers[7]._run() + # during this ^ run, the _resume_writing mock above was called and added more data + + self.assertEqual(transport.get_write_buffer_size(), 2) + self.loop.writers[7]._run() + + self.assertEqual(transport.get_write_buffer_size(), 0) + self.assertTrue(self.protocol.connection_lost.called) class SelectorSocketTransportBufferedProtocolTests(test_utils.TestCase): diff --git a/Lib/test/test_asyncio/test_ssl.py b/Lib/test/test_asyncio/test_ssl.py index 125a6c35793c44..ac774307c7942b 100644 --- a/Lib/test/test_asyncio/test_ssl.py +++ b/Lib/test/test_asyncio/test_ssl.py @@ -12,6 +12,7 @@ import tempfile import threading import time +import unittest.mock import weakref import unittest @@ -1431,6 +1432,166 @@ def wrapper(sock): with self.tcp_server(run(eof_server)) as srv: self.loop.run_until_complete(client(srv.addr)) + def test_remote_shutdown_receives_trailing_data_on_slow_socket(self): + # This test is the same as test_remote_shutdown_receives_trailing_data, + # except it simulates a socket that is not able to write data in time, + # thus triggering different code path in _SelectorSocketTransport. + # This triggers bug gh-115514, also tested using mocks in + # test.test_asyncio.test_selector_events.SelectorSocketTransportTests.test_write_buffer_after_close + # The slow path is triggered here by setting SO_SNDBUF, see code and comment below. 
+ + CHUNK = 1024 * 128 + SIZE = 32 + + sslctx = self._create_server_ssl_context( + test_utils.ONLYCERT, + test_utils.ONLYKEY + ) + client_sslctx = self._create_client_ssl_context() + future = None + + def server(sock): + incoming = ssl.MemoryBIO() + outgoing = ssl.MemoryBIO() + sslobj = sslctx.wrap_bio(incoming, outgoing, server_side=True) + + while True: + try: + sslobj.do_handshake() + except ssl.SSLWantReadError: + if outgoing.pending: + sock.send(outgoing.read()) + incoming.write(sock.recv(16384)) + else: + if outgoing.pending: + sock.send(outgoing.read()) + break + + while True: + try: + data = sslobj.read(4) + except ssl.SSLWantReadError: + incoming.write(sock.recv(16384)) + else: + break + + self.assertEqual(data, b'ping') + sslobj.write(b'pong') + sock.send(outgoing.read()) + + time.sleep(0.2) # wait for the peer to fill its backlog + + # send close_notify but don't wait for response + with self.assertRaises(ssl.SSLWantReadError): + sslobj.unwrap() + sock.send(outgoing.read()) + + # should receive all data + data_len = 0 + while True: + try: + chunk = len(sslobj.read(16384)) + data_len += chunk + except ssl.SSLWantReadError: + incoming.write(sock.recv(16384)) + except ssl.SSLZeroReturnError: + break + + self.assertEqual(data_len, CHUNK * SIZE*2) + + # verify that close_notify is received + sslobj.unwrap() + + sock.close() + + def eof_server(sock): + sock.starttls(sslctx, server_side=True) + self.assertEqual(sock.recv_all(4), b'ping') + sock.send(b'pong') + + time.sleep(0.2) # wait for the peer to fill its backlog + + # send EOF + sock.shutdown(socket.SHUT_WR) + + # should receive all data + data = sock.recv_all(CHUNK * SIZE) + self.assertEqual(len(data), CHUNK * SIZE) + + sock.close() + + async def client(addr): + nonlocal future + future = self.loop.create_future() + + reader, writer = await asyncio.open_connection( + *addr, + ssl=client_sslctx, + server_hostname='') + writer.write(b'ping') + data = await reader.readexactly(4) + self.assertEqual(data, b'pong') + + # fill write backlog in a hacky way - renegotiation won't help + for _ in range(SIZE*2): + writer.transport._test__append_write_backlog(b'x' * CHUNK) + + try: + data = await reader.read() + self.assertEqual(data, b'') + except (BrokenPipeError, ConnectionResetError): + pass + + # Make sure _SelectorSocketTransport enters the delayed write + # path in its `write` method by wrapping socket in a fake class + # that acts as if there is not enough space in socket buffer. 
+ # This triggers bug gh-115514, also tested using mocks in + # test.test_asyncio.test_selector_events.SelectorSocketTransportTests.test_write_buffer_after_close + socket_transport = writer.transport._ssl_protocol._transport + + class SocketWrapper: + def __init__(self, sock) -> None: + self.sock = sock + + def __getattr__(self, name): + return getattr(self.sock, name) + + def send(self, data): + # Fake that our write buffer is full, send only half + to_send = len(data)//2 + return self.sock.send(data[:to_send]) + + def _fake_full_write_buffer(data): + if socket_transport._read_ready_cb is None and not isinstance(socket_transport._sock, SocketWrapper): + socket_transport._sock = SocketWrapper(socket_transport._sock) + return unittest.mock.DEFAULT + + with unittest.mock.patch.object( + socket_transport, "write", + wraps=socket_transport.write, + side_effect=_fake_full_write_buffer + ): + await future + + writer.close() + await self.wait_closed(writer) + + def run(meth): + def wrapper(sock): + try: + meth(sock) + except Exception as ex: + self.loop.call_soon_threadsafe(future.set_exception, ex) + else: + self.loop.call_soon_threadsafe(future.set_result, None) + return wrapper + + with self.tcp_server(run(server)) as srv: + self.loop.run_until_complete(client(srv.addr)) + + with self.tcp_server(run(eof_server)) as srv: + self.loop.run_until_complete(client(srv.addr)) + def test_connect_timeout_warning(self): s = socket.socket(socket.AF_INET) s.bind(('127.0.0.1', 0)) diff --git a/Lib/test/test_bytes.py b/Lib/test/test_bytes.py index 7bb1ab38aa4fdf..18d619eb6239a1 100644 --- a/Lib/test/test_bytes.py +++ b/Lib/test/test_bytes.py @@ -1359,6 +1359,44 @@ def by(s): b = by("Hello, world") self.assertEqual(re.findall(br"\w+", b), [by("Hello"), by("world")]) + def test_resize(self): + ba = bytearray(b'abcdef') + self.assertIsNone(ba.resize(3)) + self.assertEqual(ba, bytearray(b'abc')) + + self.assertIsNone(ba.resize(10)) + self.assertEqual(len(ba), 10) + # Bytes beyond set values must be cleared. + self.assertEqual(ba, bytearray(b'abc\0\0\0\0\0\0\0')) + + ba[3:10] = b'defghij' + self.assertEqual(ba, bytearray(b'abcdefghij')) + + self.assertIsNone(ba.resize(2 ** 20)) + self.assertEqual(len(ba), 2**20) + self.assertEqual(ba, bytearray(b'abcdefghij' + b'\0' * (2 ** 20 - 10))) + + self.assertIsNone(ba.resize(0)) + self.assertEqual(ba, bytearray()) + + self.assertIsNone(ba.resize(10)) + self.assertEqual(ba, bytearray(b'\0' * 10)) + + # Subclass + ba = ByteArraySubclass(b'abcdef') + self.assertIsNone(ba.resize(3)) + self.assertEqual(ba, bytearray(b'abc')) + + # Check arguments + self.assertRaises(TypeError, bytearray().resize) + self.assertRaises(TypeError, bytearray().resize, (10, 10)) + + self.assertRaises(ValueError, bytearray().resize, -1) + self.assertRaises(ValueError, bytearray().resize, -200) + self.assertRaises(MemoryError, bytearray().resize, sys.maxsize) + self.assertRaises(MemoryError, bytearray(1000).resize, sys.maxsize) + + def test_setitem(self): def setitem_as_mapping(b, i, val): b[i] = val @@ -1715,17 +1753,18 @@ def test_resize_forbidden(self): # if it wouldn't reallocate the underlying buffer. # Furthermore, no destructive changes to the buffer may be applied # before raising the error. 
- b = bytearray(range(10)) + b = bytearray(10) v = memoryview(b) - def resize(n): + def manual_resize(n): b[1:-1] = range(n + 1, 2*n - 1) - resize(10) + b.resize(10) orig = b[:] - self.assertRaises(BufferError, resize, 11) + self.assertRaises(BufferError, b.resize, 11) + self.assertRaises(BufferError, manual_resize, 11) self.assertEqual(b, orig) - self.assertRaises(BufferError, resize, 9) + self.assertRaises(BufferError, b.resize, 9) self.assertEqual(b, orig) - self.assertRaises(BufferError, resize, 0) + self.assertRaises(BufferError, b.resize, 0) self.assertEqual(b, orig) # Other operations implying resize self.assertRaises(BufferError, b.pop, 0) diff --git a/Lib/test/test_call.py b/Lib/test/test_call.py index 78a706436aea0e..a4115d54ce5995 100644 --- a/Lib/test/test_call.py +++ b/Lib/test/test_call.py @@ -1,6 +1,6 @@ import unittest from test.support import (cpython_only, is_wasi, requires_limited_api, Py_DEBUG, - set_recursion_limit, skip_on_s390x, skip_emscripten_stack_overflow) + set_recursion_limit, skip_on_s390x, skip_emscripten_stack_overflow, import_helper) try: import _testcapi except ImportError: @@ -616,9 +616,6 @@ def testfunction_kw(self, *, kw): return self -ADAPTIVE_WARMUP_DELAY = 2 - - @unittest.skipIf(_testcapi is None, "requires _testcapi") class TestPEP590(unittest.TestCase): @@ -802,17 +799,18 @@ def __call__(self, *args): def test_setvectorcall(self): from _testcapi import function_setvectorcall + _testinternalcapi = import_helper.import_module("_testinternalcapi") def f(num): return num + 1 assert_equal = self.assertEqual num = 10 assert_equal(11, f(num)) function_setvectorcall(f) - # make sure specializer is triggered by running > 50 times - for _ in range(10 * ADAPTIVE_WARMUP_DELAY): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): assert_equal("overridden", f(num)) def test_setvectorcall_load_attr_specialization_skip(self): from _testcapi import function_setvectorcall + _testinternalcapi = import_helper.import_module("_testinternalcapi") class X: def __getattribute__(self, attr): @@ -824,11 +822,12 @@ def __getattribute__(self, attr): function_setvectorcall(X.__getattribute__) # make sure specialization doesn't trigger # when vectorcall is overridden - for _ in range(ADAPTIVE_WARMUP_DELAY): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): assert_equal("overridden", x.a) def test_setvectorcall_load_attr_specialization_deopt(self): from _testcapi import function_setvectorcall + _testinternalcapi = import_helper.import_module("_testinternalcapi") class X: def __getattribute__(self, attr): @@ -840,12 +839,12 @@ def get_a(x): assert_equal = self.assertEqual x = X() # trigger LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN specialization - for _ in range(ADAPTIVE_WARMUP_DELAY): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): assert_equal("a", get_a(x)) function_setvectorcall(X.__getattribute__) # make sure specialized LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN # gets deopted due to overridden vectorcall - for _ in range(ADAPTIVE_WARMUP_DELAY): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): assert_equal("overridden", get_a(x)) @requires_limited_api diff --git a/Lib/test/test_capi/test_bytearray.py b/Lib/test/test_capi/test_bytearray.py index 39099f6b82240f..323e0d2a5acdcb 100644 --- a/Lib/test/test_capi/test_bytearray.py +++ b/Lib/test/test_capi/test_bytearray.py @@ -151,10 +151,11 @@ def test_resize(self): self.assertEqual(resize(ba, 3), 0) self.assertEqual(ba, bytearray(b'abc')) + self.assertRaises(ValueError, resize, bytearray(), -1) + 
self.assertRaises(ValueError, resize, bytearray(), -200) self.assertRaises(MemoryError, resize, bytearray(), PY_SSIZE_T_MAX) self.assertRaises(MemoryError, resize, bytearray(1000), PY_SSIZE_T_MAX) - # CRASHES resize(bytearray(b'abc'), -1) # CRASHES resize(b'abc', 0) # CRASHES resize(object(), 0) # CRASHES resize(NULL, 0) diff --git a/Lib/test/test_capi/test_file.py b/Lib/test/test_capi/test_file.py index a67a5121c4588b..b5767756992861 100644 --- a/Lib/test/test_capi/test_file.py +++ b/Lib/test/test_capi/test_file.py @@ -1,26 +1,242 @@ +import io import os import unittest +import warnings from test import support -from test.support import import_helper, os_helper +from test.support import import_helper, os_helper, warnings_helper -_testcapi = import_helper.import_module('_testcapi') +_testcapi = import_helper.import_module('_testcapi') +_testlimitedcapi = import_helper.import_module('_testlimitedcapi') +_io = import_helper.import_module('_io') NULL = None +STDOUT_FD = 1 + +with open(__file__, 'rb') as fp: + FIRST_LINE = next(fp).decode() +FIRST_LINE_NORM = FIRST_LINE.rstrip() + '\n' class CAPIFileTest(unittest.TestCase): + def test_pyfile_fromfd(self): + # Test PyFile_FromFd() which is a thin wrapper to _io.open() + pyfile_fromfd = _testlimitedcapi.pyfile_fromfd + filename = __file__ + with open(filename, "rb") as fp: + fd = fp.fileno() + + # FileIO + fp.seek(0) + obj = pyfile_fromfd(fd, filename, "rb", 0, NULL, NULL, NULL, 0) + try: + self.assertIsInstance(obj, _io.FileIO) + self.assertEqual(obj.readline(), FIRST_LINE.encode()) + finally: + obj.close() + + # BufferedReader + fp.seek(0) + obj = pyfile_fromfd(fd, filename, "rb", 1024, NULL, NULL, NULL, 0) + try: + self.assertIsInstance(obj, _io.BufferedReader) + self.assertEqual(obj.readline(), FIRST_LINE.encode()) + finally: + obj.close() + + # TextIOWrapper + fp.seek(0) + obj = pyfile_fromfd(fd, filename, "r", 1, + "utf-8", "replace", NULL, 0) + try: + self.assertIsInstance(obj, _io.TextIOWrapper) + self.assertEqual(obj.encoding, "utf-8") + self.assertEqual(obj.errors, "replace") + self.assertEqual(obj.readline(), FIRST_LINE_NORM) + finally: + obj.close() + + def test_pyfile_getline(self): + # Test PyFile_GetLine(file, n): call file.readline() + # and strip "\n" suffix if n < 0. 
+ pyfile_getline = _testlimitedcapi.pyfile_getline + + # Test Unicode + with open(__file__, "r") as fp: + fp.seek(0) + self.assertEqual(pyfile_getline(fp, -1), + FIRST_LINE_NORM.rstrip('\n')) + fp.seek(0) + self.assertEqual(pyfile_getline(fp, 0), + FIRST_LINE_NORM) + fp.seek(0) + self.assertEqual(pyfile_getline(fp, 6), + FIRST_LINE_NORM[:6]) + + # Test bytes + with open(__file__, "rb") as fp: + fp.seek(0) + self.assertEqual(pyfile_getline(fp, -1), + FIRST_LINE.rstrip('\n').encode()) + fp.seek(0) + self.assertEqual(pyfile_getline(fp, 0), + FIRST_LINE.encode()) + fp.seek(0) + self.assertEqual(pyfile_getline(fp, 6), + FIRST_LINE.encode()[:6]) + + def test_pyfile_writestring(self): + # Test PyFile_WriteString(str, file): call file.write(str) + writestr = _testlimitedcapi.pyfile_writestring + + with io.StringIO() as fp: + self.assertEqual(writestr("a\xe9\u20ac\U0010FFFF".encode(), fp), 0) + with self.assertRaises(UnicodeDecodeError): + writestr(b"\xff", fp) + with self.assertRaises(UnicodeDecodeError): + writestr("\udc80".encode("utf-8", "surrogatepass"), fp) + + text = fp.getvalue() + self.assertEqual(text, "a\xe9\u20ac\U0010FFFF") + + with self.assertRaises(SystemError): + writestr(b"abc", NULL) + + def test_pyfile_writeobject(self): + # Test PyFile_WriteObject(obj, file, flags): + # - Call file.write(str(obj)) if flags equals Py_PRINT_RAW. + # - Call file.write(repr(obj)) otherwise. + writeobject = _testlimitedcapi.pyfile_writeobject + Py_PRINT_RAW = 1 + + with io.StringIO() as fp: + # Test flags=Py_PRINT_RAW + self.assertEqual(writeobject("raw", fp, Py_PRINT_RAW), 0) + writeobject(NULL, fp, Py_PRINT_RAW) + + # Test flags=0 + self.assertEqual(writeobject("repr", fp, 0), 0) + writeobject(NULL, fp, 0) + + text = fp.getvalue() + self.assertEqual(text, "raw'repr'") + + # invalid file type + for invalid_file in (123, "abc", object()): + with self.subTest(file=invalid_file): + with self.assertRaises(AttributeError): + writeobject("abc", invalid_file, Py_PRINT_RAW) + + with self.assertRaises(TypeError): + writeobject("abc", NULL, 0) + + def test_pyobject_asfiledescriptor(self): + # Test PyObject_AsFileDescriptor(obj): + # - Return obj if obj is an integer. + # - Return obj.fileno() otherwise. + # File descriptor must be >= 0. 
+ asfd = _testlimitedcapi.pyobject_asfiledescriptor + + self.assertEqual(asfd(123), 123) + self.assertEqual(asfd(0), 0) + + with open(__file__, "rb") as fp: + self.assertEqual(asfd(fp), fp.fileno()) + + # bool emits RuntimeWarning + msg = r"bool is used as a file descriptor" + with warnings_helper.check_warnings((msg, RuntimeWarning)): + self.assertEqual(asfd(True), 1) + + class FakeFile: + def __init__(self, fd): + self.fd = fd + def fileno(self): + return self.fd + + # file descriptor must be positive + with self.assertRaises(ValueError): + asfd(-1) + with self.assertRaises(ValueError): + asfd(FakeFile(-1)) + + # fileno() result must be an integer + with self.assertRaises(TypeError): + asfd(FakeFile("text")) + + # unsupported types + for obj in ("string", ["list"], object()): + with self.subTest(obj=obj): + with self.assertRaises(TypeError): + asfd(obj) + + # CRASHES asfd(NULL) + + def test_pyfile_newstdprinter(self): + # Test PyFile_NewStdPrinter() + pyfile_newstdprinter = _testcapi.pyfile_newstdprinter + + file = pyfile_newstdprinter(STDOUT_FD) + self.assertEqual(file.closed, False) + self.assertIsNone(file.encoding) + self.assertEqual(file.mode, "w") + + self.assertEqual(file.fileno(), STDOUT_FD) + self.assertEqual(file.isatty(), os.isatty(STDOUT_FD)) + + # flush() is a no-op + self.assertIsNone(file.flush()) + + # close() is a no-op + self.assertIsNone(file.close()) + self.assertEqual(file.closed, False) + + support.check_disallow_instantiation(self, type(file)) + + def test_pyfile_newstdprinter_write(self): + # Test the write() method of PyFile_NewStdPrinter() + pyfile_newstdprinter = _testcapi.pyfile_newstdprinter + + filename = os_helper.TESTFN + self.addCleanup(os_helper.unlink, filename) + + try: + old_stdout = os.dup(STDOUT_FD) + except OSError as exc: + # os.dup(STDOUT_FD) is not supported on WASI + self.skipTest(f"os.dup() failed with {exc!r}") + + try: + with open(filename, "wb") as fp: + # PyFile_NewStdPrinter() only accepts fileno(stdout) + # or fileno(stderr) file descriptor. 
+ fd = fp.fileno() + os.dup2(fd, STDOUT_FD) + + file = pyfile_newstdprinter(STDOUT_FD) + self.assertEqual(file.write("text"), 4) + # The surrogate character is encoded with + # the "backslashreplace" error handler + self.assertEqual(file.write("[\udc80]"), 8) + finally: + os.dup2(old_stdout, STDOUT_FD) + os.close(old_stdout) + + with open(filename, "r") as fp: + self.assertEqual(fp.read(), "text[\\udc80]") + def test_py_fopen(self): # Test Py_fopen() and Py_fclose() + py_fopen = _testcapi.py_fopen with open(__file__, "rb") as fp: source = fp.read() for filename in (__file__, os.fsencode(__file__)): with self.subTest(filename=filename): - data = _testcapi.py_fopen(filename, "rb") + data = py_fopen(filename, "rb") self.assertEqual(data, source[:256]) - data = _testcapi.py_fopen(os_helper.FakePath(filename), "rb") + data = py_fopen(os_helper.FakePath(filename), "rb") self.assertEqual(data, source[:256]) filenames = [ @@ -43,41 +259,46 @@ def test_py_fopen(self): filename = None continue try: - data = _testcapi.py_fopen(filename, "rb") + data = py_fopen(filename, "rb") self.assertEqual(data, source[:256]) finally: os_helper.unlink(filename) # embedded null character/byte in the filename with self.assertRaises(ValueError): - _testcapi.py_fopen("a\x00b", "rb") + py_fopen("a\x00b", "rb") with self.assertRaises(ValueError): - _testcapi.py_fopen(b"a\x00b", "rb") + py_fopen(b"a\x00b", "rb") # non-ASCII mode failing with "Invalid argument" with self.assertRaises(OSError): - _testcapi.py_fopen(__file__, b"\xc2\x80") + py_fopen(__file__, b"\xc2\x80") with self.assertRaises(OSError): # \x98 is invalid in cp1250, cp1251, cp1257 # \x9d is invalid in cp1252-cp1255, cp1258 - _testcapi.py_fopen(__file__, b"\xc2\x98\xc2\x9d") + py_fopen(__file__, b"\xc2\x98\xc2\x9d") # UnicodeDecodeError can come from the audit hook code with self.assertRaises((UnicodeDecodeError, OSError)): - _testcapi.py_fopen(__file__, b"\x98\x9d") + py_fopen(__file__, b"\x98\x9d") # invalid filename type for invalid_type in (123, object()): with self.subTest(filename=invalid_type): with self.assertRaises(TypeError): - _testcapi.py_fopen(invalid_type, "rb") + py_fopen(invalid_type, "rb") if support.MS_WINDOWS: with self.assertRaises(OSError): # On Windows, the file mode is limited to 10 characters - _testcapi.py_fopen(__file__, "rt+, ccs=UTF-8") + py_fopen(__file__, "rt+, ccs=UTF-8") + + # CRASHES py_fopen(NULL, 'rb') + # CRASHES py_fopen(__file__, NULL) + + # TODO: Test Py_UniversalNewlineFgets() - # CRASHES _testcapi.py_fopen(NULL, 'rb') - # CRASHES _testcapi.py_fopen(__file__, NULL) + # PyFile_SetOpenCodeHook() and PyFile_OpenCode() are tested by + # test_embed.test_open_code_hook() if __name__ == "__main__": diff --git a/Lib/test/test_capi/test_frame.py b/Lib/test/test_capi/test_frame.py new file mode 100644 index 00000000000000..23cb8e3dada9d4 --- /dev/null +++ b/Lib/test/test_capi/test_frame.py @@ -0,0 +1,56 @@ +import sys +import unittest +from test.support import import_helper + + +_testcapi = import_helper.import_module('_testcapi') + + +class FrameTest(unittest.TestCase): + def getframe(self): + return sys._getframe() + + def test_frame_getters(self): + frame = self.getframe() + self.assertEqual(frame.f_locals, _testcapi.frame_getlocals(frame)) + self.assertIs(frame.f_globals, _testcapi.frame_getglobals(frame)) + self.assertIs(frame.f_builtins, _testcapi.frame_getbuiltins(frame)) + self.assertEqual(frame.f_lasti, _testcapi.frame_getlasti(frame)) + + def test_getvar(self): + current_frame = sys._getframe() + x = 1 + 
self.assertEqual(_testcapi.frame_getvar(current_frame, "x"), 1) + self.assertEqual(_testcapi.frame_getvarstring(current_frame, b"x"), 1) + with self.assertRaises(NameError): + _testcapi.frame_getvar(current_frame, "y") + with self.assertRaises(NameError): + _testcapi.frame_getvarstring(current_frame, b"y") + + # wrong name type + with self.assertRaises(TypeError): + _testcapi.frame_getvar(current_frame, b'x') + with self.assertRaises(TypeError): + _testcapi.frame_getvar(current_frame, 123) + + def getgenframe(self): + yield sys._getframe() + + def test_frame_get_generator(self): + gen = self.getgenframe() + frame = next(gen) + self.assertIs(gen, _testcapi.frame_getgenerator(frame)) + + def test_frame_fback_api(self): + """Test that accessing `f_back` does not cause a segmentation fault on + a frame created with `PyFrame_New` (GH-99110).""" + def dummy(): + pass + + frame = _testcapi.frame_new(dummy.__code__, globals(), locals()) + # The following line should not cause a segmentation fault. + self.assertIsNone(frame.f_back) + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_capi/test_function.py b/Lib/test/test_capi/test_function.py new file mode 100644 index 00000000000000..9dca377e28ba42 --- /dev/null +++ b/Lib/test/test_capi/test_function.py @@ -0,0 +1,323 @@ +import unittest +from test.support import import_helper + + +_testcapi = import_helper.import_module('_testcapi') + + +class FunctionTest(unittest.TestCase): + def test_function_get_code(self): + # Test PyFunction_GetCode() + import types + + def some(): + pass + + code = _testcapi.function_get_code(some) + self.assertIsInstance(code, types.CodeType) + self.assertEqual(code, some.__code__) + + with self.assertRaises(SystemError): + _testcapi.function_get_code(None) # not a function + + def test_function_get_globals(self): + # Test PyFunction_GetGlobals() + def some(): + pass + + globals_ = _testcapi.function_get_globals(some) + self.assertIsInstance(globals_, dict) + self.assertEqual(globals_, some.__globals__) + + with self.assertRaises(SystemError): + _testcapi.function_get_globals(None) # not a function + + def test_function_get_module(self): + # Test PyFunction_GetModule() + def some(): + pass + + module = _testcapi.function_get_module(some) + self.assertIsInstance(module, str) + self.assertEqual(module, some.__module__) + + with self.assertRaises(SystemError): + _testcapi.function_get_module(None) # not a function + + def test_function_get_defaults(self): + # Test PyFunction_GetDefaults() + def some( + pos_only1, pos_only2='p', + /, + zero=0, optional=None, + *, + kw1, + kw2=True, + ): + pass + + defaults = _testcapi.function_get_defaults(some) + self.assertEqual(defaults, ('p', 0, None)) + self.assertEqual(defaults, some.__defaults__) + + with self.assertRaises(SystemError): + _testcapi.function_get_defaults(None) # not a function + + def test_function_set_defaults(self): + # Test PyFunction_SetDefaults() + def some( + pos_only1, pos_only2='p', + /, + zero=0, optional=None, + *, + kw1, + kw2=True, + ): + pass + + old_defaults = ('p', 0, None) + self.assertEqual(_testcapi.function_get_defaults(some), old_defaults) + self.assertEqual(some.__defaults__, old_defaults) + + with self.assertRaises(SystemError): + _testcapi.function_set_defaults(some, 1) # not tuple or None + self.assertEqual(_testcapi.function_get_defaults(some), old_defaults) + self.assertEqual(some.__defaults__, old_defaults) + + with self.assertRaises(SystemError): + _testcapi.function_set_defaults(1, ()) # not a function + 
self.assertEqual(_testcapi.function_get_defaults(some), old_defaults) + self.assertEqual(some.__defaults__, old_defaults) + + new_defaults = ('q', 1, None) + _testcapi.function_set_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_defaults(some), new_defaults) + self.assertEqual(some.__defaults__, new_defaults) + + # Empty tuple is fine: + new_defaults = () + _testcapi.function_set_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_defaults(some), new_defaults) + self.assertEqual(some.__defaults__, new_defaults) + + class tuplesub(tuple): ... # tuple subclasses must work + + new_defaults = tuplesub(((1, 2), ['a', 'b'], None)) + _testcapi.function_set_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_defaults(some), new_defaults) + self.assertEqual(some.__defaults__, new_defaults) + + # `None` is special, it sets `defaults` to `NULL`, + # it needs special handling in `_testcapi`: + _testcapi.function_set_defaults(some, None) + self.assertEqual(_testcapi.function_get_defaults(some), None) + self.assertEqual(some.__defaults__, None) + + def test_function_get_kw_defaults(self): + # Test PyFunction_GetKwDefaults() + def some( + pos_only1, pos_only2='p', + /, + zero=0, optional=None, + *, + kw1, + kw2=True, + ): + pass + + defaults = _testcapi.function_get_kw_defaults(some) + self.assertEqual(defaults, {'kw2': True}) + self.assertEqual(defaults, some.__kwdefaults__) + + with self.assertRaises(SystemError): + _testcapi.function_get_kw_defaults(None) # not a function + + def test_function_set_kw_defaults(self): + # Test PyFunction_SetKwDefaults() + def some( + pos_only1, pos_only2='p', + /, + zero=0, optional=None, + *, + kw1, + kw2=True, + ): + pass + + old_defaults = {'kw2': True} + self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) + self.assertEqual(some.__kwdefaults__, old_defaults) + + with self.assertRaises(SystemError): + _testcapi.function_set_kw_defaults(some, 1) # not dict or None + self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) + self.assertEqual(some.__kwdefaults__, old_defaults) + + with self.assertRaises(SystemError): + _testcapi.function_set_kw_defaults(1, {}) # not a function + self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) + self.assertEqual(some.__kwdefaults__, old_defaults) + + new_defaults = {'kw2': (1, 2, 3)} + _testcapi.function_set_kw_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) + self.assertEqual(some.__kwdefaults__, new_defaults) + + # Empty dict is fine: + new_defaults = {} + _testcapi.function_set_kw_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) + self.assertEqual(some.__kwdefaults__, new_defaults) + + class dictsub(dict): ... # dict subclasses must work + + new_defaults = dictsub({'kw2': None}) + _testcapi.function_set_kw_defaults(some, new_defaults) + self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) + self.assertEqual(some.__kwdefaults__, new_defaults) + + # `None` is special, it sets `kwdefaults` to `NULL`, + # it needs special handling in `_testcapi`: + _testcapi.function_set_kw_defaults(some, None) + self.assertEqual(_testcapi.function_get_kw_defaults(some), None) + self.assertEqual(some.__kwdefaults__, None) + + def test_function_get_closure(self): + # Test PyFunction_GetClosure() + from types import CellType + + def regular_function(): ... 
+ def unused_one_level(arg1): + def inner(arg2, arg3): ... + return inner + def unused_two_levels(arg1, arg2): + def decorator(arg3, arg4): + def inner(arg5, arg6): ... + return inner + return decorator + def with_one_level(arg1): + def inner(arg2, arg3): + return arg1 + arg2 + arg3 + return inner + def with_two_levels(arg1, arg2): + def decorator(arg3, arg4): + def inner(arg5, arg6): + return arg1 + arg2 + arg3 + arg4 + arg5 + arg6 + return inner + return decorator + + # Functions without closures: + self.assertIsNone(_testcapi.function_get_closure(regular_function)) + self.assertIsNone(regular_function.__closure__) + + func = unused_one_level(1) + closure = _testcapi.function_get_closure(func) + self.assertIsNone(closure) + self.assertIsNone(func.__closure__) + + func = unused_two_levels(1, 2)(3, 4) + closure = _testcapi.function_get_closure(func) + self.assertIsNone(closure) + self.assertIsNone(func.__closure__) + + # Functions with closures: + func = with_one_level(5) + closure = _testcapi.function_get_closure(func) + self.assertEqual(closure, func.__closure__) + self.assertIsInstance(closure, tuple) + self.assertEqual(len(closure), 1) + self.assertEqual(len(closure), len(func.__code__.co_freevars)) + for cell in closure: + self.assertIsInstance(cell, CellType) + self.assertTrue(closure[0].cell_contents, 5) + + func = with_two_levels(1, 2)(3, 4) + closure = _testcapi.function_get_closure(func) + self.assertEqual(closure, func.__closure__) + self.assertIsInstance(closure, tuple) + self.assertEqual(len(closure), 4) + self.assertEqual(len(closure), len(func.__code__.co_freevars)) + for cell in closure: + self.assertIsInstance(cell, CellType) + self.assertEqual([cell.cell_contents for cell in closure], + [1, 2, 3, 4]) + + def test_function_get_closure_error(self): + # Test PyFunction_GetClosure() + with self.assertRaises(SystemError): + _testcapi.function_get_closure(1) + with self.assertRaises(SystemError): + _testcapi.function_get_closure(None) + + def test_function_set_closure(self): + # Test PyFunction_SetClosure() + from types import CellType + + def function_without_closure(): ... + def function_with_closure(arg): + def inner(): + return arg + return inner + + func = function_without_closure + _testcapi.function_set_closure(func, (CellType(1), CellType(1))) + closure = _testcapi.function_get_closure(func) + self.assertEqual([c.cell_contents for c in closure], [1, 1]) + self.assertEqual([c.cell_contents for c in func.__closure__], [1, 1]) + + func = function_with_closure(1) + _testcapi.function_set_closure(func, + (CellType(1), CellType(2), CellType(3))) + closure = _testcapi.function_get_closure(func) + self.assertEqual([c.cell_contents for c in closure], [1, 2, 3]) + self.assertEqual([c.cell_contents for c in func.__closure__], [1, 2, 3]) + + def test_function_set_closure_none(self): + # Test PyFunction_SetClosure() + def function_without_closure(): ... + def function_with_closure(arg): + def inner(): + return arg + return inner + + _testcapi.function_set_closure(function_without_closure, None) + self.assertIsNone( + _testcapi.function_get_closure(function_without_closure)) + self.assertIsNone(function_without_closure.__closure__) + + _testcapi.function_set_closure(function_with_closure, None) + self.assertIsNone( + _testcapi.function_get_closure(function_with_closure)) + self.assertIsNone(function_with_closure.__closure__) + + def test_function_set_closure_errors(self): + # Test PyFunction_SetClosure() + def function_without_closure(): ... 
+ + with self.assertRaises(SystemError): + _testcapi.function_set_closure(None, ()) # not a function + + with self.assertRaises(SystemError): + _testcapi.function_set_closure(function_without_closure, 1) + self.assertIsNone(function_without_closure.__closure__) # no change + + # NOTE: this works, but goes against the docs: + _testcapi.function_set_closure(function_without_closure, (1, 2)) + self.assertEqual( + _testcapi.function_get_closure(function_without_closure), (1, 2)) + self.assertEqual(function_without_closure.__closure__, (1, 2)) + + # TODO: test PyFunction_New() + # TODO: test PyFunction_NewWithQualName() + # TODO: test PyFunction_SetVectorcall() + # TODO: test PyFunction_GetAnnotations() + # TODO: test PyFunction_SetAnnotations() + # TODO: test PyClassMethod_New() + # TODO: test PyStaticMethod_New() + # + # PyFunction_AddWatcher() and PyFunction_ClearWatcher() are tested by + # test_capi.test_watchers. + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_capi/test_import.py b/Lib/test/test_capi/test_import.py index 94f96728d9174b..25136624ca4ed9 100644 --- a/Lib/test/test_capi/test_import.py +++ b/Lib/test/test_capi/test_import.py @@ -7,6 +7,7 @@ from test.support import import_helper from test.support.warnings_helper import check_warnings +_testcapi = import_helper.import_module('_testcapi') _testlimitedcapi = import_helper.import_module('_testlimitedcapi') NULL = None @@ -148,7 +149,7 @@ def check_frozen_import(self, import_frozen_module): try: self.assertEqual(import_frozen_module('zipimport'), 1) - # import zipimport again + # import zipimport again self.assertEqual(import_frozen_module('zipimport'), 1) finally: sys.modules['zipimport'] = old_zipimport @@ -317,6 +318,59 @@ def test_executecodemoduleobject(self): # CRASHES execute_code_func(NULL, code, NULL, NULL) # CRASHES execute_code_func(name, NULL, NULL, NULL) + def check_importmoduleattr(self, importmoduleattr): + self.assertIs(importmoduleattr('sys', 'argv'), sys.argv) + self.assertIs(importmoduleattr('types', 'ModuleType'), types.ModuleType) + + # module name containing a dot + attr = importmoduleattr('email.message', 'Message') + from email.message import Message + self.assertIs(attr, Message) + + with self.assertRaises(ImportError): + # nonexistent module + importmoduleattr('nonexistentmodule', 'attr') + with self.assertRaises(AttributeError): + # nonexistent attribute + importmoduleattr('sys', 'nonexistentattr') + with self.assertRaises(AttributeError): + # attribute name containing a dot + importmoduleattr('sys', 'implementation.name') + + def test_importmoduleattr(self): + # Test PyImport_ImportModuleAttr() + importmoduleattr = _testcapi.PyImport_ImportModuleAttr + self.check_importmoduleattr(importmoduleattr) + + # Invalid module name type + for mod_name in (object(), 123, b'bytes'): + with self.subTest(mod_name=mod_name): + with self.assertRaises(TypeError): + importmoduleattr(mod_name, "attr") + + # Invalid attribute name type + for attr_name in (object(), 123, b'bytes'): + with self.subTest(attr_name=attr_name): + with self.assertRaises(TypeError): + importmoduleattr("sys", attr_name) + + with self.assertRaises(SystemError): + importmoduleattr(NULL, "argv") + # CRASHES importmoduleattr("sys", NULL) + + def test_importmoduleattrstring(self): + # Test PyImport_ImportModuleAttrString() + importmoduleattr = _testcapi.PyImport_ImportModuleAttrString + self.check_importmoduleattr(importmoduleattr) + + with self.assertRaises(UnicodeDecodeError): + importmoduleattr(b"sys\xff", "argv") + with 
self.assertRaises(UnicodeDecodeError): + importmoduleattr("sys", b"argv\xff") + + # CRASHES importmoduleattr(NULL, "argv") + # CRASHES importmoduleattr("sys", NULL) + # TODO: test PyImport_GetImporter() # TODO: test PyImport_ReloadModule() # TODO: test PyImport_ExtendInittab() diff --git a/Lib/test/test_capi/test_misc.py b/Lib/test/test_capi/test_misc.py index 114e7cdfd0cd9c..b218f72f1bbce0 100644 --- a/Lib/test/test_capi/test_misc.py +++ b/Lib/test/test_capi/test_misc.py @@ -306,7 +306,7 @@ def test_getitem_with_error(self): CURRENT_THREAD_REGEX + r' File .*, line 6 in \n' r'\n' - r'Extension modules: _testcapi \(total: 1\)\n') + r'Extension modules: _testcapi, _testinternalcapi \(total: 2\)\n') else: # Python built with NDEBUG macro defined: # test _Py_CheckFunctionResult() instead. @@ -403,42 +403,6 @@ def test_buildvalue_ints(self): def test_buildvalue_N(self): _testcapi.test_buildvalue_N() - def check_negative_refcount(self, code): - # bpo-35059: Check that Py_DECREF() reports the correct filename - # when calling _Py_NegativeRefcount() to abort Python. - code = textwrap.dedent(code) - rc, out, err = assert_python_failure('-c', code) - self.assertRegex(err, - br'_testcapimodule\.c:[0-9]+: ' - br'_Py_NegativeRefcount: Assertion failed: ' - br'object has negative ref count') - - @unittest.skipUnless(hasattr(_testcapi, 'negative_refcount'), - 'need _testcapi.negative_refcount()') - def test_negative_refcount(self): - code = """ - import _testcapi - from test import support - - with support.SuppressCrashReport(): - _testcapi.negative_refcount() - """ - self.check_negative_refcount(code) - - @unittest.skipUnless(hasattr(_testcapi, 'decref_freed_object'), - 'need _testcapi.decref_freed_object()') - @support.skip_if_sanitizer("use after free on purpose", - address=True, memory=True, ub=True) - def test_decref_freed_object(self): - code = """ - import _testcapi - from test import support - - with support.SuppressCrashReport(): - _testcapi.decref_freed_object() - """ - self.check_negative_refcount(code) - def test_trashcan_subclass(self): # bpo-35983: Check that the trashcan mechanism for "list" is NOT # activated when its tp_dealloc is being called by a subclass @@ -928,175 +892,6 @@ def __init__(self): _testcapi.clear_managed_dict(c) self.assertEqual(c.__dict__, {}) - def test_function_get_code(self): - import types - - def some(): - pass - - code = _testcapi.function_get_code(some) - self.assertIsInstance(code, types.CodeType) - self.assertEqual(code, some.__code__) - - with self.assertRaises(SystemError): - _testcapi.function_get_code(None) # not a function - - def test_function_get_globals(self): - def some(): - pass - - globals_ = _testcapi.function_get_globals(some) - self.assertIsInstance(globals_, dict) - self.assertEqual(globals_, some.__globals__) - - with self.assertRaises(SystemError): - _testcapi.function_get_globals(None) # not a function - - def test_function_get_module(self): - def some(): - pass - - module = _testcapi.function_get_module(some) - self.assertIsInstance(module, str) - self.assertEqual(module, some.__module__) - - with self.assertRaises(SystemError): - _testcapi.function_get_module(None) # not a function - - def test_function_get_defaults(self): - def some( - pos_only1, pos_only2='p', - /, - zero=0, optional=None, - *, - kw1, - kw2=True, - ): - pass - - defaults = _testcapi.function_get_defaults(some) - self.assertEqual(defaults, ('p', 0, None)) - self.assertEqual(defaults, some.__defaults__) - - with self.assertRaises(SystemError): - 
_testcapi.function_get_defaults(None) # not a function - - def test_function_set_defaults(self): - def some( - pos_only1, pos_only2='p', - /, - zero=0, optional=None, - *, - kw1, - kw2=True, - ): - pass - - old_defaults = ('p', 0, None) - self.assertEqual(_testcapi.function_get_defaults(some), old_defaults) - self.assertEqual(some.__defaults__, old_defaults) - - with self.assertRaises(SystemError): - _testcapi.function_set_defaults(some, 1) # not tuple or None - self.assertEqual(_testcapi.function_get_defaults(some), old_defaults) - self.assertEqual(some.__defaults__, old_defaults) - - with self.assertRaises(SystemError): - _testcapi.function_set_defaults(1, ()) # not a function - self.assertEqual(_testcapi.function_get_defaults(some), old_defaults) - self.assertEqual(some.__defaults__, old_defaults) - - new_defaults = ('q', 1, None) - _testcapi.function_set_defaults(some, new_defaults) - self.assertEqual(_testcapi.function_get_defaults(some), new_defaults) - self.assertEqual(some.__defaults__, new_defaults) - - # Empty tuple is fine: - new_defaults = () - _testcapi.function_set_defaults(some, new_defaults) - self.assertEqual(_testcapi.function_get_defaults(some), new_defaults) - self.assertEqual(some.__defaults__, new_defaults) - - class tuplesub(tuple): ... # tuple subclasses must work - - new_defaults = tuplesub(((1, 2), ['a', 'b'], None)) - _testcapi.function_set_defaults(some, new_defaults) - self.assertEqual(_testcapi.function_get_defaults(some), new_defaults) - self.assertEqual(some.__defaults__, new_defaults) - - # `None` is special, it sets `defaults` to `NULL`, - # it needs special handling in `_testcapi`: - _testcapi.function_set_defaults(some, None) - self.assertEqual(_testcapi.function_get_defaults(some), None) - self.assertEqual(some.__defaults__, None) - - def test_function_get_kw_defaults(self): - def some( - pos_only1, pos_only2='p', - /, - zero=0, optional=None, - *, - kw1, - kw2=True, - ): - pass - - defaults = _testcapi.function_get_kw_defaults(some) - self.assertEqual(defaults, {'kw2': True}) - self.assertEqual(defaults, some.__kwdefaults__) - - with self.assertRaises(SystemError): - _testcapi.function_get_kw_defaults(None) # not a function - - def test_function_set_kw_defaults(self): - def some( - pos_only1, pos_only2='p', - /, - zero=0, optional=None, - *, - kw1, - kw2=True, - ): - pass - - old_defaults = {'kw2': True} - self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) - self.assertEqual(some.__kwdefaults__, old_defaults) - - with self.assertRaises(SystemError): - _testcapi.function_set_kw_defaults(some, 1) # not dict or None - self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) - self.assertEqual(some.__kwdefaults__, old_defaults) - - with self.assertRaises(SystemError): - _testcapi.function_set_kw_defaults(1, {}) # not a function - self.assertEqual(_testcapi.function_get_kw_defaults(some), old_defaults) - self.assertEqual(some.__kwdefaults__, old_defaults) - - new_defaults = {'kw2': (1, 2, 3)} - _testcapi.function_set_kw_defaults(some, new_defaults) - self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) - self.assertEqual(some.__kwdefaults__, new_defaults) - - # Empty dict is fine: - new_defaults = {} - _testcapi.function_set_kw_defaults(some, new_defaults) - self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) - self.assertEqual(some.__kwdefaults__, new_defaults) - - class dictsub(dict): ... 
# dict subclasses must work - - new_defaults = dictsub({'kw2': None}) - _testcapi.function_set_kw_defaults(some, new_defaults) - self.assertEqual(_testcapi.function_get_kw_defaults(some), new_defaults) - self.assertEqual(some.__kwdefaults__, new_defaults) - - # `None` is special, it sets `kwdefaults` to `NULL`, - # it needs special handling in `_testcapi`: - _testcapi.function_set_kw_defaults(some, None) - self.assertEqual(_testcapi.function_get_kw_defaults(some), None) - self.assertEqual(some.__kwdefaults__, None) - def test_unstable_gc_new_with_extra_data(self): class Data(_testcapi.ObjExtraData): __slots__ = ('x', 'y') @@ -1111,147 +906,6 @@ class Data(_testcapi.ObjExtraData): del d.extra self.assertIsNone(d.extra) - def test_get_type_name(self): - class MyType: - pass - - from _testcapi import ( - get_type_name, get_type_qualname, - get_type_fullyqualname, get_type_module_name) - - from collections import OrderedDict - ht = _testcapi.get_heaptype_for_name() - for cls, fullname, modname, qualname, name in ( - (int, - 'int', - 'builtins', - 'int', - 'int'), - (OrderedDict, - 'collections.OrderedDict', - 'collections', - 'OrderedDict', - 'OrderedDict'), - (ht, - '_testcapi.HeapTypeNameType', - '_testcapi', - 'HeapTypeNameType', - 'HeapTypeNameType'), - (MyType, - f'{__name__}.CAPITest.test_get_type_name..MyType', - __name__, - 'CAPITest.test_get_type_name..MyType', - 'MyType'), - ): - with self.subTest(cls=repr(cls)): - self.assertEqual(get_type_fullyqualname(cls), fullname) - self.assertEqual(get_type_module_name(cls), modname) - self.assertEqual(get_type_qualname(cls), qualname) - self.assertEqual(get_type_name(cls), name) - - # override __module__ - ht.__module__ = 'test_module' - self.assertEqual(get_type_fullyqualname(ht), 'test_module.HeapTypeNameType') - self.assertEqual(get_type_module_name(ht), 'test_module') - self.assertEqual(get_type_qualname(ht), 'HeapTypeNameType') - self.assertEqual(get_type_name(ht), 'HeapTypeNameType') - - # override __name__ and __qualname__ - MyType.__name__ = 'my_name' - MyType.__qualname__ = 'my_qualname' - self.assertEqual(get_type_fullyqualname(MyType), f'{__name__}.my_qualname') - self.assertEqual(get_type_module_name(MyType), __name__) - self.assertEqual(get_type_qualname(MyType), 'my_qualname') - self.assertEqual(get_type_name(MyType), 'my_name') - - # override also __module__ - MyType.__module__ = 'my_module' - self.assertEqual(get_type_fullyqualname(MyType), 'my_module.my_qualname') - self.assertEqual(get_type_module_name(MyType), 'my_module') - self.assertEqual(get_type_qualname(MyType), 'my_qualname') - self.assertEqual(get_type_name(MyType), 'my_name') - - # PyType_GetFullyQualifiedName() ignores the module if it's "builtins" - # or "__main__" of it is not a string - MyType.__module__ = 'builtins' - self.assertEqual(get_type_fullyqualname(MyType), 'my_qualname') - MyType.__module__ = '__main__' - self.assertEqual(get_type_fullyqualname(MyType), 'my_qualname') - MyType.__module__ = 123 - self.assertEqual(get_type_fullyqualname(MyType), 'my_qualname') - - def test_get_base_by_token(self): - def get_base_by_token(src, key, comparable=True): - def run(use_mro): - find_first = _testcapi.pytype_getbasebytoken - ret1, result = find_first(src, key, use_mro, True) - ret2, no_result = find_first(src, key, use_mro, False) - self.assertIn(ret1, (0, 1)) - self.assertEqual(ret1, result is not None) - self.assertEqual(ret1, ret2) - self.assertIsNone(no_result) - return result - - found_in_mro = run(True) - found_in_bases = run(False) - if comparable: - 
self.assertIs(found_in_mro, found_in_bases) - return found_in_mro - return found_in_mro, found_in_bases - - create_type = _testcapi.create_type_with_token - get_token = _testcapi.get_tp_token - - Py_TP_USE_SPEC = _testcapi.Py_TP_USE_SPEC - self.assertEqual(Py_TP_USE_SPEC, 0) - - A1 = create_type('_testcapi.A1', Py_TP_USE_SPEC) - self.assertTrue(get_token(A1) != Py_TP_USE_SPEC) - - B1 = create_type('_testcapi.B1', id(self)) - self.assertTrue(get_token(B1) == id(self)) - - tokenA1 = get_token(A1) - # find A1 from A1 - found = get_base_by_token(A1, tokenA1) - self.assertIs(found, A1) - - # no token in static types - STATIC = type(1) - self.assertEqual(get_token(STATIC), 0) - found = get_base_by_token(STATIC, tokenA1) - self.assertIs(found, None) - - # no token in pure subtypes - class A2(A1): pass - self.assertEqual(get_token(A2), 0) - # find A1 - class Z(STATIC, B1, A2): pass - found = get_base_by_token(Z, tokenA1) - self.assertIs(found, A1) - - # searching for NULL token is an error - with self.assertRaises(SystemError): - get_base_by_token(Z, 0) - with self.assertRaises(SystemError): - get_base_by_token(STATIC, 0) - - # share the token with A1 - C1 = create_type('_testcapi.C1', tokenA1) - self.assertTrue(get_token(C1) == tokenA1) - - # find C1 first by shared token - class Z(C1, A2): pass - found = get_base_by_token(Z, tokenA1) - self.assertIs(found, C1) - # B1 not found - found = get_base_by_token(Z, get_token(B1)) - self.assertIs(found, None) - - with self.assertRaises(TypeError): - _testcapi.pytype_getbasebytoken( - 'not a type', id(self), True, False) - def test_gen_get_code(self): def genf(): yield gen = genf() @@ -1460,127 +1114,6 @@ def test_pyobject_getitemdata_error(self): _testcapi.pyobject_getitemdata(0) - def test_function_get_closure(self): - from types import CellType - - def regular_function(): ... - def unused_one_level(arg1): - def inner(arg2, arg3): ... - return inner - def unused_two_levels(arg1, arg2): - def decorator(arg3, arg4): - def inner(arg5, arg6): ... 
- return inner - return decorator - def with_one_level(arg1): - def inner(arg2, arg3): - return arg1 + arg2 + arg3 - return inner - def with_two_levels(arg1, arg2): - def decorator(arg3, arg4): - def inner(arg5, arg6): - return arg1 + arg2 + arg3 + arg4 + arg5 + arg6 - return inner - return decorator - - # Functions without closures: - self.assertIsNone(_testcapi.function_get_closure(regular_function)) - self.assertIsNone(regular_function.__closure__) - - func = unused_one_level(1) - closure = _testcapi.function_get_closure(func) - self.assertIsNone(closure) - self.assertIsNone(func.__closure__) - - func = unused_two_levels(1, 2)(3, 4) - closure = _testcapi.function_get_closure(func) - self.assertIsNone(closure) - self.assertIsNone(func.__closure__) - - # Functions with closures: - func = with_one_level(5) - closure = _testcapi.function_get_closure(func) - self.assertEqual(closure, func.__closure__) - self.assertIsInstance(closure, tuple) - self.assertEqual(len(closure), 1) - self.assertEqual(len(closure), len(func.__code__.co_freevars)) - for cell in closure: - self.assertIsInstance(cell, CellType) - self.assertTrue(closure[0].cell_contents, 5) - - func = with_two_levels(1, 2)(3, 4) - closure = _testcapi.function_get_closure(func) - self.assertEqual(closure, func.__closure__) - self.assertIsInstance(closure, tuple) - self.assertEqual(len(closure), 4) - self.assertEqual(len(closure), len(func.__code__.co_freevars)) - for cell in closure: - self.assertIsInstance(cell, CellType) - self.assertEqual([cell.cell_contents for cell in closure], - [1, 2, 3, 4]) - - def test_function_get_closure_error(self): - with self.assertRaises(SystemError): - _testcapi.function_get_closure(1) - with self.assertRaises(SystemError): - _testcapi.function_get_closure(None) - - def test_function_set_closure(self): - from types import CellType - - def function_without_closure(): ... - def function_with_closure(arg): - def inner(): - return arg - return inner - - func = function_without_closure - _testcapi.function_set_closure(func, (CellType(1), CellType(1))) - closure = _testcapi.function_get_closure(func) - self.assertEqual([c.cell_contents for c in closure], [1, 1]) - self.assertEqual([c.cell_contents for c in func.__closure__], [1, 1]) - - func = function_with_closure(1) - _testcapi.function_set_closure(func, - (CellType(1), CellType(2), CellType(3))) - closure = _testcapi.function_get_closure(func) - self.assertEqual([c.cell_contents for c in closure], [1, 2, 3]) - self.assertEqual([c.cell_contents for c in func.__closure__], [1, 2, 3]) - - def test_function_set_closure_none(self): - def function_without_closure(): ... - def function_with_closure(arg): - def inner(): - return arg - return inner - - _testcapi.function_set_closure(function_without_closure, None) - self.assertIsNone( - _testcapi.function_get_closure(function_without_closure)) - self.assertIsNone(function_without_closure.__closure__) - - _testcapi.function_set_closure(function_with_closure, None) - self.assertIsNone( - _testcapi.function_get_closure(function_with_closure)) - self.assertIsNone(function_with_closure.__closure__) - - def test_function_set_closure_errors(self): - def function_without_closure(): ... 
- - with self.assertRaises(SystemError): - _testcapi.function_set_closure(None, ()) # not a function - - with self.assertRaises(SystemError): - _testcapi.function_set_closure(function_without_closure, 1) - self.assertIsNone(function_without_closure.__closure__) # no change - - # NOTE: this works, but goes against the docs: - _testcapi.function_set_closure(function_without_closure, (1, 2)) - self.assertEqual( - _testcapi.function_get_closure(function_without_closure), (1, 2)) - self.assertEqual(function_without_closure.__closure__, (1, 2)) - - class TestPendingCalls(unittest.TestCase): # See the comment in ceval.c (at the "handle_eval_breaker" label) @@ -2922,39 +2455,6 @@ def test_linked_lifecycle_link_incref_unlink_decref(self): 0, get_refcount(interpid)) -class BuiltinStaticTypesTests(unittest.TestCase): - - TYPES = [ - object, - type, - int, - str, - dict, - type(None), - bool, - BaseException, - Exception, - Warning, - DeprecationWarning, # Warning subclass - ] - - def test_tp_bases_is_set(self): - # PyTypeObject.tp_bases is documented as public API. - # See https://github.com/python/cpython/issues/105020. - for typeobj in self.TYPES: - with self.subTest(typeobj): - bases = _testcapi.type_get_tp_bases(typeobj) - self.assertIsNot(bases, None) - - def test_tp_mro_is_set(self): - # PyTypeObject.tp_bases is documented as public API. - # See https://github.com/python/cpython/issues/105020. - for typeobj in self.TYPES: - with self.subTest(typeobj): - mro = _testcapi.type_get_tp_mro(typeobj) - self.assertIsNot(mro, None) - - class TestStaticTypes(unittest.TestCase): _has_run = False @@ -3383,5 +2883,6 @@ def test_pack_version_ctypes(self): result = ctypes_func(*args) self.assertEqual(result, expected) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_capi/test_object.py b/Lib/test/test_capi/test_object.py index b0d39937fd865f..5d0a383de64520 100644 --- a/Lib/test/test_capi/test_object.py +++ b/Lib/test/test_capi/test_object.py @@ -1,9 +1,12 @@ import enum +import textwrap import unittest from test import support from test.support import import_helper from test.support import os_helper from test.support import threading_helper +from test.support.script_helper import assert_python_failure + _testlimitedcapi = import_helper.import_module('_testlimitedcapi') _testcapi = import_helper.import_module('_testcapi') @@ -170,5 +173,42 @@ def silly_func(obj): self.assertTrue(_testinternalcapi.has_deferred_refcount(silly_list)) +class CAPITest(unittest.TestCase): + def check_negative_refcount(self, code): + # bpo-35059: Check that Py_DECREF() reports the correct filename + # when calling _Py_NegativeRefcount() to abort Python. 
+ code = textwrap.dedent(code) + rc, out, err = assert_python_failure('-c', code) + self.assertRegex(err, + br'object\.c:[0-9]+: ' + br'_Py_NegativeRefcount: Assertion failed: ' + br'object has negative ref count') + + @unittest.skipUnless(hasattr(_testcapi, 'negative_refcount'), + 'need _testcapi.negative_refcount()') + def test_negative_refcount(self): + code = """ + import _testcapi + from test import support + + with support.SuppressCrashReport(): + _testcapi.negative_refcount() + """ + self.check_negative_refcount(code) + + @unittest.skipUnless(hasattr(_testcapi, 'decref_freed_object'), + 'need _testcapi.decref_freed_object()') + @support.skip_if_sanitizer("use after free on purpose", + address=True, memory=True, ub=True) + def test_decref_freed_object(self): + code = """ + import _testcapi + from test import support + + with support.SuppressCrashReport(): + _testcapi.decref_freed_object() + """ + self.check_negative_refcount(code) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_capi/test_opt.py b/Lib/test/test_capi/test_opt.py index 6a2f7726222f9b..02e534caec1162 100644 --- a/Lib/test/test_capi/test_opt.py +++ b/Lib/test/test_capi/test_opt.py @@ -9,30 +9,22 @@ import _opcode from test.support import (script_helper, requires_specialization, - import_helper, Py_GIL_DISABLED) + import_helper, Py_GIL_DISABLED, requires_jit_enabled, + reset_code) _testinternalcapi = import_helper.import_module("_testinternalcapi") from _testinternalcapi import TIER2_THRESHOLD -@contextlib.contextmanager -def temporary_optimizer(opt): - old_opt = _testinternalcapi.get_optimizer() - _testinternalcapi.set_optimizer(opt) - try: - yield - finally: - _testinternalcapi.set_optimizer(old_opt) - @contextlib.contextmanager def clear_executors(func): # Clear executors in func before and after running a block - func.__code__ = func.__code__.replace() + reset_code(func) try: yield finally: - func.__code__ = func.__code__.replace() + reset_code(func) def get_first_executor(func): @@ -57,8 +49,7 @@ def get_opnames(ex): @requires_specialization @unittest.skipIf(Py_GIL_DISABLED, "optimizer not yet supported in free-threaded builds") -@unittest.skipUnless(hasattr(_testinternalcapi, "get_optimizer"), - "Requires optimizer infrastructure") +@requires_jit_enabled class TestExecutorInvalidation(unittest.TestCase): def test_invalidate_object(self): @@ -75,10 +66,8 @@ def f{n}(): funcs = [ ns[f'f{n}'] for n in range(5)] objects = [object() for _ in range(5)] - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - for f in funcs: - f() + for f in funcs: + f() executors = [get_first_executor(f) for f in funcs] # Set things up so each executor depends on the objects # with an equal or lower index. 
@@ -106,9 +95,7 @@ def f(): pass """), ns, ns) f = ns['f'] - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - f() + f() exe = get_first_executor(f) self.assertIsNotNone(exe) self.assertTrue(exe.is_valid()) @@ -119,9 +106,7 @@ def test_sys__clear_internal_caches(self): def f(): for _ in range(TIER2_THRESHOLD): pass - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - f() + f() exe = get_first_executor(f) self.assertIsNotNone(exe) self.assertTrue(exe.is_valid()) @@ -133,8 +118,7 @@ def f(): @requires_specialization @unittest.skipIf(Py_GIL_DISABLED, "optimizer not yet supported in free-threaded builds") -@unittest.skipUnless(hasattr(_testinternalcapi, "get_optimizer"), - "Requires optimizer infrastructure") +@requires_jit_enabled @unittest.skipIf(os.getenv("PYTHON_UOPS_OPTIMIZE") == "0", "Needs uop optimizer to run.") class TestUops(unittest.TestCase): @@ -144,9 +128,7 @@ def testfunc(x): while i < x: i += 1 - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -192,11 +174,9 @@ def many_vars(): """), ns, ns) many_vars = ns["many_vars"] - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - ex = get_first_executor(many_vars) - self.assertIsNone(ex) - many_vars() + ex = get_first_executor(many_vars) + self.assertIsNone(ex) + many_vars() ex = get_first_executor(many_vars) self.assertIsNotNone(ex) @@ -215,10 +195,7 @@ def testfunc(x): while i < x: i += 1 - opt = _testinternalcapi.new_uop_optimizer() - - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -231,9 +208,7 @@ def testfunc(n): while i < n: i += 1 - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -246,9 +221,7 @@ def testfunc(a): if x is None: x = 0 - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(range(TIER2_THRESHOLD)) + testfunc(range(TIER2_THRESHOLD)) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -263,9 +236,7 @@ def testfunc(a): if x is not None: x = 0 - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(range(TIER2_THRESHOLD)) + testfunc(range(TIER2_THRESHOLD)) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -279,9 +250,7 @@ def testfunc(n): while not i >= n: i += 1 - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -294,9 +263,7 @@ def testfunc(n): while i < n: i += 1 - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -314,9 +281,7 @@ def testfunc(n): a += 1 return a - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -332,10 +297,8 @@ def testfunc(n): total += i return total - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - total = testfunc(TIER2_THRESHOLD) - self.assertEqual(total, 
sum(range(TIER2_THRESHOLD))) + total = testfunc(TIER2_THRESHOLD) + self.assertEqual(total, sum(range(TIER2_THRESHOLD))) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -353,11 +316,9 @@ def testfunc(a): total += i return total - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - a = list(range(TIER2_THRESHOLD)) - total = testfunc(a) - self.assertEqual(total, sum(a)) + a = list(range(TIER2_THRESHOLD)) + total = testfunc(a) + self.assertEqual(total, sum(a)) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -375,11 +336,9 @@ def testfunc(a): total += i return total - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - a = tuple(range(TIER2_THRESHOLD)) - total = testfunc(a) - self.assertEqual(total, sum(a)) + a = tuple(range(TIER2_THRESHOLD)) + total = testfunc(a) + self.assertEqual(total, sum(a)) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -395,14 +354,12 @@ def testfunc(it): for x in it: pass - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - a = [1, 2, 3] - it = iter(a) - testfunc(it) - a.append(4) - with self.assertRaises(StopIteration): - next(it) + a = [1, 2, 3] + it = iter(a) + testfunc(it) + a.append(4) + with self.assertRaises(StopIteration): + next(it) def test_call_py_exact_args(self): def testfunc(n): @@ -411,9 +368,7 @@ def dummy(x): for i in range(n): dummy(i) - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -429,9 +384,7 @@ def testfunc(n): else: i = 1 - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -457,9 +410,7 @@ def testfunc(n, m): x += 1000*i + j return x - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - x = testfunc(TIER2_THRESHOLD, TIER2_THRESHOLD) + x = testfunc(TIER2_THRESHOLD, TIER2_THRESHOLD) self.assertEqual(x, sum(range(TIER2_THRESHOLD)) * TIER2_THRESHOLD * 1001) @@ -484,9 +435,7 @@ def testfunc(n): bits += 1 return bits - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - x = testfunc(TIER2_THRESHOLD * 2) + x = testfunc(TIER2_THRESHOLD * 2) self.assertEqual(x, TIER2_THRESHOLD * 5) ex = get_first_executor(testfunc) @@ -499,16 +448,12 @@ def testfunc(n): @requires_specialization @unittest.skipIf(Py_GIL_DISABLED, "optimizer not yet supported in free-threaded builds") -@unittest.skipUnless(hasattr(_testinternalcapi, "get_optimizer"), - "Requires optimizer infrastructure") +@requires_jit_enabled @unittest.skipIf(os.getenv("PYTHON_UOPS_OPTIMIZE") == "0", "Needs uop optimizer to run.") class TestUopsOptimization(unittest.TestCase): def _run_with_optimizer(self, testfunc, arg): - res = None - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - res = testfunc(arg) + res = testfunc(arg) ex = get_first_executor(testfunc) return res, ex @@ -542,10 +487,7 @@ def testfunc(loops): num += 1 return a - opt = _testinternalcapi.new_uop_optimizer() - res = None - with temporary_optimizer(opt): - res = testfunc(TIER2_THRESHOLD) + res = testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -566,10 +508,7 @@ def testfunc(loops): num += 1 return x - opt = _testinternalcapi.new_uop_optimizer() - res = None - with temporary_optimizer(opt): - res = 
testfunc(TIER2_THRESHOLD) + res = testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -661,16 +600,14 @@ def testfunc(n): for i in range(n): dummy(i) - opt = _testinternalcapi.new_uop_optimizer() # Trigger specialization testfunc(8) - with temporary_optimizer(opt): - del dummy - gc.collect() + del dummy + gc.collect() - def dummy(x): - return x + 2 - testfunc(32) + def dummy(x): + return x + 2 + testfunc(32) ex = get_first_executor(testfunc) # Honestly as long as it doesn't crash it's fine. @@ -703,8 +640,6 @@ def testfunc(n): x = range(i) return x - opt = _testinternalcapi.new_uop_optimizer() - _testinternalcapi.set_optimizer(opt) testfunc(_testinternalcapi.TIER2_THRESHOLD) ex = get_first_executor(testfunc) @@ -712,7 +647,7 @@ def testfunc(n): uops = get_opnames(ex) assert "_LOAD_GLOBAL_BUILTINS" not in uops assert "_LOAD_CONST_INLINE_BORROW" in uops - """)) + """), PYTHON_JIT="1") self.assertEqual(result[0].rc, 0, result) def test_float_add_constant_propagation(self): @@ -1399,9 +1334,7 @@ def testfunc(n): # Only works on functions promoted to constants global_identity(i) - opt = _testinternalcapi.new_uop_optimizer() - with temporary_optimizer(opt): - testfunc(TIER2_THRESHOLD) + testfunc(TIER2_THRESHOLD) ex = get_first_executor(testfunc) self.assertIsNotNone(ex) @@ -1488,12 +1421,12 @@ def test_decref_escapes(self): class Convert9999ToNone: def __del__(self): ns = sys._getframe(1).f_locals - if ns["i"] == 9999: + if ns["i"] == _testinternalcapi.TIER2_THRESHOLD: ns["i"] = None def crash_addition(): try: - for i in range(10000): + for i in range(_testinternalcapi.TIER2_THRESHOLD + 1): n = Convert9999ToNone() i + i # Remove guards for i. n = None # Change i. diff --git a/Lib/test/test_capi/test_type.py b/Lib/test/test_capi/test_type.py index ffcaae73bca236..7e5d013d737ab0 100644 --- a/Lib/test/test_capi/test_type.py +++ b/Lib/test/test_capi/test_type.py @@ -4,7 +4,181 @@ _testcapi = import_helper.import_module('_testcapi') +class BuiltinStaticTypesTests(unittest.TestCase): + + TYPES = [ + object, + type, + int, + str, + dict, + type(None), + bool, + BaseException, + Exception, + Warning, + DeprecationWarning, # Warning subclass + ] + + def test_tp_bases_is_set(self): + # PyTypeObject.tp_bases is documented as public API. + # See https://github.com/python/cpython/issues/105020. + for typeobj in self.TYPES: + with self.subTest(typeobj): + bases = _testcapi.type_get_tp_bases(typeobj) + self.assertIsNot(bases, None) + + def test_tp_mro_is_set(self): + # PyTypeObject.tp_bases is documented as public API. + # See https://github.com/python/cpython/issues/105020. 
+ for typeobj in self.TYPES: + with self.subTest(typeobj): + mro = _testcapi.type_get_tp_mro(typeobj) + self.assertIsNot(mro, None) + + class TypeTests(unittest.TestCase): + def test_get_type_name(self): + class MyType: + pass + + from _testcapi import ( + get_type_name, get_type_qualname, + get_type_fullyqualname, get_type_module_name) + + from collections import OrderedDict + ht = _testcapi.get_heaptype_for_name() + for cls, fullname, modname, qualname, name in ( + (int, + 'int', + 'builtins', + 'int', + 'int'), + (OrderedDict, + 'collections.OrderedDict', + 'collections', + 'OrderedDict', + 'OrderedDict'), + (ht, + '_testcapi.HeapTypeNameType', + '_testcapi', + 'HeapTypeNameType', + 'HeapTypeNameType'), + (MyType, + f'{__name__}.TypeTests.test_get_type_name..MyType', + __name__, + 'TypeTests.test_get_type_name..MyType', + 'MyType'), + ): + with self.subTest(cls=repr(cls)): + self.assertEqual(get_type_fullyqualname(cls), fullname) + self.assertEqual(get_type_module_name(cls), modname) + self.assertEqual(get_type_qualname(cls), qualname) + self.assertEqual(get_type_name(cls), name) + + # override __module__ + ht.__module__ = 'test_module' + self.assertEqual(get_type_fullyqualname(ht), 'test_module.HeapTypeNameType') + self.assertEqual(get_type_module_name(ht), 'test_module') + self.assertEqual(get_type_qualname(ht), 'HeapTypeNameType') + self.assertEqual(get_type_name(ht), 'HeapTypeNameType') + + # override __name__ and __qualname__ + MyType.__name__ = 'my_name' + MyType.__qualname__ = 'my_qualname' + self.assertEqual(get_type_fullyqualname(MyType), f'{__name__}.my_qualname') + self.assertEqual(get_type_module_name(MyType), __name__) + self.assertEqual(get_type_qualname(MyType), 'my_qualname') + self.assertEqual(get_type_name(MyType), 'my_name') + + # override also __module__ + MyType.__module__ = 'my_module' + self.assertEqual(get_type_fullyqualname(MyType), 'my_module.my_qualname') + self.assertEqual(get_type_module_name(MyType), 'my_module') + self.assertEqual(get_type_qualname(MyType), 'my_qualname') + self.assertEqual(get_type_name(MyType), 'my_name') + + # PyType_GetFullyQualifiedName() ignores the module if it's "builtins" + # or "__main__" of it is not a string + MyType.__module__ = 'builtins' + self.assertEqual(get_type_fullyqualname(MyType), 'my_qualname') + MyType.__module__ = '__main__' + self.assertEqual(get_type_fullyqualname(MyType), 'my_qualname') + MyType.__module__ = 123 + self.assertEqual(get_type_fullyqualname(MyType), 'my_qualname') + + def test_get_base_by_token(self): + def get_base_by_token(src, key, comparable=True): + def run(use_mro): + find_first = _testcapi.pytype_getbasebytoken + ret1, result = find_first(src, key, use_mro, True) + ret2, no_result = find_first(src, key, use_mro, False) + self.assertIn(ret1, (0, 1)) + self.assertEqual(ret1, result is not None) + self.assertEqual(ret1, ret2) + self.assertIsNone(no_result) + return result + + found_in_mro = run(True) + found_in_bases = run(False) + if comparable: + self.assertIs(found_in_mro, found_in_bases) + return found_in_mro + return found_in_mro, found_in_bases + + create_type = _testcapi.create_type_with_token + get_token = _testcapi.get_tp_token + + Py_TP_USE_SPEC = _testcapi.Py_TP_USE_SPEC + self.assertEqual(Py_TP_USE_SPEC, 0) + + A1 = create_type('_testcapi.A1', Py_TP_USE_SPEC) + self.assertTrue(get_token(A1) != Py_TP_USE_SPEC) + + B1 = create_type('_testcapi.B1', id(self)) + self.assertTrue(get_token(B1) == id(self)) + + tokenA1 = get_token(A1) + # find A1 from A1 + found = get_base_by_token(A1, 
tokenA1) + self.assertIs(found, A1) + + # no token in static types + STATIC = type(1) + self.assertEqual(get_token(STATIC), 0) + found = get_base_by_token(STATIC, tokenA1) + self.assertIs(found, None) + + # no token in pure subtypes + class A2(A1): pass + self.assertEqual(get_token(A2), 0) + # find A1 + class Z(STATIC, B1, A2): pass + found = get_base_by_token(Z, tokenA1) + self.assertIs(found, A1) + + # searching for NULL token is an error + with self.assertRaises(SystemError): + get_base_by_token(Z, 0) + with self.assertRaises(SystemError): + get_base_by_token(STATIC, 0) + + # share the token with A1 + C1 = create_type('_testcapi.C1', tokenA1) + self.assertTrue(get_token(C1) == tokenA1) + + # find C1 first by shared token + class Z(C1, A2): pass + found = get_base_by_token(Z, tokenA1) + self.assertIs(found, C1) + # B1 not found + found = get_base_by_token(Z, get_token(B1)) + self.assertIs(found, None) + + with self.assertRaises(TypeError): + _testcapi.pytype_getbasebytoken( + 'not a type', id(self), True, False) + def test_freeze(self): # test PyType_Freeze() type_freeze = _testcapi.type_freeze diff --git a/Lib/test/test_class.py b/Lib/test/test_class.py index e20e59944e9ce9..017aca3c82850f 100644 --- a/Lib/test/test_class.py +++ b/Lib/test/test_class.py @@ -1,6 +1,7 @@ "Test the functionality of Python classes implementing operators." import unittest +from test import support from test.support import cpython_only, import_helper, script_helper, skip_emscripten_stack_overflow testmeths = [ @@ -134,6 +135,7 @@ def __%s__(self, *args): AllTests = type("AllTests", (object,), d) del d, statictests, method, method_template +@support.thread_unsafe("callLst is shared between threads") class ClassTests(unittest.TestCase): def setUp(self): callLst[:] = [] diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py index 24cf357c581096..b949b310ac0f5f 100644 --- a/Lib/test/test_cmd_line.py +++ b/Lib/test/test_cmd_line.py @@ -491,7 +491,7 @@ def test_stdout_flush_at_shutdown(self): rc, out, err = assert_python_failure('-c', code) self.assertEqual(b'', out) self.assertEqual(120, rc) - self.assertIn(b'Exception ignored on flushing sys.stdout:\n' + self.assertIn(b'Exception ignored while flushing sys.stdout:\n' b'OSError: '.replace(b'\n', os.linesep.encode()), err) diff --git a/Lib/test/test_coroutines.py b/Lib/test/test_coroutines.py index 840043d5271224..ae3cd3555002ef 100644 --- a/Lib/test/test_coroutines.py +++ b/Lib/test/test_coroutines.py @@ -735,7 +735,7 @@ async def func(): pass def test_func_12(self): async def g(): - i = me.send(None) + me.send(None) await foo me = g() with self.assertRaisesRegex(ValueError, @@ -2136,8 +2136,10 @@ async def func(): pass coro = None support.gc_collect() + self.assertEqual(cm.unraisable.err_msg, + f"Exception ignored while finalizing " + f"coroutine {coro_repr}") self.assertIn("was never awaited", str(cm.unraisable.exc_value)) - self.assertEqual(repr(cm.unraisable.object), coro_repr) def test_for_assign_raising_stop_async_iteration(self): class BadTarget: @@ -2281,7 +2283,7 @@ async def __aexit__(self, exc_type, exc_val, exc_tb): buffer.append(exc_type.__name__) async def f(): - async with CM() as c: + async with CM(): await asyncio.sleep(0.01) raise MyException buffer.append('unreachable') @@ -2373,7 +2375,7 @@ def check(depth, msg): orig_depth = sys.get_coroutine_origin_tracking_depth() try: - msg = check(0, f"coroutine '{corofn.__qualname__}' was never awaited") + check(0, f"coroutine '{corofn.__qualname__}' was never awaited") check(1, "".join([ 
f"coroutine '{corofn.__qualname__}' was never awaited\n", "Coroutine created at (most recent call last)\n", @@ -2414,7 +2416,9 @@ async def corofn(): del coro support.gc_collect() - self.assertEqual(repr(cm.unraisable.object), coro_repr) + self.assertEqual(cm.unraisable.err_msg, + f"Exception ignored while finalizing " + f"coroutine {coro_repr}") self.assertEqual(cm.unraisable.exc_type, ZeroDivisionError) del warnings._warn_unawaited_coroutine diff --git a/Lib/test/test_ctypes/test_callbacks.py b/Lib/test/test_ctypes/test_callbacks.py index 8f483dfe1db801..6c7c2e5270736e 100644 --- a/Lib/test/test_ctypes/test_callbacks.py +++ b/Lib/test/test_ctypes/test_callbacks.py @@ -324,7 +324,7 @@ def func(): self.assertIsInstance(cm.unraisable.exc_value, TypeError) self.assertEqual(cm.unraisable.err_msg, - f"Exception ignored on converting result " + f"Exception ignored while converting result " f"of ctypes callback function {func!r}") self.assertIsNone(cm.unraisable.object) diff --git a/Lib/test/test_ctypes/test_random_things.py b/Lib/test/test_ctypes/test_random_things.py index 630f6ed9489eba..73ff57d925e2ea 100644 --- a/Lib/test/test_ctypes/test_random_things.py +++ b/Lib/test/test_ctypes/test_random_things.py @@ -51,7 +51,7 @@ def expect_unraisable(self, exc_type, exc_msg=None): if exc_msg is not None: self.assertEqual(str(cm.unraisable.exc_value), exc_msg) self.assertEqual(cm.unraisable.err_msg, - f"Exception ignored on calling ctypes " + f"Exception ignored while calling ctypes " f"callback function {callback_func!r}") self.assertIsNone(cm.unraisable.object) diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py index a7ebc9e8be0294..f2f3d9469f8bab 100644 --- a/Lib/test/test_descr.py +++ b/Lib/test/test_descr.py @@ -1103,6 +1103,7 @@ class MyFrozenSet(frozenset): with self.assertRaises(TypeError): frozenset().__class__ = MyFrozenSet + @support.thread_unsafe def test_slots(self): # Testing __slots__... class C0(object): @@ -5485,6 +5486,7 @@ def __repr__(self): {pickle.dumps, pickle._dumps}, {pickle.loads, pickle._loads})) + @support.thread_unsafe def test_pickle_slots(self): # Tests pickling of classes with __slots__. @@ -5552,6 +5554,7 @@ class E(C): y = pickle_copier.copy(x) self._assert_is_copy(x, y) + @support.thread_unsafe def test_reduce_copying(self): # Tests pickling and copying new-style classes and objects. 
global C1 diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py index df4bdf4a3dd312..0b273cbd63e21e 100644 --- a/Lib/test/test_dis.py +++ b/Lib/test/test_dis.py @@ -15,7 +15,7 @@ import unittest from test.support import (captured_stdout, requires_debug_ranges, requires_specialization, cpython_only, - os_helper) + os_helper, import_helper, reset_code) from test.support.bytecode_helper import BytecodeTestCase @@ -892,7 +892,7 @@ def loop_test(): %3d RESUME_CHECK 0 %3d BUILD_LIST 0 - LOAD_CONST_MORTAL 0 ((1, 2, 3)) + LOAD_CONST_MORTAL 1 ((1, 2, 3)) LIST_EXTEND 1 LOAD_SMALL_INT 3 BINARY_OP 5 (*) @@ -904,11 +904,11 @@ def loop_test(): LOAD_FAST 0 (i) CALL_PY_GENERAL 1 POP_TOP - JUMP_BACKWARD 16 (to L1) + JUMP_BACKWARD_{: <6} 16 (to L1) %3d L2: END_FOR POP_ITER - LOAD_CONST_IMMORTAL 1 (None) + LOAD_CONST_IMMORTAL 0 (None) RETURN_VALUE """ % (loop_test.__code__.co_firstlineno, loop_test.__code__.co_firstlineno + 1, @@ -931,8 +931,6 @@ def extended_arg_quick(): """% (extended_arg_quick.__code__.co_firstlineno, extended_arg_quick.__code__.co_firstlineno + 1,) -ADAPTIVE_WARMUP_DELAY = 2 - class DisTestBase(unittest.TestCase): "Common utilities for DisTests and TestDisTraceback" @@ -1259,8 +1257,9 @@ def test__try_compile_no_context_exc_on_error(self): self.assertIsNone(e.__context__) @staticmethod - def code_quicken(f, times=ADAPTIVE_WARMUP_DELAY): - for _ in range(times): + def code_quicken(f): + _testinternalcapi = import_helper.import_module("_testinternalcapi") + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): f() @cpython_only @@ -1306,16 +1305,18 @@ def test_call_specialize(self): @requires_specialization def test_loop_quicken(self): # Loop can trigger a quicken where the loop is located - self.code_quicken(loop_test, 4) + self.code_quicken(loop_test) got = self.get_disassembly(loop_test, adaptive=True) - expected = dis_loop_test_quickened_code + jit = import_helper.import_module("_testinternalcapi").jit_enabled() + expected = dis_loop_test_quickened_code.format("JIT" if jit else "NO_JIT") self.do_disassembly_compare(got, expected) @cpython_only @requires_specialization def test_loop_with_conditional_at_end_is_quickened(self): + _testinternalcapi = import_helper.import_module("_testinternalcapi") def for_loop_true(x): - for i in range(10): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): if x: pass @@ -1324,7 +1325,7 @@ def for_loop_true(x): self.get_disassembly(for_loop_true, adaptive=True)) def for_loop_false(x): - for i in range(10): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): if x: pass @@ -1334,7 +1335,7 @@ def for_loop_false(x): def while_loop(): i = 0 - while i < 10: + while i < _testinternalcapi.SPECIALIZATION_THRESHOLD: i += 1 while_loop() @@ -1355,7 +1356,7 @@ def f(): self.code_quicken(f) else: # "copy" the code to un-quicken it: - f.__code__ = f.__code__.replace() + reset_code(f) for instruction in _unroll_caches_as_Instructions(dis.get_instructions( f, show_caches=True, adaptive=adaptive ), show_caches=True): diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index ffb69c87a065aa..cd65496cafb04d 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -384,11 +384,14 @@ def test_simple_initialization_api(self): def test_specialized_static_code_gets_unspecialized_at_Py_FINALIZE(self): # https://github.com/python/cpython/issues/92031 - code = textwrap.dedent("""\ + _testinternalcapi = import_helper.import_module("_testinternalcapi") + + code = textwrap.dedent(f"""\ import dis import importlib._bootstrap import 
opcode import test.test_dis + import test.support def is_specialized(f): for instruction in dis.get_instructions(f, adaptive=True): @@ -407,11 +410,11 @@ def is_specialized(f): func = importlib._bootstrap._handle_fromlist # "copy" the code to un-specialize it: - func.__code__ = func.__code__.replace() + test.support.reset_code(func) assert not is_specialized(func), "specialized instructions found" - for i in range(test.test_dis.ADAPTIVE_WARMUP_DELAY): + for _ in range({_testinternalcapi.SPECIALIZATION_THRESHOLD}): func(importlib._bootstrap, ["x"], lambda *args: None) assert is_specialized(func), "no specialized instructions found" @@ -1983,56 +1986,5 @@ def test_presite(self): self.assertIn("unique-python-message", out) -class StdPrinterTests(EmbeddingTestsMixin, unittest.TestCase): - # Test PyStdPrinter_Type which is used by _PySys_SetPreliminaryStderr(): - # "Set up a preliminary stderr printer until we have enough - # infrastructure for the io module in place." - - STDOUT_FD = 1 - - def create_printer(self, fd): - ctypes = import_helper.import_module('ctypes') - PyFile_NewStdPrinter = ctypes.pythonapi.PyFile_NewStdPrinter - PyFile_NewStdPrinter.argtypes = (ctypes.c_int,) - PyFile_NewStdPrinter.restype = ctypes.py_object - return PyFile_NewStdPrinter(fd) - - def test_write(self): - message = "unicode:\xe9-\u20ac-\udc80!\n" - - stdout_fd = self.STDOUT_FD - stdout_fd_copy = os.dup(stdout_fd) - self.addCleanup(os.close, stdout_fd_copy) - - rfd, wfd = os.pipe() - self.addCleanup(os.close, rfd) - self.addCleanup(os.close, wfd) - try: - # PyFile_NewStdPrinter() only accepts fileno(stdout) - # or fileno(stderr) file descriptor. - os.dup2(wfd, stdout_fd) - - printer = self.create_printer(stdout_fd) - printer.write(message) - finally: - os.dup2(stdout_fd_copy, stdout_fd) - - data = os.read(rfd, 100) - self.assertEqual(data, message.encode('utf8', 'backslashreplace')) - - def test_methods(self): - fd = self.STDOUT_FD - printer = self.create_printer(fd) - self.assertEqual(printer.fileno(), fd) - self.assertEqual(printer.isatty(), os.isatty(fd)) - printer.flush() # noop - printer.close() # noop - - def test_disallow_instantiation(self): - fd = self.STDOUT_FD - printer = self.create_printer(fd) - support.check_disallow_instantiation(self, type(printer)) - - if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index 2d324827451b54..3838eb5b27c9e6 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -1678,10 +1678,13 @@ def __del__(self): obj = BrokenDel() with support.catch_unraisable_exception() as cm: + obj_repr = repr(type(obj).__del__) del obj gc_collect() # For PyPy or other GCs. 
- self.assertEqual(cm.unraisable.object, BrokenDel.__del__) + self.assertEqual(cm.unraisable.err_msg, + f"Exception ignored while calling " + f"deallocator {obj_repr}") self.assertIsNotNone(cm.unraisable.exc_traceback) def test_unhandled(self): diff --git a/Lib/test/test_frame.py b/Lib/test/test_frame.py index 7bd13eada8fedf..4d086064023488 100644 --- a/Lib/test/test_frame.py +++ b/Lib/test/test_frame.py @@ -773,51 +773,6 @@ def f(): self.assertIs(catcher.unraisable.exc_type, TypeError) self.assertIsNone(weak()) -@unittest.skipIf(_testcapi is None, 'need _testcapi') -class TestCAPI(unittest.TestCase): - def getframe(self): - return sys._getframe() - - def test_frame_getters(self): - frame = self.getframe() - self.assertEqual(frame.f_locals, _testcapi.frame_getlocals(frame)) - self.assertIs(frame.f_globals, _testcapi.frame_getglobals(frame)) - self.assertIs(frame.f_builtins, _testcapi.frame_getbuiltins(frame)) - self.assertEqual(frame.f_lasti, _testcapi.frame_getlasti(frame)) - - def test_getvar(self): - current_frame = sys._getframe() - x = 1 - self.assertEqual(_testcapi.frame_getvar(current_frame, "x"), 1) - self.assertEqual(_testcapi.frame_getvarstring(current_frame, b"x"), 1) - with self.assertRaises(NameError): - _testcapi.frame_getvar(current_frame, "y") - with self.assertRaises(NameError): - _testcapi.frame_getvarstring(current_frame, b"y") - - # wrong name type - with self.assertRaises(TypeError): - _testcapi.frame_getvar(current_frame, b'x') - with self.assertRaises(TypeError): - _testcapi.frame_getvar(current_frame, 123) - - def getgenframe(self): - yield sys._getframe() - - def test_frame_get_generator(self): - gen = self.getgenframe() - frame = next(gen) - self.assertIs(gen, _testcapi.frame_getgenerator(frame)) - - def test_frame_fback_api(self): - """Test that accessing `f_back` does not cause a segmentation fault on - a frame created with `PyFrame_New` (GH-99110).""" - def dummy(): - pass - - frame = _testcapi.frame_new(dummy.__code__, globals(), locals()) - # The following line should not cause a segmentation fault. 
- self.assertIsNone(frame.f_back) if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_free_threading/test_dict.py b/Lib/test/test_free_threading/test_dict.py index 13717cb39fa35d..4f605e0c51f0d5 100644 --- a/Lib/test/test_free_threading/test_dict.py +++ b/Lib/test/test_free_threading/test_dict.py @@ -5,7 +5,7 @@ from ast import Or from functools import partial -from threading import Thread +from threading import Barrier, Thread from unittest import TestCase try: @@ -142,6 +142,27 @@ def writer_func(l): for ref in thread_list: self.assertIsNone(ref()) + def test_racing_get_set_dict(self): + """Races getting and setting a dict should be thread safe""" + THREAD_COUNT = 10 + barrier = Barrier(THREAD_COUNT) + def work(d): + barrier.wait() + for _ in range(1000): + d[10] = 0 + d.get(10, None) + _ = d[10] + + d = {} + worker_threads = [] + for ii in range(THREAD_COUNT): + worker_threads.append(Thread(target=work, args=[d])) + for t in worker_threads: + t.start() + for t in worker_threads: + t.join() + + def test_racing_set_object_dict(self): """Races assigning to __dict__ should be thread safe""" class C: pass diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py index bb78dd9af83b62..d2b33706ea6b75 100644 --- a/Lib/test/test_generated_cases.py +++ b/Lib/test/test_generated_cases.py @@ -286,7 +286,7 @@ def run_cases_test(self, input: str, expected: str): instructions, labels_with_prelude_and_postlude = rest.split(tier1_generator.INSTRUCTION_END_MARKER) _, labels_with_postlude = labels_with_prelude_and_postlude.split(tier1_generator.LABEL_START_MARKER) labels, _ = labels_with_postlude.split(tier1_generator.LABEL_END_MARKER) - actual = instructions + labels + actual = instructions.strip() + "\n\n " + labels.strip() # if actual.strip() != expected.strip(): # print("Actual:") # print(actual) @@ -538,7 +538,9 @@ def test_error_if_plain(self): frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(OP); - if (cond) goto label; + if (cond) { + goto label; + } DISPATCH(); } """ @@ -555,7 +557,9 @@ def test_error_if_plain_with_comment(self): frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(OP); - if (cond) goto label; + if (cond) { + goto label; + } // Comment is ok DISPATCH(); } @@ -582,7 +586,9 @@ def test_error_if_pop(self): right = stack_pointer[-1]; left = stack_pointer[-2]; SPAM(left, right); - if (cond) goto pop_2_label; + if (cond) { + goto pop_2_label; + } res = 0; stack_pointer[-2] = res; stack_pointer += -1; @@ -611,7 +617,9 @@ def test_error_if_pop_with_result(self): right = stack_pointer[-1]; left = stack_pointer[-2]; res = SPAM(left, right); - if (cond) goto pop_2_label; + if (cond) { + goto pop_2_label; + } stack_pointer[-2] = res; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); @@ -644,6 +652,9 @@ def test_cache_effect(self): def test_suppress_dispatch(self): input = """ + label(somewhere) { + } + inst(OP, (--)) { goto somewhere; } @@ -655,6 +666,11 @@ def test_suppress_dispatch(self): INSTRUCTION_STATS(OP); goto somewhere; } + + somewhere: + { + + } """ self.run_cases_test(input, output) @@ -1392,7 +1408,9 @@ def test_pop_on_error_peeks(self): // THIRD { // Mark j and k as used - if (cond) goto pop_2_error; + if (cond) { + goto pop_2_error; + } } stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); @@ -1758,9 +1776,15 @@ def test_kill_in_wrong_order(self): def test_complex_label(self): input = """ + label(other_label) { + } + + label(other_label2) { + } + label(my_label) { // Comment - do_thing() + do_thing(); if 
(complex) { goto other_label; } @@ -1769,10 +1793,22 @@ def test_complex_label(self): """ output = """ + other_label: + { + + } + + other_label2: + { + + } + my_label: { // Comment - do_thing() + _PyFrame_SetStackPointer(frame, stack_pointer); + do_thing(); + stack_pointer = _PyFrame_GetStackPointer(frame); if (complex) { goto other_label; } @@ -1781,6 +1817,60 @@ def test_complex_label(self): """ self.run_cases_test(input, output) + def test_spilled_label(self): + input = """ + spilled label(one) { + RELOAD_STACK(); + goto two; + } + + label(two) { + SAVE_STACK(); + goto one; + } + """ + + output = """ + one: + { + /* STACK SPILLED */ + stack_pointer = _PyFrame_GetStackPointer(frame); + goto two; + } + + two: + { + _PyFrame_SetStackPointer(frame, stack_pointer); + goto one; + } + """ + self.run_cases_test(input, output) + + + def test_incorrect_spills(self): + input1 = """ + spilled label(one) { + goto two; + } + + label(two) { + } + """ + + input2 = """ + spilled label(one) { + } + + label(two) { + goto one; + } + """ + with self.assertRaisesRegex(SyntaxError, ".*reload.*"): + self.run_cases_test(input1, "") + with self.assertRaisesRegex(SyntaxError, ".*spill.*"): + self.run_cases_test(input2, "") + + def test_multiple_labels(self): input = """ label(my_label_1) { @@ -1792,7 +1882,7 @@ def test_multiple_labels(self): label(my_label_2) { // Comment do_thing2(); - goto my_label_3; + goto my_label_1; } """ @@ -1808,7 +1898,7 @@ def test_multiple_labels(self): { // Comment do_thing2(); - goto my_label_3; + goto my_label_1; } """ diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index b6985054c33d10..bf4b88cd9c4450 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -2664,14 +2664,18 @@ def printsolution(self, x): >>> with support.catch_unraisable_exception() as cm: ... g = f() ... next(g) +... gen_repr = repr(g) ... del g ... +... cm.unraisable.err_msg == (f'Exception ignored while closing ' +... f'generator {gen_repr}') ... cm.unraisable.exc_type == RuntimeError ... "generator ignored GeneratorExit" in str(cm.unraisable.exc_value) ... cm.unraisable.exc_traceback is not None True True True +True And errors thrown during closing should propagate: @@ -2776,10 +2780,12 @@ def printsolution(self, x): ... invoke("del failed") ... >>> with support.catch_unraisable_exception() as cm: -... l = Leaker() -... del l +... leaker = Leaker() +... del_repr = repr(type(leaker).__del__) +... del leaker ... -... cm.unraisable.object == Leaker.__del__ +... cm.unraisable.err_msg == (f'Exception ignored while ' +... f'calling deallocator {del_repr}') ... cm.unraisable.exc_type == RuntimeError ... str(cm.unraisable.exc_value) == "del failed" ... cm.unraisable.exc_traceback is not None diff --git a/Lib/test/test_glob.py b/Lib/test/test_glob.py index 1a836e34e8712f..a45b30599d5309 100644 --- a/Lib/test/test_glob.py +++ b/Lib/test/test_glob.py @@ -171,37 +171,45 @@ def test_glob_directory_names(self): self.norm('aab', 'F')]) def test_glob_directory_with_trailing_slash(self): - # Patterns ending with a slash shouldn't match non-dirs - res = glob.glob(self.norm('Z*Z') + os.sep) - self.assertEqual(res, []) - res = glob.glob(self.norm('ZZZ') + os.sep) - self.assertEqual(res, []) - # When there is a wildcard pattern which ends with os.sep, glob() - # doesn't blow up. 
- res = glob.glob(self.norm('aa*') + os.sep) - self.assertEqual(len(res), 2) - # either of these results is reasonable - self.assertIn(set(res), [ - {self.norm('aaa'), self.norm('aab')}, - {self.norm('aaa') + os.sep, self.norm('aab') + os.sep}, - ]) + seps = (os.sep, os.altsep) if os.altsep else (os.sep,) + for sep in seps: + # Patterns ending with a slash shouldn't match non-dirs + self.assertEqual(glob.glob(self.norm('Z*Z') + sep), []) + self.assertEqual(glob.glob(self.norm('ZZZ') + sep), []) + self.assertEqual(glob.glob(self.norm('aaa') + sep), + [self.norm('aaa') + sep]) + # Preserving the redundant separators is an implementation detail. + self.assertEqual(glob.glob(self.norm('aaa') + sep*2), + [self.norm('aaa') + sep*2]) + # When there is a wildcard pattern which ends with a pathname + # separator, glob() doesn't blow. + # The result should end with the pathname separator. + # Normalizing the trailing separator is an implementation detail. + eq = self.assertSequencesEqual_noorder + eq(glob.glob(self.norm('aa*') + sep), + [self.norm('aaa') + os.sep, self.norm('aab') + os.sep]) + # Stripping the redundant separators is an implementation detail. + eq(glob.glob(self.norm('aa*') + sep*2), + [self.norm('aaa') + os.sep, self.norm('aab') + os.sep]) def test_glob_bytes_directory_with_trailing_slash(self): # Same as test_glob_directory_with_trailing_slash, but with a # bytes argument. - res = glob.glob(os.fsencode(self.norm('Z*Z') + os.sep)) - self.assertEqual(res, []) - res = glob.glob(os.fsencode(self.norm('ZZZ') + os.sep)) - self.assertEqual(res, []) - res = glob.glob(os.fsencode(self.norm('aa*') + os.sep)) - self.assertEqual(len(res), 2) - # either of these results is reasonable - self.assertIn(set(res), [ - {os.fsencode(self.norm('aaa')), - os.fsencode(self.norm('aab'))}, - {os.fsencode(self.norm('aaa') + os.sep), - os.fsencode(self.norm('aab') + os.sep)}, - ]) + seps = (os.sep, os.altsep) if os.altsep else (os.sep,) + for sep in seps: + self.assertEqual(glob.glob(os.fsencode(self.norm('Z*Z') + sep)), []) + self.assertEqual(glob.glob(os.fsencode(self.norm('ZZZ') + sep)), []) + self.assertEqual(glob.glob(os.fsencode(self.norm('aaa') + sep)), + [os.fsencode(self.norm('aaa') + sep)]) + self.assertEqual(glob.glob(os.fsencode(self.norm('aaa') + sep*2)), + [os.fsencode(self.norm('aaa') + sep*2)]) + eq = self.assertSequencesEqual_noorder + eq(glob.glob(os.fsencode(self.norm('aa*') + sep)), + [os.fsencode(self.norm('aaa') + os.sep), + os.fsencode(self.norm('aab') + os.sep)]) + eq(glob.glob(os.fsencode(self.norm('aa*') + sep*2)), + [os.fsencode(self.norm('aaa') + os.sep), + os.fsencode(self.norm('aab') + os.sep)]) @skip_unless_symlink def test_glob_symlinks(self): @@ -209,8 +217,7 @@ def test_glob_symlinks(self): eq(self.glob('sym3'), [self.norm('sym3')]) eq(self.glob('sym3', '*'), [self.norm('sym3', 'EF'), self.norm('sym3', 'efg')]) - self.assertIn(self.glob('sym3' + os.sep), - [[self.norm('sym3')], [self.norm('sym3') + os.sep]]) + eq(self.glob('sym3' + os.sep), [self.norm('sym3') + os.sep]) eq(self.glob('*', '*F'), [self.norm('aaa', 'zzzF'), self.norm('aab', 'F'), self.norm('sym3', 'EF')]) diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py index 7a7ec555a2dbbb..75b748aee05940 100644 --- a/Lib/test/test_httplib.py +++ b/Lib/test/test_httplib.py @@ -1092,6 +1092,25 @@ def test_chunked(self): self.assertEqual(resp.read(), expected) resp.close() + # Explicit full read + for n in (-123, -1, None): + with self.subTest('full read', n=n): + sock = FakeSocket(chunked_start + last_chunk + 
chunked_end) + resp = client.HTTPResponse(sock, method="GET") + resp.begin() + self.assertTrue(resp.chunked) + self.assertEqual(resp.read(n), expected) + resp.close() + + # Read first chunk + with self.subTest('read1(-1)'): + sock = FakeSocket(chunked_start + last_chunk + chunked_end) + resp = client.HTTPResponse(sock, method="GET") + resp.begin() + self.assertTrue(resp.chunked) + self.assertEqual(resp.read1(-1), b"hello worl") + resp.close() + # Various read sizes for n in range(1, 12): sock = FakeSocket(chunked_start + last_chunk + chunked_end) diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index 2e5f6475ae3b1e..e34fe45fd68e52 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -22,6 +22,7 @@ import logging.handlers import logging.config + import codecs import configparser import copy @@ -2095,6 +2096,18 @@ def test_udp_reconnection(self): self.handled.wait(support.LONG_TIMEOUT) self.assertEqual(self.log_output, b'<11>sp\xc3\xa4m\x00') + @patch('socket.socket') + def test_tcp_timeout(self, mock_socket): + instance_mock_sock = mock_socket.return_value + instance_mock_sock.connect.side_effect = socket.timeout + + with self.assertRaises(socket.timeout): + logging.handlers.SysLogHandler(address=('localhost', 514), + socktype=socket.SOCK_STREAM, + timeout=1) + + instance_mock_sock.close.assert_called() + @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "Unix sockets required") class UnixSysLogHandlerTest(SysLogHandlerTest): diff --git a/Lib/test/test_monitoring.py b/Lib/test/test_monitoring.py index 364381e7dce00a..3125d190626e38 100644 --- a/Lib/test/test_monitoring.py +++ b/Lib/test/test_monitoring.py @@ -14,6 +14,7 @@ from test.support import requires_specialization_ft, script_helper _testcapi = test.support.import_helper.import_module("_testcapi") +_testinternalcapi = test.support.import_helper.import_module("_testinternalcapi") PAIR = (0,1) @@ -897,13 +898,13 @@ def implicit_stop_iteration(iterator=None): # re-specialize immediately, so that we can we can test the # unspecialized version of the loop first. # Note: this assumes that we don't specialize loops over sets. - implicit_stop_iteration(set(range(100))) + implicit_stop_iteration(set(range(_testinternalcapi.SPECIALIZATION_THRESHOLD))) # This will record a RAISE event for the StopIteration. self.check_events(implicit_stop_iteration, expected, recorders=recorders) # Now specialize, so that we see a STOP_ITERATION event. - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): implicit_stop_iteration() # This will record a STOP_ITERATION event for the StopIteration. @@ -1057,7 +1058,7 @@ def f(): except ValueError: pass - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): f() recorders = ( ReturnRecorder, @@ -2033,8 +2034,8 @@ def __init__(self, set_event): sys.monitoring.set_events(TEST_TOOL, E.PY_RESUME) def make_foo_optimized_then_set_event(): - for i in range(100): - Foo(i == 99) + for i in range(_testinternalcapi.SPECIALIZATION_THRESHOLD + 1): + Foo(i == _testinternalcapi.SPECIALIZATION_THRESHOLD) try: make_foo_optimized_then_set_event() @@ -2106,9 +2107,9 @@ def test_func(recorder): set_events = sys.monitoring.set_events line = E.LINE i = 0 - for i in range(551): - # Turn on events without branching once i reaches 500. - set_events(TEST_TOOL, line * int(i >= 500)) + for i in range(_testinternalcapi.SPECIALIZATION_THRESHOLD + 51): + # Turn on events without branching once i reaches _testinternalcapi.SPECIALIZATION_THRESHOLD. 
+ set_events(TEST_TOOL, line * int(i >= _testinternalcapi.SPECIALIZATION_THRESHOLD)) pass pass pass diff --git a/Lib/test/test_opcache.py b/Lib/test/test_opcache.py index 4d7304b1c9abb6..87de4c94ba26fb 100644 --- a/Lib/test/test_opcache.py +++ b/Lib/test/test_opcache.py @@ -6,7 +6,7 @@ import unittest from test.support import (threading_helper, check_impl_detail, requires_specialization, requires_specialization_ft, - cpython_only) + cpython_only, requires_jit_disabled, reset_code) from test.support.import_helper import import_module # Skip this module on other interpreters, it is cpython specific: @@ -16,20 +16,6 @@ _testinternalcapi = import_module("_testinternalcapi") -def disabling_optimizer(func): - def wrapper(*args, **kwargs): - if not hasattr(_testinternalcapi, "get_optimizer"): - return func(*args, **kwargs) - old_opt = _testinternalcapi.get_optimizer() - _testinternalcapi.set_optimizer(None) - try: - return func(*args, **kwargs) - finally: - _testinternalcapi.set_optimizer(old_opt) - - return wrapper - - class TestBase(unittest.TestCase): def assert_specialized(self, f, opname): instructions = dis.get_instructions(f, adaptive=True) @@ -59,7 +45,8 @@ def f(self): d = D() - self.assertEqual(d.f(), 1) # warmup + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD - 1): + self.assertEqual(d.f(), 1) # warmup calls.clear() self.assertEqual(d.f(), 1) # try to specialize self.assertEqual(calls, [(d, D)]) @@ -79,7 +66,7 @@ def f(o): return o.x o = C() - for i in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): assert f(o) == 1 Descriptor.__get__ = lambda self, instance, value: 2 @@ -106,13 +93,13 @@ def __set__(self, instance, value): def f(): return Class.attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) Descriptor.__get__ = __get__ Descriptor.__set__ = __set__ - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) def test_metaclass_descriptor_shadows_class_attribute(self): @@ -127,7 +114,7 @@ class Class(metaclass=Metaclass): def f(): return Class.attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) def test_metaclass_set_descriptor_after_optimization(self): @@ -144,12 +131,12 @@ def attribute(self): def f(): return Class.attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) Metaclass.attribute = attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) def test_metaclass_del_descriptor_after_optimization(self): @@ -164,12 +151,12 @@ class Class(metaclass=Metaclass): def f(): return Class.attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) del Metaclass.attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) def test_type_descriptor_shadows_attribute_method(self): @@ -179,7 +166,7 @@ class Class: def f(): return Class.mro - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertIsNone(f()) def test_type_descriptor_shadows_attribute_member(self): @@ -189,7 +176,7 @@ class Class: def f(): return Class.__base__ - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertIs(f(), object) def test_type_descriptor_shadows_attribute_getset(self): @@ -199,7 +186,7 
@@ class Class: def f(): return Class.__name__ - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertEqual(f(), "Class") def test_metaclass_getattribute(self): @@ -213,7 +200,7 @@ class Class(metaclass=Metaclass): def f(): return Class.attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) def test_metaclass_swap(self): @@ -233,12 +220,12 @@ class Class(metaclass=OldMetaclass): def f(): return Class.attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) Class.__class__ = NewMetaclass - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) def test_load_shadowing_slot_should_raise_type_error(self): @@ -255,7 +242,7 @@ def f(o): o = Sneaky() o.shadowed = 42 - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): with self.assertRaises(TypeError): f(o) @@ -272,7 +259,7 @@ def f(o): o = Sneaky() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): with self.assertRaises(TypeError): f(o) @@ -288,7 +275,7 @@ def f(o): o = Sneaky() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): with self.assertRaises(TypeError): f(o) @@ -304,7 +291,7 @@ def f(o): o = Sneaky() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): with self.assertRaises(TypeError): f(o) @@ -332,13 +319,13 @@ def attribute(): def f(): return instance.attribute() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) Descriptor.__get__ = __get__ Descriptor.__set__ = __set__ - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) def test_metaclass_descriptor_added_after_optimization(self): @@ -361,13 +348,13 @@ def __set__(self, instance, value): def f(): return Class.attribute() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) Descriptor.__get__ = __get__ Descriptor.__set__ = __set__ - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) def test_metaclass_descriptor_shadows_class_attribute(self): @@ -383,7 +370,7 @@ def attribute(): def f(): return Class.attribute() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) def test_metaclass_set_descriptor_after_optimization(self): @@ -401,12 +388,12 @@ def attribute(self): def f(): return Class.attribute() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) Metaclass.attribute = attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) def test_metaclass_del_descriptor_after_optimization(self): @@ -422,12 +409,12 @@ def attribute(): def f(): return Class.attribute() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) del Metaclass.attribute - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) def test_type_descriptor_shadows_attribute_method(self): @@ -438,7 +425,7 @@ def mro(): def f(): return Class.mro() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertEqual(f(), ["Spam", "eggs"]) def 
test_type_descriptor_shadows_attribute_member(self): @@ -449,7 +436,7 @@ def __base__(): def f(): return Class.__base__() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertNotEqual(f(), "Spam") def test_metaclass_getattribute(self): @@ -464,7 +451,7 @@ def attribute(): def f(): return Class.attribute() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) def test_metaclass_swap(self): @@ -484,12 +471,12 @@ class Class(metaclass=OldMetaclass): def f(): return Class.attribute() - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertTrue(f()) Class.__class__ = NewMetaclass - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertFalse(f()) @@ -504,7 +491,7 @@ def f(): pass f.__defaults__ = (None,) - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): f() def test_too_many_defaults_1(self): @@ -512,7 +499,7 @@ def f(x): pass f.__defaults__ = (None, None) - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): f(None) f() @@ -521,12 +508,12 @@ def f(x, y): pass f.__defaults__ = (None, None, None) - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): f(None, None) f(None) f() - @disabling_optimizer + @requires_jit_disabled @requires_specialization_ft def test_assign_init_code(self): class MyClass: @@ -537,7 +524,7 @@ def instantiate(): return MyClass() # Trigger specialization - for _ in range(1025): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): instantiate() self.assert_specialized(instantiate, "CALL_ALLOC_AND_ENTER_INIT") @@ -549,13 +536,13 @@ def count_args(self, *args): MyClass.__init__.__code__ = count_args.__code__ instantiate() - @disabling_optimizer + @requires_jit_disabled @requires_specialization_ft def test_push_init_frame_fails(self): def instantiate(): return InitTakesArg() - for _ in range(2): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): with self.assertRaises(TypeError): instantiate() self.assert_specialized(instantiate, "CALL_ALLOC_AND_ENTER_INIT") @@ -580,10 +567,9 @@ class TestRacesDoNotCrash(TestBase): # but you can also burn through a *ton* of type/dict/function versions: ITEMS = 1000 LOOPS = 4 - WARMUPS = 2 WRITERS = 2 - @disabling_optimizer + @requires_jit_disabled def assert_races_do_not_crash( self, opname, get_items, read, write, *, check_items=False ): @@ -593,11 +579,11 @@ def assert_races_do_not_crash( # Reset: if check_items: for item in items: - item.__code__ = item.__code__.replace() + reset_code(item) else: - read.__code__ = read.__code__.replace() + reset_code(read) # Specialize: - for _ in range(self.WARMUPS): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): read(items) if check_items: for item in items: @@ -1025,7 +1011,7 @@ class C: item = C() item.a = None # Resize into a combined unicode dict: - for i in range(29): + for i in range(_testinternalcapi.SHARED_KEYS_MAX_SIZE - 1): setattr(item, f"_{i}", None) items.append(item) return items @@ -1096,7 +1082,7 @@ class C: for _ in range(self.ITEMS): item = C() # Resize into a combined unicode dict: - for i in range(29): + for i in range(_testinternalcapi.SHARED_KEYS_MAX_SIZE - 1): setattr(item, f"_{i}", None) items.append(item) return items @@ -1192,7 +1178,7 @@ def test_dict_dematerialization(self): c.a = 1 c.b = 2 c.__dict__ - for _ in range(100): + for _ in 
range(_testinternalcapi.SPECIALIZATION_THRESHOLD): c.a self.assertEqual( _testinternalcapi.get_object_dict_values(c), @@ -1204,7 +1190,7 @@ def test_dict_dematerialization_multiple_refs(self): c.a = 1 c.b = 2 d = c.__dict__ - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): c.a self.assertIs(c.__dict__, d) @@ -1213,7 +1199,7 @@ def test_dict_dematerialization_copy(self): c.a = 1 c.b = 2 c2 = copy.copy(c) - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): c.a c2.a self.assertEqual( @@ -1225,7 +1211,7 @@ def test_dict_dematerialization_copy(self): (1, 2, '') ) c3 = copy.deepcopy(c) - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): c.a c3.a self.assertEqual( @@ -1239,7 +1225,7 @@ def test_dict_dematerialization_pickle(self): c.a = 1 c.b = 2 c2 = pickle.loads(pickle.dumps(c)) - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): c.a c2.a self.assertEqual( @@ -1257,7 +1243,7 @@ class D(dict): pass c.a = 1 c.b = 2 c.__dict__ = D(c.__dict__) - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): c.a self.assertIs( _testinternalcapi.get_object_dict_values(c), @@ -1302,7 +1288,7 @@ def f(o, n): for i in range(n): o.b = i # Prime f to store to dict slot 1 - f(c, 100) + f(c, _testinternalcapi.SPECIALIZATION_THRESHOLD) test_obj = NoInlineAorB() test_obj.__dict__ = make_special_dict() @@ -1319,7 +1305,7 @@ class TestSpecializer(TestBase): @requires_specialization_ft def test_binary_op(self): def binary_op_add_int(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, b = 1, 2 c = a + b self.assertEqual(c, 3) @@ -1329,7 +1315,7 @@ def binary_op_add_int(): self.assert_no_opcode(binary_op_add_int, "BINARY_OP") def binary_op_add_unicode(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, b = "foo", "bar" c = a + b self.assertEqual(c, "foobar") @@ -1339,7 +1325,7 @@ def binary_op_add_unicode(): self.assert_no_opcode(binary_op_add_unicode, "BINARY_OP") def binary_op_add_extend(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, b = 6, 3.0 c = a + b self.assertEqual(c, 9.0) @@ -1398,11 +1384,13 @@ def compactlong_rhs(arg): arg / 42, ) nan = float('nan') - self.assertEqual(compactlong_lhs(1.0), (43.0, 41.0, 42.0, 42.0)) - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): + self.assertEqual(compactlong_lhs(1.0), (43.0, 41.0, 42.0, 42.0)) + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertTrue(all(filter(lambda x: x is nan, compactlong_lhs(nan)))) - self.assertEqual(compactlong_rhs(42.0), (84.0, 0.0, 84.0, 1.0)) - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): + self.assertEqual(compactlong_rhs(42.0), (84.0, 0.0, 84.0, 1.0)) + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): self.assertTrue(all(filter(lambda x: x is nan, compactlong_rhs(nan)))) self.assert_no_opcode(compactlong_lhs, "BINARY_OP_EXTEND") @@ -1410,6 +1398,29 @@ def compactlong_rhs(arg): binary_op_nan() + def binary_op_bitwise_extend(): + for _ in range(100): + a, b = 2, 7 + x = a | b + self.assertEqual(x, 7) + y = a & b + self.assertEqual(y, 2) + z = a ^ b + self.assertEqual(z, 5) + a, b = 3, 9 + a |= b + self.assertEqual(a, 11) + a, b = 11, 9 + a &= b + self.assertEqual(a, 9) + a, b = 3, 9 + a ^= b + self.assertEqual(a, 10) + + binary_op_bitwise_extend() + 
self.assert_specialized(binary_op_bitwise_extend, "BINARY_OP_EXTEND") + self.assert_no_opcode(binary_op_bitwise_extend, "BINARY_OP") + @cpython_only @requires_specialization_ft def test_load_super_attr(self): @@ -1420,7 +1431,7 @@ def __init__(self): meth = super().__init__ super().__init__() - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): A() self.assert_specialized(A.__init__, "LOAD_SUPER_ATTR_ATTR") @@ -1440,7 +1451,7 @@ def init(self): globals()['super'] = fake_super try: # Should be unspecialized after enough calls. - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_COOLDOWN): A() finally: globals()['super'] = real_super @@ -1453,7 +1464,7 @@ def init(self): @requires_specialization_ft def test_contain_op(self): def contains_op_dict(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, b = 1, {1: 2, 2: 5} self.assertTrue(a in b) self.assertFalse(3 in b) @@ -1463,7 +1474,7 @@ def contains_op_dict(): self.assert_no_opcode(contains_op_dict, "CONTAINS_OP") def contains_op_set(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, b = 1, {1, 2} self.assertTrue(a in b) self.assertFalse(3 in b) @@ -1490,7 +1501,7 @@ async def __aexit__(self, *exc): pass async def send_with(): - for i in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): async with CM(): x = 1 @@ -1508,7 +1519,7 @@ def g(): def send_yield_from(): yield from g() - for i in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): list(send_yield_from()) self.assert_specialized(send_yield_from, "SEND_GEN") @@ -1520,19 +1531,19 @@ def test_store_attr_slot(self): class C: __slots__ = ['x'] - def set_slot(): + def set_slot(n): c = C() - for i in range(100): + for i in range(n): c.x = i - set_slot() + set_slot(_testinternalcapi.SPECIALIZATION_THRESHOLD) self.assert_specialized(set_slot, "STORE_ATTR_SLOT") self.assert_no_opcode(set_slot, "STORE_ATTR") # Adding a property for 'x' should unspecialize it. C.x = property(lambda self: None, lambda self, x: None) - set_slot() + set_slot(_testinternalcapi.SPECIALIZATION_COOLDOWN) self.assert_no_opcode(set_slot, "STORE_ATTR_SLOT") @cpython_only @@ -1541,19 +1552,20 @@ def test_store_attr_instance_value(self): class C: pass - def set_value(): + @reset_code + def set_value(n): c = C() - for i in range(100): + for i in range(n): c.x = i - set_value() + set_value(_testinternalcapi.SPECIALIZATION_THRESHOLD) self.assert_specialized(set_value, "STORE_ATTR_INSTANCE_VALUE") self.assert_no_opcode(set_value, "STORE_ATTR") # Adding a property for 'x' should unspecialize it. C.x = property(lambda self: None, lambda self, x: None) - set_value() + set_value(_testinternalcapi.SPECIALIZATION_COOLDOWN) self.assert_no_opcode(set_value, "STORE_ATTR_INSTANCE_VALUE") @cpython_only @@ -1563,21 +1575,22 @@ class C: pass c = C() - for i in range(29): + for i in range(_testinternalcapi.SHARED_KEYS_MAX_SIZE - 1): setattr(c, f"_{i}", None) - def set_value(): - for i in range(100): + @reset_code + def set_value(n): + for i in range(n): c.x = i - set_value() + set_value(_testinternalcapi.SPECIALIZATION_THRESHOLD) self.assert_specialized(set_value, "STORE_ATTR_WITH_HINT") self.assert_no_opcode(set_value, "STORE_ATTR") # Adding a property for 'x' should unspecialize it. 
C.x = property(lambda self: None, lambda self, x: None) - set_value() + set_value(_testinternalcapi.SPECIALIZATION_COOLDOWN) self.assert_no_opcode(set_value, "STORE_ATTR_WITH_HINT") @cpython_only @@ -1585,14 +1598,15 @@ def set_value(): def test_to_bool(self): def to_bool_bool(): true_cnt, false_cnt = 0, 0 - elems = [e % 2 == 0 for e in range(100)] + elems = [e % 2 == 0 for e in range(_testinternalcapi.SPECIALIZATION_THRESHOLD)] for e in elems: if e: true_cnt += 1 else: false_cnt += 1 - self.assertEqual(true_cnt, 50) - self.assertEqual(false_cnt, 50) + d, m = divmod(_testinternalcapi.SPECIALIZATION_THRESHOLD, 2) + self.assertEqual(true_cnt, d + m) + self.assertEqual(false_cnt, d) to_bool_bool() self.assert_specialized(to_bool_bool, "TO_BOOL_BOOL") @@ -1600,12 +1614,12 @@ def to_bool_bool(): def to_bool_int(): count = 0 - for i in range(100): + for i in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): if i: count += 1 else: count -= 1 - self.assertEqual(count, 98) + self.assertEqual(count, _testinternalcapi.SPECIALIZATION_THRESHOLD - 2) to_bool_int() self.assert_specialized(to_bool_int, "TO_BOOL_INT") @@ -1613,11 +1627,11 @@ def to_bool_int(): def to_bool_list(): count = 0 - elems = [1, 2, 3] + elems = list(range(_testinternalcapi.SPECIALIZATION_THRESHOLD)) while elems: count += elems.pop() self.assertEqual(elems, []) - self.assertEqual(count, 6) + self.assertEqual(count, sum(range(_testinternalcapi.SPECIALIZATION_THRESHOLD))) to_bool_list() self.assert_specialized(to_bool_list, "TO_BOOL_LIST") @@ -1625,11 +1639,11 @@ def to_bool_list(): def to_bool_none(): count = 0 - elems = [None, None, None, None] + elems = [None] * _testinternalcapi.SPECIALIZATION_THRESHOLD for e in elems: if not e: count += 1 - self.assertEqual(count, len(elems)) + self.assertEqual(count, _testinternalcapi.SPECIALIZATION_THRESHOLD) to_bool_none() self.assert_specialized(to_bool_none, "TO_BOOL_NONE") @@ -1637,11 +1651,11 @@ def to_bool_none(): def to_bool_str(): count = 0 - elems = ["", "foo", ""] + elems = [""] + ["foo"] * (_testinternalcapi.SPECIALIZATION_THRESHOLD - 1) for e in elems: if e: count += 1 - self.assertEqual(count, 1) + self.assertEqual(count, _testinternalcapi.SPECIALIZATION_THRESHOLD - 1) to_bool_str() self.assert_specialized(to_bool_str, "TO_BOOL_STR") @@ -1651,7 +1665,7 @@ def to_bool_str(): @requires_specialization_ft def test_unpack_sequence(self): def unpack_sequence_two_tuple(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, b = 1, 2 self.assertEqual(a, 1) self.assertEqual(b, 2) @@ -1662,7 +1676,7 @@ def unpack_sequence_two_tuple(): self.assert_no_opcode(unpack_sequence_two_tuple, "UNPACK_SEQUENCE") def unpack_sequence_tuple(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, = 1, self.assertEqual(a, 1) @@ -1671,7 +1685,7 @@ def unpack_sequence_tuple(): self.assert_no_opcode(unpack_sequence_tuple, "UNPACK_SEQUENCE") def unpack_sequence_list(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, b = [1, 2] self.assertEqual(a, 1) self.assertEqual(b, 2) @@ -1684,7 +1698,7 @@ def unpack_sequence_list(): @requires_specialization_ft def test_binary_subscr(self): def binary_subscr_list_int(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a = [1, 2, 3] for idx, expected in enumerate(a): self.assertEqual(a[idx], expected) @@ -1695,7 +1709,7 @@ def binary_subscr_list_int(): self.assert_no_opcode(binary_subscr_list_int, "BINARY_SUBSCR") def 
binary_subscr_tuple_int(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a = (1, 2, 3) for idx, expected in enumerate(a): self.assertEqual(a[idx], expected) @@ -1706,7 +1720,7 @@ def binary_subscr_tuple_int(): self.assert_no_opcode(binary_subscr_tuple_int, "BINARY_SUBSCR") def binary_subscr_dict(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a = {1: 2, 2: 3} self.assertEqual(a[1], 2) self.assertEqual(a[2], 3) @@ -1716,7 +1730,7 @@ def binary_subscr_dict(): self.assert_no_opcode(binary_subscr_dict, "BINARY_SUBSCR") def binary_subscr_str_int(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a = "foobar" for idx, expected in enumerate(a): self.assertEqual(a[idx], expected) @@ -1732,8 +1746,8 @@ def __init__(self, val): def __getitem__(self, item): return self.val - items = [C(i) for i in range(100)] - for i in range(100): + items = [C(i) for i in range(_testinternalcapi.SPECIALIZATION_THRESHOLD)] + for i in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): self.assertEqual(items[i][i], i) binary_subscr_getitems() @@ -1744,7 +1758,7 @@ def __getitem__(self, item): @requires_specialization_ft def test_compare_op(self): def compare_op_int(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, b = 1, 2 c = a == b self.assertFalse(c) @@ -1754,7 +1768,7 @@ def compare_op_int(): self.assert_no_opcode(compare_op_int, "COMPARE_OP") def compare_op_float(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, b = 1.0, 2.0 c = a == b self.assertFalse(c) @@ -1764,7 +1778,7 @@ def compare_op_float(): self.assert_no_opcode(compare_op_float, "COMPARE_OP") def compare_op_str(): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): a, b = "spam", "ham" c = a == b self.assertFalse(c) @@ -1773,6 +1787,20 @@ def compare_op_str(): self.assert_specialized(compare_op_str, "COMPARE_OP_STR") self.assert_no_opcode(compare_op_str, "COMPARE_OP") + @cpython_only + @requires_specialization_ft + def test_load_const(self): + def load_const(): + def unused(): pass + # Currently, the empty tuple is immortal, and the otherwise + # unused nested function's code object is mortal. This test will + # have to use different values if either of that changes. 
+ return () + + load_const() + self.assert_specialized(load_const, "LOAD_CONST_IMMORTAL") + self.assert_specialized(load_const, "LOAD_CONST_MORTAL") + self.assert_no_opcode(load_const, "LOAD_CONST") if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_operator.py b/Lib/test/test_operator.py index 82578a0ef1e6f2..1757824580e416 100644 --- a/Lib/test/test_operator.py +++ b/Lib/test/test_operator.py @@ -666,6 +666,7 @@ class COperatorTestCase(OperatorTestCase, unittest.TestCase): module = c_operator +@support.thread_unsafe("swaps global operator module") class OperatorPickleTestCase: def copy(self, obj, proto): with support.swap_item(sys.modules, 'operator', self.module): diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py index 72208d1c582568..6e40cb4f58bfee 100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -264,6 +264,7 @@ def test_readinto(self): @unittest.skipUnless(hasattr(os, 'get_blocking'), 'needs os.get_blocking() and os.set_blocking()') @unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()") + @unittest.skipIf(support.is_emscripten, "set_blocking does not work correctly") def test_readinto_non_blocking(self): # Verify behavior of a readinto which would block on a non-blocking fd. r, w = os.pipe() diff --git a/Lib/test/test_pathlib/test_pathlib.py b/Lib/test/test_pathlib/test_pathlib.py index 866a2d07dd692a..d64092b710a4d6 100644 --- a/Lib/test/test_pathlib/test_pathlib.py +++ b/Lib/test/test_pathlib/test_pathlib.py @@ -438,6 +438,84 @@ def test_match_empty(self): self.assertRaises(ValueError, P('a').match, '') self.assertRaises(ValueError, P('a').match, '.') + def test_match_common(self): + P = self.cls + # Simple relative pattern. + self.assertTrue(P('b.py').match('b.py')) + self.assertTrue(P('a/b.py').match('b.py')) + self.assertTrue(P('/a/b.py').match('b.py')) + self.assertFalse(P('a.py').match('b.py')) + self.assertFalse(P('b/py').match('b.py')) + self.assertFalse(P('/a.py').match('b.py')) + self.assertFalse(P('b.py/c').match('b.py')) + # Wildcard relative pattern. + self.assertTrue(P('b.py').match('*.py')) + self.assertTrue(P('a/b.py').match('*.py')) + self.assertTrue(P('/a/b.py').match('*.py')) + self.assertFalse(P('b.pyc').match('*.py')) + self.assertFalse(P('b./py').match('*.py')) + self.assertFalse(P('b.py/c').match('*.py')) + # Multi-part relative pattern. + self.assertTrue(P('ab/c.py').match('a*/*.py')) + self.assertTrue(P('/d/ab/c.py').match('a*/*.py')) + self.assertFalse(P('a.py').match('a*/*.py')) + self.assertFalse(P('/dab/c.py').match('a*/*.py')) + self.assertFalse(P('ab/c.py/d').match('a*/*.py')) + # Absolute pattern. + self.assertTrue(P('/b.py').match('/*.py')) + self.assertFalse(P('b.py').match('/*.py')) + self.assertFalse(P('a/b.py').match('/*.py')) + self.assertFalse(P('/a/b.py').match('/*.py')) + # Multi-part absolute pattern. + self.assertTrue(P('/a/b.py').match('/a/*.py')) + self.assertFalse(P('/ab.py').match('/a/*.py')) + self.assertFalse(P('/a/b/c.py').match('/a/*.py')) + # Multi-part glob-style pattern. 
+ self.assertFalse(P('/a/b/c.py').match('/**/*.py')) + self.assertTrue(P('/a/b/c.py').match('/a/**/*.py')) + # Case-sensitive flag + self.assertFalse(P('A.py').match('a.PY', case_sensitive=True)) + self.assertTrue(P('A.py').match('a.PY', case_sensitive=False)) + self.assertFalse(P('c:/a/B.Py').match('C:/A/*.pY', case_sensitive=True)) + self.assertTrue(P('/a/b/c.py').match('/A/*/*.Py', case_sensitive=False)) + # Matching against empty path + self.assertFalse(P('').match('*')) + self.assertFalse(P('').match('**')) + self.assertFalse(P('').match('**/*')) + + @needs_posix + def test_match_posix(self): + P = self.cls + self.assertFalse(P('A.py').match('a.PY')) + + @needs_windows + def test_match_windows(self): + P = self.cls + # Absolute patterns. + self.assertTrue(P('c:/b.py').match('*:/*.py')) + self.assertTrue(P('c:/b.py').match('c:/*.py')) + self.assertFalse(P('d:/b.py').match('c:/*.py')) # wrong drive + self.assertFalse(P('b.py').match('/*.py')) + self.assertFalse(P('b.py').match('c:*.py')) + self.assertFalse(P('b.py').match('c:/*.py')) + self.assertFalse(P('c:b.py').match('/*.py')) + self.assertFalse(P('c:b.py').match('c:/*.py')) + self.assertFalse(P('/b.py').match('c:*.py')) + self.assertFalse(P('/b.py').match('c:/*.py')) + # UNC patterns. + self.assertTrue(P('//some/share/a.py').match('//*/*/*.py')) + self.assertTrue(P('//some/share/a.py').match('//some/share/*.py')) + self.assertFalse(P('//other/share/a.py').match('//some/share/*.py')) + self.assertFalse(P('//some/share/a/b.py').match('//some/share/*.py')) + # Case-insensitivity. + self.assertTrue(P('B.py').match('b.PY')) + self.assertTrue(P('c:/a/B.Py').match('C:/A/*.pY')) + self.assertTrue(P('//Some/Share/B.Py').match('//somE/sharE/*.pY')) + # Path anchor doesn't match pattern anchor + self.assertFalse(P('c:/b.py').match('/*.py')) # 'c:/' vs '/' + self.assertFalse(P('c:/b.py').match('c:*.py')) # 'c:/' vs 'c:' + self.assertFalse(P('//some/share/a.py').match('/*.py')) # '//some/share/' vs '/' + @needs_posix def test_parse_path_posix(self): check = self._check_parse_path diff --git a/Lib/test/test_pathlib/test_pathlib_abc.py b/Lib/test/test_pathlib/test_pathlib_abc.py index d60bb147b72971..e67bead4297829 100644 --- a/Lib/test/test_pathlib/test_pathlib_abc.py +++ b/Lib/test/test_pathlib/test_pathlib_abc.py @@ -296,88 +296,6 @@ def test_str_windows(self): p = self.cls('//a/b/c/d') self.assertEqual(str(p), '\\\\a\\b\\c\\d') - def test_match_empty(self): - P = self.cls - self.assertRaises(ValueError, P('a').match, '') - - def test_match_common(self): - P = self.cls - # Simple relative pattern. - self.assertTrue(P('b.py').match('b.py')) - self.assertTrue(P('a/b.py').match('b.py')) - self.assertTrue(P('/a/b.py').match('b.py')) - self.assertFalse(P('a.py').match('b.py')) - self.assertFalse(P('b/py').match('b.py')) - self.assertFalse(P('/a.py').match('b.py')) - self.assertFalse(P('b.py/c').match('b.py')) - # Wildcard relative pattern. - self.assertTrue(P('b.py').match('*.py')) - self.assertTrue(P('a/b.py').match('*.py')) - self.assertTrue(P('/a/b.py').match('*.py')) - self.assertFalse(P('b.pyc').match('*.py')) - self.assertFalse(P('b./py').match('*.py')) - self.assertFalse(P('b.py/c').match('*.py')) - # Multi-part relative pattern. - self.assertTrue(P('ab/c.py').match('a*/*.py')) - self.assertTrue(P('/d/ab/c.py').match('a*/*.py')) - self.assertFalse(P('a.py').match('a*/*.py')) - self.assertFalse(P('/dab/c.py').match('a*/*.py')) - self.assertFalse(P('ab/c.py/d').match('a*/*.py')) - # Absolute pattern. 
- self.assertTrue(P('/b.py').match('/*.py')) - self.assertFalse(P('b.py').match('/*.py')) - self.assertFalse(P('a/b.py').match('/*.py')) - self.assertFalse(P('/a/b.py').match('/*.py')) - # Multi-part absolute pattern. - self.assertTrue(P('/a/b.py').match('/a/*.py')) - self.assertFalse(P('/ab.py').match('/a/*.py')) - self.assertFalse(P('/a/b/c.py').match('/a/*.py')) - # Multi-part glob-style pattern. - self.assertFalse(P('/a/b/c.py').match('/**/*.py')) - self.assertTrue(P('/a/b/c.py').match('/a/**/*.py')) - # Case-sensitive flag - self.assertFalse(P('A.py').match('a.PY', case_sensitive=True)) - self.assertTrue(P('A.py').match('a.PY', case_sensitive=False)) - self.assertFalse(P('c:/a/B.Py').match('C:/A/*.pY', case_sensitive=True)) - self.assertTrue(P('/a/b/c.py').match('/A/*/*.Py', case_sensitive=False)) - # Matching against empty path - self.assertFalse(P('').match('*')) - self.assertFalse(P('').match('**')) - self.assertFalse(P('').match('**/*')) - - @needs_posix - def test_match_posix(self): - P = self.cls - self.assertFalse(P('A.py').match('a.PY')) - - @needs_windows - def test_match_windows(self): - P = self.cls - # Absolute patterns. - self.assertTrue(P('c:/b.py').match('*:/*.py')) - self.assertTrue(P('c:/b.py').match('c:/*.py')) - self.assertFalse(P('d:/b.py').match('c:/*.py')) # wrong drive - self.assertFalse(P('b.py').match('/*.py')) - self.assertFalse(P('b.py').match('c:*.py')) - self.assertFalse(P('b.py').match('c:/*.py')) - self.assertFalse(P('c:b.py').match('/*.py')) - self.assertFalse(P('c:b.py').match('c:/*.py')) - self.assertFalse(P('/b.py').match('c:*.py')) - self.assertFalse(P('/b.py').match('c:/*.py')) - # UNC patterns. - self.assertTrue(P('//some/share/a.py').match('//*/*/*.py')) - self.assertTrue(P('//some/share/a.py').match('//some/share/*.py')) - self.assertFalse(P('//other/share/a.py').match('//some/share/*.py')) - self.assertFalse(P('//some/share/a/b.py').match('//some/share/*.py')) - # Case-insensitivity. - self.assertTrue(P('B.py').match('b.PY')) - self.assertTrue(P('c:/a/B.Py').match('C:/A/*.pY')) - self.assertTrue(P('//Some/Share/B.Py').match('//somE/sharE/*.pY')) - # Path anchor doesn't match pattern anchor - self.assertFalse(P('c:/b.py').match('/*.py')) # 'c:/' vs '/' - self.assertFalse(P('c:/b.py').match('c:*.py')) # 'c:/' vs 'c:' - self.assertFalse(P('//some/share/a.py').match('/*.py')) # '//some/share/' vs '/' - def test_full_match_common(self): P = self.cls # Simple relative pattern. 
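The match() tests relocated above from test_pathlib_abc.py into test_pathlib.py pin down the documented matching rules: a relative pattern matches from the right-hand end of the path, an absolute pattern must cover the whole path including its anchor, and case sensitivity follows the path flavour unless overridden. A minimal illustrative sketch of those rules (not part of the patch; it only restates what the assertions above check):

from pathlib import PurePosixPath, PureWindowsPath

# Relative patterns match from the right-hand side of the path.
assert PurePosixPath('/a/b.py').match('b.py')
assert PurePosixPath('/a/b.py').match('*.py')

# Absolute patterns must match the entire path, anchor included.
assert not PurePosixPath('/a/b.py').match('/*.py')
assert PurePosixPath('/a/b.py').match('/a/*.py')

# Case sensitivity defaults to the flavour but can be overridden.
assert not PurePosixPath('A.py').match('a.PY')
assert PureWindowsPath('B.py').match('b.PY')
assert PurePosixPath('A.py').match('a.PY', case_sensitive=False)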
diff --git a/Lib/test/test_peepholer.py b/Lib/test/test_peepholer.py index b5b2b350e77a3b..9f2f9350d74661 100644 --- a/Lib/test/test_peepholer.py +++ b/Lib/test/test_peepholer.py @@ -473,6 +473,59 @@ def test_constant_folding(self): self.assertFalse(instr.opname.startswith('BUILD_')) self.check_lnotab(code) + def test_constant_folding_small_int(self): + tests = [ + # subscript + ('(0, )[0]', 0), + ('(1 + 2, )[0]', 3), + ('(2 + 2 * 2, )[0]', 6), + ('(1, (1 + 2 + 3, ))[1][0]', 6), + ('(255, )[0]', 255), + ('(256, )[0]', None), + ('(1000, )[0]', None), + ('(1 - 2, )[0]', None), + ] + for expr, oparg in tests: + with self.subTest(expr=expr, oparg=oparg): + code = compile(expr, '', 'single') + if oparg is not None: + self.assertInBytecode(code, 'LOAD_SMALL_INT', oparg) + else: + self.assertNotInBytecode(code, 'LOAD_SMALL_INT') + self.check_lnotab(code) + + def test_folding_subscript(self): + tests = [ + ('(1, )[0]', False), + ('(1, )[-1]', False), + ('(1 + 2, )[0]', False), + ('(1, (1, 2))[1][1]', False), + ('(1, 2)[2-1]', False), + ('(1, (1, 2))[1][2-1]', False), + ('(1, (1, 2))[1:6][0][2-1]', False), + ('"a"[0]', False), + ('("a" + "b")[1]', False), + ('("a" + "b", )[0][1]', False), + ('("a" * 10)[9]', False), + ('(1, )[1]', True), + ('(1, )[-2]', True), + ('"a"[1]', True), + ('"a"[-2]', True), + ('("a" + "b")[2]', True), + ('("a" + "b", )[0][2]', True), + ('("a" + "b", )[1][0]', True), + ('("a" * 10)[10]', True), + ('(1, (1, 2))[2:6][0][2-1]', True), + ] + for expr, has_error in tests: + with self.subTest(expr=expr, has_error=has_error): + code = compile(expr, '', 'single') + if not has_error: + self.assertNotInBytecode(code, 'BINARY_SUBSCR') + else: + self.assertInBytecode(code, 'BINARY_SUBSCR') + self.check_lnotab(code) + def test_in_literal_list(self): def containtest(): return x in [a, b] diff --git a/Lib/test/test_pydoc/module_none.py b/Lib/test/test_pydoc/module_none.py new file mode 100644 index 00000000000000..ebb50fc86e2cf7 --- /dev/null +++ b/Lib/test/test_pydoc/module_none.py @@ -0,0 +1,8 @@ +def func(): + pass +func.__module__ = None + +class A: + def method(self): + pass + method.__module__ = None diff --git a/Lib/test/test_pydoc/test_pydoc.py b/Lib/test/test_pydoc/test_pydoc.py index b02ba3aafd4d20..0abd36c5e076e2 100644 --- a/Lib/test/test_pydoc/test_pydoc.py +++ b/Lib/test/test_pydoc/test_pydoc.py @@ -1903,6 +1903,11 @@ def a_fn_with_https_link(): html ) + def test_module_none(self): + # Issue #128772 + from test.test_pydoc import module_none + pydoc.render_doc(module_none) + class PydocFodderTest(unittest.TestCase): def tearDown(self): diff --git a/Lib/test/test_pyrepl/test_reader.py b/Lib/test/test_pyrepl/test_reader.py index 863ecc61ddd432..27c6d6664eda9e 100644 --- a/Lib/test/test_pyrepl/test_reader.py +++ b/Lib/test/test_pyrepl/test_reader.py @@ -4,7 +4,7 @@ from unittest import TestCase from unittest.mock import MagicMock -from .support import handle_all_events, handle_events_narrow_console, code_to_events, prepare_reader +from .support import handle_all_events, handle_events_narrow_console, code_to_events, prepare_reader, prepare_console from _pyrepl.console import Event from _pyrepl.reader import Reader @@ -312,3 +312,10 @@ def test_key_press_on_tab_press_once(self): reader, _ = handle_all_events(events, prepare_reader=completing_reader) self.assert_screen_equals(reader, f"{code}a") + + def test_pos2xy_with_no_columns(self): + console = prepare_console([]) + reader = prepare_reader(console) + # Simulate a resize to 0 columns + reader.screeninfo = [] + 
self.assertEqual(reader.pos2xy(), (0, 0)) diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py index e9ef830c848aad..969f483814d08d 100644 --- a/Lib/test/test_regrtest.py +++ b/Lib/test/test_regrtest.py @@ -1185,7 +1185,7 @@ def test_run(self): stats=TestStats(4, 1), forever=True) - @support.without_optimizer + @support.requires_jit_disabled def check_leak(self, code, what, *, run_workers=False): test = self.create_test('huntrleaks', code=code) diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index 1f18b1f09b5858..078ddd6c431b37 100644 --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -3239,12 +3239,8 @@ def test_filesystem_full(self): self.assertRaises(OSError, self.zerocopy_fun, src, dst) -@unittest.skipIf(not SUPPORTS_SENDFILE, 'os.sendfile() not supported') -class TestZeroCopySendfile(_ZeroCopyFileTest, unittest.TestCase): - PATCHPOINT = "os.sendfile" - - def zerocopy_fun(self, fsrc, fdst): - return shutil._fastcopy_sendfile(fsrc, fdst) +class _ZeroCopyFileLinuxTest(_ZeroCopyFileTest): + BLOCKSIZE_INDEX = None def test_non_regular_file_src(self): with io.BytesIO(self.FILEDATA) as src: @@ -3265,65 +3261,65 @@ def test_non_regular_file_dst(self): self.assertEqual(dst.read(), self.FILEDATA) def test_exception_on_second_call(self): - def sendfile(*args, **kwargs): + def syscall(*args, **kwargs): if not flag: flag.append(None) - return orig_sendfile(*args, **kwargs) + return orig_syscall(*args, **kwargs) else: raise OSError(errno.EBADF, "yo") flag = [] - orig_sendfile = os.sendfile - with unittest.mock.patch('os.sendfile', create=True, - side_effect=sendfile): + orig_syscall = eval(self.PATCHPOINT) + with unittest.mock.patch(self.PATCHPOINT, create=True, + side_effect=syscall): with self.get_files() as (src, dst): with self.assertRaises(OSError) as cm: - shutil._fastcopy_sendfile(src, dst) + self.zerocopy_fun(src, dst) assert flag self.assertEqual(cm.exception.errno, errno.EBADF) def test_cant_get_size(self): # Emulate a case where src file size cannot be determined. # Internally bufsize will be set to a small value and - # sendfile() will be called repeatedly. + # a system call will be called repeatedly. with unittest.mock.patch('os.fstat', side_effect=OSError) as m: with self.get_files() as (src, dst): - shutil._fastcopy_sendfile(src, dst) + self.zerocopy_fun(src, dst) assert m.called self.assertEqual(read_file(TESTFN2, binary=True), self.FILEDATA) def test_small_chunks(self): # Force internal file size detection to be smaller than the - # actual file size. We want to force sendfile() to be called + # actual file size. We want to force a system call to be called # multiple times, also in order to emulate a src fd which gets # bigger while it is being copied. mock = unittest.mock.Mock() mock.st_size = 65536 + 1 with unittest.mock.patch('os.fstat', return_value=mock) as m: with self.get_files() as (src, dst): - shutil._fastcopy_sendfile(src, dst) + self.zerocopy_fun(src, dst) assert m.called self.assertEqual(read_file(TESTFN2, binary=True), self.FILEDATA) def test_big_chunk(self): # Force internal file size detection to be +100MB bigger than - # the actual file size. Make sure sendfile() does not rely on + # the actual file size. Make sure a system call does not rely on # file size value except for (maybe) a better throughput / # performance. 
mock = unittest.mock.Mock() mock.st_size = self.FILESIZE + (100 * 1024 * 1024) with unittest.mock.patch('os.fstat', return_value=mock) as m: with self.get_files() as (src, dst): - shutil._fastcopy_sendfile(src, dst) + self.zerocopy_fun(src, dst) assert m.called self.assertEqual(read_file(TESTFN2, binary=True), self.FILEDATA) def test_blocksize_arg(self): - with unittest.mock.patch('os.sendfile', + with unittest.mock.patch(self.PATCHPOINT, side_effect=ZeroDivisionError) as m: self.assertRaises(ZeroDivisionError, shutil.copyfile, TESTFN, TESTFN2) - blocksize = m.call_args[0][3] + blocksize = m.call_args[0][self.BLOCKSIZE_INDEX] # Make sure file size and the block size arg passed to # sendfile() are the same. self.assertEqual(blocksize, os.path.getsize(TESTFN)) @@ -3333,9 +3329,19 @@ def test_blocksize_arg(self): self.addCleanup(os_helper.unlink, TESTFN2 + '3') self.assertRaises(ZeroDivisionError, shutil.copyfile, TESTFN2, TESTFN2 + '3') - blocksize = m.call_args[0][3] + blocksize = m.call_args[0][self.BLOCKSIZE_INDEX] self.assertEqual(blocksize, 2 ** 23) + +@unittest.skipIf(not SUPPORTS_SENDFILE, 'os.sendfile() not supported') +@unittest.mock.patch.object(shutil, "_USE_CP_COPY_FILE_RANGE", False) +class TestZeroCopySendfile(_ZeroCopyFileLinuxTest, unittest.TestCase): + PATCHPOINT = "os.sendfile" + BLOCKSIZE_INDEX = 3 + + def zerocopy_fun(self, fsrc, fdst): + return shutil._fastcopy_sendfile(fsrc, fdst) + def test_file2file_not_supported(self): # Emulate a case where sendfile() only support file->socket # fds. In such a case copyfile() is supposed to skip the @@ -3358,6 +3364,29 @@ def test_file2file_not_supported(self): shutil._USE_CP_SENDFILE = True +@unittest.skipUnless(shutil._USE_CP_COPY_FILE_RANGE, "os.copy_file_range() not supported") +class TestZeroCopyCopyFileRange(_ZeroCopyFileLinuxTest, unittest.TestCase): + PATCHPOINT = "os.copy_file_range" + BLOCKSIZE_INDEX = 2 + + def zerocopy_fun(self, fsrc, fdst): + return shutil._fastcopy_copy_file_range(fsrc, fdst) + + def test_empty_file(self): + srcname = f"{TESTFN}src" + dstname = f"{TESTFN}dst" + self.addCleanup(lambda: os_helper.unlink(srcname)) + self.addCleanup(lambda: os_helper.unlink(dstname)) + with open(srcname, "wb"): + pass + + with open(srcname, "rb") as src, open(dstname, "wb") as dst: + # _fastcopy_copy_file_range gives up copying empty files due + # to a bug in older Linux. 
+ with self.assertRaises(shutil._GiveupOnFastCopy): + self.zerocopy_fun(src, dst) + + @unittest.skipIf(not MACOS, 'macOS only') class TestZeroCopyMACOS(_ZeroCopyFileTest, unittest.TestCase): PATCHPOINT = "posix._fcopyfile" diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py index 96ae79b0eb18b3..72a01cd1e451f4 100644 --- a/Lib/test/test_signal.py +++ b/Lib/test/test_signal.py @@ -383,7 +383,7 @@ def handler(signum, frame): except ZeroDivisionError: # An ignored exception should have been printed out on stderr err = err.getvalue() - if ('Exception ignored when trying to write to the signal wakeup fd' + if ('Exception ignored while trying to write to the signal wakeup fd' not in err): raise AssertionError(err) if ('OSError: [Errno %d]' % errno.EBADF) not in err: @@ -572,7 +572,7 @@ def handler(signum, frame): signal.raise_signal(signum) err = err.getvalue() - if ('Exception ignored when trying to {action} to the signal wakeup fd' + if ('Exception ignored while trying to {action} to the signal wakeup fd' not in err): raise AssertionError(err) """.format(action=action) @@ -642,7 +642,7 @@ def handler(signum, frame): "buffer" % written) # By default, we get a warning when a signal arrives - msg = ('Exception ignored when trying to {action} ' + msg = ('Exception ignored while trying to {action} ' 'to the signal wakeup fd') signal.set_wakeup_fd(write.fileno()) diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index faf326d9164e1b..b77fa3cb21512a 100644 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -520,6 +520,8 @@ def clientTearDown(self): @unittest.skipIf(WSL, 'VSOCK does not work on Microsoft WSL') @unittest.skipUnless(HAVE_SOCKET_VSOCK, 'VSOCK sockets required for this test.') +@unittest.skipUnless(get_cid() != 2, # VMADDR_CID_HOST + "This test can only be run on a virtual guest.") class ThreadedVSOCKSocketStreamTest(unittest.TestCase, ThreadableTest): def __init__(self, methodName='runTest'): diff --git a/Lib/test/test_sqlite3/test_hooks.py b/Lib/test/test_sqlite3/test_hooks.py index 49e72f8fcfbcbd..53b8a39bf29a75 100644 --- a/Lib/test/test_sqlite3/test_hooks.py +++ b/Lib/test/test_sqlite3/test_hooks.py @@ -196,7 +196,7 @@ def progress(): con.execute("select 1 union select 2 union select 3").fetchall() self.assertEqual(action, 0, "progress handler was not cleared") - @with_tracebacks(ZeroDivisionError, name="bad_progress") + @with_tracebacks(ZeroDivisionError, msg_regex="bad_progress") def test_error_in_progress_handler(self): def bad_progress(): 1 / 0 @@ -206,7 +206,7 @@ def bad_progress(): create table foo(a, b) """) - @with_tracebacks(ZeroDivisionError, name="bad_progress") + @with_tracebacks(ZeroDivisionError, msg_regex="bad_progress") def test_error_in_progress_handler_result(self): class BadBool: def __bool__(self): diff --git a/Lib/test/test_sqlite3/test_userfunctions.py b/Lib/test/test_sqlite3/test_userfunctions.py index c6c3db159add64..5bb2eff55ebc8f 100644 --- a/Lib/test/test_sqlite3/test_userfunctions.py +++ b/Lib/test/test_sqlite3/test_userfunctions.py @@ -254,7 +254,7 @@ def test_func_return_nan(self): cur.execute("select returnnan()") self.assertIsNone(cur.fetchone()[0]) - @with_tracebacks(ZeroDivisionError, name="func_raiseexception") + @with_tracebacks(ZeroDivisionError, msg_regex="func_raiseexception") def test_func_exception(self): cur = self.con.cursor() with self.assertRaises(sqlite.OperationalError) as cm: @@ -262,14 +262,14 @@ def test_func_exception(self): cur.fetchone() self.assertEqual(str(cm.exception), 'user-defined function 
raised exception') - @with_tracebacks(MemoryError, name="func_memoryerror") + @with_tracebacks(MemoryError, msg_regex="func_memoryerror") def test_func_memory_error(self): cur = self.con.cursor() with self.assertRaises(MemoryError): cur.execute("select memoryerror()") cur.fetchone() - @with_tracebacks(OverflowError, name="func_overflowerror") + @with_tracebacks(OverflowError, msg_regex="func_overflowerror") def test_func_overflow_error(self): cur = self.con.cursor() with self.assertRaises(sqlite.DataError): @@ -389,7 +389,7 @@ def test_func_return_too_large_int(self): with self.assertRaisesRegex(sqlite.DataError, msg): cur.execute("select largeint()") - @with_tracebacks(UnicodeEncodeError, "surrogates not allowed", "chr") + @with_tracebacks(UnicodeEncodeError, "surrogates not allowed") def test_func_return_text_with_surrogates(self): cur = self.con.cursor() self.con.create_function("pychr", 1, chr) @@ -641,7 +641,7 @@ def test_aggr_error_on_create(self): with self.assertRaises(sqlite.OperationalError): self.con.create_function("bla", -100, AggrSum) - @with_tracebacks(AttributeError, name="AggrNoStep") + @with_tracebacks(AttributeError, msg_regex="AggrNoStep") def test_aggr_no_step(self): cur = self.con.cursor() with self.assertRaises(sqlite.OperationalError) as cm: @@ -656,7 +656,7 @@ def test_aggr_no_finalize(self): cur.execute("select nofinalize(t) from test") val = cur.fetchone()[0] - @with_tracebacks(ZeroDivisionError, name="AggrExceptionInInit") + @with_tracebacks(ZeroDivisionError, msg_regex="AggrExceptionInInit") def test_aggr_exception_in_init(self): cur = self.con.cursor() with self.assertRaises(sqlite.OperationalError) as cm: @@ -664,7 +664,7 @@ def test_aggr_exception_in_init(self): val = cur.fetchone()[0] self.assertEqual(str(cm.exception), "user-defined aggregate's '__init__' method raised error") - @with_tracebacks(ZeroDivisionError, name="AggrExceptionInStep") + @with_tracebacks(ZeroDivisionError, msg_regex="AggrExceptionInStep") def test_aggr_exception_in_step(self): cur = self.con.cursor() with self.assertRaises(sqlite.OperationalError) as cm: @@ -672,7 +672,7 @@ def test_aggr_exception_in_step(self): val = cur.fetchone()[0] self.assertEqual(str(cm.exception), "user-defined aggregate's 'step' method raised error") - @with_tracebacks(ZeroDivisionError, name="AggrExceptionInFinalize") + @with_tracebacks(ZeroDivisionError, msg_regex="AggrExceptionInFinalize") def test_aggr_exception_in_finalize(self): cur = self.con.cursor() with self.assertRaises(sqlite.OperationalError) as cm: @@ -822,11 +822,11 @@ def authorizer_cb(action, arg1, arg2, dbname, source): raise ValueError return sqlite.SQLITE_OK - @with_tracebacks(ValueError, name="authorizer_cb") + @with_tracebacks(ValueError, msg_regex="authorizer_cb") def test_table_access(self): super().test_table_access() - @with_tracebacks(ValueError, name="authorizer_cb") + @with_tracebacks(ValueError, msg_regex="authorizer_cb") def test_column_access(self): super().test_table_access() diff --git a/Lib/test/test_sqlite3/util.py b/Lib/test/test_sqlite3/util.py index 5599823838beea..8643835cca46e2 100644 --- a/Lib/test/test_sqlite3/util.py +++ b/Lib/test/test_sqlite3/util.py @@ -22,15 +22,16 @@ def cx_limit(cx, category=sqlite3.SQLITE_LIMIT_SQL_LENGTH, limit=128): cx.setlimit(category, _prev) -def with_tracebacks(exc, regex="", name=""): +def with_tracebacks(exc, regex="", name="", msg_regex=""): """Convenience decorator for testing callback tracebacks.""" def decorator(func): - _regex = re.compile(regex) if regex else None + exc_regex = 
re.compile(regex) if regex else None + _msg_regex = re.compile(msg_regex) if msg_regex else None @functools.wraps(func) def wrapper(self, *args, **kwargs): with test.support.catch_unraisable_exception() as cm: # First, run the test with traceback enabled. - with check_tracebacks(self, cm, exc, _regex, name): + with check_tracebacks(self, cm, exc, exc_regex, _msg_regex, name): func(self, *args, **kwargs) # Then run the test with traceback disabled. @@ -40,7 +41,7 @@ def wrapper(self, *args, **kwargs): @contextlib.contextmanager -def check_tracebacks(self, cm, exc, regex, obj_name): +def check_tracebacks(self, cm, exc, exc_regex, msg_regex, obj_name): """Convenience context manager for testing callback tracebacks.""" sqlite3.enable_callback_tracebacks(True) try: @@ -49,9 +50,12 @@ def check_tracebacks(self, cm, exc, regex, obj_name): yield self.assertEqual(cm.unraisable.exc_type, exc) - if regex: + if exc_regex: msg = str(cm.unraisable.exc_value) - self.assertIsNotNone(regex.search(msg)) + self.assertIsNotNone(exc_regex.search(msg), (exc_regex, msg)) + if msg_regex: + msg = cm.unraisable.err_msg + self.assertIsNotNone(msg_regex.search(msg), (msg_regex, msg)) if obj_name: self.assertEqual(cm.unraisable.object.__name__, obj_name) finally: diff --git a/Lib/test/test_string.py b/Lib/test/test_string.py index 824b89ad517c12..f6d112d8a93ec4 100644 --- a/Lib/test/test_string.py +++ b/Lib/test/test_string.py @@ -1,6 +1,7 @@ import unittest import string from string import Template +import types class ModuleTest(unittest.TestCase): @@ -101,6 +102,24 @@ def test_index_lookup(self): with self.assertRaises(KeyError): fmt.format("{0[2]}{0[0]}", {}) + def test_auto_numbering_lookup(self): + fmt = string.Formatter() + namespace = types.SimpleNamespace(foo=types.SimpleNamespace(bar='baz')) + widths = [None, types.SimpleNamespace(qux=4)] + self.assertEqual( + fmt.format("{.foo.bar:{[1].qux}}", namespace, widths), 'baz ') + + def test_auto_numbering_reenterability(self): + class ReenteringFormatter(string.Formatter): + def format_field(self, value, format_spec): + if format_spec.isdigit() and int(format_spec) > 0: + return self.format('{:{}}!', value, int(format_spec) - 1) + else: + return super().format_field(value, format_spec) + fmt = ReenteringFormatter() + x = types.SimpleNamespace(a='X') + self.assertEqual(fmt.format('{.a:{}}', x, 3), 'X!!!') + def test_override_get_value(self): class NamespaceFormatter(string.Formatter): def __init__(self, namespace={}): diff --git a/Lib/test/test_struct.py b/Lib/test/test_struct.py index 5fee9fbb92acf4..b99391e482ff70 100644 --- a/Lib/test/test_struct.py +++ b/Lib/test/test_struct.py @@ -694,7 +694,7 @@ def __del__(self): rc, stdout, stderr = assert_python_ok("-c", code) self.assertEqual(rc, 0) self.assertEqual(stdout.rstrip(), b"") - self.assertIn(b"Exception ignored in:", stderr) + self.assertIn(b"Exception ignored while calling deallocator", stderr) self.assertIn(b"C.__del__", stderr) def test__struct_reference_cycle_cleaned_up(self): diff --git a/Lib/test/test_super.py b/Lib/test/test_super.py index 149016635522c3..5cef612a340be9 100644 --- a/Lib/test/test_super.py +++ b/Lib/test/test_super.py @@ -9,9 +9,6 @@ from test.support import import_helper, threading_helper -ADAPTIVE_WARMUP_DELAY = 2 - - class A: def f(self): return 'A' @@ -466,7 +463,8 @@ def test(name): super(MyType, type(mytype)).__setattr__(mytype, "bar", 1) self.assertEqual(mytype.bar, 1) - for _ in range(ADAPTIVE_WARMUP_DELAY): + _testinternalcapi = import_helper.import_module("_testinternalcapi") + 
for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): test("foo1") def test_reassigned_new(self): @@ -485,7 +483,8 @@ class C(B): def __new__(cls): return super().__new__(cls) - for _ in range(ADAPTIVE_WARMUP_DELAY): + _testinternalcapi = import_helper.import_module("_testinternalcapi") + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): C() def test_mixed_staticmethod_hierarchy(self): @@ -505,7 +504,8 @@ class C(B): def some(cls): return super().some(cls) - for _ in range(ADAPTIVE_WARMUP_DELAY): + _testinternalcapi = import_helper.import_module("_testinternalcapi") + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): C.some(C) @threading_helper.requires_working_threading() diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py index 1002d90074599a..3738914cf17de8 100644 --- a/Lib/test/test_sysconfig.py +++ b/Lib/test/test_sysconfig.py @@ -20,7 +20,7 @@ from test.support.import_helper import import_module from test.support.os_helper import (TESTFN, unlink, skip_unless_symlink, change_cwd) -from test.support.venv import VirtualEnvironment +from test.support.venv import VirtualEnvironmentMixin import sysconfig from sysconfig import (get_paths, get_platform, get_config_vars, @@ -37,7 +37,7 @@ HAS_USER_BASE = sysconfig._HAS_USER_BASE -class TestSysConfig(unittest.TestCase): +class TestSysConfig(unittest.TestCase, VirtualEnvironmentMixin): def setUp(self): super(TestSysConfig, self).setUp() @@ -111,13 +111,6 @@ def _cleanup_testfn(self): elif os.path.isdir(path): shutil.rmtree(path) - def venv(self, **venv_create_args): - return VirtualEnvironment.from_tmpdir( - prefix=f'{self.id()}-venv-', - **venv_create_args, - ) - - def test_get_path_names(self): self.assertEqual(get_path_names(), sysconfig._SCHEME_KEYS) @@ -650,8 +643,21 @@ def test_sysconfigdata_json(self): system_config_vars = get_config_vars() - # Ignore keys in the check - for key in ('projectbase', 'srcdir'): + ignore_keys = set() + # Keys dependent on Python being run outside the build directrory + if sysconfig.is_python_build(): + ignore_keys |= {'srcdir'} + # Keys dependent on the executable location + if os.path.dirname(sys.executable) != system_config_vars['BINDIR']: + ignore_keys |= {'projectbase'} + # Keys dependent on the environment (different inside virtual environments) + if sys.prefix != sys.base_prefix: + ignore_keys |= {'prefix', 'exec_prefix', 'base', 'platbase'} + # Keys dependent on Python being run from the prefix targetted when building (different on relocatable installs) + if sysconfig._installation_is_relocated(): + ignore_keys |= {'prefix', 'exec_prefix', 'base', 'platbase', 'installed_base', 'installed_platbase'} + + for key in ignore_keys: json_config_vars.pop(key) system_config_vars.pop(key) diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py index 480bff743a9f8a..52d3341975088b 100644 --- a/Lib/test/test_tokenize.py +++ b/Lib/test/test_tokenize.py @@ -1538,6 +1538,7 @@ def test_false_encoding(self): self.assertEqual(encoding, 'utf-8') self.assertEqual(consumed_lines, [b'print("#coding=fake")']) + @support.thread_unsafe def test_open(self): filename = os_helper.TESTFN + '.py' self.addCleanup(os_helper.unlink, filename) diff --git a/Lib/test/test_type_cache.py b/Lib/test/test_type_cache.py index e109a65741309a..ee64f89358ed55 100644 --- a/Lib/test/test_type_cache.py +++ b/Lib/test/test_type_cache.py @@ -131,7 +131,7 @@ def _all_opnames(self, func): return set(instr.opname for instr in dis.Bytecode(func, adaptive=True)) def _check_specialization(self, func, 
arg, opname, *, should_specialize): - for _ in range(100): + for _ in range(_testinternalcapi.SPECIALIZATION_THRESHOLD): func(arg) if should_specialize: diff --git a/Lib/test/test_unparse.py b/Lib/test/test_unparse.py index 332919540da4d6..f6c4f1f3f6476a 100644 --- a/Lib/test/test_unparse.py +++ b/Lib/test/test_unparse.py @@ -513,11 +513,13 @@ def test_class_bases_and_keywords(self): self.check_src_roundtrip("class X(*args, **kwargs):\n pass") def test_fstrings(self): - self.check_src_roundtrip("f'-{f'*{f'+{f'.{x}.'}+'}*'}-'") - self.check_src_roundtrip("f'\\u2028{'x'}'") + self.check_src_roundtrip('''f\'\'\'-{f"""*{f"+{f'.{x}.'}+"}*"""}-\'\'\'''') + self.check_src_roundtrip('''f\'-{f\'\'\'*{f"""+{f".{f'{x}'}."}+"""}*\'\'\'}-\'''') + self.check_src_roundtrip('''f\'-{f\'*{f\'\'\'+{f""".{f"{f'{x}'}"}."""}+\'\'\'}*\'}-\'''') + self.check_src_roundtrip('''f"\\u2028{'x'}"''') self.check_src_roundtrip(r"f'{x}\n'") - self.check_src_roundtrip("f'{'\\n'}\\n'") - self.check_src_roundtrip("f'{f'{x}\\n'}\\n'") + self.check_src_roundtrip('''f"{'\\n'}\\n"''') + self.check_src_roundtrip('''f"{f'{x}\\n'}\\n"''') def test_docstrings(self): docstrings = ( diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py index 4516bdea6adb19..b51cc006b73280 100644 --- a/Lib/test/test_urlparse.py +++ b/Lib/test/test_urlparse.py @@ -1412,16 +1412,51 @@ def test_invalid_bracketed_hosts(self): self.assertRaises(ValueError, urllib.parse.urlsplit, 'Scheme://user@[0439:23af::2309::fae7:1234]/Path?Query') self.assertRaises(ValueError, urllib.parse.urlsplit, 'Scheme://user@[0439:23af:2309::fae7:1234:2342:438e:192.0.2.146]/Path?Query') self.assertRaises(ValueError, urllib.parse.urlsplit, 'Scheme://user@]v6a.ip[/Path') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[v6a.ip]') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[v6a.ip].suffix') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[v6a.ip]/') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[v6a.ip].suffix/') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[v6a.ip]?') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[v6a.ip].suffix?') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]/') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix/') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]?') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix?') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]:a') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix:a') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]:a1') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix:a1') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]:1a') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix:1a') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]:') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[::1].suffix:/') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[::1]:?') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://user@prefix.[v6a.ip]') + 
self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://user@[v6a.ip].suffix') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://[v6a.ip') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://v6a.ip]') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://]v6a.ip[') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://]v6a.ip') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://v6a.ip[') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix.[v6a.ip') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://v6a.ip].suffix') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix]v6a.ip[suffix') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://prefix]v6a.ip') + self.assertRaises(ValueError, urllib.parse.urlsplit, 'scheme://v6a.ip[suffix') def test_splitting_bracketed_hosts(self): - p1 = urllib.parse.urlsplit('scheme://user@[v6a.ip]/path?query') + p1 = urllib.parse.urlsplit('scheme://user@[v6a.ip]:1234/path?query') self.assertEqual(p1.hostname, 'v6a.ip') self.assertEqual(p1.username, 'user') self.assertEqual(p1.path, '/path') + self.assertEqual(p1.port, 1234) p2 = urllib.parse.urlsplit('scheme://user@[0439:23af:2309::fae7%test]/path?query') self.assertEqual(p2.hostname, '0439:23af:2309::fae7%test') self.assertEqual(p2.username, 'user') self.assertEqual(p2.path, '/path') + self.assertIs(p2.port, None) p3 = urllib.parse.urlsplit('scheme://user@[0439:23af:2309::fae7:1234:192.0.2.146%test]/path?query') self.assertEqual(p3.hostname, '0439:23af:2309::fae7:1234:192.0.2.146%test') self.assertEqual(p3.username, 'user') diff --git a/Lib/test/test_uuid.py b/Lib/test/test_uuid.py index 74c8e2838efc8f..8216c4dd00e35a 100755 --- a/Lib/test/test_uuid.py +++ b/Lib/test/test_uuid.py @@ -21,7 +21,7 @@ def importable(name): try: __import__(name) return True - except: + except ModuleNotFoundError: return False diff --git a/Lib/threading.py b/Lib/threading.py index 78e591124278fc..da9cdf0b09d83c 100644 --- a/Lib/threading.py +++ b/Lib/threading.py @@ -3,7 +3,6 @@ import os as _os import sys as _sys import _thread -import warnings from time import monotonic as _time from _weakrefset import WeakSet @@ -133,6 +132,7 @@ def RLock(*args, **kwargs): """ if args or kwargs: + import warnings warnings.warn( 'Passing arguments to RLock is deprecated and will be removed in 3.15', DeprecationWarning, @@ -694,7 +694,7 @@ def __init__(self, parties, action=None, timeout=None): """ if parties < 1: - raise ValueError("parties must be > 0") + raise ValueError("parties must be >= 1") self._cond = Condition(Lock()) self._action = action self._timeout = timeout diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py index c412c729852272..9d51f4c6812b57 100644 --- a/Lib/urllib/parse.py +++ b/Lib/urllib/parse.py @@ -439,6 +439,23 @@ def _checknetloc(netloc): raise ValueError("netloc '" + netloc + "' contains invalid " + "characters under NFKC normalization") +def _check_bracketed_netloc(netloc): + # Note that this function must mirror the splitting + # done in NetlocResultMixins._hostinfo(). + hostname_and_port = netloc.rpartition('@')[2] + before_bracket, have_open_br, bracketed = hostname_and_port.partition('[') + if have_open_br: + # No data is allowed before a bracket. + if before_bracket: + raise ValueError("Invalid IPv6 URL") + hostname, _, port = bracketed.partition(']') + # No data is allowed after the bracket but before the port delimiter. 
+ if port and not port.startswith(":"): + raise ValueError("Invalid IPv6 URL") + else: + hostname, _, port = hostname_and_port.partition(':') + _check_bracketed_host(hostname) + # Valid bracketed hosts are defined in # https://www.rfc-editor.org/rfc/rfc3986#page-49 and https://url.spec.whatwg.org/ def _check_bracketed_host(hostname): @@ -505,8 +522,7 @@ def _urlsplit(url, scheme=None, allow_fragments=True): (']' in netloc and '[' not in netloc)): raise ValueError("Invalid IPv6 URL") if '[' in netloc and ']' in netloc: - bracketed_host = netloc.partition('[')[2].partition(']')[0] - _check_bracketed_host(bracketed_host) + _check_bracketed_netloc(netloc) if allow_fragments and '#' in url: url, fragment = url.split('#', 1) if '?' in url: diff --git a/Misc/ACKS b/Misc/ACKS index 4901609a178bc3..47c8d2b40aafb7 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -189,6 +189,7 @@ Stéphane Blondon Eric Blossom Sergey Bobrov Finn Bock +VojtÄ›ch BoÄek Paul Boddie Matthew Boedicker Robin Boerdijk @@ -1971,6 +1972,7 @@ Johannes Vogel Michael Vogt Radu Voicilas Alex Volkov +Illia Volochii Ruben Vorderman Guido Vranken Martijn Vries diff --git a/Misc/NEWS.d/3.11.0a4.rst b/Misc/NEWS.d/3.11.0a4.rst index 64e2f39ad9db18..a2d36202045887 100644 --- a/Misc/NEWS.d/3.11.0a4.rst +++ b/Misc/NEWS.d/3.11.0a4.rst @@ -775,8 +775,8 @@ Ensure that :func:`math.expm1` does not raise on underflow. .. nonce: s9PuyF .. section: Library -Adding :attr:`F_DUP2FD` and :attr:`F_DUP2FD_CLOEXEC` constants from FreeBSD -into the fcntl module. +Adding :const:`!F_DUP2FD` and :const:`!F_DUP2FD_CLOEXEC` constants from FreeBSD +into the :mod:`fcntl` module. .. diff --git a/Misc/NEWS.d/3.14.0a1.rst b/Misc/NEWS.d/3.14.0a1.rst index dfc33812611185..efeba0ac1ffe2f 100644 --- a/Misc/NEWS.d/3.14.0a1.rst +++ b/Misc/NEWS.d/3.14.0a1.rst @@ -798,7 +798,7 @@ when used with ``happy_eyeballs_delay`` Fixed :exc:`AssertionError` when using :func:`!asyncio.staggered.staggered_race` with -:attr:`asyncio.eager_task_factory`. +:data:`asyncio.eager_task_factory`. .. @@ -1755,8 +1755,8 @@ Adjust ``cmath.tanh(nanj)`` and ``cmath.tanh(infj)`` for recent C standards. Remove internal frames from tracebacks shown in :class:`code.InteractiveInterpreter` with non-default :func:`sys.excepthook`. Save correct tracebacks in -:attr:`sys.last_traceback` and update ``__traceback__`` attribute of -:attr:`sys.last_value` and :attr:`sys.last_exc`. +:data:`sys.last_traceback` and update ``__traceback__`` attribute of +:data:`sys.last_value` and :data:`sys.last_exc`. .. diff --git a/Misc/NEWS.d/3.5.3rc1.rst b/Misc/NEWS.d/3.5.3rc1.rst index 2424604249a65c..cfc729dd82556f 100644 --- a/Misc/NEWS.d/3.5.3rc1.rst +++ b/Misc/NEWS.d/3.5.3rc1.rst @@ -1146,7 +1146,7 @@ after a commit. .. section: Library A new version of typing.py from https://github.com/python/typing: -Collection (only for 3.6) (Issue #27598). Add FrozenSet to __all__ +Collection (only for 3.6) (issue #27598). Add FrozenSet to __all__ (upstream #261). Fix crash in _get_type_vars() (upstream #259). Remove the dict constraint in ForwardRef._eval_type (upstream #252). diff --git a/Misc/NEWS.d/3.6.0a4.rst b/Misc/NEWS.d/3.6.0a4.rst index 3abbdecb57038b..6f3f5262e5749d 100644 --- a/Misc/NEWS.d/3.6.0a4.rst +++ b/Misc/NEWS.d/3.6.0a4.rst @@ -177,7 +177,7 @@ Support keyword arguments to zlib.decompress(). Patch by Xiang Zhang. .. section: Library Prevent segfault after interpreter re-initialization due to ref count -problem introduced in code for Issue #27038 in 3.6.0a3. 
Patch by Xiang +problem introduced in code for issue #27038 in 3.6.0a3. Patch by Xiang Zhang. .. diff --git a/Misc/NEWS.d/3.6.0b1.rst b/Misc/NEWS.d/3.6.0b1.rst index bd54cf601d053b..1e2dcdd6c642bb 100644 --- a/Misc/NEWS.d/3.6.0b1.rst +++ b/Misc/NEWS.d/3.6.0b1.rst @@ -1137,7 +1137,7 @@ chunked transfer-encoding. .. section: Library A new version of typing.py from https://github.com/python/typing: - -Collection (only for 3.6) (Issue #27598) - Add FrozenSet to __all__ +Collection (only for 3.6) (issue #27598) - Add FrozenSet to __all__ (upstream #261) - fix crash in _get_type_vars() (upstream #259) - Remove the dict constraint in ForwardRef._eval_type (upstream #252) diff --git a/Misc/NEWS.d/next/Build/2025-02-04-12-30-43.gh-issue-129660.SitXa7.rst b/Misc/NEWS.d/next/Build/2025-02-04-12-30-43.gh-issue-129660.SitXa7.rst new file mode 100644 index 00000000000000..945f91be63809a --- /dev/null +++ b/Misc/NEWS.d/next/Build/2025-02-04-12-30-43.gh-issue-129660.SitXa7.rst @@ -0,0 +1,2 @@ +Drop ``test_embed`` from PGO training, whose contribution in recent +versions is considered to be ignorable. diff --git a/Misc/NEWS.d/next/C_API/2025-01-16-12-47-01.gh-issue-128911.mHVJ4x.rst b/Misc/NEWS.d/next/C_API/2025-01-16-12-47-01.gh-issue-128911.mHVJ4x.rst new file mode 100644 index 00000000000000..d32cd00cd5d605 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-01-16-12-47-01.gh-issue-128911.mHVJ4x.rst @@ -0,0 +1,3 @@ +Add :c:func:`PyImport_ImportModuleAttr` and :c:func:`PyImport_ImportModuleAttrString` +helper functions to import a module and get an attribute of the module. Patch +by Victor Stinner. diff --git a/Misc/NEWS.d/next/C_API/2025-01-16-21-56-49.gh-issue-128844.ZPiJuo.rst b/Misc/NEWS.d/next/C_API/2025-01-16-21-56-49.gh-issue-128844.ZPiJuo.rst new file mode 100644 index 00000000000000..d9e1962631026a --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-01-16-21-56-49.gh-issue-128844.ZPiJuo.rst @@ -0,0 +1,3 @@ +Add :c:func:`PyUnstable_TryIncRef` and :c:func:`PyUnstable_EnableTryIncRef` +unstable APIs. These are helpers for dealing with unowned references in +a thread-safe way, particularly in the free threading build. diff --git a/Misc/NEWS.d/next/C_API/2025-01-28-13-21-17.gh-issue-91417.AfiR0t.rst b/Misc/NEWS.d/next/C_API/2025-01-28-13-21-17.gh-issue-91417.AfiR0t.rst new file mode 100644 index 00000000000000..e1017188b8d0ce --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-01-28-13-21-17.gh-issue-91417.AfiR0t.rst @@ -0,0 +1,3 @@ +Remove :c:func:`PySequence_Fast` from the limited C API, since this function +has to be used with :c:macro:`PySequence_Fast_GET_ITEM` which never worked +in the limited C API. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/C_API/2025-01-29-11-58-38.gh-issue-89188.BsfLr3.rst b/Misc/NEWS.d/next/C_API/2025-01-29-11-58-38.gh-issue-89188.BsfLr3.rst new file mode 100644 index 00000000000000..7ff225a7dc60c7 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-01-29-11-58-38.gh-issue-89188.BsfLr3.rst @@ -0,0 +1,3 @@ +Implement :c:func:`PyUnicode_KIND` and :c:func:`PyUnicode_DATA` as function, +in addition to the macros with the same names. The macros rely on C bit +fields which have compiler-specific layout. Patch by Victor Stinner. 
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-30-16-13-31.gh-issue-127349.ssYd6n.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-30-16-13-31.gh-issue-127349.ssYd6n.rst new file mode 100644 index 00000000000000..3c1586b6cbb8e7 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-30-16-13-31.gh-issue-127349.ssYd6n.rst @@ -0,0 +1,2 @@ +Fixed the error when resizing terminal in Python REPL. Patch by Semyon +Moroz. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-04-22-14-40.gh-issue-127119._hpyFE.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-04-22-14-40.gh-issue-127119._hpyFE.rst new file mode 100644 index 00000000000000..f021bd490f488c --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-04-22-14-40.gh-issue-127119._hpyFE.rst @@ -0,0 +1 @@ +Slightly optimize the :class:`int` deallocator. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-07-19-26-40.gh-issue-126703.9i-S5t.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-07-19-26-40.gh-issue-126703.9i-S5t.rst new file mode 100644 index 00000000000000..dcd5f449c98ef3 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-07-19-26-40.gh-issue-126703.9i-S5t.rst @@ -0,0 +1 @@ +Improve performance of iterating over lists and tuples by using a freelist for the iterator objects. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-16-22-54-12.gh-issue-100239.7_HpBU.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-16-22-54-12.gh-issue-100239.7_HpBU.rst new file mode 100644 index 00000000000000..6f086b7ecc0036 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-16-22-54-12.gh-issue-100239.7_HpBU.rst @@ -0,0 +1 @@ +Specialize ``BINARY_OP`` for bitwise logical operations on compact ints. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-17-13-16-14.gh-issue-128842.OMs5X6.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-17-13-16-14.gh-issue-128842.OMs5X6.rst new file mode 100644 index 00000000000000..9898060076db79 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-17-13-16-14.gh-issue-128842.OMs5X6.rst @@ -0,0 +1 @@ +Collect JIT memory stats using pystats. Patch by Diego Russo. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-22-14-22-34.gh-issue-129201.wiZzEb.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-22-14-22-34.gh-issue-129201.wiZzEb.rst new file mode 100644 index 00000000000000..26737330716181 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-22-14-22-34.gh-issue-129201.wiZzEb.rst @@ -0,0 +1,5 @@ +The free-threaded version of the cyclic garbage collector has been optimized to +conditionally use CPU prefetch instructions during the collection. This can +reduce collection times by making it more likely that data is in the CPU cache +when it is needed. The prefetch instructions are enabled if the number of +long-lived objects (objects surviving a full collection) exceeds a threshold. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-24-11-37-22.gh-issue-129231.ZsAP9v.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-24-11-37-22.gh-issue-129231.ZsAP9v.rst new file mode 100644 index 00000000000000..b30492a1947058 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-24-11-37-22.gh-issue-129231.ZsAP9v.rst @@ -0,0 +1 @@ +Improve memory layout of JIT traces. 
Patch by Diego Russo diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-06-23-59.gh-issue-129345.uOjkML.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-06-23-59.gh-issue-129345.uOjkML.rst new file mode 100644 index 00000000000000..68e1103db45652 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-06-23-59.gh-issue-129345.uOjkML.rst @@ -0,0 +1 @@ +Fix null pointer dereference in :func:`syslog.openlog` when an audit hook raises an exception. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-10-26-04.gh-issue-129393.0eICq6.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-10-26-04.gh-issue-129393.0eICq6.rst new file mode 100644 index 00000000000000..e36e6f565efd81 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-10-26-04.gh-issue-129393.0eICq6.rst @@ -0,0 +1,2 @@ +On FreeBSD, :data:`sys.platform` doesn't contain the major version anymore. +It is always ``'freebsd'``, instead of ``'freebsd13'`` or ``'freebsd14'``. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-11-13-41.gh-issue-128563.xElppE.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-11-13-41.gh-issue-128563.xElppE.rst new file mode 100644 index 00000000000000..dfd932e836bda6 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-28-11-13-41.gh-issue-128563.xElppE.rst @@ -0,0 +1,2 @@ +Fix an issue where the "lltrace" debug feature could have been incorrectly +enabled for some frames. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-02-04-12-42-40.gh-issue-129643.K24Zow.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-02-04-12-42-40.gh-issue-129643.K24Zow.rst new file mode 100644 index 00000000000000..27dd3b7f652aca --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-02-04-12-42-40.gh-issue-129643.K24Zow.rst @@ -0,0 +1 @@ +Fix thread safety of :c:func:`PyList_SetItem` in free-threading builds. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-02-05-11-29-52.gh-issue-129643.4mGzvg.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-02-05-11-29-52.gh-issue-129643.4mGzvg.rst new file mode 100644 index 00000000000000..420e1fb9781ff3 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-02-05-11-29-52.gh-issue-129643.4mGzvg.rst @@ -0,0 +1 @@ +Fix thread safety of :c:func:`PyList_Insert` in free-threading builds. diff --git a/Misc/NEWS.d/next/Library/2020-08-07-16-55-57.bpo-27307.Xqzzda.rst b/Misc/NEWS.d/next/Library/2020-08-07-16-55-57.bpo-27307.Xqzzda.rst new file mode 100644 index 00000000000000..6e7a856d994cb6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-08-07-16-55-57.bpo-27307.Xqzzda.rst @@ -0,0 +1 @@ +Add attribute and item access support to :class:`string.Formatter` in auto-numbering mode, which allows format strings like '{.name}' and '{[1]}'. diff --git a/Misc/NEWS.d/next/Library/2022-05-23-21-23-29.gh-issue-81340.D11RkZ.rst b/Misc/NEWS.d/next/Library/2022-05-23-21-23-29.gh-issue-81340.D11RkZ.rst new file mode 100644 index 00000000000000..49e6305bf83138 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-05-23-21-23-29.gh-issue-81340.D11RkZ.rst @@ -0,0 +1,5 @@ +Use :func:`os.copy_file_range` in :func:`shutil.copy`, :func:`shutil.copy2`, +and :func:`shutil.copyfile` functions by default. An underlying Linux system +call gives filesystems an opportunity to implement the use of copy-on-write +(in case of btrfs and XFS) or server-side copy (in the case of NFS.) +Patch by Illia Volochii. 
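The gh-81340 entry above switches shutil's fast-copy path to os.copy_file_range() on Linux, which lets btrfs/XFS clone blocks (copy-on-write) and NFS perform server-side copies. A rough sketch of the underlying loop, not the actual shutil implementation; the file names are placeholders and the call is Linux-only:

import os

with open('src.bin', 'rb') as src, open('dst.bin', 'wb') as dst:
    remaining = os.fstat(src.fileno()).st_size
    while remaining > 0:
        # Copies directly between the two descriptors in kernel space;
        # both file offsets advance automatically when no offsets are given.
        copied = os.copy_file_range(src.fileno(), dst.fileno(), remaining)
        if copied == 0:  # source shrank or filesystem refused; stop here
            break
        remaining -= copied

shutil itself guards this path with a _GiveupOnFastCopy escape hatch and falls back to an ordinary copy, as the empty-file test above exercises.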
diff --git a/Misc/NEWS.d/next/Library/2024-12-16-22-20-38.gh-issue-121604.m3Xn4G.rst b/Misc/NEWS.d/next/Library/2024-12-16-22-20-38.gh-issue-121604.m3Xn4G.rst index 9a6fce8647cc6b..8edd8dfb604878 100644 --- a/Misc/NEWS.d/next/Library/2024-12-16-22-20-38.gh-issue-121604.m3Xn4G.rst +++ b/Misc/NEWS.d/next/Library/2024-12-16-22-20-38.gh-issue-121604.m3Xn4G.rst @@ -1 +1 @@ -Add missing Deprecation warnings for :attr:`importlib.machinery.DEBUG_BYTECODE_SUFFIXES`, :attr:`importlib.machinery.OPTIMIZED_BYTECODE_SUFFIXES`, :class:`importlib.machinery.WindowsRegistryFinder`, :class:`importlib.abc.ResourceLoader`, :meth:`importlib.abc.SourceLoader.path_mtime`. +Add missing Deprecation warnings for :const:`importlib.machinery.DEBUG_BYTECODE_SUFFIXES`, :const:`importlib.machinery.OPTIMIZED_BYTECODE_SUFFIXES`, :class:`importlib.machinery.WindowsRegistryFinder`, :class:`importlib.abc.ResourceLoader`, :meth:`importlib.abc.SourceLoader.path_mtime`. diff --git a/Misc/NEWS.d/next/Library/2024-12-17-16-48-02.gh-issue-115514.1yOJ7T.rst b/Misc/NEWS.d/next/Library/2024-12-17-16-48-02.gh-issue-115514.1yOJ7T.rst new file mode 100644 index 00000000000000..24e836a0b0b7f9 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-12-17-16-48-02.gh-issue-115514.1yOJ7T.rst @@ -0,0 +1,2 @@ +Fix exceptions and incomplete writes after :class:`!asyncio._SelectorTransport` +is closed before writes are completed. diff --git a/Misc/NEWS.d/next/Library/2024-12-20-08-44-12.gh-issue-127975.8HJwu9.rst b/Misc/NEWS.d/next/Library/2024-12-20-08-44-12.gh-issue-127975.8HJwu9.rst new file mode 100644 index 00000000000000..597fa41deb811c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-12-20-08-44-12.gh-issue-127975.8HJwu9.rst @@ -0,0 +1 @@ +Avoid reusing quote types in :func:`ast.unparse` if not needed. diff --git a/Misc/NEWS.d/next/Library/2024-12-26-11-00-03.gh-issue-112064.mCcw3B.rst b/Misc/NEWS.d/next/Library/2024-12-26-11-00-03.gh-issue-112064.mCcw3B.rst new file mode 100644 index 00000000000000..e885add7b68c0f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-12-26-11-00-03.gh-issue-112064.mCcw3B.rst @@ -0,0 +1,2 @@ +Fix incorrect handling of negative read sizes in :meth:`HTTPResponse.read +`. Patch by Yury Manushkin. diff --git a/Misc/NEWS.d/next/Library/2025-01-04-20-51-48.gh-issue-128509.3gr_-O.rst b/Misc/NEWS.d/next/Library/2025-01-04-20-51-48.gh-issue-128509.3gr_-O.rst new file mode 100644 index 00000000000000..ba45884304f662 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-04-20-51-48.gh-issue-128509.3gr_-O.rst @@ -0,0 +1,2 @@ +Add :func:`sys._is_immortal` for identifying :term:`immortal` objects at +runtime. diff --git a/Misc/NEWS.d/next/Library/2025-01-22-13-29-06.gh-issue-128772.6YrxYM.rst b/Misc/NEWS.d/next/Library/2025-01-22-13-29-06.gh-issue-128772.6YrxYM.rst new file mode 100644 index 00000000000000..53d6b3ccaffda8 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-22-13-29-06.gh-issue-128772.6YrxYM.rst @@ -0,0 +1,2 @@ +Fix :mod:`pydoc` for methods with the ``__module__`` attribute equal to +``None``. diff --git a/Misc/NEWS.d/next/Library/2025-01-26-10-01-21.gh-issue-129005.ncpLvw.rst b/Misc/NEWS.d/next/Library/2025-01-26-10-01-21.gh-issue-129005.ncpLvw.rst new file mode 100644 index 00000000000000..a825e9d244d525 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-26-10-01-21.gh-issue-129005.ncpLvw.rst @@ -0,0 +1 @@ +Optimize ``_pyio.FileIO.readinto`` by avoiding unnecessary objects and copies using :func:`os.readinto`. 
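The gh-129005 entry above optimizes _pyio.FileIO.readinto() around os.readinto(), which fills a caller-supplied buffer instead of allocating an intermediate bytes object. A small sketch of that pattern, assuming a placeholder file name 'data.bin':

import os

buf = bytearray(4096)
fd = os.open('data.bin', os.O_RDONLY)  # placeholder path
try:
    # Reads straight into buf and returns the number of bytes read,
    # avoiding the extra copy of os.read(fd, 4096) followed by slicing.
    n = os.readinto(fd, buf)
finally:
    os.close(fd)
data = buf[:n]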
diff --git a/Misc/NEWS.d/next/Library/2025-01-29-10-53-32.gh-issue-118761.i8wjpV.rst b/Misc/NEWS.d/next/Library/2025-01-29-10-53-32.gh-issue-118761.i8wjpV.rst new file mode 100644 index 00000000000000..0762cbe5d63949 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-29-10-53-32.gh-issue-118761.i8wjpV.rst @@ -0,0 +1,2 @@ +Improve import time of :mod:`subprocess` by lazy importing ``locale`` and +``signal``. Patch by Taneli Hukkinen. diff --git a/Misc/NEWS.d/next/Library/2025-01-29-11-14-20.gh-issue-118761.gMZwE1.rst b/Misc/NEWS.d/next/Library/2025-01-29-11-14-20.gh-issue-118761.gMZwE1.rst new file mode 100644 index 00000000000000..c2474795d8233e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-29-11-14-20.gh-issue-118761.gMZwE1.rst @@ -0,0 +1,2 @@ +Always lazy import ``warnings`` in :mod:`threading`. Patch by Taneli +Hukkinen. diff --git a/Misc/NEWS.d/next/Library/2025-01-29-13-37-18.gh-issue-126400.DaBaR3.rst b/Misc/NEWS.d/next/Library/2025-01-29-13-37-18.gh-issue-126400.DaBaR3.rst new file mode 100644 index 00000000000000..1532faf4b7d6f5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-29-13-37-18.gh-issue-126400.DaBaR3.rst @@ -0,0 +1,2 @@ +Add a socket *timeout* keyword argument to +:class:`logging.handlers.SysLogHandler`. diff --git a/Misc/NEWS.d/next/Library/2025-01-29-14-30-54.gh-issue-129409.JZbOE6.rst b/Misc/NEWS.d/next/Library/2025-01-29-14-30-54.gh-issue-129409.JZbOE6.rst new file mode 100644 index 00000000000000..7e00b44c0ef471 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-29-14-30-54.gh-issue-129409.JZbOE6.rst @@ -0,0 +1,2 @@ +Fix an integer overflow in the :mod:`csv` module when writing a data field +larger than 2GB. diff --git a/Misc/NEWS.d/next/Library/2025-01-29-17-10-00.gh-issue-129403.314159.rst b/Misc/NEWS.d/next/Library/2025-01-29-17-10-00.gh-issue-129403.314159.rst new file mode 100644 index 00000000000000..0c2bdd3136e3a3 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-29-17-10-00.gh-issue-129403.314159.rst @@ -0,0 +1 @@ +Corrected :exc:`ValueError` message for :class:`asyncio.Barrier` and :class:`threading.Barrier`. diff --git a/Misc/NEWS.d/next/Library/2025-01-31-11-14-05.gh-issue-129502.j_ArNo.rst b/Misc/NEWS.d/next/Library/2025-01-31-11-14-05.gh-issue-129502.j_ArNo.rst new file mode 100644 index 00000000000000..e9e9d12c11d0ac --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-31-11-14-05.gh-issue-129502.j_ArNo.rst @@ -0,0 +1,5 @@ +Unlikely errors in preparing arguments for a :mod:`ctypes` callback are now +handled in the same way as errors raised in the callback or in converting +the result of the callback -- using :func:`sys.unraisablehook` instead of +:func:`sys.excepthook` and not setting :data:`sys.last_exc` and other +variables. diff --git a/Misc/NEWS.d/next/Library/2025-02-01-14-55-33.gh-issue-129559.hQCeAz.rst b/Misc/NEWS.d/next/Library/2025-02-01-14-55-33.gh-issue-129559.hQCeAz.rst new file mode 100644 index 00000000000000..f08d47b63a84b7 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-02-01-14-55-33.gh-issue-129559.hQCeAz.rst @@ -0,0 +1,2 @@ +Add :meth:`bytearray.resize` method so :class:`bytearray` can be efficiently +resized in place.
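The gh-129559 entry above adds bytearray.resize(), which changes the length of the buffer without rebuilding the object. A brief illustration (values are arbitrary):

buf = bytearray(b'hello world')
buf.resize(5)       # shrink in place; the leading bytes are kept
assert buf == b'hello'
buf.resize(8)       # grow in place to 8 bytes
assert len(buf) == 8 and buf.startswith(b'hello')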
diff --git a/Misc/NEWS.d/next/Library/2025-02-04-15-16-33.gh-issue-129646.sapk1F.rst b/Misc/NEWS.d/next/Library/2025-02-04-15-16-33.gh-issue-129646.sapk1F.rst new file mode 100644 index 00000000000000..742d1d60dfd1bc --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-02-04-15-16-33.gh-issue-129646.sapk1F.rst @@ -0,0 +1,2 @@ +Update the locale alias mapping in the :mod:`locale` module to match the +latest X Org locale alias mapping and support new locales in Glibc 2.41. diff --git a/Misc/NEWS.d/next/Security/2024-10-29-09-15-10.gh-issue-126108.eTIjHY.rst b/Misc/NEWS.d/next/Security/2024-10-29-09-15-10.gh-issue-126108.eTIjHY.rst new file mode 100644 index 00000000000000..9f2c7e84d4dff0 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2024-10-29-09-15-10.gh-issue-126108.eTIjHY.rst @@ -0,0 +1 @@ +Fix a possible ``NULL`` pointer dereference in :c:func:`!PySys_AddWarnOptionUnicode`. diff --git a/Misc/NEWS.d/next/Security/2025-01-28-14-08-03.gh-issue-105704.EnhHxu.rst b/Misc/NEWS.d/next/Security/2025-01-28-14-08-03.gh-issue-105704.EnhHxu.rst new file mode 100644 index 00000000000000..bff1bc6b0d609c --- /dev/null +++ b/Misc/NEWS.d/next/Security/2025-01-28-14-08-03.gh-issue-105704.EnhHxu.rst @@ -0,0 +1,4 @@ +When using :func:`urllib.parse.urlsplit` and :func:`urllib.parse.urlparse` host +parsing would not reject domain names containing square brackets (``[`` and +``]``). Square brackets are only valid for IPv6 and IPvFuture hosts according to +`RFC 3986 Section 3.2.2 `__. diff --git a/Misc/NEWS.d/next/Tests/2024-12-16-19-15-10.gh-issue-128003.GVBrfa.rst b/Misc/NEWS.d/next/Tests/2024-12-16-19-15-10.gh-issue-128003.GVBrfa.rst new file mode 100644 index 00000000000000..05711c7e589551 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2024-12-16-19-15-10.gh-issue-128003.GVBrfa.rst @@ -0,0 +1,4 @@ +Add an option ``--parallel-threads=N`` to the regression test runner that +runs individual tests in multiple threads in parallel in order to find +concurrency bugs. Note that most of the test suite is not yet reviewed for +thread-safety or annotated with ``@thread_unsafe`` when necessary. diff --git a/Misc/NEWS.d/next/Tests/2025-01-30-13-09-27.gh-issue-129386.iNtbEi.rst b/Misc/NEWS.d/next/Tests/2025-01-30-13-09-27.gh-issue-129386.iNtbEi.rst new file mode 100644 index 00000000000000..a03f596bc46c30 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2025-01-30-13-09-27.gh-issue-129386.iNtbEi.rst @@ -0,0 +1,2 @@ +Add ``test.support.reset_code``, which can be used to reset various +bytecode-level optimizations and local instrumentation for a function. 
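Per the gh-105704 security entry above and the new _check_bracketed_netloc() helper, square brackets in the authority are now accepted only around an IPv6 or IPvFuture address, with nothing before or after them except an optional port. A short illustration; the rejected URLs are taken from the tests above, the accepted one is an ordinary bracketed IPv6 host:

from urllib.parse import urlsplit

# Still accepted: a bracketed IPv6 host, optionally followed by a port.
parts = urlsplit('scheme://user@[::1]:1234/path')
assert parts.hostname == '::1' and parts.port == 1234

# Now rejected: stray data before or after the brackets in the host.
for bad in ('scheme://prefix.[::1]/', 'scheme://[::1].suffix/',
            'scheme://prefix.[v6a.ip]', 'scheme://[v6a.ip].suffix'):
    try:
        urlsplit(bad)
    except ValueError:
        pass  # "Invalid IPv6 URL"
    else:
        raise AssertionError(f'{bad!r} was not rejected')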
diff --git a/Misc/sbom.spdx.json b/Misc/sbom.spdx.json index b4d785f65639a5..316c266b7e4fd6 100644 --- a/Misc/sbom.spdx.json +++ b/Misc/sbom.spdx.json @@ -1280,11 +1280,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "9dcb50e3f9c3245972731be5da0b28e7583198d9" + "checksumValue": "5d6fdd98730584f74f7b731da6e488fe234504b3" }, { "algorithm": "SHA256", - "checksumValue": "7cac49fef5e9d952ec9390bf81c54d83f1b5da32fdf76091c2f0770ed943b7fe" + "checksumValue": "d74f365463166891f62e1326d22b2d39d865776b7ea5e0df2aea5eede4d85b0f" } ], "fileName": "Modules/_decimal/libmpdec/io.c" diff --git a/Misc/stable_abi.toml b/Misc/stable_abi.toml index bfc02b2239b84d..6a6db48033c573 100644 --- a/Misc/stable_abi.toml +++ b/Misc/stable_abi.toml @@ -1253,6 +1253,7 @@ added = '3.2' [function.PySequence_Fast] added = '3.2' + abi_only = true [function.PySequence_GetItem] added = '3.2' [function.PySequence_GetSlice] diff --git a/Modules/Setup.stdlib.in b/Modules/Setup.stdlib.in index 6b6a8ae57a5119..6bb05a06a3465d 100644 --- a/Modules/Setup.stdlib.in +++ b/Modules/Setup.stdlib.in @@ -162,8 +162,8 @@ @MODULE__XXTESTFUZZ_TRUE@_xxtestfuzz _xxtestfuzz/_xxtestfuzz.c _xxtestfuzz/fuzzer.c @MODULE__TESTBUFFER_TRUE@_testbuffer _testbuffer.c @MODULE__TESTINTERNALCAPI_TRUE@_testinternalcapi _testinternalcapi.c _testinternalcapi/test_lock.c _testinternalcapi/pytime.c _testinternalcapi/set.c _testinternalcapi/test_critical_sections.c -@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/heaptype.c _testcapi/abstract.c _testcapi/unicode.c _testcapi/dict.c _testcapi/set.c _testcapi/list.c _testcapi/tuple.c _testcapi/getargs.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/complex.c _testcapi/numbers.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c _testcapi/buffer.c _testcapi/pyatomic.c _testcapi/run.c _testcapi/file.c _testcapi/codec.c _testcapi/immortal.c _testcapi/gc.c _testcapi/hash.c _testcapi/time.c _testcapi/bytes.c _testcapi/object.c _testcapi/monitoring.c _testcapi/config.c -@MODULE__TESTLIMITEDCAPI_TRUE@_testlimitedcapi _testlimitedcapi.c _testlimitedcapi/abstract.c _testlimitedcapi/bytearray.c _testlimitedcapi/bytes.c _testlimitedcapi/codec.c _testlimitedcapi/complex.c _testlimitedcapi/dict.c _testlimitedcapi/eval.c _testlimitedcapi/float.c _testlimitedcapi/heaptype_relative.c _testlimitedcapi/import.c _testlimitedcapi/list.c _testlimitedcapi/long.c _testlimitedcapi/object.c _testlimitedcapi/pyos.c _testlimitedcapi/set.c _testlimitedcapi/sys.c _testlimitedcapi/tuple.c _testlimitedcapi/unicode.c _testlimitedcapi/vectorcall_limited.c _testlimitedcapi/version.c +@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/heaptype.c _testcapi/abstract.c _testcapi/unicode.c _testcapi/dict.c _testcapi/set.c _testcapi/list.c _testcapi/tuple.c _testcapi/getargs.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/complex.c _testcapi/numbers.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c _testcapi/buffer.c _testcapi/pyatomic.c _testcapi/run.c _testcapi/file.c _testcapi/codec.c _testcapi/immortal.c _testcapi/gc.c _testcapi/hash.c _testcapi/time.c _testcapi/bytes.c _testcapi/object.c _testcapi/monitoring.c _testcapi/config.c _testcapi/import.c _testcapi/frame.c _testcapi/type.c _testcapi/function.c +@MODULE__TESTLIMITEDCAPI_TRUE@_testlimitedcapi _testlimitedcapi.c 
_testlimitedcapi/abstract.c _testlimitedcapi/bytearray.c _testlimitedcapi/bytes.c _testlimitedcapi/codec.c _testlimitedcapi/complex.c _testlimitedcapi/dict.c _testlimitedcapi/eval.c _testlimitedcapi/float.c _testlimitedcapi/heaptype_relative.c _testlimitedcapi/import.c _testlimitedcapi/list.c _testlimitedcapi/long.c _testlimitedcapi/object.c _testlimitedcapi/pyos.c _testlimitedcapi/set.c _testlimitedcapi/sys.c _testlimitedcapi/tuple.c _testlimitedcapi/unicode.c _testlimitedcapi/vectorcall_limited.c _testlimitedcapi/version.c _testlimitedcapi/file.c @MODULE__TESTCLINIC_TRUE@_testclinic _testclinic.c @MODULE__TESTCLINIC_LIMITED_TRUE@_testclinic_limited _testclinic_limited.c diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index d5d49658555f1a..b488fd92aa6817 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -1715,7 +1715,8 @@ FutureObj_finalize(FutureObj *fut) if (func != NULL) { PyObject *res = PyObject_CallOneArg(func, context); if (res == NULL) { - PyErr_WriteUnraisable(func); + PyErr_FormatUnraisable("Exception ignored while calling asyncio " + "function %R", func); } else { Py_DECREF(res); @@ -2978,7 +2979,8 @@ TaskObj_finalize(TaskObj *task) if (func != NULL) { PyObject *res = PyObject_CallOneArg(func, context); if (res == NULL) { - PyErr_WriteUnraisable(func); + PyErr_FormatUnraisable("Exception ignored while calling asyncio " + "function %R", func); } else { Py_DECREF(res); diff --git a/Modules/_csv.c b/Modules/_csv.c index 7ca30e39e00c0c..e5ae853590bf2c 100644 --- a/Modules/_csv.c +++ b/Modules/_csv.c @@ -1138,7 +1138,7 @@ join_append_data(WriterObj *self, int field_kind, const void *field_data, int copy_phase) { DialectObj *dialect = self->dialect; - int i; + Py_ssize_t i; Py_ssize_t rec_len; #define INCLEN \ diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index d86adec0aeca58..7c0ac1a57f534c 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -183,7 +183,8 @@ _DictRemover_call(PyObject *myself, PyObject *args, PyObject *kw) DictRemoverObject *self = _DictRemoverObject_CAST(myself); if (self->key && self->dict) { if (-1 == PyDict_DelItem(self->dict, self->key)) { - PyErr_FormatUnraisable("Exception ignored on calling _ctypes.DictRemover"); + PyErr_FormatUnraisable("Exception ignored while " + "calling _ctypes.DictRemover"); } Py_CLEAR(self->key); Py_CLEAR(self->dict); @@ -463,7 +464,8 @@ CType_Type_traverse(PyObject *self, visitproc visit, void *arg) { StgInfo *info = _PyStgInfo_FromType_NoState(self); if (!info) { - PyErr_WriteUnraisable(self); + PyErr_FormatUnraisable("Exception ignored while " + "calling ctypes traverse function %R", self); } if (info) { Py_VISIT(info->proto); @@ -494,7 +496,8 @@ CType_Type_clear(PyObject *self) { StgInfo *info = _PyStgInfo_FromType_NoState(self); if (!info) { - PyErr_WriteUnraisable(self); + PyErr_FormatUnraisable("Exception ignored while " + "clearing ctypes %R", self); } if (info) { ctype_clear_stginfo(info); @@ -507,7 +510,8 @@ CType_Type_dealloc(PyObject *self) { StgInfo *info = _PyStgInfo_FromType_NoState(self); if (!info) { - PyErr_WriteUnraisable(NULL); // NULL avoids segfault here + PyErr_FormatUnraisable("Exception ignored while " + "deallocating ctypes %R", self); } if (info) { PyMem_Free(info->ffi_type_pointer.elements); diff --git a/Modules/_ctypes/callbacks.c b/Modules/_ctypes/callbacks.c index 652af2f1aee64d..6dd6f6ec56d008 100644 --- a/Modules/_ctypes/callbacks.c +++ b/Modules/_ctypes/callbacks.c @@ -92,26 +92,23 @@ PyType_Spec cthunk_spec = { * after 
checking for PyObject_IsTrue(), but this would probably be somewhat * slower. */ -static void +static int TryAddRef(PyObject *cnv, CDataObject *obj) { IUnknown *punk; PyObject *attrdict = _PyType_GetDict((PyTypeObject *)cnv); if (!attrdict) { - return; + return 0; } int r = PyDict_Contains(attrdict, &_Py_ID(_needs_com_addref_)); if (r <= 0) { - if (r < 0) { - PySys_WriteStderr("getting _needs_com_addref_"); - } - return; + return r; } punk = *(IUnknown **)obj->b_ptr; if (punk) punk->lpVtbl->AddRef(punk); - return; + return 0; } #endif @@ -146,14 +143,13 @@ static void _CallPythonObject(ctypes_state *st, StgInfo *info; if (PyStgInfo_FromType(st, cnv, &info) < 0) { - goto Done; + goto Error; } if (info && info->getfunc && !_ctypes_simple_instance(st, cnv)) { PyObject *v = info->getfunc(*pArgs, info->size); if (!v) { - PySys_WriteStderr("create argument %zd:\n", i); - goto Done; + goto Error; } args[i] = v; /* XXX XXX XX @@ -166,24 +162,25 @@ static void _CallPythonObject(ctypes_state *st, /* Hm, shouldn't we use PyCData_AtAddress() or something like that instead? */ CDataObject *obj = (CDataObject *)_PyObject_CallNoArgs(cnv); if (!obj) { - PySys_WriteStderr("create argument %zd:\n", i); - goto Done; + goto Error; } if (!CDataObject_Check(st, obj)) { + PyErr_Format(PyExc_TypeError, + "%R returned unexpected result of type %T", cnv, obj); Py_DECREF(obj); - PySys_WriteStderr("unexpected result of create argument %zd:\n", i); - goto Done; + goto Error; } memcpy(obj->b_ptr, *pArgs, info->size); args[i] = (PyObject *)obj; #ifdef MS_WIN32 - TryAddRef(cnv, obj); + if (TryAddRef(cnv, obj) < 0) { + goto Error; + } #endif } else { - PyErr_SetString(PyExc_TypeError, - "cannot build parameter"); - PySys_WriteStderr("Parsing argument %zd\n", i); - goto Done; + PyErr_Format(PyExc_TypeError, + "cannot build parameter of type %R", cnv); + goto Error; } /* XXX error handling! */ pArgs++; @@ -191,8 +188,13 @@ static void _CallPythonObject(ctypes_state *st, if (flags & (FUNCFLAG_USE_ERRNO | FUNCFLAG_USE_LASTERROR)) { error_object = _ctypes_get_errobj(st, &space); - if (error_object == NULL) + if (error_object == NULL) { + PyErr_FormatUnraisable( + "Exception ignored while setting error for " + "ctypes callback function %R", + callable); goto Done; + } if (flags & FUNCFLAG_USE_ERRNO) { int temp = space[0]; space[0] = errno; @@ -209,9 +211,9 @@ static void _CallPythonObject(ctypes_state *st, result = PyObject_Vectorcall(callable, args, nargs, NULL); if (result == NULL) { - PyErr_FormatUnraisable( - "Exception ignored on calling ctypes callback function %R", - callable); + PyErr_FormatUnraisable("Exception ignored while " + "calling ctypes callback function %R", + callable); } #ifdef MS_WIN32 @@ -253,7 +255,7 @@ static void _CallPythonObject(ctypes_state *st, if (keep == NULL) { /* Could not convert callback result. 
*/ PyErr_FormatUnraisable( - "Exception ignored on converting result " + "Exception ignored while converting result " "of ctypes callback function %R", callable); } @@ -266,7 +268,7 @@ static void _CallPythonObject(ctypes_state *st, "memory leak in callback function.", 1) == -1) { PyErr_FormatUnraisable( - "Exception ignored on converting result " + "Exception ignored while converting result " "of ctypes callback function %R", callable); } @@ -279,6 +281,14 @@ static void _CallPythonObject(ctypes_state *st, for (j = 0; j < i; j++) { Py_DECREF(args[j]); } + return; + + Error: + PyErr_FormatUnraisable( + "Exception ignored while creating argument %zd for " + "ctypes callback function %R", + i, callable); + goto Done; } static void closure_fcn(ffi_cif *cif, @@ -471,39 +481,31 @@ long Call_GetClassObject(REFCLSID rclsid, REFIID riid, LPVOID *ppv) { PyObject *func, *result; long retval; - static PyObject *context; - - if (context == NULL) - context = PyUnicode_InternFromString("_ctypes.DllGetClassObject"); - func = _PyImport_GetModuleAttrString("ctypes", "DllGetClassObject"); + func = PyImport_ImportModuleAttrString("ctypes", "DllGetClassObject"); if (!func) { - PyErr_WriteUnraisable(context ? context : Py_None); /* There has been a warning before about this already */ - return E_FAIL; + goto error; } { PyObject *py_rclsid = PyLong_FromVoidPtr((void *)rclsid); if (py_rclsid == NULL) { Py_DECREF(func); - PyErr_WriteUnraisable(context ? context : Py_None); - return E_FAIL; + goto error; } PyObject *py_riid = PyLong_FromVoidPtr((void *)riid); if (py_riid == NULL) { Py_DECREF(func); Py_DECREF(py_rclsid); - PyErr_WriteUnraisable(context ? context : Py_None); - return E_FAIL; + goto error; } PyObject *py_ppv = PyLong_FromVoidPtr(ppv); if (py_ppv == NULL) { Py_DECREF(py_rclsid); Py_DECREF(py_riid); Py_DECREF(func); - PyErr_WriteUnraisable(context ? context : Py_None); - return E_FAIL; + goto error; } result = PyObject_CallFunctionObjArgs(func, py_rclsid, @@ -516,17 +518,21 @@ long Call_GetClassObject(REFCLSID rclsid, REFIID riid, LPVOID *ppv) } Py_DECREF(func); if (!result) { - PyErr_WriteUnraisable(context ? context : Py_None); - return E_FAIL; + goto error; } retval = PyLong_AsLong(result); if (PyErr_Occurred()) { - PyErr_WriteUnraisable(context ? context : Py_None); - retval = E_FAIL; + Py_DECREF(result); + goto error; } Py_DECREF(result); return retval; + +error: + PyErr_FormatUnraisable("Exception ignored while calling " + "ctypes.DllGetClassObject"); + return E_FAIL; } STDAPI DllGetClassObject(REFCLSID rclsid, @@ -545,43 +551,30 @@ STDAPI DllGetClassObject(REFCLSID rclsid, long Call_CanUnloadNow(void) { - PyObject *mod, *func, *result; - long retval; - static PyObject *context; - - if (context == NULL) - context = PyUnicode_InternFromString("_ctypes.DllCanUnloadNow"); - - mod = PyImport_ImportModule("ctypes"); - if (!mod) { -/* OutputDebugString("Could not import ctypes"); */ - /* We assume that this error can only occur when shutting - down, so we silently ignore it */ - PyErr_Clear(); - return E_FAIL; - } - /* Other errors cannot be raised, but are printed to stderr */ - func = PyObject_GetAttrString(mod, "DllCanUnloadNow"); - Py_DECREF(mod); + PyObject *func = PyImport_ImportModuleAttrString("ctypes", + "DllCanUnloadNow"); if (!func) { - PyErr_WriteUnraisable(context ? context : Py_None); - return E_FAIL; + goto error; } - result = _PyObject_CallNoArgs(func); + PyObject *result = _PyObject_CallNoArgs(func); Py_DECREF(func); if (!result) { - PyErr_WriteUnraisable(context ? 
context : Py_None); - return E_FAIL; + goto error; } - retval = PyLong_AsLong(result); + long retval = PyLong_AsLong(result); if (PyErr_Occurred()) { - PyErr_WriteUnraisable(context ? context : Py_None); - retval = E_FAIL; + Py_DECREF(result); + goto error; } Py_DECREF(result); return retval; + +error: + PyErr_FormatUnraisable("Exception ignored while calling " + "ctypes.DllCanUnloadNow"); + return E_FAIL; } /* diff --git a/Modules/_ctypes/stgdict.c b/Modules/_ctypes/stgdict.c index 5ca5b62427600d..05239d85c44d2c 100644 --- a/Modules/_ctypes/stgdict.c +++ b/Modules/_ctypes/stgdict.c @@ -257,8 +257,8 @@ PyCStructUnionType_update_stginfo(PyObject *type, PyObject *fields, int isStruct goto error; } - PyObject *layout_func = _PyImport_GetModuleAttrString("ctypes._layout", - "get_layout"); + PyObject *layout_func = PyImport_ImportModuleAttrString("ctypes._layout", + "get_layout"); if (!layout_func) { goto error; } diff --git a/Modules/_cursesmodule.c b/Modules/_cursesmodule.c index 2b67dbeb199c49..bba4bef89c3ed2 100644 --- a/Modules/_cursesmodule.c +++ b/Modules/_cursesmodule.c @@ -226,7 +226,7 @@ _PyCursesCheckFunction(int called, const char *funcname) if (called == TRUE) { return 1; } - PyObject *exc = _PyImport_GetModuleAttrString("_curses", "error"); + PyObject *exc = PyImport_ImportModuleAttrString("_curses", "error"); if (exc != NULL) { PyErr_Format(exc, "must call %s() first", funcname); Py_DECREF(exc); diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c index ff2e6d6a098ad9..bcbf4217d41a9b 100644 --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -226,7 +226,7 @@ clear_current_module(PyInterpreterState *interp, PyObject *expected) goto finally; error: - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while clearing _datetime module"); finally: PyErr_SetRaisedException(exc); @@ -1839,7 +1839,7 @@ wrap_strftime(PyObject *object, PyObject *format, PyObject *timetuple, assert(object && format && timetuple); assert(PyUnicode_Check(format)); - PyObject *strftime = _PyImport_GetModuleAttrString("time", "strftime"); + PyObject *strftime = PyImport_ImportModuleAttrString("time", "strftime"); if (strftime == NULL) { return NULL; } @@ -2022,7 +2022,7 @@ static PyObject * time_time(void) { PyObject *result = NULL; - PyObject *time = _PyImport_GetModuleAttrString("time", "time"); + PyObject *time = PyImport_ImportModuleAttrString("time", "time"); if (time != NULL) { result = PyObject_CallNoArgs(time); @@ -2040,7 +2040,7 @@ build_struct_time(int y, int m, int d, int hh, int mm, int ss, int dstflag) PyObject *struct_time; PyObject *result; - struct_time = _PyImport_GetModuleAttrString("time", "struct_time"); + struct_time = PyImport_ImportModuleAttrString("time", "struct_time"); if (struct_time == NULL) { return NULL; } diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c index 78cf6b1426493b..3dcb3e9870c8a4 100644 --- a/Modules/_decimal/_decimal.c +++ b/Modules/_decimal/_decimal.c @@ -3474,7 +3474,7 @@ pydec_format(PyObject *dec, PyObject *context, PyObject *fmt, decimal_state *sta PyObject *u; if (state->PyDecimal == NULL) { - state->PyDecimal = _PyImport_GetModuleAttrString("_pydecimal", "Decimal"); + state->PyDecimal = PyImport_ImportModuleAttrString("_pydecimal", "Decimal"); if (state->PyDecimal == NULL) { return NULL; } diff --git a/Modules/_decimal/libmpdec/io.c b/Modules/_decimal/libmpdec/io.c index 4e95b8964c8e5d..bdcca001659bc0 100644 --- a/Modules/_decimal/libmpdec/io.c +++ b/Modules/_decimal/libmpdec/io.c @@ -347,6 
+347,10 @@ mpd_qset_string_exact(mpd_t *dec, const char *s, uint32_t *status) or the location of a decimal point. */ #define EXTRACT_DIGIT(s, x, d, dot) \ if (s == dot) *s++ = '.'; *s++ = '0' + (char)(x / d); x %= d +#if defined(__GNUC__) && !defined(__INTEL_COMPILER) && __GNUC__ >= 12 + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wstringop-overflow" +#endif static inline char * word_to_string(char *s, mpd_uint_t x, int n, char *dot) { @@ -378,6 +382,9 @@ word_to_string(char *s, mpd_uint_t x, int n, char *dot) *s = '\0'; return s; } +#if defined(__GNUC__) && !defined(__INTEL_COMPILER) && __GNUC__ >= 12 + #pragma GCC diagnostic pop +#endif /* Print exponent x to string s. Undefined for MPD_SSIZE_MIN. */ static inline char * diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c index 355f322d304c2f..b5b0b82571f882 100644 --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -16,7 +16,6 @@ #endif #include "Python.h" -#include "pycore_import.h" // _PyImport_GetModuleAttrString() #include "pycore_pyhash.h" // _Py_HashSecret #include // offsetof() @@ -4393,7 +4392,7 @@ module_exec(PyObject *m) CREATE_TYPE(m, st->Element_Type, &element_spec); CREATE_TYPE(m, st->XMLParser_Type, &xmlparser_spec); - st->deepcopy_obj = _PyImport_GetModuleAttrString("copy", "deepcopy"); + st->deepcopy_obj = PyImport_ImportModuleAttrString("copy", "deepcopy"); if (st->deepcopy_obj == NULL) { goto error; } @@ -4403,7 +4402,7 @@ module_exec(PyObject *m) goto error; /* link against pyexpat */ - if (!(st->expat_capsule = _PyImport_GetModuleAttrString("pyexpat", "expat_CAPI"))) + if (!(st->expat_capsule = PyImport_ImportModuleAttrString("pyexpat", "expat_CAPI"))) goto error; if (!(st->expat_capi = PyCapsule_GetPointer(st->expat_capsule, PyExpat_CAPSULE_NAME))) goto error; diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c index cf0f1d671b507a..f27f2ed4843271 100644 --- a/Modules/_io/fileio.c +++ b/Modules/_io/fileio.c @@ -105,8 +105,10 @@ fileio_dealloc_warn(PyObject *op, PyObject *source) PyObject *exc = PyErr_GetRaisedException(); if (PyErr_ResourceWarning(source, 1, "unclosed file %R", source)) { /* Spurious errors can appear at shutdown */ - if (PyErr_ExceptionMatches(PyExc_Warning)) - PyErr_WriteUnraisable((PyObject *) self); + if (PyErr_ExceptionMatches(PyExc_Warning)) { + PyErr_FormatUnraisable("Exception ignored " + "while finalizing file %R", self); + } } PyErr_SetRaisedException(exc); } diff --git a/Modules/_io/iobase.c b/Modules/_io/iobase.c index 419e5516b5c11e..f87043df126895 100644 --- a/Modules/_io/iobase.c +++ b/Modules/_io/iobase.c @@ -314,7 +314,8 @@ iobase_finalize(PyObject *self) PyErr_Clear(); res = PyObject_CallMethodNoArgs((PyObject *)self, &_Py_ID(close)); if (res == NULL) { - PyErr_WriteUnraisable(self); + PyErr_FormatUnraisable("Exception ignored " + "while finalizing file %R", self); } else { Py_DECREF(res); diff --git a/Modules/_json.c b/Modules/_json.c index 31a5e935e13ad9..5532e252819bbd 100644 --- a/Modules/_json.c +++ b/Modules/_json.c @@ -302,7 +302,7 @@ raise_errmsg(const char *msg, PyObject *s, Py_ssize_t end) /* Use JSONDecodeError exception to raise a nice looking ValueError subclass */ _Py_DECLARE_STR(json_decoder, "json.decoder"); PyObject *JSONDecodeError = - _PyImport_GetModuleAttr(&_Py_STR(json_decoder), &_Py_ID(JSONDecodeError)); + PyImport_ImportModuleAttr(&_Py_STR(json_decoder), &_Py_ID(JSONDecodeError)); if (JSONDecodeError == NULL) { return; } diff --git a/Modules/_lsprof.c b/Modules/_lsprof.c index 0215cc4a5e30b4..9e8fb4c9b642a0 100644 
--- a/Modules/_lsprof.c +++ b/Modules/_lsprof.c @@ -97,7 +97,8 @@ static PyTime_t CallExternalTimer(ProfilerObject *pObj) pObj->flags &= ~POF_EXT_TIMER; if (o == NULL) { - PyErr_WriteUnraisable(pObj->externalTimer); + PyErr_FormatUnraisable("Exception ignored while calling " + "_lsprof timer %R", pObj->externalTimer); return 0; } @@ -116,7 +117,8 @@ static PyTime_t CallExternalTimer(ProfilerObject *pObj) } Py_DECREF(o); if (err < 0) { - PyErr_WriteUnraisable(pObj->externalTimer); + PyErr_FormatUnraisable("Exception ignored while calling " + "_lsprof timer %R", pObj->externalTimer); return 0; } return result; @@ -933,7 +935,8 @@ profiler_dealloc(ProfilerObject *op) if (op->flags & POF_ENABLED) { PyThreadState *tstate = _PyThreadState_GET(); if (_PyEval_SetProfile(tstate, NULL, NULL) < 0) { - PyErr_FormatUnraisable("Exception ignored when destroying _lsprof profiler"); + PyErr_FormatUnraisable("Exception ignored while " + "destroying _lsprof profiler"); } } diff --git a/Modules/_operator.c b/Modules/_operator.c index ce3ef015710223..59987b8f143da2 100644 --- a/Modules/_operator.c +++ b/Modules/_operator.c @@ -1868,7 +1868,7 @@ methodcaller_reduce(methodcallerobject *mc, PyObject *Py_UNUSED(ignored)) PyObject *constructor; PyObject *newargs[2]; - partial = _PyImport_GetModuleAttrString("functools", "partial"); + partial = PyImport_ImportModuleAttrString("functools", "partial"); if (!partial) return NULL; diff --git a/Modules/_pickle.c b/Modules/_pickle.c index d9e600c675abc5..df5734d98676ab 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -362,7 +362,7 @@ _Pickle_InitState(PickleState *st) } Py_CLEAR(compat_pickle); - st->codecs_encode = _PyImport_GetModuleAttrString("codecs", "encode"); + st->codecs_encode = PyImport_ImportModuleAttrString("codecs", "encode"); if (st->codecs_encode == NULL) { goto error; } @@ -373,7 +373,7 @@ _Pickle_InitState(PickleState *st) goto error; } - st->partial = _PyImport_GetModuleAttrString("functools", "partial"); + st->partial = PyImport_ImportModuleAttrString("functools", "partial"); if (!st->partial) goto error; diff --git a/Modules/_sqlite/blob.c b/Modules/_sqlite/blob.c index d1a549a971c24a..390375628bfb4f 100644 --- a/Modules/_sqlite/blob.c +++ b/Modules/_sqlite/blob.c @@ -9,6 +9,8 @@ #include "clinic/blob.c.h" #undef clinic_state +#define _pysqlite_Blob_CAST(op) ((pysqlite_Blob *)(op)) + /*[clinic input] module _sqlite3 class _sqlite3.Blob "pysqlite_Blob *" "clinic_state()->BlobType" @@ -29,32 +31,35 @@ close_blob(pysqlite_Blob *self) } static int -blob_traverse(pysqlite_Blob *self, visitproc visit, void *arg) +blob_traverse(PyObject *op, visitproc visit, void *arg) { + pysqlite_Blob *self = _pysqlite_Blob_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->connection); return 0; } static int -blob_clear(pysqlite_Blob *self) +blob_clear(PyObject *op) { + pysqlite_Blob *self = _pysqlite_Blob_CAST(op); Py_CLEAR(self->connection); return 0; } static void -blob_dealloc(pysqlite_Blob *self) +blob_dealloc(PyObject *op) { + pysqlite_Blob *self = _pysqlite_Blob_CAST(op); PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); close_blob(self); if (self->in_weakreflist != NULL) { - PyObject_ClearWeakRefs((PyObject*)self); + PyObject_ClearWeakRefs(op); } - tp->tp_clear((PyObject *)self); + (void)tp->tp_clear(op); tp->tp_free(self); Py_DECREF(tp); } @@ -373,8 +378,9 @@ blob_exit_impl(pysqlite_Blob *self, PyObject *type, PyObject *val, } static Py_ssize_t -blob_length(pysqlite_Blob *self) +blob_length(PyObject *op) { + pysqlite_Blob *self = 
_pysqlite_Blob_CAST(op); if (!check_blob(self)) { return -1; } @@ -449,8 +455,9 @@ subscript_slice(pysqlite_Blob *self, PyObject *item) } static PyObject * -blob_subscript(pysqlite_Blob *self, PyObject *item) +blob_subscript(PyObject *op, PyObject *item) { + pysqlite_Blob *self = _pysqlite_Blob_CAST(op); if (!check_blob(self)) { return NULL; } @@ -546,8 +553,9 @@ ass_subscript_slice(pysqlite_Blob *self, PyObject *item, PyObject *value) } static int -blob_ass_subscript(pysqlite_Blob *self, PyObject *item, PyObject *value) +blob_ass_subscript(PyObject *op, PyObject *item, PyObject *value) { + pysqlite_Blob *self = _pysqlite_Blob_CAST(op); if (!check_blob(self)) { return -1; } diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c index 62598ecc864120..16afd7eada113f 100644 --- a/Modules/_sqlite/connection.c +++ b/Modules/_sqlite/connection.c @@ -34,7 +34,6 @@ #include "prepare_protocol.h" #include "util.h" -#include "pycore_import.h" // _PyImport_GetModuleAttrString() #include "pycore_modsupport.h" // _PyArg_NoKeywords() #include "pycore_pyerrors.h" // _PyErr_ChainExceptions1() #include "pycore_pylifecycle.h" // _Py_IsInterpreterFinalizing() @@ -136,6 +135,8 @@ sqlite3_int64_converter(PyObject *obj, sqlite3_int64 *result) #include "clinic/connection.c.h" #undef clinic_state +#define _pysqlite_Connection_CAST(op) ((pysqlite_Connection *)(op)) + /*[clinic input] module _sqlite3 class _sqlite3.Connection "pysqlite_Connection *" "clinic_state()->ConnectionType" @@ -385,8 +386,9 @@ do { \ } while (0) static int -connection_traverse(pysqlite_Connection *self, visitproc visit, void *arg) +connection_traverse(PyObject *op, visitproc visit, void *arg) { + pysqlite_Connection *self = _pysqlite_Connection_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->statement_cache); Py_VISIT(self->cursors); @@ -410,8 +412,9 @@ clear_callback_context(callback_context *ctx) } static int -connection_clear(pysqlite_Connection *self) +connection_clear(PyObject *op) { + pysqlite_Connection *self = _pysqlite_Connection_CAST(op); Py_CLEAR(self->statement_cache); Py_CLEAR(self->cursors); Py_CLEAR(self->blobs); @@ -494,7 +497,8 @@ connection_finalize(PyObject *self) if (PyErr_ResourceWarning(self, 1, "unclosed database in %R", self)) { /* Spurious errors can appear at shutdown */ if (PyErr_ExceptionMatches(PyExc_Warning)) { - PyErr_WriteUnraisable(self); + PyErr_FormatUnraisable("Exception ignored while finalizing " + "database connection %R", self); } } } @@ -503,7 +507,8 @@ connection_finalize(PyObject *self) PyErr_Clear(); } else { - PyErr_WriteUnraisable((PyObject *)self); + PyErr_FormatUnraisable("Exception ignored while closing database %R", + self); } } @@ -518,7 +523,7 @@ connection_dealloc(PyObject *self) } PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); - tp->tp_clear(self); + (void)tp->tp_clear(self); tp->tp_free(self); Py_DECREF(tp); } @@ -890,7 +895,8 @@ print_or_clear_traceback(callback_context *ctx) assert(ctx != NULL); assert(ctx->state != NULL); if (ctx->state->enable_callback_tracebacks) { - PyErr_WriteUnraisable(ctx->callable); + PyErr_FormatUnraisable("Exception ignored on sqlite3 callback %R", + ctx->callable); } else { PyErr_Clear(); @@ -1716,8 +1722,10 @@ int pysqlite_check_thread(pysqlite_Connection* self) return 1; } -static PyObject* pysqlite_connection_get_isolation_level(pysqlite_Connection* self, void* unused) +static PyObject * +pysqlite_connection_get_isolation_level(PyObject *op, void *Py_UNUSED(closure)) { + pysqlite_Connection *self = 
_pysqlite_Connection_CAST(op); if (!pysqlite_check_connection(self)) { return NULL; } @@ -1727,16 +1735,20 @@ static PyObject* pysqlite_connection_get_isolation_level(pysqlite_Connection* se Py_RETURN_NONE; } -static PyObject* pysqlite_connection_get_total_changes(pysqlite_Connection* self, void* unused) +static PyObject * +pysqlite_connection_get_total_changes(PyObject *op, void *Py_UNUSED(closure)) { + pysqlite_Connection *self = _pysqlite_Connection_CAST(op); if (!pysqlite_check_connection(self)) { return NULL; } return PyLong_FromLong(sqlite3_total_changes(self->db)); } -static PyObject* pysqlite_connection_get_in_transaction(pysqlite_Connection* self, void* unused) +static PyObject * +pysqlite_connection_get_in_transaction(PyObject *op, void *Py_UNUSED(closure)) { + pysqlite_Connection *self = _pysqlite_Connection_CAST(op); if (!pysqlite_check_connection(self)) { return NULL; } @@ -1747,8 +1759,11 @@ static PyObject* pysqlite_connection_get_in_transaction(pysqlite_Connection* sel } static int -pysqlite_connection_set_isolation_level(pysqlite_Connection* self, PyObject* isolation_level, void *Py_UNUSED(ignored)) +pysqlite_connection_set_isolation_level(PyObject *op, + PyObject *isolation_level, + void *Py_UNUSED(ignored)) { + pysqlite_Connection *self = _pysqlite_Connection_CAST(op); if (isolation_level == NULL) { PyErr_SetString(PyExc_AttributeError, "cannot delete attribute"); return -1; @@ -1771,11 +1786,11 @@ pysqlite_connection_set_isolation_level(pysqlite_Connection* self, PyObject* iso } static PyObject * -pysqlite_connection_call(pysqlite_Connection *self, PyObject *args, - PyObject *kwargs) +pysqlite_connection_call(PyObject *op, PyObject *args, PyObject *kwargs) { PyObject* sql; pysqlite_Statement* statement; + pysqlite_Connection *self = _pysqlite_Connection_CAST(op); if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) { return NULL; @@ -2000,7 +2015,7 @@ pysqlite_connection_iterdump_impl(pysqlite_Connection *self, return NULL; } - PyObject *iterdump = _PyImport_GetModuleAttrString(MODULE_NAME ".dump", "_iterdump"); + PyObject *iterdump = PyImport_ImportModuleAttrString(MODULE_NAME ".dump", "_iterdump"); if (!iterdump) { if (!PyErr_Occurred()) { PyErr_SetString(self->OperationalError, @@ -2526,8 +2541,9 @@ getconfig_impl(pysqlite_Connection *self, int op) } static PyObject * -get_autocommit(pysqlite_Connection *self, void *Py_UNUSED(ctx)) +get_autocommit(PyObject *op, void *Py_UNUSED(closure)) { + pysqlite_Connection *self = _pysqlite_Connection_CAST(op); if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) { return NULL; } @@ -2541,8 +2557,9 @@ get_autocommit(pysqlite_Connection *self, void *Py_UNUSED(ctx)) } static int -set_autocommit(pysqlite_Connection *self, PyObject *val, void *Py_UNUSED(ctx)) +set_autocommit(PyObject *op, PyObject *val, void *Py_UNUSED(closure)) { + pysqlite_Connection *self = _pysqlite_Connection_CAST(op); if (!pysqlite_check_thread(self) || !pysqlite_check_connection(self)) { return -1; } @@ -2567,7 +2584,7 @@ set_autocommit(pysqlite_Connection *self, PyObject *val, void *Py_UNUSED(ctx)) } static PyObject * -get_sig(PyObject *self, void *Py_UNUSED(ctx)) +get_sig(PyObject *Py_UNUSED(self), void *Py_UNUSED(closure)) { return PyUnicode_FromString("(sql, /)"); } @@ -2577,11 +2594,12 @@ static const char connection_doc[] = PyDoc_STR("SQLite database connection object."); static PyGetSetDef connection_getset[] = { - {"isolation_level", (getter)pysqlite_connection_get_isolation_level, 
(setter)pysqlite_connection_set_isolation_level}, - {"total_changes", (getter)pysqlite_connection_get_total_changes, (setter)0}, - {"in_transaction", (getter)pysqlite_connection_get_in_transaction, (setter)0}, - {"autocommit", (getter)get_autocommit, (setter)set_autocommit}, - {"__text_signature__", get_sig, (setter)0}, + {"isolation_level", pysqlite_connection_get_isolation_level, + pysqlite_connection_set_isolation_level}, + {"total_changes", pysqlite_connection_get_total_changes, NULL}, + {"in_transaction", pysqlite_connection_get_in_transaction, NULL}, + {"autocommit", get_autocommit, set_autocommit}, + {"__text_signature__", get_sig, NULL}, {NULL} }; diff --git a/Modules/_sqlite/cursor.c b/Modules/_sqlite/cursor.c index 24e97fcf1897e9..02d598040775b0 100644 --- a/Modules/_sqlite/cursor.c +++ b/Modules/_sqlite/cursor.c @@ -44,6 +44,8 @@ typedef enum { #include "clinic/cursor.c.h" #undef clinic_state +#define _pysqlite_Cursor_CAST(op) ((pysqlite_Cursor *)(op)) + static inline int check_cursor_locked(pysqlite_Cursor *cur) { @@ -146,8 +148,9 @@ stmt_reset(pysqlite_Statement *self) } static int -cursor_traverse(pysqlite_Cursor *self, visitproc visit, void *arg) +cursor_traverse(PyObject *op, visitproc visit, void *arg) { + pysqlite_Cursor *self = _pysqlite_Cursor_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->connection); Py_VISIT(self->description); @@ -159,8 +162,9 @@ cursor_traverse(pysqlite_Cursor *self, visitproc visit, void *arg) } static int -cursor_clear(pysqlite_Cursor *self) +cursor_clear(PyObject *op) { + pysqlite_Cursor *self = _pysqlite_Cursor_CAST(op); Py_CLEAR(self->connection); Py_CLEAR(self->description); Py_CLEAR(self->row_cast_map); @@ -176,14 +180,15 @@ cursor_clear(pysqlite_Cursor *self) } static void -cursor_dealloc(pysqlite_Cursor *self) +cursor_dealloc(PyObject *op) { + pysqlite_Cursor *self = _pysqlite_Cursor_CAST(op); PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); if (self->in_weakreflist != NULL) { - PyObject_ClearWeakRefs((PyObject*)self); + PyObject_ClearWeakRefs(op); } - tp->tp_clear((PyObject *)self); + (void)tp->tp_clear(op); tp->tp_free(self); Py_DECREF(tp); } @@ -1087,8 +1092,9 @@ pysqlite_cursor_executescript_impl(pysqlite_Cursor *self, } static PyObject * -pysqlite_cursor_iternext(pysqlite_Cursor *self) +pysqlite_cursor_iternext(PyObject *op) { + pysqlite_Cursor *self = _pysqlite_Cursor_CAST(op); if (!check_cursor(self)) { return NULL; } @@ -1125,7 +1131,7 @@ pysqlite_cursor_iternext(pysqlite_Cursor *self) } if (!Py_IsNone(self->row_factory)) { PyObject *factory = self->row_factory; - PyObject *args[] = { (PyObject *)self, row, }; + PyObject *args[] = { op, row, }; PyObject *new_row = PyObject_Vectorcall(factory, args, 2, NULL); Py_SETREF(row, new_row); } @@ -1144,7 +1150,7 @@ pysqlite_cursor_fetchone_impl(pysqlite_Cursor *self) { PyObject* row; - row = pysqlite_cursor_iternext(self); + row = pysqlite_cursor_iternext((PyObject *)self); if (!row && !PyErr_Occurred()) { Py_RETURN_NONE; } @@ -1174,7 +1180,7 @@ pysqlite_cursor_fetchmany_impl(pysqlite_Cursor *self, int maxrows) return NULL; } - while ((row = pysqlite_cursor_iternext(self))) { + while ((row = pysqlite_cursor_iternext((PyObject *)self))) { if (PyList_Append(list, row) < 0) { Py_DECREF(row); break; @@ -1212,7 +1218,7 @@ pysqlite_cursor_fetchall_impl(pysqlite_Cursor *self) return NULL; } - while ((row = pysqlite_cursor_iternext(self))) { + while ((row = pysqlite_cursor_iternext((PyObject *)self))) { if (PyList_Append(list, row) < 0) { Py_DECREF(row); break; diff --git 
a/Modules/_sqlite/module.c b/Modules/_sqlite/module.c index 698e81d9b897d0..27e8dab92e0e67 100644 --- a/Modules/_sqlite/module.c +++ b/Modules/_sqlite/module.c @@ -33,8 +33,6 @@ #include "row.h" #include "blob.h" -#include "pycore_import.h" // _PyImport_GetModuleAttrString() - #if SQLITE_VERSION_NUMBER < 3015002 #error "SQLite 3.15.2 or higher required" #endif @@ -234,7 +232,7 @@ static int load_functools_lru_cache(PyObject *module) { pysqlite_state *state = pysqlite_get_state(module); - state->lru_cache = _PyImport_GetModuleAttrString("functools", "lru_cache"); + state->lru_cache = PyImport_ImportModuleAttrString("functools", "lru_cache"); if (state->lru_cache == NULL) { return -1; } @@ -619,7 +617,7 @@ module_clear(PyObject *module) static void module_free(void *module) { - module_clear((PyObject *)module); + (void)module_clear((PyObject *)module); } #define ADD_TYPE(module, type) \ diff --git a/Modules/_sqlite/prepare_protocol.c b/Modules/_sqlite/prepare_protocol.c index 44533225665dab..31092417cb480d 100644 --- a/Modules/_sqlite/prepare_protocol.c +++ b/Modules/_sqlite/prepare_protocol.c @@ -24,8 +24,7 @@ #include "prepare_protocol.h" static int -pysqlite_prepare_protocol_init(pysqlite_PrepareProtocol *self, PyObject *args, - PyObject *kwargs) +pysqlite_prepare_protocol_init(PyObject *self, PyObject *args, PyObject *kwargs) { return 0; } @@ -38,7 +37,7 @@ pysqlite_prepare_protocol_traverse(PyObject *self, visitproc visit, void *arg) } static void -pysqlite_prepare_protocol_dealloc(pysqlite_PrepareProtocol *self) +pysqlite_prepare_protocol_dealloc(PyObject *self) { PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); diff --git a/Modules/_sqlite/row.c b/Modules/_sqlite/row.c index 14555076a7e79a..79660008b180dc 100644 --- a/Modules/_sqlite/row.c +++ b/Modules/_sqlite/row.c @@ -32,6 +32,8 @@ #include "clinic/row.c.h" #undef clinic_state +#define _pysqlite_Row_CAST(op) ((pysqlite_Row *)(op)) + /*[clinic input] module _sqlite3 class _sqlite3.Row "pysqlite_Row *" "clinic_state()->RowType" @@ -39,16 +41,18 @@ class _sqlite3.Row "pysqlite_Row *" "clinic_state()->RowType" /*[clinic end generated code: output=da39a3ee5e6b4b0d input=966c53403d7f3a40]*/ static int -row_clear(pysqlite_Row *self) +row_clear(PyObject *op) { + pysqlite_Row *self = _pysqlite_Row_CAST(op); Py_CLEAR(self->data); Py_CLEAR(self->description); return 0; } static int -row_traverse(pysqlite_Row *self, visitproc visit, void *arg) +row_traverse(PyObject *op, visitproc visit, void *arg) { + pysqlite_Row *self = _pysqlite_Row_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->data); Py_VISIT(self->description); @@ -60,7 +64,7 @@ pysqlite_row_dealloc(PyObject *self) { PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); - tp->tp_clear(self); + (void)tp->tp_clear(self); tp->tp_free(self); Py_DECREF(tp); } @@ -94,10 +98,12 @@ pysqlite_row_new_impl(PyTypeObject *type, pysqlite_Cursor *cursor, return (PyObject *) self; } -PyObject* pysqlite_row_item(pysqlite_Row* self, Py_ssize_t idx) +static PyObject * +pysqlite_row_item(PyObject *op, Py_ssize_t idx) { - PyObject *item = PyTuple_GetItem(self->data, idx); - return Py_XNewRef(item); + pysqlite_Row *self = _pysqlite_Row_CAST(op); + PyObject *item = PyTuple_GetItem(self->data, idx); + return Py_XNewRef(item); } static int @@ -129,10 +135,11 @@ equal_ignore_case(PyObject *left, PyObject *right) } static PyObject * -pysqlite_row_subscript(pysqlite_Row *self, PyObject *idx) +pysqlite_row_subscript(PyObject *op, PyObject *idx) { Py_ssize_t _idx; Py_ssize_t nitems, i; + 
pysqlite_Row *self = _pysqlite_Row_CAST(op); if (PyLong_Check(idx)) { _idx = PyNumber_AsSsize_t(idx, PyExc_IndexError); @@ -174,8 +181,9 @@ pysqlite_row_subscript(pysqlite_Row *self, PyObject *idx) } static Py_ssize_t -pysqlite_row_length(pysqlite_Row* self) +pysqlite_row_length(PyObject *op) { + pysqlite_Row *self = _pysqlite_Row_CAST(op); return PyTuple_GET_SIZE(self->data); } @@ -208,24 +216,30 @@ pysqlite_row_keys_impl(pysqlite_Row *self) return list; } -static PyObject* pysqlite_iter(pysqlite_Row* self) +static PyObject * +pysqlite_iter(PyObject *op) { + pysqlite_Row *self = _pysqlite_Row_CAST(op); return PyObject_GetIter(self->data); } -static Py_hash_t pysqlite_row_hash(pysqlite_Row *self) +static Py_hash_t +pysqlite_row_hash(PyObject *op) { + pysqlite_Row *self = _pysqlite_Row_CAST(op); return PyObject_Hash(self->description) ^ PyObject_Hash(self->data); } -static PyObject* pysqlite_row_richcompare(pysqlite_Row *self, PyObject *_other, int opid) +static PyObject * +pysqlite_row_richcompare(PyObject *op, PyObject *opother, int opid) { if (opid != Py_EQ && opid != Py_NE) Py_RETURN_NOTIMPLEMENTED; + pysqlite_Row *self = _pysqlite_Row_CAST(op); pysqlite_state *state = pysqlite_get_state_by_type(Py_TYPE(self)); - if (PyObject_TypeCheck(_other, state->RowType)) { - pysqlite_Row *other = (pysqlite_Row *)_other; + if (PyObject_TypeCheck(opother, state->RowType)) { + pysqlite_Row *other = (pysqlite_Row *)opother; int eq = PyObject_RichCompareBool(self->description, other->description, Py_EQ); if (eq < 0) { return NULL; diff --git a/Modules/_sqlite/statement.c b/Modules/_sqlite/statement.c index 229bfc3b504165..facced0dfbfafd 100644 --- a/Modules/_sqlite/statement.c +++ b/Modules/_sqlite/statement.c @@ -25,6 +25,8 @@ #include "statement.h" #include "util.h" +#define _pysqlite_Statement_CAST(op) ((pysqlite_Statement *)(op)) + /* prototypes */ static const char *lstrip_sql(const char *sql); @@ -99,10 +101,11 @@ pysqlite_statement_create(pysqlite_Connection *connection, PyObject *sql) } static void -stmt_dealloc(pysqlite_Statement *self) +stmt_dealloc(PyObject *op) { + pysqlite_Statement *self = _pysqlite_Statement_CAST(op); PyTypeObject *tp = Py_TYPE(self); - PyObject_GC_UnTrack(self); + PyObject_GC_UnTrack(op); if (self->st) { Py_BEGIN_ALLOW_THREADS sqlite3_finalize(self->st); @@ -114,7 +117,7 @@ stmt_dealloc(pysqlite_Statement *self) } static int -stmt_traverse(pysqlite_Statement *self, visitproc visit, void *arg) +stmt_traverse(PyObject *self, visitproc visit, void *arg) { Py_VISIT(Py_TYPE(self)); return 0; diff --git a/Modules/_sre/sre.c b/Modules/_sre/sre.c index d0025dd21e045b..0d8d4843d33c1b 100644 --- a/Modules/_sre/sre.c +++ b/Modules/_sre/sre.c @@ -1169,7 +1169,7 @@ compile_template(_sremodulestate *module_state, /* delegate to Python code */ PyObject *func = module_state->compile_template; if (func == NULL) { - func = _PyImport_GetModuleAttrString("re", "_compile_template"); + func = PyImport_ImportModuleAttrString("re", "_compile_template"); if (func == NULL) { return NULL; } diff --git a/Modules/_ssl.c b/Modules/_ssl.c index c15a582a92aa4a..85e917fbbb7093 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -4666,7 +4666,8 @@ _servername_callback(SSL *s, int *al, void *args) servername_bytes = PyBytes_FromString(servername); if (servername_bytes == NULL) { - PyErr_WriteUnraisable((PyObject *) sslctx); + PyErr_FormatUnraisable("Exception ignored " + "in ssl servername callback"); goto error; } /* server_hostname was encoded to an A-label by our caller; put it @@ -4674,7 +4675,10 @@ 
_servername_callback(SSL *s, int *al, void *args) */ servername_str = PyUnicode_FromEncodedObject(servername_bytes, "ascii", NULL); if (servername_str == NULL) { - PyErr_WriteUnraisable(servername_bytes); + PyErr_FormatUnraisable("Exception ignored " + "in ssl servername callback " + "while decoding name %R", + servername_bytes); Py_DECREF(servername_bytes); goto error; } @@ -4687,7 +4691,10 @@ _servername_callback(SSL *s, int *al, void *args) Py_DECREF(ssl_socket); if (result == NULL) { - PyErr_WriteUnraisable(sslctx->set_sni_cb); + PyErr_FormatUnraisable("Exception ignored " + "in ssl servername callback " + "while calling set SNI callback %R", + sslctx->set_sni_cb); *al = SSL_AD_HANDSHAKE_FAILURE; ret = SSL_TLSEXT_ERR_ALERT_FATAL; } @@ -4700,7 +4707,11 @@ _servername_callback(SSL *s, int *al, void *args) } else { *al = (int) PyLong_AsLong(result); if (PyErr_Occurred()) { - PyErr_WriteUnraisable(result); + PyErr_FormatUnraisable("Exception ignored " + "in ssl servername callback " + "while calling set SNI callback " + "(result=%R)", + result); *al = SSL_AD_INTERNAL_ERROR; } ret = SSL_TLSEXT_ERR_ALERT_FATAL; @@ -5007,7 +5018,8 @@ static unsigned int psk_client_callback(SSL *s, error: if (PyErr_Occurred()) { - PyErr_WriteUnraisable(callback); + PyErr_FormatUnraisable("Exception ignored in ssl PSK client callback " + "while calling callback %R", callback); } PyGILState_Release(gstate); return 0; @@ -5116,7 +5128,8 @@ static unsigned int psk_server_callback(SSL *s, error: if (PyErr_Occurred()) { - PyErr_WriteUnraisable(callback); + PyErr_FormatUnraisable("Exception ignored in ssl PSK server callback " + "while calling callback %R", callback); } PyGILState_Release(gstate); return 0; diff --git a/Modules/_ssl/cert.c b/Modules/_ssl/cert.c index bda66dc4d94ae6..c11ed8e3a282e6 100644 --- a/Modules/_ssl/cert.c +++ b/Modules/_ssl/cert.c @@ -153,10 +153,13 @@ _x509name_print(_sslmodulestate *state, X509_NAME *name, int indent, unsigned lo * PySSLCertificate_Type */ +#define _PySSLCertificate_CAST(op) ((PySSLCertificate *)(op)) + static PyObject * -certificate_repr(PySSLCertificate *self) +certificate_repr(PyObject *op) { PyObject *osubject, *result; + PySSLCertificate *self = _PySSLCertificate_CAST(op); /* subject string is ASCII encoded, UTF-8 chars are quoted */ osubject = _x509name_print( @@ -176,8 +179,9 @@ certificate_repr(PySSLCertificate *self) } static Py_hash_t -certificate_hash(PySSLCertificate *self) +certificate_hash(PyObject *op) { + PySSLCertificate *self = _PySSLCertificate_CAST(op); if (self->hash == (Py_hash_t)-1) { unsigned long hash; hash = X509_subject_name_hash(self->cert); @@ -191,19 +195,20 @@ certificate_hash(PySSLCertificate *self) } static PyObject * -certificate_richcompare(PySSLCertificate *self, PyObject *other, int op) +certificate_richcompare(PyObject *lhs, PyObject *rhs, int op) { int cmp; + PySSLCertificate *self = _PySSLCertificate_CAST(lhs); _sslmodulestate *state = get_state_cert(self); - if (Py_TYPE(other) != state->PySSLCertificate_Type) { + if (Py_TYPE(rhs) != state->PySSLCertificate_Type) { Py_RETURN_NOTIMPLEMENTED; } /* only support == and != */ if ((op != Py_EQ) && (op != Py_NE)) { Py_RETURN_NOTIMPLEMENTED; } - cmp = X509_cmp(self->cert, ((PySSLCertificate*)other)->cert); + cmp = X509_cmp(self->cert, ((PySSLCertificate*)rhs)->cert); if (((op == Py_EQ) && (cmp == 0)) || ((op == Py_NE) && (cmp != 0))) { Py_RETURN_TRUE; } else { @@ -212,11 +217,12 @@ certificate_richcompare(PySSLCertificate *self, PyObject *other, int op) } static void 
-certificate_dealloc(PySSLCertificate *self) +certificate_dealloc(PyObject *op) { + PySSLCertificate *self = _PySSLCertificate_CAST(op); PyTypeObject *tp = Py_TYPE(self); X509_free(self->cert); - Py_TYPE(self)->tp_free(self); + (void)Py_TYPE(self)->tp_free(self); Py_DECREF(tp); } diff --git a/Modules/_testcapi/clinic/file.c.h b/Modules/_testcapi/clinic/file.c.h index fddbf48071bd3b..6efb6b47353443 100644 --- a/Modules/_testcapi/clinic/file.c.h +++ b/Modules/_testcapi/clinic/file.c.h @@ -4,6 +4,33 @@ preserve #include "pycore_modsupport.h" // _PyArg_CheckPositional() +PyDoc_STRVAR(_testcapi_pyfile_newstdprinter__doc__, +"pyfile_newstdprinter($module, fd, /)\n" +"--\n" +"\n"); + +#define _TESTCAPI_PYFILE_NEWSTDPRINTER_METHODDEF \ + {"pyfile_newstdprinter", (PyCFunction)_testcapi_pyfile_newstdprinter, METH_O, _testcapi_pyfile_newstdprinter__doc__}, + +static PyObject * +_testcapi_pyfile_newstdprinter_impl(PyObject *module, int fd); + +static PyObject * +_testcapi_pyfile_newstdprinter(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + int fd; + + fd = PyLong_AsInt(arg); + if (fd == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = _testcapi_pyfile_newstdprinter_impl(module, fd); + +exit: + return return_value; +} + PyDoc_STRVAR(_testcapi_py_fopen__doc__, "py_fopen($module, path, mode, /)\n" "--\n" @@ -34,4 +61,4 @@ _testcapi_py_fopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=c4dc92400306c3eb input=a9049054013a1b77]*/ +/*[clinic end generated code: output=e943bbd7f181d079 input=a9049054013a1b77]*/ diff --git a/Modules/_testcapi/code.c b/Modules/_testcapi/code.c index c0193489b6f340..94f752c9726189 100644 --- a/Modules/_testcapi/code.c +++ b/Modules/_testcapi/code.c @@ -47,7 +47,6 @@ static PyObject * test_code_extra(PyObject* self, PyObject *Py_UNUSED(callable)) { PyObject *result = NULL; - PyObject *test_module = NULL; PyObject *test_func = NULL; // Get or initialize interpreter-specific code object storage index @@ -62,11 +61,8 @@ test_code_extra(PyObject* self, PyObject *Py_UNUSED(callable)) // Get a function to test with // This can be any Python function. Use `test.test_misc.testfunction`. 
- test_module = PyImport_ImportModule("test.test_capi.test_misc"); - if (!test_module) { - goto finally; - } - test_func = PyObject_GetAttrString(test_module, "testfunction"); + test_func = PyImport_ImportModuleAttrString("test.test_capi.test_misc", + "testfunction"); if (!test_func) { goto finally; } @@ -102,7 +98,6 @@ test_code_extra(PyObject* self, PyObject *Py_UNUSED(callable)) } result = Py_NewRef(Py_None); finally: - Py_XDECREF(test_module); Py_XDECREF(test_func); return result; } diff --git a/Modules/_testcapi/dict.c b/Modules/_testcapi/dict.c index 307797f98f12ae..b7c73d7332bd4e 100644 --- a/Modules/_testcapi/dict.c +++ b/Modules/_testcapi/dict.c @@ -181,6 +181,83 @@ dict_popstring_null(PyObject *self, PyObject *args) RETURN_INT(PyDict_PopString(dict, key, NULL)); } + +static int +test_dict_inner(PyObject *self, int count) +{ + Py_ssize_t pos = 0, iterations = 0; + int i; + PyObject *dict = PyDict_New(); + PyObject *v, *k; + + if (dict == NULL) + return -1; + + for (i = 0; i < count; i++) { + v = PyLong_FromLong(i); + if (v == NULL) { + goto error; + } + if (PyDict_SetItem(dict, v, v) < 0) { + Py_DECREF(v); + goto error; + } + Py_DECREF(v); + } + + k = v = UNINITIALIZED_PTR; + while (PyDict_Next(dict, &pos, &k, &v)) { + PyObject *o; + iterations++; + + assert(k != UNINITIALIZED_PTR); + assert(v != UNINITIALIZED_PTR); + i = PyLong_AS_LONG(v) + 1; + o = PyLong_FromLong(i); + if (o == NULL) { + goto error; + } + if (PyDict_SetItem(dict, k, o) < 0) { + Py_DECREF(o); + goto error; + } + Py_DECREF(o); + k = v = UNINITIALIZED_PTR; + } + assert(k == UNINITIALIZED_PTR); + assert(v == UNINITIALIZED_PTR); + + Py_DECREF(dict); + + if (iterations != count) { + PyErr_SetString( + PyExc_AssertionError, + "test_dict_iteration: dict iteration went wrong "); + return -1; + } else { + return 0; + } +error: + Py_DECREF(dict); + return -1; +} + + +static PyObject* +test_dict_iteration(PyObject* self, PyObject *Py_UNUSED(ignored)) +{ + int i; + + for (i = 0; i < 200; i++) { + if (test_dict_inner(self, i) < 0) { + return NULL; + } + } + + Py_RETURN_NONE; +} + + static PyMethodDef test_methods[] = { {"dict_containsstring", dict_containsstring, METH_VARARGS}, {"dict_getitemref", dict_getitemref, METH_VARARGS}, @@ -191,6 +268,7 @@ static PyMethodDef test_methods[] = { {"dict_pop_null", dict_pop_null, METH_VARARGS}, {"dict_popstring", dict_popstring, METH_VARARGS}, {"dict_popstring_null", dict_popstring_null, METH_VARARGS}, + {"test_dict_iteration", test_dict_iteration, METH_NOARGS}, {NULL}, }; diff --git a/Modules/_testcapi/file.c b/Modules/_testcapi/file.c index d15173fc7959e5..060e0f50598d7e 100644 --- a/Modules/_testcapi/file.c +++ b/Modules/_testcapi/file.c @@ -5,11 +5,29 @@ #include "util.h" #include "clinic/file.c.h" + /*[clinic input] module _testcapi [clinic start generated code]*/ /*[clinic end generated code: output=da39a3ee5e6b4b0d input=6361033e795369fc]*/ + +/*[clinic input] +_testcapi.pyfile_newstdprinter + + fd: int + / + +[clinic start generated code]*/ + +static PyObject * +_testcapi_pyfile_newstdprinter_impl(PyObject *module, int fd) +/*[clinic end generated code: output=8a2d1c57b6892db3 input=442f1824142262ea]*/ +{ + return PyFile_NewStdPrinter(fd); +} + + /*[clinic input] _testcapi.py_fopen @@ -38,7 +56,9 @@ _testcapi_py_fopen_impl(PyObject *module, PyObject *path, const char *mode, return PyBytes_FromStringAndSize(buffer, size); } + static PyMethodDef test_methods[] = { + _TESTCAPI_PYFILE_NEWSTDPRINTER_METHODDEF _TESTCAPI_PY_FOPEN_METHODDEF {NULL}, }; @@ -46,9 +66,5 @@ static 
PyMethodDef test_methods[] = { int _PyTestCapi_Init_File(PyObject *m) { - if (PyModule_AddFunctions(m, test_methods) < 0){ - return -1; - } - - return 0; + return PyModule_AddFunctions(m, test_methods); } diff --git a/Modules/_testcapi/float.c b/Modules/_testcapi/float.c index 15ea97ec4520b7..e3869134c84d43 100644 --- a/Modules/_testcapi/float.c +++ b/Modules/_testcapi/float.c @@ -99,9 +99,68 @@ _testcapi_float_unpack_impl(PyObject *module, const char *data, return PyFloat_FromDouble(d); } + +/* Test PyOS_string_to_double. */ +static PyObject * +test_string_to_double(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + double result; + const char *msg; + +#define CHECK_STRING(STR, expected) \ + do { \ + result = PyOS_string_to_double(STR, NULL, NULL); \ + if (result == -1.0 && PyErr_Occurred()) { \ + return NULL; \ + } \ + if (result != (double)expected) { \ + msg = "conversion of " STR " to float failed"; \ + goto fail; \ + } \ + } while (0) + +#define CHECK_INVALID(STR) \ + do { \ + result = PyOS_string_to_double(STR, NULL, NULL); \ + if (result == -1.0 && PyErr_Occurred()) { \ + if (PyErr_ExceptionMatches(PyExc_ValueError)) { \ + PyErr_Clear(); \ + } \ + else { \ + return NULL; \ + } \ + } \ + else { \ + msg = "conversion of " STR " didn't raise ValueError"; \ + goto fail; \ + } \ + } while (0) + + CHECK_STRING("0.1", 0.1); + CHECK_STRING("1.234", 1.234); + CHECK_STRING("-1.35", -1.35); + CHECK_STRING(".1e01", 1.0); + CHECK_STRING("2.e-2", 0.02); + + CHECK_INVALID(" 0.1"); + CHECK_INVALID("\t\n-3"); + CHECK_INVALID(".123 "); + CHECK_INVALID("3\n"); + CHECK_INVALID("123abc"); + + Py_RETURN_NONE; + fail: + PyErr_Format(PyExc_AssertionError, "test_string_to_double: %s", msg); + return NULL; +#undef CHECK_STRING +#undef CHECK_INVALID +} + + static PyMethodDef test_methods[] = { _TESTCAPI_FLOAT_PACK_METHODDEF _TESTCAPI_FLOAT_UNPACK_METHODDEF + {"test_string_to_double", test_string_to_double, METH_NOARGS}, {NULL}, }; diff --git a/Modules/_testcapi/frame.c b/Modules/_testcapi/frame.c new file mode 100644 index 00000000000000..5748dca948ea94 --- /dev/null +++ b/Modules/_testcapi/frame.c @@ -0,0 +1,134 @@ +#include "parts.h" +#include "util.h" + +#include "frameobject.h" // PyFrame_New() + + +static PyObject * +frame_getlocals(PyObject *self, PyObject *frame) +{ + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + return PyFrame_GetLocals((PyFrameObject *)frame); +} + + +static PyObject * +frame_getglobals(PyObject *self, PyObject *frame) +{ + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + return PyFrame_GetGlobals((PyFrameObject *)frame); +} + + +static PyObject * +frame_getgenerator(PyObject *self, PyObject *frame) +{ + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + return PyFrame_GetGenerator((PyFrameObject *)frame); +} + + +static PyObject * +frame_getbuiltins(PyObject *self, PyObject *frame) +{ + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + return PyFrame_GetBuiltins((PyFrameObject *)frame); +} + + +static PyObject * +frame_getlasti(PyObject *self, PyObject *frame) +{ + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + int lasti = PyFrame_GetLasti((PyFrameObject *)frame); + if (lasti < 0) { + assert(lasti == -1); + Py_RETURN_NONE; + } + return PyLong_FromLong(lasti); 
+} + + +static PyObject * +frame_new(PyObject *self, PyObject *args) +{ + PyObject *code, *globals, *locals; + if (!PyArg_ParseTuple(args, "OOO", &code, &globals, &locals)) { + return NULL; + } + if (!PyCode_Check(code)) { + PyErr_SetString(PyExc_TypeError, "argument must be a code object"); + return NULL; + } + PyThreadState *tstate = PyThreadState_Get(); + + return (PyObject *)PyFrame_New(tstate, (PyCodeObject *)code, globals, locals); +} + + +static PyObject * +frame_getvar(PyObject *self, PyObject *args) +{ + PyObject *frame, *name; + if (!PyArg_ParseTuple(args, "OO", &frame, &name)) { + return NULL; + } + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + + return PyFrame_GetVar((PyFrameObject *)frame, name); +} + + +static PyObject * +frame_getvarstring(PyObject *self, PyObject *args) +{ + PyObject *frame; + const char *name; + if (!PyArg_ParseTuple(args, "Oy", &frame, &name)) { + return NULL; + } + if (!PyFrame_Check(frame)) { + PyErr_SetString(PyExc_TypeError, "argument must be a frame"); + return NULL; + } + + return PyFrame_GetVarString((PyFrameObject *)frame, name); +} + + +static PyMethodDef test_methods[] = { + {"frame_getlocals", frame_getlocals, METH_O, NULL}, + {"frame_getglobals", frame_getglobals, METH_O, NULL}, + {"frame_getgenerator", frame_getgenerator, METH_O, NULL}, + {"frame_getbuiltins", frame_getbuiltins, METH_O, NULL}, + {"frame_getlasti", frame_getlasti, METH_O, NULL}, + {"frame_new", frame_new, METH_VARARGS, NULL}, + {"frame_getvar", frame_getvar, METH_VARARGS, NULL}, + {"frame_getvarstring", frame_getvarstring, METH_VARARGS, NULL}, + {NULL}, +}; + +int +_PyTestCapi_Init_Frame(PyObject *m) +{ + return PyModule_AddFunctions(m, test_methods); +} + diff --git a/Modules/_testcapi/function.c b/Modules/_testcapi/function.c new file mode 100644 index 00000000000000..ec1ba508df2ce9 --- /dev/null +++ b/Modules/_testcapi/function.c @@ -0,0 +1,143 @@ +#include "parts.h" +#include "util.h" + + +static PyObject * +function_get_code(PyObject *self, PyObject *func) +{ + PyObject *code = PyFunction_GetCode(func); + if (code != NULL) { + return Py_NewRef(code); + } else { + return NULL; + } +} + + +static PyObject * +function_get_globals(PyObject *self, PyObject *func) +{ + PyObject *globals = PyFunction_GetGlobals(func); + if (globals != NULL) { + return Py_NewRef(globals); + } else { + return NULL; + } +} + + +static PyObject * +function_get_module(PyObject *self, PyObject *func) +{ + PyObject *module = PyFunction_GetModule(func); + if (module != NULL) { + return Py_NewRef(module); + } else { + return NULL; + } +} + + +static PyObject * +function_get_defaults(PyObject *self, PyObject *func) +{ + PyObject *defaults = PyFunction_GetDefaults(func); + if (defaults != NULL) { + return Py_NewRef(defaults); + } else if (PyErr_Occurred()) { + return NULL; + } else { + Py_RETURN_NONE; // This can happen when `defaults` are set to `None` + } +} + + +static PyObject * +function_set_defaults(PyObject *self, PyObject *args) +{ + PyObject *func = NULL, *defaults = NULL; + if (!PyArg_ParseTuple(args, "OO", &func, &defaults)) { + return NULL; + } + int result = PyFunction_SetDefaults(func, defaults); + if (result == -1) + return NULL; + Py_RETURN_NONE; +} + + +static PyObject * +function_get_kw_defaults(PyObject *self, PyObject *func) +{ + PyObject *defaults = PyFunction_GetKwDefaults(func); + if (defaults != NULL) { + return Py_NewRef(defaults); + } else if (PyErr_Occurred()) { + return NULL; + } else { + Py_RETURN_NONE; // This 
can happen when `kwdefaults` are set to `None` + } +} + + +static PyObject * +function_set_kw_defaults(PyObject *self, PyObject *args) +{ + PyObject *func = NULL, *defaults = NULL; + if (!PyArg_ParseTuple(args, "OO", &func, &defaults)) { + return NULL; + } + int result = PyFunction_SetKwDefaults(func, defaults); + if (result == -1) + return NULL; + Py_RETURN_NONE; +} + + +static PyObject * +function_get_closure(PyObject *self, PyObject *func) +{ + PyObject *closure = PyFunction_GetClosure(func); + if (closure != NULL) { + return Py_NewRef(closure); + } else if (PyErr_Occurred()) { + return NULL; + } else { + Py_RETURN_NONE; // This can happen when `closure` is set to `None` + } +} + + +static PyObject * +function_set_closure(PyObject *self, PyObject *args) +{ + PyObject *func = NULL, *closure = NULL; + if (!PyArg_ParseTuple(args, "OO", &func, &closure)) { + return NULL; + } + int result = PyFunction_SetClosure(func, closure); + if (result == -1) { + return NULL; + } + Py_RETURN_NONE; +} + + +static PyMethodDef test_methods[] = { + {"function_get_code", function_get_code, METH_O, NULL}, + {"function_get_globals", function_get_globals, METH_O, NULL}, + {"function_get_module", function_get_module, METH_O, NULL}, + {"function_get_defaults", function_get_defaults, METH_O, NULL}, + {"function_set_defaults", function_set_defaults, METH_VARARGS, NULL}, + {"function_get_kw_defaults", function_get_kw_defaults, METH_O, NULL}, + {"function_set_kw_defaults", function_set_kw_defaults, METH_VARARGS, NULL}, + {"function_get_closure", function_get_closure, METH_O, NULL}, + {"function_set_closure", function_set_closure, METH_VARARGS, NULL}, + {NULL}, +}; + +int +_PyTestCapi_Init_Function(PyObject *m) +{ + return PyModule_AddFunctions(m, test_methods); +} diff --git a/Modules/_testcapi/gc.c b/Modules/_testcapi/gc.c index 7e33e0d4861e84..3691796302e500 100644 --- a/Modules/_testcapi/gc.c +++ b/Modules/_testcapi/gc.c @@ -94,7 +94,7 @@ slot_tp_del(PyObject *self) PyObject *tp_del = PyUnicode_InternFromString("__tp_del__"); if (tp_del == NULL) { - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while deallocating"); PyErr_SetRaisedException(exc); return; } @@ -104,10 +104,13 @@ slot_tp_del(PyObject *self) if (del != NULL) { res = PyObject_CallOneArg(del, self); Py_DECREF(del); - if (res == NULL) - PyErr_WriteUnraisable(del); - else + if (res == NULL) { + PyErr_FormatUnraisable("Exception ignored while calling " + "deallocator %R", del); + } + else { Py_DECREF(res); + } } /* Restore the saved exception. 
*/ diff --git a/Modules/_testcapi/immortal.c b/Modules/_testcapi/immortal.c index 5bdae2e99d5375..0663c3781d426a 100644 --- a/Modules/_testcapi/immortal.c +++ b/Modules/_testcapi/immortal.c @@ -1,5 +1,8 @@ #include "parts.h" +#define Py_BUILD_CORE +#include "internal/pycore_long.h" // IMMORTALITY_BIT_MASK + int verify_immortality(PyObject *object) { assert(_Py_IsImmortal(object)); @@ -26,7 +29,17 @@ static PyObject * test_immortal_small_ints(PyObject *self, PyObject *Py_UNUSED(ignored)) { for (int i = -5; i <= 256; i++) { - assert(verify_immortality(PyLong_FromLong(i))); + PyObject *obj = PyLong_FromLong(i); + assert(verify_immortality(obj)); + int has_int_immortal_bit = ((PyLongObject *)obj)->long_value.lv_tag & IMMORTALITY_BIT_MASK; + assert(has_int_immortal_bit); + } + for (int i = 257; i <= 260; i++) { + PyObject *obj = PyLong_FromLong(i); + assert(obj); + int has_int_immortal_bit = ((PyLongObject *)obj)->long_value.lv_tag & IMMORTALITY_BIT_MASK; + assert(!has_int_immortal_bit); + Py_DECREF(obj); } Py_RETURN_NONE; } diff --git a/Modules/_testcapi/import.c b/Modules/_testcapi/import.c new file mode 100644 index 00000000000000..27d37498f3cd83 --- /dev/null +++ b/Modules/_testcapi/import.c @@ -0,0 +1,44 @@ +#include "parts.h" +#include "util.h" + +// Test PyImport_ImportModuleAttr() +static PyObject * +pyimport_importmoduleattr(PyObject *self, PyObject *args) +{ + PyObject *mod_name, *attr_name; + if (!PyArg_ParseTuple(args, "OO", &mod_name, &attr_name)) { + return NULL; + } + NULLABLE(mod_name); + NULLABLE(attr_name); + + return PyImport_ImportModuleAttr(mod_name, attr_name); +} + + +// Test PyImport_ImportModuleAttrString() +static PyObject * +pyimport_importmoduleattrstring(PyObject *self, PyObject *args) +{ + const char *mod_name, *attr_name; + Py_ssize_t len; + if (!PyArg_ParseTuple(args, "z#z#", &mod_name, &len, &attr_name, &len)) { + return NULL; + } + + return PyImport_ImportModuleAttrString(mod_name, attr_name); +} + + +static PyMethodDef test_methods[] = { + {"PyImport_ImportModuleAttr", pyimport_importmoduleattr, METH_VARARGS}, + {"PyImport_ImportModuleAttrString", pyimport_importmoduleattrstring, METH_VARARGS}, + {NULL}, +}; + +int +_PyTestCapi_Init_Import(PyObject *m) +{ + return PyModule_AddFunctions(m, test_methods); +} + diff --git a/Modules/_testcapi/list.c b/Modules/_testcapi/list.c index 09cec4c30c8c36..530b47780ac94e 100644 --- a/Modules/_testcapi/list.c +++ b/Modules/_testcapi/list.c @@ -60,22 +60,61 @@ list_extend(PyObject* Py_UNUSED(module), PyObject *args) } +static PyObject* +test_list_api(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyObject* list; + int i; + + /* SF bug 132008: PyList_Reverse segfaults */ +#define NLIST 30 + list = PyList_New(NLIST); + if (list == (PyObject*)NULL) + return (PyObject*)NULL; + /* list = range(NLIST) */ + for (i = 0; i < NLIST; ++i) { + PyObject* anint = PyLong_FromLong(i); + if (anint == (PyObject*)NULL) { + Py_DECREF(list); + return (PyObject*)NULL; + } + PyList_SET_ITEM(list, i, anint); + } + /* list.reverse(), via PyList_Reverse() */ + i = PyList_Reverse(list); /* should not blow up! 
*/ + if (i != 0) { + Py_DECREF(list); + return (PyObject*)NULL; + } + /* Check that list == range(29, -1, -1) now */ + for (i = 0; i < NLIST; ++i) { + PyObject* anint = PyList_GET_ITEM(list, i); + if (PyLong_AS_LONG(anint) != NLIST-1-i) { + PyErr_SetString(PyExc_AssertionError, + "test_list_api: reverse screwed up"); + Py_DECREF(list); + return (PyObject*)NULL; + } + } + Py_DECREF(list); +#undef NLIST + + Py_RETURN_NONE; +} + + static PyMethodDef test_methods[] = { {"list_get_size", list_get_size, METH_O}, {"list_get_item", list_get_item, METH_VARARGS}, {"list_set_item", list_set_item, METH_VARARGS}, {"list_clear", list_clear, METH_O}, {"list_extend", list_extend, METH_VARARGS}, - + {"test_list_api", test_list_api, METH_NOARGS}, {NULL}, }; int _PyTestCapi_Init_List(PyObject *m) { - if (PyModule_AddFunctions(m, test_methods) < 0) { - return -1; - } - - return 0; + return PyModule_AddFunctions(m, test_methods); } diff --git a/Modules/_testcapi/mem.c b/Modules/_testcapi/mem.c index ecae5ba26226a6..7237fb94c3f51f 100644 --- a/Modules/_testcapi/mem.c +++ b/Modules/_testcapi/mem.c @@ -584,6 +584,106 @@ tracemalloc_untrack(PyObject *self, PyObject *args) Py_RETURN_NONE; } + +static void +tracemalloc_track_race_thread(void *data) +{ + PyTraceMalloc_Track(123, 10, 1); + PyTraceMalloc_Untrack(123, 10); + + PyThread_type_lock lock = (PyThread_type_lock)data; + PyThread_release_lock(lock); +} + +// gh-128679: Test fix for tracemalloc.stop() race condition +static PyObject * +tracemalloc_track_race(PyObject *self, PyObject *args) +{ +#define NTHREAD 50 + PyObject *tracemalloc = NULL; + PyObject *stop = NULL; + PyThread_type_lock locks[NTHREAD]; + memset(locks, 0, sizeof(locks)); + + // Call tracemalloc.start() + tracemalloc = PyImport_ImportModule("tracemalloc"); + if (tracemalloc == NULL) { + goto error; + } + PyObject *start = PyObject_GetAttrString(tracemalloc, "start"); + if (start == NULL) { + goto error; + } + PyObject *res = PyObject_CallNoArgs(start); + Py_DECREF(start); + if (res == NULL) { + goto error; + } + Py_DECREF(res); + + stop = PyObject_GetAttrString(tracemalloc, "stop"); + Py_CLEAR(tracemalloc); + if (stop == NULL) { + goto error; + } + + // Start threads + for (size_t i = 0; i < NTHREAD; i++) { + PyThread_type_lock lock = PyThread_allocate_lock(); + if (!lock) { + PyErr_NoMemory(); + goto error; + } + locks[i] = lock; + PyThread_acquire_lock(lock, 1); + + unsigned long thread; + thread = PyThread_start_new_thread(tracemalloc_track_race_thread, + (void*)lock); + if (thread == (unsigned long)-1) { + PyErr_SetString(PyExc_RuntimeError, "can't start new thread"); + goto error; + } + } + + // Call tracemalloc.stop() while threads are running + res = PyObject_CallNoArgs(stop); + Py_CLEAR(stop); + if (res == NULL) { + goto error; + } + Py_DECREF(res); + + // Wait until threads complete with the GIL released + Py_BEGIN_ALLOW_THREADS + for (size_t i = 0; i < NTHREAD; i++) { + PyThread_type_lock lock = locks[i]; + PyThread_acquire_lock(lock, 1); + PyThread_release_lock(lock); + } + Py_END_ALLOW_THREADS + + // Free threads locks + for (size_t i=0; i < NTHREAD; i++) { + PyThread_type_lock lock = locks[i]; + PyThread_free_lock(lock); + } + Py_RETURN_NONE; + +error: + Py_CLEAR(tracemalloc); + Py_CLEAR(stop); + for (size_t i=0; i < NTHREAD; i++) { + PyThread_type_lock lock = locks[i]; + if (lock) { + PyThread_free_lock(lock); + } + } + return NULL; +#undef NTHREAD +} + + static PyMethodDef test_methods[] = { {"pymem_api_misuse", pymem_api_misuse, METH_NOARGS}, {"pymem_buffer_overflow", 
pymem_buffer_overflow, METH_NOARGS}, @@ -602,6 +702,7 @@ static PyMethodDef test_methods[] = { // Tracemalloc tests {"tracemalloc_track", tracemalloc_track, METH_VARARGS}, {"tracemalloc_untrack", tracemalloc_untrack, METH_VARARGS}, + {"tracemalloc_track_race", tracemalloc_track_race, METH_NOARGS}, {NULL}, }; diff --git a/Modules/_testcapi/object.c b/Modules/_testcapi/object.c index 1d0169b2af9469..2d538627d213fd 100644 --- a/Modules/_testcapi/object.c +++ b/Modules/_testcapi/object.c @@ -131,6 +131,345 @@ pyobject_enable_deferred_refcount(PyObject *self, PyObject *obj) return PyLong_FromLong(result); } +static int MyObject_dealloc_called = 0; + +static void +MyObject_dealloc(PyObject *op) +{ + // PyUnstable_TryIncRef should return 0 if object is being deallocated + assert(Py_REFCNT(op) == 0); + assert(!PyUnstable_TryIncRef(op)); + assert(Py_REFCNT(op) == 0); + + MyObject_dealloc_called++; + Py_TYPE(op)->tp_free(op); +} + +static PyTypeObject MyType = { + PyVarObject_HEAD_INIT(NULL, 0) + .tp_name = "MyType", + .tp_basicsize = sizeof(PyObject), + .tp_dealloc = MyObject_dealloc, +}; + +static PyObject * +test_py_try_inc_ref(PyObject *self, PyObject *unused) +{ + if (PyType_Ready(&MyType) < 0) { + return NULL; + } + + MyObject_dealloc_called = 0; + + PyObject *op = PyObject_New(PyObject, &MyType); + if (op == NULL) { + return NULL; + } + + PyUnstable_EnableTryIncRef(op); +#ifdef Py_GIL_DISABLED + // PyUnstable_EnableTryIncRef sets the shared flags to + // `_Py_REF_MAYBE_WEAKREF` if the flags are currently zero to ensure that + // the shared reference count is merged on deallocation. + assert((op->ob_ref_shared & _Py_REF_SHARED_FLAG_MASK) >= _Py_REF_MAYBE_WEAKREF); +#endif + + if (!PyUnstable_TryIncRef(op)) { + PyErr_SetString(PyExc_AssertionError, "PyUnstable_TryIncRef failed"); + Py_DECREF(op); + return NULL; + } + Py_DECREF(op); // undo try-incref + Py_DECREF(op); // dealloc + assert(MyObject_dealloc_called == 1); + Py_RETURN_NONE; +} + + +static PyObject * +_test_incref(PyObject *ob) +{ + return Py_NewRef(ob); +} + +static PyObject * +test_xincref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) +{ + PyObject *obj = PyLong_FromLong(0); + Py_XINCREF(_test_incref(obj)); + Py_DECREF(obj); + Py_DECREF(obj); + Py_DECREF(obj); + Py_RETURN_NONE; +} + + +static PyObject * +test_incref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) +{ + PyObject *obj = PyLong_FromLong(0); + Py_INCREF(_test_incref(obj)); + Py_DECREF(obj); + Py_DECREF(obj); + Py_DECREF(obj); + Py_RETURN_NONE; +} + + +static PyObject * +test_xdecref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) +{ + Py_XDECREF(PyLong_FromLong(0)); + Py_RETURN_NONE; +} + + +static PyObject * +test_decref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) +{ + Py_DECREF(PyLong_FromLong(0)); + Py_RETURN_NONE; +} + + +static PyObject * +test_incref_decref_API(PyObject *ob, PyObject *Py_UNUSED(ignored)) +{ + PyObject *obj = PyLong_FromLong(0); + Py_IncRef(obj); + Py_DecRef(obj); + Py_DecRef(obj); + Py_RETURN_NONE; +} + + +#ifdef Py_REF_DEBUG +static PyObject * +negative_refcount(PyObject *self, PyObject *Py_UNUSED(args)) +{ + PyObject *obj = PyUnicode_FromString("negative_refcount"); + if (obj == NULL) { + return NULL; + } + assert(Py_REFCNT(obj) == 1); + + Py_SET_REFCNT(obj, 0); + /* Py_DECREF() must call _Py_NegativeRefcount() and abort Python */ + Py_DECREF(obj); + + Py_RETURN_NONE; +} + + +static PyObject * +decref_freed_object(PyObject *self, PyObject *Py_UNUSED(args)) +{ + PyObject *obj = 
PyUnicode_FromString("decref_freed_object"); + if (obj == NULL) { + return NULL; + } + assert(Py_REFCNT(obj) == 1); + + // Deallocate the memory + Py_DECREF(obj); + // obj is a now a dangling pointer + + // gh-109496: If Python is built in debug mode, Py_DECREF() must call + // _Py_NegativeRefcount() and abort Python. + Py_DECREF(obj); + + Py_RETURN_NONE; +} +#endif + + +// Test Py_CLEAR() macro +static PyObject* +test_py_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + // simple case with a variable + PyObject *obj = PyList_New(0); + if (obj == NULL) { + return NULL; + } + Py_CLEAR(obj); + assert(obj == NULL); + + // gh-98724: complex case, Py_CLEAR() argument has a side effect + PyObject* array[1]; + array[0] = PyList_New(0); + if (array[0] == NULL) { + return NULL; + } + + PyObject **p = array; + Py_CLEAR(*p++); + assert(array[0] == NULL); + assert(p == array + 1); + + Py_RETURN_NONE; +} + + +// Test Py_SETREF() and Py_XSETREF() macros, similar to test_py_clear() +static PyObject* +test_py_setref(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + // Py_SETREF() simple case with a variable + PyObject *obj = PyList_New(0); + if (obj == NULL) { + return NULL; + } + Py_SETREF(obj, NULL); + assert(obj == NULL); + + // Py_XSETREF() simple case with a variable + PyObject *obj2 = PyList_New(0); + if (obj2 == NULL) { + return NULL; + } + Py_XSETREF(obj2, NULL); + assert(obj2 == NULL); + // test Py_XSETREF() when the argument is NULL + Py_XSETREF(obj2, NULL); + assert(obj2 == NULL); + + // gh-98724: complex case, Py_SETREF() argument has a side effect + PyObject* array[1]; + array[0] = PyList_New(0); + if (array[0] == NULL) { + return NULL; + } + + PyObject **p = array; + Py_SETREF(*p++, NULL); + assert(array[0] == NULL); + assert(p == array + 1); + + // gh-98724: complex case, Py_XSETREF() argument has a side effect + PyObject* array2[1]; + array2[0] = PyList_New(0); + if (array2[0] == NULL) { + return NULL; + } + + PyObject **p2 = array2; + Py_XSETREF(*p2++, NULL); + assert(array2[0] == NULL); + assert(p2 == array2 + 1); + + // test Py_XSETREF() when the argument is NULL + p2 = array2; + Py_XSETREF(*p2++, NULL); + assert(array2[0] == NULL); + assert(p2 == array2 + 1); + + Py_RETURN_NONE; +} + + +#define TEST_REFCOUNT() \ + do { \ + PyObject *obj = PyList_New(0); \ + if (obj == NULL) { \ + return NULL; \ + } \ + assert(Py_REFCNT(obj) == 1); \ + \ + /* test Py_NewRef() */ \ + PyObject *ref = Py_NewRef(obj); \ + assert(ref == obj); \ + assert(Py_REFCNT(obj) == 2); \ + Py_DECREF(ref); \ + \ + /* test Py_XNewRef() */ \ + PyObject *xref = Py_XNewRef(obj); \ + assert(xref == obj); \ + assert(Py_REFCNT(obj) == 2); \ + Py_DECREF(xref); \ + \ + assert(Py_XNewRef(NULL) == NULL); \ + \ + Py_DECREF(obj); \ + Py_RETURN_NONE; \ + } while (0) + + +// Test Py_NewRef() and Py_XNewRef() macros +static PyObject* +test_refcount_macros(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + TEST_REFCOUNT(); +} + +#undef Py_NewRef +#undef Py_XNewRef + +// Test Py_NewRef() and Py_XNewRef() functions, after undefining macros. 
+static PyObject* +test_refcount_funcs(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + TEST_REFCOUNT(); +} + + +// Test Py_Is() function +#define TEST_PY_IS() \ + do { \ + PyObject *o_none = Py_None; \ + PyObject *o_true = Py_True; \ + PyObject *o_false = Py_False; \ + PyObject *obj = PyList_New(0); \ + if (obj == NULL) { \ + return NULL; \ + } \ + \ + /* test Py_Is() */ \ + assert(Py_Is(obj, obj)); \ + assert(!Py_Is(obj, o_none)); \ + \ + /* test Py_None */ \ + assert(Py_Is(o_none, o_none)); \ + assert(!Py_Is(obj, o_none)); \ + \ + /* test Py_True */ \ + assert(Py_Is(o_true, o_true)); \ + assert(!Py_Is(o_false, o_true)); \ + assert(!Py_Is(obj, o_true)); \ + \ + /* test Py_False */ \ + assert(Py_Is(o_false, o_false)); \ + assert(!Py_Is(o_true, o_false)); \ + assert(!Py_Is(obj, o_false)); \ + \ + Py_DECREF(obj); \ + Py_RETURN_NONE; \ + } while (0) + +// Test Py_Is() macro +static PyObject* +test_py_is_macros(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + TEST_PY_IS(); +} + +#undef Py_Is + +// Test Py_Is() function, after undefining its macro. +static PyObject* +test_py_is_funcs(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + TEST_PY_IS(); +} + + +static PyObject * +clear_managed_dict(PyObject *self, PyObject *obj) +{ + PyObject_ClearManagedDict(obj); + Py_RETURN_NONE; +} + static PyMethodDef test_methods[] = { {"call_pyobject_print", call_pyobject_print, METH_VARARGS}, @@ -139,15 +478,28 @@ static PyMethodDef test_methods[] = { {"pyobject_print_os_error", pyobject_print_os_error, METH_VARARGS}, {"pyobject_clear_weakrefs_no_callbacks", pyobject_clear_weakrefs_no_callbacks, METH_O}, {"pyobject_enable_deferred_refcount", pyobject_enable_deferred_refcount, METH_O}, + {"test_py_try_inc_ref", test_py_try_inc_ref, METH_NOARGS}, + {"test_xincref_doesnt_leak",test_xincref_doesnt_leak, METH_NOARGS}, + {"test_incref_doesnt_leak", test_incref_doesnt_leak, METH_NOARGS}, + {"test_xdecref_doesnt_leak",test_xdecref_doesnt_leak, METH_NOARGS}, + {"test_decref_doesnt_leak", test_decref_doesnt_leak, METH_NOARGS}, + {"test_incref_decref_API", test_incref_decref_API, METH_NOARGS}, +#ifdef Py_REF_DEBUG + {"negative_refcount", negative_refcount, METH_NOARGS}, + {"decref_freed_object", decref_freed_object, METH_NOARGS}, +#endif + {"test_py_clear", test_py_clear, METH_NOARGS}, + {"test_py_setref", test_py_setref, METH_NOARGS}, + {"test_refcount_macros", test_refcount_macros, METH_NOARGS}, + {"test_refcount_funcs", test_refcount_funcs, METH_NOARGS}, + {"test_py_is_macros", test_py_is_macros, METH_NOARGS}, + {"test_py_is_funcs", test_py_is_funcs, METH_NOARGS}, + {"clear_managed_dict", clear_managed_dict, METH_O, NULL}, {NULL}, }; int _PyTestCapi_Init_Object(PyObject *m) { - if (PyModule_AddFunctions(m, test_methods) < 0) { - return -1; - } - - return 0; + return PyModule_AddFunctions(m, test_methods); } diff --git a/Modules/_testcapi/parts.h b/Modules/_testcapi/parts.h index 65ba77596c760e..af6400162daf2b 100644 --- a/Modules/_testcapi/parts.h +++ b/Modules/_testcapi/parts.h @@ -61,5 +61,9 @@ int _PyTestCapi_Init_Time(PyObject *module); int _PyTestCapi_Init_Monitoring(PyObject *module); int _PyTestCapi_Init_Object(PyObject *module); int _PyTestCapi_Init_Config(PyObject *mod); +int _PyTestCapi_Init_Import(PyObject *mod); +int _PyTestCapi_Init_Frame(PyObject *mod); +int _PyTestCapi_Init_Type(PyObject *mod); +int _PyTestCapi_Init_Function(PyObject *mod); #endif // Py_TESTCAPI_PARTS_H diff --git a/Modules/_testcapi/set.c b/Modules/_testcapi/set.c index 31b52cee5e9623..092715ab7d0aa4 100644 --- 
a/Modules/_testcapi/set.c +++ b/Modules/_testcapi/set.c @@ -8,18 +8,37 @@ set_get_size(PyObject *self, PyObject *obj) RETURN_SIZE(PySet_GET_SIZE(obj)); } + +static PyObject* +test_set_type_size(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyObject *obj = PyList_New(0); + if (obj == NULL) { + return NULL; + } + + // Ensure that following tests don't modify the object, + // to ensure that Py_DECREF() will not crash. + assert(Py_TYPE(obj) == &PyList_Type); + assert(Py_SIZE(obj) == 0); + + // bpo-39573: Test Py_SET_TYPE() and Py_SET_SIZE() functions. + Py_SET_TYPE(obj, &PyList_Type); + Py_SET_SIZE(obj, 0); + + Py_DECREF(obj); + Py_RETURN_NONE; +} + + static PyMethodDef test_methods[] = { {"set_get_size", set_get_size, METH_O}, - + {"test_set_type_size", test_set_type_size, METH_NOARGS}, {NULL}, }; int _PyTestCapi_Init_Set(PyObject *m) { - if (PyModule_AddFunctions(m, test_methods) < 0) { - return -1; - } - - return 0; + return PyModule_AddFunctions(m, test_methods); } diff --git a/Modules/_testcapi/type.c b/Modules/_testcapi/type.c new file mode 100644 index 00000000000000..9bef58d1f83668 --- /dev/null +++ b/Modules/_testcapi/type.c @@ -0,0 +1,251 @@ +#include "parts.h" +#include "util.h" + + +static PyType_Slot HeapTypeNameType_slots[] = { + {0}, +}; + +static PyType_Spec HeapTypeNameType_Spec = { + .name = "_testcapi.HeapTypeNameType", + .basicsize = sizeof(PyObject), + .flags = Py_TPFLAGS_DEFAULT, + .slots = HeapTypeNameType_slots, +}; + +static PyObject * +get_heaptype_for_name(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + return PyType_FromSpec(&HeapTypeNameType_Spec); +} + + +static PyObject * +get_type_name(PyObject *self, PyObject *type) +{ + assert(PyType_Check(type)); + return PyType_GetName((PyTypeObject *)type); +} + + +static PyObject * +get_type_qualname(PyObject *self, PyObject *type) +{ + assert(PyType_Check(type)); + return PyType_GetQualName((PyTypeObject *)type); +} + + +static PyObject * +get_type_fullyqualname(PyObject *self, PyObject *type) +{ + assert(PyType_Check(type)); + return PyType_GetFullyQualifiedName((PyTypeObject *)type); +} + + +static PyObject * +get_type_module_name(PyObject *self, PyObject *type) +{ + assert(PyType_Check(type)); + return PyType_GetModuleName((PyTypeObject *)type); +} + + +static PyObject * +test_get_type_dict(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + /* Test for PyType_GetDict */ + + // Assert ints have a `to_bytes` method + PyObject *long_dict = PyType_GetDict(&PyLong_Type); + assert(long_dict); + assert(PyDict_GetItemString(long_dict, "to_bytes")); // borrowed ref + Py_DECREF(long_dict); + + // Make a new type, add an attribute to it and assert it's there + PyObject *HeapTypeNameType = PyType_FromSpec(&HeapTypeNameType_Spec); + assert(HeapTypeNameType); + assert(PyObject_SetAttrString( + HeapTypeNameType, "new_attr", Py_NewRef(Py_None)) >= 0); + PyObject *type_dict = PyType_GetDict((PyTypeObject*)HeapTypeNameType); + assert(type_dict); + assert(PyDict_GetItemString(type_dict, "new_attr")); // borrowed ref + Py_DECREF(HeapTypeNameType); + Py_DECREF(type_dict); + Py_RETURN_NONE; +} + + +static PyObject * +test_get_statictype_slots(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + newfunc tp_new = PyType_GetSlot(&PyLong_Type, Py_tp_new); + if (PyLong_Type.tp_new != tp_new) { + PyErr_SetString(PyExc_AssertionError, "mismatch: tp_new of long"); + return NULL; + } + + reprfunc tp_repr = PyType_GetSlot(&PyLong_Type, Py_tp_repr); + if (PyLong_Type.tp_repr != tp_repr) { + PyErr_SetString(PyExc_AssertionError, "mismatch: 
tp_repr of long"); + return NULL; + } + + ternaryfunc tp_call = PyType_GetSlot(&PyLong_Type, Py_tp_call); + if (tp_call != NULL) { + PyErr_SetString(PyExc_AssertionError, "mismatch: tp_call of long"); + return NULL; + } + + binaryfunc nb_add = PyType_GetSlot(&PyLong_Type, Py_nb_add); + if (PyLong_Type.tp_as_number->nb_add != nb_add) { + PyErr_SetString(PyExc_AssertionError, "mismatch: nb_add of long"); + return NULL; + } + + lenfunc mp_length = PyType_GetSlot(&PyLong_Type, Py_mp_length); + if (mp_length != NULL) { + PyErr_SetString(PyExc_AssertionError, "mismatch: mp_length of long"); + return NULL; + } + + void *over_value = PyType_GetSlot(&PyLong_Type, Py_bf_releasebuffer + 1); + if (over_value != NULL) { + PyErr_SetString(PyExc_AssertionError, "mismatch: max+1 of long"); + return NULL; + } + + tp_new = PyType_GetSlot(&PyLong_Type, 0); + if (tp_new != NULL) { + PyErr_SetString(PyExc_AssertionError, "mismatch: slot 0 of long"); + return NULL; + } + if (PyErr_ExceptionMatches(PyExc_SystemError)) { + // This is the right exception + PyErr_Clear(); + } + else { + return NULL; + } + + Py_RETURN_NONE; +} + + +// Get type->tp_version_tag +static PyObject * +type_get_version(PyObject *self, PyObject *type) +{ + if (!PyType_Check(type)) { + PyErr_SetString(PyExc_TypeError, "argument must be a type"); + return NULL; + } + PyObject *res = PyLong_FromUnsignedLong( + ((PyTypeObject *)type)->tp_version_tag); + if (res == NULL) { + assert(PyErr_Occurred()); + return NULL; + } + return res; +} + +static PyObject * +type_modified(PyObject *self, PyObject *arg) +{ + if (!PyType_Check(arg)) { + PyErr_SetString(PyExc_TypeError, "argument must be a type"); + return NULL; + } + PyTypeObject *type = (PyTypeObject*)arg; + + PyType_Modified(type); + Py_RETURN_NONE; +} + + +static PyObject * +type_assign_version(PyObject *self, PyObject *arg) +{ + if (!PyType_Check(arg)) { + PyErr_SetString(PyExc_TypeError, "argument must be a type"); + return NULL; + } + PyTypeObject *type = (PyTypeObject*)arg; + + int res = PyUnstable_Type_AssignVersionTag(type); + return PyLong_FromLong(res); +} + + +static PyObject * +type_get_tp_bases(PyObject *self, PyObject *arg) +{ + if (!PyType_Check(arg)) { + PyErr_SetString(PyExc_TypeError, "argument must be a type"); + return NULL; + } + PyTypeObject *type = (PyTypeObject*)arg; + + PyObject *bases = type->tp_bases; + if (bases == NULL) { + Py_RETURN_NONE; + } + return Py_NewRef(bases); +} + +static PyObject * +type_get_tp_mro(PyObject *self, PyObject *arg) +{ + if (!PyType_Check(arg)) { + PyErr_SetString(PyExc_TypeError, "argument must be a type"); + return NULL; + } + PyTypeObject *type = (PyTypeObject*)arg; + + PyObject *mro = ((PyTypeObject *)type)->tp_mro; + if (mro == NULL) { + Py_RETURN_NONE; + } + return Py_NewRef(mro); +} + + +static PyObject * +type_freeze(PyObject *module, PyObject *arg) +{ + if (!PyType_Check(arg)) { + PyErr_SetString(PyExc_TypeError, "argument must be a type"); + return NULL; + } + PyTypeObject *type = (PyTypeObject*)arg; + + if (PyType_Freeze(type) < 0) { + return NULL; + } + Py_RETURN_NONE; +} + + +static PyMethodDef test_methods[] = { + {"get_heaptype_for_name", get_heaptype_for_name, METH_NOARGS}, + {"get_type_name", get_type_name, METH_O}, + {"get_type_qualname", get_type_qualname, METH_O}, + {"get_type_fullyqualname", get_type_fullyqualname, METH_O}, + {"get_type_module_name", get_type_module_name, METH_O}, + {"test_get_type_dict", test_get_type_dict, METH_NOARGS}, + {"test_get_statictype_slots", test_get_statictype_slots, METH_NOARGS}, + 
{"type_get_version", type_get_version, METH_O, PyDoc_STR("type->tp_version_tag")}, + {"type_modified", type_modified, METH_O, PyDoc_STR("PyType_Modified")}, + {"type_assign_version", type_assign_version, METH_O, PyDoc_STR("PyUnstable_Type_AssignVersionTag")}, + {"type_get_tp_bases", type_get_tp_bases, METH_O}, + {"type_get_tp_mro", type_get_tp_mro, METH_O}, + {"type_freeze", type_freeze, METH_O}, + {NULL}, +}; + +int +_PyTestCapi_Init_Type(PyObject *m) +{ + return PyModule_AddFunctions(m, test_methods); +} diff --git a/Modules/_testcapi/watchers.c b/Modules/_testcapi/watchers.c index 321d3aeffb6ad1..f7440769b9594e 100644 --- a/Modules/_testcapi/watchers.c +++ b/Modules/_testcapi/watchers.c @@ -428,7 +428,8 @@ allocate_too_many_code_watchers(PyObject *self, PyObject *args) PyObject *exc = PyErr_GetRaisedException(); for (int i = 0; i < num_watchers; i++) { if (PyCode_ClearWatcher(watcher_ids[i]) < 0) { - PyErr_WriteUnraisable(Py_None); + PyErr_FormatUnraisable("Exception ignored while " + "clearing code watcher"); break; } } @@ -609,7 +610,8 @@ allocate_too_many_func_watchers(PyObject *self, PyObject *args) PyObject *exc = PyErr_GetRaisedException(); for (int i = 0; i < num_watchers; i++) { if (PyFunction_ClearWatcher(watcher_ids[i]) < 0) { - PyErr_WriteUnraisable(Py_None); + PyErr_FormatUnraisable("Exception ignored while " + "clearing function watcher"); break; } } @@ -755,7 +757,8 @@ allocate_too_many_context_watchers(PyObject *self, PyObject *args) PyObject *exc = PyErr_GetRaisedException(); for (int i = 0; i < num_watchers; i++) { if (PyContext_ClearWatcher(watcher_ids[i]) < 0) { - PyErr_WriteUnraisable(Py_None); + PyErr_FormatUnraisable("Exception ignored while " + "clearing context watcher"); break; } } diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index c405a352ed74a1..c84646ccf03fa7 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -163,124 +163,6 @@ test_sizeof_c_types(PyObject *self, PyObject *Py_UNUSED(ignored)) #endif } -static PyObject* -test_list_api(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - PyObject* list; - int i; - - /* SF bug 132008: PyList_Reverse segfaults */ -#define NLIST 30 - list = PyList_New(NLIST); - if (list == (PyObject*)NULL) - return (PyObject*)NULL; - /* list = range(NLIST) */ - for (i = 0; i < NLIST; ++i) { - PyObject* anint = PyLong_FromLong(i); - if (anint == (PyObject*)NULL) { - Py_DECREF(list); - return (PyObject*)NULL; - } - PyList_SET_ITEM(list, i, anint); - } - /* list.reverse(), via PyList_Reverse() */ - i = PyList_Reverse(list); /* should not blow up! 
*/ - if (i != 0) { - Py_DECREF(list); - return (PyObject*)NULL; - } - /* Check that list == range(29, -1, -1) now */ - for (i = 0; i < NLIST; ++i) { - PyObject* anint = PyList_GET_ITEM(list, i); - if (PyLong_AS_LONG(anint) != NLIST-1-i) { - PyErr_SetString(get_testerror(self), - "test_list_api: reverse screwed up"); - Py_DECREF(list); - return (PyObject*)NULL; - } - } - Py_DECREF(list); -#undef NLIST - - Py_RETURN_NONE; -} - -static int -test_dict_inner(PyObject *self, int count) -{ - Py_ssize_t pos = 0, iterations = 0; - int i; - PyObject *dict = PyDict_New(); - PyObject *v, *k; - - if (dict == NULL) - return -1; - - for (i = 0; i < count; i++) { - v = PyLong_FromLong(i); - if (v == NULL) { - goto error; - } - if (PyDict_SetItem(dict, v, v) < 0) { - Py_DECREF(v); - goto error; - } - Py_DECREF(v); - } - - k = v = UNINITIALIZED_PTR; - while (PyDict_Next(dict, &pos, &k, &v)) { - PyObject *o; - iterations++; - - assert(k != UNINITIALIZED_PTR); - assert(v != UNINITIALIZED_PTR); - i = PyLong_AS_LONG(v) + 1; - o = PyLong_FromLong(i); - if (o == NULL) { - goto error; - } - if (PyDict_SetItem(dict, k, o) < 0) { - Py_DECREF(o); - goto error; - } - Py_DECREF(o); - k = v = UNINITIALIZED_PTR; - } - assert(k == UNINITIALIZED_PTR); - assert(v == UNINITIALIZED_PTR); - - Py_DECREF(dict); - - if (iterations != count) { - PyErr_SetString( - get_testerror(self), - "test_dict_iteration: dict iteration went wrong "); - return -1; - } else { - return 0; - } -error: - Py_DECREF(dict); - return -1; -} - - - -static PyObject* -test_dict_iteration(PyObject* self, PyObject *Py_UNUSED(ignored)) -{ - int i; - - for (i = 0; i < 200; i++) { - if (test_dict_inner(self, i) < 0) { - return NULL; - } - } - - Py_RETURN_NONE; -} - /* Issue #4701: Check that PyObject_Hash implicitly calls * PyType_Ready if it hasn't already been called */ @@ -530,136 +412,6 @@ test_buildvalue_N(PyObject *self, PyObject *Py_UNUSED(ignored)) } -static PyObject * -test_get_statictype_slots(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - newfunc tp_new = PyType_GetSlot(&PyLong_Type, Py_tp_new); - if (PyLong_Type.tp_new != tp_new) { - PyErr_SetString(PyExc_AssertionError, "mismatch: tp_new of long"); - return NULL; - } - - reprfunc tp_repr = PyType_GetSlot(&PyLong_Type, Py_tp_repr); - if (PyLong_Type.tp_repr != tp_repr) { - PyErr_SetString(PyExc_AssertionError, "mismatch: tp_repr of long"); - return NULL; - } - - ternaryfunc tp_call = PyType_GetSlot(&PyLong_Type, Py_tp_call); - if (tp_call != NULL) { - PyErr_SetString(PyExc_AssertionError, "mismatch: tp_call of long"); - return NULL; - } - - binaryfunc nb_add = PyType_GetSlot(&PyLong_Type, Py_nb_add); - if (PyLong_Type.tp_as_number->nb_add != nb_add) { - PyErr_SetString(PyExc_AssertionError, "mismatch: nb_add of long"); - return NULL; - } - - lenfunc mp_length = PyType_GetSlot(&PyLong_Type, Py_mp_length); - if (mp_length != NULL) { - PyErr_SetString(PyExc_AssertionError, "mismatch: mp_length of long"); - return NULL; - } - - void *over_value = PyType_GetSlot(&PyLong_Type, Py_bf_releasebuffer + 1); - if (over_value != NULL) { - PyErr_SetString(PyExc_AssertionError, "mismatch: max+1 of long"); - return NULL; - } - - tp_new = PyType_GetSlot(&PyLong_Type, 0); - if (tp_new != NULL) { - PyErr_SetString(PyExc_AssertionError, "mismatch: slot 0 of long"); - return NULL; - } - if (PyErr_ExceptionMatches(PyExc_SystemError)) { - // This is the right exception - PyErr_Clear(); - } - else { - return NULL; - } - - Py_RETURN_NONE; -} - - -static PyType_Slot HeapTypeNameType_slots[] = { - {0}, -}; - -static 
PyType_Spec HeapTypeNameType_Spec = { - .name = "_testcapi.HeapTypeNameType", - .basicsize = sizeof(PyObject), - .flags = Py_TPFLAGS_DEFAULT, - .slots = HeapTypeNameType_slots, -}; - -static PyObject * -get_heaptype_for_name(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - return PyType_FromSpec(&HeapTypeNameType_Spec); -} - - -static PyObject * -get_type_name(PyObject *self, PyObject *type) -{ - assert(PyType_Check(type)); - return PyType_GetName((PyTypeObject *)type); -} - - -static PyObject * -get_type_qualname(PyObject *self, PyObject *type) -{ - assert(PyType_Check(type)); - return PyType_GetQualName((PyTypeObject *)type); -} - - -static PyObject * -get_type_fullyqualname(PyObject *self, PyObject *type) -{ - assert(PyType_Check(type)); - return PyType_GetFullyQualifiedName((PyTypeObject *)type); -} - - -static PyObject * -get_type_module_name(PyObject *self, PyObject *type) -{ - assert(PyType_Check(type)); - return PyType_GetModuleName((PyTypeObject *)type); -} - - -static PyObject * -test_get_type_dict(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - /* Test for PyType_GetDict */ - - // Assert ints have a `to_bytes` method - PyObject *long_dict = PyType_GetDict(&PyLong_Type); - assert(long_dict); - assert(PyDict_GetItemString(long_dict, "to_bytes")); // borrowed ref - Py_DECREF(long_dict); - - // Make a new type, add an attribute to it and assert it's there - PyObject *HeapTypeNameType = PyType_FromSpec(&HeapTypeNameType_Spec); - assert(HeapTypeNameType); - assert(PyObject_SetAttrString( - HeapTypeNameType, "new_attr", Py_NewRef(Py_None)) >= 0); - PyObject *type_dict = PyType_GetDict((PyTypeObject*)HeapTypeNameType); - assert(type_dict); - assert(PyDict_GetItemString(type_dict, "new_attr")); // borrowed ref - Py_DECREF(HeapTypeNameType); - Py_DECREF(type_dict); - Py_RETURN_NONE; -} - static PyObject * pyobject_repr_from_null(PyObject *self, PyObject *Py_UNUSED(ignored)) { @@ -885,61 +637,6 @@ pending_threadfunc(PyObject *self, PyObject *arg, PyObject *kwargs) return PyLong_FromUnsignedLong((unsigned long)num_added); } -/* Test PyOS_string_to_double. */ -static PyObject * -test_string_to_double(PyObject *self, PyObject *Py_UNUSED(ignored)) { - double result; - const char *msg; - -#define CHECK_STRING(STR, expected) \ - do { \ - result = PyOS_string_to_double(STR, NULL, NULL); \ - if (result == -1.0 && PyErr_Occurred()) { \ - return NULL; \ - } \ - if (result != (double)expected) { \ - msg = "conversion of " STR " to float failed"; \ - goto fail; \ - } \ - } while (0) - -#define CHECK_INVALID(STR) \ - do { \ - result = PyOS_string_to_double(STR, NULL, NULL); \ - if (result == -1.0 && PyErr_Occurred()) { \ - if (PyErr_ExceptionMatches(PyExc_ValueError)) { \ - PyErr_Clear(); \ - } \ - else { \ - return NULL; \ - } \ - } \ - else { \ - msg = "conversion of " STR " didn't raise ValueError"; \ - goto fail; \ - } \ - } while (0) - - CHECK_STRING("0.1", 0.1); - CHECK_STRING("1.234", 1.234); - CHECK_STRING("-1.35", -1.35); - CHECK_STRING(".1e01", 1.0); - CHECK_STRING("2.e-2", 0.02); - - CHECK_INVALID(" 0.1"); - CHECK_INVALID("\t\n-3"); - CHECK_INVALID(".123 "); - CHECK_INVALID("3\n"); - CHECK_INVALID("123abc"); - - Py_RETURN_NONE; - fail: - return raiseTestError(self, "test_string_to_double", msg); -#undef CHECK_STRING -#undef CHECK_INVALID -} - - /* Coverage testing of capsule objects. 
*/ static const char *capsule_name = "capsule name"; @@ -1360,15 +1057,10 @@ test_pep3118_obsolete_write_locks(PyObject* self, PyObject *Py_UNUSED(ignored)) if (ret != -1 || match == 0) goto error; - PyObject *mod_io = PyImport_ImportModule("_io"); - if (mod_io == NULL) { - return NULL; - } - /* bytesiobuf_getbuffer() */ - PyTypeObject *type = (PyTypeObject *)PyObject_GetAttrString( - mod_io, "_BytesIOBuffer"); - Py_DECREF(mod_io); + PyTypeObject *type = (PyTypeObject *)PyImport_ImportModuleAttrString( + "_io", + "_BytesIOBuffer"); if (type == NULL) { return NULL; } @@ -1521,48 +1213,6 @@ static PyMethodDef ml = { NULL }; -static PyObject * -_test_incref(PyObject *ob) -{ - return Py_NewRef(ob); -} - -static PyObject * -test_xincref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) -{ - PyObject *obj = PyLong_FromLong(0); - Py_XINCREF(_test_incref(obj)); - Py_DECREF(obj); - Py_DECREF(obj); - Py_DECREF(obj); - Py_RETURN_NONE; -} - -static PyObject * -test_incref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) -{ - PyObject *obj = PyLong_FromLong(0); - Py_INCREF(_test_incref(obj)); - Py_DECREF(obj); - Py_DECREF(obj); - Py_DECREF(obj); - Py_RETURN_NONE; -} - -static PyObject * -test_xdecref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) -{ - Py_XDECREF(PyLong_FromLong(0)); - Py_RETURN_NONE; -} - -static PyObject * -test_decref_doesnt_leak(PyObject *ob, PyObject *Py_UNUSED(ignored)) -{ - Py_DECREF(PyLong_FromLong(0)); - Py_RETURN_NONE; -} - static PyObject * test_structseq_newtype_doesnt_leak(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(args)) @@ -1609,16 +1259,6 @@ test_structseq_newtype_null_descr_doc(PyObject *Py_UNUSED(self), Py_RETURN_NONE; } -static PyObject * -test_incref_decref_API(PyObject *ob, PyObject *Py_UNUSED(ignored)) -{ - PyObject *obj = PyLong_FromLong(0); - Py_IncRef(obj); - Py_DecRef(obj); - Py_DecRef(obj); - Py_RETURN_NONE; -} - typedef struct { PyThread_type_lock start_event; PyThread_type_lock exit_event; @@ -2036,45 +1676,6 @@ bad_get(PyObject *module, PyObject *args) } -#ifdef Py_REF_DEBUG -static PyObject * -negative_refcount(PyObject *self, PyObject *Py_UNUSED(args)) -{ - PyObject *obj = PyUnicode_FromString("negative_refcount"); - if (obj == NULL) { - return NULL; - } - assert(Py_REFCNT(obj) == 1); - - Py_SET_REFCNT(obj, 0); - /* Py_DECREF() must call _Py_NegativeRefcount() and abort Python */ - Py_DECREF(obj); - - Py_RETURN_NONE; -} - -static PyObject * -decref_freed_object(PyObject *self, PyObject *Py_UNUSED(args)) -{ - PyObject *obj = PyUnicode_FromString("decref_freed_object"); - if (obj == NULL) { - return NULL; - } - assert(Py_REFCNT(obj) == 1); - - // Deallocate the memory - Py_DECREF(obj); - // obj is a now a dangling pointer - - // gh-109496: If Python is built in debug mode, Py_DECREF() must call - // _Py_NegativeRefcount() and abort Python. - Py_DECREF(obj); - - Py_RETURN_NONE; -} -#endif - - /* Functions for testing C calling conventions (METH_*) are named meth_*, * e.g. "meth_varargs" for METH_VARARGS. * @@ -2178,319 +1779,55 @@ pynumber_tobase(PyObject *module, PyObject *args) return PyNumber_ToBase(obj, base); } -static PyObject* -test_set_type_size(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - PyObject *obj = PyList_New(0); - if (obj == NULL) { - return NULL; +/* We only use 2 in test_capi/test_misc.py. 
*/ +#define NUM_BASIC_STATIC_TYPES 2 +static PyTypeObject BasicStaticTypes[NUM_BASIC_STATIC_TYPES] = { +#define INIT_BASIC_STATIC_TYPE \ + { \ + PyVarObject_HEAD_INIT(NULL, 0) \ + .tp_name = "BasicStaticType", \ + .tp_basicsize = sizeof(PyObject), \ } + INIT_BASIC_STATIC_TYPE, + INIT_BASIC_STATIC_TYPE, +#undef INIT_BASIC_STATIC_TYPE +}; +static int num_basic_static_types_used = 0; - // Ensure that following tests don't modify the object, - // to ensure that Py_DECREF() will not crash. - assert(Py_TYPE(obj) == &PyList_Type); - assert(Py_SIZE(obj) == 0); - - // bpo-39573: Test Py_SET_TYPE() and Py_SET_SIZE() functions. - Py_SET_TYPE(obj, &PyList_Type); - Py_SET_SIZE(obj, 0); - - Py_DECREF(obj); - Py_RETURN_NONE; -} - - -// Test Py_CLEAR() macro -static PyObject* -test_py_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) +static PyObject * +get_basic_static_type(PyObject *self, PyObject *args) { - // simple case with a variable - PyObject *obj = PyList_New(0); - if (obj == NULL) { + PyObject *base = NULL; + if (!PyArg_ParseTuple(args, "|O", &base)) { return NULL; } - Py_CLEAR(obj); - assert(obj == NULL); + assert(base == NULL || PyType_Check(base)); - // gh-98724: complex case, Py_CLEAR() argument has a side effect - PyObject* array[1]; - array[0] = PyList_New(0); - if (array[0] == NULL) { + if(num_basic_static_types_used >= NUM_BASIC_STATIC_TYPES) { + PyErr_SetString(PyExc_RuntimeError, "no more available basic static types"); return NULL; } + PyTypeObject *cls = &BasicStaticTypes[num_basic_static_types_used++]; - PyObject **p = array; - Py_CLEAR(*p++); - assert(array[0] == NULL); - assert(p == array + 1); - - Py_RETURN_NONE; + if (base != NULL) { + cls->tp_bases = PyTuple_Pack(1, base); + if (cls->tp_bases == NULL) { + return NULL; + } + cls->tp_base = (PyTypeObject *)Py_NewRef(base); + } + if (PyType_Ready(cls) < 0) { + Py_DECREF(cls->tp_bases); + Py_DECREF(cls->tp_base); + return NULL; + } + return (PyObject *)cls; } -// Test Py_SETREF() and Py_XSETREF() macros, similar to test_py_clear() -static PyObject* -test_py_setref(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - // Py_SETREF() simple case with a variable - PyObject *obj = PyList_New(0); - if (obj == NULL) { - return NULL; - } - Py_SETREF(obj, NULL); - assert(obj == NULL); - - // Py_XSETREF() simple case with a variable - PyObject *obj2 = PyList_New(0); - if (obj2 == NULL) { - return NULL; - } - Py_XSETREF(obj2, NULL); - assert(obj2 == NULL); - // test Py_XSETREF() when the argument is NULL - Py_XSETREF(obj2, NULL); - assert(obj2 == NULL); - - // gh-98724: complex case, Py_SETREF() argument has a side effect - PyObject* array[1]; - array[0] = PyList_New(0); - if (array[0] == NULL) { - return NULL; - } - - PyObject **p = array; - Py_SETREF(*p++, NULL); - assert(array[0] == NULL); - assert(p == array + 1); - - // gh-98724: complex case, Py_XSETREF() argument has a side effect - PyObject* array2[1]; - array2[0] = PyList_New(0); - if (array2[0] == NULL) { - return NULL; - } - - PyObject **p2 = array2; - Py_XSETREF(*p2++, NULL); - assert(array2[0] == NULL); - assert(p2 == array2 + 1); - - // test Py_XSETREF() when the argument is NULL - p2 = array2; - Py_XSETREF(*p2++, NULL); - assert(array2[0] == NULL); - assert(p2 == array2 + 1); - - Py_RETURN_NONE; -} - - -#define TEST_REFCOUNT() \ - do { \ - PyObject *obj = PyList_New(0); \ - if (obj == NULL) { \ - return NULL; \ - } \ - assert(Py_REFCNT(obj) == 1); \ - \ - /* test Py_NewRef() */ \ - PyObject *ref = Py_NewRef(obj); \ - assert(ref == obj); \ - assert(Py_REFCNT(obj) == 2); \ - 
Py_DECREF(ref); \ - \ - /* test Py_XNewRef() */ \ - PyObject *xref = Py_XNewRef(obj); \ - assert(xref == obj); \ - assert(Py_REFCNT(obj) == 2); \ - Py_DECREF(xref); \ - \ - assert(Py_XNewRef(NULL) == NULL); \ - \ - Py_DECREF(obj); \ - Py_RETURN_NONE; \ - } while (0) - - -// Test Py_NewRef() and Py_XNewRef() macros -static PyObject* -test_refcount_macros(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - TEST_REFCOUNT(); -} - -#undef Py_NewRef -#undef Py_XNewRef - -// Test Py_NewRef() and Py_XNewRef() functions, after undefining macros. -static PyObject* -test_refcount_funcs(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - TEST_REFCOUNT(); -} - - -// Test Py_Is() function -#define TEST_PY_IS() \ - do { \ - PyObject *o_none = Py_None; \ - PyObject *o_true = Py_True; \ - PyObject *o_false = Py_False; \ - PyObject *obj = PyList_New(0); \ - if (obj == NULL) { \ - return NULL; \ - } \ - \ - /* test Py_Is() */ \ - assert(Py_Is(obj, obj)); \ - assert(!Py_Is(obj, o_none)); \ - \ - /* test Py_None */ \ - assert(Py_Is(o_none, o_none)); \ - assert(!Py_Is(obj, o_none)); \ - \ - /* test Py_True */ \ - assert(Py_Is(o_true, o_true)); \ - assert(!Py_Is(o_false, o_true)); \ - assert(!Py_Is(obj, o_true)); \ - \ - /* test Py_False */ \ - assert(Py_Is(o_false, o_false)); \ - assert(!Py_Is(o_true, o_false)); \ - assert(!Py_Is(obj, o_false)); \ - \ - Py_DECREF(obj); \ - Py_RETURN_NONE; \ - } while (0) - -// Test Py_Is() macro -static PyObject* -test_py_is_macros(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - TEST_PY_IS(); -} - -#undef Py_Is - -// Test Py_Is() function, after undefining its macro. -static PyObject* -test_py_is_funcs(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - TEST_PY_IS(); -} - - -// type->tp_version_tag -static PyObject * -type_get_version(PyObject *self, PyObject *type) -{ - if (!PyType_Check(type)) { - PyErr_SetString(PyExc_TypeError, "argument must be a type"); - return NULL; - } - PyObject *res = PyLong_FromUnsignedLong( - ((PyTypeObject *)type)->tp_version_tag); - if (res == NULL) { - assert(PyErr_Occurred()); - return NULL; - } - return res; -} - -static PyObject * -type_modified(PyObject *self, PyObject *type) -{ - if (!PyType_Check(type)) { - PyErr_SetString(PyExc_TypeError, "argument must be a type"); - return NULL; - } - PyType_Modified((PyTypeObject *)type); - Py_RETURN_NONE; -} - - -static PyObject * -type_assign_version(PyObject *self, PyObject *type) -{ - if (!PyType_Check(type)) { - PyErr_SetString(PyExc_TypeError, "argument must be a type"); - return NULL; - } - int res = PyUnstable_Type_AssignVersionTag((PyTypeObject *)type); - return PyLong_FromLong(res); -} - - -static PyObject * -type_get_tp_bases(PyObject *self, PyObject *type) -{ - PyObject *bases = ((PyTypeObject *)type)->tp_bases; - if (bases == NULL) { - Py_RETURN_NONE; - } - return Py_NewRef(bases); -} - -static PyObject * -type_get_tp_mro(PyObject *self, PyObject *type) -{ - PyObject *mro = ((PyTypeObject *)type)->tp_mro; - if (mro == NULL) { - Py_RETURN_NONE; - } - return Py_NewRef(mro); -} - - -/* We only use 2 in test_capi/test_misc.py. 
*/ -#define NUM_BASIC_STATIC_TYPES 2 -static PyTypeObject BasicStaticTypes[NUM_BASIC_STATIC_TYPES] = { -#define INIT_BASIC_STATIC_TYPE \ - { \ - PyVarObject_HEAD_INIT(NULL, 0) \ - .tp_name = "BasicStaticType", \ - .tp_basicsize = sizeof(PyObject), \ - } - INIT_BASIC_STATIC_TYPE, - INIT_BASIC_STATIC_TYPE, -#undef INIT_BASIC_STATIC_TYPE -}; -static int num_basic_static_types_used = 0; - -static PyObject * -get_basic_static_type(PyObject *self, PyObject *args) -{ - PyObject *base = NULL; - if (!PyArg_ParseTuple(args, "|O", &base)) { - return NULL; - } - assert(base == NULL || PyType_Check(base)); - - if(num_basic_static_types_used >= NUM_BASIC_STATIC_TYPES) { - PyErr_SetString(PyExc_RuntimeError, "no more available basic static types"); - return NULL; - } - PyTypeObject *cls = &BasicStaticTypes[num_basic_static_types_used++]; - - if (base != NULL) { - cls->tp_bases = PyTuple_Pack(1, base); - if (cls->tp_bases == NULL) { - return NULL; - } - cls->tp_base = (PyTypeObject *)Py_NewRef(base); - } - if (PyType_Ready(cls) < 0) { - Py_DECREF(cls->tp_bases); - Py_DECREF(cls->tp_base); - return NULL; - } - return (PyObject *)cls; -} - - -// Test PyThreadState C API -static PyObject * -test_tstate_capi(PyObject *self, PyObject *Py_UNUSED(args)) +// Test PyThreadState C API +static PyObject * +test_tstate_capi(PyObject *self, PyObject *Py_UNUSED(args)) { // PyThreadState_Get() PyThreadState *tstate = PyThreadState_Get(); @@ -2533,109 +1870,6 @@ test_tstate_capi(PyObject *self, PyObject *Py_UNUSED(args)) Py_RETURN_NONE; } -static PyObject * -frame_getlocals(PyObject *self, PyObject *frame) -{ - if (!PyFrame_Check(frame)) { - PyErr_SetString(PyExc_TypeError, "argument must be a frame"); - return NULL; - } - return PyFrame_GetLocals((PyFrameObject *)frame); -} - -static PyObject * -frame_getglobals(PyObject *self, PyObject *frame) -{ - if (!PyFrame_Check(frame)) { - PyErr_SetString(PyExc_TypeError, "argument must be a frame"); - return NULL; - } - return PyFrame_GetGlobals((PyFrameObject *)frame); -} - -static PyObject * -frame_getgenerator(PyObject *self, PyObject *frame) -{ - if (!PyFrame_Check(frame)) { - PyErr_SetString(PyExc_TypeError, "argument must be a frame"); - return NULL; - } - return PyFrame_GetGenerator((PyFrameObject *)frame); -} - -static PyObject * -frame_getbuiltins(PyObject *self, PyObject *frame) -{ - if (!PyFrame_Check(frame)) { - PyErr_SetString(PyExc_TypeError, "argument must be a frame"); - return NULL; - } - return PyFrame_GetBuiltins((PyFrameObject *)frame); -} - -static PyObject * -frame_getlasti(PyObject *self, PyObject *frame) -{ - if (!PyFrame_Check(frame)) { - PyErr_SetString(PyExc_TypeError, "argument must be a frame"); - return NULL; - } - int lasti = PyFrame_GetLasti((PyFrameObject *)frame); - if (lasti < 0) { - assert(lasti == -1); - Py_RETURN_NONE; - } - return PyLong_FromLong(lasti); -} - -static PyObject * -frame_new(PyObject *self, PyObject *args) -{ - PyObject *code, *globals, *locals; - if (!PyArg_ParseTuple(args, "OOO", &code, &globals, &locals)) { - return NULL; - } - if (!PyCode_Check(code)) { - PyErr_SetString(PyExc_TypeError, "argument must be a code object"); - return NULL; - } - PyThreadState *tstate = PyThreadState_Get(); - - return (PyObject *)PyFrame_New(tstate, (PyCodeObject *)code, globals, locals); -} - -static PyObject * -test_frame_getvar(PyObject *self, PyObject *args) -{ - PyObject *frame, *name; - if (!PyArg_ParseTuple(args, "OO", &frame, &name)) { - return NULL; - } - if (!PyFrame_Check(frame)) { - PyErr_SetString(PyExc_TypeError, "argument must be 
a frame"); - return NULL; - } - - return PyFrame_GetVar((PyFrameObject *)frame, name); -} - -static PyObject * -test_frame_getvarstring(PyObject *self, PyObject *args) -{ - PyObject *frame; - const char *name; - if (!PyArg_ParseTuple(args, "Oy", &frame, &name)) { - return NULL; - } - if (!PyFrame_Check(frame)) { - PyErr_SetString(PyExc_TypeError, "argument must be a frame"); - return NULL; - } - - return PyFrame_GetVarString((PyFrameObject *)frame, name); -} - - static PyObject * gen_get_code(PyObject *self, PyObject *gen) { @@ -2903,14 +2137,6 @@ settrace_to_error(PyObject *self, PyObject *list) Py_RETURN_NONE; } -static PyObject * -clear_managed_dict(PyObject *self, PyObject *obj) -{ - PyObject_ClearManagedDict(obj); - Py_RETURN_NONE; -} - - static PyObject * test_macros(PyObject *self, PyObject *Py_UNUSED(args)) { @@ -2947,119 +2173,6 @@ test_macros(PyObject *self, PyObject *Py_UNUSED(args)) Py_RETURN_NONE; } -static PyObject * -function_get_code(PyObject *self, PyObject *func) -{ - PyObject *code = PyFunction_GetCode(func); - if (code != NULL) { - return Py_NewRef(code); - } else { - return NULL; - } -} - -static PyObject * -function_get_globals(PyObject *self, PyObject *func) -{ - PyObject *globals = PyFunction_GetGlobals(func); - if (globals != NULL) { - return Py_NewRef(globals); - } else { - return NULL; - } -} - -static PyObject * -function_get_module(PyObject *self, PyObject *func) -{ - PyObject *module = PyFunction_GetModule(func); - if (module != NULL) { - return Py_NewRef(module); - } else { - return NULL; - } -} - -static PyObject * -function_get_defaults(PyObject *self, PyObject *func) -{ - PyObject *defaults = PyFunction_GetDefaults(func); - if (defaults != NULL) { - return Py_NewRef(defaults); - } else if (PyErr_Occurred()) { - return NULL; - } else { - Py_RETURN_NONE; // This can happen when `defaults` are set to `None` - } -} - -static PyObject * -function_set_defaults(PyObject *self, PyObject *args) -{ - PyObject *func = NULL, *defaults = NULL; - if (!PyArg_ParseTuple(args, "OO", &func, &defaults)) { - return NULL; - } - int result = PyFunction_SetDefaults(func, defaults); - if (result == -1) - return NULL; - Py_RETURN_NONE; -} - -static PyObject * -function_get_kw_defaults(PyObject *self, PyObject *func) -{ - PyObject *defaults = PyFunction_GetKwDefaults(func); - if (defaults != NULL) { - return Py_NewRef(defaults); - } else if (PyErr_Occurred()) { - return NULL; - } else { - Py_RETURN_NONE; // This can happen when `kwdefaults` are set to `None` - } -} - -static PyObject * -function_set_kw_defaults(PyObject *self, PyObject *args) -{ - PyObject *func = NULL, *defaults = NULL; - if (!PyArg_ParseTuple(args, "OO", &func, &defaults)) { - return NULL; - } - int result = PyFunction_SetKwDefaults(func, defaults); - if (result == -1) - return NULL; - Py_RETURN_NONE; -} - -static PyObject * -function_get_closure(PyObject *self, PyObject *func) -{ - PyObject *closure = PyFunction_GetClosure(func); - if (closure != NULL) { - return Py_NewRef(closure); - } else if (PyErr_Occurred()) { - return NULL; - } else { - Py_RETURN_NONE; // This can happen when `closure` is set to `None` - } -} - -static PyObject * -function_set_closure(PyObject *self, PyObject *args) -{ - PyObject *func = NULL, *closure = NULL; - if (!PyArg_ParseTuple(args, "OO", &func, &closure)) { - return NULL; - } - int result = PyFunction_SetClosure(func, closure); - if (result == -1) { - return NULL; - } - Py_RETURN_NONE; -} - - static PyObject * test_weakref_capi(PyObject *Py_UNUSED(module), PyObject 
*Py_UNUSED(args)) { @@ -3308,19 +2421,6 @@ finalize_thread_hang(PyObject *self, PyObject *callback) } -static PyObject * -type_freeze(PyObject *module, PyObject *args) -{ - PyTypeObject *type; - if (!PyArg_ParseTuple(args, "O!", &PyType_Type, &type)) { - return NULL; - } - if (PyType_Freeze(type) < 0) { - return NULL; - } - Py_RETURN_NONE; -} - struct atexit_data { int called; PyThreadState *tstate; @@ -3390,124 +2490,18 @@ code_offset_to_line(PyObject* self, PyObject* const* args, Py_ssize_t nargsf) } -static void -tracemalloc_track_race_thread(void *data) -{ - PyTraceMalloc_Track(123, 10, 1); - PyTraceMalloc_Untrack(123, 10); - - PyThread_type_lock lock = (PyThread_type_lock)data; - PyThread_release_lock(lock); -} - -// gh-128679: Test fix for tracemalloc.stop() race condition -static PyObject * -tracemalloc_track_race(PyObject *self, PyObject *args) -{ -#define NTHREAD 50 - PyObject *tracemalloc = NULL; - PyObject *stop = NULL; - PyThread_type_lock locks[NTHREAD]; - memset(locks, 0, sizeof(locks)); - - // Call tracemalloc.start() - tracemalloc = PyImport_ImportModule("tracemalloc"); - if (tracemalloc == NULL) { - goto error; - } - PyObject *start = PyObject_GetAttrString(tracemalloc, "start"); - if (start == NULL) { - goto error; - } - PyObject *res = PyObject_CallNoArgs(start); - Py_DECREF(start); - if (res == NULL) { - goto error; - } - Py_DECREF(res); - - stop = PyObject_GetAttrString(tracemalloc, "stop"); - Py_CLEAR(tracemalloc); - if (stop == NULL) { - goto error; - } - - // Start threads - for (size_t i = 0; i < NTHREAD; i++) { - PyThread_type_lock lock = PyThread_allocate_lock(); - if (!lock) { - PyErr_NoMemory(); - goto error; - } - locks[i] = lock; - PyThread_acquire_lock(lock, 1); - - unsigned long thread; - thread = PyThread_start_new_thread(tracemalloc_track_race_thread, - (void*)lock); - if (thread == (unsigned long)-1) { - PyErr_SetString(PyExc_RuntimeError, "can't start new thread"); - goto error; - } - } - - // Call tracemalloc.stop() while threads are running - res = PyObject_CallNoArgs(stop); - Py_CLEAR(stop); - if (res == NULL) { - goto error; - } - Py_DECREF(res); - - // Wait until threads complete with the GIL released - Py_BEGIN_ALLOW_THREADS - for (size_t i = 0; i < NTHREAD; i++) { - PyThread_type_lock lock = locks[i]; - PyThread_acquire_lock(lock, 1); - PyThread_release_lock(lock); - } - Py_END_ALLOW_THREADS - - // Free threads locks - for (size_t i=0; i < NTHREAD; i++) { - PyThread_type_lock lock = locks[i]; - PyThread_free_lock(lock); - } - Py_RETURN_NONE; - -error: - Py_CLEAR(tracemalloc); - Py_CLEAR(stop); - for (size_t i=0; i < NTHREAD; i++) { - PyThread_type_lock lock = locks[i]; - if (lock) { - PyThread_free_lock(lock); - } - } - return NULL; -#undef NTHREAD -} - static PyMethodDef TestMethods[] = { {"set_errno", set_errno, METH_VARARGS}, {"test_config", test_config, METH_NOARGS}, {"test_sizeof_c_types", test_sizeof_c_types, METH_NOARGS}, - {"test_list_api", test_list_api, METH_NOARGS}, - {"test_dict_iteration", test_dict_iteration, METH_NOARGS}, {"test_lazy_hash_inheritance", test_lazy_hash_inheritance,METH_NOARGS}, - {"test_xincref_doesnt_leak",test_xincref_doesnt_leak, METH_NOARGS}, - {"test_incref_doesnt_leak", test_incref_doesnt_leak, METH_NOARGS}, - {"test_xdecref_doesnt_leak",test_xdecref_doesnt_leak, METH_NOARGS}, - {"test_decref_doesnt_leak", test_decref_doesnt_leak, METH_NOARGS}, {"test_structseq_newtype_doesnt_leak", test_structseq_newtype_doesnt_leak, METH_NOARGS}, {"test_structseq_newtype_null_descr_doc", test_structseq_newtype_null_descr_doc, 
METH_NOARGS}, - {"test_incref_decref_API", test_incref_decref_API, METH_NOARGS}, {"pyobject_repr_from_null", pyobject_repr_from_null, METH_NOARGS}, {"pyobject_str_from_null", pyobject_str_from_null, METH_NOARGS}, {"pyobject_bytes_from_null", pyobject_bytes_from_null, METH_NOARGS}, - {"test_string_to_double", test_string_to_double, METH_NOARGS}, {"test_capsule", (PyCFunction)test_capsule, METH_NOARGS}, {"test_from_contiguous", (PyCFunction)test_from_contiguous, METH_NOARGS}, #if (defined(__linux__) || defined(__FreeBSD__)) && defined(__GNUC__) @@ -3518,13 +2512,6 @@ static PyMethodDef TestMethods[] = { {"py_buildvalue", py_buildvalue, METH_VARARGS}, {"py_buildvalue_ints", py_buildvalue_ints, METH_VARARGS}, {"test_buildvalue_N", test_buildvalue_N, METH_NOARGS}, - {"test_get_statictype_slots", test_get_statictype_slots, METH_NOARGS}, - {"get_heaptype_for_name", get_heaptype_for_name, METH_NOARGS}, - {"get_type_name", get_type_name, METH_O}, - {"get_type_qualname", get_type_qualname, METH_O}, - {"get_type_fullyqualname", get_type_fullyqualname, METH_O}, - {"get_type_module_name", get_type_module_name, METH_O}, - {"test_get_type_dict", test_get_type_dict, METH_NOARGS}, {"test_reftracer", test_reftracer, METH_NOARGS}, {"_test_thread_state", test_thread_state, METH_VARARGS}, {"gilstate_ensure_release", gilstate_ensure_release, METH_NOARGS}, @@ -3573,10 +2560,6 @@ static PyMethodDef TestMethods[] = { #endif {"test_pythread_tss_key_state", test_pythread_tss_key_state, METH_VARARGS}, {"bad_get", bad_get, METH_VARARGS}, -#ifdef Py_REF_DEBUG - {"negative_refcount", negative_refcount, METH_NOARGS}, - {"decref_freed_object", decref_freed_object, METH_NOARGS}, -#endif {"meth_varargs", meth_varargs, METH_VARARGS}, {"meth_varargs_keywords", _PyCFunction_CAST(meth_varargs_keywords), METH_VARARGS|METH_KEYWORDS}, {"meth_o", meth_o, METH_O}, @@ -3585,52 +2568,20 @@ static PyMethodDef TestMethods[] = { {"meth_fastcall_keywords", _PyCFunction_CAST(meth_fastcall_keywords), METH_FASTCALL|METH_KEYWORDS}, {"pycfunction_call", test_pycfunction_call, METH_VARARGS}, {"pynumber_tobase", pynumber_tobase, METH_VARARGS}, - {"test_set_type_size", test_set_type_size, METH_NOARGS}, - {"test_py_clear", test_py_clear, METH_NOARGS}, - {"test_py_setref", test_py_setref, METH_NOARGS}, - {"test_refcount_macros", test_refcount_macros, METH_NOARGS}, - {"test_refcount_funcs", test_refcount_funcs, METH_NOARGS}, - {"test_py_is_macros", test_py_is_macros, METH_NOARGS}, - {"test_py_is_funcs", test_py_is_funcs, METH_NOARGS}, - {"type_get_version", type_get_version, METH_O, PyDoc_STR("type->tp_version_tag")}, - {"type_modified", type_modified, METH_O, PyDoc_STR("PyType_Modified")}, - {"type_assign_version", type_assign_version, METH_O, PyDoc_STR("PyUnstable_Type_AssignVersionTag")}, - {"type_get_tp_bases", type_get_tp_bases, METH_O}, - {"type_get_tp_mro", type_get_tp_mro, METH_O}, {"get_basic_static_type", get_basic_static_type, METH_VARARGS, NULL}, {"test_tstate_capi", test_tstate_capi, METH_NOARGS, NULL}, - {"frame_getlocals", frame_getlocals, METH_O, NULL}, - {"frame_getglobals", frame_getglobals, METH_O, NULL}, - {"frame_getgenerator", frame_getgenerator, METH_O, NULL}, - {"frame_getbuiltins", frame_getbuiltins, METH_O, NULL}, - {"frame_getlasti", frame_getlasti, METH_O, NULL}, - {"frame_new", frame_new, METH_VARARGS, NULL}, - {"frame_getvar", test_frame_getvar, METH_VARARGS, NULL}, - {"frame_getvarstring", test_frame_getvarstring, METH_VARARGS, NULL}, {"gen_get_code", gen_get_code, METH_O, NULL}, {"get_feature_macros", 
get_feature_macros, METH_NOARGS, NULL}, {"test_code_api", test_code_api, METH_NOARGS, NULL}, {"settrace_to_error", settrace_to_error, METH_O, NULL}, {"settrace_to_record", settrace_to_record, METH_O, NULL}, {"test_macros", test_macros, METH_NOARGS, NULL}, - {"clear_managed_dict", clear_managed_dict, METH_O, NULL}, - {"function_get_code", function_get_code, METH_O, NULL}, - {"function_get_globals", function_get_globals, METH_O, NULL}, - {"function_get_module", function_get_module, METH_O, NULL}, - {"function_get_defaults", function_get_defaults, METH_O, NULL}, - {"function_set_defaults", function_set_defaults, METH_VARARGS, NULL}, - {"function_get_kw_defaults", function_get_kw_defaults, METH_O, NULL}, - {"function_set_kw_defaults", function_set_kw_defaults, METH_VARARGS, NULL}, - {"function_get_closure", function_get_closure, METH_O, NULL}, - {"function_set_closure", function_set_closure, METH_VARARGS, NULL}, {"test_weakref_capi", test_weakref_capi, METH_NOARGS}, {"function_set_warning", function_set_warning, METH_NOARGS}, {"test_critical_sections", test_critical_sections, METH_NOARGS}, {"finalize_thread_hang", finalize_thread_hang, METH_O, NULL}, - {"type_freeze", type_freeze, METH_VARARGS}, {"test_atexit", test_atexit, METH_NOARGS}, {"code_offset_to_line", _PyCFunction_CAST(code_offset_to_line), METH_FASTCALL}, - {"tracemalloc_track_race", tracemalloc_track_race, METH_NOARGS}, {NULL, NULL} /* sentinel */ }; @@ -4401,6 +3352,18 @@ PyInit__testcapi(void) if (_PyTestCapi_Init_Config(m) < 0) { return NULL; } + if (_PyTestCapi_Init_Import(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Frame(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Type(m) < 0) { + return NULL; + } + if (_PyTestCapi_Init_Function(m) < 0) { + return NULL; + } PyState_AddModule(m, &_testcapimodule); return m; diff --git a/Modules/_testexternalinspection.c b/Modules/_testexternalinspection.c index 5a28c0e266226b..22074c81b7405f 100644 --- a/Modules/_testexternalinspection.c +++ b/Modules/_testexternalinspection.c @@ -232,15 +232,15 @@ search_map_for_section(pid_t pid, const char* secname, const char* substr) { &count, &object_name) == KERN_SUCCESS) { - int path_len = proc_regionfilename( - pid, address, map_filename, MAXPATHLEN); - if (path_len == 0) { + if ((region_info.protection & VM_PROT_READ) == 0 + || (region_info.protection & VM_PROT_EXECUTE) == 0) { address += size; continue; } - if ((region_info.protection & VM_PROT_READ) == 0 - || (region_info.protection & VM_PROT_EXECUTE) == 0) { + int path_len = proc_regionfilename( + pid, address, map_filename, MAXPATHLEN); + if (path_len == 0) { address += size; continue; } diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index f3d234a7f9595e..e44b629897c58a 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -950,38 +950,13 @@ get_co_framesize(PyObject *self, PyObject *arg) return PyLong_FromLong(code->co_framesize); } -#ifdef _Py_TIER2 - static PyObject * -new_uop_optimizer(PyObject *self, PyObject *arg) +jit_enabled(PyObject *self, PyObject *arg) { - return _PyOptimizer_NewUOpOptimizer(); + return PyBool_FromLong(_PyInterpreterState_GET()->jit); } -static PyObject * -set_optimizer(PyObject *self, PyObject *opt) -{ - if (opt == Py_None) { - opt = NULL; - } - if (_Py_SetTier2Optimizer((_PyOptimizerObject*)opt) < 0) { - return NULL; - } - Py_RETURN_NONE; -} - -static PyObject * -get_optimizer(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - PyObject *opt = NULL; #ifdef _Py_TIER2 - opt = (PyObject *)_Py_GetOptimizer(); 
-#endif - if (opt == NULL) { - Py_RETURN_NONE; - } - return opt; -} static PyObject * add_executor_dependency(PyObject *self, PyObject *args) @@ -2047,10 +2022,8 @@ static PyMethodDef module_functions[] = { {"iframe_getline", iframe_getline, METH_O, NULL}, {"iframe_getlasti", iframe_getlasti, METH_O, NULL}, {"get_co_framesize", get_co_framesize, METH_O, NULL}, + {"jit_enabled", jit_enabled, METH_NOARGS, NULL}, #ifdef _Py_TIER2 - {"get_optimizer", get_optimizer, METH_NOARGS, NULL}, - {"set_optimizer", set_optimizer, METH_O, NULL}, - {"new_uop_optimizer", new_uop_optimizer, METH_NOARGS, NULL}, {"add_executor_dependency", add_executor_dependency, METH_VARARGS, NULL}, {"invalidate_executors", invalidate_executors, METH_O, NULL}, #endif @@ -2165,6 +2138,21 @@ module_exec(PyObject *module) return 1; } + if (PyModule_Add(module, "SPECIALIZATION_THRESHOLD", + PyLong_FromLong(ADAPTIVE_WARMUP_VALUE + 1)) < 0) { + return 1; + } + + if (PyModule_Add(module, "SPECIALIZATION_COOLDOWN", + PyLong_FromLong(ADAPTIVE_COOLDOWN_VALUE + 1)) < 0) { + return 1; + } + + if (PyModule_Add(module, "SHARED_KEYS_MAX_SIZE", + PyLong_FromLong(SHARED_KEYS_MAX_SIZE)) < 0) { + return 1; + } + return 0; } diff --git a/Modules/_testlimitedcapi.c b/Modules/_testlimitedcapi.c index 82dac1c999470f..4dae99ec92a085 100644 --- a/Modules/_testlimitedcapi.c +++ b/Modules/_testlimitedcapi.c @@ -89,5 +89,8 @@ PyInit__testlimitedcapi(void) if (_PyTestLimitedCAPI_Init_Version(mod) < 0) { return NULL; } + if (_PyTestLimitedCAPI_Init_File(mod) < 0) { + return NULL; + } return mod; } diff --git a/Modules/_testlimitedcapi/clinic/file.c.h b/Modules/_testlimitedcapi/clinic/file.c.h new file mode 100644 index 00000000000000..663619eead2a3a --- /dev/null +++ b/Modules/_testlimitedcapi/clinic/file.c.h @@ -0,0 +1,81 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +PyDoc_STRVAR(_testcapi_pyfile_getline__doc__, +"pyfile_getline($module, file, n, /)\n" +"--\n" +"\n"); + +#define _TESTCAPI_PYFILE_GETLINE_METHODDEF \ + {"pyfile_getline", (PyCFunction)(void(*)(void))_testcapi_pyfile_getline, METH_FASTCALL, _testcapi_pyfile_getline__doc__}, + +static PyObject * +_testcapi_pyfile_getline_impl(PyObject *module, PyObject *file, int n); + +static PyObject * +_testcapi_pyfile_getline(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *file; + int n; + + if (nargs != 2) { + PyErr_Format(PyExc_TypeError, "pyfile_getline expected 2 arguments, got %zd", nargs); + goto exit; + } + file = args[0]; + n = PyLong_AsInt(args[1]); + if (n == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = _testcapi_pyfile_getline_impl(module, file, n); + +exit: + return return_value; +} + +PyDoc_STRVAR(_testcapi_pyfile_writeobject__doc__, +"pyfile_writeobject($module, obj, file, flags, /)\n" +"--\n" +"\n"); + +#define _TESTCAPI_PYFILE_WRITEOBJECT_METHODDEF \ + {"pyfile_writeobject", (PyCFunction)(void(*)(void))_testcapi_pyfile_writeobject, METH_FASTCALL, _testcapi_pyfile_writeobject__doc__}, + +static PyObject * +_testcapi_pyfile_writeobject_impl(PyObject *module, PyObject *obj, + PyObject *file, int flags); + +static PyObject * +_testcapi_pyfile_writeobject(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *obj; + PyObject *file; + int flags; + + if (nargs != 3) { + PyErr_Format(PyExc_TypeError, "pyfile_writeobject expected 3 arguments, got %zd", nargs); + goto exit; + } + obj = args[0]; + file = args[1]; + flags = PyLong_AsInt(args[2]); + if 
(flags == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = _testcapi_pyfile_writeobject_impl(module, obj, file, flags); + +exit: + return return_value; +} + +PyDoc_STRVAR(_testcapi_pyobject_asfiledescriptor__doc__, +"pyobject_asfiledescriptor($module, obj, /)\n" +"--\n" +"\n"); + +#define _TESTCAPI_PYOBJECT_ASFILEDESCRIPTOR_METHODDEF \ + {"pyobject_asfiledescriptor", (PyCFunction)_testcapi_pyobject_asfiledescriptor, METH_O, _testcapi_pyobject_asfiledescriptor__doc__}, +/*[clinic end generated code: output=ea572aaaa01aec7b input=a9049054013a1b77]*/ diff --git a/Modules/_testlimitedcapi/file.c b/Modules/_testlimitedcapi/file.c new file mode 100644 index 00000000000000..e082e3c6700ee7 --- /dev/null +++ b/Modules/_testlimitedcapi/file.c @@ -0,0 +1,128 @@ +#include "pyconfig.h" // Py_GIL_DISABLED +#ifndef Py_GIL_DISABLED + // Need limited C API 3.13 for PyLong_AsInt() +# define Py_LIMITED_API 0x030d0000 +#endif + +#include "parts.h" +#include "util.h" +#include "clinic/file.c.h" + + +/*[clinic input] +module _testcapi +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=6361033e795369fc]*/ + + +static PyObject * +pyfile_fromfd(PyObject *module, PyObject *args) +{ + int fd; + const char *name; + Py_ssize_t size; + const char *mode; + int buffering; + const char *encoding; + const char *errors; + const char *newline; + int closefd; + if (!PyArg_ParseTuple(args, + "iz#z#" + "iz#z#" + "z#i", + &fd, &name, &size, &mode, &size, + &buffering, &encoding, &size, &errors, &size, + &newline, &size, &closefd)) { + return NULL; + } + + return PyFile_FromFd(fd, name, mode, buffering, + encoding, errors, newline, closefd); +} + + +/*[clinic input] +_testcapi.pyfile_getline + + file: object + n: int + / + +[clinic start generated code]*/ + +static PyObject * +_testcapi_pyfile_getline_impl(PyObject *module, PyObject *file, int n) +/*[clinic end generated code: output=137fde2774563266 input=df26686148b3657e]*/ +{ + return PyFile_GetLine(file, n); +} + + +/*[clinic input] +_testcapi.pyfile_writeobject + + obj: object + file: object + flags: int + / + +[clinic start generated code]*/ + +static PyObject * +_testcapi_pyfile_writeobject_impl(PyObject *module, PyObject *obj, + PyObject *file, int flags) +/*[clinic end generated code: output=ebb4d802e3db489c input=64a34a3e75b9935a]*/ +{ + NULLABLE(obj); + NULLABLE(file); + RETURN_INT(PyFile_WriteObject(obj, file, flags)); +} + + +static PyObject * +pyfile_writestring(PyObject *module, PyObject *args) +{ + const char *str; + Py_ssize_t size; + PyObject *file; + if (!PyArg_ParseTuple(args, "z#O", &str, &size, &file)) { + return NULL; + } + NULLABLE(file); + + RETURN_INT(PyFile_WriteString(str, file)); +} + + +/*[clinic input] +_testcapi.pyobject_asfiledescriptor + + obj: object + / + +[clinic start generated code]*/ + +static PyObject * +_testcapi_pyobject_asfiledescriptor(PyObject *module, PyObject *obj) +/*[clinic end generated code: output=2d640c6a1970c721 input=45fa1171d62b18d7]*/ +{ + NULLABLE(obj); + RETURN_INT(PyObject_AsFileDescriptor(obj)); +} + + +static PyMethodDef test_methods[] = { + {"pyfile_fromfd", pyfile_fromfd, METH_VARARGS}, + _TESTCAPI_PYFILE_GETLINE_METHODDEF + _TESTCAPI_PYFILE_WRITEOBJECT_METHODDEF + {"pyfile_writestring", pyfile_writestring, METH_VARARGS}, + _TESTCAPI_PYOBJECT_ASFILEDESCRIPTOR_METHODDEF + {NULL}, +}; + +int +_PyTestLimitedCAPI_Init_File(PyObject *m) +{ + return PyModule_AddFunctions(m, test_methods); +} diff --git a/Modules/_testlimitedcapi/parts.h 
b/Modules/_testlimitedcapi/parts.h index 9efcd8dcb71e5b..60f6f03011a65c 100644 --- a/Modules/_testlimitedcapi/parts.h +++ b/Modules/_testlimitedcapi/parts.h @@ -42,5 +42,6 @@ int _PyTestLimitedCAPI_Init_Tuple(PyObject *module); int _PyTestLimitedCAPI_Init_Unicode(PyObject *module); int _PyTestLimitedCAPI_Init_VectorcallLimited(PyObject *module); int _PyTestLimitedCAPI_Init_Version(PyObject *module); +int _PyTestLimitedCAPI_Init_File(PyObject *module); #endif // Py_TESTLIMITEDCAPI_PARTS_H diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c index 57b122aef20ddd..70cd1bb287ee85 100644 --- a/Modules/_threadmodule.c +++ b/Modules/_threadmodule.c @@ -1538,17 +1538,20 @@ create_localsdict(localobject *self, thread_module_state *state, goto err; } - if (PyDict_SetItem(self->localdicts, tstate->threading_local_key, ldict) < - 0) { + if (PyDict_SetItem(self->localdicts, tstate->threading_local_key, + ldict) < 0) + { goto err; } wr = create_sentinel_wr(self); if (wr == NULL) { PyObject *exc = PyErr_GetRaisedException(); - if (PyDict_DelItem(self->localdicts, tstate->threading_local_key) < - 0) { - PyErr_WriteUnraisable((PyObject *)self); + if (PyDict_DelItem(self->localdicts, + tstate->threading_local_key) < 0) + { + PyErr_FormatUnraisable("Exception ignored while deleting " + "thread local of %R", self); } PyErr_SetRaisedException(exc); goto err; @@ -1556,9 +1559,11 @@ create_localsdict(localobject *self, thread_module_state *state, if (PySet_Add(self->thread_watchdogs, wr) < 0) { PyObject *exc = PyErr_GetRaisedException(); - if (PyDict_DelItem(self->localdicts, tstate->threading_local_key) < - 0) { - PyErr_WriteUnraisable((PyObject *)self); + if (PyDict_DelItem(self->localdicts, + tstate->threading_local_key) < 0) + { + PyErr_FormatUnraisable("Exception ignored while deleting " + "thread local of %R", self); } PyErr_SetRaisedException(exc); goto err; @@ -1608,13 +1613,16 @@ _ldict(localobject *self, thread_module_state *state) we create a new one the next time we do an attr access */ PyObject *exc = PyErr_GetRaisedException(); - if (PyDict_DelItem(self->localdicts, tstate->threading_local_key) < - 0) { - PyErr_WriteUnraisable((PyObject *)self); - PyErr_Clear(); + if (PyDict_DelItem(self->localdicts, + tstate->threading_local_key) < 0) + { + PyErr_FormatUnraisable("Exception ignored while deleting " + "thread local of %R", self); + assert(!PyErr_Occurred()); } if (PySet_Discard(self->thread_watchdogs, wr) < 0) { - PyErr_WriteUnraisable((PyObject *)self); + PyErr_FormatUnraisable("Exception ignored while discarding " + "thread watchdog of %R", self); } PyErr_SetRaisedException(exc); Py_DECREF(ldict); @@ -1745,12 +1753,14 @@ clear_locals(PyObject *locals_and_key, PyObject *dummyweakref) if (self->localdicts != NULL) { PyObject *key = PyTuple_GetItem(locals_and_key, 1); if (PyDict_Pop(self->localdicts, key, NULL) < 0) { - PyErr_WriteUnraisable((PyObject*)self); + PyErr_FormatUnraisable("Exception ignored while clearing " + "thread local %R", (PyObject *)self); } } if (self->thread_watchdogs != NULL) { if (PySet_Discard(self->thread_watchdogs, dummyweakref) < 0) { - PyErr_WriteUnraisable((PyObject *)self); + PyErr_FormatUnraisable("Exception ignored while clearing " + "thread local %R", (PyObject *)self); } } @@ -2313,7 +2323,8 @@ thread_shutdown(PyObject *self, PyObject *args) // Wait for the thread to finish. If we're interrupted, such // as by a ctrl-c we print the error and exit early. 
if (ThreadHandle_join(handle, -1) < 0) { - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while joining a thread " + "in _thread._shutdown()"); ThreadHandle_decref(handle); Py_RETURN_NONE; } diff --git a/Modules/_winapi.c b/Modules/_winapi.c index 260cab48091c16..786a828f00908c 100644 --- a/Modules/_winapi.c +++ b/Modules/_winapi.c @@ -171,17 +171,16 @@ overlapped_dealloc(OverlappedObject *self) { /* The operation is no longer pending -- nothing to do. */ } - else if (_Py_IsInterpreterFinalizing(_PyInterpreterState_GET())) - { + else if (_Py_IsInterpreterFinalizing(_PyInterpreterState_GET())) { /* The operation is still pending -- give a warning. This will probably only happen on Windows XP. */ PyErr_SetString(PyExc_PythonFinalizationError, "I/O operations still in flight while destroying " "Overlapped object, the process may crash"); - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while deallocating " + "overlapped operation %R", self); } - else - { + else { /* The operation is still pending, but the process is probably about to exit, so we need not worry too much about memory leaks. Leaking self prevents a potential diff --git a/Modules/_zoneinfo.c b/Modules/_zoneinfo.c index c5292575c22f23..1fcea9ce8b1261 100644 --- a/Modules/_zoneinfo.c +++ b/Modules/_zoneinfo.c @@ -782,7 +782,7 @@ zoneinfo_reduce(PyObject *obj_self, PyObject *unused) if (self->source == SOURCE_FILE) { // Objects constructed from files cannot be pickled. PyObject *pickle_error = - _PyImport_GetModuleAttrString("pickle", "PicklingError"); + PyImport_ImportModuleAttrString("pickle", "PicklingError"); if (pickle_error == NULL) { return NULL; } @@ -2554,7 +2554,7 @@ static PyObject * new_weak_cache(void) { PyObject *WeakValueDictionary = - _PyImport_GetModuleAttrString("weakref", "WeakValueDictionary"); + PyImport_ImportModuleAttrString("weakref", "WeakValueDictionary"); if (WeakValueDictionary == NULL) { return NULL; } @@ -2732,12 +2732,12 @@ zoneinfomodule_exec(PyObject *m) /* Populate imports */ state->_tzpath_find_tzfile = - _PyImport_GetModuleAttrString("zoneinfo._tzpath", "find_tzfile"); + PyImport_ImportModuleAttrString("zoneinfo._tzpath", "find_tzfile"); if (state->_tzpath_find_tzfile == NULL) { goto error; } - state->io_open = _PyImport_GetModuleAttrString("io", "open"); + state->io_open = PyImport_ImportModuleAttrString("io", "open"); if (state->io_open == NULL) { goto error; } diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index 28c6a0ae05c598..dc1729a7a3a558 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -2285,7 +2285,7 @@ array_array___reduce_ex___impl(arrayobject *self, PyTypeObject *cls, assert(state != NULL); if (state->array_reconstructor == NULL) { - state->array_reconstructor = _PyImport_GetModuleAttrString( + state->array_reconstructor = PyImport_ImportModuleAttrString( "array", "_array_reconstructor"); if (state->array_reconstructor == NULL) { return NULL; @@ -3206,7 +3206,7 @@ array_modexec(PyObject *m) return -1; } - PyObject *mutablesequence = _PyImport_GetModuleAttrString( + PyObject *mutablesequence = PyImport_ImportModuleAttrString( "collections.abc", "MutableSequence"); if (!mutablesequence) { Py_DECREF((PyObject *)state->ArrayType); diff --git a/Modules/atexitmodule.c b/Modules/atexitmodule.c index 1b89b32ba907d7..2bfdda53af8cb2 100644 --- a/Modules/atexitmodule.c +++ b/Modules/atexitmodule.c @@ -110,7 +110,8 @@ atexit_callfuncs(struct atexit_state *state) PyObject *copy = PyList_GetSlice(state->callbacks, 0, 
PyList_GET_SIZE(state->callbacks)); if (copy == NULL) { - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while " + "copying atexit callbacks"); return; } diff --git a/Modules/cjkcodecs/cjkcodecs.h b/Modules/cjkcodecs/cjkcodecs.h index 2b446ba5226ac0..737a7a042753a9 100644 --- a/Modules/cjkcodecs/cjkcodecs.h +++ b/Modules/cjkcodecs/cjkcodecs.h @@ -13,7 +13,6 @@ #include "Python.h" #include "multibytecodec.h" -#include "pycore_import.h" // _PyImport_GetModuleAttrString() /* a unicode "undefined" code point */ @@ -299,7 +298,7 @@ add_codecs(cjkcodecs_module_state *st) \ static PyObject * getmultibytecodec(void) { - return _PyImport_GetModuleAttrString("_multibytecodec", "__create_codec"); + return PyImport_ImportModuleAttrString("_multibytecodec", "__create_codec"); } static void diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c index c522b00884bb5e..a5d761a1b3858c 100644 --- a/Modules/faulthandler.c +++ b/Modules/faulthandler.c @@ -1360,7 +1360,7 @@ PyInit_faulthandler(void) static int faulthandler_init_enable(void) { - PyObject *enable = _PyImport_GetModuleAttrString("faulthandler", "enable"); + PyObject *enable = PyImport_ImportModuleAttrString("faulthandler", "enable"); if (enable == NULL) { return -1; } diff --git a/Modules/getpath.c b/Modules/getpath.c index 2d3c9757298d16..e2478da021f511 100644 --- a/Modules/getpath.c +++ b/Modules/getpath.c @@ -955,7 +955,7 @@ _PyConfig_InitPathConfig(PyConfig *config, int compute_path_config) ) { Py_DECREF(co); Py_DECREF(dict); - PyErr_FormatUnraisable("Exception ignored in preparing getpath"); + PyErr_FormatUnraisable("Exception ignored while preparing getpath"); return PyStatus_Error("error evaluating initial values"); } @@ -964,13 +964,13 @@ _PyConfig_InitPathConfig(PyConfig *config, int compute_path_config) if (!r) { Py_DECREF(dict); - PyErr_FormatUnraisable("Exception ignored in running getpath"); + PyErr_FormatUnraisable("Exception ignored while running getpath"); return PyStatus_Error("error evaluating path"); } Py_DECREF(r); if (_PyConfig_FromDict(config, configDict) < 0) { - PyErr_FormatUnraisable("Exception ignored in reading getpath results"); + PyErr_FormatUnraisable("Exception ignored while reading getpath results"); Py_DECREF(dict); return PyStatus_Error("error getting getpath results"); } diff --git a/Modules/main.c b/Modules/main.c index 67e8a534713a39..11b85b88e68ed6 100644 --- a/Modules/main.c +++ b/Modules/main.c @@ -314,25 +314,19 @@ pymain_start_pyrepl_no_main(void) static int pymain_run_module(const wchar_t *modname, int set_argv0) { - PyObject *module, *runpy, *runmodule, *runargs, *result; + PyObject *module, *runmodule, *runargs, *result; if (PySys_Audit("cpython.run_module", "u", modname) < 0) { return pymain_exit_err_print(); } - runpy = PyImport_ImportModule("runpy"); - if (runpy == NULL) { - fprintf(stderr, "Could not import runpy module\n"); - return pymain_exit_err_print(); - } - runmodule = PyObject_GetAttrString(runpy, "_run_module_as_main"); + runmodule = PyImport_ImportModuleAttrString("runpy", + "_run_module_as_main"); if (runmodule == NULL) { - fprintf(stderr, "Could not access runpy._run_module_as_main\n"); - Py_DECREF(runpy); + fprintf(stderr, "Could not import runpy._run_module_as_main\n"); return pymain_exit_err_print(); } module = PyUnicode_FromWideChar(modname, wcslen(modname)); if (module == NULL) { fprintf(stderr, "Could not convert module name to unicode\n"); - Py_DECREF(runpy); Py_DECREF(runmodule); return pymain_exit_err_print(); } @@ -340,7 +334,6 @@ 
pymain_run_module(const wchar_t *modname, int set_argv0) if (runargs == NULL) { fprintf(stderr, "Could not create arguments for runpy._run_module_as_main\n"); - Py_DECREF(runpy); Py_DECREF(runmodule); Py_DECREF(module); return pymain_exit_err_print(); @@ -350,7 +343,6 @@ pymain_run_module(const wchar_t *modname, int set_argv0) if (!result && PyErr_Occurred() == PyExc_KeyboardInterrupt) { _PyRuntime.signals.unhandled_keyboard_interrupt = 1; } - Py_DECREF(runpy); Py_DECREF(runmodule); Py_DECREF(module); Py_DECREF(runargs); @@ -497,7 +489,7 @@ pymain_run_startup(PyConfig *config, int *exitcode) static int pymain_run_interactive_hook(int *exitcode) { - PyObject *hook, *result; + PyObject *hook; if (PySys_GetOptionalAttrString("__interactivehook__", &hook) < 0) { goto error; } @@ -509,7 +501,7 @@ pymain_run_interactive_hook(int *exitcode) goto error; } - result = _PyObject_CallNoArgs(hook); + PyObject *result = _PyObject_CallNoArgs(hook); Py_DECREF(hook); if (result == NULL) { goto error; diff --git a/Modules/overlapped.c b/Modules/overlapped.c index 308a0dab7fab1a..806ebee7a70ff1 100644 --- a/Modules/overlapped.c +++ b/Modules/overlapped.c @@ -759,7 +759,8 @@ Overlapped_dealloc(OverlappedObject *self) PyExc_RuntimeError, "%R still has pending operation at " "deallocation, the process may crash", self); - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while deallocating " + "overlapped operation %R", self); } } diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index a35a848a7ca4b8..6dfe73017abf9d 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -7,6 +7,8 @@ of the compiler used. Different compilers define their own feature test macro, e.g. '_MSC_VER'. */ +// --- Python includes ------------------------------------------------------ + #include "Python.h" #ifdef __VXWORKS__ @@ -26,255 +28,63 @@ #include "pycore_time.h" // _PyLong_FromTime_t() #include "pycore_typeobject.h" // _PyType_AddMethod() -#ifdef HAVE_UNISTD_H -# include // symlink() -#endif - -#ifdef MS_WINDOWS -# include -# if !defined(MS_WINDOWS_GAMES) || defined(MS_WINDOWS_DESKTOP) -# include -# endif -# include -# include // UNLEN -# include "osdefs.h" // SEP -# include // SetEntriesInAcl -# include // SDDL_REVISION_1 -# if defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM) -# define HAVE_SYMLINK -# endif /* MS_WINDOWS_DESKTOP | MS_WINDOWS_SYSTEM */ -#endif - #ifndef MS_WINDOWS -# include "posixmodule.h" +# include "posixmodule.h" // _PyLong_FromUid() #else -# include "pycore_fileutils_windows.h" -# include "winreparse.h" +# include "pycore_fileutils_windows.h" // _Py_GetFileInformationByName() +# include "osdefs.h" // SEP +# include "winreparse.h" // _Py_REPARSE_DATA_BUFFER #endif -#if !defined(EX_OK) && defined(EXIT_SUCCESS) -# define EX_OK EXIT_SUCCESS + +// --- System includes ------------------------------------------------------ + +#include // ctermid() +#include // system() + +#ifdef HAVE_UNISTD_H +# include // symlink() #endif #ifdef __APPLE__ - /* Needed for the implementation of os.statvfs */ + /* Needed for the implementation of os.statvfs */ # include # include #endif -/* On android API level 21, 'AT_EACCESS' is not declared although - * HAVE_FACCESSAT is defined. 
*/ -#ifdef __ANDROID__ -# undef HAVE_FACCESSAT -#endif - -#include // ctermid() -#include // system() #ifdef HAVE_SYS_TIME_H # include // futimes() #endif + #ifdef HAVE_SYS_PIDFD_H # include // PIDFD_NONBLOCK #endif - -// SGI apparently needs this forward declaration -#ifdef HAVE__GETPTY -# include // mode_t - extern char * _getpty(int *, int, mode_t, int); -#endif - #ifdef __EMSCRIPTEN__ -#include "emscripten.h" // emscripten_debugger() -#endif - -/* - * A number of APIs are available on macOS from a certain macOS version. - * To support building with a new SDK while deploying to older versions - * the availability test is split into two: - * - HAVE_: The configure check for compile time availability - * - HAVE__RUNTIME: Runtime check for availability - * - * The latter is always true when not on macOS, or when using a compiler - * that does not support __has_builtin (older versions of Xcode). - * - * Due to compiler restrictions there is one valid use of HAVE__RUNTIME: - * if (HAVE__RUNTIME) { ... } - * - * In mixing the test with other tests or using negations will result in compile - * errors. - */ -#if defined(__APPLE__) - -#include - -#if defined(__has_builtin) -#if __has_builtin(__builtin_available) -#define HAVE_BUILTIN_AVAILABLE 1 -#endif -#endif - -#ifdef HAVE_BUILTIN_AVAILABLE -# define HAVE_FSTATAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_FACCESSAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_FCHMODAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_FCHOWNAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_LINKAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_FDOPENDIR_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_MKDIRAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_RENAMEAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_UNLINKAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_OPENAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_READLINKAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_SYMLINKAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) -# define HAVE_FUTIMENS_RUNTIME __builtin_available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *) -# define HAVE_UTIMENSAT_RUNTIME __builtin_available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *) -# define HAVE_PWRITEV_RUNTIME __builtin_available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *) -# define HAVE_MKFIFOAT_RUNTIME __builtin_available(macOS 13.0, iOS 16.0, tvOS 16.0, watchOS 9.0, *) -# define HAVE_MKNODAT_RUNTIME __builtin_available(macOS 13.0, iOS 16.0, tvOS 16.0, watchOS 9.0, *) -# define HAVE_PTSNAME_R_RUNTIME __builtin_available(macOS 10.13.4, iOS 11.3, tvOS 11.3, watchOS 4.3, *) - -# define HAVE_POSIX_SPAWN_SETSID_RUNTIME __builtin_available(macOS 10.15, *) - -#else /* Xcode 8 or earlier */ - - /* __builtin_available is not present in these compilers, but - * some of the symbols might be weak linked (10.10 SDK or later - * deploying on 10.9. - * - * Fall back to the older style of availability checking for - * symbols introduced in macOS 10.10. 
- */ - -# ifdef HAVE_FSTATAT -# define HAVE_FSTATAT_RUNTIME (fstatat != NULL) -# endif - -# ifdef HAVE_FACCESSAT -# define HAVE_FACCESSAT_RUNTIME (faccessat != NULL) -# endif - -# ifdef HAVE_FCHMODAT -# define HAVE_FCHMODAT_RUNTIME (fchmodat != NULL) -# endif - -# ifdef HAVE_FCHOWNAT -# define HAVE_FCHOWNAT_RUNTIME (fchownat != NULL) -# endif - -# ifdef HAVE_LINKAT -# define HAVE_LINKAT_RUNTIME (linkat != NULL) -# endif - -# ifdef HAVE_FDOPENDIR -# define HAVE_FDOPENDIR_RUNTIME (fdopendir != NULL) -# endif - -# ifdef HAVE_MKDIRAT -# define HAVE_MKDIRAT_RUNTIME (mkdirat != NULL) -# endif - -# ifdef HAVE_RENAMEAT -# define HAVE_RENAMEAT_RUNTIME (renameat != NULL) -# endif - -# ifdef HAVE_UNLINKAT -# define HAVE_UNLINKAT_RUNTIME (unlinkat != NULL) -# endif - -# ifdef HAVE_OPENAT -# define HAVE_OPENAT_RUNTIME (openat != NULL) -# endif - -# ifdef HAVE_READLINKAT -# define HAVE_READLINKAT_RUNTIME (readlinkat != NULL) -# endif - -# ifdef HAVE_SYMLINKAT -# define HAVE_SYMLINKAT_RUNTIME (symlinkat != NULL) -# endif - -# ifdef HAVE_UTIMENSAT -# define HAVE_UTIMENSAT_RUNTIME (utimensat != NULL) -# endif - -# ifdef HAVE_FUTIMENS -# define HAVE_FUTIMENS_RUNTIME (futimens != NULL) -# endif - -# ifdef HAVE_PWRITEV -# define HAVE_PWRITEV_RUNTIME (pwritev != NULL) -# endif - -# ifdef HAVE_MKFIFOAT -# define HAVE_MKFIFOAT_RUNTIME (mkfifoat != NULL) -# endif - -# ifdef HAVE_MKNODAT -# define HAVE_MKNODAT_RUNTIME (mknodat != NULL) -# endif - -# ifdef HAVE_PTSNAME_R -# define HAVE_PTSNAME_R_RUNTIME (ptsname_r != NULL) -# endif - -#endif - -#ifdef HAVE_FUTIMESAT -/* Some of the logic for weak linking depends on this assertion */ -# error "HAVE_FUTIMESAT unexpectedly defined" -#endif - -#else -# define HAVE_FSTATAT_RUNTIME 1 -# define HAVE_FACCESSAT_RUNTIME 1 -# define HAVE_FCHMODAT_RUNTIME 1 -# define HAVE_FCHOWNAT_RUNTIME 1 -# define HAVE_LINKAT_RUNTIME 1 -# define HAVE_FDOPENDIR_RUNTIME 1 -# define HAVE_MKDIRAT_RUNTIME 1 -# define HAVE_RENAMEAT_RUNTIME 1 -# define HAVE_UNLINKAT_RUNTIME 1 -# define HAVE_OPENAT_RUNTIME 1 -# define HAVE_READLINKAT_RUNTIME 1 -# define HAVE_SYMLINKAT_RUNTIME 1 -# define HAVE_FUTIMENS_RUNTIME 1 -# define HAVE_UTIMENSAT_RUNTIME 1 -# define HAVE_PWRITEV_RUNTIME 1 -# define HAVE_MKFIFOAT_RUNTIME 1 -# define HAVE_MKNODAT_RUNTIME 1 -# define HAVE_PTSNAME_R_RUNTIME 1 +# include "emscripten.h" // emscripten_debugger() #endif - -PyDoc_STRVAR(posix__doc__, -"This module provides access to operating system functionality that is\n\ -standardized by the C Standard and the POSIX standard (a thinly\n\ -disguised Unix interface). 
Refer to the library manual and\n\ -corresponding Unix manual entries for more information on calls."); - - #ifdef HAVE_SYS_UIO_H # include #endif #ifdef HAVE_SYS_TYPES_H -/* Should be included before on HP-UX v3 */ + /* Should be included before on HP-UX v3 */ # include -#endif /* HAVE_SYS_TYPES_H */ - +#endif #ifdef HAVE_SYS_SYSMACROS_H -/* GNU C Library: major(), minor(), makedev() */ + /* GNU C Library: major(), minor(), makedev() */ # include #endif #ifdef HAVE_SYS_STAT_H # include -#endif /* HAVE_SYS_STAT_H */ +#endif #ifdef HAVE_SYS_WAIT_H # include // WNOHANG #endif + #ifdef HAVE_LINUX_WAIT_H # include // P_PIDFD #endif @@ -284,54 +94,34 @@ corresponding Unix manual entries for more information on calls."); #endif #ifdef HAVE_FCNTL_H -# include +# include // fcntl() #endif #ifdef HAVE_GRP_H -# include +# include // setgroups() #endif #ifdef HAVE_SYSEXITS_H -# include +# include // EX_OK #endif #ifdef HAVE_SYS_LOADAVG_H -# include +# include // getloadavg() #endif #ifdef HAVE_SYS_SENDFILE_H -# include +# include // sendfile() #endif #if defined(__APPLE__) -# include +# include // fcopyfile() #endif #ifdef HAVE_SCHED_H -# include +# include // sched_setscheduler() #endif - #ifdef HAVE_LINUX_SCHED_H -# include -#endif - -#if !defined(CPU_ALLOC) && defined(HAVE_SCHED_SETAFFINITY) -# undef HAVE_SCHED_SETAFFINITY -#endif - -#if defined(HAVE_SYS_XATTR_H) -# if defined(HAVE_LINUX_LIMITS_H) && !defined(__FreeBSD_kernel__) && !defined(__GNU__) -# define USE_XATTRS -# include // Needed for XATTR_SIZE_MAX on musl libc. -# endif -# if defined(__CYGWIN__) -# define USE_XATTRS -# include // Needed for XATTR_SIZE_MAX and XATTR_LIST_MAX. -# endif -#endif - -#ifdef USE_XATTRS -# include +# include // SCHED_IDLE, SCHED_RR #endif #if defined(__FreeBSD__) || defined(__DragonFly__) || defined(__APPLE__) @@ -357,23 +147,128 @@ corresponding Unix manual entries for more information on calls."); #endif #ifdef HAVE_LINUX_RANDOM_H -# include +# include // GRND_RANDOM #endif #ifdef HAVE_GETRANDOM_SYSCALL -# include +# include // syscall() +#endif + +#ifdef HAVE_POSIX_SPAWN +# include // posix_spawn() +#endif + +#ifdef HAVE_UTIME_H +# include // utime() #endif +#ifdef HAVE_SYS_UTIME_H +# include +# define HAVE_UTIME_H /* pretend we do for the rest of this file */ +#endif + +#ifdef HAVE_SYS_TIMES_H +# include // times() +#endif + +#ifdef HAVE_SYS_PARAM_H +# include +#endif + +#ifdef HAVE_SYS_UTSNAME_H +# include // uname() +#endif + +/* memfd_create is either defined in sys/mman.h or sys/memfd.h + * linux/memfd.h defines additional flags + */ +#ifdef HAVE_SYS_MMAN_H +# include // memfd_create() +#endif +#ifdef HAVE_SYS_MEMFD_H +# include // memfd_create() +#endif +#ifdef HAVE_LINUX_MEMFD_H +# include // memfd_create(), MFD_CLOEXEC +#endif + +#ifdef HAVE_SYS_EVENTFD_H +# include // eventfd() +#endif + +#ifdef HAVE_SYS_TIMERFD_H +# include // timerfd_create() +#endif + +#ifdef _Py_MEMORY_SANITIZER +# include // __msan_unpoison() +#endif + + +// --- More complex system includes ----------------------------------------- + +#ifdef MS_WINDOWS +# include +# if !defined(MS_WINDOWS_GAMES) || defined(MS_WINDOWS_DESKTOP) +# include // PathCchSkipRoot() +# endif +# include // SetEntriesInAcl +# include // UNLEN +# include // SDDL_REVISION_1 +# include // FSCTL_GET_REPARSE_POINT +# if defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM) +# define HAVE_SYMLINK +# endif /* MS_WINDOWS_DESKTOP | MS_WINDOWS_SYSTEM */ +#endif + + +#ifdef _MSC_VER +# ifdef HAVE_DIRECT_H +# include +# endif +# ifdef HAVE_IO_H +# include +# 
endif +# ifdef HAVE_PROCESS_H +# include // getpid(), _cwait() +# endif +# include +#endif /* _MSC_VER */ + + +#ifdef HAVE__GETPTY +# include // mode_t + // SGI apparently needs this forward declaration + extern char * _getpty(int *, int, mode_t, int); +#endif + + +#if defined(HAVE_SYS_XATTR_H) +# if defined(HAVE_LINUX_LIMITS_H) && !defined(__FreeBSD_kernel__) && !defined(__GNU__) +# define USE_XATTRS +# include // Needed for XATTR_SIZE_MAX on musl libc. +# endif +# if defined(__CYGWIN__) +# define USE_XATTRS +# include // Needed for XATTR_SIZE_MAX and XATTR_LIST_MAX. +# endif +#endif +#ifdef USE_XATTRS +# include // fgetxattr() +#endif + + #ifdef HAVE_WINDOWS_CONSOLE_IO # define TERMSIZE_USE_CONIO #elif defined(HAVE_SYS_IOCTL_H) -# include +# include // ioctl(), TIOCGWINSZ # if defined(HAVE_TERMIOS_H) # include # endif # if defined(TIOCGWINSZ) # define TERMSIZE_USE_IOCTL # endif -#endif /* HAVE_WINDOWS_CONSOLE_IO */ +#endif + /* Various compilers have only certain posix functions */ /* XXX Gosh I wish these were all moved into pyconfig.h */ @@ -399,24 +294,16 @@ corresponding Unix manual entries for more information on calls."); # endif /* MS_WINDOWS_DESKTOP | MS_WINDOWS_SYSTEM */ # define HAVE_PIPE 1 # define HAVE_FSYNC 1 -# define fsync _commit -#endif /* ! __WATCOMC__ || __QNX__ */ - -/*[clinic input] -# one of the few times we lie about this name! -module os -[clinic start generated code]*/ -/*[clinic end generated code: output=da39a3ee5e6b4b0d input=94a0f0f978acae17]*/ +# define fsync _commit +#endif -#ifndef _MSC_VER -#if defined(__sgi)&&_COMPILER_VERSION>=700 +#if !defined(_MSC_VER) && defined(__sgi) && _COMPILER_VERSION>=700 /* declare ctermid_r if compiling with MIPSPro 7.x in ANSI C mode (default) */ -extern char *ctermid_r(char *); +extern char *ctermid_r(char *); #endif -#endif /* !_MSC_VER */ #if defined(__VXWORKS__) # include @@ -430,33 +317,9 @@ extern char *ctermid_r(char *); # endif #endif /* __VXWORKS__ */ -#ifdef HAVE_POSIX_SPAWN -# include -#endif - -#ifdef HAVE_UTIME_H -# include -#endif /* HAVE_UTIME_H */ - -#ifdef HAVE_SYS_UTIME_H -# include -# define HAVE_UTIME_H /* pretend we do for the rest of this file */ -#endif /* HAVE_SYS_UTIME_H */ - -#ifdef HAVE_SYS_TIMES_H -# include -#endif /* HAVE_SYS_TIMES_H */ - -#ifdef HAVE_SYS_PARAM_H -# include -#endif /* HAVE_SYS_PARAM_H */ - -#ifdef HAVE_SYS_UTSNAME_H -# include -#endif /* HAVE_SYS_UTSNAME_H */ #ifdef HAVE_DIRENT_H -# include +# include // opendir() # define NAMLEN(dirent) strlen((dirent)->d_name) #else # if defined(__WATCOMC__) && !defined(__QNX__) @@ -477,18 +340,20 @@ extern char *ctermid_r(char *); # endif #endif -#ifdef _MSC_VER -# ifdef HAVE_DIRECT_H -# include -# endif -# ifdef HAVE_IO_H -# include + +#if defined(MAJOR_IN_MKDEV) +# include +#else +# if defined(MAJOR_IN_SYSMACROS) +# include # endif -# ifdef HAVE_PROCESS_H -# include +# if defined(HAVE_MKNOD) && defined(HAVE_SYS_MKDEV_H) +# include # endif -# include -#endif /* _MSC_VER */ +#endif + + +// --- Macros --------------------------------------------------------------- #ifndef MAXPATHLEN # if defined(PATH_MAX) && PATH_MAX > 1024 @@ -498,6 +363,7 @@ extern char *ctermid_r(char *); # endif #endif /* MAXPATHLEN */ + #ifdef UNION_WAIT /* Emulate some macros on systems that have a union instead of macros */ # ifndef WIFEXITED @@ -517,12 +383,14 @@ extern char *ctermid_r(char *); # define WAIT_STATUS_INT(s) (s) #endif /* UNION_WAIT */ + /* Don't use the "_r" form if we don't need it (also, won't have a prototype for it, at least on Solaris -- maybe 
others as well?). */ #if defined(HAVE_CTERMID_R) # define USE_CTERMID_R #endif + /* choose the appropriate stat and fstat functions and return structs */ #undef STAT #undef FSTAT @@ -539,25 +407,19 @@ extern char *ctermid_r(char *); # define STRUCT_STAT struct stat #endif -#if defined(MAJOR_IN_MKDEV) -# include -#else -# if defined(MAJOR_IN_SYSMACROS) -# include -# endif -# if defined(HAVE_MKNOD) && defined(HAVE_SYS_MKDEV_H) -# include -# endif + +#if !defined(EX_OK) && defined(EXIT_SUCCESS) +# define EX_OK EXIT_SUCCESS #endif -#ifdef MS_WINDOWS -# define INITFUNC PyInit_nt -# define MODNAME "nt" -# define MODNAME_OBJ &_Py_ID(nt) -#else -# define INITFUNC PyInit_posix -# define MODNAME "posix" -# define MODNAME_OBJ &_Py_ID(posix) +#if !defined(CPU_ALLOC) && defined(HAVE_SCHED_SETAFFINITY) +# undef HAVE_SCHED_SETAFFINITY +#endif + +/* On android API level 21, 'AT_EACCESS' is not declared although + * HAVE_FACCESSAT is defined. */ +#ifdef __ANDROID__ +# undef HAVE_FACCESSAT #endif #if defined(__sun) @@ -565,33 +427,195 @@ extern char *ctermid_r(char *); # define HAVE_STRUCT_STAT_ST_FSTYPE 1 #endif -/* memfd_create is either defined in sys/mman.h or sys/memfd.h - * linux/memfd.h defines additional flags + +// --- Apple __builtin_available() macros ----------------------------------- + +/* + * A number of APIs are available on macOS from a certain macOS version. + * To support building with a new SDK while deploying to older versions + * the availability test is split into two: + * - HAVE_: The configure check for compile time availability + * - HAVE__RUNTIME: Runtime check for availability + * + * The latter is always true when not on macOS, or when using a compiler + * that does not support __has_builtin (older versions of Xcode). + * + * Due to compiler restrictions there is one valid use of HAVE__RUNTIME: + * if (HAVE__RUNTIME) { ... } + * + * In mixing the test with other tests or using negations will result in compile + * errors. 
*/ -#ifdef HAVE_SYS_MMAN_H -# include +#if defined(__APPLE__) + +#include + +#if defined(__has_builtin) +#if __has_builtin(__builtin_available) +#define HAVE_BUILTIN_AVAILABLE 1 #endif -#ifdef HAVE_SYS_MEMFD_H -# include #endif -#ifdef HAVE_LINUX_MEMFD_H -# include + +#ifdef HAVE_BUILTIN_AVAILABLE +# define HAVE_FSTATAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_FACCESSAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_FCHMODAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_FCHOWNAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_LINKAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_FDOPENDIR_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_MKDIRAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_RENAMEAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_UNLINKAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_OPENAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_READLINKAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_SYMLINKAT_RUNTIME __builtin_available(macOS 10.10, iOS 8.0, *) +# define HAVE_FUTIMENS_RUNTIME __builtin_available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *) +# define HAVE_UTIMENSAT_RUNTIME __builtin_available(macOS 10.13, iOS 11.0, tvOS 11.0, watchOS 4.0, *) +# define HAVE_PWRITEV_RUNTIME __builtin_available(macOS 11.0, iOS 14.0, tvOS 14.0, watchOS 7.0, *) +# define HAVE_MKFIFOAT_RUNTIME __builtin_available(macOS 13.0, iOS 16.0, tvOS 16.0, watchOS 9.0, *) +# define HAVE_MKNODAT_RUNTIME __builtin_available(macOS 13.0, iOS 16.0, tvOS 16.0, watchOS 9.0, *) +# define HAVE_PTSNAME_R_RUNTIME __builtin_available(macOS 10.13.4, iOS 11.3, tvOS 11.3, watchOS 4.3, *) + +# define HAVE_POSIX_SPAWN_SETSID_RUNTIME __builtin_available(macOS 10.15, *) + +#else /* Xcode 8 or earlier */ + + /* __builtin_available is not present in these compilers, but + * some of the symbols might be weak linked (10.10 SDK or later + * deploying on 10.9. + * + * Fall back to the older style of availability checking for + * symbols introduced in macOS 10.10. 
+ */ + +# ifdef HAVE_FSTATAT +# define HAVE_FSTATAT_RUNTIME (fstatat != NULL) +# endif + +# ifdef HAVE_FACCESSAT +# define HAVE_FACCESSAT_RUNTIME (faccessat != NULL) +# endif + +# ifdef HAVE_FCHMODAT +# define HAVE_FCHMODAT_RUNTIME (fchmodat != NULL) +# endif + +# ifdef HAVE_FCHOWNAT +# define HAVE_FCHOWNAT_RUNTIME (fchownat != NULL) +# endif + +# ifdef HAVE_LINKAT +# define HAVE_LINKAT_RUNTIME (linkat != NULL) +# endif + +# ifdef HAVE_FDOPENDIR +# define HAVE_FDOPENDIR_RUNTIME (fdopendir != NULL) +# endif + +# ifdef HAVE_MKDIRAT +# define HAVE_MKDIRAT_RUNTIME (mkdirat != NULL) +# endif + +# ifdef HAVE_RENAMEAT +# define HAVE_RENAMEAT_RUNTIME (renameat != NULL) +# endif + +# ifdef HAVE_UNLINKAT +# define HAVE_UNLINKAT_RUNTIME (unlinkat != NULL) +# endif + +# ifdef HAVE_OPENAT +# define HAVE_OPENAT_RUNTIME (openat != NULL) +# endif + +# ifdef HAVE_READLINKAT +# define HAVE_READLINKAT_RUNTIME (readlinkat != NULL) +# endif + +# ifdef HAVE_SYMLINKAT +# define HAVE_SYMLINKAT_RUNTIME (symlinkat != NULL) +# endif + +# ifdef HAVE_UTIMENSAT +# define HAVE_UTIMENSAT_RUNTIME (utimensat != NULL) +# endif + +# ifdef HAVE_FUTIMENS +# define HAVE_FUTIMENS_RUNTIME (futimens != NULL) +# endif + +# ifdef HAVE_PWRITEV +# define HAVE_PWRITEV_RUNTIME (pwritev != NULL) +# endif + +# ifdef HAVE_MKFIFOAT +# define HAVE_MKFIFOAT_RUNTIME (mkfifoat != NULL) +# endif + +# ifdef HAVE_MKNODAT +# define HAVE_MKNODAT_RUNTIME (mknodat != NULL) +# endif + +# ifdef HAVE_PTSNAME_R +# define HAVE_PTSNAME_R_RUNTIME (ptsname_r != NULL) +# endif + #endif -/* eventfd() */ -#ifdef HAVE_SYS_EVENTFD_H -# include +#ifdef HAVE_FUTIMESAT +/* Some of the logic for weak linking depends on this assertion */ +# error "HAVE_FUTIMESAT unexpectedly defined" #endif -/* timerfd_create() */ -#ifdef HAVE_SYS_TIMERFD_H -# include +#else +# define HAVE_FSTATAT_RUNTIME 1 +# define HAVE_FACCESSAT_RUNTIME 1 +# define HAVE_FCHMODAT_RUNTIME 1 +# define HAVE_FCHOWNAT_RUNTIME 1 +# define HAVE_LINKAT_RUNTIME 1 +# define HAVE_FDOPENDIR_RUNTIME 1 +# define HAVE_MKDIRAT_RUNTIME 1 +# define HAVE_RENAMEAT_RUNTIME 1 +# define HAVE_UNLINKAT_RUNTIME 1 +# define HAVE_OPENAT_RUNTIME 1 +# define HAVE_READLINKAT_RUNTIME 1 +# define HAVE_SYMLINKAT_RUNTIME 1 +# define HAVE_FUTIMENS_RUNTIME 1 +# define HAVE_UTIMENSAT_RUNTIME 1 +# define HAVE_PWRITEV_RUNTIME 1 +# define HAVE_MKFIFOAT_RUNTIME 1 +# define HAVE_MKNODAT_RUNTIME 1 +# define HAVE_PTSNAME_R_RUNTIME 1 #endif -#ifdef _Py_MEMORY_SANITIZER -# include + +// --- os module ------------------------------------------------------------ + +#ifdef MS_WINDOWS +# define INITFUNC PyInit_nt +# define MODNAME "nt" +# define MODNAME_OBJ &_Py_ID(nt) +#else +# define INITFUNC PyInit_posix +# define MODNAME "posix" +# define MODNAME_OBJ &_Py_ID(posix) #endif +/*[clinic input] +# one of the few times we lie about this name! +module os +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=94a0f0f978acae17]*/ + +PyDoc_STRVAR(posix__doc__, +"This module provides access to operating system functionality that is\n\ +standardized by the C Standard and the POSIX standard (a thinly\n\ +disguised Unix interface). Refer to the library manual and\n\ +corresponding Unix manual entries for more information on calls."); + + +// --- Functions ------------------------------------------------------------ + #ifdef HAVE_FORK static void run_at_forkers(PyObject *lst, int reverse) @@ -606,8 +630,10 @@ run_at_forkers(PyObject *lst, int reverse) * one of the callbacks. 
*/ cpy = PyList_GetSlice(lst, 0, PyList_GET_SIZE(lst)); - if (cpy == NULL) - PyErr_WriteUnraisable(lst); + if (cpy == NULL) { + PyErr_FormatUnraisable("Exception ignored in atfork callback " + "while copying list %R", lst); + } else { if (reverse) PyList_Reverse(cpy); @@ -615,10 +641,13 @@ run_at_forkers(PyObject *lst, int reverse) PyObject *func, *res; func = PyList_GET_ITEM(cpy, i); res = _PyObject_CallNoArgs(func); - if (res == NULL) - PyErr_WriteUnraisable(func); - else + if (res == NULL) { + PyErr_FormatUnraisable("Exception ignored " + "in atfork callback %R", func); + } + else { Py_DECREF(res); + } } Py_DECREF(cpy); } @@ -9877,7 +9906,7 @@ wait_helper(PyObject *module, pid_t pid, int status, struct rusage *ru) memset(ru, 0, sizeof(*ru)); } - struct_rusage = _PyImport_GetModuleAttrString("resource", "struct_rusage"); + struct_rusage = PyImport_ImportModuleAttrString("resource", "struct_rusage"); if (struct_rusage == NULL) return NULL; @@ -16330,7 +16359,8 @@ ScandirIterator_finalize(ScandirIterator *iterator) "unclosed scandir iterator %R", iterator)) { /* Spurious errors can appear at shutdown */ if (PyErr_ExceptionMatches(PyExc_Warning)) { - PyErr_WriteUnraisable((PyObject *) iterator); + PyErr_FormatUnraisable("Exception ignored while finalizing " + "scandir iterator %R", iterator); } } } diff --git a/Modules/pyexpat.c b/Modules/pyexpat.c index 9931ca2a8d4749..3290706f143b9a 100644 --- a/Modules/pyexpat.c +++ b/Modules/pyexpat.c @@ -1948,7 +1948,8 @@ pyexpat_capsule_destructor(PyObject *capsule) { void *p = PyCapsule_GetPointer(capsule, PyExpat_CAPSULE_NAME); if (p == NULL) { - PyErr_WriteUnraisable(capsule); + PyErr_FormatUnraisable("Exception ignored while destroying " + "pyexact capsule"); return; } PyMem_Free(p); diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c index e14e114a6dafd0..c75e2ba28c5b4e 100644 --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -14,7 +14,6 @@ #include "Python.h" #include "pycore_fileutils.h" // _Py_set_inheritable() -#include "pycore_import.h" // _PyImport_GetModuleAttrString() #include "pycore_time.h" // _PyTime_FromSecondsObject() #include @@ -1996,7 +1995,7 @@ kqueue_tracking_init(PyObject *module) { // Register a callback to invalidate kqueues with open fds after fork. 
PyObject *register_at_fork = NULL, *cb = NULL, *args = NULL, *kwargs = NULL, *result = NULL; - register_at_fork = _PyImport_GetModuleAttrString("posix", + register_at_fork = PyImport_ImportModuleAttrString("posix", "register_at_fork"); if (register_at_fork == NULL) { goto finally; diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c index 0e53a36bca55f0..b679b83bed5365 100644 --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -245,7 +245,8 @@ report_wakeup_write_error(void *data) errno = (int) (intptr_t) data; PyObject *exc = PyErr_GetRaisedException(); PyErr_SetFromErrno(PyExc_OSError); - PyErr_FormatUnraisable("Exception ignored when trying to write to the signal wakeup fd"); + PyErr_FormatUnraisable("Exception ignored while " + "trying to write to the signal wakeup fd"); PyErr_SetRaisedException(exc); errno = save_errno; return 0; @@ -262,7 +263,8 @@ report_wakeup_send_error(void* data) recognizes the error codes used by both GetLastError() and WSAGetLastError */ PyErr_SetExcFromWindowsErr(PyExc_OSError, send_errno); - PyErr_FormatUnraisable("Exception ignored when trying to send to the signal wakeup fd"); + PyErr_FormatUnraisable("Exception ignored while " + "trying to send to the signal wakeup fd"); PyErr_SetRaisedException(exc); return 0; } @@ -1837,7 +1839,8 @@ _PyErr_CheckSignalsTstate(PyThreadState *tstate) PyErr_Format(PyExc_OSError, "Signal %i ignored due to race condition", i); - PyErr_WriteUnraisable(Py_None); + PyErr_FormatUnraisable("Exception ignored while " + "calling signal handler"); continue; } PyObject *arglist = NULL; diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c index 01811afa578119..4b6d2dd1c5fc7b 100644 --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -2952,6 +2952,8 @@ sock_accept(PyObject *self, PyObject *Py_UNUSED(ignored)) ctx.addrlen = &addrlen; ctx.addrbuf = &addrbuf; + ctx.result = INVALID_SOCKET; + if (sock_call(s, 0, sock_accept_impl, &ctx) < 0) return NULL; newfd = ctx.result; @@ -5357,7 +5359,8 @@ sock_finalize(PyObject *self) if (PyErr_ResourceWarning((PyObject *)s, 1, "unclosed %R", s)) { /* Spurious errors can appear at shutdown */ if (PyErr_ExceptionMatches(PyExc_Warning)) { - PyErr_WriteUnraisable((PyObject *)s); + PyErr_FormatUnraisable("Exception ignored while " + "finalizing socket %R", s); } } diff --git a/Modules/syslogmodule.c b/Modules/syslogmodule.c index 8c57d7e3d7f2f4..697c27fd43af06 100644 --- a/Modules/syslogmodule.c +++ b/Modules/syslogmodule.c @@ -182,7 +182,7 @@ syslog_openlog_impl(PyObject *module, PyObject *ident, long logopt, } } if (PySys_Audit("syslog.openlog", "Oll", ident ? 
ident : Py_None, logopt, facility) < 0) { - Py_DECREF(ident); + Py_XDECREF(ident); return NULL; } diff --git a/Modules/timemodule.c b/Modules/timemodule.c index 5d0cd52a93a2d3..8d2cbff662b9a3 100644 --- a/Modules/timemodule.c +++ b/Modules/timemodule.c @@ -979,7 +979,7 @@ time_strptime(PyObject *self, PyObject *args) { PyObject *func, *result; - func = _PyImport_GetModuleAttrString("_strptime", "_strptime_time"); + func = PyImport_ImportModuleAttrString("_strptime", "_strptime_time"); if (!func) { return NULL; } diff --git a/Objects/abstract.c b/Objects/abstract.c index c92ef10aa79648..db7b9263711f68 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -583,7 +583,7 @@ PyBuffer_SizeFromFormat(const char *format) PyObject *fmt = NULL; Py_ssize_t itemsize = -1; - calcsize = _PyImport_GetModuleAttrString("struct", "calcsize"); + calcsize = PyImport_ImportModuleAttrString("struct", "calcsize"); if (calcsize == NULL) { goto done; } diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c index 21584332e0e443..6133d30f49930a 100644 --- a/Objects/bytearrayobject.c +++ b/Objects/bytearrayobject.c @@ -184,7 +184,12 @@ PyByteArray_Resize(PyObject *self, Py_ssize_t requested_size) assert(self != NULL); assert(PyByteArray_Check(self)); assert(logical_offset <= alloc); - assert(requested_size >= 0); + + if (requested_size < 0) { + PyErr_Format(PyExc_ValueError, + "Can only resize to positive sizes, got %zd", requested_size); + return -1; + } if (requested_size == Py_SIZE(self)) { return 0; @@ -1388,6 +1393,31 @@ bytearray_removesuffix_impl(PyByteArrayObject *self, Py_buffer *suffix) } +/*[clinic input] +bytearray.resize + size: Py_ssize_t + New size to resize to.. + / +Resize the internal buffer of bytearray to len. +[clinic start generated code]*/ + +static PyObject * +bytearray_resize_impl(PyByteArrayObject *self, Py_ssize_t size) +/*[clinic end generated code: output=f73524922990b2d9 input=75fd4d17c4aa47d3]*/ +{ + Py_ssize_t start_size = PyByteArray_GET_SIZE(self); + int result = PyByteArray_Resize((PyObject *)self, size); + if (result < 0) { + return NULL; + } + // Set new bytes to null bytes + if (size > start_size) { + memset(PyByteArray_AS_STRING(self) + start_size, 0, size - start_size); + } + Py_RETURN_NONE; +} + + /*[clinic input] bytearray.translate @@ -2361,6 +2391,7 @@ static PyMethodDef bytearray_methods[] = { BYTEARRAY_REPLACE_METHODDEF BYTEARRAY_REMOVEPREFIX_METHODDEF BYTEARRAY_REMOVESUFFIX_METHODDEF + BYTEARRAY_RESIZE_METHODDEF BYTEARRAY_REVERSE_METHODDEF BYTEARRAY_RFIND_METHODDEF BYTEARRAY_RINDEX_METHODDEF diff --git a/Objects/clinic/bytearrayobject.c.h b/Objects/clinic/bytearrayobject.c.h index 91cf5363e639d1..03b5a8a516cc09 100644 --- a/Objects/clinic/bytearrayobject.c.h +++ b/Objects/clinic/bytearrayobject.c.h @@ -565,6 +565,45 @@ bytearray_removesuffix(PyObject *self, PyObject *arg) return return_value; } +PyDoc_STRVAR(bytearray_resize__doc__, +"resize($self, size, /)\n" +"--\n" +"\n" +"Resize the internal buffer of bytearray to len.\n" +"\n" +" size\n" +" New size to resize to.."); + +#define BYTEARRAY_RESIZE_METHODDEF \ + {"resize", (PyCFunction)bytearray_resize, METH_O, bytearray_resize__doc__}, + +static PyObject * +bytearray_resize_impl(PyByteArrayObject *self, Py_ssize_t size); + +static PyObject * +bytearray_resize(PyObject *self, PyObject *arg) +{ + PyObject *return_value = NULL; + Py_ssize_t size; + + { + Py_ssize_t ival = -1; + PyObject *iobj = _PyNumber_Index(arg); + if (iobj != NULL) { + ival = PyLong_AsSsize_t(iobj); + Py_DECREF(iobj); + } + if (ival 
== -1 && PyErr_Occurred()) { + goto exit; + } + size = ival; + } + return_value = bytearray_resize_impl((PyByteArrayObject *)self, size); + +exit: + return return_value; +} + PyDoc_STRVAR(bytearray_translate__doc__, "translate($self, table, /, delete=b\'\')\n" "--\n" @@ -1623,4 +1662,4 @@ bytearray_sizeof(PyObject *self, PyObject *Py_UNUSED(ignored)) { return bytearray_sizeof_impl((PyByteArrayObject *)self); } -/*[clinic end generated code: output=bc8bec8514102bf3 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=41bb67a8a181e733 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/dictobject.c.h b/Objects/clinic/dictobject.c.h index cdf39ce147203b..c66916bb33aa37 100644 --- a/Objects/clinic/dictobject.c.h +++ b/Objects/clinic/dictobject.c.h @@ -94,9 +94,7 @@ dict_get(PyObject *self, PyObject *const *args, Py_ssize_t nargs) } default_value = args[1]; skip_optional: - Py_BEGIN_CRITICAL_SECTION(self); return_value = dict_get_impl((PyDictObject *)self, key, default_value); - Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -312,4 +310,4 @@ dict_values(PyObject *self, PyObject *Py_UNUSED(ignored)) { return dict_values_impl((PyDictObject *)self); } -/*[clinic end generated code: output=4956c5b276ea652f input=a9049054013a1b77]*/ +/*[clinic end generated code: output=0f04bf0e7e6b130f input=a9049054013a1b77]*/ diff --git a/Objects/codeobject.c b/Objects/codeobject.c index 7eea61968bc4d9..a7b46aa2dfbbc0 100644 --- a/Objects/codeobject.c +++ b/Objects/codeobject.c @@ -459,8 +459,7 @@ _PyCode_Validate(struct _PyCodeConstructor *con) } extern void -_PyCode_Quicken(_Py_CODEUNIT *instructions, Py_ssize_t size, PyObject *consts, - int enable_counters); +_PyCode_Quicken(_Py_CODEUNIT *instructions, Py_ssize_t size, int enable_counters); #ifdef Py_GIL_DISABLED static _PyCodeArray * _PyCodeArray_New(Py_ssize_t size); @@ -543,10 +542,9 @@ init_code(PyCodeObject *co, struct _PyCodeConstructor *con) } co->_co_firsttraceable = entry_point; #ifdef Py_GIL_DISABLED - _PyCode_Quicken(_PyCode_CODE(co), Py_SIZE(co), co->co_consts, - interp->config.tlbc_enabled); + _PyCode_Quicken(_PyCode_CODE(co), Py_SIZE(co), interp->config.tlbc_enabled); #else - _PyCode_Quicken(_PyCode_CODE(co), Py_SIZE(co), co->co_consts, 1); + _PyCode_Quicken(_PyCode_CODE(co), Py_SIZE(co), 1); #endif notify_code_watchers(PY_CODE_EVENT_CREATE, co); return 0; @@ -2819,7 +2817,7 @@ copy_code(_Py_CODEUNIT *dst, PyCodeObject *co) for (int i = 0; i < code_len; i += _PyInstruction_GetLength(co, i)) { dst[i] = _Py_GetBaseCodeUnit(co, i); } - _PyCode_Quicken(dst, code_len, co->co_consts, 1); + _PyCode_Quicken(dst, code_len, 1); } static Py_ssize_t diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 8fe71123252a75..91cf013a1dc24b 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -4248,7 +4248,6 @@ dict___contains__(PyDictObject *self, PyObject *key) } /*[clinic input] -@critical_section dict.get key: object @@ -4260,7 +4259,7 @@ Return the value for key if key is in the dictionary, else default. 
static PyObject * dict_get_impl(PyDictObject *self, PyObject *key, PyObject *default_value) -/*[clinic end generated code: output=bba707729dee05bf input=a631d3f18f584c60]*/ +/*[clinic end generated code: output=bba707729dee05bf input=279ddb5790b6b107]*/ { PyObject *val = NULL; Py_hash_t hash; @@ -7352,7 +7351,8 @@ PyObject_ClearManagedDict(PyObject *obj) if (set_or_clear_managed_dict(obj, NULL, true) < 0) { /* Must be out of memory */ assert(PyErr_Occurred() == PyExc_MemoryError); - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while " + "clearing an object managed dict"); /* Clear the dict */ PyDictObject *dict = _PyObject_GetManagedDict(obj); Py_BEGIN_CRITICAL_SECTION2(dict, obj); diff --git a/Objects/fileobject.c b/Objects/fileobject.c index c377d1bb28b56f..7025b5bcffc1c8 100644 --- a/Objects/fileobject.c +++ b/Objects/fileobject.c @@ -34,7 +34,7 @@ PyFile_FromFd(int fd, const char *name, const char *mode, int buffering, const c PyObject *open, *stream; /* import _io in case we are being used to open io.py */ - open = _PyImport_GetModuleAttrString("_io", "open"); + open = PyImport_ImportModuleAttrString("_io", "open"); if (open == NULL) return NULL; stream = PyObject_CallFunction(open, "isisssO", fd, mode, @@ -506,7 +506,7 @@ PyFile_OpenCodeObject(PyObject *path) if (hook) { f = hook(path, _PyRuntime.open_code_userdata); } else { - PyObject *open = _PyImport_GetModuleAttrString("_io", "open"); + PyObject *open = PyImport_ImportModuleAttrString("_io", "open"); if (open) { f = PyObject_CallFunction(open, "Os", path, "rb"); Py_DECREF(open); diff --git a/Objects/genobject.c b/Objects/genobject.c index 73bbf86588c457..79aed8571c35e7 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -97,8 +97,10 @@ _PyGen_Finalize(PyObject *self) PyObject *res = PyObject_CallOneArg(finalizer, self); if (res == NULL) { - PyErr_WriteUnraisable(self); - } else { + PyErr_FormatUnraisable("Exception ignored while " + "finalizing generator %R", self); + } + else { Py_DECREF(res); } /* Restore the saved exception. 
*/ @@ -122,7 +124,8 @@ _PyGen_Finalize(PyObject *self) PyObject *res = gen_close((PyObject*)gen, NULL); if (res == NULL) { if (PyErr_Occurred()) { - PyErr_WriteUnraisable(self); + PyErr_FormatUnraisable("Exception ignored while " + "closing generator %R", self); } } else { @@ -338,7 +341,8 @@ gen_close_iter(PyObject *yf) else { PyObject *meth; if (PyObject_GetOptionalAttr(yf, &_Py_ID(close), &meth) < 0) { - PyErr_WriteUnraisable(yf); + PyErr_FormatUnraisable("Exception ignored while " + "closing generator %R", yf); } if (meth) { retval = _PyObject_CallNoArgs(meth); diff --git a/Objects/listobject.c b/Objects/listobject.c index 099e65c0c25fed..120e353b709e7b 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -419,7 +419,6 @@ int PyList_SetItem(PyObject *op, Py_ssize_t i, PyObject *newitem) { - PyObject **p; if (!PyList_Check(op)) { Py_XDECREF(newitem); PyErr_BadInternalCall(); @@ -435,8 +434,9 @@ PyList_SetItem(PyObject *op, Py_ssize_t i, ret = -1; goto end; } - p = self->ob_item + i; - Py_XSETREF(*p, newitem); + PyObject *tmp = self->ob_item[i]; + FT_ATOMIC_STORE_PTR_RELEASE(self->ob_item[i], newitem); + Py_XDECREF(tmp); ret = 0; end:; Py_END_CRITICAL_SECTION(); @@ -466,8 +466,8 @@ ins1(PyListObject *self, Py_ssize_t where, PyObject *v) where = n; items = self->ob_item; for (i = n; --i >= where; ) - items[i+1] = items[i]; - items[where] = Py_NewRef(v); + FT_ATOMIC_STORE_PTR_RELAXED(items[i+1], items[i]); + FT_ATOMIC_STORE_PTR_RELEASE(items[where], Py_NewRef(v)); return 0; } @@ -3903,15 +3903,17 @@ PyTypeObject PyListIter_Type = { static PyObject * list_iter(PyObject *seq) { - _PyListIterObject *it; - if (!PyList_Check(seq)) { PyErr_BadInternalCall(); return NULL; } - it = PyObject_GC_New(_PyListIterObject, &PyListIter_Type); - if (it == NULL) - return NULL; + _PyListIterObject *it = _Py_FREELIST_POP(_PyListIterObject, list_iters); + if (it == NULL) { + it = PyObject_GC_New(_PyListIterObject, &PyListIter_Type); + if (it == NULL) { + return NULL; + } + } it->it_index = 0; it->it_seq = (PyListObject *)Py_NewRef(seq); _PyObject_GC_TRACK(it); @@ -3924,7 +3926,8 @@ listiter_dealloc(PyObject *self) _PyListIterObject *it = (_PyListIterObject *)self; _PyObject_GC_UNTRACK(it); Py_XDECREF(it->it_seq); - PyObject_GC_Del(it); + assert(Py_IS_TYPE(self, &PyListIter_Type)); + _Py_FREELIST_FREE(list_iters, it, PyObject_GC_Del); } static int diff --git a/Objects/longobject.c b/Objects/longobject.c index 43be1ab056e0fe..370328dcfe8c9a 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -3651,32 +3651,25 @@ long_richcompare(PyObject *self, PyObject *other, int op) } static inline int -compact_int_is_small(PyObject *self) +/// Return 1 if the object is one of the immortal small ints +_long_is_small_int(PyObject *op) { - PyLongObject *pylong = (PyLongObject *)self; - assert(_PyLong_IsCompact(pylong)); - stwodigits ival = medium_value(pylong); - if (IS_SMALL_INT(ival)) { - PyLongObject *small_pylong = (PyLongObject *)get_small_int((sdigit)ival); - if (pylong == small_pylong) { - return 1; - } - } - return 0; + PyLongObject *long_object = (PyLongObject *)op; + int is_small_int = (long_object->long_value.lv_tag & IMMORTALITY_BIT_MASK) != 0; + assert((!is_small_int) || PyLong_CheckExact(op)); + return is_small_int; } void _PyLong_ExactDealloc(PyObject *self) { assert(PyLong_CheckExact(self)); + if (_long_is_small_int(self)) { + // See PEP 683, section Accidental De-Immortalizing for details + _Py_SetImmortal(self); + return; + } if (_PyLong_IsCompact((PyLongObject *)self)) { - #ifndef 
Py_GIL_DISABLED - if (compact_int_is_small(self)) { - // See PEP 683, section Accidental De-Immortalizing for details - _Py_SetImmortal(self); - return; - } - #endif _Py_FREELIST_FREE(ints, self, PyObject_Free); return; } @@ -3686,24 +3679,20 @@ _PyLong_ExactDealloc(PyObject *self) static void long_dealloc(PyObject *self) { - assert(self); - if (_PyLong_IsCompact((PyLongObject *)self)) { - if (compact_int_is_small(self)) { - /* This should never get called, but we also don't want to SEGV if - * we accidentally decref small Ints out of existence. Instead, - * since small Ints are immortal, re-set the reference count. - * - * See PEP 683, section Accidental De-Immortalizing for details - */ - _Py_SetImmortal(self); - return; - } - if (PyLong_CheckExact(self)) { - _Py_FREELIST_FREE(ints, self, PyObject_Free); - return; - } + if (_long_is_small_int(self)) { + /* This should never get called, but we also don't want to SEGV if + * we accidentally decref small Ints out of existence. Instead, + * since small Ints are immortal, re-set the reference count. + * + * See PEP 683, section Accidental De-Immortalizing for details + */ + _Py_SetImmortal(self); + return; + } + if (PyLong_CheckExact(self) && _PyLong_IsCompact((PyLongObject *)self)) { + _Py_FREELIST_FREE(ints, self, PyObject_Free); + return; } - Py_TYPE(self)->tp_free(self); } @@ -6065,7 +6054,7 @@ long_subtype_new(PyTypeObject *type, PyObject *x, PyObject *obase) return NULL; } assert(PyLong_Check(newobj)); - newobj->long_value.lv_tag = tmp->long_value.lv_tag; + newobj->long_value.lv_tag = tmp->long_value.lv_tag & ~IMMORTALITY_BIT_MASK; for (i = 0; i < n; i++) { newobj->long_value.ob_digit[i] = tmp->long_value.ob_digit[i]; } diff --git a/Objects/memoryobject.c b/Objects/memoryobject.c index ea4d24dc690768..331363b2babbef 100644 --- a/Objects/memoryobject.c +++ b/Objects/memoryobject.c @@ -2083,7 +2083,7 @@ struct_get_unpacker(const char *fmt, Py_ssize_t itemsize) PyObject *format = NULL; struct unpacker *x = NULL; - Struct = _PyImport_GetModuleAttrString("struct", "Struct"); + Struct = PyImport_ImportModuleAttrString("struct", "Struct"); if (Struct == NULL) return NULL; diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c index c06f18d2c2ad32..80bad4ba28b820 100644 --- a/Objects/moduleobject.c +++ b/Objects/moduleobject.c @@ -703,7 +703,8 @@ _PyModule_ClearDict(PyObject *d) PyErr_Clear(); } if (PyDict_SetItem(d, key, Py_None) != 0) { - PyErr_FormatUnraisable("Exception ignored on clearing module dict"); + PyErr_FormatUnraisable("Exception ignored while " + "clearing module dict"); } } } @@ -724,7 +725,8 @@ _PyModule_ClearDict(PyObject *d) PyErr_Clear(); } if (PyDict_SetItem(d, key, Py_None) != 0) { - PyErr_FormatUnraisable("Exception ignored on clearing module dict"); + PyErr_FormatUnraisable("Exception ignored while " + "clearing module dict"); } } } diff --git a/Objects/object.c b/Objects/object.c index eb1a7825c45450..fdff16138201a0 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -19,7 +19,7 @@ #include "pycore_object.h" // PyAPI_DATA() _Py_SwappedOp definition #include "pycore_object_state.h" // struct _reftracer_runtime_state #include "pycore_long.h" // _PyLong_GetZero() -#include "pycore_optimizer.h" // _PyUOpExecutor_Type, _PyUOpOptimizer_Type, ... +#include "pycore_optimizer.h" // _PyUOpExecutor_Type, ... 
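The PyList_SetItem and ins1 hunks above replace Py_XSETREF with an explicit store-then-release sequence, and the same ordering reappears later where SETLOCAL is inlined into STORE_FAST. A minimal sketch of that ordering concern, assuming a non-free-threaded build so the FT_ATOMIC_STORE_PTR_RELEASE shown in the diff reduces to a plain store; replace_slot is a hypothetical helper.

#include <Python.h>

/* Replace the object held in *slot with newitem (stealing its reference).
 * The old value must be stashed and released only after the new value is
 * visible in the slot: Py_XDECREF can run arbitrary code (__del__, weakref
 * callbacks, gc) that may re-read the slot, and it must never observe a
 * dangling pointer. In the free-threaded build the plain store becomes
 * FT_ATOMIC_STORE_PTR_RELEASE(*slot, newitem). */
static void
replace_slot(PyObject **slot, PyObject *newitem)
{
    PyObject *old = *slot;
    *slot = newitem;   /* publish the new value first */
    Py_XDECREF(old);   /* only then drop the old reference */
}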
#include "pycore_pyerrors.h" // _PyErr_Occurred() #include "pycore_pymem.h" // _PyMem_IsPtrFreed() #include "pycore_pystate.h" // _PyThreadState_GET() @@ -923,6 +923,8 @@ _PyObject_ClearFreeLists(struct _Py_freelists *freelists, int is_finalization) clear_freelist(&freelists->tuples[i], is_finalization, free_object); } clear_freelist(&freelists->lists, is_finalization, free_object); + clear_freelist(&freelists->list_iters, is_finalization, free_object); + clear_freelist(&freelists->tuple_iters, is_finalization, free_object); clear_freelist(&freelists->dicts, is_finalization, free_object); clear_freelist(&freelists->dictkeys, is_finalization, PyMem_Free); clear_freelist(&freelists->slices, is_finalization, free_object); @@ -2379,9 +2381,6 @@ static PyTypeObject* static_types[] = { &_PyBufferWrapper_Type, &_PyContextTokenMissing_Type, &_PyCoroWrapper_Type, -#ifdef _Py_TIER2 - &_PyDefaultOptimizer_Type, -#endif &_Py_GenericAliasIterType, &_PyHamtItems_Type, &_PyHamtKeys_Type, @@ -2404,7 +2403,6 @@ static PyTypeObject* static_types[] = { &_PyUnion_Type, #ifdef _Py_TIER2 &_PyUOpExecutor_Type, - &_PyUOpOptimizer_Type, #endif &_PyWeakref_CallableProxyType, &_PyWeakref_ProxyType, @@ -2588,6 +2586,20 @@ PyUnstable_Object_EnableDeferredRefcount(PyObject *op) #endif } +int +PyUnstable_TryIncRef(PyObject *op) +{ + return _Py_TryIncref(op); +} + +void +PyUnstable_EnableTryIncRef(PyObject *op) +{ +#ifdef Py_GIL_DISABLED + _PyObject_SetMaybeWeakref(op); +#endif +} + void _Py_ResurrectReference(PyObject *op) { diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index 7fe8553030a02e..60af9e40e3fe83 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -993,7 +993,8 @@ tupleiter_dealloc(PyObject *self) _PyTupleIterObject *it = _PyTupleIterObject_CAST(self); _PyObject_GC_UNTRACK(it); Py_XDECREF(it->it_seq); - PyObject_GC_Del(it); + assert(Py_IS_TYPE(self, &PyTupleIter_Type)); + _Py_FREELIST_FREE(tuple_iters, it, PyObject_GC_Del); } static int @@ -1119,15 +1120,16 @@ PyTypeObject PyTupleIter_Type = { static PyObject * tuple_iter(PyObject *seq) { - _PyTupleIterObject *it; - if (!PyTuple_Check(seq)) { PyErr_BadInternalCall(); return NULL; } - it = PyObject_GC_New(_PyTupleIterObject, &PyTupleIter_Type); - if (it == NULL) - return NULL; + _PyTupleIterObject *it = _Py_FREELIST_POP(_PyTupleIterObject, tuple_iters); + if (it == NULL) { + it = PyObject_GC_New(_PyTupleIterObject, &PyTupleIter_Type); + if (it == NULL) + return NULL; + } it->it_index = 0; it->it_seq = (PyTupleObject *)Py_NewRef(seq); _PyObject_GC_TRACK(it); diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 93920341a179e8..f3238da8a642e4 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -10288,10 +10288,13 @@ slot_tp_finalize(PyObject *self) del = lookup_maybe_method(self, &_Py_ID(__del__), &unbound); if (del != NULL) { res = call_unbound_noarg(unbound, del, self); - if (res == NULL) - PyErr_WriteUnraisable(del); - else + if (res == NULL) { + PyErr_FormatUnraisable("Exception ignored while " + "calling deallocator %R", del); + } + else { Py_DECREF(res); + } Py_DECREF(del); } diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index d9952f764bb178..75967d69ed374d 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -1735,7 +1735,9 @@ unicode_dealloc(PyObject *unicode) PyObject *popped; int r = PyDict_Pop(interned, unicode, &popped); if (r == -1) { - PyErr_WriteUnraisable(unicode); + PyErr_FormatUnraisable("Exception ignored while " + "removing an interned string %R", + unicode); // 
We don't know what happened to the string. It's probably // best to leak it: // - if it was popped, there are no more references to it @@ -16486,3 +16488,24 @@ PyInit__string(void) { return PyModuleDef_Init(&_string_module); } + + +#undef PyUnicode_KIND +int PyUnicode_KIND(PyObject *op) +{ + if (!PyUnicode_Check(op)) { + PyErr_Format(PyExc_TypeError, "expect str, got %T", op); + return -1; + } + return _PyASCIIObject_CAST(op)->state.kind; +} + +#undef PyUnicode_DATA +void* PyUnicode_DATA(PyObject *op) +{ + if (!PyUnicode_Check(op)) { + PyErr_Format(PyExc_TypeError, "expect str, got %T", op); + return NULL; + } + return _PyUnicode_DATA(op); +} diff --git a/Objects/weakrefobject.c b/Objects/weakrefobject.c index 0ee64ed70a63cd..bd4c4ac9b3475a 100644 --- a/Objects/weakrefobject.c +++ b/Objects/weakrefobject.c @@ -987,10 +987,13 @@ handle_callback(PyWeakReference *ref, PyObject *callback) { PyObject *cbresult = PyObject_CallOneArg(callback, (PyObject *)ref); - if (cbresult == NULL) - PyErr_WriteUnraisable(callback); - else + if (cbresult == NULL) { + PyErr_FormatUnraisable("Exception ignored while " + "calling weakref callback %R", callback); + } + else { Py_DECREF(cbresult); + } } /* This function is called by the tp_dealloc handler to clear weak references. @@ -1042,7 +1045,8 @@ PyObject_ClearWeakRefs(PyObject *object) PyObject *tuple = PyTuple_New(num_weakrefs * 2); if (tuple == NULL) { _PyWeakref_ClearWeakRefsNoCallbacks(object); - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while " + "clearing object weakrefs"); PyErr_SetRaisedException(exc); return; } diff --git a/PCbuild/_testcapi.vcxproj b/PCbuild/_testcapi.vcxproj index c41235eac356af..a68f15d25aabb7 100644 --- a/PCbuild/_testcapi.vcxproj +++ b/PCbuild/_testcapi.vcxproj @@ -127,6 +127,10 @@ + + + + diff --git a/PCbuild/_testcapi.vcxproj.filters b/PCbuild/_testcapi.vcxproj.filters index 0a00df655deefc..21091e9dc1aa16 100644 --- a/PCbuild/_testcapi.vcxproj.filters +++ b/PCbuild/_testcapi.vcxproj.filters @@ -114,6 +114,18 @@ Source Files + + Source Files + + + Source Files + + + Source Files + + + Source Files + diff --git a/PCbuild/_testlimitedcapi.vcxproj b/PCbuild/_testlimitedcapi.vcxproj index 87abff52493098..36c41fc9824fda 100644 --- a/PCbuild/_testlimitedcapi.vcxproj +++ b/PCbuild/_testlimitedcapi.vcxproj @@ -114,6 +114,7 @@ + diff --git a/PCbuild/_testlimitedcapi.vcxproj.filters b/PCbuild/_testlimitedcapi.vcxproj.filters index a975a508506905..62ecb2f70ffa2d 100644 --- a/PCbuild/_testlimitedcapi.vcxproj.filters +++ b/PCbuild/_testlimitedcapi.vcxproj.filters @@ -30,6 +30,7 @@ + diff --git a/Parser/pegen.c b/Parser/pegen.c index bb98e7b184a4dc..83b0022e47d619 100644 --- a/Parser/pegen.c +++ b/Parser/pegen.c @@ -111,7 +111,7 @@ init_normalization(Parser *p) if (p->normalize) { return 1; } - p->normalize = _PyImport_GetModuleAttrString("unicodedata", "normalize"); + p->normalize = PyImport_ImportModuleAttrString("unicodedata", "normalize"); if (!p->normalize) { return 0; diff --git a/Parser/tokenizer/file_tokenizer.c b/Parser/tokenizer/file_tokenizer.c index 2750527da484aa..efe9fb9b56abaf 100644 --- a/Parser/tokenizer/file_tokenizer.c +++ b/Parser/tokenizer/file_tokenizer.c @@ -158,7 +158,7 @@ fp_setreadl(struct tok_state *tok, const char* enc) return 0; } - open = _PyImport_GetModuleAttrString("io", "open"); + open = PyImport_ImportModuleAttrString("io", "open"); if (open == NULL) { return 0; } diff --git a/Python/_warnings.c b/Python/_warnings.c index 99139988908e17..81d130d7fe76ce 100644 --- 
a/Python/_warnings.c +++ b/Python/_warnings.c @@ -1444,7 +1444,8 @@ _PyErr_WarnUnawaitedAgenMethod(PyAsyncGenObject *agen, PyObject *method) "coroutine method %R of %R was never awaited", method, agen->ag_qualname) < 0) { - PyErr_WriteUnraisable((PyObject *)agen); + PyErr_FormatUnraisable("Exception ignored while " + "finalizing async generator %R", agen); } PyErr_SetRaisedException(exc); } @@ -1486,14 +1487,17 @@ _PyErr_WarnUnawaitedCoroutine(PyObject *coro) } if (PyErr_Occurred()) { - PyErr_WriteUnraisable(coro); + PyErr_FormatUnraisable("Exception ignored while " + "finalizing coroutine %R", coro); } + if (!warned) { if (_PyErr_WarnFormat(coro, PyExc_RuntimeWarning, 1, "coroutine '%S' was never awaited", ((PyCoroObject *)coro)->cr_qualname) < 0) { - PyErr_WriteUnraisable(coro); + PyErr_FormatUnraisable("Exception ignored while " + "finalizing coroutine %R", coro); } } } diff --git a/Python/ast_opt.c b/Python/ast_opt.c index 01e208b88eca8b..78d84002d593fb 100644 --- a/Python/ast_opt.c +++ b/Python/ast_opt.c @@ -567,25 +567,6 @@ fold_tuple(expr_ty node, PyArena *arena, _PyASTOptimizeState *state) return make_const(node, newval, arena); } -static int -fold_subscr(expr_ty node, PyArena *arena, _PyASTOptimizeState *state) -{ - PyObject *newval; - expr_ty arg, idx; - - arg = node->v.Subscript.value; - idx = node->v.Subscript.slice; - if (node->v.Subscript.ctx != Load || - arg->kind != Constant_kind || - idx->kind != Constant_kind) - { - return 1; - } - - newval = PyObject_GetItem(arg->v.Constant.value, idx->v.Constant.value); - return make_const(node, newval, arena); -} - /* Change literal list or set of constants into constant tuple or frozenset respectively. Change literal list of non-constants into tuple. @@ -822,7 +803,6 @@ astfold_expr(expr_ty node_, PyArena *ctx_, _PyASTOptimizeState *state) case Subscript_kind: CALL(astfold_expr, expr_ty, node_->v.Subscript.value); CALL(astfold_expr, expr_ty, node_->v.Subscript.slice); - CALL(fold_subscr, expr_ty, node_); break; case Starred_kind: CALL(astfold_expr, expr_ty, node_->v.Starred.value); diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 5f0be8d3feefd4..b650613650cf36 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -45,7 +45,6 @@ #include "ceval_macros.h" /* Flow control macros */ -#define GO_TO_INSTRUCTION(instname) ((void)0) #define inst(name, ...) case name: #define op(name, ...) /* NAME is ignored */ @@ -273,7 +272,6 @@ dummy_func( inst(LOAD_FAST_AND_CLEAR, (-- value)) { value = GETLOCAL(oparg); - // do not use SETLOCAL here, it decrefs the old value GETLOCAL(oparg) = PyStackRef_NULL; } @@ -294,10 +292,20 @@ dummy_func( * marshalling can intern strings and make them immortal. */ PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg); value = PyStackRef_FromPyObjectNew(obj); -#if ENABLE_SPECIALIZATION +#if ENABLE_SPECIALIZATION_FT +#ifdef Py_GIL_DISABLED + uint8_t expected = LOAD_CONST; + if (!_Py_atomic_compare_exchange_uint8( + &this_instr->op.code, &expected, + _Py_IsImmortal(obj) ? LOAD_CONST_IMMORTAL : LOAD_CONST_MORTAL)) { + // We might lose a race with instrumentation, which we don't care about. + assert(expected >= MIN_INSTRUMENTED_OPCODE); + } +#else if (this_instr->op.code == LOAD_CONST) { this_instr->op.code = _Py_IsImmortal(obj) ? 
LOAD_CONST_IMMORTAL : LOAD_CONST_MORTAL; } +#endif #endif } @@ -319,8 +327,10 @@ dummy_func( } replicate(8) inst(STORE_FAST, (value --)) { - SETLOCAL(oparg, value); + _PyStackRef tmp = GETLOCAL(oparg); + GETLOCAL(oparg) = value; DEAD(value); + PyStackRef_XCLOSE(tmp); } pseudo(STORE_FAST_MAYBE_NULL, (unused --)) = { @@ -330,18 +340,24 @@ dummy_func( inst(STORE_FAST_LOAD_FAST, (value1 -- value2)) { uint32_t oparg1 = oparg >> 4; uint32_t oparg2 = oparg & 15; - SETLOCAL(oparg1, value1); + _PyStackRef tmp = GETLOCAL(oparg1); + GETLOCAL(oparg1) = value1; DEAD(value1); value2 = PyStackRef_DUP(GETLOCAL(oparg2)); + PyStackRef_XCLOSE(tmp); } inst(STORE_FAST_STORE_FAST, (value2, value1 --)) { uint32_t oparg1 = oparg >> 4; uint32_t oparg2 = oparg & 15; - SETLOCAL(oparg1, value1); + _PyStackRef tmp = GETLOCAL(oparg1); + GETLOCAL(oparg1) = value1; DEAD(value1); - SETLOCAL(oparg2, value2); + PyStackRef_XCLOSE(tmp); + tmp = GETLOCAL(oparg2); + GETLOCAL(oparg2) = value2; DEAD(value2); + PyStackRef_XCLOSE(tmp); } pure inst(POP_TOP, (value --)) { @@ -749,7 +765,7 @@ dummy_func( assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5); assert(d && d->guard); int res = d->guard(left_o, right_o); - EXIT_IF(!res); + DEOPT_IF(!res); } pure op(_BINARY_OP_EXTEND, (descr/4, left, right -- res)) { @@ -1766,7 +1782,9 @@ dummy_func( ); ERROR_IF(1, error); } - SETLOCAL(oparg, PyStackRef_NULL); + _PyStackRef tmp = GETLOCAL(oparg); + GETLOCAL(oparg) = PyStackRef_NULL; + PyStackRef_XCLOSE(tmp); } inst(MAKE_CELL, (--)) { @@ -1777,7 +1795,9 @@ dummy_func( if (cell == NULL) { ERROR_NO_POP(); } - SETLOCAL(oparg, PyStackRef_FromPyObjectSteal(cell)); + _PyStackRef tmp = GETLOCAL(oparg); + GETLOCAL(oparg) = PyStackRef_FromPyObjectSteal(cell); + PyStackRef_XCLOSE(tmp); } inst(DELETE_DEREF, (--)) { @@ -2009,12 +2029,10 @@ dummy_func( ERROR_IF(err != 0, error); } - inst(INSTRUMENTED_LOAD_SUPER_ATTR, (unused/1 -- )) { - // cancel out the decrement that will happen in LOAD_SUPER_ATTR; we - // don't want to specialize instrumented instructions - PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter); - GO_TO_INSTRUCTION(LOAD_SUPER_ATTR); - } + macro(INSTRUMENTED_LOAD_SUPER_ATTR) = + counter/1 + + _LOAD_SUPER_ATTR + + _PUSH_NULL_CONDITIONAL; family(LOAD_SUPER_ATTR, INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR) = { LOAD_SUPER_ATTR_ATTR, @@ -2078,7 +2096,10 @@ dummy_func( attr = PyStackRef_FromPyObjectSteal(attr_o); } - macro(LOAD_SUPER_ATTR) = _SPECIALIZE_LOAD_SUPER_ATTR + _LOAD_SUPER_ATTR + _PUSH_NULL_CONDITIONAL; + macro(LOAD_SUPER_ATTR) = + _SPECIALIZE_LOAD_SUPER_ATTR + + _LOAD_SUPER_ATTR + + _PUSH_NULL_CONDITIONAL; inst(LOAD_SUPER_ATTR_ATTR, (unused/1, global_super_st, class_st, self_st -- attr_st)) { PyObject *global_super = PyStackRef_AsPyObjectBorrow(global_super_st); @@ -2120,8 +2141,7 @@ dummy_func( PyStackRef_CLOSE(self_st); self_or_null = PyStackRef_NULL; } - PyStackRef_CLOSE(class_st); - PyStackRef_CLOSE(global_super_st); + DECREF_INPUTS(); attr = PyStackRef_FromPyObjectSteal(attr_o); } @@ -2235,7 +2255,7 @@ dummy_func( attr = PyStackRef_FromPyObjectNew(attr_o); #endif STAT_INC(LOAD_ATTR, hit); - DECREF_INPUTS(); + PyStackRef_CLOSE(owner); } macro(LOAD_ATTR_INSTANCE_VALUE) = @@ -2558,7 +2578,7 @@ dummy_func( } OPCODE_DEFERRED_INC(COMPARE_OP); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ } op(_COMPARE_OP, (left, right -- res)) { @@ -2772,13 +2792,26 @@ dummy_func( JUMPBY(oparg); } - tier1 op(_JUMP_BACKWARD, (the_counter/1 --)) { - assert(oparg <= INSTR_OFFSET()); - 
JUMPBY(-oparg); - #ifdef _Py_TIER2 - #if ENABLE_SPECIALIZATION + family(JUMP_BACKWARD, 1) = { + JUMP_BACKWARD_NO_JIT, + JUMP_BACKWARD_JIT, + }; + + tier1 op(_SPECIALIZE_JUMP_BACKWARD, (--)) { + #if ENABLE_SPECIALIZATION + if (this_instr->op.code == JUMP_BACKWARD) { + this_instr->op.code = tstate->interp->jit ? JUMP_BACKWARD_JIT : JUMP_BACKWARD_NO_JIT; + // Need to re-dispatch so the warmup counter isn't off by one: + next_instr = this_instr; + DISPATCH_SAME_OPARG(); + } + #endif + } + + tier1 op(_JIT, (--)) { + #ifdef _Py_TIER2 _Py_BackoffCounter counter = this_instr[1].counter; - if (backoff_counter_triggers(counter) && this_instr->op.code == JUMP_BACKWARD) { + if (backoff_counter_triggers(counter) && this_instr->op.code == JUMP_BACKWARD_JIT) { _Py_CODEUNIT *start = this_instr; /* Back up over EXTENDED_ARGs so optimizer sees the whole instruction */ while (oparg > 255) { @@ -2786,7 +2819,7 @@ dummy_func( start--; } _PyExecutorObject *executor; - int optimized = _PyOptimizer_Optimize(frame, start, stack_pointer, &executor, 0); + int optimized = _PyOptimizer_Optimize(frame, start, &executor, 0); if (optimized <= 0) { this_instr[1].counter = restart_backoff_counter(counter); ERROR_IF(optimized < 0, error); @@ -2801,13 +2834,25 @@ dummy_func( else { ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); } - #endif /* ENABLE_SPECIALIZATION */ - #endif /* _Py_TIER2 */ + #endif } macro(JUMP_BACKWARD) = + unused/1 + + _SPECIALIZE_JUMP_BACKWARD + + _CHECK_PERIODIC + + JUMP_BACKWARD_NO_INTERRUPT; + + macro(JUMP_BACKWARD_NO_JIT) = + unused/1 + + _CHECK_PERIODIC + + JUMP_BACKWARD_NO_INTERRUPT; + + macro(JUMP_BACKWARD_JIT) = + unused/1 + _CHECK_PERIODIC + - _JUMP_BACKWARD; + JUMP_BACKWARD_NO_INTERRUPT + + _JIT; pseudo(JUMP, (--)) = { JUMP_FORWARD, @@ -2896,6 +2941,7 @@ dummy_func( * generator or coroutine, so we deliberately do not check it here. * (see bpo-30039). 
*/ + assert(oparg <= INSTR_OFFSET()); JUMPBY(-oparg); } @@ -3635,15 +3681,14 @@ dummy_func( EXIT_IF(!PyStackRef_IsNull(null[0])); } - op(_EXPAND_METHOD, (callable[1], null[1], unused[oparg] -- method[1], self[1], unused[oparg])) { + op(_EXPAND_METHOD, (callable[1], self_or_null[1], unused[oparg] -- callable[1], self_or_null[1], unused[oparg])) { PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); - assert(PyStackRef_IsNull(null[0])); - DEAD(null); + assert(PyStackRef_IsNull(self_or_null[0])); assert(Py_TYPE(callable_o) == &PyMethod_Type); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); _PyStackRef temp = callable[0]; - method[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); - assert(PyStackRef_FunctionCheck(method[0])); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + assert(PyStackRef_FunctionCheck(callable[0])); PyStackRef_CLOSE(temp); } @@ -3704,13 +3749,13 @@ dummy_func( EXIT_IF(Py_TYPE(PyStackRef_AsPyObjectBorrow(callable[0])) != &PyMethod_Type); } - op(_INIT_CALL_BOUND_METHOD_EXACT_ARGS, (callable[1], null[1], unused[oparg] -- func[1], self[1], unused[oparg])) { - DEAD(null); + op(_INIT_CALL_BOUND_METHOD_EXACT_ARGS, (callable[1], self_or_null[1], unused[oparg] -- callable[1], self_or_null[1], unused[oparg])) { + assert(PyStackRef_IsNull(self_or_null[0])); PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); STAT_INC(CALL, hit); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); _PyStackRef temp = callable[0]; - func[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); PyStackRef_CLOSE(temp); } @@ -4135,8 +4180,9 @@ dummy_func( PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } @@ -4147,8 +4193,8 @@ dummy_func( EXIT_IF(meth->ml_flags != METH_O); // CPython promises to check all non-vectorcall function calls. EXIT_IF(tstate->c_recursion_remaining <= 0); - _PyStackRef arg_stackref = args[1]; - _PyStackRef self_stackref = args[0]; + _PyStackRef arg_stackref = arguments[1]; + _PyStackRef self_stackref = arguments[0]; EXIT_IF(!Py_IS_TYPE(PyStackRef_AsPyObjectBorrow(self_stackref), method->d_common.d_type)); STAT_INC(CALL, hit); @@ -4159,11 +4205,7 @@ dummy_func( PyStackRef_AsPyObjectBorrow(arg_stackref)); _Py_LeaveRecursiveCallTstate(tstate); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - PyStackRef_CLOSE(self_stackref); - PyStackRef_CLOSE(arg_stackref); - DEAD(args); - DEAD(self_or_null); - PyStackRef_CLOSE(callable[0]); + DECREF_INPUTS(); ERROR_IF(res_o == NULL, error); res = PyStackRef_FromPyObjectSteal(res_o); } @@ -4300,18 +4342,23 @@ dummy_func( CALL_KW_NON_PY, }; - inst(INSTRUMENTED_CALL_KW, (counter/1, version/2 -- )) { - int is_meth = !PyStackRef_IsNull(PEEK(oparg + 2)); - int total_args = oparg + is_meth; - PyObject *function = PyStackRef_AsPyObjectBorrow(PEEK(oparg + 3)); - PyObject *arg = total_args == 0 ? 
&_PyInstrumentation_MISSING - : PyStackRef_AsPyObjectBorrow(PEEK(total_args + 1)); + op(_MONITOR_CALL_KW, (callable[1], self_or_null[1], args[oparg], kwnames -- callable[1], self_or_null[1], args[oparg], kwnames)) { + int is_meth = !PyStackRef_IsNull(self_or_null[0]); + PyObject *arg; + if (is_meth) { + arg = PyStackRef_AsPyObjectBorrow(self_or_null[0]); + } + else if (args) { + arg = PyStackRef_AsPyObjectBorrow(args[0]); + } + else { + arg = &_PyInstrumentation_MISSING; + } + PyObject *function = PyStackRef_AsPyObjectBorrow(callable[0]); int err = _Py_call_instrumentation_2args( tstate, PY_MONITORING_EVENT_CALL, frame, this_instr, function, arg); ERROR_IF(err, error); - PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter); - GO_TO_INSTRUCTION(CALL_KW); } op(_MAYBE_EXPAND_METHOD_KW, (callable[1], self_or_null[1], args[oparg], kwnames_in -- func[1], maybe_self[1], args[oparg], kwnames_out)) { @@ -4450,15 +4497,14 @@ dummy_func( EXIT_IF(!PyStackRef_IsNull(null[0])); } - op(_EXPAND_METHOD_KW, (callable[1], null[1], unused[oparg], unused -- method[1], self[1], unused[oparg], unused)) { + op(_EXPAND_METHOD_KW, (callable[1], self_or_null[1], unused[oparg], unused -- callable[1], self_or_null[1], unused[oparg], unused)) { + assert(PyStackRef_IsNull(self_or_null[0])); _PyStackRef callable_s = callable[0]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable_s); - - assert(PyStackRef_IsNull(null[0])); assert(Py_TYPE(callable_o) == &PyMethod_Type); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); - method[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); - assert(PyStackRef_FunctionCheck(method[0])); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + assert(PyStackRef_FunctionCheck(callable[0])); PyStackRef_CLOSE(callable_s); } @@ -4490,6 +4536,13 @@ dummy_func( _MAYBE_EXPAND_METHOD_KW + _DO_CALL_KW; + macro(INSTRUMENTED_CALL_KW) = + counter/1 + + unused/2 + + _MONITOR_CALL_KW + + _MAYBE_EXPAND_METHOD_KW + + _DO_CALL_KW; + op(_CHECK_IS_NOT_PY_CALLABLE_KW, (callable[1], unused[1], unused[oparg], kwnames -- callable[1], unused[1], unused[oparg], kwnames)) { PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); EXIT_IF(PyFunction_Check(callable_o)); @@ -4536,10 +4589,6 @@ dummy_func( _CALL_KW_NON_PY + _CHECK_PERIODIC; - inst(INSTRUMENTED_CALL_FUNCTION_EX, ( -- )) { - GO_TO_INSTRUCTION(CALL_FUNCTION_EX); - } - op(_MAKE_CALLARGS_A_TUPLE, (func, unused, callargs, kwargs_in -- func, unused, tuple, kwargs_out)) { PyObject *callargs_o = PyStackRef_AsPyObjectBorrow(callargs); if (PyTuple_CheckExact(callargs_o)) { @@ -4564,7 +4613,8 @@ dummy_func( } } - op(_DO_CALL_FUNCTION_EX, (func_st, unused, callargs_st, kwargs_st -- result)) { + op(_DO_CALL_FUNCTION_EX, (func_st, null, callargs_st, kwargs_st -- result)) { + (void)null; PyObject *func = PyStackRef_AsPyObjectBorrow(func_st); // DICT_MERGE is called before this opcode if there are kwargs. 
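The _EXPAND_METHOD and _EXPAND_METHOD_KW rewrites above keep the callable/self_or_null stack slots in place and overwrite them with the bound method's im_func and im_self. A rough sketch of the same expansion outside the interpreter's stack-ref machinery, using the public PyMethod accessors; expand_bound_method is a hypothetical helper, not code from the patch.

#include <assert.h>
#include <Python.h>

/* Given a bound method in *callable (and NULL in *self_or_null), rewrite
 * the pair to (im_func, im_self), mirroring what _EXPAND_METHOD does on
 * the evaluation stack: new references are taken to the function and the
 * bound self, and the bound-method object itself is released. */
static void
expand_bound_method(PyObject **callable, PyObject **self_or_null)
{
    assert(*self_or_null == NULL);
    assert(PyMethod_Check(*callable));
    PyObject *bound = *callable;
    *self_or_null = Py_NewRef(PyMethod_GET_SELF(bound));  /* borrowed -> new ref */
    *callable = Py_NewRef(PyMethod_GET_FUNCTION(bound));  /* borrowed -> new ref */
    Py_DECREF(bound);
}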
@@ -4635,8 +4685,8 @@ dummy_func( result_o = PyObject_Call(func, callargs, kwargs); } PyStackRef_XCLOSE(kwargs_st); - DEAD(kwargs_st); PyStackRef_CLOSE(callargs_st); + DEAD(null); PyStackRef_CLOSE(func_st); ERROR_IF(result_o == NULL, error); result = PyStackRef_FromPyObjectSteal(result_o); @@ -4647,6 +4697,10 @@ dummy_func( _DO_CALL_FUNCTION_EX + _CHECK_PERIODIC; + macro(INSTRUMENTED_CALL_FUNCTION_EX) = + _MAKE_CALLARGS_A_TUPLE + + _DO_CALL_FUNCTION_EX + + _CHECK_PERIODIC; inst(MAKE_FUNCTION, (codeobj_st -- func)) { PyObject *codeobj = PyStackRef_AsPyObjectBorrow(codeobj_st); @@ -4773,12 +4827,11 @@ dummy_func( macro(BINARY_OP) = _SPECIALIZE_BINARY_OP + unused/4 + _BINARY_OP; - pure inst(SWAP, (bottom_in, unused[oparg-2], top_in -- - top_out, unused[oparg-2], bottom_out)) { - bottom_out = bottom_in; - DEAD(bottom_in); - top_out = top_in; - DEAD(top_in); + pure inst(SWAP, (bottom[1], unused[oparg-2], top[1] -- + bottom[1], unused[oparg-2], top[1])) { + _PyStackRef temp = bottom[0]; + bottom[0] = top[0]; + top[0] = temp; assert(oparg >= 2); } @@ -4991,7 +5044,7 @@ dummy_func( } else { int chain_depth = current_executor->vm_data.chain_depth + 1; - int optimized = _PyOptimizer_Optimize(frame, target, stack_pointer, &executor, chain_depth); + int optimized = _PyOptimizer_Optimize(frame, target, &executor, chain_depth); if (optimized <= 0) { exit->temperature = restart_backoff_counter(temperature); if (optimized < 0) { @@ -5092,7 +5145,7 @@ dummy_func( exit->temperature = advance_backoff_counter(exit->temperature); GOTO_TIER_ONE(target); } - int optimized = _PyOptimizer_Optimize(frame, target, stack_pointer, &executor, 0); + int optimized = _PyOptimizer_Optimize(frame, target, &executor, 0); if (optimized <= 0) { exit->temperature = restart_backoff_counter(exit->temperature); if (optimized < 0) { @@ -5138,7 +5191,8 @@ dummy_func( EXIT_TO_TIER1(); } - tier2 op(_ERROR_POP_N, (target/2, unused[oparg] --)) { + tier2 op(_ERROR_POP_N, (target/2 --)) { + assert(oparg == 0); frame->instr_ptr = _PyFrame_GetBytecode(frame) + target; SYNC_SP(); GOTO_UNWIND(); @@ -5157,18 +5211,18 @@ dummy_func( } label(pop_4_error) { - STACK_SHRINK(1); - goto pop_3_error; + STACK_SHRINK(4); + goto error; } label(pop_3_error) { - STACK_SHRINK(1); - goto pop_2_error; + STACK_SHRINK(3); + goto error; } label(pop_2_error) { - STACK_SHRINK(1); - goto pop_1_error; + STACK_SHRINK(2); + goto error; } label(pop_1_error) { @@ -5199,29 +5253,29 @@ dummy_func( goto exception_unwind; } - label(exception_unwind) { + spilled label(exception_unwind) { /* We can't use frame->instr_ptr here, as RERAISE may have set it */ int offset = INSTR_OFFSET()-1; int level, handler, lasti; - if (get_exception_handler(_PyFrame_GetCode(frame), offset, &level, &handler, &lasti) == 0) { + int handled = get_exception_handler(_PyFrame_GetCode(frame), offset, &level, &handler, &lasti); + if (handled == 0) { // No handlers, so exit. assert(_PyErr_Occurred(tstate)); - /* Pop remaining stack entries. 
*/ _PyStackRef *stackbase = _PyFrame_Stackbase(frame); - while (stack_pointer > stackbase) { - PyStackRef_XCLOSE(POP()); + while (frame->stackpointer > stackbase) { + _PyStackRef ref = _PyFrame_StackPop(frame); + PyStackRef_XCLOSE(ref); } - assert(STACK_LEVEL() == 0); - _PyFrame_SetStackPointer(frame, stack_pointer); monitor_unwind(tstate, frame, next_instr-1); goto exit_unwind; } - assert(STACK_LEVEL() >= level); _PyStackRef *new_top = _PyFrame_Stackbase(frame) + level; - while (stack_pointer > new_top) { - PyStackRef_XCLOSE(POP()); + assert(frame->stackpointer >= new_top); + while (frame->stackpointer > new_top) { + _PyStackRef ref = _PyFrame_StackPop(frame); + PyStackRef_XCLOSE(ref); } if (lasti) { int frame_lasti = _PyInterpreterFrame_LASTI(frame); @@ -5229,7 +5283,7 @@ dummy_func( if (lasti == NULL) { goto exception_unwind; } - PUSH(PyStackRef_FromPyObjectSteal(lasti)); + _PyFrame_StackPush(frame, PyStackRef_FromPyObjectSteal(lasti)); } /* Make the raw exception data @@ -5237,10 +5291,11 @@ dummy_func( so a program can emulate the Python main loop. */ PyObject *exc = _PyErr_GetRaisedException(tstate); - PUSH(PyStackRef_FromPyObjectSteal(exc)); + _PyFrame_StackPush(frame, PyStackRef_FromPyObjectSteal(exc)); next_instr = _PyFrame_GetBytecode(frame) + handler; - if (monitor_handled(tstate, frame, next_instr, exc) < 0) { + int err = monitor_handled(tstate, frame, next_instr, exc); + if (err < 0) { goto exception_unwind; } /* Resume normal execution */ @@ -5249,10 +5304,11 @@ dummy_func( lltrace_resume_frame(frame); } #endif + RELOAD_STACK(); DISPATCH(); } - label(exit_unwind) { + spilled label(exit_unwind) { assert(_PyErr_Occurred(tstate)); _Py_LeaveRecursiveCallPy(tstate); assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); @@ -5267,14 +5323,41 @@ dummy_func( tstate->c_recursion_remaining += PY_EVAL_C_STACK_UNITS; return NULL; } - goto resume_with_error; + next_instr = frame->instr_ptr; + RELOAD_STACK(); + goto error; } - label(resume_with_error) { + spilled label(start_frame) { + int too_deep = _Py_EnterRecursivePy(tstate); + if (too_deep) { + goto exit_unwind; + } next_instr = frame->instr_ptr; - stack_pointer = _PyFrame_GetStackPointer(frame); - goto error; + + #ifdef LLTRACE + { + int lltrace = maybe_lltrace_resume_frame(frame, GLOBALS()); + frame->lltrace = lltrace; + if (lltrace < 0) { + goto exit_unwind; + } + } + #endif + + #ifdef Py_DEBUG + /* _PyEval_EvalFrameDefault() must not be called with an exception set, + because it can clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!_PyErr_Occurred(tstate)); + #endif + + RELOAD_STACK(); + DISPATCH(); } + + + // END BYTECODES // } @@ -5284,7 +5367,6 @@ dummy_func( exit_unwind: handle_eval_breaker: resume_frame: - resume_with_error: start_frame: unbound_local_error: ; diff --git a/Python/ceval.c b/Python/ceval.c index 90fe994535dfb7..ea65153970e6d2 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -194,6 +194,7 @@ lltrace_instruction(_PyInterpreterFrame *frame, } fflush(stdout); } + static void lltrace_resume_frame(_PyInterpreterFrame *frame) { @@ -786,7 +787,16 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int _PyInterpreterFrame entry_frame; + if (_Py_EnterRecursiveCallTstate(tstate, "")) { + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); + _PyEval_FrameClearAndPop(tstate, frame); + return NULL; + } + /* Local "register" variables. + * These are cached values from the frame and code object. 
*/ + _Py_CODEUNIT *next_instr; + _PyStackRef *stack_pointer; #if defined(Py_DEBUG) && !defined(Py_STACKREF_DEBUG) /* Set these to invalid but identifiable values for debugging. */ @@ -811,76 +821,40 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int tstate->current_frame = frame; tstate->c_recursion_remaining -= (PY_EVAL_C_STACK_UNITS - 1); - if (_Py_EnterRecursiveCallTstate(tstate, "")) { - tstate->c_recursion_remaining--; - tstate->py_recursion_remaining--; - goto exit_unwind; - } /* support for generator.throw() */ if (throwflag) { if (_Py_EnterRecursivePy(tstate)) { - goto exit_unwind; + goto early_exit; } - /* Because this avoids the RESUME, - * we need to update instrumentation */ #ifdef Py_GIL_DISABLED /* Load thread-local bytecode */ if (frame->tlbc_index != ((_PyThreadStateImpl *)tstate)->tlbc_index) { _Py_CODEUNIT *bytecode = _PyEval_GetExecutableCode(tstate, _PyFrame_GetCode(frame)); if (bytecode == NULL) { - goto exit_unwind; + goto early_exit; } ptrdiff_t off = frame->instr_ptr - _PyFrame_GetBytecode(frame); frame->tlbc_index = ((_PyThreadStateImpl *)tstate)->tlbc_index; frame->instr_ptr = bytecode + off; } #endif + /* Because this avoids the RESUME, we need to update instrumentation */ _Py_Instrument(_PyFrame_GetCode(frame), tstate->interp); - monitor_throw(tstate, frame, frame->instr_ptr); - /* TO DO -- Monitor throw entry. */ - goto resume_with_error; + next_instr = frame->instr_ptr; + stack_pointer = _PyFrame_GetStackPointer(frame); + monitor_throw(tstate, frame, next_instr); + goto error; } - /* Local "register" variables. - * These are cached values from the frame and code object. */ - _Py_CODEUNIT *next_instr; - _PyStackRef *stack_pointer; - #if defined(_Py_TIER2) && !defined(_Py_JIT) /* Tier 2 interpreter state */ _PyExecutorObject *current_executor = NULL; const _PyUOpInstruction *next_uop = NULL; #endif -start_frame: - if (_Py_EnterRecursivePy(tstate)) { - goto exit_unwind; - } - - next_instr = frame->instr_ptr; -resume_frame: - stack_pointer = _PyFrame_GetStackPointer(frame); - -#ifdef LLTRACE - { - int lltrace = maybe_lltrace_resume_frame(frame, GLOBALS()); - frame->lltrace = lltrace; - if (lltrace < 0) { - goto exit_unwind; - } - } -#endif - -#ifdef Py_DEBUG - /* _PyEval_EvalFrameDefault() must not be called with an exception set, - because it can clear it (directly or indirectly) and so the - caller loses its exception */ - assert(!_PyErr_Occurred(tstate)); -#endif - - DISPATCH(); + goto start_frame; #include "generated_cases.c.h" @@ -984,10 +958,10 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); assert(next_uop[-1].format == UOP_FORMAT_TARGET); frame->return_offset = 0; // Don't leave this random - _PyFrame_SetStackPointer(frame, stack_pointer); Py_DECREF(current_executor); tstate->previous_executor = NULL; - goto resume_with_error; + next_instr = frame->instr_ptr; + goto error; jump_to_jump_target: assert(next_uop[-1].format == UOP_FORMAT_JUMP); @@ -1019,6 +993,20 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int #endif // _Py_TIER2 +early_exit: + assert(_PyErr_Occurred(tstate)); + _Py_LeaveRecursiveCallPy(tstate); + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); + // GH-99729: We need to unlink the frame *before* clearing it: + _PyInterpreterFrame *dying = frame; + frame = tstate->current_frame = dying->previous; + _PyEval_FrameClearAndPop(tstate, dying); + frame->return_offset = 0; + 
assert(frame->owner == FRAME_OWNED_BY_INTERPRETER); + /* Restore previous frame and exit */ + tstate->current_frame = frame->previous; + tstate->c_recursion_remaining += PY_EVAL_C_STACK_UNITS; + return NULL; } #if defined(__GNUC__) diff --git a/Python/ceval_macros.h b/Python/ceval_macros.h index 62c80c96e422fd..b6d9df32953892 100644 --- a/Python/ceval_macros.h +++ b/Python/ceval_macros.h @@ -221,15 +221,6 @@ GETITEM(PyObject *v, Py_ssize_t i) { #define LOCALS_ARRAY (frame->localsplus) #define GETLOCAL(i) (frame->localsplus[i]) -/* The SETLOCAL() macro must not DECREF the local variable in-place and - then store the new value; it must copy the old value to a temporary - value, then store the new value, and then DECREF the temporary value. - This is because it is possible that during the DECREF the frame is - accessed by other code (e.g. a __del__ method or gc.collect()) and the - variable would be pointing to already-freed memory. */ -#define SETLOCAL(i, value) do { _PyStackRef tmp = GETLOCAL(i); \ - GETLOCAL(i) = value; \ - PyStackRef_XCLOSE(tmp); } while (0) #ifdef Py_STATS #define UPDATE_MISS_STATS(INSTNAME) \ @@ -381,7 +372,9 @@ do { \ tstate->previous_executor = NULL; \ frame = tstate->current_frame; \ if (next_instr == NULL) { \ - goto resume_with_error; \ + next_instr = frame->instr_ptr; \ + stack_pointer = _PyFrame_GetStackPointer(frame); \ + goto error; \ } \ stack_pointer = _PyFrame_GetStackPointer(frame); \ DISPATCH(); \ diff --git a/Python/clinic/sysmodule.c.h b/Python/clinic/sysmodule.c.h index cfcbd55388efa0..1e53624d4d45d7 100644 --- a/Python/clinic/sysmodule.c.h +++ b/Python/clinic/sysmodule.c.h @@ -373,6 +373,36 @@ sys__is_interned(PyObject *module, PyObject *arg) return return_value; } +PyDoc_STRVAR(sys__is_immortal__doc__, +"_is_immortal($module, op, /)\n" +"--\n" +"\n" +"Return True if the given object is \"immortal\" per PEP 683.\n" +"\n" +"This function should be used for specialized purposes only."); + +#define SYS__IS_IMMORTAL_METHODDEF \ + {"_is_immortal", (PyCFunction)sys__is_immortal, METH_O, sys__is_immortal__doc__}, + +static int +sys__is_immortal_impl(PyObject *module, PyObject *op); + +static PyObject * +sys__is_immortal(PyObject *module, PyObject *op) +{ + PyObject *return_value = NULL; + int _return_value; + + _return_value = sys__is_immortal_impl(module, op); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyBool_FromLong((long)_return_value); + +exit: + return return_value; +} + PyDoc_STRVAR(sys_settrace__doc__, "settrace($module, function, /)\n" "--\n" @@ -1724,4 +1754,4 @@ sys__is_gil_enabled(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef SYS_GETANDROIDAPILEVEL_METHODDEF #define SYS_GETANDROIDAPILEVEL_METHODDEF #endif /* !defined(SYS_GETANDROIDAPILEVEL_METHODDEF) */ -/*[clinic end generated code: output=568b0a0069dc43e8 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=1e5f608092c12636 input=a9049054013a1b77]*/ diff --git a/Python/codegen.c b/Python/codegen.c index df3b5aaac1d0d9..e9853d7302f67f 100644 --- a/Python/codegen.c +++ b/Python/codegen.c @@ -201,9 +201,6 @@ static int codegen_subscript(compiler *, expr_ty); static int codegen_slice_two_parts(compiler *, expr_ty); static int codegen_slice(compiler *, expr_ty); -static bool are_all_items_const(asdl_expr_seq *, Py_ssize_t, Py_ssize_t); - - static int codegen_with(compiler *, stmt_ty, int); static int codegen_async_with(compiler *, stmt_ty, int); static int codegen_async_for(compiler *, stmt_ty); @@ -287,7 +284,7 @@ 
codegen_addop_load_const(compiler *c, location loc, PyObject *o) if (PyLong_CheckExact(o)) { int overflow; long val = PyLong_AsLongAndOverflow(o, &overflow); - if (!overflow && val >= 0 && val < 256 && val < _PY_NSMALLPOSINTS) { + if (!overflow && _PY_IS_SMALL_INT(val)) { ADDOP_I(c, loc, LOAD_SMALL_INT, val); return SUCCESS; } @@ -3210,34 +3207,6 @@ starunpack_helper_impl(compiler *c, location loc, int build, int add, int extend, int tuple) { Py_ssize_t n = asdl_seq_LEN(elts); - if (!injected_arg && n > 2 && are_all_items_const(elts, 0, n)) { - PyObject *folded = PyTuple_New(n); - if (folded == NULL) { - return ERROR; - } - for (Py_ssize_t i = 0; i < n; i++) { - PyObject *val = ((expr_ty)asdl_seq_GET(elts, i))->v.Constant.value; - PyTuple_SET_ITEM(folded, i, Py_NewRef(val)); - } - if (tuple && !pushed) { - ADDOP_LOAD_CONST_NEW(c, loc, folded); - } else { - if (add == SET_ADD) { - Py_SETREF(folded, PyFrozenSet_New(folded)); - if (folded == NULL) { - return ERROR; - } - } - ADDOP_I(c, loc, build, pushed); - ADDOP_LOAD_CONST_NEW(c, loc, folded); - ADDOP_I(c, loc, extend, 1); - if (tuple) { - ADDOP_I(c, loc, CALL_INTRINSIC_1, INTRINSIC_LIST_TO_TUPLE); - } - } - return SUCCESS; - } - int big = n + pushed + (injected_arg ? 1 : 0) > STACK_USE_GUIDELINE; int seen_star = 0; for (Py_ssize_t i = 0; i < n; i++) { @@ -3389,18 +3358,6 @@ codegen_set(compiler *c, expr_ty e) BUILD_SET, SET_ADD, SET_UPDATE, 0); } -static bool -are_all_items_const(asdl_expr_seq *seq, Py_ssize_t begin, Py_ssize_t end) -{ - for (Py_ssize_t i = begin; i < end; i++) { - expr_ty key = (expr_ty)asdl_seq_GET(seq, i); - if (key == NULL || key->kind != Constant_kind) { - return false; - } - } - return true; -} - static int codegen_subdict(compiler *c, expr_ty e, Py_ssize_t begin, Py_ssize_t end) { diff --git a/Python/compile.c b/Python/compile.c index ef470830336dde..b58c12d4b881ac 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -704,12 +704,12 @@ _PyCompile_ExitScope(compiler *c) assert(c->u); /* we are deleting from a list so this really shouldn't fail */ if (PySequence_DelItem(c->c_stack, n) < 0) { - PyErr_FormatUnraisable("Exception ignored on removing " + PyErr_FormatUnraisable("Exception ignored while removing " "the last compiler stack item"); } if (nested_seq != NULL) { if (_PyInstructionSequence_AddNested(c->u->u_instr_sequence, nested_seq) < 0) { - PyErr_FormatUnraisable("Exception ignored on appending " + PyErr_FormatUnraisable("Exception ignored while appending " "nested instruction sequence"); } } diff --git a/Python/crossinterp.c b/Python/crossinterp.c index 0a106ad636bfe8..aa2c1cb78bce06 100644 --- a/Python/crossinterp.c +++ b/Python/crossinterp.c @@ -368,12 +368,9 @@ _convert_exc_to_TracebackException(PyObject *exc, PyObject **p_tbexc) PyObject *create = NULL; // This is inspired by _PyErr_Display(). 
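The crossinterp.c hunk below collapses the two-step PyImport_ImportModule plus PyObject_GetAttrString dance into a single PyImport_ImportModuleAttrString call, the same substitution made in fileobject.c, memoryobject.c, pegen.c, and the tokenizer above. A small sketch of both shapes; the one-call form returns a new reference to the attribute or NULL with an exception set.

#include <Python.h>

/* Fetch traceback.TracebackException; the patch standardizes on the
 * single-call form. */
static PyObject *
get_tbexc_type(void)
{
#if 1
    /* One call: import the module and pull the attribute. */
    return PyImport_ImportModuleAttrString("traceback", "TracebackException");
#else
    /* The older two-step shape this patch removes. */
    PyObject *mod = PyImport_ImportModule("traceback");
    if (mod == NULL) {
        return NULL;
    }
    PyObject *attr = PyObject_GetAttrString(mod, "TracebackException");
    Py_DECREF(mod);
    return attr;
#endif
}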
- PyObject *tbmod = PyImport_ImportModule("traceback"); - if (tbmod == NULL) { - return -1; - } - PyObject *tbexc_type = PyObject_GetAttrString(tbmod, "TracebackException"); - Py_DECREF(tbmod); + PyObject *tbexc_type = PyImport_ImportModuleAttrString( + "traceback", + "TracebackException"); if (tbexc_type == NULL) { return -1; } @@ -784,7 +781,8 @@ _PyXI_excinfo_Apply(_PyXI_excinfo *info, PyObject *exctype) PyObject *exc = PyErr_GetRaisedException(); if (PyObject_SetAttrString(exc, "_errdisplay", tbexc) < 0) { #ifdef Py_DEBUG - PyErr_FormatUnraisable("Exception ignored when setting _errdisplay"); + PyErr_FormatUnraisable("Exception ignored while " + "setting _errdisplay"); #endif PyErr_Clear(); } diff --git a/Python/errors.c b/Python/errors.c index 11a74f6f25cfcf..cc188e9a617dc2 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -1633,7 +1633,7 @@ format_unraisable_v(const char *format, va_list va, PyObject *obj) PyObject *hook_args = make_unraisable_hook_args( tstate, exc_type, exc_value, exc_tb, err_msg, obj); if (hook_args == NULL) { - err_msg_str = ("Exception ignored on building " + err_msg_str = ("Exception ignored while building " "sys.unraisablehook arguments"); goto error; } diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 4addfbcf6d6419..22d11059fcadb8 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -19,7 +19,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) JUMP_TO_ERROR(); + if (err != 0) { + JUMP_TO_ERROR(); + } } break; } @@ -33,7 +35,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) JUMP_TO_ERROR(); + if (err != 0) { + JUMP_TO_ERROR(); + } } } break; @@ -81,7 +85,7 @@ PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg) ); stack_pointer = _PyFrame_GetStackPointer(frame); - if (1) JUMP_TO_ERROR(); + JUMP_TO_ERROR(); } value = PyStackRef_DUP(value_s); stack_pointer[0] = value; @@ -201,7 +205,6 @@ _PyStackRef value; oparg = CURRENT_OPARG(); value = GETLOCAL(oparg); - // do not use SETLOCAL here, it decrefs the old value GETLOCAL(oparg) = PyStackRef_NULL; stack_pointer[0] = value; stack_pointer += 1; @@ -303,9 +306,13 @@ oparg = 0; assert(oparg == CURRENT_OPARG()); value = stack_pointer[-1]; - SETLOCAL(oparg, value); + _PyStackRef tmp = GETLOCAL(oparg); + GETLOCAL(oparg) = value; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(tmp); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -314,9 +321,13 @@ oparg = 1; assert(oparg == CURRENT_OPARG()); value = stack_pointer[-1]; - SETLOCAL(oparg, value); + _PyStackRef tmp = GETLOCAL(oparg); + GETLOCAL(oparg) = value; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(tmp); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -325,9 +336,13 @@ oparg = 2; assert(oparg == CURRENT_OPARG()); value = stack_pointer[-1]; - SETLOCAL(oparg, value); + _PyStackRef tmp = GETLOCAL(oparg); + GETLOCAL(oparg) = value; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(tmp); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -336,9 +351,13 @@ oparg = 3; assert(oparg == CURRENT_OPARG()); value = stack_pointer[-1]; - SETLOCAL(oparg, 
value); + _PyStackRef tmp = GETLOCAL(oparg); + GETLOCAL(oparg) = value; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(tmp); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -347,9 +366,13 @@ oparg = 4; assert(oparg == CURRENT_OPARG()); value = stack_pointer[-1]; - SETLOCAL(oparg, value); + _PyStackRef tmp = GETLOCAL(oparg); + GETLOCAL(oparg) = value; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(tmp); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -358,9 +381,13 @@ oparg = 5; assert(oparg == CURRENT_OPARG()); value = stack_pointer[-1]; - SETLOCAL(oparg, value); + _PyStackRef tmp = GETLOCAL(oparg); + GETLOCAL(oparg) = value; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(tmp); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -369,9 +396,13 @@ oparg = 6; assert(oparg == CURRENT_OPARG()); value = stack_pointer[-1]; - SETLOCAL(oparg, value); + _PyStackRef tmp = GETLOCAL(oparg); + GETLOCAL(oparg) = value; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(tmp); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -380,9 +411,13 @@ oparg = 7; assert(oparg == CURRENT_OPARG()); value = stack_pointer[-1]; - SETLOCAL(oparg, value); + _PyStackRef tmp = GETLOCAL(oparg); + GETLOCAL(oparg) = value; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(tmp); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -390,9 +425,13 @@ _PyStackRef value; oparg = CURRENT_OPARG(); value = stack_pointer[-1]; - SETLOCAL(oparg, value); + _PyStackRef tmp = GETLOCAL(oparg); + GETLOCAL(oparg) = value; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(tmp); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -422,9 +461,11 @@ * This has the benign side effect that if value is * finalized it will see the location as the FOR_ITER's. */ - PyStackRef_CLOSE(value); stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(value); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -451,7 +492,11 @@ PyObject *res_o = PyNumber_Negative(PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-1] = res; break; @@ -476,7 +521,11 @@ int err = PyObject_IsTrue(PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); - if (err < 0) JUMP_TO_ERROR(); + if (err < 0) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = err ? 
PyStackRef_True : PyStackRef_False; stack_pointer[-1] = res; break; @@ -587,7 +636,11 @@ PyObject *res_o = PyNumber_Invert(PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-1] = res; break; @@ -647,7 +700,11 @@ PyObject *res_o = _PyLong_Multiply((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -669,7 +726,11 @@ PyObject *res_o = _PyLong_Add((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -691,7 +752,11 @@ PyObject *res_o = _PyLong_Subtract((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -754,7 +819,11 @@ ((PyFloatObject *)left_o)->ob_fval * ((PyFloatObject *)right_o)->ob_fval; PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -777,7 +846,11 @@ ((PyFloatObject *)left_o)->ob_fval + ((PyFloatObject *)right_o)->ob_fval; PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -800,7 +873,11 @@ ((PyFloatObject *)left_o)->ob_fval - ((PyFloatObject *)right_o)->ob_fval; PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -840,7 +917,11 @@ PyObject *res_o = PyUnicode_Concat(left_o, right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -887,7 +968,11 @@ PyUnicode_Append(&temp, right_o); *target_local = PyStackRef_FromPyObjectSteal(temp); 
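A short aside on the in-place concat fast path above: it leans on PyUnicode_Append, which consumes the reference held in its first argument and leaves NULL there on failure, which is why the interpreter checks the target slot for NULL afterwards. A hedged sketch of that contract outside the interpreter; append_in_place is a hypothetical helper.

#include <Python.h>

/* Append `right` to the string in *p_left, in the style used by the
 * BINARY_OP_INPLACE_ADD_UNICODE fast path. PyUnicode_Append() releases
 * the old reference in *p_left and stores either the concatenated string
 * or NULL (with an exception set) back into it; `right` is not stolen. */
static int
append_in_place(PyObject **p_left, PyObject *right)
{
    PyUnicode_Append(p_left, right);   /* *p_left is replaced, or NULLed on error */
    return (*p_left == NULL) ? -1 : 0;
}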
PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc); - if (PyStackRef_IsNull(*target_local)) JUMP_TO_ERROR(); + if (PyStackRef_IsNull(*target_local)) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } #if TIER_ONE // The STORE_FAST is already done. This is done here in tier one, // and during trace projection in tier two: @@ -957,7 +1042,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(container); PyStackRef_CLOSE(sub); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -993,11 +1082,17 @@ stack_pointer += 2; assert(WITHIN_STACK_BOUNDS()); } + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(container); - if (res_o == NULL) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-3] = res; - stack_pointer += -2; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -1031,7 +1126,11 @@ } PyStackRef_CLOSE(v); PyStackRef_CLOSE(container); - if (err) JUMP_TO_ERROR(); + if (err) { + stack_pointer += -4; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -4; assert(WITHIN_STACK_BOUNDS()); break; @@ -1079,10 +1178,14 @@ Py_INCREF(res_o); #endif PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(list_st); + stack_pointer = _PyFrame_GetStackPointer(frame); res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2] = res; - stack_pointer += -1; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -1121,10 +1224,14 @@ STAT_INC(BINARY_SUBSCR, hit); PyObject *res_o = (PyObject*)&_Py_SINGLETON(strings).ascii[c]; PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(str_st); + stack_pointer = _PyFrame_GetStackPointer(frame); res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2] = res; - stack_pointer += -1; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -1160,10 +1267,14 @@ assert(res_o != NULL); Py_INCREF(res_o); PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(tuple_st); + stack_pointer = _PyFrame_GetStackPointer(frame); res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2] = res; - stack_pointer += -1; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -1192,7 +1303,11 @@ } PyStackRef_CLOSE(dict_st); PyStackRef_CLOSE(sub_st); - if (rc <= 0) JUMP_TO_ERROR(); + if (rc <= 0) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } // not found or error res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; @@ -1262,7 +1377,11 @@ list = stack_pointer[-2 - (oparg-1)]; int err = _PyList_AppendTakeRef((PyListObject *)PyStackRef_AsPyObjectBorrow(list), PyStackRef_AsPyObjectSteal(v)); - if (err < 0) JUMP_TO_ERROR(); + if (err < 0) { + stack_pointer += -1; + 
assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); break; @@ -1279,7 +1398,11 @@ PyStackRef_AsPyObjectBorrow(v)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(v); - if (err) JUMP_TO_ERROR(); + if (err) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); break; @@ -1299,7 +1422,11 @@ PyStackRef_CLOSE(v); PyStackRef_CLOSE(container); PyStackRef_CLOSE(sub); - if (err) JUMP_TO_ERROR(); + if (err) { + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -3; assert(WITHIN_STACK_BOUNDS()); break; @@ -1346,10 +1473,10 @@ assert(old_value != NULL); UNLOCK_OBJECT(list); // unlock before decrefs! PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); - PyStackRef_CLOSE(list_st); stack_pointer += -3; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(list_st); Py_DECREF(old_value); stack_pointer = _PyFrame_GetStackPointer(frame); break; @@ -1373,10 +1500,14 @@ PyStackRef_AsPyObjectSteal(sub), PyStackRef_AsPyObjectSteal(value)); stack_pointer = _PyFrame_GetStackPointer(frame); - PyStackRef_CLOSE(dict_st); - if (err) JUMP_TO_ERROR(); stack_pointer += -3; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(dict_st); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err) { + JUMP_TO_ERROR(); + } break; } @@ -1392,7 +1523,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(container); PyStackRef_CLOSE(sub); - if (err) JUMP_TO_ERROR(); + if (err) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); break; @@ -1408,7 +1543,11 @@ PyObject *res_o = _PyIntrinsics_UnaryFunctions[oparg].func(tstate, PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-1] = res; break; @@ -1429,7 +1568,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value2_st); PyStackRef_CLOSE(value1_st); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -1481,13 +1624,19 @@ type->tp_name); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(obj); - if (true) JUMP_TO_ERROR(); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } _PyFrame_SetStackPointer(frame, stack_pointer); iter_o = (*getter)(obj_o); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(obj); - if (iter_o == NULL) JUMP_TO_ERROR(); + if (iter_o == NULL) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } if (Py_TYPE(iter_o)->tp_as_async == NULL || Py_TYPE(iter_o)->tp_as_async->am_anext == NULL) { stack_pointer += -1; @@ -1499,7 +1648,7 @@ Py_TYPE(iter_o)->tp_name); Py_DECREF(iter_o); stack_pointer = _PyFrame_GetStackPointer(frame); - if (true) JUMP_TO_ERROR(); + JUMP_TO_ERROR(); } iter = PyStackRef_FromPyObjectSteal(iter_o); stack_pointer[-1] = iter; @@ -1532,7 +1681,11 @@ PyObject *iter_o = _PyEval_GetAwaitable(PyStackRef_AsPyObjectBorrow(iterable), 
oparg); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(iterable); - if (iter_o == NULL) JUMP_TO_ERROR(); + if (iter_o == NULL) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } iter = PyStackRef_FromPyObjectSteal(iter_o); stack_pointer[-1] = iter; break; @@ -1653,13 +1806,15 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = PyMapping_GetOptionalItem(BUILTINS(), &_Py_ID(__build_class__), &bc_o); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err < 0) JUMP_TO_ERROR(); + if (err < 0) { + JUMP_TO_ERROR(); + } if (bc_o == NULL) { _PyFrame_SetStackPointer(frame, stack_pointer); _PyErr_SetString(tstate, PyExc_NameError, "__build_class__ not found"); stack_pointer = _PyFrame_GetStackPointer(frame); - if (true) JUMP_TO_ERROR(); + JUMP_TO_ERROR(); } bc = PyStackRef_FromPyObjectSteal(bc_o); stack_pointer[0] = bc; @@ -1681,7 +1836,9 @@ "no locals found when storing %R", name); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(v); - if (true) JUMP_TO_ERROR(); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } if (PyDict_CheckExact(ns)) { _PyFrame_SetStackPointer(frame, stack_pointer); @@ -1694,7 +1851,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); } PyStackRef_CLOSE(v); - if (err) JUMP_TO_ERROR(); + if (err) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); break; @@ -1738,7 +1899,11 @@ int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg, -1, top); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(seq); - if (res == 0) JUMP_TO_ERROR(); + if (res == 0) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -1 + oparg; assert(WITHIN_STACK_BOUNDS()); break; @@ -1842,7 +2007,11 @@ int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg & 0xFF, oparg >> 8, top); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(seq); - if (res == 0) JUMP_TO_ERROR(); + if (res == 0) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += (oparg & 0xFF) + (oparg >> 8); assert(WITHIN_STACK_BOUNDS()); break; @@ -1861,7 +2030,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(v); PyStackRef_CLOSE(owner); - if (err) JUMP_TO_ERROR(); + if (err) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); break; @@ -1876,7 +2049,11 @@ int err = PyObject_DelAttr(PyStackRef_AsPyObjectBorrow(owner), name); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(owner); - if (err) JUMP_TO_ERROR(); + if (err) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); break; @@ -1891,7 +2068,11 @@ int err = PyDict_SetItem(GLOBALS(), name, PyStackRef_AsPyObjectBorrow(v)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(v); - if (err) JUMP_TO_ERROR(); + if (err) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); break; @@ -1925,7 +2106,7 @@ _PyErr_SetString(tstate, PyExc_SystemError, "no locals found"); stack_pointer = _PyFrame_GetStackPointer(frame); - if (true) JUMP_TO_ERROR(); + JUMP_TO_ERROR(); } locals = PyStackRef_FromPyObjectNew(l); stack_pointer[0] = locals; @@ -1943,7 +2124,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject 
*v_o = _PyEval_LoadName(tstate, frame, name); stack_pointer = _PyFrame_GetStackPointer(frame); - if (v_o == NULL) JUMP_TO_ERROR(); + if (v_o == NULL) { + JUMP_TO_ERROR(); + } v = PyStackRef_FromPyObjectSteal(v_o); stack_pointer[0] = v; stack_pointer += 1; @@ -1959,7 +2142,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _PyEval_LoadGlobalStackRef(GLOBALS(), BUILTINS(), name, res); stack_pointer = _PyFrame_GetStackPointer(frame); - if (PyStackRef_IsNull(*res)) JUMP_TO_ERROR(); + if (PyStackRef_IsNull(*res)) { + JUMP_TO_ERROR(); + } stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; @@ -2103,9 +2288,13 @@ PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg) ); stack_pointer = _PyFrame_GetStackPointer(frame); - if (1) JUMP_TO_ERROR(); + JUMP_TO_ERROR(); } - SETLOCAL(oparg, PyStackRef_NULL); + _PyStackRef tmp = GETLOCAL(oparg); + GETLOCAL(oparg) = PyStackRef_NULL; + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(tmp); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -2118,7 +2307,11 @@ if (cell == NULL) { JUMP_TO_ERROR(); } - SETLOCAL(oparg, PyStackRef_FromPyObjectSteal(cell)); + _PyStackRef tmp = GETLOCAL(oparg); + GETLOCAL(oparg) = PyStackRef_FromPyObjectSteal(cell); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(tmp); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -2167,9 +2360,15 @@ JUMP_TO_ERROR(); } } + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(class_dict_st); + stack_pointer = _PyFrame_GetStackPointer(frame); value = PyStackRef_FromPyObjectSteal(value_o); - stack_pointer[-1] = value; + stack_pointer[0] = value; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); break; } @@ -2182,7 +2381,7 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg); stack_pointer = _PyFrame_GetStackPointer(frame); - if (true) JUMP_TO_ERROR(); + JUMP_TO_ERROR(); } value = PyStackRef_FromPyObjectSteal(value_o); stack_pointer[0] = value; @@ -2230,14 +2429,20 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(pieces[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } PyObject *str_o = _PyUnicode_JoinArray(&_Py_STR(empty), pieces_o, oparg); STACKREFS_TO_PYOBJECTS_CLEANUP(pieces_o); for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(pieces[_i]); } - if (str_o == NULL) JUMP_TO_ERROR(); + if (str_o == NULL) { + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } str = PyStackRef_FromPyObjectSteal(str_o); stack_pointer[-oparg] = str; stack_pointer += 1 - oparg; @@ -2303,7 +2508,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); } PyStackRef_CLOSE(iterable_st); - if (true) JUMP_TO_ERROR(); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } assert(Py_IsNone(none_val)); PyStackRef_CLOSE(iterable_st); @@ -2323,7 +2530,11 @@ PyStackRef_AsPyObjectBorrow(iterable)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(iterable); - if (err < 0) JUMP_TO_ERROR(); + if (err < 0) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); break; @@ -2341,7 +2552,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(values[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } int err = 0; for (int i = 0; i < oparg; 
i++) { @@ -2360,7 +2573,7 @@ _PyFrame_SetStackPointer(frame, stack_pointer); Py_DECREF(set_o); stack_pointer = _PyFrame_GetStackPointer(frame); - if (true) JUMP_TO_ERROR(); + JUMP_TO_ERROR(); } set = PyStackRef_FromPyObjectSteal(set_o); stack_pointer[-oparg] = set; @@ -2379,7 +2592,9 @@ for (int _i = oparg*2; --_i >= 0;) { PyStackRef_CLOSE(values[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -oparg*2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *map_o = _PyDict_FromItems( @@ -2391,7 +2606,11 @@ for (int _i = oparg*2; --_i >= 0;) { PyStackRef_CLOSE(values[_i]); } - if (map_o == NULL) JUMP_TO_ERROR(); + if (map_o == NULL) { + stack_pointer += -oparg*2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } map = PyStackRef_FromPyObjectSteal(map_o); stack_pointer[-oparg*2] = map; stack_pointer += 1 - oparg*2; @@ -2406,24 +2625,30 @@ _PyErr_Format(tstate, PyExc_SystemError, "no locals found when setting up annotations"); stack_pointer = _PyFrame_GetStackPointer(frame); - if (true) JUMP_TO_ERROR(); + JUMP_TO_ERROR(); } /* check if __annotations__ in locals()... */ _PyFrame_SetStackPointer(frame, stack_pointer); int err = PyMapping_GetOptionalItem(LOCALS(), &_Py_ID(__annotations__), &ann_dict); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err < 0) JUMP_TO_ERROR(); + if (err < 0) { + JUMP_TO_ERROR(); + } if (ann_dict == NULL) { _PyFrame_SetStackPointer(frame, stack_pointer); ann_dict = PyDict_New(); stack_pointer = _PyFrame_GetStackPointer(frame); - if (ann_dict == NULL) JUMP_TO_ERROR(); + if (ann_dict == NULL) { + JUMP_TO_ERROR(); + } _PyFrame_SetStackPointer(frame, stack_pointer); err = PyObject_SetItem(LOCALS(), &_Py_ID(__annotations__), ann_dict); Py_DECREF(ann_dict); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err) JUMP_TO_ERROR(); + if (err) { + JUMP_TO_ERROR(); + } } else { _PyFrame_SetStackPointer(frame, stack_pointer); @@ -2456,7 +2681,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); } PyStackRef_CLOSE(update); - if (true) JUMP_TO_ERROR(); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } PyStackRef_CLOSE(update); stack_pointer += -1; @@ -2483,7 +2710,9 @@ _PyEval_FormatKwargsError(tstate, callable_o, update_o); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(update); - if (true) JUMP_TO_ERROR(); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } PyStackRef_CLOSE(update); stack_pointer += -1; @@ -2510,14 +2739,16 @@ PyStackRef_AsPyObjectSteal(value) ); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) JUMP_TO_ERROR(); + if (err != 0) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); break; } - /* _INSTRUMENTED_LOAD_SUPER_ATTR is not a viable micro-op for tier 2 because it is instrumented */ - case _LOAD_SUPER_ATTR_ATTR: { _PyStackRef self_st; _PyStackRef class_st; @@ -2547,7 +2778,11 @@ PyStackRef_CLOSE(global_super_st); PyStackRef_CLOSE(class_st); PyStackRef_CLOSE(self_st); - if (attr == NULL) JUMP_TO_ERROR(); + if (attr == NULL) { + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } attr_st = PyStackRef_FromPyObjectSteal(attr); stack_pointer[-3] = attr_st; stack_pointer += -2; @@ -2591,11 +2826,17 @@ if (method_found) { self_or_null = self_st; // transfer ownership } else { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); 
PyStackRef_CLOSE(self_st); + stack_pointer = _PyFrame_GetStackPointer(frame); self_or_null = PyStackRef_NULL; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); } - PyStackRef_CLOSE(class_st); PyStackRef_CLOSE(global_super_st); + PyStackRef_CLOSE(class_st); attr = PyStackRef_FromPyObjectSteal(attr_o); stack_pointer[-3] = attr; stack_pointer[-2] = self_or_null; @@ -2635,7 +2876,11 @@ meth | NULL | arg1 | ... | argN */ PyStackRef_CLOSE(owner); - if (attr_o == NULL) JUMP_TO_ERROR(); + if (attr_o == NULL) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } self_or_null[0] = PyStackRef_NULL; } } @@ -2645,7 +2890,11 @@ attr_o = PyObject_GetAttr(PyStackRef_AsPyObjectBorrow(owner), name); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(owner); - if (attr_o == NULL) JUMP_TO_ERROR(); + if (attr_o == NULL) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } } attr = PyStackRef_FromPyObjectSteal(attr_o); stack_pointer[-1] = attr; @@ -2724,8 +2973,10 @@ attr = PyStackRef_FromPyObjectNew(attr_o); #endif STAT_INC(LOAD_ATTR, hit); - PyStackRef_CLOSE(owner); stack_pointer[-1] = attr; + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(owner); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -2784,8 +3035,10 @@ attr = PyStackRef_FromPyObjectSteal(attr_o); #endif STAT_INC(LOAD_ATTR, hit); - PyStackRef_CLOSE(owner); stack_pointer[-1] = attr; + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(owner); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -3001,10 +3254,10 @@ _PyDictValues_AddToInsertionOrder(values, index); } UNLOCK_OBJECT(owner_o); - PyStackRef_CLOSE(owner); stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(owner); Py_XDECREF(old_value); stack_pointer = _PyFrame_GetStackPointer(frame); break; @@ -3071,10 +3324,10 @@ // old_value should be DECREFed after GC track checking is done, if not, it could raise a segmentation fault, // when dict only holds the strong reference to value in ep->me_value. STAT_INC(STORE_ATTR, hit); - PyStackRef_CLOSE(owner); stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(owner); Py_XDECREF(old_value); stack_pointer = _PyFrame_GetStackPointer(frame); break; @@ -3096,10 +3349,10 @@ PyObject *old_value = *(PyObject **)addr; FT_ATOMIC_STORE_PTR_RELEASE(*(PyObject **)addr, PyStackRef_AsPyObjectSteal(value)); UNLOCK_OBJECT(owner_o); - PyStackRef_CLOSE(owner); stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(owner); Py_XDECREF(old_value); stack_pointer = _PyFrame_GetStackPointer(frame); break; @@ -3120,7 +3373,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } if (oparg & 16) { stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); @@ -3128,7 +3385,9 @@ int res_bool = PyObject_IsTrue(res_o); Py_DECREF(res_o); stack_pointer = _PyFrame_GetStackPointer(frame); - if (res_bool < 0) JUMP_TO_ERROR(); + if (res_bool < 0) { + JUMP_TO_ERROR(); + } res = res_bool ? 
PyStackRef_True : PyStackRef_False; } else { @@ -3256,7 +3515,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); - if (res < 0) JUMP_TO_ERROR(); + if (res < 0) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } b = (res ^ oparg) ? PyStackRef_True : PyStackRef_False; stack_pointer[-2] = b; stack_pointer += -1; @@ -3284,7 +3547,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); - if (res < 0) JUMP_TO_ERROR(); + if (res < 0) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } b = (res ^ oparg) ? PyStackRef_True : PyStackRef_False; stack_pointer[-2] = b; stack_pointer += -1; @@ -3311,7 +3578,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); - if (res < 0) JUMP_TO_ERROR(); + if (res < 0) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } b = (res ^ oparg) ? PyStackRef_True : PyStackRef_False; stack_pointer[-2] = b; stack_pointer += -1; @@ -3334,7 +3605,9 @@ if (err < 0) { PyStackRef_CLOSE(exc_value_st); PyStackRef_CLOSE(match_type_st); - if (true) JUMP_TO_ERROR(); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } PyObject *match_o = NULL; PyObject *rest_o = NULL; @@ -3344,9 +3617,17 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(exc_value_st); PyStackRef_CLOSE(match_type_st); - if (res < 0) JUMP_TO_ERROR(); + if (res < 0) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } assert((match_o == NULL) == (rest_o == NULL)); - if (match_o == NULL) JUMP_TO_ERROR(); + if (match_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } if (!Py_IsNone(match_o)) { stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); @@ -3377,7 +3658,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); if (err < 0) { PyStackRef_CLOSE(right); - if (true) JUMP_TO_ERROR(); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } _PyFrame_SetStackPointer(frame, stack_pointer); int res = PyErr_GivenExceptionMatches(left_o, right_o); @@ -3403,7 +3686,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(level); PyStackRef_CLOSE(fromlist); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -3420,7 +3707,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = _PyEval_ImportFrom(tstate, PyStackRef_AsPyObjectBorrow(from), name); stack_pointer = _PyFrame_GetStackPointer(frame); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[0] = res; stack_pointer += 1; @@ -3455,9 +3744,13 @@ _PyFrame_SetStackPointer(frame, stack_pointer); Py_ssize_t len_i = PyObject_Length(PyStackRef_AsPyObjectBorrow(obj)); stack_pointer = _PyFrame_GetStackPointer(frame); - if (len_i < 0) JUMP_TO_ERROR(); + if (len_i < 0) { + JUMP_TO_ERROR(); + } PyObject *len_o = PyLong_FromSsize_t(len_i); - if (len_o == NULL) JUMP_TO_ERROR(); + if (len_o == NULL) { + JUMP_TO_ERROR(); + } len = PyStackRef_FromPyObjectSteal(len_o); stack_pointer[0] = len; stack_pointer += 1; @@ -3491,7 +3784,11 @@ attrs = PyStackRef_FromPyObjectSteal(attrs_o); } else { - if (_PyErr_Occurred(tstate)) JUMP_TO_ERROR(); + if 
(_PyErr_Occurred(tstate)) { + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } // Error! attrs = PyStackRef_None; // Failure! } @@ -3536,7 +3833,9 @@ PyObject *values_or_none_o = _PyEval_MatchKeys(tstate, PyStackRef_AsPyObjectBorrow(subject), PyStackRef_AsPyObjectBorrow(keys)); stack_pointer = _PyFrame_GetStackPointer(frame); - if (values_or_none_o == NULL) JUMP_TO_ERROR(); + if (values_or_none_o == NULL) { + JUMP_TO_ERROR(); + } values_or_none = PyStackRef_FromPyObjectSteal(values_or_none_o); stack_pointer[0] = values_or_none; stack_pointer += 1; @@ -3553,7 +3852,11 @@ PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(iterable); - if (iter_o == NULL) JUMP_TO_ERROR(); + if (iter_o == NULL) { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } iter = PyStackRef_FromPyObjectSteal(iter_o); stack_pointer[-1] = iter; break; @@ -3773,7 +4076,9 @@ r->start = value + r->step; r->len--; PyObject *res = PyLong_FromLong(value); - if (res == NULL) JUMP_TO_ERROR(); + if (res == NULL) { + JUMP_TO_ERROR(); + } next = PyStackRef_FromPyObjectSteal(res); stack_pointer[0] = next; stack_pointer += 1; @@ -3833,7 +4138,7 @@ Py_TYPE(owner_o)->tp_name); stack_pointer = _PyFrame_GetStackPointer(frame); } - if (true) JUMP_TO_ERROR(); + JUMP_TO_ERROR(); } attr = PyStackRef_FromPyObjectSteal(attr_o); self_or_null = self_or_null_o == NULL ? @@ -3886,7 +4191,9 @@ PyObject *res_o = PyObject_Vectorcall(exit_func_o, stack + 2 - has_self, (3 + has_self) | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL); stack_pointer = _PyFrame_GetStackPointer(frame); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[0] = res; stack_pointer += 1; @@ -4069,7 +4376,9 @@ PyObject *method = ((PyMethodObject *)callable_o)->im_func; _PyStackRef temp = callable[0]; func[0] = PyStackRef_FromPyObjectNew(method); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); } break; } @@ -4175,23 +4484,21 @@ } case _EXPAND_METHOD: { - _PyStackRef *null; + _PyStackRef *self_or_null; _PyStackRef *callable; - _PyStackRef *method; - _PyStackRef *self; oparg = CURRENT_OPARG(); - null = &stack_pointer[-1 - oparg]; + self_or_null = &stack_pointer[-1 - oparg]; callable = &stack_pointer[-2 - oparg]; - method = &stack_pointer[-2 - oparg]; - self = &stack_pointer[-1 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); - assert(PyStackRef_IsNull(null[0])); + assert(PyStackRef_IsNull(self_or_null[0])); assert(Py_TYPE(callable_o) == &PyMethod_Type); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); _PyStackRef temp = callable[0]; - method[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); - assert(PyStackRef_FunctionCheck(method[0])); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + assert(PyStackRef_FunctionCheck(callable[0])); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -4238,7 +4545,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } 
_PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PyObject_Vectorcall( @@ -4253,7 +4562,11 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -4279,21 +4592,20 @@ } case _INIT_CALL_BOUND_METHOD_EXACT_ARGS: { - _PyStackRef *null; + _PyStackRef *self_or_null; _PyStackRef *callable; - _PyStackRef *func; - _PyStackRef *self; oparg = CURRENT_OPARG(); - null = &stack_pointer[-1 - oparg]; + self_or_null = &stack_pointer[-1 - oparg]; callable = &stack_pointer[-2 - oparg]; - func = &stack_pointer[-2 - oparg]; - self = &stack_pointer[-1 - oparg]; + assert(PyStackRef_IsNull(self_or_null[0])); PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); STAT_INC(CALL, hit); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); _PyStackRef temp = callable[0]; - func[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -4525,10 +4837,12 @@ } STAT_INC(CALL, hit); res = PyStackRef_FromPyObjectSteal(Py_NewRef(Py_TYPE(arg_o))); - PyStackRef_CLOSE(arg); stack_pointer[-3] = res; stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(arg); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -4556,11 +4870,17 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PyObject_Str(arg_o); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(arg); - if (res_o == NULL) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-3] = res; - stack_pointer += -2; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -4589,11 +4909,17 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PySequence_Tuple(arg_o); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(arg); - if (res_o == NULL) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-3] = res; - stack_pointer += -2; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -4645,7 +4971,9 @@ self[0] = PyStackRef_FromPyObjectSteal(self_o); _PyStackRef temp = callable[0]; init[0] = PyStackRef_FromPyObjectNew(init_func); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -4738,7 +5066,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } _PyFrame_SetStackPointer(frame, 
stack_pointer); PyObject *res_o = tp->tp_vectorcall((PyObject *)tp, args_o, total_args, NULL); @@ -4749,7 +5079,11 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -4799,12 +5133,20 @@ stack_pointer = _PyFrame_GetStackPointer(frame); _Py_LeaveRecursiveCallTstate(tstate); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(arg); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callable[0]); - if (res_o == NULL) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2 - oparg] = res; - stack_pointer += -1 - oparg; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -4844,7 +5186,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = ((PyCFunctionFast)(void(*)(void))cfunc)( @@ -4859,7 +5203,11 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -4906,7 +5254,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = cfunc(PyCFunction_GET_SELF(callable_o), args_o, total_args, NULL); @@ -4918,7 +5268,11 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -4965,11 +5319,17 @@ if (res_o == NULL) { GOTO_ERROR(error); } + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(arg_stackref); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callable[0]); + stack_pointer = _PyFrame_GetStackPointer(frame); res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2 - oparg] = res; - stack_pointer += -1 - oparg; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -5050,17 +5410,25 @@ STAT_INC(CALL, hit); int err = _PyList_AppendTakeRef((PyListObject *)self_o, PyStackRef_AsPyObjectSteal(arg)); UNLOCK_OBJECT(self_o); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(self); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, 
stack_pointer); PyStackRef_CLOSE(callable); - if (err) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err) { + JUMP_TO_ERROR(); + } #if TIER_ONE // Skip the following POP_TOP. This is done here in tier one, and // during trace projection in tier two: assert(next_instr->op.code == POP_TOP); SKIP_OVER(1); #endif - stack_pointer += -3; - assert(WITHIN_STACK_BOUNDS()); break; } @@ -5075,8 +5443,9 @@ callable = &stack_pointer[-2 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } PyMethodDescrObject *method = (PyMethodDescrObject *)callable_o; @@ -5098,8 +5467,8 @@ UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } - _PyStackRef arg_stackref = args[1]; - _PyStackRef self_stackref = args[0]; + _PyStackRef arg_stackref = arguments[1]; + _PyStackRef self_stackref = arguments[0]; if (!Py_IS_TYPE(PyStackRef_AsPyObjectBorrow(self_stackref), method->d_common.d_type)) { UOP_STAT_INC(uopcode, miss); @@ -5115,10 +5484,16 @@ stack_pointer = _PyFrame_GetStackPointer(frame); _Py_LeaveRecursiveCallTstate(tstate); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - PyStackRef_CLOSE(self_stackref); - PyStackRef_CLOSE(arg_stackref); PyStackRef_CLOSE(callable[0]); - if (res_o == NULL) JUMP_TO_ERROR(); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -5167,7 +5542,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } _PyFrame_SetStackPointer(frame, stack_pointer); PyCFunctionFastWithKeywords cfunc = @@ -5181,7 +5558,11 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -5238,12 +5619,20 @@ stack_pointer = _PyFrame_GetStackPointer(frame); _Py_LeaveRecursiveCallTstate(tstate); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(self_stackref); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callable[0]); - if (res_o == NULL) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2 - oparg] = res; - stack_pointer += -1 - oparg; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } @@ -5289,7 +5678,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (true) JUMP_TO_ERROR(); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } _PyFrame_SetStackPointer(frame, stack_pointer); PyCFunctionFast cfunc = @@ -5303,7 +5694,11 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == 
NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -5311,7 +5706,7 @@ break; } - /* _INSTRUMENTED_CALL_KW is not a viable micro-op for tier 2 because it is instrumented */ + /* _MONITOR_CALL_KW is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */ case _MAYBE_EXPAND_METHOD_KW: { _PyStackRef kwnames_in; @@ -5335,7 +5730,9 @@ PyObject *method = ((PyMethodObject *)callable_o)->im_func; _PyStackRef temp = callable[0]; func[0] = PyStackRef_FromPyObjectNew(method); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); } kwnames_out = kwnames_in; stack_pointer[-1] = kwnames_out; @@ -5374,10 +5771,14 @@ arguments, positional_args, kwnames_o, frame ); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(kwnames); + stack_pointer = _PyFrame_GetStackPointer(frame); // The frame has stolen all the arguments from the stack, // so there is no need to clean them up. - stack_pointer += -3 - oparg; + stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); if (temp == NULL) { JUMP_TO_ERROR(); @@ -5436,23 +5837,21 @@ } case _EXPAND_METHOD_KW: { - _PyStackRef *null; + _PyStackRef *self_or_null; _PyStackRef *callable; - _PyStackRef *method; - _PyStackRef *self; oparg = CURRENT_OPARG(); - null = &stack_pointer[-2 - oparg]; + self_or_null = &stack_pointer[-2 - oparg]; callable = &stack_pointer[-3 - oparg]; - method = &stack_pointer[-3 - oparg]; - self = &stack_pointer[-2 - oparg]; + assert(PyStackRef_IsNull(self_or_null[0])); _PyStackRef callable_s = callable[0]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable_s); - assert(PyStackRef_IsNull(null[0])); assert(Py_TYPE(callable_o) == &PyMethod_Type); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); - method[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); - assert(PyStackRef_FunctionCheck(method[0])); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + assert(PyStackRef_FunctionCheck(callable[0])); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callable_s); + stack_pointer = _PyFrame_GetStackPointer(frame); break; } @@ -5502,7 +5901,9 @@ PyStackRef_CLOSE(args[_i]); } PyStackRef_CLOSE(kwnames); - if (true) JUMP_TO_ERROR(); + stack_pointer += -3 - oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); } PyObject *kwnames_o = PyStackRef_AsPyObjectBorrow(kwnames); int positional_args = total_args - (int)PyTuple_GET_SIZE(kwnames_o); @@ -5512,7 +5913,11 @@ positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET, kwnames_o); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(kwnames); + stack_pointer = _PyFrame_GetStackPointer(frame); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); PyStackRef_CLOSE(callable[0]); @@ -5520,16 +5925,18 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + 
JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-3 - oparg] = res; - stack_pointer += -2 - oparg; + stack_pointer[-2 - oparg] = res; + stack_pointer += -1 - oparg; assert(WITHIN_STACK_BOUNDS()); break; } - /* _INSTRUMENTED_CALL_FUNCTION_EX is not a viable micro-op for tier 2 because it is instrumented */ - case _MAKE_CALLARGS_A_TUPLE: { _PyStackRef kwargs_in; _PyStackRef callargs; @@ -5558,8 +5965,14 @@ JUMP_TO_ERROR(); } kwargs_out = kwargs_in; + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callargs); + stack_pointer = _PyFrame_GetStackPointer(frame); tuple = PyStackRef_FromPyObjectSteal(tuple_o); + stack_pointer += 2; + assert(WITHIN_STACK_BOUNDS()); } stack_pointer[-2] = tuple; stack_pointer[-1] = kwargs_out; @@ -5577,12 +5990,20 @@ PyFunctionObject *func_obj = (PyFunctionObject *) PyFunction_New(codeobj, GLOBALS()); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(codeobj_st); - if (func_obj == NULL) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (func_obj == NULL) { + JUMP_TO_ERROR(); + } _PyFunction_SetVersion( func_obj, ((PyCodeObject *)codeobj)->co_version); func = PyStackRef_FromPyObjectSteal((PyObject *)func_obj); - stack_pointer[-1] = func; + stack_pointer[0] = func; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); break; } @@ -5615,7 +6036,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); stack_pointer = _PyFrame_GetStackPointer(frame); - if (gen == NULL) JUMP_TO_ERROR(); + if (gen == NULL) { + JUMP_TO_ERROR(); + } assert(EMPTY()); _PyFrame_SetStackPointer(frame, stack_pointer); _PyInterpreterFrame *gen_frame = &gen->gi_iframe; @@ -5650,7 +6073,11 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - if (slice_o == NULL) JUMP_TO_ERROR(); + if (slice_o == NULL) { + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } slice = PyStackRef_FromPyObjectSteal(slice_o); stack_pointer[-oparg] = slice; stack_pointer += 1 - oparg; @@ -5669,10 +6096,18 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *result_o = conv_fn(PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(value); - if (result_o == NULL) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (result_o == NULL) { + JUMP_TO_ERROR(); + } result = PyStackRef_FromPyObjectSteal(result_o); - stack_pointer[-1] = result; + stack_pointer[0] = result; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); break; } @@ -5687,14 +6122,24 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PyObject_Format(value_o, NULL); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(value); - if (res_o == NULL) JUMP_TO_ERROR(); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); } else { res = value; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); } - stack_pointer[-1] = res; + stack_pointer[0] = res; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); break; } @@ -5709,7 
+6154,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); PyStackRef_CLOSE(fmt_spec); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -5745,7 +6194,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(lhs); PyStackRef_CLOSE(rhs); - if (res_o == NULL) JUMP_TO_ERROR(); + if (res_o == NULL) { + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + JUMP_TO_ERROR(); + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -5754,18 +6207,15 @@ } case _SWAP: { - _PyStackRef top_in; - _PyStackRef bottom_in; - _PyStackRef top_out; - _PyStackRef bottom_out; + _PyStackRef *top; + _PyStackRef *bottom; oparg = CURRENT_OPARG(); - top_in = stack_pointer[-1]; - bottom_in = stack_pointer[-2 - (oparg-2)]; - bottom_out = bottom_in; - top_out = top_in; + top = &stack_pointer[-1]; + bottom = &stack_pointer[-2 - (oparg-2)]; + _PyStackRef temp = bottom[0]; + bottom[0] = top[0]; + top[0] = temp; assert(oparg >= 2); - stack_pointer[-2 - (oparg-2)] = top_out; - stack_pointer[-1] = bottom_out; break; } @@ -5818,9 +6268,11 @@ val = stack_pointer[-1]; int is_none = PyStackRef_IsNone(val); if (!is_none) { - PyStackRef_CLOSE(val); stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(val); + stack_pointer = _PyFrame_GetStackPointer(frame); if (1) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); @@ -5835,9 +6287,11 @@ _PyStackRef val; val = stack_pointer[-1]; int is_none = PyStackRef_IsNone(val); - PyStackRef_CLOSE(val); stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(val); + stack_pointer = _PyFrame_GetStackPointer(frame); if (is_none) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); @@ -5903,7 +6357,9 @@ #endif if (exit->executor && !exit->executor->vm_data.valid) { exit->temperature = initial_temperature_backoff_counter(); + _PyFrame_SetStackPointer(frame, stack_pointer); Py_CLEAR(exit->executor); + stack_pointer = _PyFrame_GetStackPointer(frame); } if (exit->executor == NULL) { _Py_BackoffCounter temperature = exit->temperature; @@ -5920,7 +6376,7 @@ else { int chain_depth = current_executor->vm_data.chain_depth + 1; _PyFrame_SetStackPointer(frame, stack_pointer); - int optimized = _PyOptimizer_Optimize(frame, target, stack_pointer, &executor, chain_depth); + int optimized = _PyOptimizer_Optimize(frame, target, &executor, chain_depth); stack_pointer = _PyFrame_GetStackPointer(frame); if (optimized <= 0) { exit->temperature = restart_backoff_counter(temperature); @@ -5975,9 +6431,15 @@ _PyStackRef value; pop = stack_pointer[-1]; PyObject *ptr = (PyObject *)CURRENT_OPERAND0(); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(pop); + stack_pointer = _PyFrame_GetStackPointer(frame); value = PyStackRef_FromPyObjectImmortal(ptr); - stack_pointer[-1] = value; + stack_pointer[0] = value; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); break; } @@ -6081,7 +6543,7 @@ GOTO_TIER_ONE(target); } _PyFrame_SetStackPointer(frame, stack_pointer); - int optimized = _PyOptimizer_Optimize(frame, target, stack_pointer, &executor, 0); + int optimized = _PyOptimizer_Optimize(frame, target, &executor, 0); stack_pointer = 
_PyFrame_GetStackPointer(frame); if (optimized <= 0) { exit->temperature = restart_backoff_counter(exit->temperature); @@ -6144,8 +6606,7 @@ case _ERROR_POP_N: { oparg = CURRENT_OPARG(); uint32_t target = (uint32_t)CURRENT_OPERAND0(); - stack_pointer += -oparg; - assert(WITHIN_STACK_BOUNDS()); + assert(oparg == 0); _PyFrame_SetStackPointer(frame, stack_pointer); frame->instr_ptr = _PyFrame_GetBytecode(frame) + target; stack_pointer = _PyFrame_GetStackPointer(frame); diff --git a/Python/flowgraph.c b/Python/flowgraph.c index 24561c1ee04db9..95ab53ce64301c 100644 --- a/Python/flowgraph.c +++ b/Python/flowgraph.c @@ -6,6 +6,7 @@ #include "pycore_compile.h" #include "pycore_intrinsics.h" #include "pycore_pymem.h" // _PyMem_IsPtrFreed() +#include "pycore_long.h" // _PY_IS_SMALL_INT() #include "pycore_opcode_utils.h" #include "pycore_opcode_metadata.h" // OPCODE_HAS_ARG, etc @@ -1336,6 +1337,17 @@ add_const(PyObject *newconst, PyObject *consts, PyObject *const_cache) return (int)index; } +static bool +is_constant_sequence(cfg_instr *inst, int n) +{ + for (int i = 0; i < n; i++) { + if(!loads_const(inst[i].i_opcode)) { + return false; + } + } + return true; +} + /* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cn, BUILD_TUPLE n with LOAD_CONST (c1, c2, ... cn). The consts table must still be in list form so that the @@ -1353,10 +1365,8 @@ fold_tuple_on_constants(PyObject *const_cache, assert(inst[n].i_opcode == BUILD_TUPLE); assert(inst[n].i_oparg == n); - for (int i = 0; i < n; i++) { - if (!loads_const(inst[i].i_opcode)) { - return SUCCESS; - } + if (!is_constant_sequence(inst, n)) { + return SUCCESS; } /* Buildup new tuple of constants */ @@ -1384,6 +1394,140 @@ fold_tuple_on_constants(PyObject *const_cache, return SUCCESS; } +#define MIN_CONST_SEQUENCE_SIZE 3 +/* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cN, BUILD_LIST N + with BUILD_LIST 0, LOAD_CONST (c1, c2, ... cN), LIST_EXTEND 1, + or BUILD_SET & SET_UPDATE respectively. +*/ +static int +optimize_if_const_list_or_set(PyObject *const_cache, cfg_instr* inst, int n, PyObject *consts) +{ + assert(PyDict_CheckExact(const_cache)); + assert(PyList_CheckExact(consts)); + assert(inst[n].i_oparg == n); + + int build = inst[n].i_opcode; + assert(build == BUILD_LIST || build == BUILD_SET); + int extend = build == BUILD_LIST ? LIST_EXTEND : SET_UPDATE; + + if (n < MIN_CONST_SEQUENCE_SIZE || !is_constant_sequence(inst, n)) { + return SUCCESS; + } + PyObject *newconst = PyTuple_New(n); + if (newconst == NULL) { + return ERROR; + } + for (int i = 0; i < n; i++) { + int op = inst[i].i_opcode; + int arg = inst[i].i_oparg; + PyObject *constant = get_const_value(op, arg, consts); + if (constant == NULL) { + return ERROR; + } + PyTuple_SET_ITEM(newconst, i, constant); + } + if (build == BUILD_SET) { + PyObject *frozenset = PyFrozenSet_New(newconst); + if (frozenset == NULL) { + return ERROR; + } + Py_SETREF(newconst, frozenset); + } + int index = add_const(newconst, consts, const_cache); + RETURN_IF_ERROR(index); + INSTR_SET_OP1(&inst[0], build, 0); + for (int i = 1; i < n - 1; i++) { + INSTR_SET_OP0(&inst[i], NOP); + } + INSTR_SET_OP1(&inst[n-1], LOAD_CONST, index); + INSTR_SET_OP1(&inst[n], extend, 1); + return SUCCESS; +} + +/* + Walk basic block upwards starting from "start" to collect instruction pair + that loads consts skipping NOP's in between. 
+*/ +static bool +find_load_const_pair(basicblock *bb, int start, cfg_instr **first, cfg_instr **second) +{ + cfg_instr *second_load_const = NULL; + while (start >= 0) { + cfg_instr *inst = &bb->b_instr[start--]; + if (inst->i_opcode == NOP) { + continue; + } + if (!loads_const(inst->i_opcode)) { + return false; + } + if (second_load_const == NULL) { + second_load_const = inst; + continue; + } + *first = inst; + *second = second_load_const; + return true; + } + return false; +} + +/* Determine opcode & oparg for freshly folded constant. */ +static int +newop_from_folded(PyObject *newconst, PyObject *consts, + PyObject *const_cache, int *newopcode, int *newoparg) +{ + if (PyLong_CheckExact(newconst)) { + int overflow; + long val = PyLong_AsLongAndOverflow(newconst, &overflow); + if (!overflow && _PY_IS_SMALL_INT(val)) { + *newopcode = LOAD_SMALL_INT; + *newoparg = val; + return SUCCESS; + } + } + *newopcode = LOAD_CONST; + *newoparg = add_const(newconst, consts, const_cache); + RETURN_IF_ERROR(*newoparg); + return SUCCESS; +} + +static int +optimize_if_const_subscr(basicblock *bb, int n, PyObject *consts, PyObject *const_cache) +{ + cfg_instr *subscr = &bb->b_instr[n]; + assert(subscr->i_opcode == BINARY_SUBSCR); + cfg_instr *arg, *idx; + if (!find_load_const_pair(bb, n-1, &arg, &idx)) { + return SUCCESS; + } + PyObject *o = NULL, *key = NULL; + if ((o = get_const_value(arg->i_opcode, arg->i_oparg, consts)) == NULL + || (key = get_const_value(idx->i_opcode, idx->i_oparg, consts)) == NULL) + { + goto error; + } + PyObject *newconst = PyObject_GetItem(o, key); + Py_DECREF(o); + Py_DECREF(key); + if (newconst == NULL) { + if (PyErr_ExceptionMatches(PyExc_KeyboardInterrupt)) { + return ERROR; + } + PyErr_Clear(); + return SUCCESS; + } + int newopcode, newoparg; + RETURN_IF_ERROR(newop_from_folded(newconst, consts, const_cache, &newopcode, &newoparg)); + INSTR_SET_OP1(subscr, newopcode, newoparg); + INSTR_SET_OP0(arg, NOP); + INSTR_SET_OP0(idx, NOP); + return SUCCESS; +error: + Py_XDECREF(o); + Py_XDECREF(key); + return ERROR; +} + #define VISITED (-1) // Replace an arbitrary run of SWAPs and NOPs with an optimal one that has the @@ -1751,6 +1895,14 @@ optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) } } break; + case BUILD_LIST: + case BUILD_SET: + if (i >= oparg) { + if (optimize_if_const_list_or_set(const_cache, inst-oparg, oparg, consts) < 0) { + goto error; + } + } + break; case POP_JUMP_IF_NOT_NONE: case POP_JUMP_IF_NONE: switch (target->i_opcode) { @@ -1881,6 +2033,9 @@ optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) INSTR_SET_OP0(inst, NOP); } break; + case BINARY_SUBSCR: + RETURN_IF_ERROR(optimize_if_const_subscr(bb, i, consts, const_cache)); + break; } } diff --git a/Python/gc.c b/Python/gc.c index 3fe0b7f814544d..0fb2f03b0406ad 100644 --- a/Python/gc.c +++ b/Python/gc.c @@ -994,7 +994,8 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) /* copy-paste of weakrefobject.c's handle_callback() */ temp = PyObject_CallOneArg(callback, (PyObject *)wr); if (temp == NULL) { - PyErr_WriteUnraisable(callback); + PyErr_FormatUnraisable("Exception ignored on " + "calling weakref callback %R", callback); } else { Py_DECREF(temp); @@ -1779,7 +1780,7 @@ do_gc_callback(GCState *gcstate, const char *phase, "collected", stats->collected, "uncollectable", stats->uncollectable); if (info == NULL) { - PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); + PyErr_FormatUnraisable("Exception ignored while invoking gc 
callbacks"); return; } } @@ -1787,7 +1788,7 @@ do_gc_callback(GCState *gcstate, const char *phase, PyObject *phase_obj = PyUnicode_FromString(phase); if (phase_obj == NULL) { Py_XDECREF(info); - PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); + PyErr_FormatUnraisable("Exception ignored while invoking gc callbacks"); return; } @@ -1797,7 +1798,8 @@ do_gc_callback(GCState *gcstate, const char *phase, Py_INCREF(cb); /* make sure cb doesn't go away */ r = PyObject_Vectorcall(cb, stack, 2, NULL); if (r == NULL) { - PyErr_WriteUnraisable(cb); + PyErr_FormatUnraisable("Exception ignored while " + "calling GC callback %R", cb); } else { Py_DECREF(r); @@ -2086,13 +2088,14 @@ _PyGC_DumpShutdownStats(PyInterpreterState *interp) "gc", NULL, message, PyList_GET_SIZE(gcstate->garbage))) { - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored in GC shutdown"); } if (gcstate->debug & _PyGC_DEBUG_UNCOLLECTABLE) { PyObject *repr = NULL, *bytes = NULL; repr = PyObject_Repr(gcstate->garbage); if (!repr || !(bytes = PyUnicode_EncodeFSDefault(repr))) { - PyErr_WriteUnraisable(gcstate->garbage); + PyErr_FormatUnraisable("Exception ignored in GC shutdown " + "while formatting garbage"); } else { PySys_WriteStderr( @@ -2344,9 +2347,12 @@ PyObject_GC_Del(void *op) #ifdef Py_DEBUG PyObject *exc = PyErr_GetRaisedException(); if (PyErr_WarnExplicitFormat(PyExc_ResourceWarning, "gc", 0, - "gc", NULL, "Object of type %s is not untracked before destruction", - Py_TYPE(op)->tp_name)) { - PyErr_WriteUnraisable(NULL); + "gc", NULL, + "Object of type %s is not untracked " + "before destruction", + Py_TYPE(op)->tp_name)) + { + PyErr_FormatUnraisable("Exception ignored on object deallocation"); } PyErr_SetRaisedException(exc); #endif diff --git a/Python/gc_free_threading.c b/Python/gc_free_threading.c index d1023d9351086f..10c76a67979884 100644 --- a/Python/gc_free_threading.c +++ b/Python/gc_free_threading.c @@ -21,6 +21,9 @@ // enable the "mark alive" pass of GC #define GC_ENABLE_MARK_ALIVE 1 +// if true, enable the use of "prefetch" CPU instructions +#define GC_ENABLE_PREFETCH_INSTRUCTIONS 1 + // include additional roots in "mark alive" pass #define GC_MARK_ALIVE_EXTRA_ROOTS 1 @@ -67,6 +70,10 @@ struct collection_state { PyInterpreterState *interp; GCState *gcstate; _PyGC_Reason reason; + // GH-129236: If we see an active frame without a valid stack pointer, + // we can't collect objects with deferred references because we may not + // see all references. + int skip_deferred_objects; Py_ssize_t collected; Py_ssize_t uncollectable; Py_ssize_t long_lived_total; @@ -413,9 +420,6 @@ gc_visit_heaps(PyInterpreterState *interp, mi_block_visit_fun *visitor, static inline void gc_visit_stackref(_PyStackRef stackref) { - // Note: we MUST check that it is deferred before checking the rest. - // Otherwise we might read into invalid memory due to non-deferred references - // being dead already. if (PyStackRef_IsDeferred(stackref) && !PyStackRef_IsNull(stackref)) { PyObject *obj = PyStackRef_AsPyObjectBorrow(stackref); if (_PyObject_GC_IS_TRACKED(obj) && !gc_is_frozen(obj)) { @@ -426,20 +430,27 @@ gc_visit_stackref(_PyStackRef stackref) // Add 1 to the gc_refs for every deferred reference on each thread's stack. 
 static void
-gc_visit_thread_stacks(PyInterpreterState *interp)
+gc_visit_thread_stacks(PyInterpreterState *interp, struct collection_state *state)
 {
     _Py_FOR_EACH_TSTATE_BEGIN(interp, p) {
         for (_PyInterpreterFrame *f = p->current_frame; f != NULL; f = f->previous) {
-            PyObject *executable = PyStackRef_AsPyObjectBorrow(f->f_executable);
-            if (executable == NULL || !PyCode_Check(executable)) {
+            if (f->owner >= FRAME_OWNED_BY_INTERPRETER) {
+                continue;
+            }
+
+            _PyStackRef *top = f->stackpointer;
+            if (top == NULL) {
+                // GH-129236: The stackpointer may be NULL in cases where
+                // the GC is run during a PyStackRef_CLOSE() call. Skip this
+                // frame and don't collect objects with deferred references.
+                state->skip_deferred_objects = 1;
                 continue;
             }
-            PyCodeObject *co = (PyCodeObject *)executable;
-            int max_stack = co->co_nlocalsplus + co->co_stacksize;
             gc_visit_stackref(f->f_executable);
-            for (int i = 0; i < max_stack; i++) {
-                gc_visit_stackref(f->localsplus[i]);
+            while (top != f->localsplus) {
+                --top;
+                gc_visit_stackref(*top);
             }
         }
     }
@@ -464,13 +475,193 @@ gc_maybe_untrack(PyObject *op)
 }
 
 #ifdef GC_ENABLE_MARK_ALIVE
+
+// prefetch buffer and stack //////////////////////////////////
+
+// The buffer is a circular FIFO queue of PyObject pointers. We take
+// care to not dereference these pointers until they are taken out of
+// the buffer. A prefetch CPU instruction is issued when a pointer is
+// put into the buffer. If all is working as expected, there will be
+// enough time between the enqueue and dequeue so that the needed memory
+// for the object, most importantly ob_gc_bits and ob_type words, will
+// already be in the CPU cache.
+#define BUFFER_SIZE 256
+#define BUFFER_HI 16
+#define BUFFER_LO 8
+#define BUFFER_MASK (BUFFER_SIZE - 1)
+
+// the buffer size must be an exact power of two
+static_assert(BUFFER_SIZE > 0 && !(BUFFER_SIZE & BUFFER_MASK),
+              "Invalid BUFFER_SIZE, must be power of 2");
+// the code below assumes these relationships are true
+static_assert(BUFFER_HI < BUFFER_SIZE &&
+              BUFFER_LO < BUFFER_HI &&
+              BUFFER_LO > 0,
+              "Invalid prefetch buffer level settings.");
+
+// Prefetch instructions will fetch the line of data from memory that
+// contains the byte specified with the source operand to a location in
+// the cache hierarchy specified by a locality hint. The instruction
+// is only a hint and the CPU is free to ignore it. Instructions and
+// behaviour are CPU specific but the definitions of locality hints
+// below are mostly consistent.
+//
+// * T0 (temporal data) prefetch data into all levels of the cache hierarchy.
+//
+// * T1 (temporal data with respect to first level cache) prefetch data into
+//   level 2 cache and higher.
+//
+// * T2 (temporal data with respect to second level cache) prefetch data into
+//   level 3 cache and higher, or an implementation-specific choice.
+//
+// * NTA (non-temporal data with respect to all cache levels) prefetch data into
+//   non-temporal cache structure and into a location close to the processor,
+//   minimizing cache pollution.
+ +#if defined(__GNUC__) || defined(__clang__) + #define PREFETCH_T0(ptr) __builtin_prefetch(ptr, 0, 3) + #define PREFETCH_T1(ptr) __builtin_prefetch(ptr, 0, 2) + #define PREFETCH_T2(ptr) __builtin_prefetch(ptr, 0, 1) + #define PREFETCH_NTA(ptr) __builtin_prefetch(ptr, 0, 0) +#elif defined(_MSC_VER) && (defined(_M_X64) || defined(_M_I86)) && !defined(_M_ARM64EC) + #include + #define PREFETCH_T0(ptr) _mm_prefetch((const char*)(ptr), _MM_HINT_T0) + #define PREFETCH_T1(ptr) _mm_prefetch((const char*)(ptr), _MM_HINT_T1) + #define PREFETCH_T2(ptr) _mm_prefetch((const char*)(ptr), _MM_HINT_T2) + #define PREFETCH_NTA(ptr) _mm_prefetch((const char*)(ptr), _MM_HINT_NTA) +#elif defined (__aarch64__) + #define PREFETCH_T0(ptr) \ + do { __asm__ __volatile__("prfm pldl1keep, %0" ::"Q"(*(ptr))); } while (0) + #define PREFETCH_T1(ptr) \ + do { __asm__ __volatile__("prfm pldl2keep, %0" ::"Q"(*(ptr))); } while (0) + #define PREFETCH_T2(ptr) \ + do { __asm__ __volatile__("prfm pldl3keep, %0" ::"Q"(*(ptr))); } while (0) + #define PREFETCH_NTA(ptr) \ + do { __asm__ __volatile__("prfm pldl1strm, %0" ::"Q"(*(ptr))); } while (0) +#else + #define PREFETCH_T0(ptr) do { (void)(ptr); } while (0) /* disabled */ + #define PREFETCH_T1(ptr) do { (void)(ptr); } while (0) /* disabled */ + #define PREFETCH_T2(ptr) do { (void)(ptr); } while (0) /* disabled */ + #define PREFETCH_NTA(ptr) do { (void)(ptr); } while (0) /* disabled */ +#endif + +#ifdef GC_ENABLE_PREFETCH_INSTRUCTIONS + #define prefetch(ptr) PREFETCH_T1(ptr) +#else + #define prefetch(ptr) +#endif + +// a contigous sequence of PyObject pointers, can contain NULLs +typedef struct { + PyObject **start; + PyObject **end; +} gc_span_t; + +typedef struct { + Py_ssize_t size; + Py_ssize_t capacity; + gc_span_t *stack; +} gc_span_stack_t; + +typedef struct { + unsigned int in; + unsigned int out; + _PyObjectStack stack; + gc_span_stack_t spans; + PyObject *buffer[BUFFER_SIZE]; + bool use_prefetch; +} gc_mark_args_t; + + +// Returns number of entries in buffer +static inline unsigned int +gc_mark_buffer_len(gc_mark_args_t *args) +{ + return args->in - args->out; +} + +// Returns number of free entry slots in buffer +static inline unsigned int +gc_mark_buffer_avail(gc_mark_args_t *args) +{ + return BUFFER_SIZE - gc_mark_buffer_len(args); +} + +static inline bool +gc_mark_buffer_is_empty(gc_mark_args_t *args) +{ + return args->in == args->out; +} + +static inline bool +gc_mark_buffer_is_full(gc_mark_args_t *args) +{ + return gc_mark_buffer_len(args) == BUFFER_SIZE; +} + +static inline PyObject * +gc_mark_buffer_pop(gc_mark_args_t *args) +{ + assert(!gc_mark_buffer_is_empty(args)); + PyObject *op = args->buffer[args->out & BUFFER_MASK]; + args->out++; + return op; +} + +// Called when there is space in the buffer for the object. Issue the +// prefetch instruction and add it to the end of the buffer. +static inline void +gc_mark_buffer_push(PyObject *op, gc_mark_args_t *args) +{ + assert(!gc_mark_buffer_is_full(args)); + prefetch(op); + args->buffer[args->in & BUFFER_MASK] = op; + args->in++; +} + +// Called when we run out of space in the buffer or if the prefetching +// is disabled. The object will be pushed on the gc_mark_args.stack. 
static int -mark_alive_stack_push(PyObject *op, _PyObjectStack *stack) +gc_mark_stack_push(_PyObjectStack *ms, PyObject *op) +{ + if (_PyObjectStack_Push(ms, op) < 0) { + return -1; + } + return 0; +} + +static int +gc_mark_span_push(gc_span_stack_t *ss, PyObject **start, PyObject **end) +{ + if (start == end) { + return 0; + } + if (ss->size >= ss->capacity) { + if (ss->capacity == 0) { + ss->capacity = 256; + } + else { + ss->capacity *= 2; + } + ss->stack = (gc_span_t *)PyMem_Realloc(ss->stack, ss->capacity * sizeof(gc_span_t)); + if (ss->stack == NULL) { + return -1; + } + } + assert(end > start); + ss->stack[ss->size].start = start; + ss->stack[ss->size].end = end; + ss->size++; + return 0; +} + +static int +gc_mark_enqueue_no_buffer(PyObject *op, gc_mark_args_t *args) { if (op == NULL) { return 0; } - if (!_PyObject_GC_IS_TRACKED(op)) { + if (!gc_has_bit(op, _PyGC_BITS_TRACKED)) { return 0; } if (gc_is_alive(op)) { @@ -483,12 +674,68 @@ mark_alive_stack_push(PyObject *op, _PyObjectStack *stack) // Need to call tp_traverse on this object. Add to stack and mark it // alive so we don't traverse it a second time. gc_set_alive(op); - if (_PyObjectStack_Push(stack, op) < 0) { + if (_PyObjectStack_Push(&args->stack, op) < 0) { return -1; } return 0; } +static int +gc_mark_enqueue_buffer(PyObject *op, gc_mark_args_t *args) +{ + assert(op != NULL); + if (!gc_mark_buffer_is_full(args)) { + gc_mark_buffer_push(op, args); + return 0; + } + else { + return gc_mark_stack_push(&args->stack, op); + } +} + +// Called when we find an object that needs to be marked alive (either from a +// root or from calling tp_traverse). +static int +gc_mark_enqueue(PyObject *op, gc_mark_args_t *args) +{ + if (args->use_prefetch) { + return gc_mark_enqueue_buffer(op, args); + } + else { + return gc_mark_enqueue_no_buffer(op, args); + } +} + +// Called when we have a contigous sequence of PyObject pointers, either +// a tuple or list object. This will add the items to the buffer if there +// is space for them all otherwise push a new "span" on the span stack. Using +// spans has the advantage of not creating a deep _PyObjectStack stack when +// dealing with long sequences. Those sequences will be processed in smaller +// chunks by the gc_prime_from_spans() function. 
+static int +gc_mark_enqueue_span(PyObject **item, Py_ssize_t size, gc_mark_args_t *args) +{ + Py_ssize_t used = gc_mark_buffer_len(args); + Py_ssize_t free = BUFFER_SIZE - used; + if (free >= size) { + for (Py_ssize_t i = 0; i < size; i++) { + PyObject *op = item[i]; + if (op == NULL) { + continue; + } + gc_mark_buffer_push(op, args); + } + } + else { + assert(size > 0); + PyObject **end = &item[size]; + if (gc_mark_span_push(&args->spans, item, end) < 0) { + return -1; + } + } + return 0; +} + static bool gc_clear_alive_bits(const mi_heap_t *heap, const mi_heap_area_t *area, void *block, size_t block_size, void *args) @@ -503,28 +750,56 @@ gc_clear_alive_bits(const mi_heap_t *heap, const mi_heap_area_t *area, return true; } +static int +gc_mark_traverse_list(PyObject *self, void *args) +{ + PyListObject *list = (PyListObject *)self; + if (list->ob_item == NULL) { + return 0; + } + if (gc_mark_enqueue_span(list->ob_item, PyList_GET_SIZE(list), args) < 0) { + return -1; + } + return 0; +} + +static int +gc_mark_traverse_tuple(PyObject *self, void *args) +{ + _PyTuple_MaybeUntrack(self); + if (!gc_has_bit(self, _PyGC_BITS_TRACKED)) { + gc_clear_alive(self); + return 0; + } + PyTupleObject *tuple = _PyTuple_CAST(self); + if (gc_mark_enqueue_span(tuple->ob_item, Py_SIZE(tuple), args) < 0) { + return -1; + } + return 0; +} + static void gc_abort_mark_alive(PyInterpreterState *interp, struct collection_state *state, - _PyObjectStack *stack) + gc_mark_args_t *args) { - // We failed to allocate memory for "stack" while doing the "mark - // alive" phase. In that case, free the object stack and make sure - // that no objects have the alive bit set. - _PyObjectStack_Clear(stack); + // We failed to allocate memory while doing the "mark alive" phase. + // In that case, free the memory used for marking state and make + // sure that no objects have the alive bit set. + _PyObjectStack_Clear(&args->stack); + if (args->spans.stack != NULL) { + PyMem_Free(args->spans.stack); + } gc_visit_heaps(interp, &gc_clear_alive_bits, &state->base); } #ifdef GC_MARK_ALIVE_STACKS static int -gc_visit_stackref_mark_alive(_PyObjectStack *stack, _PyStackRef stackref) +gc_visit_stackref_mark_alive(gc_mark_args_t *args, _PyStackRef stackref) { - // Note: we MUST check that it is deferred before checking the rest. - // Otherwise we might read into invalid memory due to non-deferred references - // being dead already. 
- if (PyStackRef_IsDeferred(stackref) && !PyStackRef_IsNull(stackref)) { + if (!PyStackRef_IsNull(stackref)) { PyObject *op = PyStackRef_AsPyObjectBorrow(stackref); - if (mark_alive_stack_push(op, stack) < 0) { + if (gc_mark_enqueue(op, args) < 0) { return -1; } } @@ -532,29 +807,39 @@ gc_visit_stackref_mark_alive(_PyObjectStack *stack, _PyStackRef stackref) } static int -gc_visit_thread_stacks_mark_alive(PyInterpreterState *interp, _PyObjectStack *stack) +gc_visit_thread_stacks_mark_alive(PyInterpreterState *interp, gc_mark_args_t *args) { + int err = 0; _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { for (_PyInterpreterFrame *f = p->current_frame; f != NULL; f = f->previous) { - PyObject *executable = PyStackRef_AsPyObjectBorrow(f->f_executable); - if (executable == NULL || !PyCode_Check(executable)) { + if (f->owner >= FRAME_OWNED_BY_INTERPRETER) { continue; } - PyCodeObject *co = (PyCodeObject *)executable; - int max_stack = co->co_nlocalsplus + co->co_stacksize; - if (gc_visit_stackref_mark_alive(stack, f->f_executable) < 0) { - return -1; + if (f->stackpointer == NULL) { + // GH-129236: The stackpointer may be NULL in cases where + // the GC is run during a PyStackRef_CLOSE() call. Skip this + // frame for now. + continue; + } + + _PyStackRef *top = f->stackpointer; + if (gc_visit_stackref_mark_alive(args, f->f_executable) < 0) { + err = -1; + goto exit; } - for (int i = 0; i < max_stack; i++) { - if (gc_visit_stackref_mark_alive(stack, f->localsplus[i]) < 0) { - return -1; + while (top != f->localsplus) { + --top; + if (gc_visit_stackref_mark_alive(args, *top) < 0) { + err = -1; + goto exit; } } } } +exit: _Py_FOR_EACH_TSTATE_END(interp); - return 0; + return err; } #endif // GC_MARK_ALIVE_STACKS #endif // GC_ENABLE_MARK_ALIVE @@ -789,14 +1074,23 @@ mark_heap_visitor(const mi_heap_t *heap, const mi_heap_area_t *area, return true; } - if (gc_is_alive(op)) { + _PyObject_ASSERT_WITH_MSG(op, gc_get_refs(op) >= 0, + "refcount is too small"); + + if (gc_is_alive(op) || !gc_is_unreachable(op)) { + // Object was already marked as reachable. return true; } - _PyObject_ASSERT_WITH_MSG(op, gc_get_refs(op) >= 0, - "refcount is too small"); + // GH-129236: If we've seen an active frame without a valid stack pointer, + // then we can't collect objects with deferred references because we may + // have missed some reference to the object on the stack. In that case, + // treat the object as reachable even if gc_refs is zero. + struct collection_state *state = (struct collection_state *)args; + int keep_alive = (state->skip_deferred_objects && + _PyObject_HasDeferredRefcount(op)); - if (gc_is_unreachable(op) && gc_get_refs(op) != 0) { + if (gc_get_refs(op) != 0 || keep_alive) { // Object is reachable but currently marked as unreachable. // Mark it as reachable and traverse its pointers to find // any other object that may be directly reachable from it. 
@@ -880,22 +1174,124 @@ static int move_legacy_finalizer_reachable(struct collection_state *state); #ifdef GC_ENABLE_MARK_ALIVE + +static void +gc_prime_from_spans(gc_mark_args_t *args) +{ + Py_ssize_t space = BUFFER_HI - gc_mark_buffer_len(args); + // there should always be at least this amount of space + assert(space <= gc_mark_buffer_avail(args)); + assert(space > 0); + gc_span_t entry = args->spans.stack[--args->spans.size]; + // spans on the stack should always have one or more elements + assert(entry.start < entry.end); + do { + PyObject *op = *entry.start; + entry.start++; + if (op != NULL) { + gc_mark_buffer_push(op, args); + space--; + if (space == 0) { + // buffer is as full as we want and not done with span + gc_mark_span_push(&args->spans, entry.start, entry.end); + return; + } + } + } while (entry.start < entry.end); +} + +static void +gc_prime_buffer(gc_mark_args_t *args) +{ + if (args->spans.size > 0) { + gc_prime_from_spans(args); + } + else { + // When priming, don't fill the buffer too full since that would + // likely cause the stack to be used shortly after when it + // fills. We want to use the buffer as much as possible and so + // we only fill to BUFFER_HI, not BUFFER_SIZE. + Py_ssize_t space = BUFFER_HI - gc_mark_buffer_len(args); + assert(space > 0); + do { + PyObject *op = _PyObjectStack_Pop(&args->stack); + if (op == NULL) { + return; + } + gc_mark_buffer_push(op, args); + space--; + } while (space > 0); + } +} + static int -propagate_alive_bits(_PyObjectStack *stack) +gc_propagate_alive_prefetch(gc_mark_args_t *args) { for (;;) { - PyObject *op = _PyObjectStack_Pop(stack); - if (op == NULL) { - break; + Py_ssize_t buf_used = gc_mark_buffer_len(args); + if (buf_used <= BUFFER_LO) { + // The mark buffer is getting empty. If it's too empty + // then there will not be enough delay between issuing + // the prefetch and when the object is actually accessed. + // Prime the buffer with object pointers from the stack or + // from the spans, if there are any available. + gc_prime_buffer(args); + if (gc_mark_buffer_is_empty(args)) { + return 0; + } } - assert(_PyObject_GC_IS_TRACKED(op)); - assert(gc_is_alive(op)); + PyObject *op = gc_mark_buffer_pop(args); + + if (!gc_has_bit(op, _PyGC_BITS_TRACKED)) { + continue; + } + + if (gc_is_alive(op)) { + continue; // already visited this object + } + + // Need to call tp_traverse on this object. Mark it alive so we + // don't traverse it a second time. 
+ gc_set_alive(op); + traverseproc traverse = Py_TYPE(op)->tp_traverse; - if (traverse(op, (visitproc)&mark_alive_stack_push, stack) < 0) { + if (traverse == PyList_Type.tp_traverse) { + if (gc_mark_traverse_list(op, args) < 0) { + return -1; + } + } + else if (traverse == PyTuple_Type.tp_traverse) { + if (gc_mark_traverse_tuple(op, args) < 0) { + return -1; + } + } + else if (traverse(op, (visitproc)&gc_mark_enqueue_buffer, args) < 0) { return -1; } } - return 0; +} + +static int +gc_propagate_alive(gc_mark_args_t *args) +{ + if (args->use_prefetch) { + return gc_propagate_alive_prefetch(args); + } + else { + for (;;) { + PyObject *op = _PyObjectStack_Pop(&args->stack); + if (op == NULL) { + break; + } + assert(_PyObject_GC_IS_TRACKED(op)); + assert(gc_is_alive(op)); + traverseproc traverse = Py_TYPE(op)->tp_traverse; + if (traverse(op, (visitproc)&gc_mark_enqueue_no_buffer, args) < 0) { + return -1; + } + } + return 0; + } } // Using tp_traverse, mark everything reachable from known root objects @@ -915,48 +1311,64 @@ propagate_alive_bits(_PyObjectStack *stack) // // Returns -1 on failure (out of memory). static int -mark_alive_from_roots(PyInterpreterState *interp, - struct collection_state *state) +gc_mark_alive_from_roots(PyInterpreterState *interp, + struct collection_state *state) { #ifdef GC_DEBUG // Check that all objects don't have alive bit set gc_visit_heaps(interp, &validate_alive_bits, &state->base); #endif - _PyObjectStack stack = { NULL }; - - #define STACK_PUSH(op) \ - if (mark_alive_stack_push(op, &stack) < 0) { \ - gc_abort_mark_alive(interp, state, &stack); \ - return -1; \ + gc_mark_args_t mark_args = { 0 }; + + // Using prefetch instructions is only a win if the set of objects being + // examined by the GC does not fit into CPU caches. Otherwise, using the + // buffer and prefetch instructions is just overhead. Using the long lived + // object count seems a good estimate of if things will fit in the cache. + // On 64-bit platforms, the minimum object size is 32 bytes. A 4MB L2 cache + // would hold about 130k objects. 
+ mark_args.use_prefetch = interp->gc.long_lived_total > 200000; + + #define MARK_ENQUEUE(op) \ + if (op != NULL ) { \ + if (gc_mark_enqueue(op, &mark_args) < 0) { \ + gc_abort_mark_alive(interp, state, &mark_args); \ + return -1; \ + } \ } - STACK_PUSH(interp->sysdict); + MARK_ENQUEUE(interp->sysdict); #ifdef GC_MARK_ALIVE_EXTRA_ROOTS - STACK_PUSH(interp->builtins); - STACK_PUSH(interp->dict); + MARK_ENQUEUE(interp->builtins); + MARK_ENQUEUE(interp->dict); struct types_state *types = &interp->types; for (int i = 0; i < _Py_MAX_MANAGED_STATIC_BUILTIN_TYPES; i++) { - STACK_PUSH(types->builtins.initialized[i].tp_dict); - STACK_PUSH(types->builtins.initialized[i].tp_subclasses); + MARK_ENQUEUE(types->builtins.initialized[i].tp_dict); + MARK_ENQUEUE(types->builtins.initialized[i].tp_subclasses); } for (int i = 0; i < _Py_MAX_MANAGED_STATIC_EXT_TYPES; i++) { - STACK_PUSH(types->for_extensions.initialized[i].tp_dict); - STACK_PUSH(types->for_extensions.initialized[i].tp_subclasses); + MARK_ENQUEUE(types->for_extensions.initialized[i].tp_dict); + MARK_ENQUEUE(types->for_extensions.initialized[i].tp_subclasses); } #endif #ifdef GC_MARK_ALIVE_STACKS - if (gc_visit_thread_stacks_mark_alive(interp, &stack) < 0) { - gc_abort_mark_alive(interp, state, &stack); + if (gc_visit_thread_stacks_mark_alive(interp, &mark_args) < 0) { + gc_abort_mark_alive(interp, state, &mark_args); return -1; } #endif - #undef STACK_PUSH + #undef MARK_ENQUEUE // Use tp_traverse to find everything reachable from roots. - if (propagate_alive_bits(&stack) < 0) { - gc_abort_mark_alive(interp, state, &stack); + if (gc_propagate_alive(&mark_args) < 0) { + gc_abort_mark_alive(interp, state, &mark_args); return -1; } + assert(mark_args.spans.size == 0); + if (mark_args.spans.stack != NULL) { + PyMem_Free(mark_args.spans.stack); + } + assert(mark_args.stack.head == NULL); + return 0; } #endif // GC_ENABLE_MARK_ALIVE @@ -985,7 +1397,7 @@ deduce_unreachable_heap(PyInterpreterState *interp, #endif // Visit the thread stacks to account for any deferred references. - gc_visit_thread_stacks(interp); + gc_visit_thread_stacks(interp, state); // Transitively mark reachable objects by clearing the // _PyGC_BITS_UNREACHABLE flag. 
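As a quick sanity check on the heuristic above (illustrative arithmetic only, using the 4 MB L2 figure from the comment and the 200000 threshold from the assignment): 4 MiB / 32 bytes is 131072 objects, roughly the quoted 130k, while 200000 objects at the 32-byte minimum already occupy about 6.1 MiB, so prefetching is only enabled once the long-lived set plausibly no longer fits in L2. A small standalone program, not part of the patch, reproduces the numbers:

/* Illustrative arithmetic for the use_prefetch threshold; assumes a 4 MiB L2. */
#include <stdio.h>

int main(void)
{
    const long min_obj_bytes = 32;           /* minimum object size on 64-bit */
    const long l2_bytes = 4L * 1024 * 1024;  /* assumed 4 MiB L2 cache */
    const long threshold = 200000;           /* long_lived_total cutoff above */

    printf("objects fitting in L2: %ld\n", l2_bytes / min_obj_bytes);   /* 131072 */
    printf("bytes at threshold:    %ld\n", threshold * min_obj_bytes);  /* 6400000 (~6.1 MiB) */
    return 0;
}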
@@ -1104,7 +1516,8 @@ call_weakref_callbacks(struct collection_state *state) /* copy-paste of weakrefobject.c's handle_callback() */ PyObject *temp = PyObject_CallOneArg(callback, (PyObject *)wr); if (temp == NULL) { - PyErr_WriteUnraisable(callback); + PyErr_FormatUnraisable("Exception ignored while " + "calling weakref callback %R", callback); } else { Py_DECREF(temp); @@ -1403,7 +1816,8 @@ invoke_gc_callback(PyThreadState *tstate, const char *phase, "collected", collected, "uncollectable", uncollectable); if (info == NULL) { - PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); + PyErr_FormatUnraisable("Exception ignored while " + "invoking gc callbacks"); return; } } @@ -1411,7 +1825,8 @@ invoke_gc_callback(PyThreadState *tstate, const char *phase, PyObject *phase_obj = PyUnicode_FromString(phase); if (phase_obj == NULL) { Py_XDECREF(info); - PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); + PyErr_FormatUnraisable("Exception ignored while " + "invoking gc callbacks"); return; } @@ -1421,7 +1836,8 @@ invoke_gc_callback(PyThreadState *tstate, const char *phase, Py_INCREF(cb); /* make sure cb doesn't go away */ r = PyObject_Vectorcall(cb, stack, 2, NULL); if (r == NULL) { - PyErr_WriteUnraisable(cb); + PyErr_FormatUnraisable("Exception ignored while " + "calling GC callback %R", cb); } else { Py_DECREF(r); @@ -1531,7 +1947,7 @@ gc_collect_internal(PyInterpreterState *interp, struct collection_state *state, if (!state->gcstate->freeze_active) { // Mark objects reachable from known roots as "alive". These will // be ignored for rest of the GC pass. - int err = mark_alive_from_roots(interp, state); + int err = gc_mark_alive_from_roots(interp, state); if (err < 0) { _PyEval_StartTheWorld(interp); PyErr_NoMemory(); @@ -2003,13 +2419,14 @@ _PyGC_DumpShutdownStats(PyInterpreterState *interp) "gc", NULL, message, PyList_GET_SIZE(gcstate->garbage))) { - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored in GC shutdown"); } if (gcstate->debug & _PyGC_DEBUG_UNCOLLECTABLE) { PyObject *repr = NULL, *bytes = NULL; repr = PyObject_Repr(gcstate->garbage); if (!repr || !(bytes = PyUnicode_EncodeFSDefault(repr))) { - PyErr_WriteUnraisable(gcstate->garbage); + PyErr_FormatUnraisable("Exception ignored in GC shutdown " + "while formatting garbage"); } else { PySys_WriteStderr( @@ -2217,9 +2634,12 @@ PyObject_GC_Del(void *op) #ifdef Py_DEBUG PyObject *exc = PyErr_GetRaisedException(); if (PyErr_WarnExplicitFormat(PyExc_ResourceWarning, "gc", 0, - "gc", NULL, "Object of type %s is not untracked before destruction", - ((PyObject*)op)->ob_type->tp_name)) { - PyErr_WriteUnraisable(NULL); + "gc", NULL, + "Object of type %s is not untracked " + "before destruction", + Py_TYPE(op)->tp_name)) + { + PyErr_FormatUnraisable("Exception ignored on object deallocation"); } PyErr_SetRaisedException(exc); #endif diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index ad044e62a38b1c..5820147ff712a9 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -57,7 +57,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(lhs); PyStackRef_CLOSE(rhs); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -95,7 +97,9 @@ ((PyFloatObject *)left_o)->ob_fval + ((PyFloatObject *)right_o)->ob_fval; PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres); - if (res_o == NULL) goto pop_2_error; + if (res_o 
== NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -132,7 +136,9 @@ PyObject *res_o = _PyLong_Add((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -169,7 +175,9 @@ PyObject *res_o = PyUnicode_Concat(left_o, right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -180,6 +188,7 @@ TARGET(BINARY_OP_EXTEND) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 6; INSTRUCTION_STATS(BINARY_OP_EXTEND); static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); @@ -274,7 +283,9 @@ PyUnicode_Append(&temp, right_o); *target_local = PyStackRef_FromPyObjectSteal(temp); PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc); - if (PyStackRef_IsNull(*target_local)) goto pop_2_error; + if (PyStackRef_IsNull(*target_local)) { + goto pop_2_error; + } #if TIER_ONE // The STORE_FAST is already done. This is done here in tier one, // and during trace projection in tier two: @@ -316,7 +327,9 @@ ((PyFloatObject *)left_o)->ob_fval * ((PyFloatObject *)right_o)->ob_fval; PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -353,7 +366,9 @@ PyObject *res_o = _PyLong_Multiply((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -391,7 +406,9 @@ ((PyFloatObject *)left_o)->ob_fval - ((PyFloatObject *)right_o)->ob_fval; PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -428,7 +445,9 @@ PyObject *res_o = _PyLong_Subtract((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -477,12 +496,18 @@ stack_pointer += 2; assert(WITHIN_STACK_BOUNDS()); } + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(container); - if (res_o == NULL) goto pop_3_error; + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } - stack_pointer[-3] = res; - stack_pointer += -2; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -525,7 +550,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(container); 
PyStackRef_CLOSE(sub); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); } stack_pointer[-2] = res; @@ -560,7 +587,9 @@ } PyStackRef_CLOSE(dict_st); PyStackRef_CLOSE(sub_st); - if (rc <= 0) goto pop_2_error; + if (rc <= 0) { + goto pop_2_error; + } // not found or error res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; @@ -660,10 +689,14 @@ Py_INCREF(res_o); #endif PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(list_st); + stack_pointer = _PyFrame_GetStackPointer(frame); res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2] = res; - stack_pointer += -1; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -692,10 +725,14 @@ STAT_INC(BINARY_SUBSCR, hit); PyObject *res_o = (PyObject*)&_Py_SINGLETON(strings).ascii[c]; PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(str_st); + stack_pointer = _PyFrame_GetStackPointer(frame); res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2] = res; - stack_pointer += -1; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -724,10 +761,14 @@ assert(res_o != NULL); Py_INCREF(res_o); PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(tuple_st); + stack_pointer = _PyFrame_GetStackPointer(frame); res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2] = res; - stack_pointer += -1; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -762,11 +803,9 @@ for (int _i = oparg*2; --_i >= 0;) { PyStackRef_CLOSE(values[_i]); } - { - stack_pointer += -oparg*2; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -oparg*2; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *map_o = _PyDict_FromItems( @@ -804,11 +843,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(values[_i]); } - { - stack_pointer += -oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } int err = 0; for (int i = 0; i < oparg; i++) { @@ -874,11 +911,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(pieces[_i]); } - { - stack_pointer += -oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } PyObject *str_o = _PyUnicode_JoinArray(&_Py_STR(empty), pieces_o, oparg); STACKREFS_TO_PYOBJECTS_CLEANUP(pieces_o); @@ -931,6 +966,7 @@ PREDICTED_CALL:; _Py_CODEUNIT* const this_instr = next_instr - 4; (void)this_instr; + opcode = CALL; _PyStackRef *callable; _PyStackRef *self_or_null; _PyStackRef *args; @@ -968,7 +1004,9 @@ PyObject *method = ((PyMethodObject *)callable_o)->im_func; _PyStackRef temp = callable[0]; func[0] = PyStackRef_FromPyObjectNew(method); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); } } // _DO_CALL @@ -1016,11 +1054,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - { - stack_pointer += -2 - oparg; - 
assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PyObject_Vectorcall( @@ -1046,7 +1082,9 @@ frame, this_instr, callable_o, arg); stack_pointer = _PyFrame_GetStackPointer(frame); if (err < 0) { + _PyFrame_SetStackPointer(frame, stack_pointer); Py_CLEAR(res_o); + stack_pointer = _PyFrame_GetStackPointer(frame); } } } @@ -1074,7 +1112,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -1087,6 +1127,7 @@ TARGET(CALL_ALLOC_AND_ENTER_INIT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 4; INSTRUCTION_STATS(CALL_ALLOC_AND_ENTER_INIT); static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size"); @@ -1132,7 +1173,9 @@ self[0] = PyStackRef_FromPyObjectSteal(self_o); _PyStackRef temp = callable[0]; init[0] = PyStackRef_FromPyObjectNew(init_func); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); } // _CREATE_INIT_FRAME { @@ -1185,13 +1228,12 @@ TARGET(CALL_BOUND_METHOD_EXACT_ARGS) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 4; INSTRUCTION_STATS(CALL_BOUND_METHOD_EXACT_ARGS); static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size"); _PyStackRef *callable; _PyStackRef *null; - _PyStackRef *func; - _PyStackRef *self; _PyStackRef *self_or_null; _PyStackRef *args; _PyInterpreterFrame *new_frame; @@ -1209,19 +1251,20 @@ } // _INIT_CALL_BOUND_METHOD_EXACT_ARGS { - func = &stack_pointer[-2 - oparg]; - self = &stack_pointer[-1 - oparg]; + self_or_null = null; + assert(PyStackRef_IsNull(self_or_null[0])); PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); STAT_INC(CALL, hit); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); _PyStackRef temp = callable[0]; - func[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); } // flush // _CHECK_FUNCTION_VERSION { - callable = &stack_pointer[-2 - oparg]; uint32_t func_version = read_u32(&this_instr[2].cache); PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); DEOPT_IF(!PyFunction_Check(callable_o), CALL); @@ -1230,7 +1273,6 @@ } // _CHECK_FUNCTION_EXACT_ARGS { - self_or_null = &stack_pointer[-1 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); assert(PyFunction_Check(callable_o)); PyFunctionObject *func = (PyFunctionObject *)callable_o; @@ -1288,13 +1330,12 @@ TARGET(CALL_BOUND_METHOD_GENERAL) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 4; INSTRUCTION_STATS(CALL_BOUND_METHOD_GENERAL); static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size"); _PyStackRef *callable; _PyStackRef *null; - _PyStackRef *method; - _PyStackRef *self; _PyStackRef *self_or_null; _PyStackRef *args; _PyInterpreterFrame *new_frame; @@ -1317,23 +1358,22 @@ } // _EXPAND_METHOD { 
- method = &stack_pointer[-2 - oparg]; - self = &stack_pointer[-1 - oparg]; + self_or_null = null; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); - assert(PyStackRef_IsNull(null[0])); + assert(PyStackRef_IsNull(self_or_null[0])); assert(Py_TYPE(callable_o) == &PyMethod_Type); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); _PyStackRef temp = callable[0]; - method[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); - assert(PyStackRef_FunctionCheck(method[0])); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + assert(PyStackRef_FunctionCheck(callable[0])); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); } // flush // _PY_FRAME_GENERAL { args = &stack_pointer[-oparg]; - self_or_null = &stack_pointer[-1 - oparg]; - callable = &stack_pointer[-2 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); // oparg counts all of the args, but *not* self: int total_args = oparg; @@ -1419,11 +1459,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = tp->tp_vectorcall((PyObject *)tp, args_o, total_args, NULL); @@ -1452,7 +1490,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -1499,11 +1539,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = ((PyCFunctionFast)(void(*)(void))cfunc)( @@ -1536,7 +1574,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -1587,11 +1627,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = cfunc(PyCFunction_GET_SELF(callable_o), args_o, total_args, NULL); @@ -1621,7 +1659,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -1669,11 +1709,15 @@ stack_pointer = _PyFrame_GetStackPointer(frame); _Py_LeaveRecursiveCallTstate(tstate); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(arg); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, 
stack_pointer); PyStackRef_CLOSE(callable[0]); + stack_pointer = _PyFrame_GetStackPointer(frame); if (res_o == NULL) { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); goto error; } res = PyStackRef_FromPyObjectSteal(res_o); @@ -1683,36 +1727,38 @@ _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); QSBR_QUIESCENT_STATE(tstate); if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { - stack_pointer[-2 - oparg] = res; - stack_pointer += -1 - oparg; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; - stack_pointer += 1 + oparg; + if (err != 0) { + goto error; + } + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); } } - stack_pointer[-2 - oparg] = res; - stack_pointer += -1 - oparg; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } TARGET(CALL_FUNCTION_EX) { - frame->instr_ptr = next_instr; + _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 1; INSTRUCTION_STATS(CALL_FUNCTION_EX); - PREDICTED_CALL_FUNCTION_EX:; - _Py_CODEUNIT* const this_instr = next_instr - 1; - (void)this_instr; + opcode = CALL_FUNCTION_EX; _PyStackRef func; _PyStackRef callargs; _PyStackRef kwargs_in; _PyStackRef tuple; _PyStackRef kwargs_out; _PyStackRef func_st; + _PyStackRef null; _PyStackRef callargs_st; _PyStackRef kwargs_st; _PyStackRef result; @@ -1740,15 +1786,23 @@ goto error; } kwargs_out = kwargs_in; + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callargs); + stack_pointer = _PyFrame_GetStackPointer(frame); tuple = PyStackRef_FromPyObjectSteal(tuple_o); + stack_pointer += 2; + assert(WITHIN_STACK_BOUNDS()); } } // _DO_CALL_FUNCTION_EX { kwargs_st = kwargs_out; callargs_st = tuple; + null = stack_pointer[-3]; func_st = func; + (void)null; PyObject *func = PyStackRef_AsPyObjectBorrow(func_st); // DICT_MERGE is called before this opcode if there are kwargs. // It converts all dict subtypes in kwargs into regular dicts. @@ -1790,7 +1844,9 @@ frame, this_instr, func, arg); stack_pointer = _PyFrame_GetStackPointer(frame); if (err < 0) { + _PyFrame_SetStackPointer(frame, stack_pointer); Py_CLEAR(result_o); + stack_pointer = _PyFrame_GetStackPointer(frame); } } } @@ -1806,7 +1862,7 @@ Py_ssize_t nargs = PyTuple_GET_SIZE(callargs); int code_flags = ((PyCodeObject *)PyFunction_GET_CODE(func))->co_flags; PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(func)); - stack_pointer += -3; + stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_Ex( @@ -1814,7 +1870,7 @@ nargs, callargs, kwargs, frame); stack_pointer = _PyFrame_GetStackPointer(frame); // Need to sync the stack since we exit with DISPATCH_INLINED. 
- stack_pointer += -1; + stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); if (new_frame == NULL) { goto error; @@ -1833,12 +1889,24 @@ result_o = PyObject_Call(func, callargs, kwargs); stack_pointer = _PyFrame_GetStackPointer(frame); } + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_XCLOSE(kwargs_st); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callargs_st); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(func_st); - if (result_o == NULL) goto pop_4_error; + stack_pointer = _PyFrame_GetStackPointer(frame); + if (result_o == NULL) { + goto error; + } result = PyStackRef_FromPyObjectSteal(result_o); } // _CHECK_PERIODIC @@ -1846,19 +1914,21 @@ _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); QSBR_QUIESCENT_STATE(tstate); if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { - stack_pointer[-4] = result; - stack_pointer += -3; + stack_pointer[0] = result; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; - stack_pointer += 3; + if (err != 0) { + goto error; + } + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); } } - stack_pointer[-4] = result; - stack_pointer += -3; + stack_pointer[0] = result; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -1875,7 +1945,9 @@ PyObject *res_o = _PyIntrinsics_UnaryFunctions[oparg].func(tstate, PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); - if (res_o == NULL) goto pop_1_error; + if (res_o == NULL) { + goto pop_1_error; + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-1] = res; DISPATCH(); @@ -1898,7 +1970,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value2_st); PyStackRef_CLOSE(value1_st); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -1960,6 +2034,7 @@ PREDICTED_CALL_KW:; _Py_CODEUNIT* const this_instr = next_instr - 4; (void)this_instr; + opcode = CALL_KW; _PyStackRef *callable; _PyStackRef *self_or_null; _PyStackRef *args; @@ -2001,7 +2076,9 @@ PyObject *method = ((PyMethodObject *)callable_o)->im_func; _PyStackRef temp = callable[0]; func[0] = PyStackRef_FromPyObjectNew(method); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); } kwnames_out = kwnames_in; } @@ -2035,9 +2112,13 @@ arguments, positional_args, kwnames_o, frame ); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(kwnames); + stack_pointer = _PyFrame_GetStackPointer(frame); // Sync stack explicitly since we leave using DISPATCH_INLINED(). - stack_pointer += -3 - oparg; + stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); // The frame has stolen all the arguments from the stack, // so there is no need to clean them up. 
@@ -2057,11 +2138,9 @@ PyStackRef_CLOSE(args[_i]); } PyStackRef_CLOSE(kwnames); - { - stack_pointer += -3 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -3 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } stack_pointer[-1] = kwnames; _PyFrame_SetStackPointer(frame, stack_pointer); @@ -2088,7 +2167,9 @@ frame, this_instr, callable_o, arg); stack_pointer = _PyFrame_GetStackPointer(frame); if (err < 0) { + _PyFrame_SetStackPointer(frame, stack_pointer); Py_CLEAR(res_o); + stack_pointer = _PyFrame_GetStackPointer(frame); } } } @@ -2113,14 +2194,13 @@ TARGET(CALL_KW_BOUND_METHOD) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 4; INSTRUCTION_STATS(CALL_KW_BOUND_METHOD); static_assert(INLINE_CACHE_ENTRIES_CALL_KW == 3, "incorrect cache size"); _PyStackRef *callable; _PyStackRef *null; _PyStackRef kwnames; - _PyStackRef *method; - _PyStackRef *self; _PyStackRef *self_or_null; _PyStackRef *args; _PyInterpreterFrame *new_frame; @@ -2143,24 +2223,23 @@ } // _EXPAND_METHOD_KW { - method = &stack_pointer[-3 - oparg]; - self = &stack_pointer[-2 - oparg]; + self_or_null = null; + assert(PyStackRef_IsNull(self_or_null[0])); _PyStackRef callable_s = callable[0]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable_s); - assert(PyStackRef_IsNull(null[0])); assert(Py_TYPE(callable_o) == &PyMethod_Type); - self[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); - method[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); - assert(PyStackRef_FunctionCheck(method[0])); + self_or_null[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self); + callable[0] = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func); + assert(PyStackRef_FunctionCheck(callable[0])); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callable_s); + stack_pointer = _PyFrame_GetStackPointer(frame); } // flush // _PY_FRAME_KW { kwnames = stack_pointer[-1]; args = &stack_pointer[-1 - oparg]; - self_or_null = &stack_pointer[-2 - oparg]; - callable = &stack_pointer[-3 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); // oparg counts all of the args, but *not* self: int total_args = oparg; @@ -2180,10 +2259,14 @@ arguments, positional_args, kwnames_o, frame ); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(kwnames); + stack_pointer = _PyFrame_GetStackPointer(frame); // The frame has stolen all the arguments from the stack, // so there is no need to clean them up. 
- stack_pointer += -3 - oparg; + stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); if (temp == NULL) { goto error; @@ -2221,6 +2304,7 @@ frame->instr_ptr = next_instr; next_instr += 4; INSTRUCTION_STATS(CALL_KW_NON_PY); + opcode = CALL_KW_NON_PY; static_assert(INLINE_CACHE_ENTRIES_CALL_KW == 3, "incorrect cache size"); _PyStackRef *callable; _PyStackRef kwnames; @@ -2260,11 +2344,9 @@ PyStackRef_CLOSE(args[_i]); } PyStackRef_CLOSE(kwnames); - { - stack_pointer += -3 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -3 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } PyObject *kwnames_o = PyStackRef_AsPyObjectBorrow(kwnames); int positional_args = total_args - (int)PyTuple_GET_SIZE(kwnames_o); @@ -2274,7 +2356,11 @@ positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET, kwnames_o); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(kwnames); + stack_pointer = _PyFrame_GetStackPointer(frame); STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); PyStackRef_CLOSE(callable[0]); @@ -2283,7 +2369,7 @@ PyStackRef_CLOSE(args[_i]); } if (res_o == NULL) { - stack_pointer += -3 - oparg; + stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); goto error; } @@ -2294,25 +2380,28 @@ _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); QSBR_QUIESCENT_STATE(tstate); if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { - stack_pointer[-3 - oparg] = res; - stack_pointer += -2 - oparg; + stack_pointer[-2 - oparg] = res; + stack_pointer += -1 - oparg; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; - stack_pointer += 2 + oparg; + if (err != 0) { + goto error; + } + stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } } - stack_pointer[-3 - oparg] = res; - stack_pointer += -2 - oparg; + stack_pointer[-2 - oparg] = res; + stack_pointer += -1 - oparg; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } TARGET(CALL_KW_PY) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 4; INSTRUCTION_STATS(CALL_KW_PY); static_assert(INLINE_CACHE_ENTRIES_CALL_KW == 3, "incorrect cache size"); @@ -2359,10 +2448,14 @@ arguments, positional_args, kwnames_o, frame ); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(kwnames); + stack_pointer = _PyFrame_GetStackPointer(frame); // The frame has stolen all the arguments from the stack, // so there is no need to clean them up. 
- stack_pointer += -3 - oparg; + stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); if (temp == NULL) { goto error; @@ -2434,11 +2527,17 @@ if (res_o == NULL) { GOTO_ERROR(error); } + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(arg_stackref); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callable[0]); + stack_pointer = _PyFrame_GetStackPointer(frame); res = PyStackRef_FromPyObjectSteal(res_o); - stack_pointer[-2 - oparg] = res; - stack_pointer += -1 - oparg; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -2467,17 +2566,25 @@ STAT_INC(CALL, hit); int err = _PyList_AppendTakeRef((PyListObject *)self_o, PyStackRef_AsPyObjectSteal(arg)); UNLOCK_OBJECT(self_o); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(self); - PyStackRef_CLOSE(callable); - if (err) goto pop_3_error; - #if TIER_ONE + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(callable); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err) { + goto error; + } + #if TIER_ONE // Skip the following POP_TOP. This is done here in tier one, and // during trace projection in tier two: assert(next_instr->op.code == POP_TOP); SKIP_OVER(1); #endif - stack_pointer += -3; - assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -2520,11 +2627,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyCFunctionFast cfunc = @@ -2556,7 +2661,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -2606,11 +2713,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyCFunctionFastWithKeywords cfunc = @@ -2642,7 +2747,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -2694,11 +2801,15 @@ stack_pointer = _PyFrame_GetStackPointer(frame); _Py_LeaveRecursiveCallTstate(tstate); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(self_stackref); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(callable[0]); + stack_pointer = _PyFrame_GetStackPointer(frame); if (res_o == NULL) { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); goto error; } res = PyStackRef_FromPyObjectSteal(res_o); @@ -2708,19 +2819,21 @@ _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); 
QSBR_QUIESCENT_STATE(tstate); if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { - stack_pointer[-2 - oparg] = res; - stack_pointer += -1 - oparg; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; - stack_pointer += 1 + oparg; + if (err != 0) { + goto error; + } + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); } } - stack_pointer[-2 - oparg] = res; - stack_pointer += -1 - oparg; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -2743,8 +2856,9 @@ callable = &stack_pointer[-2 - oparg]; PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); int total_args = oparg; + _PyStackRef *arguments = args; if (!PyStackRef_IsNull(self_or_null[0])) { - args--; + arguments--; total_args++; } PyMethodDescrObject *method = (PyMethodDescrObject *)callable_o; @@ -2754,8 +2868,8 @@ DEOPT_IF(meth->ml_flags != METH_O, CALL); // CPython promises to check all non-vectorcall function calls. DEOPT_IF(tstate->c_recursion_remaining <= 0, CALL); - _PyStackRef arg_stackref = args[1]; - _PyStackRef self_stackref = args[0]; + _PyStackRef arg_stackref = arguments[1]; + _PyStackRef self_stackref = arguments[0]; DEOPT_IF(!Py_IS_TYPE(PyStackRef_AsPyObjectBorrow(self_stackref), method->d_common.d_type), CALL); STAT_INC(CALL, hit); @@ -2768,9 +2882,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); _Py_LeaveRecursiveCallTstate(tstate); assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); - PyStackRef_CLOSE(self_stackref); - PyStackRef_CLOSE(arg_stackref); PyStackRef_CLOSE(callable[0]); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } if (res_o == NULL) { stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); @@ -2789,7 +2905,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -2804,6 +2922,7 @@ frame->instr_ptr = next_instr; next_instr += 4; INSTRUCTION_STATS(CALL_NON_PY_GENERAL); + opcode = CALL_NON_PY_GENERAL; static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size"); _PyStackRef *callable; _PyStackRef *self_or_null; @@ -2840,11 +2959,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PyObject_Vectorcall( @@ -2877,7 +2994,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -2890,6 +3009,7 @@ TARGET(CALL_PY_EXACT_ARGS) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 4; INSTRUCTION_STATS(CALL_PY_EXACT_ARGS); static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size"); @@ -2971,6 +3091,7 @@ TARGET(CALL_PY_GENERAL) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 4; 
INSTRUCTION_STATS(CALL_PY_GENERAL); static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size"); @@ -3072,8 +3193,14 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PyObject_Str(arg_o); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(arg); - if (res_o == NULL) goto pop_3_error; + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } // _CHECK_PERIODIC @@ -3081,19 +3208,21 @@ _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); QSBR_QUIESCENT_STATE(tstate); if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { - stack_pointer[-3] = res; - stack_pointer += -2; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; - stack_pointer += 2; + if (err != 0) { + goto error; + } + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); } } - stack_pointer[-3] = res; - stack_pointer += -2; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -3123,8 +3252,14 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PySequence_Tuple(arg_o); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(arg); - if (res_o == NULL) goto pop_3_error; + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } // _CHECK_PERIODIC @@ -3132,19 +3267,21 @@ _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); QSBR_QUIESCENT_STATE(tstate); if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { - stack_pointer[-3] = res; - stack_pointer += -2; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; - stack_pointer += 2; + if (err != 0) { + goto error; + } + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); } } - stack_pointer[-3] = res; - stack_pointer += -2; + stack_pointer[0] = res; + stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -3170,10 +3307,12 @@ DEOPT_IF(callable_o != (PyObject *)&PyType_Type, CALL); STAT_INC(CALL, hit); res = PyStackRef_FromPyObjectSteal(Py_NewRef(Py_TYPE(arg_o))); - PyStackRef_CLOSE(arg); stack_pointer[-3] = res; stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(arg); + stack_pointer = _PyFrame_GetStackPointer(frame); DISPATCH(); } @@ -3205,9 +3344,13 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(exc_value_st); PyStackRef_CLOSE(match_type_st); - if (res < 0) goto pop_2_error; + if (res < 0) { + goto pop_2_error; + } assert((match_o == NULL) == (rest_o == NULL)); - if (match_o == NULL) goto pop_2_error; + if (match_o == NULL) { + goto pop_2_error; + } if (!Py_IsNone(match_o)) { stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); @@ -3283,6 +3426,7 @@ _PyErr_SetRaisedException(tstate, Py_NewRef(exc_value)); monitor_reraise(tstate, frame, this_instr); stack_pointer = _PyFrame_GetStackPointer(frame); + 
_PyFrame_SetStackPointer(frame, stack_pointer); goto exception_unwind; } stack_pointer[-3] = none; @@ -3318,7 +3462,7 @@ } OPCODE_DEFERRED_INC(COMPARE_OP); ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); - #endif /* ENABLE_SPECIALIZATION */ + #endif /* ENABLE_SPECIALIZATION_FT */ } // _COMPARE_OP { @@ -3330,7 +3474,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } if (oparg & 16) { stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); @@ -3338,7 +3484,9 @@ int res_bool = PyObject_IsTrue(res_o); Py_DECREF(res_o); stack_pointer = _PyFrame_GetStackPointer(frame); - if (res_bool < 0) goto error; + if (res_bool < 0) { + goto error; + } res = res_bool ? PyStackRef_True : PyStackRef_False; } else { @@ -3509,7 +3657,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); - if (res < 0) goto pop_2_error; + if (res < 0) { + goto pop_2_error; + } b = (res ^ oparg) ? PyStackRef_True : PyStackRef_False; } stack_pointer[-2] = b; @@ -3538,7 +3688,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); - if (res < 0) goto pop_2_error; + if (res < 0) { + goto pop_2_error; + } b = (res ^ oparg) ? PyStackRef_True : PyStackRef_False; stack_pointer[-2] = b; stack_pointer += -1; @@ -3567,7 +3719,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(left); PyStackRef_CLOSE(right); - if (res < 0) goto pop_2_error; + if (res < 0) { + goto pop_2_error; + } b = (res ^ oparg) ? PyStackRef_True : PyStackRef_False; stack_pointer[-2] = b; stack_pointer += -1; @@ -3588,10 +3742,18 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *result_o = conv_fn(PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(value); - if (result_o == NULL) goto pop_1_error; + stack_pointer = _PyFrame_GetStackPointer(frame); + if (result_o == NULL) { + goto error; + } result = PyStackRef_FromPyObjectSteal(result_o); - stack_pointer[-1] = result; + stack_pointer[0] = result; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -3639,7 +3801,9 @@ int err = PyObject_DelAttr(PyStackRef_AsPyObjectBorrow(owner), name); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(owner); - if (err) goto pop_1_error; + if (err) { + goto pop_1_error; + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -3679,7 +3843,11 @@ stack_pointer = _PyFrame_GetStackPointer(frame); goto error; } - SETLOCAL(oparg, PyStackRef_NULL); + _PyStackRef tmp = GETLOCAL(oparg); + GETLOCAL(oparg) = PyStackRef_NULL; + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(tmp); + stack_pointer = _PyFrame_GetStackPointer(frame); DISPATCH(); } @@ -3749,7 +3917,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(container); PyStackRef_CLOSE(sub); - if (err) goto pop_2_error; + if (err) { + goto pop_2_error; + } stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -3841,6 +4011,7 @@ _PyErr_SetRaisedException(tstate, exc); monitor_reraise(tstate, frame, this_instr); stack_pointer = _PyFrame_GetStackPointer(frame); + _PyFrame_SetStackPointer(frame, stack_pointer); goto exception_unwind; } stack_pointer += -2; @@ -3858,9 +4029,11 @@ * This has the benign side effect that if value is * 
finalized it will see the location as the FOR_ITER's. */ - PyStackRef_CLOSE(value); stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(value); + stack_pointer = _PyFrame_GetStackPointer(frame); DISPATCH(); } @@ -3887,6 +4060,7 @@ (void)this_instr; next_instr += 1; INSTRUCTION_STATS(ENTER_EXECUTOR); + opcode = ENTER_EXECUTOR; #ifdef _Py_TIER2 PyCodeObject *code = _PyFrame_GetCode(frame); _PyExecutorObject *executor = code->co_executors->executors[oparg & 255]; @@ -3939,6 +4113,7 @@ frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(EXTENDED_ARG); + opcode = EXTENDED_ARG; assert(oparg); opcode = next_instr->op.code; oparg = oparg << 8 | next_instr->op.arg; @@ -3960,14 +4135,24 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PyObject_Format(value_o, NULL); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(value); - if (res_o == NULL) goto pop_1_error; + stack_pointer = _PyFrame_GetStackPointer(frame); + if (res_o == NULL) { + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); } else { res = value; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); } - stack_pointer[-1] = res; + stack_pointer[0] = res; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -3985,7 +4170,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); PyStackRef_CLOSE(fmt_spec); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -4188,7 +4375,9 @@ r->start = value + r->step; r->len--; PyObject *res = PyLong_FromLong(value); - if (res == NULL) goto error; + if (res == NULL) { + goto error; + } next = PyStackRef_FromPyObjectSteal(res); } stack_pointer[0] = next; @@ -4273,7 +4462,9 @@ iter_o = (*getter)(obj_o); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(obj); - if (iter_o == NULL) goto pop_1_error; + if (iter_o == NULL) { + goto pop_1_error; + } if (Py_TYPE(iter_o)->tp_as_async == NULL || Py_TYPE(iter_o)->tp_as_async->am_anext == NULL) { stack_pointer += -1; @@ -4323,7 +4514,9 @@ PyObject *iter_o = _PyEval_GetAwaitable(PyStackRef_AsPyObjectBorrow(iterable), oparg); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(iterable); - if (iter_o == NULL) goto pop_1_error; + if (iter_o == NULL) { + goto pop_1_error; + } iter = PyStackRef_FromPyObjectSteal(iter_o); stack_pointer[-1] = iter; DISPATCH(); @@ -4341,7 +4534,9 @@ PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(iterable); - if (iter_o == NULL) goto pop_1_error; + if (iter_o == NULL) { + goto pop_1_error; + } iter = PyStackRef_FromPyObjectSteal(iter_o); stack_pointer[-1] = iter; DISPATCH(); @@ -4358,9 +4553,13 @@ _PyFrame_SetStackPointer(frame, stack_pointer); Py_ssize_t len_i = PyObject_Length(PyStackRef_AsPyObjectBorrow(obj)); stack_pointer = _PyFrame_GetStackPointer(frame); - if (len_i < 0) goto error; + if (len_i < 0) { + goto error; + } PyObject *len_o = PyLong_FromSsize_t(len_i); - if (len_o == NULL) goto error; + if (len_o == NULL) { + goto error; + } len = PyStackRef_FromPyObjectSteal(len_o); stack_pointer[0] = len; stack_pointer += 1; @@ -4422,7 +4621,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = 
_PyEval_ImportFrom(tstate, PyStackRef_AsPyObjectBorrow(from), name); stack_pointer = _PyFrame_GetStackPointer(frame); - if (res_o == NULL) goto error; + if (res_o == NULL) { + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[0] = res; stack_pointer += 1; @@ -4447,7 +4648,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(level); PyStackRef_CLOSE(fromlist); - if (res_o == NULL) goto pop_2_error; + if (res_o == NULL) { + goto pop_2_error; + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-2] = res; stack_pointer += -1; @@ -4460,6 +4663,7 @@ (void)this_instr; next_instr += 4; INSTRUCTION_STATS(INSTRUMENTED_CALL); + opcode = INSTRUMENTED_CALL; _PyStackRef *callable; _PyStackRef *self_or_null; _PyStackRef *args; @@ -4481,7 +4685,9 @@ PyObject *method = ((PyMethodObject *)callable_o)->im_func; _PyStackRef temp = callable[0]; func[0] = PyStackRef_FromPyObjectNew(method); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); } } // _MONITOR_CALL @@ -4509,7 +4715,9 @@ frame, this_instr, function, arg0 ); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err) goto error; + if (err) { + goto error; + } } // _DO_CALL { @@ -4555,11 +4763,9 @@ for (int _i = oparg; --_i >= 0;) { PyStackRef_CLOSE(args[_i]); } - { - stack_pointer += -2 - oparg; - assert(WITHIN_STACK_BOUNDS()); - goto error; - } + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; } _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *res_o = PyObject_Vectorcall( @@ -4585,7 +4791,9 @@ frame, this_instr, callable_o, arg); stack_pointer = _PyFrame_GetStackPointer(frame); if (err < 0) { + _PyFrame_SetStackPointer(frame, stack_pointer); Py_CLEAR(res_o); + stack_pointer = _PyFrame_GetStackPointer(frame); } } } @@ -4613,7 +4821,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } stack_pointer += 1 + oparg; assert(WITHIN_STACK_BOUNDS()); } @@ -4625,71 +4835,399 @@ } TARGET(INSTRUMENTED_CALL_FUNCTION_EX) { - frame->instr_ptr = next_instr; - next_instr += 1; - INSTRUCTION_STATS(INSTRUMENTED_CALL_FUNCTION_EX); - - goto PREDICTED_CALL_FUNCTION_EX; - } - - TARGET(INSTRUMENTED_CALL_KW) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; (void)this_instr; - next_instr += 4; - INSTRUCTION_STATS(INSTRUMENTED_CALL_KW); - uint16_t counter = read_u16(&this_instr[1].cache); - (void)counter; - uint32_t version = read_u32(&this_instr[2].cache); - (void)version; - int is_meth = !PyStackRef_IsNull(PEEK(oparg + 2)); - int total_args = oparg + is_meth; - PyObject *function = PyStackRef_AsPyObjectBorrow(PEEK(oparg + 3)); - PyObject *arg = total_args == 0 ? 
&_PyInstrumentation_MISSING - : PyStackRef_AsPyObjectBorrow(PEEK(total_args + 1)); - _PyFrame_SetStackPointer(frame, stack_pointer); - int err = _Py_call_instrumentation_2args( - tstate, PY_MONITORING_EVENT_CALL, - frame, this_instr, function, arg); - stack_pointer = _PyFrame_GetStackPointer(frame); - if (err) goto error; - PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter); - goto PREDICTED_CALL_KW; - } - - TARGET(INSTRUMENTED_END_FOR) { - _Py_CODEUNIT* const this_instr = next_instr; - (void)this_instr; next_instr += 1; - INSTRUCTION_STATS(INSTRUMENTED_END_FOR); - _PyStackRef receiver; - _PyStackRef value; - value = stack_pointer[-1]; - receiver = stack_pointer[-2]; - /* Need to create a fake StopIteration error here, - * to conform to PEP 380 */ - if (PyStackRef_GenCheck(receiver)) { - _PyFrame_SetStackPointer(frame, stack_pointer); - int err = monitor_stop_iteration(tstate, frame, this_instr, PyStackRef_AsPyObjectBorrow(value)); - stack_pointer = _PyFrame_GetStackPointer(frame); - if (err) { - goto error; + INSTRUCTION_STATS(INSTRUMENTED_CALL_FUNCTION_EX); + opcode = INSTRUMENTED_CALL_FUNCTION_EX; + _PyStackRef func; + _PyStackRef callargs; + _PyStackRef kwargs_in; + _PyStackRef tuple; + _PyStackRef kwargs_out; + _PyStackRef func_st; + _PyStackRef null; + _PyStackRef callargs_st; + _PyStackRef kwargs_st; + _PyStackRef result; + // _MAKE_CALLARGS_A_TUPLE + { + kwargs_in = stack_pointer[-1]; + callargs = stack_pointer[-2]; + func = stack_pointer[-4]; + PyObject *callargs_o = PyStackRef_AsPyObjectBorrow(callargs); + if (PyTuple_CheckExact(callargs_o)) { + tuple = callargs; + kwargs_out = kwargs_in; } - } - PyStackRef_CLOSE(value); - stack_pointer += -1; - assert(WITHIN_STACK_BOUNDS()); - DISPATCH(); - } - - TARGET(INSTRUMENTED_END_SEND) { - _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; - (void)this_instr; - next_instr += 1; - INSTRUCTION_STATS(INSTRUMENTED_END_SEND); - _PyStackRef receiver; - _PyStackRef value; - _PyStackRef val; - value = stack_pointer[-1]; + else { + _PyFrame_SetStackPointer(frame, stack_pointer); + int err = _Py_Check_ArgsIterable(tstate, PyStackRef_AsPyObjectBorrow(func), callargs_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err < 0) { + goto error; + } + _PyFrame_SetStackPointer(frame, stack_pointer); + PyObject *tuple_o = PySequence_Tuple(callargs_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (tuple_o == NULL) { + goto error; + } + kwargs_out = kwargs_in; + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(callargs); + stack_pointer = _PyFrame_GetStackPointer(frame); + tuple = PyStackRef_FromPyObjectSteal(tuple_o); + stack_pointer += 2; + assert(WITHIN_STACK_BOUNDS()); + } + } + // _DO_CALL_FUNCTION_EX + { + kwargs_st = kwargs_out; + callargs_st = tuple; + null = stack_pointer[-3]; + func_st = func; + (void)null; + PyObject *func = PyStackRef_AsPyObjectBorrow(func_st); + // DICT_MERGE is called before this opcode if there are kwargs. + // It converts all dict subtypes in kwargs into regular dicts. + EVAL_CALL_STAT_INC_IF_FUNCTION(EVAL_CALL_FUNCTION_EX, func); + PyObject *result_o; + assert(!_PyErr_Occurred(tstate)); + if (opcode == INSTRUMENTED_CALL_FUNCTION_EX) { + PyObject *callargs = PyStackRef_AsPyObjectBorrow(callargs_st); + PyObject *kwargs = PyStackRef_AsPyObjectBorrow(kwargs_st); + assert(kwargs == NULL || PyDict_CheckExact(kwargs)); + assert(PyTuple_CheckExact(callargs)); + PyObject *arg = PyTuple_GET_SIZE(callargs) > 0 ? 
+ PyTuple_GET_ITEM(callargs, 0) : &_PyInstrumentation_MISSING; + stack_pointer[-2] = callargs_st; + stack_pointer[-1] = kwargs_st; + _PyFrame_SetStackPointer(frame, stack_pointer); + int err = _Py_call_instrumentation_2args( + tstate, PY_MONITORING_EVENT_CALL, + frame, this_instr, func, arg); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err) { + goto error; + } + _PyFrame_SetStackPointer(frame, stack_pointer); + result_o = PyObject_Call(func, callargs, kwargs); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (!PyFunction_Check(func) && !PyMethod_Check(func)) { + if (result_o == NULL) { + _PyFrame_SetStackPointer(frame, stack_pointer); + _Py_call_instrumentation_exc2( + tstate, PY_MONITORING_EVENT_C_RAISE, + frame, this_instr, func, arg); + stack_pointer = _PyFrame_GetStackPointer(frame); + } + else { + _PyFrame_SetStackPointer(frame, stack_pointer); + int err = _Py_call_instrumentation_2args( + tstate, PY_MONITORING_EVENT_C_RETURN, + frame, this_instr, func, arg); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err < 0) { + _PyFrame_SetStackPointer(frame, stack_pointer); + Py_CLEAR(result_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + } + } + } + } + else { + if (Py_TYPE(func) == &PyFunction_Type && + tstate->interp->eval_frame == NULL && + ((PyFunctionObject *)func)->vectorcall == _PyFunction_Vectorcall) { + PyObject *callargs = PyStackRef_AsPyObjectSteal(callargs_st); + assert(PyTuple_CheckExact(callargs)); + PyObject *kwargs = PyStackRef_IsNull(kwargs_st) ? NULL : PyStackRef_AsPyObjectSteal(kwargs_st); + assert(kwargs == NULL || PyDict_CheckExact(kwargs)); + Py_ssize_t nargs = PyTuple_GET_SIZE(callargs); + int code_flags = ((PyCodeObject *)PyFunction_GET_CODE(func))->co_flags; + PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(func)); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_Ex( + tstate, func_st, locals, + nargs, callargs, kwargs, frame); + stack_pointer = _PyFrame_GetStackPointer(frame); + // Need to sync the stack since we exit with DISPATCH_INLINED. 
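/*
 * A minimal standalone sketch of the spill/reload discipline that dominates
 * this hunk: the dispatch loop keeps the value-stack pointer in a local, and
 * before any call that can run arbitrary code (and therefore inspect or unwind
 * the frame) the local is written back to the frame; afterwards it is read
 * back in case the callee changed it.  `Frame`, `escape_hatch` and the field
 * names are simplified stand-ins, not the CPython API.
 */
#include <stdio.h>

typedef struct {
    int stack[8];
    int *stackpointer;          /* authoritative copy, visible to everyone else */
} Frame;

/* Pretend this can run arbitrary code that looks at frame->stackpointer. */
static void escape_hatch(Frame *frame)
{
    printf("callee sees %td live slots\n", frame->stackpointer - frame->stack);
}

int main(void)
{
    Frame frame;
    int *stack_pointer = frame.stack;        /* hot, local copy               */
    frame.stackpointer = stack_pointer;

    *stack_pointer++ = 42;                   /* push a value                  */

    frame.stackpointer = stack_pointer;      /* spill: publish before escape  */
    escape_hatch(&frame);
    stack_pointer = frame.stackpointer;      /* reload: callee may have moved it */

    printf("interpreter sees %td live slots\n", stack_pointer - frame.stack);
    return 0;
}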
+ stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + if (new_frame == NULL) { + goto error; + } + assert( 1 == 1); + frame->return_offset = 1; + DISPATCH_INLINED(new_frame); + } + PyObject *callargs = PyStackRef_AsPyObjectBorrow(callargs_st); + assert(PyTuple_CheckExact(callargs)); + PyObject *kwargs = PyStackRef_AsPyObjectBorrow(kwargs_st); + assert(kwargs == NULL || PyDict_CheckExact(kwargs)); + stack_pointer[-2] = callargs_st; + stack_pointer[-1] = kwargs_st; + _PyFrame_SetStackPointer(frame, stack_pointer); + result_o = PyObject_Call(func, callargs, kwargs); + stack_pointer = _PyFrame_GetStackPointer(frame); + } + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(kwargs_st); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(callargs_st); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -2; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(func_st); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (result_o == NULL) { + goto error; + } + result = PyStackRef_FromPyObjectSteal(result_o); + } + // _CHECK_PERIODIC + { + _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); + QSBR_QUIESCENT_STATE(tstate); + if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { + stack_pointer[0] = result; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + int err = _Py_HandlePending(tstate); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err != 0) { + goto error; + } + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + } + } + stack_pointer[0] = result; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); + DISPATCH(); + } + + TARGET(INSTRUMENTED_CALL_KW) { + _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; + next_instr += 4; + INSTRUCTION_STATS(INSTRUMENTED_CALL_KW); + opcode = INSTRUMENTED_CALL_KW; + _PyStackRef *callable; + _PyStackRef *self_or_null; + _PyStackRef *args; + _PyStackRef kwnames; + _PyStackRef kwnames_in; + _PyStackRef *func; + _PyStackRef *maybe_self; + _PyStackRef kwnames_out; + _PyStackRef res; + /* Skip 1 cache entry */ + /* Skip 2 cache entries */ + // _MONITOR_CALL_KW + { + args = &stack_pointer[-1 - oparg]; + self_or_null = &stack_pointer[-2 - oparg]; + callable = &stack_pointer[-3 - oparg]; + int is_meth = !PyStackRef_IsNull(self_or_null[0]); + PyObject *arg; + if (is_meth) { + arg = PyStackRef_AsPyObjectBorrow(self_or_null[0]); + } + else { + if (args) { + arg = PyStackRef_AsPyObjectBorrow(args[0]); + } + else { + arg = &_PyInstrumentation_MISSING; + } + } + PyObject *function = PyStackRef_AsPyObjectBorrow(callable[0]); + _PyFrame_SetStackPointer(frame, stack_pointer); + int err = _Py_call_instrumentation_2args( + tstate, PY_MONITORING_EVENT_CALL, + frame, this_instr, function, arg); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err) { + goto error; + } + } + // _MAYBE_EXPAND_METHOD_KW + { + kwnames_in = stack_pointer[-1]; + func = &stack_pointer[-3 - oparg]; + maybe_self = &stack_pointer[-2 - oparg]; + if (PyStackRef_TYPE(callable[0]) == &PyMethod_Type && PyStackRef_IsNull(self_or_null[0])) { + PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); + PyObject *self = ((PyMethodObject *)callable_o)->im_self; + maybe_self[0] = 
PyStackRef_FromPyObjectNew(self); + PyObject *method = ((PyMethodObject *)callable_o)->im_func; + _PyStackRef temp = callable[0]; + func[0] = PyStackRef_FromPyObjectNew(method); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(temp); + stack_pointer = _PyFrame_GetStackPointer(frame); + } + kwnames_out = kwnames_in; + } + // _DO_CALL_KW + { + kwnames = kwnames_out; + args = &stack_pointer[-1 - oparg]; + self_or_null = &stack_pointer[-2 - oparg]; + callable = &stack_pointer[-3 - oparg]; + PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable[0]); + PyObject *kwnames_o = PyStackRef_AsPyObjectBorrow(kwnames); + // oparg counts all of the args, but *not* self: + int total_args = oparg; + _PyStackRef *arguments = args; + if (!PyStackRef_IsNull(self_or_null[0])) { + arguments--; + total_args++; + } + int positional_args = total_args - (int)PyTuple_GET_SIZE(kwnames_o); + // Check if the call can be inlined or not + if (Py_TYPE(callable_o) == &PyFunction_Type && + tstate->interp->eval_frame == NULL && + ((PyFunctionObject *)callable_o)->vectorcall == _PyFunction_Vectorcall) + { + int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags; + PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o)); + stack_pointer[-1] = kwnames; + _PyFrame_SetStackPointer(frame, stack_pointer); + _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit( + tstate, callable[0], locals, + arguments, positional_args, kwnames_o, frame + ); + stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(kwnames); + stack_pointer = _PyFrame_GetStackPointer(frame); + // Sync stack explicitly since we leave using DISPATCH_INLINED(). + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + // The frame has stolen all the arguments from the stack, + // so there is no need to clean them up. + if (new_frame == NULL) { + goto error; + } + assert( 4 == 1 + INLINE_CACHE_ENTRIES_CALL_KW); + frame->return_offset = 4 ; + DISPATCH_INLINED(new_frame); + } + /* Callable is not a normal Python function */ + STACKREFS_TO_PYOBJECTS(arguments, total_args, args_o); + if (CONVERSION_FAILED(args_o)) { + PyStackRef_CLOSE(callable[0]); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } + PyStackRef_CLOSE(kwnames); + stack_pointer += -3 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } + stack_pointer[-1] = kwnames; + _PyFrame_SetStackPointer(frame, stack_pointer); + PyObject *res_o = PyObject_Vectorcall( + callable_o, args_o, + positional_args | PY_VECTORCALL_ARGUMENTS_OFFSET, + kwnames_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + STACKREFS_TO_PYOBJECTS_CLEANUP(args_o); + if (opcode == INSTRUMENTED_CALL_KW) { + PyObject *arg = total_args == 0 ? 
+ &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(arguments[0]); + if (res_o == NULL) { + _PyFrame_SetStackPointer(frame, stack_pointer); + _Py_call_instrumentation_exc2( + tstate, PY_MONITORING_EVENT_C_RAISE, + frame, this_instr, callable_o, arg); + stack_pointer = _PyFrame_GetStackPointer(frame); + } + else { + _PyFrame_SetStackPointer(frame, stack_pointer); + int err = _Py_call_instrumentation_2args( + tstate, PY_MONITORING_EVENT_C_RETURN, + frame, this_instr, callable_o, arg); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err < 0) { + _PyFrame_SetStackPointer(frame, stack_pointer); + Py_CLEAR(res_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + } + } + } + PyStackRef_CLOSE(callable[0]); + PyStackRef_XCLOSE(self_or_null[0]); + for (int _i = oparg; --_i >= 0;) { + PyStackRef_CLOSE(args[_i]); + } + PyStackRef_CLOSE(kwnames); + if (res_o == NULL) { + stack_pointer += -3 - oparg; + assert(WITHIN_STACK_BOUNDS()); + goto error; + } + res = PyStackRef_FromPyObjectSteal(res_o); + } + stack_pointer[-3 - oparg] = res; + stack_pointer += -2 - oparg; + assert(WITHIN_STACK_BOUNDS()); + DISPATCH(); + } + + TARGET(INSTRUMENTED_END_FOR) { + _Py_CODEUNIT* const this_instr = next_instr; + (void)this_instr; + next_instr += 1; + INSTRUCTION_STATS(INSTRUMENTED_END_FOR); + _PyStackRef receiver; + _PyStackRef value; + value = stack_pointer[-1]; + receiver = stack_pointer[-2]; + /* Need to create a fake StopIteration error here, + * to conform to PEP 380 */ + if (PyStackRef_GenCheck(receiver)) { + _PyFrame_SetStackPointer(frame, stack_pointer); + int err = monitor_stop_iteration(tstate, frame, this_instr, PyStackRef_AsPyObjectBorrow(value)); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err) { + goto error; + } + } + PyStackRef_CLOSE(value); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + DISPATCH(); + } + + TARGET(INSTRUMENTED_END_SEND) { + _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; + next_instr += 1; + INSTRUCTION_STATS(INSTRUMENTED_END_SEND); + _PyStackRef receiver; + _PyStackRef value; + _PyStackRef val; + value = stack_pointer[-1]; receiver = stack_pointer[-2]; PyObject *receiver_o = PyStackRef_AsPyObjectBorrow(receiver); if (PyGen_Check(receiver_o) || PyCoro_CheckExact(receiver_o)) { @@ -4701,10 +5239,12 @@ } } val = value; - PyStackRef_CLOSE(receiver); stack_pointer[-2] = val; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(receiver); + stack_pointer = _PyFrame_GetStackPointer(frame); DISPATCH(); } @@ -4750,11 +5290,14 @@ (void)this_instr; next_instr += 1; INSTRUCTION_STATS(INSTRUMENTED_INSTRUCTION); + opcode = INSTRUMENTED_INSTRUCTION; _PyFrame_SetStackPointer(frame, stack_pointer); int next_opcode = _Py_call_instrumentation_instruction( tstate, frame, this_instr); stack_pointer = _PyFrame_GetStackPointer(frame); - if (next_opcode < 0) goto error; + if (next_opcode < 0) { + goto error; + } next_instr = this_instr; if (_PyOpcode_Caches[next_opcode]) { PAUSE_ADAPTIVE_COUNTER(next_instr[1].counter); @@ -4778,7 +5321,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } } } // _MONITOR_JUMP_BACKWARD @@ -4803,6 +5348,7 @@ (void)this_instr; next_instr += 1; INSTRUCTION_STATS(INSTRUMENTED_LINE); + opcode = INSTRUMENTED_LINE; int original_opcode = 0; if (tstate->tracing) { PyCodeObject *code = 
_PyFrame_GetCode(frame); @@ -4840,11 +5386,90 @@ (void)this_instr; next_instr += 2; INSTRUCTION_STATS(INSTRUMENTED_LOAD_SUPER_ATTR); + opcode = INSTRUMENTED_LOAD_SUPER_ATTR; + _PyStackRef global_super_st; + _PyStackRef class_st; + _PyStackRef self_st; + _PyStackRef attr; + _PyStackRef null = PyStackRef_NULL; /* Skip 1 cache entry */ - // cancel out the decrement that will happen in LOAD_SUPER_ATTR; we - // don't want to specialize instrumented instructions - PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter); - goto PREDICTED_LOAD_SUPER_ATTR; + // _LOAD_SUPER_ATTR + { + self_st = stack_pointer[-1]; + class_st = stack_pointer[-2]; + global_super_st = stack_pointer[-3]; + PyObject *global_super = PyStackRef_AsPyObjectBorrow(global_super_st); + PyObject *class = PyStackRef_AsPyObjectBorrow(class_st); + PyObject *self = PyStackRef_AsPyObjectBorrow(self_st); + if (opcode == INSTRUMENTED_LOAD_SUPER_ATTR) { + PyObject *arg = oparg & 2 ? class : &_PyInstrumentation_MISSING; + _PyFrame_SetStackPointer(frame, stack_pointer); + int err = _Py_call_instrumentation_2args( + tstate, PY_MONITORING_EVENT_CALL, + frame, this_instr, global_super, arg); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err) { + PyStackRef_CLOSE(global_super_st); + PyStackRef_CLOSE(class_st); + PyStackRef_CLOSE(self_st); + goto pop_3_error; + } + } + // we make no attempt to optimize here; specializations should + // handle any case whose performance we care about + PyObject *stack[] = {class, self}; + _PyFrame_SetStackPointer(frame, stack_pointer); + PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (opcode == INSTRUMENTED_LOAD_SUPER_ATTR) { + PyObject *arg = oparg & 2 ? class : &_PyInstrumentation_MISSING; + if (super == NULL) { + _PyFrame_SetStackPointer(frame, stack_pointer); + _Py_call_instrumentation_exc2( + tstate, PY_MONITORING_EVENT_C_RAISE, + frame, this_instr, global_super, arg); + stack_pointer = _PyFrame_GetStackPointer(frame); + } + else { + _PyFrame_SetStackPointer(frame, stack_pointer); + int err = _Py_call_instrumentation_2args( + tstate, PY_MONITORING_EVENT_C_RETURN, + frame, this_instr, global_super, arg); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err < 0) { + _PyFrame_SetStackPointer(frame, stack_pointer); + Py_CLEAR(super); + stack_pointer = _PyFrame_GetStackPointer(frame); + } + } + } + PyStackRef_CLOSE(global_super_st); + PyStackRef_CLOSE(class_st); + PyStackRef_CLOSE(self_st); + if (super == NULL) { + goto pop_3_error; + } + PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2); + stack_pointer += -3; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyObject *attr_o = PyObject_GetAttr(super, name); + Py_DECREF(super); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (attr_o == NULL) { + goto error; + } + attr = PyStackRef_FromPyObjectSteal(attr_o); + } + // _PUSH_NULL_CONDITIONAL + { + null = PyStackRef_NULL; + } + stack_pointer[0] = attr; + if (oparg & 1) stack_pointer[1] = null; + stack_pointer += 1 + (oparg & 1); + assert(WITHIN_STACK_BOUNDS()); + DISPATCH(); } TARGET(INSTRUMENTED_NOT_TAKEN) { @@ -4867,9 +5492,11 @@ _PyStackRef iter; iter = stack_pointer[-1]; INSTRUMENTED_JUMP(prev_instr, this_instr+1, PY_MONITORING_EVENT_BRANCH_RIGHT); - PyStackRef_CLOSE(iter); stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(iter); + stack_pointer = _PyFrame_GetStackPointer(frame); DISPATCH(); } @@ 
-4902,7 +5529,9 @@ INSTRUMENTED_JUMP(this_instr, next_instr + oparg, PY_MONITORING_EVENT_BRANCH_RIGHT); } else { + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(value_stackref); + stack_pointer = _PyFrame_GetStackPointer(frame); } DISPATCH(); } @@ -4917,7 +5546,9 @@ int jump = !PyStackRef_IsNone(value_stackref); RECORD_BRANCH_TAKEN(this_instr[1].cache, jump); if (jump) { + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(value_stackref); + stack_pointer = _PyFrame_GetStackPointer(frame); INSTRUMENTED_JUMP(this_instr, next_instr + oparg, PY_MONITORING_EVENT_BRANCH_RIGHT); } DISPATCH(); @@ -4953,7 +5584,9 @@ _Py_CODEUNIT *bytecode = _PyEval_GetExecutableCode(tstate, _PyFrame_GetCode(frame)); stack_pointer = _PyFrame_GetStackPointer(frame); - if (bytecode == NULL) goto error; + if (bytecode == NULL) { + goto error; + } _PyFrame_SetStackPointer(frame, stack_pointer); ptrdiff_t off = this_instr - _PyFrame_GetBytecode(frame); stack_pointer = _PyFrame_GetStackPointer(frame); @@ -4992,7 +5625,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } } } } @@ -5002,7 +5637,9 @@ int err = _Py_call_instrumentation( tstate, oparg > 0, frame, this_instr); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err) goto error; + if (err) { + goto error; + } if (frame->instr_ptr != this_instr) { /* Instrumentation has jumped */ next_instr = frame->instr_ptr; @@ -5027,7 +5664,9 @@ tstate, PY_MONITORING_EVENT_PY_RETURN, frame, this_instr, PyStackRef_AsPyObjectBorrow(val)); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err) goto error; + if (err) { + goto error; + } } // _RETURN_VALUE { @@ -5159,11 +5798,58 @@ DISPATCH(); } - TARGET(JUMP_BACKWARD) { + TARGET(JUMP_BACKWARD) { + frame->instr_ptr = next_instr; + next_instr += 2; + INSTRUCTION_STATS(JUMP_BACKWARD); + PREDICTED_JUMP_BACKWARD:; + _Py_CODEUNIT* const this_instr = next_instr - 2; + (void)this_instr; + /* Skip 1 cache entry */ + // _SPECIALIZE_JUMP_BACKWARD + { + #if ENABLE_SPECIALIZATION + if (this_instr->op.code == JUMP_BACKWARD) { + this_instr->op.code = tstate->interp->jit ? JUMP_BACKWARD_JIT : JUMP_BACKWARD_NO_JIT; + // Need to re-dispatch so the warmup counter isn't off by one: + next_instr = this_instr; + DISPATCH_SAME_OPARG(); + } + #endif + } + // _CHECK_PERIODIC + { + _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); + QSBR_QUIESCENT_STATE(tstate); + if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { + _PyFrame_SetStackPointer(frame, stack_pointer); + int err = _Py_HandlePending(tstate); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err != 0) { + goto error; + } + } + } + // _JUMP_BACKWARD_NO_INTERRUPT + { + /* This bytecode is used in the `yield from` or `await` loop. + * If there is an interrupt, we want it handled in the innermost + * generator or coroutine, so we deliberately do not check it here. + * (see bpo-30039). 
+ */ + assert(oparg <= INSTR_OFFSET()); + JUMPBY(-oparg); + } + DISPATCH(); + } + + TARGET(JUMP_BACKWARD_JIT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; (void)this_instr; next_instr += 2; - INSTRUCTION_STATS(JUMP_BACKWARD); + INSTRUCTION_STATS(JUMP_BACKWARD_JIT); + static_assert(1 == 1, "incorrect cache size"); + /* Skip 1 cache entry */ // _CHECK_PERIODIC { _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); @@ -5172,19 +5858,26 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } } } - // _JUMP_BACKWARD + // _JUMP_BACKWARD_NO_INTERRUPT { - uint16_t the_counter = read_u16(&this_instr[1].cache); - (void)the_counter; + /* This bytecode is used in the `yield from` or `await` loop. + * If there is an interrupt, we want it handled in the innermost + * generator or coroutine, so we deliberately do not check it here. + * (see bpo-30039). + */ assert(oparg <= INSTR_OFFSET()); JUMPBY(-oparg); + } + // _JIT + { #ifdef _Py_TIER2 - #if ENABLE_SPECIALIZATION _Py_BackoffCounter counter = this_instr[1].counter; - if (backoff_counter_triggers(counter) && this_instr->op.code == JUMP_BACKWARD) { + if (backoff_counter_triggers(counter) && this_instr->op.code == JUMP_BACKWARD_JIT) { _Py_CODEUNIT *start = this_instr; /* Back up over EXTENDED_ARGs so optimizer sees the whole instruction */ while (oparg > 255) { @@ -5193,11 +5886,13 @@ } _PyExecutorObject *executor; _PyFrame_SetStackPointer(frame, stack_pointer); - int optimized = _PyOptimizer_Optimize(frame, start, stack_pointer, &executor, 0); + int optimized = _PyOptimizer_Optimize(frame, start, &executor, 0); stack_pointer = _PyFrame_GetStackPointer(frame); if (optimized <= 0) { this_instr[1].counter = restart_backoff_counter(counter); - if (optimized < 0) goto error; + if (optimized < 0) { + goto error; + } } else { _PyFrame_SetStackPointer(frame, stack_pointer); @@ -5211,8 +5906,7 @@ else { ADVANCE_ADAPTIVE_COUNTER(this_instr[1].counter); } - #endif /* ENABLE_SPECIALIZATION */ - #endif /* _Py_TIER2 */ + #endif } DISPATCH(); } @@ -5226,10 +5920,43 @@ * generator or coroutine, so we deliberately do not check it here. * (see bpo-30039). */ + assert(oparg <= INSTR_OFFSET()); JUMPBY(-oparg); DISPATCH(); } + TARGET(JUMP_BACKWARD_NO_JIT) { + frame->instr_ptr = next_instr; + next_instr += 2; + INSTRUCTION_STATS(JUMP_BACKWARD_NO_JIT); + static_assert(1 == 1, "incorrect cache size"); + /* Skip 1 cache entry */ + // _CHECK_PERIODIC + { + _Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY(); + QSBR_QUIESCENT_STATE(tstate); + if (_Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker) & _PY_EVAL_EVENTS_MASK) { + _PyFrame_SetStackPointer(frame, stack_pointer); + int err = _Py_HandlePending(tstate); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err != 0) { + goto error; + } + } + } + // _JUMP_BACKWARD_NO_INTERRUPT + { + /* This bytecode is used in the `yield from` or `await` loop. + * If there is an interrupt, we want it handled in the innermost + * generator or coroutine, so we deliberately do not check it here. + * (see bpo-30039). 
+ */ + assert(oparg <= INSTR_OFFSET()); + JUMPBY(-oparg); + } + DISPATCH(); + } + TARGET(JUMP_FORWARD) { frame->instr_ptr = next_instr; next_instr += 1; @@ -5248,7 +5975,9 @@ list = stack_pointer[-2 - (oparg-1)]; int err = _PyList_AppendTakeRef((PyListObject *)PyStackRef_AsPyObjectBorrow(list), PyStackRef_AsPyObjectSteal(v)); - if (err < 0) goto pop_1_error; + if (err < 0) { + goto pop_1_error; + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -5347,7 +6076,9 @@ meth | NULL | arg1 | ... | argN */ PyStackRef_CLOSE(owner); - if (attr_o == NULL) goto pop_1_error; + if (attr_o == NULL) { + goto pop_1_error; + } self_or_null[0] = PyStackRef_NULL; } } @@ -5357,7 +6088,9 @@ attr_o = PyObject_GetAttr(PyStackRef_AsPyObjectBorrow(owner), name); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(owner); - if (attr_o == NULL) goto pop_1_error; + if (attr_o == NULL) { + goto pop_1_error; + } } attr = PyStackRef_FromPyObjectSteal(attr_o); } @@ -5369,6 +6102,7 @@ TARGET(LOAD_ATTR_CLASS) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_CLASS); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5407,6 +6141,7 @@ TARGET(LOAD_ATTR_CLASS_WITH_METACLASS_CHECK) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_CLASS_WITH_METACLASS_CHECK); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5451,6 +6186,7 @@ TARGET(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5487,6 +6223,7 @@ TARGET(LOAD_ATTR_INSTANCE_VALUE) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_INSTANCE_VALUE); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5524,14 +6261,16 @@ attr = PyStackRef_FromPyObjectNew(attr_o); #endif STAT_INC(LOAD_ATTR, hit); + stack_pointer[-1] = attr; + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(owner); + stack_pointer = _PyFrame_GetStackPointer(frame); } /* Skip 5 cache entries */ // _PUSH_NULL_CONDITIONAL { null = PyStackRef_NULL; } - stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = null; stack_pointer += (oparg & 1); assert(WITHIN_STACK_BOUNDS()); @@ -5540,6 +6279,7 @@ TARGET(LOAD_ATTR_METHOD_LAZY_DICT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_METHOD_LAZY_DICT); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5583,6 +6323,7 @@ TARGET(LOAD_ATTR_METHOD_NO_DICT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_METHOD_NO_DICT); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5619,6 +6360,7 @@ TARGET(LOAD_ATTR_METHOD_WITH_VALUES) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_METHOD_WITH_VALUES); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5669,6 +6411,7 @@ TARGET(LOAD_ATTR_MODULE) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + 
(void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_MODULE); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5708,14 +6451,16 @@ attr = PyStackRef_FromPyObjectSteal(attr_o); #endif STAT_INC(LOAD_ATTR, hit); + stack_pointer[-1] = attr; + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(owner); + stack_pointer = _PyFrame_GetStackPointer(frame); } /* Skip 5 cache entries */ // _PUSH_NULL_CONDITIONAL { null = PyStackRef_NULL; } - stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = null; stack_pointer += (oparg & 1); assert(WITHIN_STACK_BOUNDS()); @@ -5724,6 +6469,7 @@ TARGET(LOAD_ATTR_NONDESCRIPTOR_NO_DICT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_NONDESCRIPTOR_NO_DICT); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5755,6 +6501,7 @@ TARGET(LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5799,6 +6546,7 @@ TARGET(LOAD_ATTR_PROPERTY) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_PROPERTY); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5864,6 +6612,7 @@ TARGET(LOAD_ATTR_SLOT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_SLOT); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5909,6 +6658,7 @@ TARGET(LOAD_ATTR_WITH_HINT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR_WITH_HINT); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); @@ -5986,7 +6736,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = PyMapping_GetOptionalItem(BUILTINS(), &_Py_ID(__build_class__), &bc_o); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err < 0) goto error; + if (err < 0) { + goto error; + } if (bc_o == NULL) { _PyFrame_SetStackPointer(frame, stack_pointer); _PyErr_SetString(tstate, PyExc_NameError, @@ -6035,11 +6787,21 @@ * marshalling can intern strings and make them immortal. */ PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg); value = PyStackRef_FromPyObjectNew(obj); - #if ENABLE_SPECIALIZATION + #if ENABLE_SPECIALIZATION_FT + #ifdef Py_GIL_DISABLED + uint8_t expected = LOAD_CONST; + if (!_Py_atomic_compare_exchange_uint8( + &this_instr->op.code, &expected, + _Py_IsImmortal(obj) ? LOAD_CONST_IMMORTAL : LOAD_CONST_MORTAL)) { + // We might lose a race with instrumentation, which we don't care about. + assert(expected >= MIN_INSTRUMENTED_OPCODE); + } + #else if (this_instr->op.code == LOAD_CONST) { this_instr->op.code = _Py_IsImmortal(obj) ? 
LOAD_CONST_IMMORTAL : LOAD_CONST_MORTAL; } #endif + #endif stack_pointer[0] = value; stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); @@ -6114,7 +6876,6 @@ INSTRUCTION_STATS(LOAD_FAST_AND_CLEAR); _PyStackRef value; value = GETLOCAL(oparg); - // do not use SETLOCAL here, it decrefs the old value GETLOCAL(oparg) = PyStackRef_NULL; stack_pointer[0] = value; stack_pointer += 1; @@ -6190,9 +6951,15 @@ goto error; } } + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(class_dict_st); + stack_pointer = _PyFrame_GetStackPointer(frame); value = PyStackRef_FromPyObjectSteal(value_o); - stack_pointer[-1] = value; + stack_pointer[0] = value; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -6209,7 +6976,9 @@ int err = PyMapping_GetOptionalItem(PyStackRef_AsPyObjectBorrow(mod_or_class_dict), name, &v_o); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(mod_or_class_dict); - if (err < 0) goto pop_1_error; + if (err < 0) { + goto pop_1_error; + } if (v_o == NULL) { if (PyDict_CheckExact(GLOBALS()) && PyDict_CheckExact(BUILTINS())) @@ -6241,13 +7010,17 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = PyMapping_GetOptionalItem(GLOBALS(), name, &v_o); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err < 0) goto error; + if (err < 0) { + goto error; + } if (v_o == NULL) { /* namespace 2: builtins */ _PyFrame_SetStackPointer(frame, stack_pointer); int err = PyMapping_GetOptionalItem(BUILTINS(), name, &v_o); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err < 0) goto error; + if (err < 0) { + goto error; + } if (v_o == NULL) { _PyFrame_SetStackPointer(frame, stack_pointer); _PyEval_FormatExcCheckArg( @@ -6302,7 +7075,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _PyEval_LoadGlobalStackRef(GLOBALS(), BUILTINS(), name, res); stack_pointer = _PyFrame_GetStackPointer(frame); - if (PyStackRef_IsNull(*res)) goto error; + if (PyStackRef_IsNull(*res)) { + goto error; + } } // _PUSH_NULL_CONDITIONAL { @@ -6316,6 +7091,7 @@ TARGET(LOAD_GLOBAL_BUILTIN) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 5; INSTRUCTION_STATS(LOAD_GLOBAL_BUILTIN); static_assert(INLINE_CACHE_ENTRIES_LOAD_GLOBAL == 4, "incorrect cache size"); @@ -6370,6 +7146,7 @@ TARGET(LOAD_GLOBAL_MODULE) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 5; INSTRUCTION_STATS(LOAD_GLOBAL_MODULE); static_assert(INLINE_CACHE_ENTRIES_LOAD_GLOBAL == 4, "incorrect cache size"); @@ -6443,7 +7220,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyObject *v_o = _PyEval_LoadName(tstate, frame, name); stack_pointer = _PyFrame_GetStackPointer(frame); - if (v_o == NULL) goto error; + if (v_o == NULL) { + goto error; + } v = PyStackRef_FromPyObjectSteal(v_o); stack_pointer[0] = v; stack_pointer += 1; @@ -6509,6 +7288,7 @@ PREDICTED_LOAD_SUPER_ATTR:; _Py_CODEUNIT* const this_instr = next_instr - 2; (void)this_instr; + opcode = LOAD_SUPER_ATTR; _PyStackRef global_super_st; _PyStackRef class_st; _PyStackRef self_st; @@ -6575,14 +7355,18 @@ frame, this_instr, global_super, arg); stack_pointer = _PyFrame_GetStackPointer(frame); if (err < 0) { + _PyFrame_SetStackPointer(frame, stack_pointer); Py_CLEAR(super); + stack_pointer = _PyFrame_GetStackPointer(frame); } } } PyStackRef_CLOSE(global_super_st); PyStackRef_CLOSE(class_st); PyStackRef_CLOSE(self_st); - if (super == NULL) goto pop_3_error; + if (super == NULL) { + goto 
pop_3_error; + } PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2); stack_pointer += -3; assert(WITHIN_STACK_BOUNDS()); @@ -6590,7 +7374,9 @@ PyObject *attr_o = PyObject_GetAttr(super, name); Py_DECREF(super); stack_pointer = _PyFrame_GetStackPointer(frame); - if (attr_o == NULL) goto error; + if (attr_o == NULL) { + goto error; + } attr = PyStackRef_FromPyObjectSteal(attr_o); } // _PUSH_NULL_CONDITIONAL @@ -6631,7 +7417,9 @@ PyStackRef_CLOSE(global_super_st); PyStackRef_CLOSE(class_st); PyStackRef_CLOSE(self_st); - if (attr == NULL) goto pop_3_error; + if (attr == NULL) { + goto pop_3_error; + } attr_st = PyStackRef_FromPyObjectSteal(attr); stack_pointer[-3] = attr_st; stack_pointer += -2; @@ -6673,11 +7461,17 @@ if (method_found) { self_or_null = self_st; // transfer ownership } else { + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(self_st); + stack_pointer = _PyFrame_GetStackPointer(frame); self_or_null = PyStackRef_NULL; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); } - PyStackRef_CLOSE(class_st); PyStackRef_CLOSE(global_super_st); + PyStackRef_CLOSE(class_st); attr = PyStackRef_FromPyObjectSteal(attr_o); stack_pointer[-3] = attr; stack_pointer[-2] = self_or_null; @@ -6697,7 +7491,11 @@ if (cell == NULL) { goto error; } - SETLOCAL(oparg, PyStackRef_FromPyObjectSteal(cell)); + _PyStackRef tmp = GETLOCAL(oparg); + GETLOCAL(oparg) = PyStackRef_FromPyObjectSteal(cell); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(tmp); + stack_pointer = _PyFrame_GetStackPointer(frame); DISPATCH(); } @@ -6713,12 +7511,20 @@ PyFunctionObject *func_obj = (PyFunctionObject *) PyFunction_New(codeobj, GLOBALS()); stack_pointer = _PyFrame_GetStackPointer(frame); + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(codeobj_st); - if (func_obj == NULL) goto pop_1_error; + stack_pointer = _PyFrame_GetStackPointer(frame); + if (func_obj == NULL) { + goto error; + } _PyFunction_SetVersion( func_obj, ((PyCodeObject *)codeobj)->co_version); func = PyStackRef_FromPyObjectSteal((PyObject *)func_obj); - stack_pointer[-1] = func; + stack_pointer[0] = func; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -6743,7 +7549,9 @@ PyStackRef_AsPyObjectSteal(value) ); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto pop_2_error; + if (err != 0) { + goto pop_2_error; + } stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -6777,7 +7585,9 @@ attrs = PyStackRef_FromPyObjectSteal(attrs_o); } else { - if (_PyErr_Occurred(tstate)) goto pop_3_error; + if (_PyErr_Occurred(tstate)) { + goto pop_3_error; + } // Error! attrs = PyStackRef_None; // Failure! 
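/*
 * A minimal standalone sketch of the swap-then-release pattern the patch
 * substitutes for SETLOCAL() in MAKE_CELL, DELETE_FAST and the STORE_FAST
 * family: the old reference is stashed, the slot is overwritten first, and
 * only then is the old reference dropped, so a destructor that runs during
 * the drop never observes a half-updated slot.  `Obj`, `decref` and
 * `locals[]` are simplified stand-ins, not the CPython API.
 */
#include <stdio.h>
#include <stdlib.h>

typedef struct { int refcnt; const char *name; } Obj;

static Obj *locals[4];                     /* toy fast-locals array */

static void decref(Obj *o)                 /* may run arbitrary "destructor" code */
{
    if (o && --o->refcnt == 0) {
        printf("finalizing %s; slot 0 now holds %s\n",
               o->name, locals[0] ? locals[0]->name : "NULL");
        free(o);
    }
}

static void set_local(int i, Obj *newval)  /* stand-in for the generated pattern */
{
    Obj *tmp = locals[i];                  /* _PyStackRef tmp = GETLOCAL(oparg)  */
    locals[i] = newval;                    /* GETLOCAL(oparg) = ...              */
    decref(tmp);                           /* PyStackRef_XCLOSE(tmp)             */
}

int main(void)
{
    Obj *a = malloc(sizeof *a), *b = malloc(sizeof *b);
    *a = (Obj){1, "a"};
    *b = (Obj){1, "b"};
    set_local(0, a);
    set_local(0, b);                       /* finalizer of "a" sees "b" installed */
    set_local(0, NULL);
    return 0;
}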
} @@ -6801,7 +7611,9 @@ PyObject *values_or_none_o = _PyEval_MatchKeys(tstate, PyStackRef_AsPyObjectBorrow(subject), PyStackRef_AsPyObjectBorrow(keys)); stack_pointer = _PyFrame_GetStackPointer(frame); - if (values_or_none_o == NULL) goto error; + if (values_or_none_o == NULL) { + goto error; + } values_or_none = PyStackRef_FromPyObjectSteal(values_or_none_o); stack_pointer[0] = values_or_none; stack_pointer += 1; @@ -7051,6 +7863,7 @@ _PyFrame_SetStackPointer(frame, stack_pointer); monitor_reraise(tstate, frame, this_instr); stack_pointer = _PyFrame_GetStackPointer(frame); + _PyFrame_SetStackPointer(frame, stack_pointer); goto exception_unwind; } goto error; @@ -7096,6 +7909,7 @@ _PyErr_SetRaisedException(tstate, exc); monitor_reraise(tstate, frame, this_instr); stack_pointer = _PyFrame_GetStackPointer(frame); + _PyFrame_SetStackPointer(frame, stack_pointer); goto exception_unwind; } @@ -7124,7 +7938,9 @@ _Py_CODEUNIT *bytecode = _PyEval_GetExecutableCode(tstate, _PyFrame_GetCode(frame)); stack_pointer = _PyFrame_GetStackPointer(frame); - if (bytecode == NULL) goto error; + if (bytecode == NULL) { + goto error; + } _PyFrame_SetStackPointer(frame, stack_pointer); ptrdiff_t off = this_instr - _PyFrame_GetBytecode(frame); stack_pointer = _PyFrame_GetStackPointer(frame); @@ -7171,7 +7987,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = _Py_HandlePending(tstate); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err != 0) goto error; + if (err != 0) { + goto error; + } } } } @@ -7208,7 +8026,9 @@ _PyFrame_SetStackPointer(frame, stack_pointer); PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); stack_pointer = _PyFrame_GetStackPointer(frame); - if (gen == NULL) goto error; + if (gen == NULL) { + goto error; + } assert(EMPTY()); _PyFrame_SetStackPointer(frame, stack_pointer); _PyInterpreterFrame *gen_frame = &gen->gi_iframe; @@ -7342,10 +8162,16 @@ goto pop_1_error; } } + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); PyStackRef_CLOSE(v); + stack_pointer = _PyFrame_GetStackPointer(frame); retval = PyStackRef_FromPyObjectSteal(retval_o); } - stack_pointer[-1] = retval; + stack_pointer[0] = retval; + stack_pointer += 1; + assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } @@ -7417,18 +8243,24 @@ _PyFrame_SetStackPointer(frame, stack_pointer); int err = PyMapping_GetOptionalItem(LOCALS(), &_Py_ID(__annotations__), &ann_dict); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err < 0) goto error; + if (err < 0) { + goto error; + } if (ann_dict == NULL) { _PyFrame_SetStackPointer(frame, stack_pointer); ann_dict = PyDict_New(); stack_pointer = _PyFrame_GetStackPointer(frame); - if (ann_dict == NULL) goto error; + if (ann_dict == NULL) { + goto error; + } _PyFrame_SetStackPointer(frame, stack_pointer); err = PyObject_SetItem(LOCALS(), &_Py_ID(__annotations__), ann_dict); Py_DECREF(ann_dict); stack_pointer = _PyFrame_GetStackPointer(frame); - if (err) goto error; + if (err) { + goto error; + } } else { _PyFrame_SetStackPointer(frame, stack_pointer); @@ -7451,7 +8283,9 @@ PyStackRef_AsPyObjectBorrow(v)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(v); - if (err) goto pop_1_error; + if (err) { + goto pop_1_error; + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -7494,7 +8328,9 @@ PyStackRef_AsPyObjectBorrow(iterable)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(iterable); - if (err < 0) goto pop_1_error; + if (err < 0) { + goto pop_1_error; + } stack_pointer += 
-1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -7538,7 +8374,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(v); PyStackRef_CLOSE(owner); - if (err) goto pop_2_error; + if (err) { + goto pop_2_error; + } } stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); @@ -7547,6 +8385,7 @@ TARGET(STORE_ATTR_INSTANCE_VALUE) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 5; INSTRUCTION_STATS(STORE_ATTR_INSTANCE_VALUE); static_assert(INLINE_CACHE_ENTRIES_STORE_ATTR == 4, "incorrect cache size"); @@ -7593,10 +8432,10 @@ _PyDictValues_AddToInsertionOrder(values, index); } UNLOCK_OBJECT(owner_o); - PyStackRef_CLOSE(owner); stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(owner); Py_XDECREF(old_value); stack_pointer = _PyFrame_GetStackPointer(frame); } @@ -7605,6 +8444,7 @@ TARGET(STORE_ATTR_SLOT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 5; INSTRUCTION_STATS(STORE_ATTR_SLOT); static_assert(INLINE_CACHE_ENTRIES_STORE_ATTR == 4, "incorrect cache size"); @@ -7630,10 +8470,10 @@ PyObject *old_value = *(PyObject **)addr; FT_ATOMIC_STORE_PTR_RELEASE(*(PyObject **)addr, PyStackRef_AsPyObjectSteal(value)); UNLOCK_OBJECT(owner_o); - PyStackRef_CLOSE(owner); stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(owner); Py_XDECREF(old_value); stack_pointer = _PyFrame_GetStackPointer(frame); } @@ -7642,6 +8482,7 @@ TARGET(STORE_ATTR_WITH_HINT) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 5; INSTRUCTION_STATS(STORE_ATTR_WITH_HINT); static_assert(INLINE_CACHE_ENTRIES_STORE_ATTR == 4, "incorrect cache size"); @@ -7696,10 +8537,10 @@ // old_value should be DECREFed after GC track checking is done, if not, it could raise a segmentation fault, // when dict only holds the strong reference to value in ep->me_value. 
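/*
 * A minimal standalone sketch of the publish ordering suggested by the slot
 * write above (FT_ATOMIC_STORE_PTR_RELEASE): the new object is fully
 * initialized first and its pointer is then stored with release ordering, so
 * a reader that acquire-loads the slot sees a completed object.  The types
 * and names here are simplified stand-ins, not the CPython free-threading
 * macros.
 */
#include <stdatomic.h>
#include <stdio.h>

typedef struct { int filled; } Value;

static Value slot_storage;
static _Atomic(Value *) slot;               /* the attribute slot */

static void writer(void)
{
    slot_storage.filled = 123;                              /* initialize first */
    atomic_store_explicit(&slot, &slot_storage,
                          memory_order_release);            /* then publish     */
}

static void reader(void)
{
    Value *v = atomic_load_explicit(&slot, memory_order_acquire);
    if (v != NULL) {
        printf("reader observes filled=%d\n", v->filled);   /* fully built      */
    }
}

int main(void)
{
    writer();
    reader();
    return 0;
}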
STAT_INC(STORE_ATTR, hit); - PyStackRef_CLOSE(owner); stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(owner); Py_XDECREF(old_value); stack_pointer = _PyFrame_GetStackPointer(frame); } @@ -7727,9 +8568,13 @@ INSTRUCTION_STATS(STORE_FAST); _PyStackRef value; value = stack_pointer[-1]; - SETLOCAL(oparg, value); + _PyStackRef tmp = GETLOCAL(oparg); + GETLOCAL(oparg) = value; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(tmp); + stack_pointer = _PyFrame_GetStackPointer(frame); DISPATCH(); } @@ -7742,9 +8587,13 @@ value1 = stack_pointer[-1]; uint32_t oparg1 = oparg >> 4; uint32_t oparg2 = oparg & 15; - SETLOCAL(oparg1, value1); + _PyStackRef tmp = GETLOCAL(oparg1); + GETLOCAL(oparg1) = value1; value2 = PyStackRef_DUP(GETLOCAL(oparg2)); stack_pointer[-1] = value2; + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(tmp); + stack_pointer = _PyFrame_GetStackPointer(frame); DISPATCH(); } @@ -7758,10 +8607,20 @@ value2 = stack_pointer[-2]; uint32_t oparg1 = oparg >> 4; uint32_t oparg2 = oparg & 15; - SETLOCAL(oparg1, value1); - SETLOCAL(oparg2, value2); - stack_pointer += -2; + _PyStackRef tmp = GETLOCAL(oparg1); + GETLOCAL(oparg1) = value1; + stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(tmp); + stack_pointer = _PyFrame_GetStackPointer(frame); + tmp = GETLOCAL(oparg2); + GETLOCAL(oparg2) = value2; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_XCLOSE(tmp); + stack_pointer = _PyFrame_GetStackPointer(frame); DISPATCH(); } @@ -7776,7 +8635,9 @@ int err = PyDict_SetItem(GLOBALS(), name, PyStackRef_AsPyObjectBorrow(v)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(v); - if (err) goto pop_1_error; + if (err) { + goto pop_1_error; + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -7810,7 +8671,9 @@ stack_pointer = _PyFrame_GetStackPointer(frame); } PyStackRef_CLOSE(v); - if (err) goto pop_1_error; + if (err) { + goto pop_1_error; + } stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -7857,7 +8720,9 @@ } PyStackRef_CLOSE(v); PyStackRef_CLOSE(container); - if (err) goto pop_4_error; + if (err) { + goto pop_4_error; + } } stack_pointer += -4; assert(WITHIN_STACK_BOUNDS()); @@ -7902,7 +8767,9 @@ PyStackRef_CLOSE(v); PyStackRef_CLOSE(container); PyStackRef_CLOSE(sub); - if (err) goto pop_3_error; + if (err) { + goto pop_3_error; + } } stack_pointer += -3; assert(WITHIN_STACK_BOUNDS()); @@ -7929,10 +8796,14 @@ PyStackRef_AsPyObjectSteal(sub), PyStackRef_AsPyObjectSteal(value)); stack_pointer = _PyFrame_GetStackPointer(frame); - PyStackRef_CLOSE(dict_st); - if (err) goto pop_3_error; stack_pointer += -3; assert(WITHIN_STACK_BOUNDS()); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(dict_st); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (err) { + goto error; + } DISPATCH(); } @@ -7967,10 +8838,10 @@ assert(old_value != NULL); UNLOCK_OBJECT(list); // unlock before decrefs! 
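/*
 * A minimal standalone sketch of the "unlock before decrefs" rule flagged in
 * the list store below: the container is mutated under its lock, but the
 * displaced reference is only dropped after the lock is released, since the
 * drop can run arbitrary finalizer code that might re-enter the same object.
 * `Obj`, `List` and `release` are simplified stand-ins, not the CPython API.
 */
#include <pthread.h>
#include <stdio.h>

typedef struct { int refcnt; } Obj;

typedef struct {
    pthread_mutex_t lock;
    Obj *item;
} List;

static void release(Obj *o)         /* may run arbitrary code, may take locks */
{
    if (o && --o->refcnt == 0) {
        printf("finalizer runs with no lock held\n");
    }
}

static void store_item(List *list, Obj *value)
{
    pthread_mutex_lock(&list->lock);
    Obj *old = list->item;                   /* displace under the lock        */
    list->item = value;
    pthread_mutex_unlock(&list->lock);       /* unlock before decrefs!         */
    release(old);                            /* cannot deadlock on list->lock  */
}

int main(void)
{
    static Obj a = {1};
    List list = { PTHREAD_MUTEX_INITIALIZER, &a };
    store_item(&list, NULL);
    return 0;
}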
PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); - PyStackRef_CLOSE(list_st); stack_pointer += -3; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); + PyStackRef_CLOSE(list_st); Py_DECREF(old_value); stack_pointer = _PyFrame_GetStackPointer(frame); DISPATCH(); @@ -7980,17 +8851,14 @@ frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(SWAP); - _PyStackRef bottom_in; - _PyStackRef top_in; - _PyStackRef top_out; - _PyStackRef bottom_out; - top_in = stack_pointer[-1]; - bottom_in = stack_pointer[-2 - (oparg-2)]; - bottom_out = bottom_in; - top_out = top_in; + _PyStackRef *bottom; + _PyStackRef *top; + top = &stack_pointer[-1]; + bottom = &stack_pointer[-2 - (oparg-2)]; + _PyStackRef temp = bottom[0]; + bottom[0] = top[0]; + top[0] = temp; assert(oparg >= 2); - stack_pointer[-2 - (oparg-2)] = top_out; - stack_pointer[-1] = bottom_out; DISPATCH(); } @@ -8027,7 +8895,9 @@ int err = PyObject_IsTrue(PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); - if (err < 0) goto pop_1_error; + if (err < 0) { + goto pop_1_error; + } res = err ? PyStackRef_True : PyStackRef_False; } stack_pointer[-1] = res; @@ -8036,6 +8906,7 @@ TARGET(TO_BOOL_ALWAYS_TRUE) { _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + (void)this_instr; next_instr += 4; INSTRUCTION_STATS(TO_BOOL_ALWAYS_TRUE); static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size"); @@ -8174,7 +9045,9 @@ PyObject *res_o = PyNumber_Invert(PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); - if (res_o == NULL) goto pop_1_error; + if (res_o == NULL) { + goto pop_1_error; + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-1] = res; DISPATCH(); @@ -8191,7 +9064,9 @@ PyObject *res_o = PyNumber_Negative(PyStackRef_AsPyObjectBorrow(value)); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(value); - if (res_o == NULL) goto pop_1_error; + if (res_o == NULL) { + goto pop_1_error; + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[-1] = res; DISPATCH(); @@ -8224,7 +9099,9 @@ int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg & 0xFF, oparg >> 8, top); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(seq); - if (res == 0) goto pop_1_error; + if (res == 0) { + goto pop_1_error; + } stack_pointer += (oparg & 0xFF) + (oparg >> 8); assert(WITHIN_STACK_BOUNDS()); DISPATCH(); @@ -8266,7 +9143,9 @@ int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg, -1, top); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(seq); - if (res == 0) goto pop_1_error; + if (res == 0) { + goto pop_1_error; + } } stack_pointer += -1 + oparg; assert(WITHIN_STACK_BOUNDS()); @@ -8395,7 +9274,9 @@ PyObject *res_o = PyObject_Vectorcall(exit_func_o, stack + 2 - has_self, (3 + has_self) | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL); stack_pointer = _PyFrame_GetStackPointer(frame); - if (res_o == NULL) goto error; + if (res_o == NULL) { + goto error; + } res = PyStackRef_FromPyObjectSteal(res_o); stack_pointer[0] = res; stack_pointer += 1; @@ -8474,20 +9355,20 @@ pop_4_error: { - STACK_SHRINK(1); - goto pop_3_error; + STACK_SHRINK(4); + goto error; } pop_3_error: { - STACK_SHRINK(1); - goto pop_2_error; + STACK_SHRINK(3); + goto error; } pop_2_error: { - STACK_SHRINK(1); - goto pop_1_error; + STACK_SHRINK(2); + goto error; } pop_1_error: @@ -8501,8 +9382,10 @@ /* Double-check exception status. 
*/ #ifdef NDEBUG if (!_PyErr_Occurred(tstate)) { + _PyFrame_SetStackPointer(frame, stack_pointer); _PyErr_SetString(tstate, PyExc_SystemError, "error return without exception set"); + stack_pointer = _PyFrame_GetStackPointer(frame); } #else assert(_PyErr_Occurred(tstate)); @@ -8511,37 +9394,47 @@ /* Log traceback info. */ assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); if (!_PyFrame_IsIncomplete(frame)) { + _PyFrame_SetStackPointer(frame, stack_pointer); PyFrameObject *f = _PyFrame_GetFrameObject(frame); + stack_pointer = _PyFrame_GetStackPointer(frame); if (f != NULL) { + _PyFrame_SetStackPointer(frame, stack_pointer); PyTraceBack_Here(f); + stack_pointer = _PyFrame_GetStackPointer(frame); } } + _PyFrame_SetStackPointer(frame, stack_pointer); _PyEval_MonitorRaise(tstate, frame, next_instr-1); + stack_pointer = _PyFrame_GetStackPointer(frame); + _PyFrame_SetStackPointer(frame, stack_pointer); goto exception_unwind; } exception_unwind: { + /* STACK SPILLED */ /* We can't use frame->instr_ptr here, as RERAISE may have set it */ int offset = INSTR_OFFSET()-1; int level, handler, lasti; - if (get_exception_handler(_PyFrame_GetCode(frame), offset, &level, &handler, &lasti) == 0) { + int handled = get_exception_handler(_PyFrame_GetCode(frame), offset, &level, &handler, &lasti); + if (handled == 0) { // No handlers, so exit. assert(_PyErr_Occurred(tstate)); /* Pop remaining stack entries. */ _PyStackRef *stackbase = _PyFrame_Stackbase(frame); - while (stack_pointer > stackbase) { - PyStackRef_XCLOSE(POP()); + while (frame->stackpointer > stackbase) { + _PyStackRef ref = _PyFrame_StackPop(frame); + PyStackRef_XCLOSE(ref); } - assert(STACK_LEVEL() == 0); - _PyFrame_SetStackPointer(frame, stack_pointer); monitor_unwind(tstate, frame, next_instr-1); goto exit_unwind; } assert(STACK_LEVEL() >= level); _PyStackRef *new_top = _PyFrame_Stackbase(frame) + level; - while (stack_pointer > new_top) { - PyStackRef_XCLOSE(POP()); + assert(frame->stackpointer >= new_top); + while (frame->stackpointer > new_top) { + _PyStackRef ref = _PyFrame_StackPop(frame); + PyStackRef_XCLOSE(ref); } if (lasti) { int frame_lasti = _PyInterpreterFrame_LASTI(frame); @@ -8549,16 +9442,17 @@ if (lasti == NULL) { goto exception_unwind; } - PUSH(PyStackRef_FromPyObjectSteal(lasti)); + _PyFrame_StackPush(frame, PyStackRef_FromPyObjectSteal(lasti)); } /* Make the raw exception data available to the handler, so a program can emulate the Python main loop. 
*/ PyObject *exc = _PyErr_GetRaisedException(tstate); - PUSH(PyStackRef_FromPyObjectSteal(exc)); + _PyFrame_StackPush(frame, PyStackRef_FromPyObjectSteal(exc)); next_instr = _PyFrame_GetBytecode(frame) + handler; - if (monitor_handled(tstate, frame, next_instr, exc) < 0) { + int err = monitor_handled(tstate, frame, next_instr, exc); + if (err < 0) { goto exception_unwind; } /* Resume normal execution */ @@ -8567,11 +9461,13 @@ lltrace_resume_frame(frame); } #endif + stack_pointer = _PyFrame_GetStackPointer(frame); DISPATCH(); } exit_unwind: { + /* STACK SPILLED */ assert(_PyErr_Occurred(tstate)); _Py_LeaveRecursiveCallPy(tstate); assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); @@ -8586,14 +9482,37 @@ tstate->c_recursion_remaining += PY_EVAL_C_STACK_UNITS; return NULL; } - goto resume_with_error; + next_instr = frame->instr_ptr; + stack_pointer = _PyFrame_GetStackPointer(frame); + goto error; } - resume_with_error: + start_frame: { + /* STACK SPILLED */ + int too_deep = _Py_EnterRecursivePy(tstate); + if (too_deep) { + goto exit_unwind; + } next_instr = frame->instr_ptr; + #ifdef LLTRACE + { + int lltrace = maybe_lltrace_resume_frame(frame, GLOBALS()); + frame->lltrace = lltrace; + if (lltrace < 0) { + goto exit_unwind; + } + } + #endif + + #ifdef Py_DEBUG + /* _PyEval_EvalFrameDefault() must not be called with an exception set, + because it can clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!_PyErr_Occurred(tstate)); + #endif stack_pointer = _PyFrame_GetStackPointer(frame); - goto error; + DISPATCH(); } /* END LABELS */ diff --git a/Python/import.c b/Python/import.c index 91fa6dd6f8226b..816fdc86bf0939 100644 --- a/Python/import.c +++ b/Python/import.c @@ -594,7 +594,8 @@ _PyImport_ClearModulesByIndex(PyInterpreterState *interp) if (PyList_SetSlice(MODULES_BY_INDEX(interp), 0, PyList_GET_SIZE(MODULES_BY_INDEX(interp)), NULL)) { - PyErr_FormatUnraisable("Exception ignored on clearing interpreters module list"); + PyErr_FormatUnraisable("Exception ignored while " + "clearing interpreters module list"); } } @@ -4092,13 +4093,15 @@ _PyImport_FiniCore(PyInterpreterState *interp) int verbose = _PyInterpreterState_GetConfig(interp)->verbose; if (_PySys_ClearAttrString(interp, "meta_path", verbose) < 0) { - PyErr_FormatUnraisable("Exception ignored on clearing sys.meta_path"); + PyErr_FormatUnraisable("Exception ignored while " + "clearing sys.meta_path"); } // XXX Pull in most of finalize_modules() in pylifecycle.c. if (_PySys_ClearAttrString(interp, "modules", verbose) < 0) { - PyErr_FormatUnraisable("Exception ignored on clearing sys.modules"); + PyErr_FormatUnraisable("Exception ignored while " + "clearing sys.modules"); } _PyImport_ClearCore(interp); @@ -4121,7 +4124,7 @@ init_zipimport(PyThreadState *tstate, int verbose) PySys_WriteStderr("# installing zipimport hook\n"); } - PyObject *zipimporter = _PyImport_GetModuleAttrString("zipimport", "zipimporter"); + PyObject *zipimporter = PyImport_ImportModuleAttrString("zipimport", "zipimporter"); if (zipimporter == NULL) { _PyErr_Clear(tstate); /* No zipimporter object -- okay */ if (verbose) { @@ -4173,10 +4176,12 @@ _PyImport_FiniExternal(PyInterpreterState *interp) // XXX Uninstall importlib metapath importers here? 
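The reworded PyErr_FormatUnraisable calls in this file ("Exception ignored while ...") supply the err_msg that sys.unraisablehook receives. A small sketch of observing such reports from Python (hook and Bad are made-up names):

import sys

def hook(unraisable):
    # err_msg carries strings like "Exception ignored while clearing sys.meta_path";
    # destructor failures (as below) arrive with err_msg=None and .object set instead
    print(unraisable.err_msg, unraisable.exc_value, unraisable.object)

sys.unraisablehook = hook

class Bad:
    def __del__(self):
        raise RuntimeError("boom")

Bad()   # reported through the hook instead of being raised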
if (_PySys_ClearAttrString(interp, "path_importer_cache", verbose) < 0) { - PyErr_FormatUnraisable("Exception ignored on clearing sys.path_importer_cache"); + PyErr_FormatUnraisable("Exception ignored while " + "clearing sys.path_importer_cache"); } if (_PySys_ClearAttrString(interp, "path_hooks", verbose) < 0) { - PyErr_FormatUnraisable("Exception ignored on clearing sys.path_hooks"); + PyErr_FormatUnraisable("Exception ignored while " + "clearing sys.path_hooks"); } } @@ -4186,7 +4191,7 @@ _PyImport_FiniExternal(PyInterpreterState *interp) /******************/ PyObject * -_PyImport_GetModuleAttr(PyObject *modname, PyObject *attrname) +PyImport_ImportModuleAttr(PyObject *modname, PyObject *attrname) { PyObject *mod = PyImport_Import(modname); if (mod == NULL) { @@ -4198,7 +4203,7 @@ _PyImport_GetModuleAttr(PyObject *modname, PyObject *attrname) } PyObject * -_PyImport_GetModuleAttrString(const char *modname, const char *attrname) +PyImport_ImportModuleAttrString(const char *modname, const char *attrname) { PyObject *pmodname = PyUnicode_FromString(modname); if (pmodname == NULL) { @@ -4209,7 +4214,7 @@ _PyImport_GetModuleAttrString(const char *modname, const char *attrname) Py_DECREF(pmodname); return NULL; } - PyObject *result = _PyImport_GetModuleAttr(pmodname, pattrname); + PyObject *result = PyImport_ImportModuleAttr(pmodname, pattrname); Py_DECREF(pattrname); Py_DECREF(pmodname); return result; diff --git a/Python/jit.c b/Python/jit.c index 7dd0da7a45055a..092b873bc734e1 100644 --- a/Python/jit.c +++ b/Python/jit.c @@ -87,6 +87,7 @@ jit_free(unsigned char *memory, size_t size) jit_error("unable to free memory"); return -1; } + OPT_STAT_ADD(jit_freed_memory_size, size); return 0; } @@ -501,8 +502,8 @@ _PyJIT_Compile(_PyExecutorObject *executor, const _PyUOpInstruction trace[], siz // Round up to the nearest page: size_t page_size = get_page_size(); assert((page_size & (page_size - 1)) == 0); - size_t padding = page_size - ((code_size + data_size + state.trampolines.size) & (page_size - 1)); - size_t total_size = code_size + data_size + state.trampolines.size + padding; + size_t padding = page_size - ((code_size + state.trampolines.size + data_size) & (page_size - 1)); + size_t total_size = code_size + state.trampolines.size + data_size + padding; unsigned char *memory = jit_alloc(total_size); if (memory == NULL) { return -1; @@ -510,14 +511,21 @@ _PyJIT_Compile(_PyExecutorObject *executor, const _PyUOpInstruction trace[], siz #ifdef MAP_JIT pthread_jit_write_protect_np(0); #endif + // Collect memory stats + OPT_STAT_ADD(jit_total_memory_size, total_size); + OPT_STAT_ADD(jit_code_size, code_size); + OPT_STAT_ADD(jit_trampoline_size, state.trampolines.size); + OPT_STAT_ADD(jit_data_size, data_size); + OPT_STAT_ADD(jit_padding_size, padding); + OPT_HIST(total_size, trace_total_memory_hist); // Update the offsets of each instruction: for (size_t i = 0; i < length; i++) { state.instruction_starts[i] += (uintptr_t)memory; } // Loop again to emit the code: unsigned char *code = memory; - unsigned char *data = memory + code_size; - state.trampolines.mem = memory + code_size + data_size; + state.trampolines.mem = memory + code_size; + unsigned char *data = memory + code_size + state.trampolines.size; // Compile the shim, which handles converting between the native // calling convention and the calling convention used by jitted code // (which may be different for efficiency reasons). 
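The jit.c hunks above reorder the executable buffer to code | trampolines | data, record per-section sizes in the optimization stats, and keep rounding the allocation up to whole pages. A quick sketch of that arithmetic with made-up sizes (page_size must be a power of two, as the assert above requires):

page_size = 16384                       # hypothetical page size
code_size, trampolines_size, data_size = 9000, 512, 3000
used = code_size + trampolines_size + data_size
padding = page_size - (used & (page_size - 1))
total_size = used + padding
assert total_size % page_size == 0      # always ends on a page boundary

# section offsets inside the mapping, mirroring the new layout:
code_offset = 0
trampolines_offset = code_size          # state.trampolines.mem = memory + code_size
data_offset = code_size + trampolines_size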
@@ -539,7 +547,7 @@ _PyJIT_Compile(_PyExecutorObject *executor, const _PyUOpInstruction trace[], siz code += group->code_size; data += group->data_size; assert(code == memory + code_size); - assert(data == memory + code_size + data_size); + assert(data == memory + code_size + state.trampolines.size + data_size); #ifdef MAP_JIT pthread_jit_write_protect_np(1); #endif @@ -563,7 +571,8 @@ _PyJIT_Free(_PyExecutorObject *executor) executor->jit_side_entry = NULL; executor->jit_size = 0; if (jit_free(memory, size)) { - PyErr_WriteUnraisable(NULL); + PyErr_FormatUnraisable("Exception ignored while " + "freeing JIT memory"); } } } diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h index 873378b43374c5..2b84f0281e5356 100644 --- a/Python/opcode_targets.h +++ b/Python/opcode_targets.h @@ -194,6 +194,8 @@ static void *opcode_targets[256] = { &&TARGET_FOR_ITER_LIST, &&TARGET_FOR_ITER_RANGE, &&TARGET_FOR_ITER_TUPLE, + &&TARGET_JUMP_BACKWARD_JIT, + &&TARGET_JUMP_BACKWARD_NO_JIT, &&TARGET_LOAD_ATTR_CLASS, &&TARGET_LOAD_ATTR_CLASS_WITH_METACLASS_CHECK, &&TARGET_LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, @@ -232,15 +234,10 @@ static void *opcode_targets[256] = { &&_unknown_opcode, &&_unknown_opcode, &&_unknown_opcode, - &&_unknown_opcode, - &&_unknown_opcode, &&TARGET_INSTRUMENTED_END_FOR, &&TARGET_INSTRUMENTED_POP_ITER, &&TARGET_INSTRUMENTED_END_SEND, - &&TARGET_INSTRUMENTED_LOAD_SUPER_ATTR, &&TARGET_INSTRUMENTED_FOR_ITER, - &&TARGET_INSTRUMENTED_CALL_KW, - &&TARGET_INSTRUMENTED_CALL_FUNCTION_EX, &&TARGET_INSTRUMENTED_INSTRUCTION, &&TARGET_INSTRUMENTED_JUMP_FORWARD, &&TARGET_INSTRUMENTED_NOT_TAKEN, @@ -251,7 +248,10 @@ static void *opcode_targets[256] = { &&TARGET_INSTRUMENTED_RESUME, &&TARGET_INSTRUMENTED_RETURN_VALUE, &&TARGET_INSTRUMENTED_YIELD_VALUE, + &&TARGET_INSTRUMENTED_LOAD_SUPER_ATTR, &&TARGET_INSTRUMENTED_CALL, + &&TARGET_INSTRUMENTED_CALL_KW, + &&TARGET_INSTRUMENTED_CALL_FUNCTION_EX, &&TARGET_INSTRUMENTED_JUMP_BACKWARD, &&TARGET_INSTRUMENTED_LINE, &&TARGET_ENTER_EXECUTOR, diff --git a/Python/optimizer.c b/Python/optimizer.c index e3950843964f11..97831f58098c95 100644 --- a/Python/optimizer.c +++ b/Python/optimizer.c @@ -91,70 +91,13 @@ insert_executor(PyCodeObject *code, _Py_CODEUNIT *instr, int index, _PyExecutorO instr->op.arg = index; } - -static int -never_optimize( - _PyOptimizerObject* self, - _PyInterpreterFrame *frame, - _Py_CODEUNIT *instr, - _PyExecutorObject **exec, - int Py_UNUSED(stack_entries), - bool Py_UNUSED(progress_needed)) -{ - // This may be called if the optimizer is reset - return 0; -} - -PyTypeObject _PyDefaultOptimizer_Type = { - PyVarObject_HEAD_INIT(&PyType_Type, 0) - .tp_name = "noop_optimizer", - .tp_basicsize = sizeof(_PyOptimizerObject), - .tp_itemsize = 0, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION, -}; - -static _PyOptimizerObject _PyOptimizer_Default = { - PyObject_HEAD_INIT(&_PyDefaultOptimizer_Type) - .optimize = never_optimize, -}; - -_PyOptimizerObject * -_Py_GetOptimizer(void) -{ - PyInterpreterState *interp = _PyInterpreterState_GET(); - if (interp->optimizer == &_PyOptimizer_Default) { - return NULL; - } - Py_INCREF(interp->optimizer); - return interp->optimizer; -} - static _PyExecutorObject * make_executor_from_uops(_PyUOpInstruction *buffer, int length, const _PyBloomFilter *dependencies); -_PyOptimizerObject * -_Py_SetOptimizer(PyInterpreterState *interp, _PyOptimizerObject *optimizer) -{ - if (optimizer == NULL) { - optimizer = &_PyOptimizer_Default; - } - _PyOptimizerObject *old = interp->optimizer; - if (old == 
NULL) { - old = &_PyOptimizer_Default; - } - Py_INCREF(optimizer); - interp->optimizer = optimizer; - return old; -} - -int -_Py_SetTier2Optimizer(_PyOptimizerObject *optimizer) -{ - PyInterpreterState *interp = _PyInterpreterState_GET(); - _PyOptimizerObject *old = _Py_SetOptimizer(interp, optimizer); - Py_XDECREF(old); - return old == NULL ? -1 : 0; -} +static int +uop_optimize(_PyInterpreterFrame *frame, _Py_CODEUNIT *instr, + _PyExecutorObject **exec_ptr, int curr_stackentries, + bool progress_needed); /* Returns 1 if optimized, 0 if not optimized, and -1 for an error. * If optimized, *executor_ptr contains a new reference to the executor @@ -162,8 +105,10 @@ _Py_SetTier2Optimizer(_PyOptimizerObject *optimizer) int _PyOptimizer_Optimize( _PyInterpreterFrame *frame, _Py_CODEUNIT *start, - _PyStackRef *stack_pointer, _PyExecutorObject **executor_ptr, int chain_depth) + _PyExecutorObject **executor_ptr, int chain_depth) { + _PyStackRef *stack_pointer = frame->stackpointer; + assert(_PyInterpreterState_GET()->jit); // The first executor in a chain and the MAX_CHAIN_DEPTH'th executor *must* // make progress in order to avoid infinite loops or excessively-long // side-exit chains. We can only insert the executor into the bytecode if @@ -172,12 +117,10 @@ _PyOptimizer_Optimize( bool progress_needed = chain_depth == 0; PyCodeObject *code = _PyFrame_GetCode(frame); assert(PyCode_Check(code)); - PyInterpreterState *interp = _PyInterpreterState_GET(); if (progress_needed && !has_space_for_executor(code, start)) { return 0; } - _PyOptimizerObject *opt = interp->optimizer; - int err = opt->optimize(opt, frame, start, executor_ptr, (int)(stack_pointer - _PyFrame_Stackbase(frame)), progress_needed); + int err = uop_optimize(frame, start, executor_ptr, (int)(stack_pointer - _PyFrame_Stackbase(frame)), progress_needed); if (err <= 0) { return err; } @@ -684,6 +627,7 @@ translate_bytecode_to_trace( } case JUMP_BACKWARD: + case JUMP_BACKWARD_JIT: ADD_TO_TRACE(_CHECK_PERIODIC, 0, 0, target); _Py_FALLTHROUGH; case JUMP_BACKWARD_NO_INTERRUPT: @@ -1046,7 +990,6 @@ prepare_for_execution(_PyUOpInstruction *buffer, int length) current_error = next_spare; current_error_target = target; make_exit(&buffer[next_spare], _ERROR_POP_N, 0); - buffer[next_spare].oparg = popped; buffer[next_spare].operand0 = target; next_spare++; } @@ -1241,7 +1184,6 @@ int effective_trace_length(_PyUOpInstruction *buffer, int length) static int uop_optimize( - _PyOptimizerObject *self, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, _PyExecutorObject **exec_ptr, @@ -1299,31 +1241,6 @@ uop_optimize( return 1; } -static void -uop_opt_dealloc(PyObject *self) { - PyObject_Free(self); -} - -PyTypeObject _PyUOpOptimizer_Type = { - PyVarObject_HEAD_INIT(&PyType_Type, 0) - .tp_name = "uop_optimizer", - .tp_basicsize = sizeof(_PyOptimizerObject), - .tp_itemsize = 0, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION, - .tp_dealloc = uop_opt_dealloc, -}; - -PyObject * -_PyOptimizer_NewUOpOptimizer(void) -{ - _PyOptimizerObject *opt = PyObject_New(_PyOptimizerObject, &_PyUOpOptimizer_Type); - if (opt == NULL) { - return NULL; - } - opt->optimize = uop_optimize; - return (PyObject *)opt; -} - /***************************************** * Executor management diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c index fb14c4b7c8645b..91573e82841cc9 100644 --- a/Python/optimizer_bytecodes.c +++ b/Python/optimizer_bytecodes.c @@ -512,10 +512,11 @@ dummy_func(void) { top = bottom; } - op(_SWAP, (bottom_in, 
unused[oparg-2], top_in -- - top_out, unused[oparg-2], bottom_out)) { - bottom_out = bottom_in; - top_out = top_in; + op(_SWAP, (bottom[1], unused[oparg-2], top[1] -- bottom[1], unused[oparg-2], top[1])) { + JitOptSymbol *temp = bottom[0]; + bottom[0] = top[0]; + top[0] = temp; + assert(oparg >= 2); } op(_LOAD_ATTR_INSTANCE_VALUE, (offset/1, owner -- attr)) { @@ -629,10 +630,10 @@ dummy_func(void) { ctx->done = true; } - op(_INIT_CALL_BOUND_METHOD_EXACT_ARGS, (callable, unused, unused[oparg] -- func, self, unused[oparg])) { + op(_INIT_CALL_BOUND_METHOD_EXACT_ARGS, (callable[1], self_or_null[1], unused[oparg] -- callable[1], self_or_null[1], unused[oparg])) { (void)callable; - func = sym_new_not_null(ctx); - self = sym_new_not_null(ctx); + callable[0] = sym_new_not_null(ctx); + self_or_null[0] = sym_new_not_null(ctx); } op(_CHECK_FUNCTION_VERSION, (func_version/2, callable, self_or_null, unused[oparg] -- callable, self_or_null, unused[oparg])) { diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h index 2497754745c28b..11f1815a977798 100644 --- a/Python/optimizer_cases.c.h +++ b/Python/optimizer_cases.c.h @@ -1105,8 +1105,6 @@ break; } - /* _INSTRUMENTED_LOAD_SUPER_ATTR is not a viable micro-op for tier 2 */ - case _LOAD_SUPER_ATTR_ATTR: { JitOptSymbol *attr_st; attr_st = sym_new_not_null(ctx); @@ -1837,12 +1835,6 @@ } case _EXPAND_METHOD: { - JitOptSymbol **method; - JitOptSymbol **self; - method = &stack_pointer[-2 - oparg]; - self = &stack_pointer[-1 - oparg]; - method[0] = sym_new_not_null(ctx); - self[0] = sym_new_not_null(ctx); break; } @@ -1870,15 +1862,13 @@ } case _INIT_CALL_BOUND_METHOD_EXACT_ARGS: { - JitOptSymbol *callable; - JitOptSymbol *func; - JitOptSymbol *self; - callable = stack_pointer[-2 - oparg]; + JitOptSymbol **self_or_null; + JitOptSymbol **callable; + self_or_null = &stack_pointer[-1 - oparg]; + callable = &stack_pointer[-2 - oparg]; (void)callable; - func = sym_new_not_null(ctx); - self = sym_new_not_null(ctx); - stack_pointer[-2 - oparg] = func; - stack_pointer[-1 - oparg] = self; + callable[0] = sym_new_not_null(ctx); + self_or_null[0] = sym_new_not_null(ctx); break; } @@ -2161,7 +2151,7 @@ break; } - /* _INSTRUMENTED_CALL_KW is not a viable micro-op for tier 2 */ + /* _MONITOR_CALL_KW is not a viable micro-op for tier 2 */ case _MAYBE_EXPAND_METHOD_KW: { JitOptSymbol **func; @@ -2214,12 +2204,6 @@ } case _EXPAND_METHOD_KW: { - JitOptSymbol **method; - JitOptSymbol **self; - method = &stack_pointer[-3 - oparg]; - self = &stack_pointer[-2 - oparg]; - method[0] = sym_new_not_null(ctx); - self[0] = sym_new_not_null(ctx); break; } @@ -2236,8 +2220,6 @@ break; } - /* _INSTRUMENTED_CALL_FUNCTION_EX is not a viable micro-op for tier 2 */ - case _MAKE_CALLARGS_A_TUPLE: { JitOptSymbol *tuple; JitOptSymbol *kwargs_out; @@ -2410,16 +2392,14 @@ } case _SWAP: { - JitOptSymbol *top_in; - JitOptSymbol *bottom_in; - JitOptSymbol *top_out; - JitOptSymbol *bottom_out; - top_in = stack_pointer[-1]; - bottom_in = stack_pointer[-2 - (oparg-2)]; - bottom_out = bottom_in; - top_out = top_in; - stack_pointer[-2 - (oparg-2)] = top_out; - stack_pointer[-1] = bottom_out; + JitOptSymbol **top; + JitOptSymbol **bottom; + top = &stack_pointer[-1]; + bottom = &stack_pointer[-2 - (oparg-2)]; + JitOptSymbol *temp = bottom[0]; + bottom[0] = top[0]; + top[0] = temp; + assert(oparg >= 2); break; } @@ -2639,8 +2619,6 @@ } case _ERROR_POP_N: { - stack_pointer += -oparg; - assert(WITHIN_STACK_BOUNDS()); break; } diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 
4cfc52acefefe0..aac206f0fd7dff 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1311,14 +1311,7 @@ init_interp_main(PyThreadState *tstate) } else #endif { - PyObject *opt = _PyOptimizer_NewUOpOptimizer(); - if (opt == NULL) { - return _PyStatus_ERR("can't initialize optimizer"); - } - if (_Py_SetTier2Optimizer((_PyOptimizerObject *)opt)) { - return _PyStatus_ERR("can't install optimizer"); - } - Py_DECREF(opt); + interp->jit = true; } } } @@ -1487,13 +1480,15 @@ finalize_modules_delete_special(PyThreadState *tstate, int verbose) PySys_WriteStderr("# clear builtins._\n"); } if (PyDict_SetItemString(interp->builtins, "_", Py_None) < 0) { - PyErr_FormatUnraisable("Exception ignored on setting builtin variable _"); + PyErr_FormatUnraisable("Exception ignored while " + "setting builtin variable _"); } const char * const *p; for (p = sys_deletes; *p != NULL; p++) { if (_PySys_ClearAttrString(interp, *p, verbose) < 0) { - PyErr_FormatUnraisable("Exception ignored on clearing sys.%s", *p); + PyErr_FormatUnraisable("Exception ignored while " + "clearing sys.%s", *p); } } for (p = sys_files; *p != NULL; p+=2) { @@ -1504,13 +1499,15 @@ finalize_modules_delete_special(PyThreadState *tstate, int verbose) } PyObject *value; if (PyDict_GetItemStringRef(interp->sysdict, orig_name, &value) < 0) { - PyErr_FormatUnraisable("Exception ignored on restoring sys.%s", name); + PyErr_FormatUnraisable("Exception ignored while " + "restoring sys.%s", name); } if (value == NULL) { value = Py_NewRef(Py_None); } if (PyDict_SetItemString(interp->sysdict, name, value) < 0) { - PyErr_FormatUnraisable("Exception ignored on restoring sys.%s", name); + PyErr_FormatUnraisable("Exception ignored while " + "restoring sys.%s", name); } Py_DECREF(value); } @@ -1522,7 +1519,7 @@ finalize_remove_modules(PyObject *modules, int verbose) { PyObject *weaklist = PyList_New(0); if (weaklist == NULL) { - PyErr_FormatUnraisable("Exception ignored on removing modules"); + PyErr_FormatUnraisable("Exception ignored while removing modules"); } #define STORE_MODULE_WEAKREF(name, mod) \ @@ -1531,13 +1528,13 @@ finalize_remove_modules(PyObject *modules, int verbose) if (wr) { \ PyObject *tup = PyTuple_Pack(2, name, wr); \ if (!tup || PyList_Append(weaklist, tup) < 0) { \ - PyErr_FormatUnraisable("Exception ignored on removing modules"); \ + PyErr_FormatUnraisable("Exception ignored while removing modules"); \ } \ Py_XDECREF(tup); \ Py_DECREF(wr); \ } \ else { \ - PyErr_FormatUnraisable("Exception ignored on removing modules"); \ + PyErr_FormatUnraisable("Exception ignored while removing modules"); \ } \ } @@ -1548,7 +1545,7 @@ finalize_remove_modules(PyObject *modules, int verbose) } \ STORE_MODULE_WEAKREF(name, mod); \ if (PyObject_SetItem(modules, name, Py_None) < 0) { \ - PyErr_FormatUnraisable("Exception ignored on removing modules"); \ + PyErr_FormatUnraisable("Exception ignored while removing modules"); \ } \ } @@ -1562,14 +1559,14 @@ finalize_remove_modules(PyObject *modules, int verbose) else { PyObject *iterator = PyObject_GetIter(modules); if (iterator == NULL) { - PyErr_FormatUnraisable("Exception ignored on removing modules"); + PyErr_FormatUnraisable("Exception ignored while removing modules"); } else { PyObject *key; while ((key = PyIter_Next(iterator))) { PyObject *value = PyObject_GetItem(modules, key); if (value == NULL) { - PyErr_FormatUnraisable("Exception ignored on removing modules"); + PyErr_FormatUnraisable("Exception ignored while removing modules"); continue; } CLEAR_MODULE(key, value); @@ -1577,7 +1574,7 
@@ finalize_remove_modules(PyObject *modules, int verbose) Py_DECREF(key); } if (PyErr_Occurred()) { - PyErr_FormatUnraisable("Exception ignored on removing modules"); + PyErr_FormatUnraisable("Exception ignored while removing modules"); } Py_DECREF(iterator); } @@ -1597,7 +1594,7 @@ finalize_clear_modules_dict(PyObject *modules) } else { if (PyObject_CallMethodNoArgs(modules, &_Py_ID(clear)) == NULL) { - PyErr_FormatUnraisable("Exception ignored on clearing sys.modules"); + PyErr_FormatUnraisable("Exception ignored while clearing sys.modules"); } } } @@ -1609,11 +1606,11 @@ finalize_restore_builtins(PyThreadState *tstate) PyInterpreterState *interp = tstate->interp; PyObject *dict = PyDict_Copy(interp->builtins); if (dict == NULL) { - PyErr_FormatUnraisable("Exception ignored on restoring builtins"); + PyErr_FormatUnraisable("Exception ignored while restoring builtins"); } PyDict_Clear(interp->builtins); if (PyDict_Update(interp->builtins, interp->builtins_copy)) { - PyErr_FormatUnraisable("Exception ignored on restoring builtins"); + PyErr_FormatUnraisable("Exception ignored while restoring builtins"); } Py_XDECREF(dict); } @@ -1670,11 +1667,10 @@ finalize_modules(PyThreadState *tstate) { PyInterpreterState *interp = tstate->interp; + // Invalidate all executors and turn off JIT: + interp->jit = false; #ifdef _Py_TIER2 - // Invalidate all executors and turn off tier 2 optimizer _Py_Executors_InvalidateAll(interp, 0); - _PyOptimizerObject *old = _Py_SetOptimizer(interp, NULL); - Py_XDECREF(old); #endif // Stop watching __builtin__ modifications @@ -1783,15 +1779,16 @@ flush_std_files(void) int status = 0; if (PySys_GetOptionalAttr(&_Py_ID(stdout), &file) < 0) { - PyErr_FormatUnraisable("Exception ignored on flushing sys.stdout"); status = -1; } else if (file != NULL && file != Py_None && !file_is_closed(file)) { if (_PyFile_Flush(file) < 0) { - PyErr_FormatUnraisable("Exception ignored on flushing sys.stdout"); status = -1; } } + if (status < 0) { + PyErr_FormatUnraisable("Exception ignored while flushing sys.stdout"); + } Py_XDECREF(file); if (PySys_GetOptionalAttr(&_Py_ID(stderr), &file) < 0) { @@ -2630,7 +2627,7 @@ create_stdio(const PyConfig *config, PyObject* io, #ifdef HAVE_WINDOWS_CONSOLE_IO /* Windows console IO is always UTF-8 encoded */ - PyTypeObject *winconsoleio_type = (PyTypeObject *)_PyImport_GetModuleAttr( + PyTypeObject *winconsoleio_type = (PyTypeObject *)PyImport_ImportModuleAttr( &_Py_ID(_io), &_Py_ID(_WindowsConsoleIO)); if (winconsoleio_type == NULL) { goto error; @@ -2735,7 +2732,7 @@ init_set_builtins_open(void) goto error; } - if (!(wrapper = _PyImport_GetModuleAttrString("io", "open"))) { + if (!(wrapper = PyImport_ImportModuleAttrString("io", "open"))) { goto error; } diff --git a/Python/pystate.c b/Python/pystate.c index 26047edb459480..e6770ef40df740 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -655,11 +655,9 @@ init_interpreter(PyInterpreterState *interp, } interp->sys_profile_initialized = false; interp->sys_trace_initialized = false; -#ifdef _Py_TIER2 - (void)_Py_SetOptimizer(interp, NULL); + interp->jit = false; interp->executor_list_head = NULL; interp->trace_run_counter = JIT_CLEANUP_THRESHOLD; -#endif if (interp != &runtime->_main_interpreter) { /* Fix the self-referential, statically initialized fields. 
*/ interp->dtoa = (struct _dtoa_state)_dtoa_state_INIT(interp); @@ -829,12 +827,6 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate) tstate->_status.cleared = 0; } -#ifdef _Py_TIER2 - _PyOptimizerObject *old = _Py_SetOptimizer(interp, NULL); - assert(old != NULL); - Py_DECREF(old); -#endif - /* It is possible that any of the objects below have a finalizer that runs Python code or otherwise relies on a thread state or even the interpreter state. For now we trust that isn't diff --git a/Python/pythonrun.c b/Python/pythonrun.c index a08edbd8ab1751..469e43241eddc6 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -1141,22 +1141,15 @@ _PyErr_Display(PyObject *file, PyObject *unused, PyObject *value, PyObject *tb) int unhandled_keyboard_interrupt = _PyRuntime.signals.unhandled_keyboard_interrupt; // Try first with the stdlib traceback module - PyObject *traceback_module = PyImport_ImportModule("traceback"); - - if (traceback_module == NULL) { - goto fallback; - } - - PyObject *print_exception_fn = PyObject_GetAttrString(traceback_module, "_print_exception_bltin"); - + PyObject *print_exception_fn = PyImport_ImportModuleAttrString( + "traceback", + "_print_exception_bltin"); if (print_exception_fn == NULL || !PyCallable_Check(print_exception_fn)) { - Py_DECREF(traceback_module); goto fallback; } PyObject* result = PyObject_CallOneArg(print_exception_fn, value); - Py_DECREF(traceback_module); Py_XDECREF(print_exception_fn); if (result) { Py_DECREF(result); @@ -1415,27 +1408,18 @@ run_mod(mod_ty mod, PyObject *filename, PyObject *globals, PyObject *locals, } if (interactive_src) { - PyObject *linecache_module = PyImport_ImportModule("linecache"); - - if (linecache_module == NULL) { - Py_DECREF(co); - Py_DECREF(interactive_filename); - return NULL; - } - - PyObject *print_tb_func = PyObject_GetAttrString(linecache_module, "_register_code"); - + PyObject *print_tb_func = PyImport_ImportModuleAttrString( + "linecache", + "_register_code"); if (print_tb_func == NULL) { Py_DECREF(co); Py_DECREF(interactive_filename); - Py_DECREF(linecache_module); return NULL; } if (!PyCallable_Check(print_tb_func)) { Py_DECREF(co); Py_DECREF(interactive_filename); - Py_DECREF(linecache_module); Py_DECREF(print_tb_func); PyErr_SetString(PyExc_ValueError, "linecache._register_code is not callable"); return NULL; @@ -1450,7 +1434,6 @@ run_mod(mod_ty mod, PyObject *filename, PyObject *globals, PyObject *locals, Py_DECREF(interactive_filename); - Py_DECREF(linecache_module); Py_XDECREF(print_tb_func); Py_XDECREF(result); if (!result) { diff --git a/Python/specialize.c b/Python/specialize.c index fa022346bdea6a..8831cfaa82be9b 100644 --- a/Python/specialize.c +++ b/Python/specialize.c @@ -309,6 +309,14 @@ print_optimization_stats(FILE *out, OptimizationStats *stats) ); } } + fprintf(out, "JIT total memory size: %" PRIu64 "\n", stats->jit_total_memory_size); + fprintf(out, "JIT code size: %" PRIu64 "\n", stats->jit_code_size); + fprintf(out, "JIT trampoline size: %" PRIu64 "\n", stats->jit_trampoline_size); + fprintf(out, "JIT data size: %" PRIu64 "\n", stats->jit_data_size); + fprintf(out, "JIT padding size: %" PRIu64 "\n", stats->jit_padding_size); + fprintf(out, "JIT freed memory size: %" PRIu64 "\n", stats->jit_freed_memory_size); + + print_histogram(out, "Trace total memory size", stats->trace_total_memory_hist); } #endif @@ -441,8 +449,7 @@ do { \ // Initialize warmup counters and optimize instructions. This cannot fail. 
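The pythonrun.c hunks above collapse the import-then-getattr pairs into single PyImport_ImportModuleAttrString calls. Roughly the Python-level equivalent of that helper (a sketch only; import_module_attr is a made-up name, not the C implementation):

from importlib import import_module

def import_module_attr(modname, attrname):
    # approximately what PyImport_ImportModuleAttrString("traceback",
    # "_print_exception_bltin") does on the C side
    return getattr(import_module(modname), attrname)

print_exception = import_module_attr("traceback", "_print_exception_bltin")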
void -_PyCode_Quicken(_Py_CODEUNIT *instructions, Py_ssize_t size, PyObject *consts, - int enable_counters) +_PyCode_Quicken(_Py_CODEUNIT *instructions, Py_ssize_t size, int enable_counters) { #if ENABLE_SPECIALIZATION_FT _Py_BackoffCounter jump_counter, adaptive_counter; @@ -581,6 +588,10 @@ _PyCode_Quicken(_Py_CODEUNIT *instructions, Py_ssize_t size, PyObject *consts, #define SPEC_FAIL_BINARY_OP_TRUE_DIVIDE_FLOAT 26 #define SPEC_FAIL_BINARY_OP_TRUE_DIVIDE_OTHER 27 #define SPEC_FAIL_BINARY_OP_XOR 28 +#define SPEC_FAIL_BINARY_OP_OR_INT 29 +#define SPEC_FAIL_BINARY_OP_OR_DIFFERENT_TYPES 30 +#define SPEC_FAIL_BINARY_OP_XOR_INT 31 +#define SPEC_FAIL_BINARY_OP_XOR_DIFFERENT_TYPES 32 /* Calls */ @@ -2379,6 +2390,12 @@ binary_op_fail_kind(int oparg, PyObject *lhs, PyObject *rhs) return SPEC_FAIL_BINARY_OP_MULTIPLY_OTHER; case NB_OR: case NB_INPLACE_OR: + if (!Py_IS_TYPE(lhs, Py_TYPE(rhs))) { + return SPEC_FAIL_BINARY_OP_OR_DIFFERENT_TYPES; + } + if (PyLong_CheckExact(lhs)) { + return SPEC_FAIL_BINARY_OP_OR_INT; + } return SPEC_FAIL_BINARY_OP_OR; case NB_POWER: case NB_INPLACE_POWER: @@ -2406,6 +2423,12 @@ binary_op_fail_kind(int oparg, PyObject *lhs, PyObject *rhs) return SPEC_FAIL_BINARY_OP_TRUE_DIVIDE_OTHER; case NB_XOR: case NB_INPLACE_XOR: + if (!Py_IS_TYPE(lhs, Py_TYPE(rhs))) { + return SPEC_FAIL_BINARY_OP_XOR_DIFFERENT_TYPES; + } + if (PyLong_CheckExact(lhs)) { + return SPEC_FAIL_BINARY_OP_XOR_INT; + } return SPEC_FAIL_BINARY_OP_XOR; } Py_UNREACHABLE(); @@ -2414,6 +2437,34 @@ binary_op_fail_kind(int oparg, PyObject *lhs, PyObject *rhs) /** Binary Op Specialization Extensions */ +/* long-long */ + +static inline int +is_compactlong(PyObject *v) +{ + return PyLong_CheckExact(v) && + _PyLong_IsCompact((PyLongObject *)v); +} + +static int +compactlongs_guard(PyObject *lhs, PyObject *rhs) +{ + return (is_compactlong(lhs) && is_compactlong(rhs)); +} + +#define BITWISE_LONGS_ACTION(NAME, OP) \ + static PyObject * \ + (NAME)(PyObject *lhs, PyObject *rhs) \ + { \ + Py_ssize_t rhs_val = _PyLong_CompactValue((PyLongObject *)rhs); \ + Py_ssize_t lhs_val = _PyLong_CompactValue((PyLongObject *)lhs); \ + return PyLong_FromSsize_t(lhs_val OP rhs_val); \ + } +BITWISE_LONGS_ACTION(compactlongs_or, |) +BITWISE_LONGS_ACTION(compactlongs_and, &) +BITWISE_LONGS_ACTION(compactlongs_xor, ^) +#undef BITWISE_LONGS_ACTION + /* float-long */ static inline int @@ -2484,6 +2535,15 @@ LONG_FLOAT_ACTION(compactlong_float_multiply, *) LONG_FLOAT_ACTION(compactlong_float_true_div, /) #undef LONG_FLOAT_ACTION +static _PyBinaryOpSpecializationDescr compactlongs_specs[NB_OPARG_LAST+1] = { + [NB_OR] = {compactlongs_guard, compactlongs_or}, + [NB_AND] = {compactlongs_guard, compactlongs_and}, + [NB_XOR] = {compactlongs_guard, compactlongs_xor}, + [NB_INPLACE_OR] = {compactlongs_guard, compactlongs_or}, + [NB_INPLACE_AND] = {compactlongs_guard, compactlongs_and}, + [NB_INPLACE_XOR] = {compactlongs_guard, compactlongs_xor}, +}; + static _PyBinaryOpSpecializationDescr float_compactlong_specs[NB_OPARG_LAST+1] = { [NB_ADD] = {float_compactlong_guard, float_compactlong_add}, [NB_SUBTRACT] = {float_compactlong_guard, float_compactlong_subtract}, @@ -2512,6 +2572,7 @@ binary_op_extended_specialization(PyObject *lhs, PyObject *rhs, int oparg, LOOKUP_SPEC(compactlong_float_specs, oparg); LOOKUP_SPEC(float_compactlong_specs, oparg); + LOOKUP_SPEC(compactlongs_specs, oparg); #undef LOOKUP_SPEC return 0; } diff --git a/Python/sysmodule.c b/Python/sysmodule.c index e6f49e643af77d..4110bb8239c085 100644 --- a/Python/sysmodule.c +++ 
b/Python/sysmodule.c @@ -1030,6 +1030,23 @@ sys__is_interned_impl(PyObject *module, PyObject *string) return PyUnicode_CHECK_INTERNED(string); } +/*[clinic input] +sys._is_immortal -> bool + + op: object + / + +Return True if the given object is "immortal" per PEP 683. + +This function should be used for specialized purposes only. +[clinic start generated code]*/ + +static int +sys__is_immortal_impl(PyObject *module, PyObject *op) +/*[clinic end generated code: output=c2f5d6a80efb8d1a input=4609c9bf5481db76]*/ +{ + return PyUnstable_IsImmortal(op); +} /* * Cached interned string objects used for calling the profile and @@ -2323,9 +2340,7 @@ sys_activate_stack_trampoline_impl(PyObject *module, const char *backend) { #ifdef PY_HAVE_PERF_TRAMPOLINE #ifdef _Py_JIT - _PyOptimizerObject* optimizer = _Py_GetOptimizer(); - if (optimizer != NULL) { - Py_DECREF(optimizer); + if (_PyInterpreterState_GET()->jit) { PyErr_SetString(PyExc_ValueError, "Cannot activate the perf trampoline if the JIT is active"); return NULL; } @@ -2648,6 +2663,7 @@ static PyMethodDef sys_methods[] = { SYS__GETFRAMEMODULENAME_METHODDEF SYS_GETWINDOWSVERSION_METHODDEF SYS__ENABLELEGACYWINDOWSFSENCODING_METHODDEF + SYS__IS_IMMORTAL_METHODDEF SYS_INTERN_METHODDEF SYS__IS_INTERNED_METHODDEF SYS_IS_FINALIZING_METHODDEF @@ -2915,6 +2931,7 @@ PySys_ResetWarnOptions(void) static int _PySys_AddWarnOptionWithError(PyThreadState *tstate, PyObject *option) { + assert(tstate != NULL); PyObject *warnoptions = get_warnoptions(tstate); if (warnoptions == NULL) { return -1; @@ -2932,11 +2949,11 @@ PyAPI_FUNC(void) PySys_AddWarnOptionUnicode(PyObject *option) { PyThreadState *tstate = _PyThreadState_GET(); + _Py_EnsureTstateNotNULL(tstate); + assert(!_PyErr_Occurred(tstate)); if (_PySys_AddWarnOptionWithError(tstate, option) < 0) { /* No return value, therefore clear error state if possible */ - if (tstate) { - _PyErr_Clear(tstate); - } + _PyErr_Clear(tstate); } } diff --git a/Tools/build/compute-changes.py b/Tools/build/compute-changes.py new file mode 100644 index 00000000000000..105ba58cc9d941 --- /dev/null +++ b/Tools/build/compute-changes.py @@ -0,0 +1,183 @@ +"""Determine which GitHub Actions workflows to run. + +Called by ``.github/workflows/reusable-context.yml``. +We only want to run tests on PRs when related files are changed, +or when someone triggers a manual workflow run. +This improves developer experience by not doing (slow) +unnecessary work in GHA, and saves CI resources. 
+""" + +from __future__ import annotations + +import os +import subprocess +from dataclasses import dataclass +from pathlib import Path + +TYPE_CHECKING = False +if TYPE_CHECKING: + from collections.abc import Set + +GITHUB_DEFAULT_BRANCH = os.environ["GITHUB_DEFAULT_BRANCH"] +GITHUB_CODEOWNERS_PATH = Path(".github/CODEOWNERS") +GITHUB_WORKFLOWS_PATH = Path(".github/workflows") +CONFIGURATION_FILE_NAMES = frozenset({ + ".pre-commit-config.yaml", + ".ruff.toml", + "mypy.ini", +}) +SUFFIXES_C_OR_CPP = frozenset({".c", ".h", ".cpp"}) +SUFFIXES_DOCUMENTATION = frozenset({".rst", ".md"}) + + +@dataclass(kw_only=True, slots=True) +class Outputs: + run_ci_fuzz: bool = False + run_docs: bool = False + run_tests: bool = False + run_windows_msi: bool = False + + +def compute_changes() -> None: + target_branch, head_branch = git_branches() + if target_branch and head_branch: + # Getting changed files only makes sense on a pull request + files = get_changed_files( + f"origin/{target_branch}", f"origin/{head_branch}" + ) + outputs = process_changed_files(files) + else: + # Otherwise, just run the tests + outputs = Outputs(run_tests=True) + outputs = process_target_branch(outputs, target_branch) + + if outputs.run_tests: + print("Run tests") + + if outputs.run_ci_fuzz: + print("Run CIFuzz tests") + else: + print("Branch too old for CIFuzz tests; or no C files were changed") + + if outputs.run_docs: + print("Build documentation") + + if outputs.run_windows_msi: + print("Build Windows MSI") + + print(outputs) + + write_github_output(outputs) + + +def git_branches() -> tuple[str, str]: + target_branch = os.environ.get("GITHUB_BASE_REF", "") + target_branch = target_branch.removeprefix("refs/heads/") + print(f"target branch: {target_branch!r}") + + head_branch = os.environ.get("GITHUB_HEAD_REF", "") + head_branch = head_branch.removeprefix("refs/heads/") + print(f"head branch: {head_branch!r}") + return target_branch, head_branch + + +def get_changed_files( + ref_a: str = GITHUB_DEFAULT_BRANCH, ref_b: str = "HEAD" +) -> Set[Path]: + """List the files changed between two Git refs, filtered by change type.""" + args = ("git", "diff", "--name-only", f"{ref_a}...{ref_b}", "--") + print(*args) + changed_files_result = subprocess.run( + args, stdout=subprocess.PIPE, check=True, encoding="utf-8" + ) + changed_files = changed_files_result.stdout.strip().splitlines() + return frozenset(map(Path, filter(None, map(str.strip, changed_files)))) + + +def process_changed_files(changed_files: Set[Path]) -> Outputs: + run_tests = False + run_ci_fuzz = False + run_docs = False + run_windows_msi = False + + for file in changed_files: + # Documentation files + doc_or_misc = file.parts[0] in {"Doc", "Misc"} + doc_file = file.suffix in SUFFIXES_DOCUMENTATION or doc_or_misc + + if file.parent == GITHUB_WORKFLOWS_PATH: + if file.name == "build.yml": + run_tests = run_ci_fuzz = True + if file.name == "reusable-docs.yml": + run_docs = True + if file.name == "reusable-windows-msi.yml": + run_windows_msi = True + + if not ( + doc_file + or file == GITHUB_CODEOWNERS_PATH + or file.name in CONFIGURATION_FILE_NAMES + ): + run_tests = True + + # The fuzz tests are pretty slow so they are executed only for PRs + # changing relevant files. 
+ if file.suffix in SUFFIXES_C_OR_CPP: + run_ci_fuzz = True + if file.parts[:2] in { + ("configure",), + ("Modules", "_xxtestfuzz"), + }: + run_ci_fuzz = True + + # Check for changed documentation-related files + if doc_file: + run_docs = True + + # Check for changed MSI installer-related files + if file.parts[:2] == ("Tools", "msi"): + run_windows_msi = True + + return Outputs( + run_ci_fuzz=run_ci_fuzz, + run_docs=run_docs, + run_tests=run_tests, + run_windows_msi=run_windows_msi, + ) + + +def process_target_branch(outputs: Outputs, git_branch: str) -> Outputs: + if not git_branch: + outputs.run_tests = True + + # CIFuzz / OSS-Fuzz compatibility with older branches may be broken. + if git_branch != GITHUB_DEFAULT_BRANCH: + outputs.run_ci_fuzz = False + + if os.environ.get("GITHUB_EVENT_NAME", "").lower() == "workflow_dispatch": + outputs.run_docs = True + outputs.run_windows_msi = True + + return outputs + + +def write_github_output(outputs: Outputs) -> None: + # https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/store-information-in-variables#default-environment-variables + # https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/workflow-commands-for-github-actions#setting-an-output-parameter + if "GITHUB_OUTPUT" not in os.environ: + print("GITHUB_OUTPUT not defined!") + return + + with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as f: + f.write(f"run-ci-fuzz={bool_lower(outputs.run_ci_fuzz)}\n") + f.write(f"run-docs={bool_lower(outputs.run_docs)}\n") + f.write(f"run-tests={bool_lower(outputs.run_tests)}\n") + f.write(f"run-windows-msi={bool_lower(outputs.run_windows_msi)}\n") + + +def bool_lower(value: bool, /) -> str: + return "true" if value else "false" + + +if __name__ == "__main__": + compute_changes() diff --git a/Tools/c-analyzer/c_analyzer/datafiles.py b/Tools/c-analyzer/c_analyzer/datafiles.py index d5db3bd3ed74ac..79c201a5d3b92c 100644 --- a/Tools/c-analyzer/c_analyzer/datafiles.py +++ b/Tools/c-analyzer/c_analyzer/datafiles.py @@ -104,7 +104,12 @@ def _iter_ignored(infile, relroot): for v in varidinfo) if reason in bogus: reason = None - varid = _info.DeclID.from_row(varidinfo) + try: + varid = _info.DeclID.from_row(varidinfo) + except BaseException as e: + e.add_note(f"Error occurred when processing row {varidinfo} in {infile}.") + e.add_note(f"Could it be that you added a row which is not tab-delimited?") + raise e varid = varid.fix_filename(relroot, formatted=False, fixroot=False) yield varid, reason diff --git a/Tools/c-analyzer/cpython/ignored.tsv b/Tools/c-analyzer/cpython/ignored.tsv index 1aabe262eac480..be3ded9f07ef8a 100644 --- a/Tools/c-analyzer/cpython/ignored.tsv +++ b/Tools/c-analyzer/cpython/ignored.tsv @@ -381,6 +381,7 @@ Python/pylifecycle.c - INTERPRETER_TRAMPOLINE_CODEDEF - Python/pystate.c - initial - Python/specialize.c - adaptive_opcodes - Python/specialize.c - cache_requirements - +Python/specialize.c - compactlongs_specs - Python/specialize.c - float_compactlong_specs - Python/specialize.c - compactlong_float_specs - Python/stdlib_module_names.h - _Py_stdlib_module_names - @@ -388,9 +389,7 @@ Python/sysmodule.c - perf_map_state - Python/sysmodule.c - _PySys_ImplCacheTag - Python/sysmodule.c - _PySys_ImplName - Python/sysmodule.c - whatstrings - -Python/optimizer.c - _PyDefaultOptimizer_Type - Python/optimizer.c - _PyUOpExecutor_Type - -Python/optimizer.c - _PyUOpOptimizer_Type - Python/optimizer.c - _PyOptimizer_Default - Python/optimizer.c - _ColdExit_Type - 
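For reference, the compute-changes.py script added above reports its decisions both as prints and as dash-separated keys appended to the file named by $GITHUB_OUTPUT. A sketch of what write_github_output() emits for a docs-only pull request, i.e. Outputs(run_docs=True); expected_output is only an illustration of the format:

# values are produced by bool_lower()
expected_output = (
    "run-ci-fuzz=false\n"
    "run-docs=true\n"
    "run-tests=false\n"
    "run-windows-msi=false\n"
)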
Python/optimizer.c - Py_FatalErrorExecutor - @@ -447,6 +446,8 @@ Modules/_testcapi/exceptions.c - PyRecursingInfinitelyError_Type - Modules/_testcapi/heaptype.c - _testcapimodule - Modules/_testcapi/mem.c - FmData - Modules/_testcapi/mem.c - FmHook - +Modules/_testcapi/object.c - MyObject_dealloc_called - +Modules/_testcapi/object.c - MyType - Modules/_testcapi/structmember.c - test_structmembersType_OldAPI - Modules/_testcapi/watchers.c - g_dict_watch_events - Modules/_testcapi/watchers.c - g_dict_watchers_installed - diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index bc9c42e045a610..11a559bca474b0 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -5,9 +5,16 @@ import re from typing import Optional +@dataclass +class EscapingCall: + start: lexer.Token + call: lexer.Token + end: lexer.Token + kills: lexer.Token | None + @dataclass class Properties: - escaping_calls: dict[lexer.Token, tuple[lexer.Token, lexer.Token]] + escaping_calls: dict[lexer.Token, EscapingCall] escapes: bool error_with_pop: bool error_without_pop: bool @@ -24,6 +31,7 @@ class Properties: has_free: bool side_exit: bool pure: bool + uses_opcode: bool tier: int | None = None oparg_and_1: bool = False const_oparg: int = -1 @@ -41,7 +49,7 @@ def dump(self, indent: str) -> None: @staticmethod def from_list(properties: list["Properties"]) -> "Properties": - escaping_calls: dict[lexer.Token, tuple[lexer.Token, lexer.Token]] = {} + escaping_calls: dict[lexer.Token, EscapingCall] = {} for p in properties: escaping_calls.update(p.escaping_calls) return Properties( @@ -59,6 +67,7 @@ def from_list(properties: list["Properties"]) -> "Properties": uses_co_consts=any(p.uses_co_consts for p in properties), uses_co_names=any(p.uses_co_names for p in properties), uses_locals=any(p.uses_locals for p in properties), + uses_opcode=any(p.uses_opcode for p in properties), has_free=any(p.has_free for p in properties), side_exit=any(p.side_exit for p in properties), pure=all(p.pure for p in properties), @@ -85,6 +94,7 @@ def infallible(self) -> bool: uses_co_consts=False, uses_co_names=False, uses_locals=False, + uses_opcode=False, has_free=False, side_exit=False, pure=True, @@ -120,6 +130,8 @@ def size(self) -> int: return 0 + + @dataclass class StackItem: name: str @@ -218,7 +230,24 @@ def is_super(self) -> bool: return False +class Label: + + def __init__(self, name: str, spilled: bool, body: list[lexer.Token], properties: Properties): + self.name = name + self.spilled = spilled + self.body = body + self.properties = properties + + size:int = 0 + output_stores: list[lexer.Token] = [] + instruction_size = None + + def __str__(self) -> str: + return f"label({self.name})" + + Part = Uop | Skip | Flush +CodeSection = Uop | Label @dataclass @@ -258,12 +287,6 @@ def is_super(self) -> bool: return False -@dataclass -class Label: - name: str - body: list[lexer.Token] - - @dataclass class PseudoInstruction: name: str @@ -330,6 +353,17 @@ def convert_stack_item( cond = replace_op_arg_1 return StackItem(item.name, item.type, cond, item.size) +def check_unused(stack: list[StackItem], input_names: dict[str, lexer.Token]) -> None: + "Unused items cannot be on the stack above used, non-peek items" + seen_unused = False + for item in reversed(stack): + if item.name == "unused": + seen_unused = True + elif item.peek: + break + elif seen_unused: + raise analysis_error(f"Cannot have used input '{item.name}' below an unused value on the stack", input_names[item.name]) + def 
analyze_stack( op: parser.InstDef | parser.Pseudo, replace_op_arg_1: str | None = None @@ -374,6 +408,7 @@ def analyze_stack( for output in outputs: if variable_used(op, output.name): output.used = True + check_unused(inputs, input_names) return StackEffect(inputs, outputs) @@ -481,22 +516,24 @@ def in_frame_push(idx: int) -> bool: return refs -def variable_used(node: parser.InstDef, name: str) -> bool: +def variable_used(node: parser.CodeDef, name: str) -> bool: """Determine whether a variable with a given name is used in a node.""" return any( token.kind == "IDENTIFIER" and token.text == name for token in node.block.tokens ) -def oparg_used(node: parser.InstDef) -> bool: +def oparg_used(node: parser.CodeDef) -> bool: """Determine whether `oparg` is used in a node.""" return any( token.kind == "IDENTIFIER" and token.text == "oparg" for token in node.tokens ) -def tier_variable(node: parser.InstDef) -> int | None: +def tier_variable(node: parser.CodeDef) -> int | None: """Determine whether a tier variable is used in a node.""" + if isinstance(node, parser.LabelDef): + return None for token in node.tokens: if token.kind == "ANNOTATION": if token.text == "specializing": @@ -506,21 +543,19 @@ def tier_variable(node: parser.InstDef) -> int | None: return None -def has_error_with_pop(op: parser.InstDef) -> bool: +def has_error_with_pop(op: parser.CodeDef) -> bool: return ( variable_used(op, "ERROR_IF") or variable_used(op, "pop_1_error") or variable_used(op, "exception_unwind") - or variable_used(op, "resume_with_error") ) -def has_error_without_pop(op: parser.InstDef) -> bool: +def has_error_without_pop(op: parser.CodeDef) -> bool: return ( variable_used(op, "ERROR_NO_POP") or variable_used(op, "pop_1_error") or variable_used(op, "exception_unwind") - or variable_used(op, "resume_with_error") ) @@ -550,7 +585,6 @@ def has_error_without_pop(op: parser.InstDef) -> bool: "PyStackRef_AsPyObjectNew", "PyStackRef_AsPyObjectSteal", "PyStackRef_CLEAR", - "PyStackRef_CLOSE", "PyStackRef_CLOSE_SPECIALIZED", "PyStackRef_DUP", "PyStackRef_False", @@ -573,7 +607,6 @@ def has_error_without_pop(op: parser.InstDef) -> bool: "PyUnicode_GET_LENGTH", "PyUnicode_READ_CHAR", "Py_ARRAY_LENGTH", - "Py_CLEAR", "Py_FatalError", "Py_INCREF", "Py_IS_TYPE", @@ -634,6 +667,7 @@ def has_error_without_pop(op: parser.InstDef) -> bool: "_Py_STR", "_Py_TryIncrefCompare", "_Py_TryIncrefCompareStackRef", + "_Py_atomic_compare_exchange_uint8", "_Py_atomic_load_ptr_acquire", "_Py_atomic_load_uintptr_relaxed", "_Py_set_eval_breaker_bit", @@ -645,7 +679,7 @@ def has_error_without_pop(op: parser.InstDef) -> bool: "restart_backoff_counter", ) -def find_stmt_start(node: parser.InstDef, idx: int) -> lexer.Token: +def find_stmt_start(node: parser.CodeDef, idx: int) -> lexer.Token: assert idx < len(node.block.tokens) while True: tkn = node.block.tokens[idx-1] @@ -658,7 +692,7 @@ def find_stmt_start(node: parser.InstDef, idx: int) -> lexer.Token: return node.block.tokens[idx] -def find_stmt_end(node: parser.InstDef, idx: int) -> lexer.Token: +def find_stmt_end(node: parser.CodeDef, idx: int) -> lexer.Token: assert idx < len(node.block.tokens) while True: idx += 1 @@ -666,8 +700,8 @@ def find_stmt_end(node: parser.InstDef, idx: int) -> lexer.Token: if tkn.kind == "SEMI": return node.block.tokens[idx+1] -def check_escaping_calls(instr: parser.InstDef, escapes: dict[lexer.Token, tuple[lexer.Token, lexer.Token]]) -> None: - calls = {escapes[t][0] for t in escapes} +def check_escaping_calls(instr: parser.CodeDef, escapes: dict[lexer.Token, 
EscapingCall]) -> None: + calls = {e.call for e in escapes.values()} in_if = 0 tkn_iter = iter(instr.block.tokens) for tkn in tkn_iter: @@ -685,8 +719,8 @@ def check_escaping_calls(instr: parser.InstDef, escapes: dict[lexer.Token, tuple elif tkn in calls and in_if: raise analysis_error(f"Escaping call '{tkn.text} in condition", tkn) -def find_escaping_api_calls(instr: parser.InstDef) -> dict[lexer.Token, tuple[lexer.Token, lexer.Token]]: - result: dict[lexer.Token, tuple[lexer.Token, lexer.Token]] = {} +def find_escaping_api_calls(instr: parser.CodeDef) -> dict[lexer.Token, EscapingCall]: + result: dict[lexer.Token, EscapingCall] = {} tokens = instr.block.tokens for idx, tkn in enumerate(tokens): try: @@ -721,23 +755,30 @@ def find_escaping_api_calls(instr: parser.InstDef) -> dict[lexer.Token, tuple[le continue elif tkn.kind != "RBRACKET": continue + if tkn.text in ("PyStackRef_CLOSE", "PyStackRef_XCLOSE"): + if len(tokens) <= idx+2: + raise analysis_error("Unexpected end of file", next_tkn) + kills = tokens[idx+2] + if kills.kind != "IDENTIFIER": + raise analysis_error(f"Expected identifier, got '{kills.text}'", kills) + else: + kills = None start = find_stmt_start(instr, idx) end = find_stmt_end(instr, idx) - result[start] = tkn, end + result[start] = EscapingCall(start, tkn, end, kills) check_escaping_calls(instr, result) return result EXITS = { "DISPATCH", - "GO_TO_INSTRUCTION", "Py_UNREACHABLE", "DISPATCH_INLINED", "DISPATCH_GOTO", } -def always_exits(op: parser.InstDef) -> bool: +def always_exits(op: parser.CodeDef) -> bool: depth = 0 tkn_iter = iter(op.tokens) for tkn in tkn_iter: @@ -796,7 +837,7 @@ def effect_depends_on_oparg_1(op: parser.InstDef) -> bool: return False -def compute_properties(op: parser.InstDef) -> Properties: +def compute_properties(op: parser.CodeDef) -> Properties: escaping_calls = find_escaping_api_calls(op) has_free = ( variable_used(op, "PyCell_New") @@ -817,16 +858,9 @@ def compute_properties(op: parser.InstDef) -> Properties: ) error_with_pop = has_error_with_pop(op) error_without_pop = has_error_without_pop(op) - escapes = ( - bool(escaping_calls) or - variable_used(op, "Py_DECREF") or - variable_used(op, "Py_XDECREF") or - variable_used(op, "Py_CLEAR") or - variable_used(op, "PyStackRef_CLOSE") or - variable_used(op, "PyStackRef_XCLOSE") or - variable_used(op, "PyStackRef_CLEAR") or - variable_used(op, "SETLOCAL") - ) + escapes = bool(escaping_calls) + pure = False if isinstance(op, parser.LabelDef) else "pure" in op.annotations + no_save_ip = False if isinstance(op, parser.LabelDef) else "no_save_ip" in op.annotations return Properties( escaping_calls=escaping_calls, escapes=escapes, @@ -842,11 +876,11 @@ def compute_properties(op: parser.InstDef) -> Properties: stores_sp=variable_used(op, "SYNC_SP"), uses_co_consts=variable_used(op, "FRAME_CO_CONSTS"), uses_co_names=variable_used(op, "FRAME_CO_NAMES"), - uses_locals=(variable_used(op, "GETLOCAL") or variable_used(op, "SETLOCAL")) - and not has_free, + uses_locals=variable_used(op, "GETLOCAL") and not has_free, + uses_opcode=variable_used(op, "opcode"), has_free=has_free, - pure="pure" in op.annotations, - no_save_ip="no_save_ip" in op.annotations, + pure=pure, + no_save_ip=no_save_ip, tier=tier_variable(op), needs_prev=variable_used(op, "prev_instr"), ) @@ -1025,7 +1059,8 @@ def add_label( label: parser.LabelDef, labels: dict[str, Label], ) -> None: - labels[label.name] = Label(label.name, label.block.tokens) + properties = compute_properties(label) + labels[label.name] = Label(label.name, 
label.spilled, label.block.tokens, properties) def assign_opcodes( @@ -1178,17 +1213,6 @@ def analyze_forest(forest: list[parser.AstNode]) -> Analysis: add_label(node, labels) case _: pass - for uop in uops.values(): - tkn_iter = iter(uop.body) - for tkn in tkn_iter: - if tkn.kind == "IDENTIFIER" and tkn.text == "GO_TO_INSTRUCTION": - if next(tkn_iter).kind != "LPAREN": - continue - target = next(tkn_iter) - if target.kind != "IDENTIFIER": - continue - if target.text in instructions: - instructions[target.text].is_target = True for uop in uops.values(): uop.instruction_size = get_instruction_size_for_uop(instructions, uop) # Special case BINARY_OP_INPLACE_ADD_UNICODE diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py index f1f166ae104ba5..1c572ec0512b37 100644 --- a/Tools/cases_generator/generators_common.py +++ b/Tools/cases_generator/generators_common.py @@ -7,6 +7,8 @@ Properties, StackItem, analysis_error, + Label, + CodeSection, ) from cwriter import CWriter from typing import Callable, TextIO, Iterator, Iterable @@ -90,7 +92,7 @@ def emit_to(out: CWriter, tkn_iter: TokenIterator, end: str) -> Token: ReplacementFunctionType = Callable[ - [Token, TokenIterator, Uop, Storage, Instruction | None], bool + [Token, TokenIterator, CodeSection, Storage, Instruction | None], bool ] def always_true(tkn: Token | None) -> bool: @@ -106,9 +108,10 @@ def always_true(tkn: Token | None) -> bool: class Emitter: out: CWriter + labels: dict[str, Label] _replacers: dict[str, ReplacementFunctionType] - def __init__(self, out: CWriter): + def __init__(self, out: CWriter, labels: dict[str, Label]): self._replacers = { "EXIT_IF": self.exit_if, "DEOPT_IF": self.deopt_if, @@ -120,25 +123,26 @@ def __init__(self, out: CWriter): "SYNC_SP": self.sync_sp, "SAVE_STACK": self.save_stack, "RELOAD_STACK": self.reload_stack, - "PyStackRef_CLOSE": self.stackref_close, - "PyStackRef_XCLOSE": self.stackref_close, "PyStackRef_CLOSE_SPECIALIZED": self.stackref_close_specialized, "PyStackRef_AsPyObjectSteal": self.stackref_steal, "DISPATCH": self.dispatch, "INSTRUCTION_SIZE": self.instruction_size, "POP_INPUT": self.pop_input, - "GO_TO_INSTRUCTION": self.go_to_instruction, + "stack_pointer": self.stack_pointer, } self.out = out + self.labels = labels def dispatch( self, tkn: Token, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: + if storage.spilled: + raise analysis_error("stack_pointer needs reloading before dispatch", tkn) self.emit(tkn) return False @@ -146,7 +150,7 @@ def deopt_if( self, tkn: Token, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: @@ -166,11 +170,18 @@ def deopt_if( exit_if = deopt_if + def goto_error(self, offset: int, label: str, storage: Storage) -> str: + if offset > 0: + return f"goto pop_{offset}_{label};" + if offset < 0: + storage.copy().flush(self.out) + return f"goto {label};" + def error_if( self, tkn: Token, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: @@ -188,30 +199,20 @@ def error_if( self.out.emit_at("if ", tkn) self.emit(lparen) emit_to(self.out, tkn_iter, "COMMA") - self.out.emit(") ") + self.out.emit(") {\n") label = next(tkn_iter).text next(tkn_iter) # RPAREN next(tkn_iter) # Semi colon storage.clear_inputs("at ERROR_IF") + c_offset = storage.stack.peek_offset() try: offset = -int(c_offset) except ValueError: offset = -1 - if offset > 0: - 
self.out.emit(f"goto pop_{offset}_") - self.out.emit(label) - self.out.emit(";\n") - elif offset == 0: - self.out.emit("goto ") - self.out.emit(label) - self.out.emit(";\n") - else: - self.out.emit("{\n") - storage.copy().flush(self.out) - self.out.emit("goto ") - self.out.emit(label) - self.out.emit(";\n") + self.out.emit(self.goto_error(offset, label, storage)) + self.out.emit("\n") + if not unconditional: self.out.emit("}\n") return not unconditional @@ -219,21 +220,21 @@ def error_no_pop( self, tkn: Token, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: next(tkn_iter) # LPAREN next(tkn_iter) # RPAREN next(tkn_iter) # Semi colon - self.out.emit_at("goto error;", tkn) + self.out.emit_at(self.goto_error(0, "error", storage), tkn) return False def decref_inputs( self, tkn: Token, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: @@ -269,7 +270,7 @@ def kill_inputs( self, tkn: Token, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: @@ -284,7 +285,7 @@ def kill( self, tkn: Token, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: @@ -320,31 +321,11 @@ def stackref_kill( live = var.name return True - def stackref_close( - self, - tkn: Token, - tkn_iter: TokenIterator, - uop: Uop, - storage: Storage, - inst: Instruction | None, - ) -> bool: - self.out.emit(tkn) - tkn = next(tkn_iter) - assert tkn.kind == "LPAREN" - self.out.emit(tkn) - name = next(tkn_iter) - self.out.emit(name) - if name.kind == "IDENTIFIER": - return self.stackref_kill(name, storage, True) - rparen = emit_to(self.out, tkn_iter, "RPAREN") - self.emit(rparen) - return True - def stackref_close_specialized( self, tkn: Token, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: @@ -374,7 +355,7 @@ def stackref_steal( self, tkn: Token, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: @@ -394,7 +375,7 @@ def sync_sp( self, tkn: Token, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: @@ -406,23 +387,32 @@ def sync_sp( self._print_storage(storage) return True - def go_to_instruction( + def stack_pointer( self, tkn: Token, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: - next(tkn_iter) - name = next(tkn_iter) - next(tkn_iter) - next(tkn_iter) - assert name.kind == "IDENTIFIER" - self.emit("\n") - self.emit(f"goto PREDICTED_{name.text};\n") + if storage.spilled: + raise analysis_error("stack_pointer is invalid when stack is spilled to memory", tkn) + self.emit(tkn) return True + def goto_label(self, goto: Token, label: Token, storage: Storage) -> None: + if label.text not in self.labels: + print(self.labels.keys()) + raise analysis_error(f"Label '{label.text}' does not exist", label) + label_node = self.labels[label.text] + if label_node.spilled: + if not storage.spilled: + self.emit_save(storage) + elif storage.spilled: + raise analysis_error("Cannot jump from spilled label without reloading the stack pointer", goto) + self.out.emit(goto) + self.out.emit(label) + def emit_save(self, storage: Storage) -> None: storage.save(self.out) self._print_storage(storage) @@ -431,7 +421,7 @@ def save_stack( self, tkn: Token, tkn_iter: TokenIterator, - 
uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: @@ -445,7 +435,7 @@ def pop_input( self, tkn: Token, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: @@ -472,7 +462,7 @@ def reload_stack( self, tkn: Token, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: @@ -485,7 +475,7 @@ def reload_stack( def instruction_size(self, tkn: Token, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: @@ -504,7 +494,7 @@ def _print_storage(self, storage: Storage) -> None: def _emit_if( self, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> tuple[bool, Token, Storage]: @@ -564,7 +554,7 @@ def _emit_if( def _emit_block( self, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, emit_first_brace: bool @@ -590,9 +580,15 @@ def _emit_block( self.out.start_line() line = tkn.line if tkn in escaping_calls: - if tkn != reload: + escape = escaping_calls[tkn] + if escape.kills is not None: + if tkn == reload: + self.emit_reload(storage) + self.stackref_kill(escape.kills, storage, True) + self.emit_save(storage) + elif tkn != reload: self.emit_save(storage) - _, reload = escaping_calls[tkn] + reload = escape.end elif tkn == reload: self.emit_reload(storage) if tkn.kind == "LBRACE": @@ -605,8 +601,9 @@ def _emit_block( return reachable, tkn, storage self.out.emit(tkn) elif tkn.kind == "GOTO": + label_tkn = next(tkn_iter) + self.goto_label(tkn, label_tkn, storage) reachable = False; - self.out.emit(tkn) elif tkn.kind == "IDENTIFIER": if tkn.text in self._replacers: if not self._replacers[tkn.text](tkn, tkn_iter, uop, storage, inst): @@ -634,20 +631,20 @@ def _emit_block( raise analysis_error(ex.args[0], tkn) from None raise analysis_error("Expecting closing brace. 
Reached end of file", tkn) - def emit_tokens( self, - uop: Uop, + code: CodeSection, storage: Storage, inst: Instruction | None, ) -> Storage: - tkn_iter = TokenIterator(uop.body) + tkn_iter = TokenIterator(code.body) self.out.start_line() - _, rbrace, storage = self._emit_block(tkn_iter, uop, storage, inst, False) + reachable, rbrace, storage = self._emit_block(tkn_iter, code, storage, inst, False) try: - self._print_storage(storage) - storage.push_outputs() - self._print_storage(storage) + if reachable: + self._print_storage(storage) + storage.push_outputs() + self._print_storage(storage) except StackError as ex: raise analysis_error(ex.args[0], rbrace) from None return storage diff --git a/Tools/cases_generator/lexer.py b/Tools/cases_generator/lexer.py index cf3c39762f29cb..6afca750be9b19 100644 --- a/Tools/cases_generator/lexer.py +++ b/Tools/cases_generator/lexer.py @@ -216,6 +216,8 @@ def choice(*opts: str) -> str: # A label in the DSL LABEL = "LABEL" kwds.append(LABEL) +SPILLED = "SPILLED" +kwds.append(SPILLED) keywords = {name.lower(): name for name in kwds} ANNOTATION = "ANNOTATION" diff --git a/Tools/cases_generator/optimizer_generator.py b/Tools/cases_generator/optimizer_generator.py index 5cfec4bfecbf07..6c33debd58e1fe 100644 --- a/Tools/cases_generator/optimizer_generator.py +++ b/Tools/cases_generator/optimizer_generator.py @@ -112,6 +112,9 @@ def emit_save(self, storage: Storage) -> None: def emit_reload(self, storage: Storage) -> None: pass + def goto_label(self, goto: Token, label: Token, storage: Storage) -> None: + self.out.emit(goto) + self.out.emit(label) def write_uop( override: Uop | None, @@ -145,7 +148,7 @@ def write_uop( cast = f"uint{cache.size*16}_t" out.emit(f"{type}{cache.name} = ({cast})this_instr->operand0;\n") if override: - emitter = OptimizerEmitter(out) + emitter = OptimizerEmitter(out, {}) # No reference management of inputs needed. 
for var in storage.inputs: # type: ignore[possibly-undefined] var.defined = False diff --git a/Tools/cases_generator/parser.py b/Tools/cases_generator/parser.py index 68bbb88719e682..696c5c16432990 100644 --- a/Tools/cases_generator/parser.py +++ b/Tools/cases_generator/parser.py @@ -13,6 +13,7 @@ AstNode, ) +CodeDef = InstDef | LabelDef def prettify_filename(filename: str) -> str: # Make filename more user-friendly and less platform-specific, diff --git a/Tools/cases_generator/parsing.py b/Tools/cases_generator/parsing.py index eb8c8a7ecd32e8..011f34de288871 100644 --- a/Tools/cases_generator/parsing.py +++ b/Tools/cases_generator/parsing.py @@ -153,6 +153,7 @@ class Pseudo(Node): @dataclass class LabelDef(Node): name: str + spilled: bool block: Block @@ -176,12 +177,15 @@ def definition(self) -> AstNode | None: @contextual def label_def(self) -> LabelDef | None: + spilled = False + if self.expect(lx.SPILLED): + spilled = True if self.expect(lx.LABEL): if self.expect(lx.LPAREN): if tkn := self.expect(lx.IDENTIFIER): if self.expect(lx.RPAREN): if block := self.block(): - return LabelDef(tkn.text, block) + return LabelDef(tkn.text, spilled, block) return None @contextual diff --git a/Tools/cases_generator/stack.py b/Tools/cases_generator/stack.py index 5121837ed8334b..729973f1e32758 100644 --- a/Tools/cases_generator/stack.py +++ b/Tools/cases_generator/stack.py @@ -570,7 +570,7 @@ def copy(self) -> "Storage": assert [v.name for v in inputs] == [v.name for v in self.inputs], (inputs, self.inputs) return Storage( new_stack, inputs, - self.copy_list(self.outputs), self.copy_list(self.peeks) + self.copy_list(self.outputs), self.copy_list(self.peeks), self.spilled ) def sanity_check(self) -> None: diff --git a/Tools/cases_generator/tier1_generator.py b/Tools/cases_generator/tier1_generator.py index 59ce5c95852d28..c7cf09e2ec4ede 100644 --- a/Tools/cases_generator/tier1_generator.py +++ b/Tools/cases_generator/tier1_generator.py @@ -184,25 +184,31 @@ def generate_tier1_labels( analysis: Analysis, outfile: TextIO, lines: bool ) -> None: out = CWriter(outfile, 2, lines) + emitter = Emitter(out, analysis.labels) out.emit("\n") for name, label in analysis.labels.items(): out.emit(f"{name}:\n") - for tkn in label.body: - out.emit(tkn) + out.emit("{\n") + storage = Storage(Stack(), [], [], []) + if label.spilled: + storage.spilled = 1 + out.emit("/* STACK SPILLED */\n") + emitter.emit_tokens(label, storage, None) out.emit("\n") + out.emit("}\n") out.emit("\n") def generate_tier1_cases( analysis: Analysis, outfile: TextIO, lines: bool ) -> None: out = CWriter(outfile, 2, lines) - emitter = Emitter(out) + emitter = Emitter(out, analysis.labels) out.emit("\n") for name, inst in sorted(analysis.instructions.items()): needs_this = uses_this(inst) out.emit("\n") out.emit(f"TARGET({name}) {{\n") - unused_guard = "(void)this_instr;\n" if inst.family is None else "" + unused_guard = "(void)this_instr;\n" if inst.properties.needs_prev: out.emit(f"_Py_CODEUNIT* const prev_instr = frame->instr_ptr;\n") if needs_this and not inst.is_target: @@ -220,6 +226,8 @@ def generate_tier1_cases( if needs_this: out.emit(f"_Py_CODEUNIT* const this_instr = next_instr - {inst.size};\n") out.emit(unused_guard) + if inst.properties.uses_opcode: + out.emit(f"opcode = {name};\n") if inst.family is not None: out.emit( f"static_assert({inst.family.size} == {inst.size-1}" diff --git a/Tools/cases_generator/tier2_generator.py b/Tools/cases_generator/tier2_generator.py index dd16a1a7eb28b5..5e23360cdc0aaf 100644 --- 
a/Tools/cases_generator/tier2_generator.py +++ b/Tools/cases_generator/tier2_generator.py @@ -9,6 +9,8 @@ Analysis, Instruction, Uop, + Label, + CodeSection, analyze_files, StackItem, analysis_error, @@ -65,51 +67,21 @@ def declare_variables(uop: Uop, out: CWriter) -> None: class Tier2Emitter(Emitter): - def __init__(self, out: CWriter): - super().__init__(out) + def __init__(self, out: CWriter, labels: dict[str, Label]): + super().__init__(out, labels) self._replacers["oparg"] = self.oparg - def error_if( - self, - tkn: Token, - tkn_iter: TokenIterator, - uop: Uop, - storage: Storage, - inst: Instruction | None, - ) -> bool: - self.out.emit_at("if ", tkn) - lparen = next(tkn_iter) - self.emit(lparen) - assert lparen.kind == "LPAREN" - first_tkn = next(tkn_iter) - self.out.emit(first_tkn) - emit_to(self.out, tkn_iter, "COMMA") - label = next(tkn_iter).text - next(tkn_iter) # RPAREN - next(tkn_iter) # Semi colon - self.emit(") JUMP_TO_ERROR();\n") - return not always_true(first_tkn) - - - def error_no_pop( - self, - tkn: Token, - tkn_iter: TokenIterator, - uop: Uop, - storage: Storage, - inst: Instruction | None, - ) -> bool: - next(tkn_iter) # LPAREN - next(tkn_iter) # RPAREN - next(tkn_iter) # Semi colon - self.out.emit_at("JUMP_TO_ERROR();", tkn) - return False + def goto_error(self, offset: int, label: str, storage: Storage) -> str: + # To do: Add jump targets for popping values. + if offset != 0: + storage.copy().flush(self.out) + return f"JUMP_TO_ERROR();" def deopt_if( self, tkn: Token, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: @@ -130,7 +102,7 @@ def exit_if( # type: ignore[override] self, tkn: Token, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: @@ -150,7 +122,7 @@ def oparg( self, tkn: Token, tkn_iter: TokenIterator, - uop: Uop, + uop: CodeSection, storage: Storage, inst: Instruction | None, ) -> bool: @@ -210,7 +182,7 @@ def generate_tier2( """ ) out = CWriter(outfile, 2, lines) - emitter = Tier2Emitter(out) + emitter = Tier2Emitter(out, analysis.labels) out.emit("\n") for name, uop in analysis.uops.items(): if uop.properties.tier == 1: diff --git a/Tools/gdb/libpython.py b/Tools/gdb/libpython.py index e0d92e21dc42b3..27aa6b0cc266d3 100755 --- a/Tools/gdb/libpython.py +++ b/Tools/gdb/libpython.py @@ -890,7 +890,7 @@ class PyLongObjectPtr(PyObjectPtr): def proxyval(self, visited): ''' - Python's Include/longinterpr.h has this declaration: + Python's Include/cpython/longinterpr.h has this declaration: typedef struct _PyLongValue { uintptr_t lv_tag; /* Number of digits, sign and flags */ @@ -909,8 +909,7 @@ def proxyval(self, visited): - 0: Positive - 1: Zero - 2: Negative - The third lowest bit of lv_tag is reserved for an immortality flag, but is - not currently used. + The third lowest bit of lv_tag is set to 1 for the small ints and 0 otherwise. where SHIFT can be either: #define PyLong_SHIFT 30 diff --git a/Tools/i18n/pygettext.py b/Tools/i18n/pygettext.py index f78ff16bff9039..d8a0e379ab82cb 100755 --- a/Tools/i18n/pygettext.py +++ b/Tools/i18n/pygettext.py @@ -1,41 +1,15 @@ #! /usr/bin/env python3 -# -*- coding: iso-8859-1 -*- -# Originally written by Barry Warsaw -# -# Minimally patched to make it even more xgettext compatible -# by Peter Funk -# -# 2002-11-22 Jürgen Hermann -# Added checks that _() only contains string literals, and -# command line args are resolved to module lists, i.e. 
you -# can now pass a filename, a module or package name, or a -# directory (including globbing chars, important for Win32). -# Made docstring fit in 80 chars wide displays using pydoc. -# -# for selftesting -try: - import fintl - _ = fintl.gettext -except ImportError: - _ = lambda s: s - -__doc__ = _("""pygettext -- Python equivalent of xgettext(1) +"""pygettext -- Python equivalent of xgettext(1) Many systems (Solaris, Linux, Gnu) provide extensive tools that ease the internationalization of C programs. Most of these tools are independent of the programming language and can be used from within Python programs. Martin von Loewis' work[1] helps considerably in this regard. -There's one problem though; xgettext is the program that scans source code -looking for message strings, but it groks only C (or C++). Python -introduces a few wrinkles, such as dual quoting characters, triple quoted -strings, and raw strings. xgettext understands none of this. - -Enter pygettext, which uses Python's standard tokenize module to scan -Python source code, generating .pot files identical to what GNU xgettext[2] -generates for C and C++ code. From there, the standard GNU tools can be -used. +pygettext uses Python's standard tokenize module to scan Python source +code, generating .pot files identical to what GNU xgettext[2] generates +for C and C++ code. From there, the standard GNU tools can be used. A word about marking Python strings as candidates for translation. GNU xgettext recognizes the following keywords: gettext, dgettext, dcgettext, @@ -61,6 +35,9 @@ option arguments is broken, and in these cases, pygettext just defines additional switches. +NOTE: The public interface of pygettext is limited to the command-line +interface only. The internal API is subject to change without notice. + Usage: pygettext [options] inputfile ... Options: @@ -153,16 +130,16 @@ conjunction with the -D option above. If `inputfile' is -, standard input is read. -""") +""" -import os +import ast +import getopt +import glob import importlib.machinery import importlib.util +import os import sys -import glob import time -import getopt -import ast import tokenize from collections import defaultdict from dataclasses import dataclass, field @@ -173,7 +150,7 @@ # The normal pot-file header. msgmerge and Emacs's po-mode work better if it's # there. -pot_header = _('''\ +pot_header = '''\ # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR ORGANIZATION # FIRST AUTHOR , YEAR. @@ -190,7 +167,7 @@ "Content-Transfer-Encoding: %(encoding)s\\n" "Generated-By: pygettext.py %(version)s\\n" -''') +''' def usage(code, msg=''): @@ -204,7 +181,7 @@ def make_escapes(pass_nonascii): global escapes, escape if pass_nonascii: # Allow non-ascii characters to pass through so that e.g. 'msgid - # "Höhe"' would result not result in 'msgid "H\366he"'. Otherwise we + # "Höhe"' would result not result in 'msgid "H\366he"'. Otherwise we # escape any character outside the 32..126 range. 
mod = 128 escape = escape_ascii @@ -224,6 +201,7 @@ def make_escapes(pass_nonascii): def escape_ascii(s, encoding): return ''.join(escapes[ord(c)] if ord(c) < 128 else c for c in s) + def escape_nonascii(s, encoding): return ''.join(escapes[b] for b in s.encode(encoding)) @@ -347,12 +325,6 @@ def add_location(self, filename, lineno, msgid_plural=None, *, is_docstring=Fals self.is_docstring |= is_docstring -def key_for(msgid, msgctxt=None): - if msgctxt is not None: - return (msgctxt, msgid) - return msgid - - class TokenEater: def __init__(self, options): self.__options = options @@ -373,6 +345,10 @@ def __call__(self, ttype, tstring, stup, etup, line): ## file=sys.stderr) self.__state(ttype, tstring, stup[0]) + @property + def messages(self): + return self.__messages + def __waiting(self, ttype, tstring, lineno): opts = self.__options # Do docstring extractions, if enabled @@ -416,7 +392,7 @@ def __waiting(self, ttype, tstring, lineno): if func_name not in opts.keywords: continue if len(call.args) != 1: - print(_( + print(( '*** %(file)s:%(lineno)s: Seen unexpected amount of' ' positional arguments in gettext call: %(source_segment)s' ) % { @@ -426,7 +402,7 @@ def __waiting(self, ttype, tstring, lineno): }, file=sys.stderr) continue if call.keywords: - print(_( + print(( '*** %(file)s:%(lineno)s: Seen unexpected keyword arguments' ' in gettext call: %(source_segment)s' ) % { @@ -437,7 +413,7 @@ def __waiting(self, ttype, tstring, lineno): continue arg = call.args[0] if not isinstance(arg, ast.Constant): - print(_( + print(( '*** %(file)s:%(lineno)s: Seen unexpected argument type' ' in gettext call: %(source_segment)s' ) % { @@ -532,7 +508,7 @@ def __addentry(self, msg, lineno=None, *, is_docstring=False): lineno = self.__lineno msgctxt = msg.get('msgctxt') msgid_plural = msg.get('msgid_plural') - key = key_for(msgid, msgctxt) + key = self._key_for(msgid, msgctxt) if key in self.__messages: self.__messages[key].add_location( self.__curfile, @@ -549,8 +525,14 @@ def __addentry(self, msg, lineno=None, *, is_docstring=False): is_docstring=is_docstring, ) + @staticmethod + def _key_for(msgid, msgctxt=None): + if msgctxt is not None: + return (msgctxt, msgid) + return msgid + def warn_unexpected_token(self, token): - print(_( + print(( '*** %(file)s:%(lineno)s: Seen unexpected token "%(token)s"' ) % { 'token': token, @@ -562,58 +544,58 @@ def set_filename(self, filename): self.__curfile = filename self.__freshmodule = 1 - def write(self, fp): - options = self.__options - timestamp = time.strftime('%Y-%m-%d %H:%M%z') - encoding = fp.encoding if fp.encoding else 'UTF-8' - print(pot_header % {'time': timestamp, 'version': __version__, - 'charset': encoding, - 'encoding': '8bit'}, file=fp) - - # Sort locations within each message by filename and lineno - sorted_keys = [ - (key, sorted(msg.locations)) - for key, msg in self.__messages.items() - ] - # Sort messages by locations - # For example, a message with locations [('test.py', 1), ('test.py', 2)] will - # appear before a message with locations [('test.py', 1), ('test.py', 3)] - sorted_keys.sort(key=itemgetter(1)) - - for key, locations in sorted_keys: - msg = self.__messages[key] - if options.writelocations: - # location comments are different b/w Solaris and GNU: - if options.locationstyle == options.SOLARIS: - for location in locations: - print(f'# File: {location.filename}, line: {location.lineno}', file=fp) - elif options.locationstyle == options.GNU: - # fit as many locations on one line, as long as the - # resulting line length doesn't 
exceed 'options.width' - locline = '#:' - for location in locations: - s = f' {location.filename}:{location.lineno}' - if len(locline) + len(s) <= options.width: - locline = locline + s - else: - print(locline, file=fp) - locline = f'#:{s}' - if len(locline) > 2: + +def write_pot_file(messages, options, fp): + timestamp = time.strftime('%Y-%m-%d %H:%M%z') + encoding = fp.encoding if fp.encoding else 'UTF-8' + print(pot_header % {'time': timestamp, 'version': __version__, + 'charset': encoding, + 'encoding': '8bit'}, file=fp) + + # Sort locations within each message by filename and lineno + sorted_keys = [ + (key, sorted(msg.locations)) + for key, msg in messages.items() + ] + # Sort messages by locations + # For example, a message with locations [('test.py', 1), ('test.py', 2)] will + # appear before a message with locations [('test.py', 1), ('test.py', 3)] + sorted_keys.sort(key=itemgetter(1)) + + for key, locations in sorted_keys: + msg = messages[key] + if options.writelocations: + # location comments are different b/w Solaris and GNU: + if options.locationstyle == options.SOLARIS: + for location in locations: + print(f'# File: {location.filename}, line: {location.lineno}', file=fp) + elif options.locationstyle == options.GNU: + # fit as many locations on one line, as long as the + # resulting line length doesn't exceed 'options.width' + locline = '#:' + for location in locations: + s = f' {location.filename}:{location.lineno}' + if len(locline) + len(s) <= options.width: + locline = locline + s + else: print(locline, file=fp) - if msg.is_docstring: - # If the entry was gleaned out of a docstring, then add a - # comment stating so. This is to aid translators who may wish - # to skip translating some unimportant docstrings. - print('#, docstring', file=fp) - if msg.msgctxt is not None: - print('msgctxt', normalize(msg.msgctxt, encoding), file=fp) - print('msgid', normalize(msg.msgid, encoding), file=fp) - if msg.msgid_plural is not None: - print('msgid_plural', normalize(msg.msgid_plural, encoding), file=fp) - print('msgstr[0] ""', file=fp) - print('msgstr[1] ""\n', file=fp) - else: - print('msgstr ""\n', file=fp) + locline = f'#:{s}' + if len(locline) > 2: + print(locline, file=fp) + if msg.is_docstring: + # If the entry was gleaned out of a docstring, then add a + # comment stating so. This is to aid translators who may wish + # to skip translating some unimportant docstrings. 
+ print('#, docstring', file=fp) + if msg.msgctxt is not None: + print('msgctxt', normalize(msg.msgctxt, encoding), file=fp) + print('msgid', normalize(msg.msgid, encoding), file=fp) + if msg.msgid_plural is not None: + print('msgid_plural', normalize(msg.msgid_plural, encoding), file=fp) + print('msgstr[0] ""', file=fp) + print('msgstr[1] ""\n', file=fp) + else: + print('msgstr ""\n', file=fp) def main(): @@ -677,7 +659,7 @@ class Options: elif opt in ('-S', '--style'): options.locationstyle = locations.get(arg.lower()) if options.locationstyle is None: - usage(1, _('Invalid value for --style: %s') % arg) + usage(1, f'Invalid value for --style: {arg}') elif opt in ('-o', '--output'): options.outfile = arg elif opt in ('-p', '--output-dir'): @@ -685,13 +667,13 @@ class Options: elif opt in ('-v', '--verbose'): options.verbose = 1 elif opt in ('-V', '--version'): - print(_('pygettext.py (xgettext for Python) %s') % __version__) + print(f'pygettext.py (xgettext for Python) {__version__}') sys.exit(0) elif opt in ('-w', '--width'): try: options.width = int(arg) except ValueError: - usage(1, _('--width argument must be an integer: %s') % arg) + usage(1, f'--width argument must be an integer: {arg}') elif opt in ('-x', '--exclude-file'): options.excludefilename = arg elif opt in ('-X', '--no-docstrings'): @@ -719,8 +701,8 @@ class Options: with open(options.excludefilename) as fp: options.toexclude = fp.readlines() except IOError: - print(_( - "Can't read --exclude-file: %s") % options.excludefilename, file=sys.stderr) + print(f"Can't read --exclude-file: {options.excludefilename}", + file=sys.stderr) sys.exit(1) else: options.toexclude = [] @@ -739,12 +721,12 @@ class Options: for filename in args: if filename == '-': if options.verbose: - print(_('Reading standard input')) + print('Reading standard input') fp = sys.stdin.buffer closep = 0 else: if options.verbose: - print(_('Working on %s') % filename) + print(f'Working on {filename}') fp = open(filename, 'rb') closep = 1 try: @@ -771,7 +753,7 @@ class Options: fp = open(options.outfile, 'w') closep = 1 try: - eater.write(fp) + write_pot_file(eater.messages, options, fp) finally: if closep: fp.close() @@ -779,7 +761,3 @@ class Options: if __name__ == '__main__': main() - # some more test strings - # this one creates a warning - _('*** Seen unexpected token "%(token)s"') % {'token': 'test'} - _('more' 'than' 'one' 'string') diff --git a/Tools/jit/README.md b/Tools/jit/README.md index 801c64e4059ccc..4107265754f6ec 100644 --- a/Tools/jit/README.md +++ b/Tools/jit/README.md @@ -3,6 +3,8 @@ The JIT Compiler This version of CPython can be built with an experimental just-in-time compiler[^pep-744]. While most everything you already know about building and using CPython is unchanged, you will probably need to install a compatible version of LLVM first. +Python 3.11 or newer is required to build the JIT. + ## Installing LLVM The JIT compiler does not require end users to install any third-party dependencies, but part of it must be *built* using LLVM[^why-llvm]. You are *not* required to build the rest of CPython using LLVM, or even the same version of LLVM (in fact, this is uncommon). @@ -54,7 +56,7 @@ For `PCbuild`-based builds, pass the new `--experimental-jit` option to `build.b For all other builds, pass the new `--enable-experimental-jit` option to `configure`. -Otherwise, just configure and build as you normally would. Cross-compiling "just works", since the JIT is built for the host platform. 
+Otherwise, just configure and build as you normally would. Cross-compiling "just works", since the JIT is built for the host platform. The JIT can also be enabled or disabled using the `PYTHON_JIT` environment variable, even on builds where it is enabled or disabled by default. More details about configuring CPython with the JIT and optional values for `--enable-experimental-jit` can be found [here](https://docs.python.org/dev/whatsnew/3.13.html#experimental-jit-compiler). diff --git a/Tools/scripts/summarize_stats.py b/Tools/scripts/summarize_stats.py index bc7ccfe33e777d..161af09183a282 100644 --- a/Tools/scripts/summarize_stats.py +++ b/Tools/scripts/summarize_stats.py @@ -545,6 +545,41 @@ def get_optimizer_stats(self) -> dict[str, tuple[int, int | None]]: ): (incorrect_keys, attempts), } + def get_jit_memory_stats(self) -> dict[Doc, tuple[int, int | None]]: + jit_total_memory_size = self._data["JIT total memory size"] + jit_code_size = self._data["JIT code size"] + jit_trampoline_size = self._data["JIT trampoline size"] + jit_data_size = self._data["JIT data size"] + jit_padding_size = self._data["JIT padding size"] + jit_freed_memory_size = self._data["JIT freed memory size"] + + return { + Doc( + "Total memory size", + "The total size of the memory allocated for the JIT traces", + ): (jit_total_memory_size, None), + Doc( + "Code size", + "The size of the memory allocated for the code of the JIT traces", + ): (jit_code_size, jit_total_memory_size), + Doc( + "Trampoline size", + "The size of the memory allocated for the trampolines of the JIT traces", + ): (jit_trampoline_size, jit_total_memory_size), + Doc( + "Data size", + "The size of the memory allocated for the data of the JIT traces", + ): (jit_data_size, jit_total_memory_size), + Doc( + "Padding size", + "The size of the memory allocated for the padding of the JIT traces", + ): (jit_padding_size, jit_total_memory_size), + Doc( + "Freed memory size", + "The size of the memory freed from the JIT traces", + ): (jit_freed_memory_size, jit_total_memory_size), + } + def get_histogram(self, prefix: str) -> list[tuple[int, int]]: rows = [] for k, v in self._data.items(): @@ -1161,16 +1196,31 @@ def calc_optimizer_table(stats: Stats) -> Rows: for label, (value, den) in optimizer_stats.items() ] - def calc_histogram_table(key: str, den: str) -> RowCalculator: + def calc_jit_memory_table(stats: Stats) -> Rows: + jit_memory_stats = stats.get_jit_memory_stats() + + return [ + ( + label, + Count(value), + Ratio(value, den, percentage=label != "Total memory size"), + ) + for label, (value, den) in jit_memory_stats.items() + ] + + def calc_histogram_table(key: str, den: str | None = None) -> RowCalculator: def calc(stats: Stats) -> Rows: histogram = stats.get_histogram(key) - denominator = stats.get(den) + + if den: + denominator = stats.get(den) + else: + denominator = 0 + for _, v in histogram: + denominator += v rows: Rows = [] - last_non_zero = 0 for k, v in histogram: - if v != 0: - last_non_zero = len(rows) rows.append( ( f"<= {k:,d}", @@ -1178,9 +1228,19 @@ def calc(stats: Stats) -> Rows: Ratio(v, denominator), ) ) - # Don't include any zero entries at the end - rows = rows[: last_non_zero + 1] - return rows + # Don't include any leading and trailing zero entries + start = 0 + end = len(rows) - 1 + + while start <= end: + if rows[start][1] == 0: + start += 1 + elif rows[end][1] == 0: + end -= 1 + else: + break + + return rows[start:end+1] return calc @@ -1214,6 +1274,28 @@ def iter_optimization_tables(base_stats: Stats, head_stats: 
Stats | None = None) yield Table(("", "Count:", "Ratio:"), calc_optimization_table, JoinMode.CHANGE) yield Table(("", "Count:", "Ratio:"), calc_optimizer_table, JoinMode.CHANGE) + yield Section( + "JIT memory stats", + "JIT memory stats", + [ + Table( + ("", "Size (bytes):", "Ratio:"), + calc_jit_memory_table, + JoinMode.CHANGE + ) + ], + ) + yield Section( + "JIT trace total memory histogram", + "JIT trace total memory histogram", + [ + Table( + ("Size (bytes)", "Count", "Ratio:"), + calc_histogram_table("Trace total memory size"), + JoinMode.CHANGE_NO_SORT, + ) + ], + ) for name, den in [ ("Trace length", "Optimization traces created"), ("Optimized trace length", "Optimization traces created"), diff --git a/Tools/tsan/supressions.txt b/Tools/tsan/suppressions.txt similarity index 100% rename from Tools/tsan/supressions.txt rename to Tools/tsan/suppressions.txt diff --git a/configure b/configure index 885c2cf7828d6c..3eb787f788bfb9 100755 --- a/configure +++ b/configure @@ -4132,6 +4132,7 @@ then case $MACHDEP in aix*) MACHDEP="aix";; + freebsd*) MACHDEP="freebsd";; linux-android*) MACHDEP="android";; linux*) MACHDEP="linux";; cygwin*) MACHDEP="cygwin";; diff --git a/configure.ac b/configure.ac index f89a0801948ca5..c0130b8082cd8a 100644 --- a/configure.ac +++ b/configure.ac @@ -365,6 +365,7 @@ then case $MACHDEP in aix*) MACHDEP="aix";; + freebsd*) MACHDEP="freebsd";; linux-android*) MACHDEP="android";; linux*) MACHDEP="linux";; cygwin*) MACHDEP="cygwin";;
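
For readers of the generators_common.py and tier2_generator.py hunks above: ERROR_IF and ERROR_NO_POP now route through a single goto_error() hook that each tier overrides, instead of tier 2 re-implementing error_if()/error_no_pop() wholesale. A minimal standalone sketch of the two behaviours, simplified to drop the stack-flush side effects and using illustrative helper names that are not part of the patch:

```python
def tier1_goto_error(offset: int, label: str) -> str:
    # Deep error exits jump to a pop_N_<label> target; offset <= 0 falls
    # through to the plain label (the real emitter also flushes the cached
    # stack state when the offset is negative).
    return f"goto pop_{offset}_{label};" if offset > 0 else f"goto {label};"

def tier2_goto_error(offset: int, label: str) -> str:
    # Tier 2 has a single error exit regardless of stack depth.
    return "JUMP_TO_ERROR();"

assert tier1_goto_error(2, "error") == "goto pop_2_error;"
assert tier1_goto_error(0, "error") == "goto error;"
assert tier2_goto_error(3, "error") == "JUMP_TO_ERROR();"
```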
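
The summarize_stats.py hunk likewise changes histogram tables to trim zero buckets from both ends rather than only the tail, while keeping zero buckets that sit between populated ones. A small self-contained sketch of that trimming, with an illustrative function name and sample data:

```python
def trim_zero_rows(rows: list[tuple[str, int]]) -> list[tuple[str, int]]:
    # Drop leading and trailing rows whose count is zero; interior zero
    # rows between populated buckets are kept.
    start, end = 0, len(rows) - 1
    while start <= end:
        if rows[start][1] == 0:
            start += 1
        elif rows[end][1] == 0:
            end -= 1
        else:
            break
    return rows[start:end + 1]

print(trim_zero_rows([("<= 8", 0), ("<= 16", 3), ("<= 32", 0), ("<= 64", 5), ("<= 128", 0)]))
# [('<= 16', 3), ('<= 32', 0), ('<= 64', 5)]
```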